hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c2f59f39c0d409f389961cf1d7651c68a90c8d9 | 7,092 | py | Python | src/ObjectDetection.py | mzur/unknot | 07cc75d1fc94b1767e12bd9d55c1eac13be1fbfe | [
"MIT"
] | null | null | null | src/ObjectDetection.py | mzur/unknot | 07cc75d1fc94b1767e12bd9d55c1eac13be1fbfe | [
"MIT"
] | null | null | null | src/ObjectDetection.py | mzur/unknot | 07cc75d1fc94b1767e12bd9d55c1eac13be1fbfe | [
"MIT"
] | 1 | 2022-01-26T08:13:09.000Z | 2022-01-26T08:13:09.000Z | import numpy as np
import os.path
import imgaug.augmenters as iaa
import json
from pyvips import Image as VipsImage
from . import PatchesCollection
from . import Dataset as ImageDataset
from . import utils
from .mrcnn import config as mrcnn_config
from .mrcnn import utils as mrcnn_utils
from .mrcnn import model as mrcnn_model
class Config(mrcnn_config.Config):
def __init__(self, train_patches, config={}):
self.NAME = 'unknot'
# Add one for the background class (0).
self.NUM_CLASSES = 2
# Disable validation since we do not have ground truth.
self.VALIDATION_STEPS = 0
self.MEAN_PIXEL = np.array(train_patches.mean_pixel)
self.AUGMENTATION = iaa.SomeOf((0, None), [
iaa.Fliplr(1.0),
iaa.Flipud(1.0),
iaa.Affine(rotate=[90, 180, 270]),
iaa.GaussianBlur(sigma=(1.0, 2.0)),
iaa.JpegCompression(compression=(25, 50)),
], random_order=True)
for key, value in config.items():
setattr(self, key, value)
super().__init__()
class TrainingConfig(Config):
def __init__(self, train_patches, config={}):
self.IMAGE_MAX_DIM = train_patches.crop_dimension
super().__init__(train_patches, config)
# In total, we want to train with about 2000 images per epoch.
self.STEPS_PER_EPOCH = round(2000 / self.IMAGES_PER_GPU)
class InferenceConfig(Config):
def __init__(self, train_patches, config={}):
self.IMAGES_PER_GPU = 1
self.IMAGE_MIN_DIM = 64
self.IMAGE_RESIZE_MODE = "pad64"
super().__init__(train_patches, config)
class Dataset(mrcnn_utils.Dataset):
def __init__(self, images, name='no_name', masks=[], classes={}, ignore_classes=[]):
super().__init__()
# Convert to the required dict with image IDs.
images = {k: v for k, v in enumerate(images)}
self.images = images
self.masks = masks
self.name = name
self.classes = classes
# Always ignore the background class.
self.ignore_classes = set([0] + ignore_classes)
def prepare(self):
for class_id, class_name in self.classes.items():
self.add_class(self.name, class_id, class_name)
for image_id, image_file in self.images.items():
self.add_image(self.name, image_id, image_file)
super().prepare()
def load_mask(self, image_index):
file = self.masks[image_index]
data = np.load(file, allow_pickle=True)
classes = []
masks = []
for mask in data['masks']:
source_class_id = 1
if source_class_id not in self.ignore_classes:
classes.append(self.map_source_class_id('{}.{}'.format(self.name, source_class_id)))
masks.append(mask)
if len(classes) == 0:
return super().load_mask(image_index)
classes = np.array(classes, dtype=np.int32)
masks = np.stack(masks, axis = 2).astype(np.bool)
return masks, classes
class TrainingDataset(Dataset):
def __init__(self, train_patches):
images = train_patches.get_images_paths()
masks = train_patches.get_masks_paths()
classes = {1: 'Interesting'}
super().__init__(images=images, masks=masks, classes=classes)
class InferenceDataset(Dataset):
def __init__(self, images):
classes = {1: 'Interesting'}
super().__init__(images=images, classes=classes)
class ObjectDetector(object):
def __init__(self, model_dir):
self.model_dir = model_dir
def perform_training(self, annotation_patches, scheme, config={}, initial_model=None):
if not isinstance(annotation_patches, PatchesCollection.PatchesCollection):
raise TypeError('The annotation patches must be a PatchesCollection.')
if not annotation_patches.exists:
raise RuntimeError('The annotation patches do not exist.')
utils.ensure_dir(self.model_dir)
train_config = TrainingConfig(annotation_patches, config)
train_dataset = TrainingDataset(annotation_patches)
train_config.display()
train_dataset.prepare()
model = mrcnn_model.MaskRCNN(mode="training", config=train_config, model_dir=self.model_dir)
if initial_model:
exclude_layers = [
"mrcnn_class_logits",
"mrcnn_bbox_fc",
"mrcnn_bbox",
"mrcnn_mask",
]
model.load_weights(initial_model, by_name=True, exclude=exclude_layers)
epochs = 0
for train_step in scheme:
print('Train step: ', train_step)
epochs += train_step['epochs']
model.train(train_dataset,
val_dataset=None,
learning_rate=train_step['learning_rate'],
epochs=epochs,
layers=train_step['layers'],
augmentation=train_config.AUGMENTATION
)
model_path = os.path.join(self.model_dir, "mask_rcnn_final.h5")
model.keras_model.save_weights(model_path)
def perform_inference(self, annotation_patches, dataset, target_dir):
if not isinstance(dataset, ImageDataset.Dataset):
raise TypeError('The dataset must be a Dataset.')
images = [image.path for image in dataset.get_test_images()]
config = InferenceConfig(annotation_patches)
dataset = InferenceDataset(images)
config.display()
dataset.prepare()
utils.ensure_dir(target_dir)
model_path = os.path.join(self.model_dir, "mask_rcnn_final.h5")
if not os.path.exists(model_path):
raise RuntimeError('The trained model file does not exist. Perform training first.')
model = mrcnn_model.MaskRCNN(mode="inference", config=config, model_dir=self.model_dir)
model.load_weights(model_path, by_name=True)
for i, image_info in enumerate(dataset.image_info):
print('Processing image {}'.format(os.path.basename(image_info['path'])))
image = dataset.load_image(i)
results = model.detect([image])
self.process_inference_result(results[0], image_info, target_dir)
def process_inference_result(self, result, image_info, target_dir):
filename = os.path.basename(image_info['path'])
points = []
for roi, score in zip(result['rois'], result['scores']):
# ROIs are stored as (y1, x1, y2, x2).
y = min(roi[0], roi[2])
x = min(roi[1], roi[3])
h = abs(roi[0] - roi[2])
w = abs(roi[1] - roi[3])
rx = round(w / 2)
ry = round(h / 2)
r = max(rx, ry)
points.append([int(x + rx), int(y + ry), int(r), float(score)])
path = os.path.join(target_dir, '{}.json'.format(filename))
with open(path, 'w') as outfile:
json.dump(points, outfile)
image = VipsImage.new_from_file(image_info['path'])
width, height = image.width, image.height
mask = np.zeros((height, width), dtype=np.bool)
for m in result['masks']:
mask += m
mask = mask.astype(np.uint8) * 255
path = os.path.join(target_dir, '{}.png'.format(filename))
image = VipsImage.new_from_memory(mask, width, height, 1, 'uchar')
image.write_to_file(path)
| 34.764706 | 98 | 0.654822 | import numpy as np
import os.path
import imgaug.augmenters as iaa
import json
from pyvips import Image as VipsImage
from . import PatchesCollection
from . import Dataset as ImageDataset
from . import utils
from .mrcnn import config as mrcnn_config
from .mrcnn import utils as mrcnn_utils
from .mrcnn import model as mrcnn_model
class Config(mrcnn_config.Config):
def __init__(self, train_patches, config={}):
self.NAME = 'unknot'
self.NUM_CLASSES = 2
self.VALIDATION_STEPS = 0
self.MEAN_PIXEL = np.array(train_patches.mean_pixel)
self.AUGMENTATION = iaa.SomeOf((0, None), [
iaa.Fliplr(1.0),
iaa.Flipud(1.0),
iaa.Affine(rotate=[90, 180, 270]),
iaa.GaussianBlur(sigma=(1.0, 2.0)),
iaa.JpegCompression(compression=(25, 50)),
], random_order=True)
for key, value in config.items():
setattr(self, key, value)
super().__init__()
class TrainingConfig(Config):
def __init__(self, train_patches, config={}):
self.IMAGE_MAX_DIM = train_patches.crop_dimension
super().__init__(train_patches, config)
self.STEPS_PER_EPOCH = round(2000 / self.IMAGES_PER_GPU)
class InferenceConfig(Config):
def __init__(self, train_patches, config={}):
self.IMAGES_PER_GPU = 1
self.IMAGE_MIN_DIM = 64
self.IMAGE_RESIZE_MODE = "pad64"
super().__init__(train_patches, config)
class Dataset(mrcnn_utils.Dataset):
def __init__(self, images, name='no_name', masks=[], classes={}, ignore_classes=[]):
super().__init__()
images = {k: v for k, v in enumerate(images)}
self.images = images
self.masks = masks
self.name = name
self.classes = classes
self.ignore_classes = set([0] + ignore_classes)
def prepare(self):
for class_id, class_name in self.classes.items():
self.add_class(self.name, class_id, class_name)
for image_id, image_file in self.images.items():
self.add_image(self.name, image_id, image_file)
super().prepare()
def load_mask(self, image_index):
file = self.masks[image_index]
data = np.load(file, allow_pickle=True)
classes = []
masks = []
for mask in data['masks']:
source_class_id = 1
if source_class_id not in self.ignore_classes:
classes.append(self.map_source_class_id('{}.{}'.format(self.name, source_class_id)))
masks.append(mask)
if len(classes) == 0:
return super().load_mask(image_index)
classes = np.array(classes, dtype=np.int32)
masks = np.stack(masks, axis = 2).astype(np.bool)
return masks, classes
class TrainingDataset(Dataset):
def __init__(self, train_patches):
images = train_patches.get_images_paths()
masks = train_patches.get_masks_paths()
classes = {1: 'Interesting'}
super().__init__(images=images, masks=masks, classes=classes)
class InferenceDataset(Dataset):
def __init__(self, images):
classes = {1: 'Interesting'}
super().__init__(images=images, classes=classes)
class ObjectDetector(object):
def __init__(self, model_dir):
self.model_dir = model_dir
def perform_training(self, annotation_patches, scheme, config={}, initial_model=None):
if not isinstance(annotation_patches, PatchesCollection.PatchesCollection):
raise TypeError('The annotation patches must be a PatchesCollection.')
if not annotation_patches.exists:
raise RuntimeError('The annotation patches do not exist.')
utils.ensure_dir(self.model_dir)
train_config = TrainingConfig(annotation_patches, config)
train_dataset = TrainingDataset(annotation_patches)
train_config.display()
train_dataset.prepare()
model = mrcnn_model.MaskRCNN(mode="training", config=train_config, model_dir=self.model_dir)
if initial_model:
exclude_layers = [
"mrcnn_class_logits",
"mrcnn_bbox_fc",
"mrcnn_bbox",
"mrcnn_mask",
]
model.load_weights(initial_model, by_name=True, exclude=exclude_layers)
epochs = 0
for train_step in scheme:
print('Train step: ', train_step)
epochs += train_step['epochs']
model.train(train_dataset,
val_dataset=None,
learning_rate=train_step['learning_rate'],
epochs=epochs,
layers=train_step['layers'],
augmentation=train_config.AUGMENTATION
)
model_path = os.path.join(self.model_dir, "mask_rcnn_final.h5")
model.keras_model.save_weights(model_path)
def perform_inference(self, annotation_patches, dataset, target_dir):
if not isinstance(dataset, ImageDataset.Dataset):
raise TypeError('The dataset must be a Dataset.')
images = [image.path for image in dataset.get_test_images()]
config = InferenceConfig(annotation_patches)
dataset = InferenceDataset(images)
config.display()
dataset.prepare()
utils.ensure_dir(target_dir)
model_path = os.path.join(self.model_dir, "mask_rcnn_final.h5")
if not os.path.exists(model_path):
raise RuntimeError('The trained model file does not exist. Perform training first.')
model = mrcnn_model.MaskRCNN(mode="inference", config=config, model_dir=self.model_dir)
model.load_weights(model_path, by_name=True)
for i, image_info in enumerate(dataset.image_info):
print('Processing image {}'.format(os.path.basename(image_info['path'])))
image = dataset.load_image(i)
results = model.detect([image])
self.process_inference_result(results[0], image_info, target_dir)
def process_inference_result(self, result, image_info, target_dir):
filename = os.path.basename(image_info['path'])
points = []
for roi, score in zip(result['rois'], result['scores']):
y = min(roi[0], roi[2])
x = min(roi[1], roi[3])
h = abs(roi[0] - roi[2])
w = abs(roi[1] - roi[3])
rx = round(w / 2)
ry = round(h / 2)
r = max(rx, ry)
points.append([int(x + rx), int(y + ry), int(r), float(score)])
path = os.path.join(target_dir, '{}.json'.format(filename))
with open(path, 'w') as outfile:
json.dump(points, outfile)
image = VipsImage.new_from_file(image_info['path'])
width, height = image.width, image.height
mask = np.zeros((height, width), dtype=np.bool)
for m in result['masks']:
mask += m
mask = mask.astype(np.uint8) * 255
path = os.path.join(target_dir, '{}.png'.format(filename))
image = VipsImage.new_from_memory(mask, width, height, 1, 'uchar')
image.write_to_file(path)
| true | true |
1c2f5c3293a33761af41af44889aab4c1a8debe8 | 8,223 | py | Python | test/IECoreScene/SmoothSkinningDataTest.py | aitorvfx/cortex | c0c27794fc67ccfce68b064e284747165c49ef1c | [
"BSD-3-Clause"
] | 5 | 2015-09-13T14:49:30.000Z | 2017-02-04T21:04:59.000Z | test/IECoreScene/SmoothSkinningDataTest.py | aitorvfx/cortex | c0c27794fc67ccfce68b064e284747165c49ef1c | [
"BSD-3-Clause"
] | 1 | 2018-11-07T19:40:15.000Z | 2018-11-07T19:40:15.000Z | test/IECoreScene/SmoothSkinningDataTest.py | aitorvfx/cortex | c0c27794fc67ccfce68b064e284747165c49ef1c | [
"BSD-3-Clause"
] | 3 | 2015-02-03T17:13:40.000Z | 2022-01-07T15:55:00.000Z | ##########################################################################
#
# Copyright (c) 2007-2013, Image Engine Design Inc. All rights reserved.
#
# Copyright 2010 Dr D Studios Pty Limited (ACN 127 184 954) (Dr. D Studios),
# its affiliates and/or its licensors.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
"""Unit test for SmoothSkinningData binding"""
import imath
import IECore
import IECoreScene
import os
import unittest
class TestSmoothSkinningData( unittest.TestCase ) :
def testData( self ) :
# test the object
s = IECoreScene.SmoothSkinningData()
self.assertEqual( s.influenceNames(), IECore.StringVectorData() )
self.assertEqual( s.influencePose(), IECore.M44fVectorData() )
self.assertEqual( s.pointIndexOffsets(), IECore.IntVectorData() )
self.assertEqual( s.pointInfluenceCounts(), IECore.IntVectorData() )
self.assertEqual( s.pointInfluenceIndices(), IECore.IntVectorData() )
self.assertEqual( s.pointInfluenceWeights(), IECore.FloatVectorData() )
self.assertEqual( s, s )
self.assertEqual( s, s.copy() )
self.assertEqual( s, IECoreScene.SmoothSkinningData() )
self.assertRaises( Exception, s.validate() )
def testIO( self ) :
# test fileIndexedIO, read and write
ok_jn = IECore.StringVectorData( [ 'jointA', 'jointB' ] )
ok_ip = IECore.M44fVectorData( [imath.M44f(),imath.M44f()] )
ok_pio = IECore.IntVectorData( [0, 2, 4] )
ok_pic = IECore.IntVectorData( [2, 2, 1] )
ok_pii = IECore.IntVectorData( [0, 1, 0, 1, 1] )
ok_piw = IECore.FloatVectorData( [0.5, 0.5, 0.2, 0.8, 1.0] )
s = IECoreScene.SmoothSkinningData(ok_jn, ok_ip, ok_pio, ok_pic, ok_pii, ok_piw)
iface = IECore.IndexedIO.create( "test/IECore/ssd.fio", IECore.IndexedIO.OpenMode.Write )
s.save( iface, "test" )
ss = IECore.Object.load( iface, "test" )
self.assertEqual( s, ss )
def testDataStorage(self ):
#test the object can store data
ok_jn = IECore.StringVectorData( [ 'jointA', 'jointB' ] )
ok_ip = IECore.M44fVectorData( [imath.M44f(),imath.M44f()] )
ok_pio = IECore.IntVectorData( [0, 2, 4] )
ok_pic = IECore.IntVectorData( [2, 2, 1] )
ok_pii = IECore.IntVectorData( [0, 1, 0, 1, 1] )
ok_piw = IECore.FloatVectorData( [0.5, 0.5, 0.2, 0.8, 1.0] )
s = IECoreScene.SmoothSkinningData(ok_jn, ok_ip, ok_pio, ok_pic, ok_pii, ok_piw)
self.assertEqual( s.influenceNames() , ok_jn )
self.assertEqual( s.influencePose() , ok_ip )
self.assertEqual( s.pointIndexOffsets() , ok_pio )
self.assertEqual( s.pointInfluenceCounts() , ok_pic )
self.assertEqual( s.pointInfluenceIndices() , ok_pii )
self.assertEqual( s.pointInfluenceWeights() , ok_piw )
self.assertEqual( s, s )
self.assertEqual( s, s.copy() )
self.assertRaises( Exception, s.validate() )
iface = IECore.IndexedIO.create( "test/IECore/ssd.fio", IECore.IndexedIO.OpenMode.Write )
s.save( iface, "test" )
ss = IECore.Object.load( iface, "test" )
self.assertEqual( s, ss )
def testValidate(self):
# good data
ok_jn = IECore.StringVectorData( [ 'jointA', 'jointB' ] )
ok_ip = IECore.M44fVectorData( [imath.M44f(),imath.M44f()] )
ok_pio = IECore.IntVectorData( [0, 2, 4] )
ok_pic = IECore.IntVectorData( [2, 2, 1] )
ok_pii = IECore.IntVectorData( [0, 1, 0, 1, 1] )
ok_piw = IECore.FloatVectorData( [0.5, 0.5, 0.2, 0.8, 1.0] )
# data with invalid nr of elements
iv_jn = IECore.StringVectorData( [ 'jointA', 'jointB', 'jointC' ] )
iv_ip = IECore.M44fVectorData( [imath.M44f()] )
iv_pio1 = IECore.IntVectorData( [0, 2, 4, 666] )
iv_pic1 = IECore.IntVectorData( [2, 2 ] )
iv_pii1 = IECore.IntVectorData( [0, 1, 0, 1, 1, 666] )
iv_piw = IECore.FloatVectorData( [0.5, 0.5, 0.2] )
# data with invalid ids
iv_pio2 = IECore.IntVectorData( [0, 2, 666] )
iv_pii2 = IECore.IntVectorData( [0, 1, 666, 1, 1] )
# data with invalid counts
iv_pic2 = IECore.IntVectorData( [2, 0, 1 ] )
# data with count / index mismatch
iv_pio3 = IECore.IntVectorData( [0, 3, 4] )
iv_pic3 = IECore.IntVectorData( [3, 1, 1] )
# test all is ok
IECoreScene.SmoothSkinningData(ok_jn, ok_ip, ok_pio, ok_pic, ok_pii, ok_piw).validate()
# test wrong nr of influenceNames, influencePose
self.assertRaises( Exception, IECoreScene.SmoothSkinningData(iv_jn, ok_ip, ok_pio, ok_pic, ok_pii, ok_piw).validate )
self.assertRaises( Exception, IECoreScene.SmoothSkinningData(ok_jn, iv_ip, ok_pio, ok_pic, ok_pii, ok_piw).validate )
# test wrong nr of pointIndexOffsets, pointInfluenceCounts
self.assertRaises( Exception, IECoreScene.SmoothSkinningData(ok_jn, ok_ip, iv_pio1, ok_pic, ok_pii, ok_piw).validate )
self.assertRaises( Exception, IECoreScene.SmoothSkinningData(ok_jn, ok_ip, ok_pio, iv_pic1, ok_pii, ok_piw).validate )
# test wrong nr of pointInfluenceIndices, pointInfluenceWeights
self.assertRaises( Exception, IECoreScene.SmoothSkinningData(ok_jn, ok_ip, ok_pio, ok_pic, iv_pii1, ok_piw).validate )
self.assertRaises( Exception, IECoreScene.SmoothSkinningData(ok_jn, ok_ip, ok_pio, ok_pic, ok_pii, iv_piw).validate )
# test invalid ids
self.assertRaises( Exception, IECoreScene.SmoothSkinningData(ok_jn, ok_ip, iv_pio2, ok_pic, ok_pii, ok_piw).validate )
self.assertRaises( Exception, IECoreScene.SmoothSkinningData(ok_jn, ok_ip, ok_pio, ok_pic, iv_pii2, ok_piw).validate )
# test wrong counts
self.assertRaises( Exception, IECoreScene.SmoothSkinningData(ok_jn, ok_ip, ok_pio, iv_pic2, ok_pii, ok_piw).validate )
# test count id mismatching
self.assertRaises( Exception, IECoreScene.SmoothSkinningData(ok_jn, ok_ip, iv_pio3, ok_pic, ok_pii, ok_piw).validate )
self.assertRaises( Exception, IECoreScene.SmoothSkinningData(ok_jn, ok_ip, ok_pio, iv_pic3, ok_pii, ok_piw).validate )
# todo: add reference test data we are happy with
# def testRef(self):
# load reference data we are sure is cool
# ss = Reader.create( "test/IECore/data/cobFiles/smoothSkinningData.cob" ).read()
# self.assert_( ss.isValid() );
def tearDown( self ) :
if os.path.isfile("test/IECore/ssd.fio"):
os.remove("test/IECore/ssd.fio")
if __name__ == "__main__":
unittest.main()
| 47.531792 | 126 | 0.661802 | on, IECoreScene.SmoothSkinningData(ok_jn, ok_ip, ok_pio, iv_pic3, ok_pii, ok_piw).validate )
def tearDown( self ) :
if os.path.isfile("test/IECore/ssd.fio"):
os.remove("test/IECore/ssd.fio")
if __name__ == "__main__":
unittest.main()
| true | true |
1c2f5c3e47e8669740635666bf04fe5e71169c9e | 512 | py | Python | displ/pwscf/extractQEBands_test.py | tflovorn/displ | 094c194c54f02d463353075c6ca82f457f1247fa | [
"MIT"
] | 4 | 2018-04-09T20:39:24.000Z | 2021-06-19T12:21:52.000Z | displ/pwscf/extractQEBands_test.py | tflovorn/displ | 094c194c54f02d463353075c6ca82f457f1247fa | [
"MIT"
] | null | null | null | displ/pwscf/extractQEBands_test.py | tflovorn/displ | 094c194c54f02d463353075c6ca82f457f1247fa | [
"MIT"
] | 4 | 2018-04-09T20:39:41.000Z | 2021-06-19T12:21:53.000Z | import unittest
from displ.pwscf.extractQEBands import extractQEBands
class TestExtractQEBands(unittest.TestCase):
def test_ExtractQEBandsTa(self):
nbnd, nks, evlist = extractQEBands("test_data/Ta110_bands_test.dat")
self.assertEqual(nbnd, 112)
self.assertEqual(nks, 181)
self.assertEqual(evlist[0][0], (0.0, 0.0, 0.0))
self.assertEqual(evlist[0][1][0], -215.189)
self.assertEqual(evlist[0][1][nbnd-1], 43.570)
if __name__ == "__main__":
unittest.main()
| 34.133333 | 76 | 0.681641 | import unittest
from displ.pwscf.extractQEBands import extractQEBands
class TestExtractQEBands(unittest.TestCase):
def test_ExtractQEBandsTa(self):
nbnd, nks, evlist = extractQEBands("test_data/Ta110_bands_test.dat")
self.assertEqual(nbnd, 112)
self.assertEqual(nks, 181)
self.assertEqual(evlist[0][0], (0.0, 0.0, 0.0))
self.assertEqual(evlist[0][1][0], -215.189)
self.assertEqual(evlist[0][1][nbnd-1], 43.570)
if __name__ == "__main__":
unittest.main()
| true | true |
1c2f5c6a7204ca7918540349bdc9cb4d1d0fa24b | 1,075 | py | Python | data/migrations/versions/d42c175b439a_backfill_state_id_and_make_it_unique.py | giuseppe/quay | a1b7e4b51974edfe86f66788621011eef2667e6a | [
"Apache-2.0"
] | 2,027 | 2019-11-12T18:05:48.000Z | 2022-03-31T22:25:04.000Z | data/migrations/versions/d42c175b439a_backfill_state_id_and_make_it_unique.py | giuseppe/quay | a1b7e4b51974edfe86f66788621011eef2667e6a | [
"Apache-2.0"
] | 496 | 2019-11-12T18:13:37.000Z | 2022-03-31T10:43:45.000Z | data/migrations/versions/d42c175b439a_backfill_state_id_and_make_it_unique.py | giuseppe/quay | a1b7e4b51974edfe86f66788621011eef2667e6a | [
"Apache-2.0"
] | 249 | 2019-11-12T18:02:27.000Z | 2022-03-22T12:19:19.000Z | """
Backfill state_id and make it unique.
Revision ID: d42c175b439a
Revises: 3e8cc74a1e7b
Create Date: 2017-01-18 15:11:01.635632
"""
# revision identifiers, used by Alembic.
revision = "d42c175b439a"
down_revision = "3e8cc74a1e7b"
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(op, tables, tester):
# Backfill the queueitem table's state_id field with unique values for all entries which are
# empty.
conn = op.get_bind()
conn.execute("update queueitem set state_id = id where state_id = ''")
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index("queueitem_state_id", table_name="queueitem")
op.create_index("queueitem_state_id", "queueitem", ["state_id"], unique=True)
# ### end Alembic commands ###
def downgrade(op, tables, tester):
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index("queueitem_state_id", table_name="queueitem")
op.create_index("queueitem_state_id", "queueitem", ["state_id"], unique=False)
# ### end Alembic commands ###
| 31.617647 | 96 | 0.712558 |
revision = "d42c175b439a"
down_revision = "3e8cc74a1e7b"
import sqlalchemy as sa
from sqlalchemy.dialects import mysql
def upgrade(op, tables, tester):
# empty.
conn = op.get_bind()
conn.execute("update queueitem set state_id = id where state_id = ''")
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index("queueitem_state_id", table_name="queueitem")
op.create_index("queueitem_state_id", "queueitem", ["state_id"], unique=True)
# ### end Alembic commands ###
def downgrade(op, tables, tester):
# ### commands auto generated by Alembic - please adjust! ###
op.drop_index("queueitem_state_id", table_name="queueitem")
op.create_index("queueitem_state_id", "queueitem", ["state_id"], unique=False)
# ### end Alembic commands ###
| true | true |
1c2f5c71d6e86238f8d4f38b52e27d16894fa8e5 | 14,253 | py | Python | ami/flowchart/FlowchartGraphicsView.py | chuckie82/ami | 7adb72c709afe4c1af53ef7f0d2b0e3639c63bf3 | [
"BSD-3-Clause-LBNL"
] | 6 | 2018-05-31T21:37:15.000Z | 2022-01-24T15:22:46.000Z | ami/flowchart/FlowchartGraphicsView.py | chuckie82/ami | 7adb72c709afe4c1af53ef7f0d2b0e3639c63bf3 | [
"BSD-3-Clause-LBNL"
] | 68 | 2019-06-06T21:00:49.000Z | 2022-03-14T22:35:29.000Z | ami/flowchart/FlowchartGraphicsView.py | chuckie82/ami | 7adb72c709afe4c1af53ef7f0d2b0e3639c63bf3 | [
"BSD-3-Clause-LBNL"
] | 2 | 2020-12-13T01:53:05.000Z | 2021-07-19T04:56:51.000Z | from pyqtgraph.Qt import QtGui, QtCore, QtWidgets
from pyqtgraph.widgets.GraphicsView import GraphicsView
from pyqtgraph.graphicsItems.ViewBox import ViewBox
from pyqtgraph import GridItem, GraphicsWidget
from ami.flowchart.Node import NodeGraphicsItem, find_nearest
from ami.flowchart.library.common import SourceNode
def clamp(pos):
    """Snap *pos* to the nearest grid point and confine it to canvas bounds.

    Parameters
    ----------
    pos : QtCore.QPointF
        Scene/view position to normalize.

    Returns
    -------
    QtCore.QPointF
        Grid-snapped point with x in [0, 5e3] and y in [-900, 5e3].
    """
    x = min(max(find_nearest(pos.x()), 0), 5e3)
    y = min(max(find_nearest(pos.y()), -900), 5e3)
    return QtCore.QPointF(x, y)
class CommentName(GraphicsWidget):
    """Editable text label shown in the header of a comment box."""

    def __init__(self, parent=None):
        super().__init__(parent)
        # Editable Qt text item; starts with a placeholder prompt.
        label = QtWidgets.QGraphicsTextItem("Enter comment here", parent=self)
        label.setTextInteractionFlags(QtCore.Qt.TextEditorInteraction)
        self.label = label
        self.setGraphicsItem(label)

    def text(self):
        """Return the current comment text as a plain string."""
        return self.label.toPlainText()

    def setText(self, text):
        """Replace the displayed comment text with *text*."""
        self.label.setPlainText(text)
class CommentRect(GraphicsWidget):
    # Copyright 2015-2019 Ilgar Lunin, Pedro Cabrera
    # taken from pyflow
    """Resizable, movable comment box drawn over flowchart nodes.

    Nodes fully enclosed by the box become its children and are dragged
    along with it.  The box is looked up in the enclosing view's
    ``commentRects`` registry under ``self.id``.
    """

    # Fill and outline used by paint(); name-mangled class constants.
    __backgroundColor = QtGui.QColor(100, 100, 255, 50)
    __pen = QtGui.QPen(QtGui.QColor(255, 255, 255), 1.0, QtCore.Qt.DashLine)

    def __init__(self, view=None, mouseDownPos=QtCore.QPointF(0, 0), id=0):
        """Create a comment box anchored at *mouseDownPos*.

        Parameters
        ----------
        view : FlowchartViewBox or None
            When given, the box adds itself to the view immediately.
            NOTE(review): when ``view`` is None, ``self.view`` is never
            set, so destroy()/saveState()/mouseDragEvent would raise --
            confirm callers always attach a view before those are used.
        mouseDownPos : QtCore.QPointF
            Anchor corner of the rectangle (where the drag started).
            NOTE(review): mutable default argument -- safe only as long
            as the shared point is never mutated in place, which holds
            for the code visible here.
        id : int
            Key under which the view stores this box in ``commentRects``.
        """
        super().__init__()
        # Draw above grid and node background layers.
        self.setZValue(2)

        self.id = id

        # Header strip holding the editable comment label.
        self.headerLayout = QtGui.QGraphicsLinearLayout(QtCore.Qt.Horizontal)
        self.commentName = CommentName(parent=self)
        self.headerLayout.addItem(self.commentName)

        self.__mouseDownPos = mouseDownPos
        self.setPos(self.__mouseDownPos)
        self.resize(0, 0)
        # Only items fully inside the rect count as children.
        self.selectFullyIntersectedItems = True
        self.childNodes = set()

        self.buildMenu()

        if view:
            self.view = view
            self.view.addItem(self)

    def collidesWithItem(self, item):
        """Return True if *item* should be considered inside this box."""
        if self.selectFullyIntersectedItems:
            # Require full containment rather than mere intersection.
            return self.sceneBoundingRect().contains(item.sceneBoundingRect())

        return super().collidesWithItem(item)

    def setDragPoint(self, dragPoint):
        """Resize the box so it spans the anchor corner and *dragPoint*.

        The rectangle is normalized for drags in any direction, and a
        minimum size of 100x100 is enforced.
        """
        topLeft = QtCore.QPointF(self.__mouseDownPos)
        bottomRight = QtCore.QPointF(dragPoint)
        # Swap coordinates when dragging up/left of the anchor point.
        if dragPoint.x() < self.__mouseDownPos.x():
            topLeft.setX(dragPoint.x())
            bottomRight.setX(self.__mouseDownPos.x())
        if dragPoint.y() < self.__mouseDownPos.y():
            topLeft.setY(dragPoint.y())
            bottomRight.setY(self.__mouseDownPos.y())
        self.setPos(topLeft)
        self.resize(max(bottomRight.x() - topLeft.x(), 100),
                    max(bottomRight.y() - topLeft.y(), 100))

    def paint(self, painter, option, widget):
        """Draw the translucent blue rectangle with a dashed outline."""
        rect = self.windowFrameRect()
        painter.setBrush(self.__backgroundColor)
        painter.setPen(self.__pen)
        painter.drawRect(rect)

    def destroy(self):
        """Remove the box from the view and drop it from the registry."""
        self.view.removeItem(self)
        # Raises KeyError if the box was never registered under self.id.
        del self.view.commentRects[self.id]

    def nodeCreated(self, node):
        """Adopt a newly created *node* if it lies inside this box."""
        item = node.graphicsItem()
        if self.collidesWithItem(item):
            self.childNodes.add(item)

    def updateChildren(self, children):
        """Recompute the child set from the iterable of graphics items."""
        collided = set()

        for child in children:
            if self.collidesWithItem(child):
                collided.add(child)

        self.childNodes = collided

    def mouseDragEvent(self, ev):
        """Move the box (and its children) or hand off a resize drag.

        A drag starting in the 50x50 px bottom-right corner is ignored
        here and routed to the view (via ``view.commentRect``) which
        performs the resize; any other left drag translates the box and
        every child node by the same offset.
        """
        if ev.button() == QtCore.Qt.LeftButton:
            boundingRect = self.boundingRect()
            width = boundingRect.width()
            height = boundingRect.height()
            # Bottom-right resize-handle region.
            rect = QtCore.QRectF(width - 50, height - 50, 50, 50)

            if rect.contains(ev.pos()):
                ev.ignore()
                self.view.commentRect = self
            else:
                ev.accept()
                pos = self.pos()+self.mapToParent(ev.pos())-self.mapToParent(ev.lastPos())
                old_pos = self.pos()
                if ev.isFinish():
                    # Snap to the grid when the drag ends.
                    pos = clamp(pos)
                self.setPos(pos)
                diff = pos - old_pos
                for child in self.childNodes:
                    # NOTE(review): assumes ``diff`` is iterable (pyqtgraph
                    # Point); clamp() returns a plain QPointF on the final
                    # step -- verify unpacking still works there.
                    child.moveBy(*diff)

    def mouseClickEvent(self, ev):
        """Open the context menu on right click."""
        if int(ev.button()) == int(QtCore.Qt.RightButton):
            ev.accept()
            self.raiseContextMenu(ev)

    def buildMenu(self):
        """Build the right-click menu offering comment removal."""
        self.menu = QtGui.QMenu()
        self.menu.setTitle("Comment")
        self.menu.addAction("Remove Comment", self.destroy)

    def raiseContextMenu(self, ev):
        """Show this box's menu merged with the parents' context menus."""
        menu = self.scene().addParentContextMenus(self, self.menu, ev)
        pos = ev.screenPos()
        menu.popup(QtCore.QPoint(pos.x(), pos.y()))

    def saveState(self):
        """Return a JSON-serializable dict describing this box."""
        rect = self.sceneBoundingRect()
        # Store clamped view coordinates so a restore stays inside limits.
        topLeft = clamp(self.view.mapToView(rect.topLeft()))
        bottomRight = clamp(self.view.mapToView(rect.bottomRight()))
        return {'id': self.id,
                'text': self.commentName.text(),
                'topLeft': (topLeft.x(), topLeft.y()),
                'bottomRight': (bottomRight.x(), bottomRight.y())}

    def restoreState(self, state):
        """Restore id, text and geometry from a saveState() dict."""
        self.id = state['id']
        self.commentName.setText(state['text'])
        self.__mouseDownPos = QtCore.QPointF(*state['topLeft'])
        self.setDragPoint(QtCore.QPointF(*state['bottomRight']))
class SelectionRect(GraphicsWidget):
    # Copyright 2015-2019 Ilgar Lunin, Pedro Cabrera
    # taken from pyflow
    """Translucent rubber-band rectangle drawn while drag-selecting nodes."""

    # Fill and outline used by paint(); name-mangled class constants.
    __backgroundColor = QtGui.QColor(100, 100, 100, 50)
    __pen = QtGui.QPen(QtGui.QColor(255, 255, 255), 1.0, QtCore.Qt.DashLine)

    def __init__(self, view, mouseDownPos):
        super().__init__()
        # Draw above grid and node background layers.
        self.setZValue(2)
        self.view = view
        self.view.addItem(self)
        self.__mouseDownPos = mouseDownPos
        self.setPos(mouseDownPos)
        self.resize(0, 0)
        # Only fully enclosed items count as selected.
        self.selectFullyIntersectedItems = True

    def collidesWithItem(self, item):
        """Return True if *item* should be considered selected."""
        if not self.selectFullyIntersectedItems:
            return super().collidesWithItem(item)
        # Containment test: the item must lie entirely inside the rect.
        return self.sceneBoundingRect().contains(item.sceneBoundingRect())

    def setDragPoint(self, dragPoint):
        """Stretch the rectangle so it spans the anchor and *dragPoint*."""
        anchor = self.__mouseDownPos
        # Normalize so the rect is valid for drags in any direction.
        left = min(anchor.x(), dragPoint.x())
        top = min(anchor.y(), dragPoint.y())
        right = max(anchor.x(), dragPoint.x())
        bottom = max(anchor.y(), dragPoint.y())
        self.setPos(QtCore.QPointF(left, top))
        self.resize(right - left, bottom - top)

    def paint(self, painter, option, widget):
        """Draw the translucent grey rectangle with a dashed outline."""
        frame = self.windowFrameRect()
        painter.setBrush(self.__backgroundColor)
        painter.setPen(self.__pen)
        painter.drawRect(frame)

    def destroy(self):
        """Remove the rubber band from the view once the drag ends."""
        self.view.removeItem(self)
class FlowchartGraphicsView(GraphicsView):
    """GraphicsView hosting the flowchart scene; state handling is delegated
    to its central FlowchartViewBox."""

    # NOTE(review): these signals are declared but never emitted in this
    # file — presumably emitted/connected elsewhere; confirm before removal.
    sigHoverOver = QtCore.Signal(object)
    sigClicked = QtCore.Signal(object)

    def __init__(self, widget, *args):
        """*widget* is the owning flowchart widget; extra args go to GraphicsView."""
        super().__init__(*args, useOpenGL=False, background=0.75)
        self.widget = widget
        self.setAcceptDrops(True)
        self._vb = FlowchartViewBox(widget, lockAspect=True, invertY=True)
        self.setCentralItem(self._vb)
        self.setRenderHint(QtGui.QPainter.Antialiasing, True)

    def viewBox(self):
        """Return the central FlowchartViewBox."""
        return self._vb

    def dragEnterEvent(self, ev):
        # Accept every drag so the view box's dropEvent can do the filtering.
        ev.accept()

    def saveState(self):
        """Serialize view state (delegates to the view box)."""
        return self._vb.saveState()

    def restoreState(self, state):
        """Restore view state (delegates to the view box)."""
        self._vb.restoreState(state)
class FlowchartViewBox(ViewBox):
    """The flowchart canvas: hosts nodes on a grid and implements panning,
    rectangle selection, comment rectangles and drag-and-drop node creation,
    depending on the current mouse mode ("Pan" / "Select" / "Comment")."""

    def __init__(self, widget, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.widget = widget
        self.chart = widget.chart
        # Constrain zoom/pan to a fixed working area.
        self.setLimits(minXRange=200, minYRange=200,
                       xMin=-1000, yMin=-1000, xMax=5.2e3, yMax=5.2e3)
        self.addItem(GridItem())
        self.setAcceptDrops(True)
        self.setRange(xRange=(0, 800), yRange=(0, 800))
        self.mouseMode = "Pan"
        self.selectionRect = None    # live SelectionRect during a select-drag
        self.selected_nodes = []     # nodes highlighted by the last selection
        self.copy = False            # True after "Copy" until a new selection
        self.paste_pos = None        # scene position where "Paste" was invoked
        self.mouse_pos = None        # view position of the last context menu
        self.commentRect = None      # CommentRect being drawn/resized, if any
        self.commentId = 0           # next comment id to hand out
        self.commentRects = {}       # comment id -> CommentRect

    def setMouseMode(self, mode):
        """Select the drag behavior; one of "Select", "Pan" or "Comment"."""
        assert mode in ["Select", "Pan", "Comment"]
        self.mouseMode = mode

    def getMenu(self, ev):
        # called by ViewBox to create a new context menu
        self._fc_menu = QtGui.QMenu()
        self._subMenus = self.getContextMenus(ev)
        for menu in self._subMenus:
            self._fc_menu.addMenu(menu)
        if self.selected_nodes:
            self.selected_node_menu = QtGui.QMenu("Selection")
            if not self.copy:
                self.selected_node_menu.addAction("Copy", self.copySelectedNodes)
            else:
                # A copy is pending: offer Paste at the clicked position.
                self.selected_node_menu.addAction("Paste", self.pasteSelectedNodes)
                self.paste_pos = ev.pos()
            self.selected_node_menu.addAction("Delete", self.deleteSelectedNodes)
            self._fc_menu.addMenu(self.selected_node_menu)
        self.mouse_pos = self.mapToView(ev.pos())
        return self._fc_menu

    def copySelectedNodes(self):
        """Mark the current selection as copied (pasted by the next Paste)."""
        self.copy = True

    def pasteSelectedNodes(self):
        """Create duplicates of the copied nodes, laid out in a row."""
        # TODO figure out right positions and preserve topology?
        pos = self.mapToView(self.paste_pos)
        for node in self.selected_nodes:
            self.widget.chart.createNode(type(node).__name__, pos=pos, prompt=False)
            pos += QtCore.QPointF(200, 0)

    def deleteSelectedNodes(self):
        """Close (remove) every node in the current selection."""
        for node in self.selected_nodes:
            node.close()

    def getContextMenus(self, ev):
        # called by scene to add menus on to someone else's context menu
        sourceMenu = self.widget.buildSourceMenu(ev.scenePos())
        sourceMenu.setTitle("Add Source")
        operationMenu = self.widget.buildOperationMenu(ev.scenePos())
        operationMenu.setTitle("Add Operation")
        return [sourceMenu, operationMenu, ViewBox.getMenu(self, ev)]

    def decode_data(self, arr):
        """Decode a Qt item-model drag payload into a list of role->value maps."""
        data = []
        # NOTE(review): `item` is shared across iterations, so later rows
        # overwrite earlier roles; only data[0] is used by dropEvent — confirm
        # this sharing is intended.
        item = {}
        ds = QtCore.QDataStream(arr)
        while not ds.atEnd():
            # Row and column indices are read and discarded.
            ds.readInt32()
            ds.readInt32()
            map_items = ds.readInt32()
            for i in range(map_items):
                key = ds.readInt32()
                value = QtCore.QVariant()
                ds >> value
                item[QtCore.Qt.ItemDataRole(key)] = value
            data.append(item)
        return data

    def mouseDragEvent(self, ev):
        """Dispatch a drag to pan, rectangle-select, or comment drawing
        according to the current mouse mode."""
        ev.accept()
        if self.mouseMode == "Pan":
            super().mouseDragEvent(ev)
        elif self.mouseMode == "Select":
            if ev.isStart():
                self.selectionRect = SelectionRect(self, self.mapToView(ev.buttonDownPos()))
            if self.selectionRect:
                self.selectionRect.setDragPoint(self.mapToView(ev.pos()))
            if ev.isFinish():
                # Collect every node fully inside the rectangle, highlight it,
                # then dispose of the rubber band.
                self.selected_nodes = []
                for item in self.allChildren():
                    if not isinstance(item, NodeGraphicsItem):
                        continue
                    if self.selectionRect.collidesWithItem(item):
                        item.node.recolor("selected")
                        self.selected_nodes.append(item.node)
                self.copy = False
                self.selectionRect.destroy()
                self.selectionRect = None
        elif self.mouseMode == "Comment":
            if ev.isStart() and self.commentRect is None:
                pos = clamp(self.mapToView(ev.buttonDownPos()))
                self.commentRect = CommentRect(self, pos, self.commentId)
                # Newly created nodes that land inside the comment are adopted.
                self.chart.sigNodeCreated.connect(self.commentRect.nodeCreated)
                self.commentId += 1
            if self.commentRect:
                pos = clamp(self.mapToView(ev.pos()))
                self.commentRect.setDragPoint(pos)
            if ev.isFinish():
                # Register the comment and adopt the nodes it contains.
                self.commentRects[self.commentRect.id] = self.commentRect
                for item in self.allChildren():
                    if isinstance(item, NodeGraphicsItem) and self.commentRect.collidesWithItem(item):
                        self.commentRect.childNodes.add(item)
                self.commentRect = None

    def mousePressEvent(self, ev):
        """Left click clears the selection highlight and refreshes comment
        membership."""
        ev.accept()
        super().mousePressEvent(ev)
        if ev.button() == QtCore.Qt.LeftButton:
            for node in self.selected_nodes:
                node.recolor()
            # NOTE(review): `children` is a single-use filter iterator; only
            # the first comment in the loop sees the full node list — confirm
            # this is intended (a list(...) would give each comment all nodes).
            children = filter(lambda item: isinstance(item, NodeGraphicsItem), self.allChildren())
            for id, comment in self.commentRects.items():
                comment.updateChildren(children)

    def dropEvent(self, ev):
        """Create a node (or source node) from an item dragged off the tree."""
        if ev.mimeData().hasFormat('application/x-qabstractitemmodeldatalist'):
            arr = ev.mimeData().data('application/x-qabstractitemmodeldatalist')
            node = self.decode_data(arr)[0][0].value()
            try:
                # First, try the name as a registered operation node type.
                self.widget.chart.createNode(node, pos=self.mapToView(ev.pos()), prompt=True)
                ev.accept()
                return
            except KeyError:
                pass
            try:
                # Otherwise treat it as a data source name.
                node_type = self.widget.chart.source_library.getSourceType(node)
                if node not in self.widget.chart._graph:
                    node = SourceNode(name=node, terminals={'Out': {'io': 'out', 'ttype': node_type}})
                    self.widget.chart.addNode(node=node, pos=self.mapToView(ev.pos()))
                ev.accept()
                return
            except KeyError:
                pass
        else:
            ev.ignore()

    def saveState(self):
        """Serialize all comment rectangles."""
        state = {'comments': []}
        for id, comment in self.commentRects.items():
            state['comments'].append(comment.saveState())
        return state

    def restoreState(self, state):
        """Recreate comment rectangles from saveState() output and keep the
        id counter ahead of every restored id."""
        self.commentId = 0
        for commentState in state['comments']:
            comment = CommentRect(view=self)
            comment.restoreState(commentState)
            self.addItem(comment)
            self.commentRects[commentState['id']] = comment
            self.commentId = max(commentState['id']+1, self.commentId)
| 34.59466 | 102 | 0.601207 | from pyqtgraph.Qt import QtGui, QtCore, QtWidgets
from pyqtgraph.widgets.GraphicsView import GraphicsView
from pyqtgraph.graphicsItems.ViewBox import ViewBox
from pyqtgraph import GridItem, GraphicsWidget
from ami.flowchart.Node import NodeGraphicsItem, find_nearest
from ami.flowchart.library.common import SourceNode
def clamp(pos):
    """Snap *pos* to the node grid and clamp it to the canvas bounds."""
    x = find_nearest(pos.x())
    y = find_nearest(pos.y())
    # Canvas limits: x in [0, 5e3], y in [-900, 5e3].
    x = min(max(x, 0), 5e3)
    y = min(max(y, -900), 5e3)
    return QtCore.QPointF(x, y)
class CommentName(GraphicsWidget):
    """Editable text label shown in a CommentRect's header."""

    def __init__(self, parent=None):
        super().__init__(parent)
        self.label = QtWidgets.QGraphicsTextItem("Enter comment here", parent=self)
        # Allow in-place editing of the comment text.
        self.label.setTextInteractionFlags(QtCore.Qt.TextEditorInteraction)
        self.setGraphicsItem(self.label)

    def text(self):
        """Return the current comment text."""
        return self.label.toPlainText()

    def setText(self, text):
        """Replace the comment text."""
        self.label.setPlainText(text)
class CommentRect(GraphicsWidget):
    """Resizable, movable rectangle used to annotate/group flowchart nodes.

    Nodes fully contained in the rectangle become its children and are moved
    together with it; the header shows an editable text label."""

    __backgroundColor = QtGui.QColor(100, 100, 255, 50)
    __pen = QtGui.QPen(QtGui.QColor(255, 255, 255), 1.0, QtCore.Qt.DashLine)

    def __init__(self, view=None, mouseDownPos=QtCore.QPointF(0, 0), id=0):
        super().__init__()
        self.setZValue(2)
        self.id = id
        self.headerLayout = QtGui.QGraphicsLinearLayout(QtCore.Qt.Horizontal)
        self.commentName = CommentName(parent=self)
        self.headerLayout.addItem(self.commentName)
        self.__mouseDownPos = mouseDownPos   # anchor corner of the rectangle
        self.setPos(self.__mouseDownPos)
        self.resize(0, 0)
        self.selectFullyIntersectedItems = True
        self.childNodes = set()              # NodeGraphicsItems inside the rect
        self.buildMenu()
        # view may be None during restoreState(); the caller adds us later.
        if view:
            self.view = view
            self.view.addItem(self)

    def collidesWithItem(self, item):
        """Containment test: items must lie fully inside when
        selectFullyIntersectedItems is True."""
        if self.selectFullyIntersectedItems:
            return self.sceneBoundingRect().contains(item.sceneBoundingRect())
        return super().collidesWithItem(item)

    def setDragPoint(self, dragPoint):
        """Reshape the rectangle to span the anchor point and *dragPoint*,
        never smaller than 100x100 (keeps the header usable)."""
        topLeft = QtCore.QPointF(self.__mouseDownPos)
        bottomRight = QtCore.QPointF(dragPoint)
        if dragPoint.x() < self.__mouseDownPos.x():
            topLeft.setX(dragPoint.x())
            bottomRight.setX(self.__mouseDownPos.x())
        if dragPoint.y() < self.__mouseDownPos.y():
            topLeft.setY(dragPoint.y())
            bottomRight.setY(self.__mouseDownPos.y())
        self.setPos(topLeft)
        self.resize(max(bottomRight.x() - topLeft.x(), 100),
                    max(bottomRight.y() - topLeft.y(), 100))

    def paint(self, painter, option, widget):
        """Draw the translucent dashed rectangle."""
        rect = self.windowFrameRect()
        painter.setBrush(self.__backgroundColor)
        painter.setPen(self.__pen)
        painter.drawRect(rect)

    def destroy(self):
        """Remove the comment from the view and from the view's registry."""
        self.view.removeItem(self)
        del self.view.commentRects[self.id]

    def nodeCreated(self, node):
        """Adopt a newly created node if it lies inside this comment."""
        item = node.graphicsItem()
        if self.collidesWithItem(item):
            self.childNodes.add(item)

    def updateChildren(self, children):
        """Recompute which of *children* lie inside this comment."""
        collided = set()
        for child in children:
            if self.collidesWithItem(child):
                collided.add(child)
        self.childNodes = collided

    def mouseDragEvent(self, ev):
        """Left-drag on the bottom-right 50x50 corner resizes (delegated to
        the view); a drag anywhere else moves the rectangle and its children."""
        if ev.button() == QtCore.Qt.LeftButton:
            boundingRect = self.boundingRect()
            width = boundingRect.width()
            height = boundingRect.height()
            rect = QtCore.QRectF(width - 50, height - 50, 50, 50)
            if rect.contains(ev.pos()):
                # Resize handle: let the view drive setDragPoint().
                ev.ignore()
                self.view.commentRect = self
            else:
                ev.accept()
                pos = self.pos()+self.mapToParent(ev.pos())-self.mapToParent(ev.lastPos())
                old_pos = self.pos()
                if ev.isFinish():
                    # Snap to the grid only when the drag ends.
                    pos = clamp(pos)
                self.setPos(pos)
                # Move contained nodes by the same delta.
                diff = pos - old_pos
                for child in self.childNodes:
                    child.moveBy(*diff)

    def mouseClickEvent(self, ev):
        """Open the context menu on right-click; other buttons are ignored."""
        if int(ev.button()) == int(QtCore.Qt.RightButton):
            ev.accept()
            self.raiseContextMenu(ev)

    def buildMenu(self):
        """Create the context menu: a single 'Remove Comment' action."""
        self.menu = QtGui.QMenu()
        self.menu.setTitle("Comment")
        self.menu.addAction("Remove Comment", self.destroy)

    def raiseContextMenu(self, ev):
        """Pop up the context menu (with parent menus merged in) at the
        event's screen position."""
        menu = self.scene().addParentContextMenus(self, self.menu, ev)
        pos = ev.screenPos()
        menu.popup(QtCore.QPoint(pos.x(), pos.y()))

    def saveState(self):
        """Serialize the comment: id, text and grid-clamped corner coordinates."""
        rect = self.sceneBoundingRect()
        topLeft = clamp(self.view.mapToView(rect.topLeft()))
        bottomRight = clamp(self.view.mapToView(rect.bottomRight()))
        return {'id': self.id,
                'text': self.commentName.text(),
                'topLeft': (topLeft.x(), topLeft.y()),
                'bottomRight': (bottomRight.x(), bottomRight.y())}

    def restoreState(self, state):
        """Restore id, text and geometry from a dict produced by saveState()."""
        self.id = state['id']
        self.commentName.setText(state['text'])
        self.__mouseDownPos = QtCore.QPointF(*state['topLeft'])
        self.setDragPoint(QtCore.QPointF(*state['bottomRight']))
class SelectionRect(GraphicsWidget):
__backgroundColor = QtGui.QColor(100, 100, 100, 50)
__pen = QtGui.QPen(QtGui.QColor(255, 255, 255), 1.0, QtCore.Qt.DashLine)
def __init__(self, view, mouseDownPos):
super().__init__()
self.setZValue(2)
self.view = view
self.view.addItem(self)
self.__mouseDownPos = mouseDownPos
self.setPos(self.__mouseDownPos)
self.resize(0, 0)
self.selectFullyIntersectedItems = True
def collidesWithItem(self, item):
if self.selectFullyIntersectedItems:
return self.sceneBoundingRect().contains(item.sceneBoundingRect())
return super().collidesWithItem(item)
def setDragPoint(self, dragPoint):
topLeft = QtCore.QPointF(self.__mouseDownPos)
bottomRight = QtCore.QPointF(dragPoint)
if dragPoint.x() < self.__mouseDownPos.x():
topLeft.setX(dragPoint.x())
bottomRight.setX(self.__mouseDownPos.x())
if dragPoint.y() < self.__mouseDownPos.y():
topLeft.setY(dragPoint.y())
bottomRight.setY(self.__mouseDownPos.y())
self.setPos(topLeft)
self.resize(bottomRight.x() - topLeft.x(),
bottomRight.y() - topLeft.y())
def paint(self, painter, option, widget):
rect = self.windowFrameRect()
painter.setBrush(self.__backgroundColor)
painter.setPen(self.__pen)
painter.drawRect(rect)
def destroy(self):
self.view.removeItem(self)
class FlowchartGraphicsView(GraphicsView):
sigHoverOver = QtCore.Signal(object)
sigClicked = QtCore.Signal(object)
def __init__(self, widget, *args):
super().__init__(*args, useOpenGL=False, background=0.75)
self.widget = widget
self.setAcceptDrops(True)
self._vb = FlowchartViewBox(widget, lockAspect=True, invertY=True)
self.setCentralItem(self._vb)
self.setRenderHint(QtGui.QPainter.Antialiasing, True)
def viewBox(self):
return self._vb
def dragEnterEvent(self, ev):
ev.accept()
def saveState(self):
return self._vb.saveState()
def restoreState(self, state):
self._vb.restoreState(state)
class FlowchartViewBox(ViewBox):
def __init__(self, widget, *args, **kwargs):
super().__init__(*args, **kwargs)
self.widget = widget
self.chart = widget.chart
self.setLimits(minXRange=200, minYRange=200,
xMin=-1000, yMin=-1000, xMax=5.2e3, yMax=5.2e3)
self.addItem(GridItem())
self.setAcceptDrops(True)
self.setRange(xRange=(0, 800), yRange=(0, 800))
self.mouseMode = "Pan"
self.selectionRect = None
self.selected_nodes = []
self.copy = False
self.paste_pos = None
self.mouse_pos = None
self.commentRect = None
self.commentId = 0
self.commentRects = {}
def setMouseMode(self, mode):
assert mode in ["Select", "Pan", "Comment"]
self.mouseMode = mode
def getMenu(self, ev):
self._fc_menu = QtGui.QMenu()
self._subMenus = self.getContextMenus(ev)
for menu in self._subMenus:
self._fc_menu.addMenu(menu)
if self.selected_nodes:
self.selected_node_menu = QtGui.QMenu("Selection")
if not self.copy:
self.selected_node_menu.addAction("Copy", self.copySelectedNodes)
else:
self.selected_node_menu.addAction("Paste", self.pasteSelectedNodes)
self.paste_pos = ev.pos()
self.selected_node_menu.addAction("Delete", self.deleteSelectedNodes)
self._fc_menu.addMenu(self.selected_node_menu)
self.mouse_pos = self.mapToView(ev.pos())
return self._fc_menu
def copySelectedNodes(self):
self.copy = True
def pasteSelectedNodes(self):
pos = self.mapToView(self.paste_pos)
for node in self.selected_nodes:
self.widget.chart.createNode(type(node).__name__, pos=pos, prompt=False)
pos += QtCore.QPointF(200, 0)
def deleteSelectedNodes(self):
for node in self.selected_nodes:
node.close()
def getContextMenus(self, ev):
sourceMenu = self.widget.buildSourceMenu(ev.scenePos())
sourceMenu.setTitle("Add Source")
operationMenu = self.widget.buildOperationMenu(ev.scenePos())
operationMenu.setTitle("Add Operation")
return [sourceMenu, operationMenu, ViewBox.getMenu(self, ev)]
def decode_data(self, arr):
data = []
item = {}
ds = QtCore.QDataStream(arr)
while not ds.atEnd():
ds.readInt32()
ds.readInt32()
map_items = ds.readInt32()
for i in range(map_items):
key = ds.readInt32()
value = QtCore.QVariant()
ds >> value
item[QtCore.Qt.ItemDataRole(key)] = value
data.append(item)
return data
def mouseDragEvent(self, ev):
ev.accept()
if self.mouseMode == "Pan":
super().mouseDragEvent(ev)
elif self.mouseMode == "Select":
if ev.isStart():
self.selectionRect = SelectionRect(self, self.mapToView(ev.buttonDownPos()))
if self.selectionRect:
self.selectionRect.setDragPoint(self.mapToView(ev.pos()))
if ev.isFinish():
self.selected_nodes = []
for item in self.allChildren():
if not isinstance(item, NodeGraphicsItem):
continue
if self.selectionRect.collidesWithItem(item):
item.node.recolor("selected")
self.selected_nodes.append(item.node)
self.copy = False
self.selectionRect.destroy()
self.selectionRect = None
elif self.mouseMode == "Comment":
if ev.isStart() and self.commentRect is None:
pos = clamp(self.mapToView(ev.buttonDownPos()))
self.commentRect = CommentRect(self, pos, self.commentId)
self.chart.sigNodeCreated.connect(self.commentRect.nodeCreated)
self.commentId += 1
if self.commentRect:
pos = clamp(self.mapToView(ev.pos()))
self.commentRect.setDragPoint(pos)
if ev.isFinish():
self.commentRects[self.commentRect.id] = self.commentRect
for item in self.allChildren():
if isinstance(item, NodeGraphicsItem) and self.commentRect.collidesWithItem(item):
self.commentRect.childNodes.add(item)
self.commentRect = None
def mousePressEvent(self, ev):
ev.accept()
super().mousePressEvent(ev)
if ev.button() == QtCore.Qt.LeftButton:
for node in self.selected_nodes:
node.recolor()
children = filter(lambda item: isinstance(item, NodeGraphicsItem), self.allChildren())
for id, comment in self.commentRects.items():
comment.updateChildren(children)
def dropEvent(self, ev):
if ev.mimeData().hasFormat('application/x-qabstractitemmodeldatalist'):
arr = ev.mimeData().data('application/x-qabstractitemmodeldatalist')
node = self.decode_data(arr)[0][0].value()
try:
self.widget.chart.createNode(node, pos=self.mapToView(ev.pos()), prompt=True)
ev.accept()
return
except KeyError:
pass
try:
node_type = self.widget.chart.source_library.getSourceType(node)
if node not in self.widget.chart._graph:
node = SourceNode(name=node, terminals={'Out': {'io': 'out', 'ttype': node_type}})
self.widget.chart.addNode(node=node, pos=self.mapToView(ev.pos()))
ev.accept()
return
except KeyError:
pass
else:
ev.ignore()
def saveState(self):
state = {'comments': []}
for id, comment in self.commentRects.items():
state['comments'].append(comment.saveState())
return state
def restoreState(self, state):
self.commentId = 0
for commentState in state['comments']:
comment = CommentRect(view=self)
comment.restoreState(commentState)
self.addItem(comment)
self.commentRects[commentState['id']] = comment
self.commentId = max(commentState['id']+1, self.commentId)
| true | true |
1c2f5c7f8953b6491c7c1611f124d3706f3c0b55 | 1,727 | py | Python | SoftLayer/CLI/block/snapshot/list.py | dvzrv/softlayer-python | 9a5f6c6981bcc370084537b4d1769383499ce90d | [
"MIT"
] | 126 | 2015-01-05T05:09:22.000Z | 2021-07-02T00:16:35.000Z | SoftLayer/CLI/block/snapshot/list.py | dvzrv/softlayer-python | 9a5f6c6981bcc370084537b4d1769383499ce90d | [
"MIT"
] | 969 | 2015-01-05T15:55:31.000Z | 2022-03-31T19:55:20.000Z | SoftLayer/CLI/block/snapshot/list.py | dvzrv/softlayer-python | 9a5f6c6981bcc370084537b4d1769383499ce90d | [
"MIT"
] | 176 | 2015-01-22T11:23:40.000Z | 2022-02-11T13:16:58.000Z | """List block storage snapshots."""
# :license: MIT, see LICENSE for more details.
import click
import SoftLayer
from SoftLayer.CLI import columns as column_helper
from SoftLayer.CLI import environment
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
# Available output columns; each maps a display name to the snapshot
# attribute path and the API object mask needed to fetch it.
COLUMNS = [
    column_helper.Column('id', ('id',), mask='id'),
    column_helper.Column('name', ('notes',), mask='notes'),
    column_helper.Column('created', ('snapshotCreationTimestamp',),
                         mask='snapshotCreationTimestamp'),
    column_helper.Column('size_bytes', ('snapshotSizeBytes',),
                         mask='snapshotSizeBytes'),
]

# Columns shown when the user does not pass --columns.
DEFAULT_COLUMNS = [
    'id',
    'name',
    'created',
    'size_bytes'
]
@click.command()
@click.argument('volume_id')
@click.option('--sortby', help='Column to sort by',
              default='created')
@click.option('--columns',
              callback=column_helper.get_formatter(COLUMNS),
              help='Columns to display. Options: {0}'.format(
                  ', '.join(column.name for column in COLUMNS)),
              default=','.join(DEFAULT_COLUMNS))
@environment.pass_env
def cli(env, volume_id, sortby, columns):
    """List block storage snapshots."""
    manager = SoftLayer.BlockStorageManager(env.client)
    volume = helpers.resolve_id(manager.resolve_ids, volume_id, 'Volume Id')
    snapshots = manager.get_block_volume_snapshot_list(volume, mask=columns.mask())

    # Render one row per snapshot, substituting a blank for empty cells.
    table = formatting.Table(columns.columns)
    table.sortby = sortby
    for snapshot in snapshots:
        row = [cell or formatting.blank() for cell in columns.row(snapshot)]
        table.add_row(row)
    env.fout(table)
| 30.839286 | 87 | 0.65663 |
import click
import SoftLayer
from SoftLayer.CLI import columns as column_helper
from SoftLayer.CLI import environment
from SoftLayer.CLI import formatting
from SoftLayer.CLI import helpers
COLUMNS = [
column_helper.Column('id', ('id',), mask='id'),
column_helper.Column('name', ('notes',), mask='notes'),
column_helper.Column('created', ('snapshotCreationTimestamp',),
mask='snapshotCreationTimestamp'),
column_helper.Column('size_bytes', ('snapshotSizeBytes',),
mask='snapshotSizeBytes'),
]
DEFAULT_COLUMNS = [
'id',
'name',
'created',
'size_bytes'
]
@click.command()
@click.argument('volume_id')
@click.option('--sortby', help='Column to sort by',
default='created')
@click.option('--columns',
callback=column_helper.get_formatter(COLUMNS),
help='Columns to display. Options: {0}'.format(
', '.join(column.name for column in COLUMNS)),
default=','.join(DEFAULT_COLUMNS))
@environment.pass_env
def cli(env, volume_id, sortby, columns):
block_manager = SoftLayer.BlockStorageManager(env.client)
resolved_id = helpers.resolve_id(block_manager.resolve_ids, volume_id, 'Volume Id')
snapshots = block_manager.get_block_volume_snapshot_list(
resolved_id,
mask=columns.mask()
)
table = formatting.Table(columns.columns)
table.sortby = sortby
for snapshot in snapshots:
table.add_row([value or formatting.blank()
for value in columns.row(snapshot)])
env.fout(table)
| true | true |
1c2f5df2b341f8429bcdc733f4d4efc8edbf36a8 | 5,513 | py | Python | contrib/seeds/makeseeds.py | ctscoin/cts-core-original | 73eaf46e6cf5a1fb26099bb1ab8ae5d1e846780a | [
"MIT"
] | 1 | 2018-09-23T14:58:24.000Z | 2018-09-23T14:58:24.000Z | contrib/seeds/makeseeds.py | ctscoin/cts-core-original | 73eaf46e6cf5a1fb26099bb1ab8ae5d1e846780a | [
"MIT"
] | null | null | null | contrib/seeds/makeseeds.py | ctscoin/cts-core-original | 73eaf46e6cf5a1fb26099bb1ab8ae5d1e846780a | [
"MIT"
] | 1 | 2018-09-04T02:36:33.000Z | 2018-09-04T02:36:33.000Z | #!/usr/bin/env python3
# Copyright (c) 2013-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Generate seeds.txt from Pieter's DNS seeder
#
# Maximum number of seed addresses to emit.
NSEEDS=512

# At most this many seeds from any single autonomous system.
MAX_SEEDS_PER_ASN=2

# Ignore nodes whose reported block height is below this.
MIN_BLOCKS = 615801

# These are hosts that have been observed to be behaving strangely (e.g.
# aggressively connecting to every node).
SUSPICIOUS_HOSTS = {
    ""
}
import re
import sys
import dns.resolver
import collections
PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$")
PATTERN_AGENT = re.compile(r"^(/CTSCore:2.2.(0|1|99)/)$")

def parseline(line):
    """Parse one line of DNS-seeder dump output into a node-info dict.

    Returns None for malformed lines, localhost/zero addresses, and nodes
    whose "good" flag (second field) is 0.  The returned dict carries the
    address, port, 30-day uptime, last-success timestamp, protocol version,
    user agent, service flags, block height and a per-network sort key.
    """
    sline = line.split()
    if len(sline) < 11:
        return None
    m = PATTERN_IPV4.match(sline[0])
    sortkey = None
    ip = None
    if m is None:
        m = PATTERN_IPV6.match(sline[0])
        if m is None:
            m = PATTERN_ONION.match(sline[0])
            if m is None:
                return None
            else:
                net = 'onion'
                ctstr = sortkey = m.group(1)
                port = int(m.group(2))
        else:
            net = 'ipv6'
            if m.group(1) in ['::']:  # Not interested in localhost
                return None
            ctstr = m.group(1)
            sortkey = ctstr  # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds
            port = int(m.group(2))
    else:
        # Do IPv4 sanity check
        ip = 0
        for i in range(0, 4):
            if int(m.group(i + 2)) < 0 or int(m.group(i + 2)) > 255:
                return None
            ip = ip + (int(m.group(i + 2)) << (8 * (3 - i)))
        if ip == 0:
            return None
        net = 'ipv4'
        sortkey = ip
        ctstr = m.group(1)
        port = int(m.group(6))
    # Skip bad results.  split() yields strings, so compare against "0";
    # the previous `sline[1] == 0` (int) could never be true and let bad
    # nodes through.
    if sline[1] == "0":
        return None
    # Extract uptime %.
    uptime30 = float(sline[7][:-1])
    # Extract Unix timestamp of last success.
    lastsuccess = int(sline[2])
    # Extract protocol version.
    version = int(sline[10])
    # Extract user agent.  The agent is the 12th field (index 11); it only
    # spills into a 13th field when it contains a space, so the two-token
    # join requires len > 12 — the previous `> 11` raised IndexError on
    # every ordinary 12-field line.
    if len(sline) > 12:
        agent = sline[11][1:] + sline[12][:-1]
    else:
        agent = sline[11][1:-1]
    # Extract service flags (hex bitmask).
    service = int(sline[9], 16)
    # Extract blocks.
    blocks = int(sline[8])
    # Construct result.
    return {
        'net': net,
        'ip': ctstr,
        'port': port,
        'ipnum': ip,
        'uptime': uptime30,
        'lastsuccess': lastsuccess,
        'version': version,
        'agent': agent,
        'service': service,
        'blocks': blocks,
        'sortkey': sortkey,
    }
def filtermultiport(cts):
    '''Drop every host whose IP (sortkey) appears more than once.'''
    # Count entries per sort key, then keep only singletons (in order).
    occurrences = collections.Counter(entry['sortkey'] for entry in cts)
    return [entry for entry in cts if occurrences[entry['sortkey']] == 1]
# Based on Greg Maxwell's seed_filter.py
def filterbyasn(cts, max_per_asn, max_total):
    """Limit IPv4 results to *max_per_asn* per AS and *max_total* overall.

    ASNs are looked up via Team Cymru's DNS interface.  IPv6 and onion
    entries are passed through unfiltered, appended after the IPv4 results.
    """
    # Sift out cts by type
    cts_ipv4 = [ip for ip in cts if ip['net'] == 'ipv4']
    cts_ipv6 = [ip for ip in cts if ip['net'] == 'ipv6']
    cts_onion = [ip for ip in cts if ip['net'] == 'onion']
    # Filter IPv4 by ASN
    result = []
    asn_count = {}
    for ip in cts_ipv4:
        if len(result) == max_total:
            break
        try:
            # Reverse the octets and query <d.c.b.a>.origin.asn.cymru.com;
            # the TXT answer starts with the AS number.
            asn = int([x.to_text() for x in dns.resolver.query('.'.join(reversed(ip['ip'].split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
            if asn not in asn_count:
                asn_count[asn] = 0
            if asn_count[asn] == max_per_asn:
                continue
            asn_count[asn] += 1
            result.append(ip)
        except Exception:
            # Was a bare `except:`, which also swallowed KeyboardInterrupt /
            # SystemExit; resolver or parse failures are logged and skipped.
            sys.stderr.write('ERR: Could not resolve ASN for "' + ip['ip'] + '"\n')
    # TODO: filter IPv6 by ASN
    # Add back non-IPv4
    result.extend(cts_ipv6)
    result.extend(cts_onion)
    return result
def main():
    """Read seeder dump lines from stdin, filter them, and print seed
    addresses (host:port, IPv6 in brackets) to stdout."""
    lines = sys.stdin.readlines()
    cts = [parseline(line) for line in lines]
    # Skip entries without a valid address.
    cts = [ip for ip in cts if ip is not None]
    # Skip entries from suspicious hosts.
    cts = [ip for ip in cts if ip['ip'] not in SUSPICIOUS_HOSTS]
    # Enforce minimal number of blocks.
    cts = [ip for ip in cts if ip['blocks'] >= MIN_BLOCKS]
    # Require service bit 1.
    cts = [ip for ip in cts if (ip['service'] & 1) == 1]
    # Require at least 50% 30-day uptime.
    cts = [ip for ip in cts if ip['uptime'] > 50]
    # Require a known and recent user agent.
    cts = [ip for ip in cts if PATTERN_AGENT.match(re.sub(' ', '-', ip['agent']))]
    # Sort by availability (and use last success as tie breaker)
    cts.sort(key=lambda x: (x['uptime'], x['lastsuccess'], x['ip']), reverse=True)
    # Filter out hosts with multiple bitcoin ports, these are likely abusive
    cts = filtermultiport(cts)
    # Look up ASNs and limit results, both per ASN and globally.
    cts = filterbyasn(cts, MAX_SEEDS_PER_ASN, NSEEDS)
    # Sort the results by IP address (for deterministic output).
    cts.sort(key=lambda x: (x['net'], x['sortkey']))
    for ip in cts:
        if ip['net'] == 'ipv6':
            print('[%s]:%i' % (ip['ip'], ip['port']))
        else:
            print('%s:%i' % (ip['ip'], ip['port']))
| 32.052326 | 186 | 0.566842 |
#
NSEEDS=512
MAX_SEEDS_PER_ASN=2
MIN_BLOCKS = 615801
# These are hosts that have been observed to be behaving strangely (e.g.
# aggressively connecting to every node).
SUSPICIOUS_HOSTS = {
""
}
import re
import sys
import dns.resolver
import collections
PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$")
PATTERN_AGENT = re.compile(r"^(/CTSCore:2.2.(0|1|99)/)$")
def parseline(line):
sline = line.split()
if len(sline) < 11:
return None
m = PATTERN_IPV4.match(sline[0])
sortkey = None
ip = None
if m is None:
m = PATTERN_IPV6.match(sline[0])
if m is None:
m = PATTERN_ONION.match(sline[0])
if m is None:
return None
else:
net = 'onion'
ctstr = sortkey = m.group(1)
port = int(m.group(2))
else:
net = 'ipv6'
if m.group(1) in ['::']: # Not interested in localhost
return None
ctstr = m.group(1)
sortkey = ctstr # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds
port = int(m.group(2))
else:
# Do IPv4 sanity check
ip = 0
for i in range(0,4):
if int(m.group(i+2)) < 0 or int(m.group(i+2)) > 255:
return None
ip = ip + (int(m.group(i+2)) << (8*(3-i)))
if ip == 0:
return None
net = 'ipv4'
sortkey = ip
ctstr = m.group(1)
port = int(m.group(6))
# Skip bad results.
if sline[1] == 0:
return None
# Extract uptime %.
uptime30 = float(sline[7][:-1])
# Extract Unix timestamp of last success.
lastsuccess = int(sline[2])
# Extract protocol version.
version = int(sline[10])
# Extract user agent.
if len(sline) > 11:
agent = sline[11][1:] + sline[12][:-1]
else:
agent = sline[11][1:-1]
# Extract service flags.
service = int(sline[9], 16)
# Extract blocks.
blocks = int(sline[8])
# Construct result.
return {
'net': net,
'ip': ctstr,
'port': port,
'ipnum': ip,
'uptime': uptime30,
'lastsuccess': lastsuccess,
'version': version,
'agent': agent,
'service': service,
'blocks': blocks,
'sortkey': sortkey,
}
def filtermultiport(cts):
hist = collections.defaultdict(list)
for ip in cts:
hist[ip['sortkey']].append(ip)
return [value[0] for (key,value) in list(hist.items()) if len(value)==1]
# Based on Greg Maxwell's seed_filter.py
def filterbyasn(cts, max_per_asn, max_total):
cts_ipv4 = [ip for ip in cts if ip['net'] == 'ipv4']
cts_ipv6 = [ip for ip in cts if ip['net'] == 'ipv6']
cts_onion = [ip for ip in cts if ip['net'] == 'onion']
result = []
asn_count = {}
for ip in cts_ipv4:
if len(result) == max_total:
break
try:
asn = int([x.to_text() for x in dns.resolver.query('.'.join(reversed(ip['ip'].split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
if asn not in asn_count:
asn_count[asn] = 0
if asn_count[asn] == max_per_asn:
continue
asn_count[asn] += 1
result.append(ip)
except:
sys.stderr.write('ERR: Could not resolve ASN for "' + ip['ip'] + '"\n')
# TODO: filter IPv6 by ASN
# Add back non-IPv4
result.extend(cts_ipv6)
result.extend(cts_onion)
return result
def main():
lines = sys.stdin.readlines()
cts = [parseline(line) for line in lines]
# Skip entries with valid address.
cts = [ip for ip in cts if ip is not None]
# Skip entries from suspicious hosts.
cts = [ip for ip in cts if ip['ip'] not in SUSPICIOUS_HOSTS]
# Enforce minimal number of blocks.
cts = [ip for ip in cts if ip['blocks'] >= MIN_BLOCKS]
# Require service bit 1.
cts = [ip for ip in cts if (ip['service'] & 1) == 1]
# Require at least 50% 30-day uptime.
cts = [ip for ip in cts if ip['uptime'] > 50]
# Require a known and recent user agent.
cts = [ip for ip in cts if PATTERN_AGENT.match(re.sub(' ', '-', ip['agent']))]
# Sort by availability (and use last success as tie breaker)
cts.sort(key=lambda x: (x['uptime'], x['lastsuccess'], x['ip']), reverse=True)
# Filter out hosts with multiple bitcoin ports, these are likely abusive
cts = filtermultiport(cts)
# Look up ASNs and limit results, both per ASN and globally.
cts = filterbyasn(cts, MAX_SEEDS_PER_ASN, NSEEDS)
# Sort the results by IP address (for deterministic output).
cts.sort(key=lambda x: (x['net'], x['sortkey']))
for ip in cts:
if ip['net'] == 'ipv6':
print('[%s]:%i' % (ip['ip'], ip['port']))
else:
print('%s:%i' % (ip['ip'], ip['port']))
if __name__ == '__main__':
main()
| true | true |
1c2f5fdb8b40a19e7373e6298e3f6b6169e9e8b4 | 6,354 | py | Python | ppcls/utils/config.py | PaddlePaddle/PaddleImgClass | f5265a1f2ab7aa113ae5245223f0528e3239a5e7 | [
"Apache-2.0"
] | 7 | 2020-03-30T04:32:01.000Z | 2020-03-30T07:51:00.000Z | ppcls/utils/config.py | PaddlePaddle/PaddleClassification | 51c1bdb27af15441995bf9840f7020cca9b7d9a8 | [
"Apache-2.0"
] | null | null | null | ppcls/utils/config.py | PaddlePaddle/PaddleClassification | 51c1bdb27af15441995bf9840f7020cca9b7d9a8 | [
"Apache-2.0"
] | 1 | 2020-04-07T17:03:24.000Z | 2020-04-07T17:03:24.000Z | # copyright (c) 2020 PaddlePaddle Authors. All Rights Reserve.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import copy
import argparse
import yaml
from ppcls.utils import logger
from ppcls.utils import check
__all__ = ['get_config']
class AttrDict(dict):
    """dict subclass whose entries can also be read and written as attributes."""

    def __getattr__(self, key):
        # Attribute reads fall through to item lookup; a missing key raises
        # KeyError (not AttributeError), matching plain dict access.
        return self[key]

    def __setattr__(self, key, value):
        # Names that already exist as real instance attributes keep normal
        # attribute semantics; everything else becomes a dict entry.
        target = self.__dict__ if key in self.__dict__ else self
        target[key] = value

    def __deepcopy__(self, content):
        # Deep copies intentionally degrade to a plain dict.
        return copy.deepcopy(dict(self))
def create_attr_dict(yaml_config):
    """Recursively convert nested plain-dict values of *yaml_config* into
    AttrDict instances, parsing string values into Python literals where
    possible."""
    from ast import literal_eval

    for key, raw in yaml_config.items():
        node = AttrDict(raw) if type(raw) is dict else raw
        if node is not raw:
            yaml_config[key] = node
        if isinstance(node, str):
            try:
                node = literal_eval(node)
            except BaseException:
                pass  # keep the raw string when it is not a Python literal
        if isinstance(node, AttrDict):
            create_attr_dict(yaml_config[key])
        else:
            yaml_config[key] = node
def parse_config(cfg_file):
    """Load a YAML config file and return it as a nested AttrDict."""
    with open(cfg_file, 'r') as fopen:
        loaded = yaml.load(fopen, Loader=yaml.SafeLoader)
    yaml_config = AttrDict(loaded)
    create_attr_dict(yaml_config)
    return yaml_config
def print_dict(d, delimiter=0):
    """
    Recursively log a dict, indenting nested values by *delimiter* spaces
    and printing a separator line after every all-uppercase key.
    """
    placeholder = "-" * 60
    indent = delimiter * " "
    for k, v in sorted(d.items()):
        if isinstance(v, dict):
            logger.info("{}{} : ".format(indent, k))
            print_dict(v, delimiter + 4)
        elif isinstance(v, list) and len(v) >= 1 and isinstance(v[0], dict):
            logger.info("{}{} : ".format(indent, k))
            for element in v:
                print_dict(element, delimiter + 4)
        else:
            logger.info("{}{} : {}".format(indent, k, v))
        if k.isupper():
            logger.info(placeholder)
def print_config(config):
"""
visualize configs
Arguments:
config: configs
"""
logger.advertise()
print_dict(config)
def check_config(config):
"""
Check config
"""
check.check_version()
use_gpu = config.get('use_gpu', True)
if use_gpu:
check.check_gpu()
architecture = config.get('ARCHITECTURE')
#check.check_architecture(architecture)
use_mix = config.get('use_mix', False)
check.check_mix(architecture, use_mix)
classes_num = config.get('classes_num')
check.check_classes_num(classes_num)
mode = config.get('mode', 'train')
if mode.lower() == 'train':
check.check_function_params(config, 'LEARNING_RATE')
check.check_function_params(config, 'OPTIMIZER')
def override(dl, ks, v):
"""
Recursively replace dict of list
Args:
dl(dict or list): dict or list to be replaced
ks(list): list of keys
v(str): value to be replaced
"""
def str2num(v):
try:
return eval(v)
except Exception:
return v
assert isinstance(dl, (list, dict)), ("{} should be a list or a dict")
assert len(ks) > 0, ('lenght of keys should larger than 0')
if isinstance(dl, list):
k = str2num(ks[0])
if len(ks) == 1:
assert k < len(dl), ('index({}) out of range({})'.format(k, dl))
dl[k] = str2num(v)
else:
override(dl[k], ks[1:], v)
else:
if len(ks) == 1:
# assert ks[0] in dl, ('{} is not exist in {}'.format(ks[0], dl))
if not ks[0] in dl:
print('A new field ({}) detected!'.format(ks[0], dl))
dl[ks[0]] = str2num(v)
else:
if ks[0] not in dl.keys():
dl[ks[0]] = {}
print("A new Series field ({}) detected!".format(ks[0], dl))
override(dl[ks[0]], ks[1:], v)
def override_config(config, options=None):
"""
Recursively override the config
Args:
config(dict): dict to be replaced
options(list): list of pairs(key0.key1.idx.key2=value)
such as: [
'topk=2',
'VALID.transforms.1.ResizeImage.resize_short=300'
]
Returns:
config(dict): replaced config
"""
if options is not None:
for opt in options:
assert isinstance(opt, str), (
"option({}) should be a str".format(opt))
assert "=" in opt, (
"option({}) should contain a ="
"to distinguish between key and value".format(opt))
pair = opt.split('=')
assert len(pair) == 2, ("there can be only a = in the option")
key, value = pair
keys = key.split('.')
override(config, keys, value)
return config
def get_config(fname, overrides=None, show=False):
"""
Read config from file
"""
assert os.path.exists(fname), (
'config file({}) is not exist'.format(fname))
config = parse_config(fname)
override_config(config, overrides)
if show:
print_config(config)
# check_config(config)
return config
def parse_args():
parser = argparse.ArgumentParser("generic-image-rec train script")
parser.add_argument(
'-c',
'--config',
type=str,
default='configs/config.yaml',
help='config file path')
parser.add_argument(
'-o',
'--override',
action='append',
default=[],
help='config options to be overridden')
parser.add_argument(
'-p',
'--profiler_options',
type=str,
default=None,
help='The option of profiler, which should be in format \"key1=value1;key2=value2;key3=value3\".'
)
args = parser.parse_args()
return args
| 29.691589 | 105 | 0.581523 |
import os
import copy
import argparse
import yaml
from ppcls.utils import logger
from ppcls.utils import check
__all__ = ['get_config']
class AttrDict(dict):
def __getattr__(self, key):
return self[key]
def __setattr__(self, key, value):
if key in self.__dict__:
self.__dict__[key] = value
else:
self[key] = value
def __deepcopy__(self, content):
return copy.deepcopy(dict(self))
def create_attr_dict(yaml_config):
from ast import literal_eval
for key, value in yaml_config.items():
if type(value) is dict:
yaml_config[key] = value = AttrDict(value)
if isinstance(value, str):
try:
value = literal_eval(value)
except BaseException:
pass
if isinstance(value, AttrDict):
create_attr_dict(yaml_config[key])
else:
yaml_config[key] = value
def parse_config(cfg_file):
with open(cfg_file, 'r') as fopen:
yaml_config = AttrDict(yaml.load(fopen, Loader=yaml.SafeLoader))
create_attr_dict(yaml_config)
return yaml_config
def print_dict(d, delimiter=0):
placeholder = "-" * 60
for k, v in sorted(d.items()):
if isinstance(v, dict):
logger.info("{}{} : ".format(delimiter * " ", k))
print_dict(v, delimiter + 4)
elif isinstance(v, list) and len(v) >= 1 and isinstance(v[0], dict):
logger.info("{}{} : ".format(delimiter * " ", k))
for value in v:
print_dict(value, delimiter + 4)
else:
logger.info("{}{} : {}".format(delimiter * " ", k, v))
if k.isupper():
logger.info(placeholder)
def print_config(config):
logger.advertise()
print_dict(config)
def check_config(config):
check.check_version()
use_gpu = config.get('use_gpu', True)
if use_gpu:
check.check_gpu()
architecture = config.get('ARCHITECTURE')
use_mix = config.get('use_mix', False)
check.check_mix(architecture, use_mix)
classes_num = config.get('classes_num')
check.check_classes_num(classes_num)
mode = config.get('mode', 'train')
if mode.lower() == 'train':
check.check_function_params(config, 'LEARNING_RATE')
check.check_function_params(config, 'OPTIMIZER')
def override(dl, ks, v):
def str2num(v):
try:
return eval(v)
except Exception:
return v
assert isinstance(dl, (list, dict)), ("{} should be a list or a dict")
assert len(ks) > 0, ('lenght of keys should larger than 0')
if isinstance(dl, list):
k = str2num(ks[0])
if len(ks) == 1:
assert k < len(dl), ('index({}) out of range({})'.format(k, dl))
dl[k] = str2num(v)
else:
override(dl[k], ks[1:], v)
else:
if len(ks) == 1:
if not ks[0] in dl:
print('A new field ({}) detected!'.format(ks[0], dl))
dl[ks[0]] = str2num(v)
else:
if ks[0] not in dl.keys():
dl[ks[0]] = {}
print("A new Series field ({}) detected!".format(ks[0], dl))
override(dl[ks[0]], ks[1:], v)
def override_config(config, options=None):
if options is not None:
for opt in options:
assert isinstance(opt, str), (
"option({}) should be a str".format(opt))
assert "=" in opt, (
"option({}) should contain a ="
"to distinguish between key and value".format(opt))
pair = opt.split('=')
assert len(pair) == 2, ("there can be only a = in the option")
key, value = pair
keys = key.split('.')
override(config, keys, value)
return config
def get_config(fname, overrides=None, show=False):
assert os.path.exists(fname), (
'config file({}) is not exist'.format(fname))
config = parse_config(fname)
override_config(config, overrides)
if show:
print_config(config)
return config
def parse_args():
parser = argparse.ArgumentParser("generic-image-rec train script")
parser.add_argument(
'-c',
'--config',
type=str,
default='configs/config.yaml',
help='config file path')
parser.add_argument(
'-o',
'--override',
action='append',
default=[],
help='config options to be overridden')
parser.add_argument(
'-p',
'--profiler_options',
type=str,
default=None,
help='The option of profiler, which should be in format \"key1=value1;key2=value2;key3=value3\".'
)
args = parser.parse_args()
return args
| true | true |
1c2f60243d397014305e83077634402f45c35613 | 7,252 | py | Python | calico_node/tests/st/libnetwork/test_labeling.py | ketkulka/calico | c5f432f127e0ccd00fa2ec90c2ee72378d2deef3 | [
"Apache-2.0"
] | 2 | 2015-03-06T14:26:51.000Z | 2019-09-04T15:00:43.000Z | calico_node/tests/st/libnetwork/test_labeling.py | ketkulka/calico | c5f432f127e0ccd00fa2ec90c2ee72378d2deef3 | [
"Apache-2.0"
] | null | null | null | calico_node/tests/st/libnetwork/test_labeling.py | ketkulka/calico | c5f432f127e0ccd00fa2ec90c2ee72378d2deef3 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2017 Tigera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import yaml
from nose_parameterized import parameterized
from unittest import skip
from tests.st.test_base import TestBase
from tests.st.utils.docker_host import DockerHost
from tests.st.utils.network import NETWORKING_LIBNETWORK
from tests.st.utils.utils import ETCD_CA, ETCD_CERT, \
ETCD_KEY, ETCD_HOSTNAME_SSL, ETCD_SCHEME, get_ip, \
retry_until_success, wipe_etcd
POST_DOCKER_COMMANDS = [
"docker load -i /code/calico-node.tar",
"docker load -i /code/busybox.tar",
"docker load -i /code/workload.tar",
]
if ETCD_SCHEME == "https":
ADDITIONAL_DOCKER_OPTIONS = "--cluster-store=etcd://%s:2379 " \
"--cluster-store-opt kv.cacertfile=%s " \
"--cluster-store-opt kv.certfile=%s " \
"--cluster-store-opt kv.keyfile=%s " % \
(ETCD_HOSTNAME_SSL, ETCD_CA, ETCD_CERT,
ETCD_KEY)
else:
ADDITIONAL_DOCKER_OPTIONS = "--cluster-store=etcd://%s:2379 " % \
get_ip()
# TODO: Re-enable
@skip("Disabled until libnetwork is updated for libcalico-go v3")
class TestLibnetworkLabeling(TestBase):
"""
Tests that labeling is correctly implemented in libnetwork. Setup
multiple networks and then run containers with labels and see that
policy will allow and block traffic.
"""
hosts = None
host = None
@classmethod
def setUpClass(cls):
wipe_etcd(get_ip())
# Rough idea for setup
#
# Network1 Network2
#
# container1 container2
# foo = bar baz = bop
#
# container3 container4
# foo = bing foo = bar
cls.hosts = []
cls.host1 = DockerHost(
"host1",
additional_docker_options=ADDITIONAL_DOCKER_OPTIONS,
post_docker_commands=POST_DOCKER_COMMANDS,
start_calico=False,
networking=NETWORKING_LIBNETWORK)
cls.host1_hostname = cls.host1.execute("hostname")
cls.hosts.append(cls.host1)
cls.host2 = DockerHost(
"host2",
additional_docker_options=ADDITIONAL_DOCKER_OPTIONS,
post_docker_commands=POST_DOCKER_COMMANDS,
start_calico=False,
networking=NETWORKING_LIBNETWORK)
cls.host2_hostname = cls.host1.execute("hostname")
cls.hosts.append(cls.host2)
for host in cls.hosts:
host.start_calico_node(options='--use-docker-networking-container-labels')
cls.network1 = cls.host1.create_network("network1")
cls.network2 = cls.host1.create_network("network2")
cls.workload1_nw1_foo_bar = cls.host1.create_workload(
"workload1", network=cls.network1,
labels=["org.projectcalico.label.foo=bar"])
cls.workload2_nw2_baz_bop = cls.host1.create_workload(
"workload2", network=cls.network2,
labels=["org.projectcalico.label.baz=bop"])
cls.workload3_nw1_foo_bing = cls.host2.create_workload(
"workload3", network=cls.network1,
labels=["org.projectcalico.label.foo=bing"])
cls.workload4_nw2_foo_bar = cls.host2.create_workload(
"workload4", network=cls.network2,
labels=["org.projectcalico.label.foo=bar"])
def setUp(self):
# Override the per-test setUp to avoid wiping etcd; instead only
# clean up the data we added.
self.host1.delete_all_resource("policy")
def tearDown(self):
self.host1.delete_all_resource("policy")
super(TestLibnetworkLabeling, self).tearDown()
@classmethod
def tearDownClass(cls):
# Tidy up
for host in cls.hosts:
host.remove_workloads()
for host in cls.hosts:
host.cleanup()
del host
def test_policy_only_selectors_allow_traffic(self):
self.host1.add_resource([
{
'apiVersion': 'projectcalico.org/v3',
'kind': 'NetworkPolicy',
'metadata': {'name': 'allowFooBarToBazBop'},
'spec': {
'ingress': [
{
'source': {'selector': 'foo == "bar"'},
'action': 'Allow',
},
],
'egress': [{'action': 'Deny'}],
'selector': 'baz == "bop"'
}
}, {
'apiVersion': 'projectcalico.org/v3',
'kind': 'NetworkPolicy',
'metadata': {'name': 'allowFooBarEgress'},
'spec': {
'selector': 'foo == "bar"',
'egress': [{'action': 'Allow'}]
}
}
])
retry_until_success(lambda: self.assert_ip_connectivity(
workload_list=[self.workload1_nw1_foo_bar,
self.workload4_nw2_foo_bar],
ip_pass_list=[self.workload2_nw2_baz_bop.ip],
ip_fail_list=[self.workload3_nw1_foo_bing.ip]), 3)
def test_no_policy_allows_no_traffic(self):
retry_until_success(lambda: self.assert_ip_connectivity(
workload_list=[self.workload1_nw1_foo_bar,
self.workload2_nw2_baz_bop,
self.workload3_nw1_foo_bing],
ip_pass_list=[],
ip_fail_list=[self.workload4_nw2_foo_bar.ip]), 2)
retry_until_success(lambda: self.assert_ip_connectivity(
workload_list=[self.workload2_nw2_baz_bop,
self.workload3_nw1_foo_bing,
self.workload4_nw2_foo_bar],
ip_pass_list=[],
ip_fail_list=[self.workload1_nw1_foo_bar.ip]), 2)
retry_until_success(lambda: self.assert_ip_connectivity(
workload_list=[self.workload1_nw1_foo_bar,
self.workload3_nw1_foo_bing,
self.workload4_nw2_foo_bar],
ip_pass_list=[],
ip_fail_list=[self.workload2_nw2_baz_bop.ip]), 2)
retry_until_success(lambda: self.assert_ip_connectivity(
workload_list=[self.workload1_nw1_foo_bar,
self.workload2_nw2_baz_bop,
self.workload4_nw2_foo_bar],
ip_pass_list=[],
ip_fail_list=[self.workload3_nw1_foo_bing.ip]), 2)
| 39.846154 | 86 | 0.579288 |
import json
import yaml
from nose_parameterized import parameterized
from unittest import skip
from tests.st.test_base import TestBase
from tests.st.utils.docker_host import DockerHost
from tests.st.utils.network import NETWORKING_LIBNETWORK
from tests.st.utils.utils import ETCD_CA, ETCD_CERT, \
ETCD_KEY, ETCD_HOSTNAME_SSL, ETCD_SCHEME, get_ip, \
retry_until_success, wipe_etcd
POST_DOCKER_COMMANDS = [
"docker load -i /code/calico-node.tar",
"docker load -i /code/busybox.tar",
"docker load -i /code/workload.tar",
]
if ETCD_SCHEME == "https":
ADDITIONAL_DOCKER_OPTIONS = "--cluster-store=etcd://%s:2379 " \
"--cluster-store-opt kv.cacertfile=%s " \
"--cluster-store-opt kv.certfile=%s " \
"--cluster-store-opt kv.keyfile=%s " % \
(ETCD_HOSTNAME_SSL, ETCD_CA, ETCD_CERT,
ETCD_KEY)
else:
ADDITIONAL_DOCKER_OPTIONS = "--cluster-store=etcd://%s:2379 " % \
get_ip()
@skip("Disabled until libnetwork is updated for libcalico-go v3")
class TestLibnetworkLabeling(TestBase):
hosts = None
host = None
@classmethod
def setUpClass(cls):
wipe_etcd(get_ip())
cls.hosts = []
cls.host1 = DockerHost(
"host1",
additional_docker_options=ADDITIONAL_DOCKER_OPTIONS,
post_docker_commands=POST_DOCKER_COMMANDS,
start_calico=False,
networking=NETWORKING_LIBNETWORK)
cls.host1_hostname = cls.host1.execute("hostname")
cls.hosts.append(cls.host1)
cls.host2 = DockerHost(
"host2",
additional_docker_options=ADDITIONAL_DOCKER_OPTIONS,
post_docker_commands=POST_DOCKER_COMMANDS,
start_calico=False,
networking=NETWORKING_LIBNETWORK)
cls.host2_hostname = cls.host1.execute("hostname")
cls.hosts.append(cls.host2)
for host in cls.hosts:
host.start_calico_node(options='--use-docker-networking-container-labels')
cls.network1 = cls.host1.create_network("network1")
cls.network2 = cls.host1.create_network("network2")
cls.workload1_nw1_foo_bar = cls.host1.create_workload(
"workload1", network=cls.network1,
labels=["org.projectcalico.label.foo=bar"])
cls.workload2_nw2_baz_bop = cls.host1.create_workload(
"workload2", network=cls.network2,
labels=["org.projectcalico.label.baz=bop"])
cls.workload3_nw1_foo_bing = cls.host2.create_workload(
"workload3", network=cls.network1,
labels=["org.projectcalico.label.foo=bing"])
cls.workload4_nw2_foo_bar = cls.host2.create_workload(
"workload4", network=cls.network2,
labels=["org.projectcalico.label.foo=bar"])
def setUp(self):
self.host1.delete_all_resource("policy")
def tearDown(self):
self.host1.delete_all_resource("policy")
super(TestLibnetworkLabeling, self).tearDown()
@classmethod
def tearDownClass(cls):
for host in cls.hosts:
host.remove_workloads()
for host in cls.hosts:
host.cleanup()
del host
def test_policy_only_selectors_allow_traffic(self):
self.host1.add_resource([
{
'apiVersion': 'projectcalico.org/v3',
'kind': 'NetworkPolicy',
'metadata': {'name': 'allowFooBarToBazBop'},
'spec': {
'ingress': [
{
'source': {'selector': 'foo == "bar"'},
'action': 'Allow',
},
],
'egress': [{'action': 'Deny'}],
'selector': 'baz == "bop"'
}
}, {
'apiVersion': 'projectcalico.org/v3',
'kind': 'NetworkPolicy',
'metadata': {'name': 'allowFooBarEgress'},
'spec': {
'selector': 'foo == "bar"',
'egress': [{'action': 'Allow'}]
}
}
])
retry_until_success(lambda: self.assert_ip_connectivity(
workload_list=[self.workload1_nw1_foo_bar,
self.workload4_nw2_foo_bar],
ip_pass_list=[self.workload2_nw2_baz_bop.ip],
ip_fail_list=[self.workload3_nw1_foo_bing.ip]), 3)
def test_no_policy_allows_no_traffic(self):
retry_until_success(lambda: self.assert_ip_connectivity(
workload_list=[self.workload1_nw1_foo_bar,
self.workload2_nw2_baz_bop,
self.workload3_nw1_foo_bing],
ip_pass_list=[],
ip_fail_list=[self.workload4_nw2_foo_bar.ip]), 2)
retry_until_success(lambda: self.assert_ip_connectivity(
workload_list=[self.workload2_nw2_baz_bop,
self.workload3_nw1_foo_bing,
self.workload4_nw2_foo_bar],
ip_pass_list=[],
ip_fail_list=[self.workload1_nw1_foo_bar.ip]), 2)
retry_until_success(lambda: self.assert_ip_connectivity(
workload_list=[self.workload1_nw1_foo_bar,
self.workload3_nw1_foo_bing,
self.workload4_nw2_foo_bar],
ip_pass_list=[],
ip_fail_list=[self.workload2_nw2_baz_bop.ip]), 2)
retry_until_success(lambda: self.assert_ip_connectivity(
workload_list=[self.workload1_nw1_foo_bar,
self.workload2_nw2_baz_bop,
self.workload4_nw2_foo_bar],
ip_pass_list=[],
ip_fail_list=[self.workload3_nw1_foo_bing.ip]), 2)
| true | true |
1c2f60784233e4fd75c444a1734a897e8f6f2993 | 1,457 | py | Python | utils.py | pgiraud/bheka | dca033e9061633c0780907c8be6630a429f232f1 | [
"MIT"
] | null | null | null | utils.py | pgiraud/bheka | dca033e9061633c0780907c8be6630a429f232f1 | [
"MIT"
] | null | null | null | utils.py | pgiraud/bheka | dca033e9061633c0780907c8be6630a429f232f1 | [
"MIT"
] | null | null | null | import os
import ConfigParser
import geojson
import shapely
from shapely.geometry import Polygon
from shapely.prepared import prep
from math import floor, ceil
# Maximum resolution
MAXRESOLUTION = 156543.0339
# X/Y axis limit
max = MAXRESOLUTION * 256 / 2
class TileBuilder(object):
def __init__(self, parameter):
self.a = parameter
def create_square(self, i, j):
xmin = i * self.a - max
ymin = j * self.a - max
xmax = (i + 1) * self.a - max
ymax = (j + 1) * self.a - max
return Polygon([(xmin, ymin), (xmax, ymin),
(xmax, ymax), (xmin, ymax)])
# This method finds the tiles that intersect the given geometry for the given
# zoom
def get_tiles_in_geom(geom, z):
xmin = geom.bounds[0]
ymin = geom.bounds[1]
xmax = geom.bounds[2]
ymax = geom.bounds[3]
# tile size (in meters) at the required zoom level
step = max / (2 ** (z - 1))
xminstep = int(floor((xmin + max) / step))
xmaxstep = int(ceil((xmax + max) / step))
yminstep = int(floor((ymin + max) / step))
ymaxstep = int(ceil((ymax + max) / step))
tb = TileBuilder(step)
tiles = []
prepared_geom = prep(geom)
for i in range(xminstep, xmaxstep + 1):
for j in range(yminstep, ymaxstep + 1):
tile = tb.create_square(i, j)
if prepared_geom.intersects(tile):
tiles.append({'x': i, 'y': j, 'z': z})
return tiles
| 26.017857 | 77 | 0.597804 | import os
import ConfigParser
import geojson
import shapely
from shapely.geometry import Polygon
from shapely.prepared import prep
from math import floor, ceil
MAXRESOLUTION = 156543.0339
max = MAXRESOLUTION * 256 / 2
class TileBuilder(object):
def __init__(self, parameter):
self.a = parameter
def create_square(self, i, j):
xmin = i * self.a - max
ymin = j * self.a - max
xmax = (i + 1) * self.a - max
ymax = (j + 1) * self.a - max
return Polygon([(xmin, ymin), (xmax, ymin),
(xmax, ymax), (xmin, ymax)])
def get_tiles_in_geom(geom, z):
xmin = geom.bounds[0]
ymin = geom.bounds[1]
xmax = geom.bounds[2]
ymax = geom.bounds[3]
step = max / (2 ** (z - 1))
xminstep = int(floor((xmin + max) / step))
xmaxstep = int(ceil((xmax + max) / step))
yminstep = int(floor((ymin + max) / step))
ymaxstep = int(ceil((ymax + max) / step))
tb = TileBuilder(step)
tiles = []
prepared_geom = prep(geom)
for i in range(xminstep, xmaxstep + 1):
for j in range(yminstep, ymaxstep + 1):
tile = tb.create_square(i, j)
if prepared_geom.intersects(tile):
tiles.append({'x': i, 'y': j, 'z': z})
return tiles
| true | true |
1c2f632c34820217849c7161d89ddc6085cbe221 | 3,010 | py | Python | spark_auto_mapper_fhir/backbone_elements/test_report_teardown.py | imranq2/SparkAutoMapper.FHIR | dd23b218fb0097d1edc2f3e688e8d6d4d7278bd2 | [
"Apache-2.0"
] | 1 | 2020-10-31T23:25:07.000Z | 2020-10-31T23:25:07.000Z | spark_auto_mapper_fhir/backbone_elements/test_report_teardown.py | icanbwell/SparkAutoMapper.FHIR | 98f368e781b46523142c7cb513c670d659a93c9b | [
"Apache-2.0"
] | null | null | null | spark_auto_mapper_fhir/backbone_elements/test_report_teardown.py | icanbwell/SparkAutoMapper.FHIR | 98f368e781b46523142c7cb513c670d659a93c9b | [
"Apache-2.0"
] | null | null | null | from __future__ import annotations
from typing import Optional, TYPE_CHECKING
from spark_auto_mapper_fhir.fhir_types.list import FhirList
from spark_auto_mapper_fhir.fhir_types.string import FhirString
from spark_auto_mapper_fhir.extensions.extension_base import ExtensionBase
from spark_auto_mapper_fhir.base_types.fhir_backbone_element_base import (
FhirBackboneElementBase,
)
if TYPE_CHECKING:
pass
# id_ (string)
# extension (Extension)
# modifierExtension (Extension)
# action (TestReport.Action2)
from spark_auto_mapper_fhir.backbone_elements.test_report_action2 import (
TestReportAction2,
)
# This file is auto-generated by generate_classes so do not edit manually
# noinspection PyPep8Naming
class TestReportTeardown(FhirBackboneElementBase):
"""
TestReport.Teardown
A summary of information based on the results of executing a TestScript.
"""
# noinspection PyPep8Naming
def __init__(
self,
*,
id_: Optional[FhirString] = None,
extension: Optional[FhirList[ExtensionBase]] = None,
modifierExtension: Optional[FhirList[ExtensionBase]] = None,
action: FhirList[TestReportAction2],
) -> None:
"""
A summary of information based on the results of executing a TestScript.
:param id_: None
:param extension: May be used to represent additional information that is not part of the basic
definition of the element. To make the use of extensions safe and manageable,
there is a strict set of governance applied to the definition and use of
extensions. Though any implementer can define an extension, there is a set of
requirements that SHALL be met as part of the definition of the extension.
:param modifierExtension: May be used to represent additional information that is not part of the basic
definition of the element and that modifies the understanding of the element
in which it is contained and/or the understanding of the containing element's
descendants. Usually modifier elements provide negation or qualification. To
make the use of extensions safe and manageable, there is a strict set of
governance applied to the definition and use of extensions. Though any
implementer can define an extension, there is a set of requirements that SHALL
be met as part of the definition of the extension. Applications processing a
resource are required to check for modifier extensions.
Modifier extensions SHALL NOT change the meaning of any elements on Resource
or DomainResource (including cannot change the meaning of modifierExtension
itself).
:param action: The teardown action will only contain an operation.
"""
super().__init__(
id_=id_,
extension=extension,
modifierExtension=modifierExtension,
action=action,
)
| 43 | 115 | 0.718272 | from __future__ import annotations
from typing import Optional, TYPE_CHECKING
from spark_auto_mapper_fhir.fhir_types.list import FhirList
from spark_auto_mapper_fhir.fhir_types.string import FhirString
from spark_auto_mapper_fhir.extensions.extension_base import ExtensionBase
from spark_auto_mapper_fhir.base_types.fhir_backbone_element_base import (
FhirBackboneElementBase,
)
if TYPE_CHECKING:
pass
from spark_auto_mapper_fhir.backbone_elements.test_report_action2 import (
TestReportAction2,
)
class TestReportTeardown(FhirBackboneElementBase):
def __init__(
self,
*,
id_: Optional[FhirString] = None,
extension: Optional[FhirList[ExtensionBase]] = None,
modifierExtension: Optional[FhirList[ExtensionBase]] = None,
action: FhirList[TestReportAction2],
) -> None:
super().__init__(
id_=id_,
extension=extension,
modifierExtension=modifierExtension,
action=action,
)
| true | true |
1c2f64085dc9bb8ab8b33047eb5099bbb908243d | 1,168 | py | Python | tests/python/test_native_functions.py | winnerineast/taichi | 57ae0abc374e0df8f0b54bde4bcb92d9d97ed269 | [
"MIT"
] | null | null | null | tests/python/test_native_functions.py | winnerineast/taichi | 57ae0abc374e0df8f0b54bde4bcb92d9d97ed269 | [
"MIT"
] | null | null | null | tests/python/test_native_functions.py | winnerineast/taichi | 57ae0abc374e0df8f0b54bde4bcb92d9d97ed269 | [
"MIT"
] | null | null | null | import taichi as ti
@ti.all_archs
def test_abs():
x = ti.var(ti.f32)
N = 16
@ti.layout
def place():
ti.root.dense(ti.i, N).place(x)
@ti.kernel
def func():
for i in range(N):
x[i] = abs(-i)
print(x[i])
ti.static_print(x[i])
func()
for i in range(N):
assert x[i] == i
@ti.all_archs
def test_int():
x = ti.var(ti.f32)
N = 16
@ti.layout
def place():
ti.root.dense(ti.i, N).place(x)
@ti.kernel
def func():
for i in range(N):
x[i] = int(x[i])
x[i] = float(int(x[i]) // 2)
for i in range(N):
x[i] = i + 0.4
func()
for i in range(N):
assert x[i] == i // 2
@ti.all_archs
def test_minmax():
x = ti.var(ti.f32)
y = ti.var(ti.f32)
minimum = ti.var(ti.f32)
maximum = ti.var(ti.f32)
N = 16
@ti.layout
def place():
ti.root.dense(ti.i, N).place(x, y, minimum, maximum)
@ti.kernel
def func():
for i in range(N):
minimum[i] = min(x[i], y[i])
maximum[i] = max(x[i], y[i])
for i in range(N):
x[i] = i
y[i] = N - i
func()
for i in range(N):
assert minimum[i] == min(x[i], y[i])
assert maximum[i] == max(x[i], y[i])
| 14.78481 | 56 | 0.516267 | import taichi as ti
@ti.all_archs
def test_abs():
x = ti.var(ti.f32)
N = 16
@ti.layout
def place():
ti.root.dense(ti.i, N).place(x)
@ti.kernel
def func():
for i in range(N):
x[i] = abs(-i)
print(x[i])
ti.static_print(x[i])
func()
for i in range(N):
assert x[i] == i
@ti.all_archs
def test_int():
x = ti.var(ti.f32)
N = 16
@ti.layout
def place():
ti.root.dense(ti.i, N).place(x)
@ti.kernel
def func():
for i in range(N):
x[i] = int(x[i])
x[i] = float(int(x[i]) // 2)
for i in range(N):
x[i] = i + 0.4
func()
for i in range(N):
assert x[i] == i // 2
@ti.all_archs
def test_minmax():
x = ti.var(ti.f32)
y = ti.var(ti.f32)
minimum = ti.var(ti.f32)
maximum = ti.var(ti.f32)
N = 16
@ti.layout
def place():
ti.root.dense(ti.i, N).place(x, y, minimum, maximum)
@ti.kernel
def func():
for i in range(N):
minimum[i] = min(x[i], y[i])
maximum[i] = max(x[i], y[i])
for i in range(N):
x[i] = i
y[i] = N - i
func()
for i in range(N):
assert minimum[i] == min(x[i], y[i])
assert maximum[i] == max(x[i], y[i])
| true | true |
1c2f664fdcc5106e6c749e61b4c37cde48aa3eab | 7,195 | py | Python | lib/dataset/COCOKeypoints.py | ducongju/HigherHRNet-Human-Pose-Estimation | 6986494e992fd58bced00543645fe8c49ec94c35 | [
"MIT"
] | null | null | null | lib/dataset/COCOKeypoints.py | ducongju/HigherHRNet-Human-Pose-Estimation | 6986494e992fd58bced00543645fe8c49ec94c35 | [
"MIT"
] | null | null | null | lib/dataset/COCOKeypoints.py | ducongju/HigherHRNet-Human-Pose-Estimation | 6986494e992fd58bced00543645fe8c49ec94c35 | [
"MIT"
] | null | null | null | # ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by Bin Xiao (leoxiaobin@gmail.com)
# Modified by Bowen Cheng (bcheng9@illinois.edu)
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import numpy as np
import pycocotools
from .COCODataset import CocoDataset
from .target_generators import HeatmapGenerator
logger = logging.getLogger(__name__)
class CocoKeypoints(CocoDataset):
def __init__(self,
cfg,
dataset_name,
remove_images_without_annotations,
heatmap_generator,
joints_generator,
transforms=None):
super().__init__(cfg.DATASET.ROOT,
dataset_name,
cfg.DATASET.DATA_FORMAT)
if cfg.DATASET.WITH_CENTER:
assert cfg.DATASET.NUM_JOINTS == 18, 'Number of joint with center for COCO is 18'
else:
assert cfg.DATASET.NUM_JOINTS == 17, 'Number of joint for COCO is 17'
self.num_scales = self._init_check(heatmap_generator, joints_generator)
self.num_joints = cfg.DATASET.NUM_JOINTS
self.with_center = cfg.DATASET.WITH_CENTER
self.num_joints_without_center = self.num_joints - 1 \
if self.with_center else self.num_joints
self.scale_aware_sigma = cfg.DATASET.SCALE_AWARE_SIGMA
self.base_sigma = cfg.DATASET.BASE_SIGMA
self.base_size = cfg.DATASET.BASE_SIZE
self.int_sigma = cfg.DATASET.INT_SIGMA
if remove_images_without_annotations:
self.ids = [
img_id
for img_id in self.ids
if len(self.coco.getAnnIds(imgIds=img_id, iscrowd=None)) > 0
]
self.transforms = transforms
self.heatmap_generator = heatmap_generator
self.joints_generator = joints_generator
def __getitem__(self, idx):
img, anno = super().__getitem__(idx)
mask = self.get_mask(anno, idx)
anno = [
obj for obj in anno
if obj['iscrowd'] == 0 or obj['num_keypoints'] > 0
]
# TODO(bowen): to generate scale-aware sigma, modify `get_joints` to associate a sigma to each joint
joints = self.get_joints(anno)
mask_list = [mask.copy() for _ in range(self.num_scales)]
joints_list = [joints.copy() for _ in range(self.num_scales)]
target_list = list()
if self.transforms:
img, mask_list, joints_list = self.transforms(
img, mask_list, joints_list
)
for scale_id in range(self.num_scales):
target_t = self.heatmap_generator[scale_id](joints_list[scale_id])
joints_t = self.joints_generator[scale_id](joints_list[scale_id])
target_list.append(target_t.astype(np.float32))
mask_list[scale_id] = mask_list[scale_id].astype(np.float32)
joints_list[scale_id] = joints_t.astype(np.int32)
return img, target_list, mask_list, joints_list
def get_joints(self, anno):
num_people = len(anno)
if self.scale_aware_sigma:
joints = np.zeros((num_people, self.num_joints, 4)) # 对于每个人体的每个关节赋予不同的sigma值
else:
joints = np.zeros((num_people, self.num_joints, 3))
for i, obj in enumerate(anno):
joints[i, :self.num_joints_without_center, :3] = \
np.array(obj['keypoints']).reshape([-1, 3]) # 将一维列表转换为二维列表
# HigherHRNet没有用上centermap
if self.with_center:
joints_sum = np.sum(joints[i, :-1, :2], axis=0)
num_vis_joints = len(np.nonzero(joints[i, :-1, 2])[0])
if num_vis_joints > 0:
joints[i, -1, :2] = joints_sum / num_vis_joints
joints[i, -1, 2] = 1
# 设置人体之间的尺度感知sigma参数, 而人体内部没有尺度感知
# if self.scale_aware_sigma:
# # get person box
# box = obj['bbox']
# size = max(box[2], box[3]) # sigma大小以人体包围框的长边作为参考, 256时为2
# sigma = size / self.base_size * self.base_sigma # base_size = 256, base_sigma = 2.0
# if self.int_sigma:
# sigma = int(np.round(sigma + 0.5)) # 对sigma取整
# assert sigma > 0, sigma
# joints[i, :, 3] = sigma # 为某一个人的不同关节设置相同的值
########################### 人体外部尺度 ################################
if self.scale_aware_sigma:
# 人体外部尺度
box = obj['bbox']
intersize = max(box[2], box[3])
base_intersize = 128
base_intersigma = 2
# 线性变化
intersigma = intersize / base_intersize * base_intersigma
# 非线性变化
x = intersize / base_intersize
intersigma = (np.exp(x) - np.exp(-x)) / (np.exp(x) + np.exp(-x)) * base_intersigma
# 人体内部尺度
# 非截断设置
intrasize = np.array([.026, .025, .025, .035, .035, .079, .079, .072, .072,
.062, .062, .107, .107, .087, .087, .089, .089])
# 截断设置
intrasize = np.array([.062, .062, .062, .062, .062, .079, .079, .072, .072,
.062, .062, .107, .107, .087, .087, .089, .089])
base_intrasize = 0.062
base_intrasigma = 2
intrasigma = intrasize / base_intrasize * base_intrasigma
# 人体综合尺度
joints[i, :, 3] = intersigma * intrasigma
########################### 人体内部尺度 ################################
return joints
def get_mask(self, anno, idx):
coco = self.coco
img_info = coco.loadImgs(self.ids[idx])[0]
m = np.zeros((img_info['height'], img_info['width']))
for obj in anno:
if obj['iscrowd']:
rle = pycocotools.mask.frPyObjects(
obj['segmentation'], img_info['height'], img_info['width'])
m += pycocotools.mask.decode(rle)
elif obj['num_keypoints'] == 0:
rles = pycocotools.mask.frPyObjects(
obj['segmentation'], img_info['height'], img_info['width'])
for rle in rles:
m += pycocotools.mask.decode(rle)
return m < 0.5
def _init_check(self, heatmap_generator, joints_generator):
assert isinstance(heatmap_generator, (list, tuple)), 'heatmap_generator should be a list or tuple'
assert isinstance(joints_generator, (list, tuple)), 'joints_generator should be a list or tuple'
assert len(heatmap_generator) == len(joints_generator), \
'heatmap_generator and joints_generator should have same length,'\
'got {} vs {}.'.format(
len(heatmap_generator), len(joints_generator)
)
return len(heatmap_generator)
| 39.751381 | 108 | 0.54663 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import logging
import numpy as np
import pycocotools
from .COCODataset import CocoDataset
from .target_generators import HeatmapGenerator
logger = logging.getLogger(__name__)
class CocoKeypoints(CocoDataset):
def __init__(self,
cfg,
dataset_name,
remove_images_without_annotations,
heatmap_generator,
joints_generator,
transforms=None):
super().__init__(cfg.DATASET.ROOT,
dataset_name,
cfg.DATASET.DATA_FORMAT)
if cfg.DATASET.WITH_CENTER:
assert cfg.DATASET.NUM_JOINTS == 18, 'Number of joint with center for COCO is 18'
else:
assert cfg.DATASET.NUM_JOINTS == 17, 'Number of joint for COCO is 17'
self.num_scales = self._init_check(heatmap_generator, joints_generator)
self.num_joints = cfg.DATASET.NUM_JOINTS
self.with_center = cfg.DATASET.WITH_CENTER
self.num_joints_without_center = self.num_joints - 1 \
if self.with_center else self.num_joints
self.scale_aware_sigma = cfg.DATASET.SCALE_AWARE_SIGMA
self.base_sigma = cfg.DATASET.BASE_SIGMA
self.base_size = cfg.DATASET.BASE_SIZE
self.int_sigma = cfg.DATASET.INT_SIGMA
if remove_images_without_annotations:
self.ids = [
img_id
for img_id in self.ids
if len(self.coco.getAnnIds(imgIds=img_id, iscrowd=None)) > 0
]
self.transforms = transforms
self.heatmap_generator = heatmap_generator
self.joints_generator = joints_generator
def __getitem__(self, idx):
img, anno = super().__getitem__(idx)
mask = self.get_mask(anno, idx)
anno = [
obj for obj in anno
if obj['iscrowd'] == 0 or obj['num_keypoints'] > 0
]
joints = self.get_joints(anno)
mask_list = [mask.copy() for _ in range(self.num_scales)]
joints_list = [joints.copy() for _ in range(self.num_scales)]
target_list = list()
if self.transforms:
img, mask_list, joints_list = self.transforms(
img, mask_list, joints_list
)
for scale_id in range(self.num_scales):
target_t = self.heatmap_generator[scale_id](joints_list[scale_id])
joints_t = self.joints_generator[scale_id](joints_list[scale_id])
target_list.append(target_t.astype(np.float32))
mask_list[scale_id] = mask_list[scale_id].astype(np.float32)
joints_list[scale_id] = joints_t.astype(np.int32)
return img, target_list, mask_list, joints_list
def get_joints(self, anno):
num_people = len(anno)
if self.scale_aware_sigma:
joints = np.zeros((num_people, self.num_joints, 4))
else:
joints = np.zeros((num_people, self.num_joints, 3))
for i, obj in enumerate(anno):
joints[i, :self.num_joints_without_center, :3] = \
np.array(obj['keypoints']).reshape([-1, 3])
if self.with_center:
joints_sum = np.sum(joints[i, :-1, :2], axis=0)
num_vis_joints = len(np.nonzero(joints[i, :-1, 2])[0])
if num_vis_joints > 0:
joints[i, -1, :2] = joints_sum / num_vis_joints
joints[i, -1, 2] = 1
| true | true |
1c2f67eaa12319964a62ea23974494926d2481ac | 3,046 | py | Python | scripts/print_handlers_md.py | mvalik/freshmaker | 5d0642348b2605b951e1df1b1d0f887ea2d9d1fe | [
"MIT"
] | null | null | null | scripts/print_handlers_md.py | mvalik/freshmaker | 5d0642348b2605b951e1df1b1d0f887ea2d9d1fe | [
"MIT"
] | null | null | null | scripts/print_handlers_md.py | mvalik/freshmaker | 5d0642348b2605b951e1df1b1d0f887ea2d9d1fe | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2017 Red Hat, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
#
# Prints all the available handlers and their dependencies on other services
# in Markdown format.
# It is intended to be called from the top-level Freshmaker git repository.
#
from __future__ import print_function
import os
import sys
# Set the PYTHON_PATH to top level Freshmaker directory and also set
# the FRESHMAKER_DEVELOPER_ENV to 1.
sys.path.append(os.getcwd())
os.environ["FRESHMAKER_DEVELOPER_ENV"] = "1"
def load_module(mod_name):
    """ Take a string of the form 'fedmsg.consumers.ircbot'
    and return the ircbot module.

    :param mod_name: fully qualified dotted module name
    :raises ImportError: if the module cannot be found
    """
    __import__(mod_name)
    try:
        return sys.modules[mod_name]
    # sys.modules is a dict, so a missing entry raises KeyError.
    # The original caught AttributeError, which can never occur here,
    # so the fallback ImportError was unreachable.
    except KeyError:
        raise ImportError("%r not found" % (mod_name))
# Key is the name of handler, value is list of dependencies.
handlers = {}
# Iterate over all directories in the ./freshmaker/handlers directory
# and in each of them, try to find out handlers.
handlers_path = "./freshmaker/handlers/"
for name in os.listdir(handlers_path):
    # Skip plain files and Python bytecode caches.
    if not os.path.isdir(handlers_path + name) or name in ["__pycache__"]:
        continue
    mod = load_module("freshmaker.handlers." + name)
    for submod_name in dir(mod):
        try:
            submod = getattr(mod, submod_name)
        except AttributeError:
            continue
        key = None
        deps = []
        # Inspect every attribute of the submodule: names ending in
        # "Handler" mark a handler class; a few well-known names mark a
        # dependency on an external service.
        for cls in dir(submod):
            if cls.endswith("Handler"):
                key = "freshmaker.handlers." + name + ":" + cls
            elif cls in ["PDC", "MBS", "Pulp", "Errata", "LightBlue"]:
                deps.append(cls)
            elif cls == "koji_service":
                deps.append("Koji")
        if key:
            handlers[key] = deps
# Emit the collected handlers as a Markdown bullet list.
print("## List of Freshmaker handlers")
print("")
print("Following is the list of all available HANDLERS:")
print("")
for name, deps in handlers.items():
    print("* `%s`" % name)
    if deps:
        print(" * Depends on: %s" % (", ".join(deps)))
| 35.011494 | 79 | 0.684504 |
from __future__ import print_function
import os
import sys
sys.path.append(os.getcwd())
os.environ["FRESHMAKER_DEVELOPER_ENV"] = "1"
def load_module(mod_name):
    """Import a dotted module path and return the module object itself.

    :param mod_name: fully qualified dotted module name
    :raises ImportError: if the module cannot be found
    """
    __import__(mod_name)
    try:
        return sys.modules[mod_name]
    # sys.modules is a dict, so a missing entry raises KeyError; the
    # original caught AttributeError, which can never occur here.
    except KeyError:
        raise ImportError("%r not found" % (mod_name))
# Maps handler name -> list of external-service dependencies.
handlers = {}
# Walk the handler package directories and collect handler classes.
handlers_path = "./freshmaker/handlers/"
for name in os.listdir(handlers_path):
    # Skip plain files and Python bytecode caches.
    if not os.path.isdir(handlers_path + name) or name in ["__pycache__"]:
        continue
    mod = load_module("freshmaker.handlers." + name)
    for submod_name in dir(mod):
        try:
            submod = getattr(mod, submod_name)
        except AttributeError:
            continue
        key = None
        deps = []
        # Names ending in "Handler" mark a handler class; a few
        # well-known names mark a dependency on an external service.
        for cls in dir(submod):
            if cls.endswith("Handler"):
                key = "freshmaker.handlers." + name + ":" + cls
            elif cls in ["PDC", "MBS", "Pulp", "Errata", "LightBlue"]:
                deps.append(cls)
            elif cls == "koji_service":
                deps.append("Koji")
        if key:
            handlers[key] = deps
# Emit the collected handlers as a Markdown bullet list.
print("## List of Freshmaker handlers")
print("")
print("Following is the list of all available HANDLERS:")
print("")
for name, deps in handlers.items():
    print("* `%s`" % name)
    if deps:
        print(" * Depends on: %s" % (", ".join(deps)))
| true | true |
1c2f6a4b4184922a65c21a503ba64ca6d19a0edf | 106,877 | py | Python | yarGen.py | p-g-krish/yarGen | 70cb03fd91b877982b9f1bff414cb98dd54e7cf5 | [
"BSD-3-Clause"
] | null | null | null | yarGen.py | p-g-krish/yarGen | 70cb03fd91b877982b9f1bff414cb98dd54e7cf5 | [
"BSD-3-Clause"
] | null | null | null | yarGen.py | p-g-krish/yarGen | 70cb03fd91b877982b9f1bff414cb98dd54e7cf5 | [
"BSD-3-Clause"
] | null | null | null | #!/usr/bin/env python
# -*- coding: iso-8859-1 -*-
# -*- coding: utf-8 -*-
#
# yarGen
# A Rule Generator for YARA Rules
#
# Florian Roth
__version__ = "0.22.0"
import os
import sys
# Hard Python-2 guard: the code below relies on Python-2-only behaviour
# (cPickle import, str-based binary data, str.encode('hex')), so bail out
# early on Python 3.
if sys.version_info[0] > 2:
    raise Exception("Some modules require Python 2, so please use that version instead of Python 3")
import argparse
import re
import traceback
import operator
import datetime
import time
import scandir
import pefile
import cPickle as pickle
import gzip
import urllib
from collections import Counter
from hashlib import sha256
from naiveBayesClassifier import tokenizer
from naiveBayesClassifier.trainer import Trainer
from naiveBayesClassifier.classifier import Classifier
import signal as signal_module
# lxml is optional: it is only needed to parse the PeStudio strings.xml
# blacklist; without it that scoring feature is disabled (see
# lxml_available checks elsewhere in the file).
try:
    from lxml import etree
    lxml_available = True
except Exception as e:
    print("[E] lxml not found - disabling PeStudio string check functionality")
    lxml_available = False
# File extensions considered interesting when the "only relevant
# extensions" mode is active (see parse_sample_dir / parse_good_dir).
RELEVANT_EXTENSIONS = [".asp", ".vbs", ".ps", ".ps1", ".tmp", ".bas", ".bat", ".cmd", ".com", ".cpl",
                       ".crt", ".dll", ".exe", ".msc", ".scr", ".sys", ".vb", ".vbe", ".vbs", ".wsc",
                       ".wsf", ".wsh", ".input", ".war", ".jsp", ".php", ".asp", ".aspx", ".psd1", ".psm1", ".py"]

# Download locations of the pre-built goodware databases (strings,
# opcodes, exports, imphashes), split into parts for size reasons.
REPO_URLS = {
    'good-opcodes-part1.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part1.db',
    'good-opcodes-part2.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part2.db',
    'good-opcodes-part3.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part3.db',
    'good-opcodes-part4.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part4.db',
    'good-opcodes-part5.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part5.db',
    'good-opcodes-part6.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part6.db',
    'good-opcodes-part7.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part7.db',
    'good-opcodes-part8.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part8.db',
    'good-opcodes-part9.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part9.db',
    'good-strings-part1.db': 'https://www.bsk-consulting.de/yargen/good-strings-part1.db',
    'good-strings-part2.db': 'https://www.bsk-consulting.de/yargen/good-strings-part2.db',
    'good-strings-part3.db': 'https://www.bsk-consulting.de/yargen/good-strings-part3.db',
    'good-strings-part4.db': 'https://www.bsk-consulting.de/yargen/good-strings-part4.db',
    'good-strings-part5.db': 'https://www.bsk-consulting.de/yargen/good-strings-part5.db',
    'good-strings-part6.db': 'https://www.bsk-consulting.de/yargen/good-strings-part6.db',
    'good-strings-part7.db': 'https://www.bsk-consulting.de/yargen/good-strings-part7.db',
    'good-strings-part8.db': 'https://www.bsk-consulting.de/yargen/good-strings-part8.db',
    'good-strings-part9.db': 'https://www.bsk-consulting.de/yargen/good-strings-part9.db',
    'good-exports-part1.db': 'https://www.bsk-consulting.de/yargen/good-exports-part1.db',
    'good-exports-part2.db': 'https://www.bsk-consulting.de/yargen/good-exports-part2.db',
    'good-exports-part3.db': 'https://www.bsk-consulting.de/yargen/good-exports-part3.db',
    'good-exports-part4.db': 'https://www.bsk-consulting.de/yargen/good-exports-part4.db',
    'good-exports-part5.db': 'https://www.bsk-consulting.de/yargen/good-exports-part5.db',
    'good-exports-part6.db': 'https://www.bsk-consulting.de/yargen/good-exports-part6.db',
    'good-exports-part7.db': 'https://www.bsk-consulting.de/yargen/good-exports-part7.db',
    'good-exports-part8.db': 'https://www.bsk-consulting.de/yargen/good-exports-part8.db',
    'good-exports-part9.db': 'https://www.bsk-consulting.de/yargen/good-exports-part9.db',
    'good-imphashes-part1.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part1.db',
    'good-imphashes-part2.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part2.db',
    'good-imphashes-part3.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part3.db',
    'good-imphashes-part4.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part4.db',
    'good-imphashes-part5.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part5.db',
    'good-imphashes-part6.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part6.db',
    'good-imphashes-part7.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part7.db',
    'good-imphashes-part8.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part8.db',
    'good-imphashes-part9.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part9.db',
}

# PeStudio blacklist strings (optional input; requires lxml to parse).
PE_STRINGS_FILE = "./3rdparty/strings.xml"

# Imphashes of well-known benign/dual-use tools, mapped to their names.
KNOWN_IMPHASHES = {'a04dd9f5ee88d7774203e0a0cfa1b941': 'PsExec',
                   '2b8c9d9ab6fefc247adaf927e83dcea6': 'RAR SFX variant'}
def get_abs_path(filename):
    """Resolve *filename* relative to the directory containing this script."""
    script_dir = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(script_dir, filename)
def get_files(dir, notRecursive):
    """Yield file paths below *dir*.

    With notRecursive=True only direct children of *dir* are listed
    (sub-directories are skipped); otherwise the whole tree is walked
    without following symlinks.
    """
    if notRecursive:
        # Flat listing: skip anything that is itself a directory.
        for entry in os.listdir(dir):
            candidate = os.path.join(dir, entry)
            if not os.path.isdir(candidate):
                yield candidate
    else:
        # Recursive walk (scandir-based for speed on Python 2).
        for root, _, files in scandir.walk(dir, followlinks=False):
            for entry in files:
                yield os.path.join(root, entry)
def parse_sample_dir(dir, notRecursive=False, generateInfo=False, onlyRelevantExtensions=False):
    """Walk the malware sample directory and collect statistics.

    Returns (string_stats, opcode_stats, file_info):
      - string_stats / opcode_stats map each string/opcode to its count,
        the file paths it appeared in, and per-basename counts
      - file_info maps file paths to hash/imphash/exports/magic/size and
        file basenames to occurrence counts (used for inverse rules)
    """
    # Prepare dictionaries
    string_stats = {}
    opcode_stats = {}
    file_info = {}
    known_sha1sums = []

    for filePath in get_files(dir, notRecursive):
        try:
            print("[+] Processing %s ..." % filePath)

            # Get Extension
            extension = os.path.splitext(filePath)[1].lower()
            if not extension in RELEVANT_EXTENSIONS and onlyRelevantExtensions:
                if args.debug:
                    print("[-] EXTENSION %s - Skipping file %s" % (extension, filePath))
                continue

            # Skip the goodware/db info files themselves if they happen to
            # live inside the sample directory
            if os.path.basename(filePath) == os.path.basename(args.b) or \
                    os.path.basename(filePath) == os.path.basename(args.r):
                continue

            # Size check (-fs parameter, in MB)
            size = 0
            try:
                size = os.stat(filePath).st_size
                if size > (args.fs * 1024 * 1024):
                    if args.debug:
                        print("[-] File is to big - Skipping file %s (use -fs to adjust this behaviour)" % (filePath))
                    continue
            except Exception as e:
                pass

            # Check and read file
            try:
                with open(filePath, 'rb') as f:
                    fileData = f.read()
            except Exception as e:
                print("[-] Cannot read file - skipping %s" % filePath)
                # BUGFIX: without this 'continue' the loop carried on with
                # the previous file's data (or hit a NameError on the first
                # unreadable file).
                continue

            # Extract strings from file
            strings = extract_strings(fileData)

            # Extract opcodes from file
            opcodes = []
            if use_opcodes:
                print("[-] Extracting OpCodes: %s" % filePath)
                opcodes = extract_opcodes(fileData)

            # Add sha256 value
            if generateInfo:
                sha256sum = sha256(fileData).hexdigest()
                file_info[filePath] = {}
                file_info[filePath]["hash"] = sha256sum
                file_info[filePath]["imphash"], file_info[filePath]["exports"] = get_pe_info(fileData)

            # Skip if hash already known - avoid processing duplicate files
            if sha256sum in known_sha1sums:
                print("[-] Skipping strings/opcodes from %s due to SHA-256 duplicate detection" % filePath)
                continue
            else:
                known_sha1sums.append(sha256sum)

            # Magic evaluation (first two bytes, e.g. "MZ")
            if not args.nomagic:
                file_info[filePath]["magic"] = fileData[:2]
            else:
                file_info[filePath]["magic"] = ""

            # File Size
            file_info[filePath]["size"] = os.stat(filePath).st_size

            # Add stats for basename (needed for inverse rule generation)
            fileName = os.path.basename(filePath)
            folderName = os.path.basename(os.path.dirname(filePath))
            if fileName not in file_info:
                file_info[fileName] = {}
                file_info[fileName]["count"] = 0
                file_info[fileName]["hashes"] = []
                file_info[fileName]["folder_names"] = []
            file_info[fileName]["count"] += 1
            file_info[fileName]["hashes"].append(sha256sum)
            if folderName not in file_info[fileName]["folder_names"]:
                file_info[fileName]["folder_names"].append(folderName)

            # Add strings to statistics
            for string in strings:
                # String is not already known
                if string not in string_stats:
                    string_stats[string] = {}
                    string_stats[string]["count"] = 0
                    string_stats[string]["files"] = []
                    string_stats[string]["files_basename"] = {}
                # String count
                string_stats[string]["count"] += 1
                # Add file information
                if fileName not in string_stats[string]["files_basename"]:
                    string_stats[string]["files_basename"][fileName] = 0
                string_stats[string]["files_basename"][fileName] += 1
                string_stats[string]["files"].append(filePath)

            # Add opcodes to statistics
            for opcode in opcodes:
                # Opcode is not already known
                if opcode not in opcode_stats:
                    opcode_stats[opcode] = {}
                    opcode_stats[opcode]["count"] = 0
                    opcode_stats[opcode]["files"] = []
                    opcode_stats[opcode]["files_basename"] = {}
                # Opcode count
                opcode_stats[opcode]["count"] += 1
                # Add file information
                if fileName not in opcode_stats[opcode]["files_basename"]:
                    opcode_stats[opcode]["files_basename"][fileName] = 0
                opcode_stats[opcode]["files_basename"][fileName] += 1
                opcode_stats[opcode]["files"].append(filePath)

            if args.debug:
                print("[+] Processed " + filePath + " Size: " + str(size) + " Strings: " + str(len(string_stats)) + \
                      " OpCodes: " + str(len(opcode_stats)) + " ... ")

        except Exception as e:
            traceback.print_exc()
            print("[E] ERROR reading file: %s" % filePath)

    return string_stats, opcode_stats, file_info
def parse_good_dir(dir, notRecursive=False, onlyRelevantExtensions=True):
    """Walk the goodware directory and collect Counters over all strings,
    opcodes, imphashes and export names found in the files.

    Returns (all_strings, all_opcodes, all_imphashes, all_exports).
    """
    # Prepare counters
    all_strings = Counter()
    all_opcodes = Counter()
    all_imphashes = Counter()
    all_exports = Counter()

    for filePath in get_files(dir, notRecursive):
        # Get Extension
        extension = os.path.splitext(filePath)[1].lower()
        if extension not in RELEVANT_EXTENSIONS and onlyRelevantExtensions:
            if args.debug:
                print("[-] EXTENSION %s - Skipping file %s" % (extension, filePath))
            continue

        # Size check (-fs parameter, in MB)
        size = 0
        try:
            size = os.stat(filePath).st_size
            if size > (args.fs * 1024 * 1024):
                continue
        except Exception as e:
            pass

        # Check and read file
        try:
            with open(filePath, 'rb') as f:
                fileData = f.read()
        except Exception as e:
            print("[-] Cannot read file - skipping %s" % filePath)
            # BUGFIX: without this 'continue' the loop carried on with the
            # previous file's data (or hit a NameError on the first
            # unreadable file).
            continue

        # Extract strings from file
        strings = extract_strings(fileData)
        # Append to all strings
        all_strings.update(strings)

        # Extract Opcodes from file
        opcodes = []
        if use_opcodes:
            print("[-] Extracting OpCodes: %s" % filePath)
            opcodes = extract_opcodes(fileData)
            # Append to all opcodes
            all_opcodes.update(opcodes)

        # Imphash and Exports
        (imphash, exports) = get_pe_info(fileData)
        all_exports.update(exports)
        all_imphashes.update([imphash])
        if args.debug:
            print("[+] Processed %s - %d strings %d opcodes %d exports and imphash %s" % (filePath, len(strings),
                                                                                          len(opcodes), len(exports),
                                                                                          imphash))
    # Return the counters (unique keys with occurrence counts)
    return all_strings, all_opcodes, all_imphashes, all_exports
def extract_strings(fileData):
    """Extract printable ASCII, hex-derived and UTF-16LE strings from raw
    file data, escape them for YARA use and return a de-duplicated list.

    UTF-16LE strings are prefixed with "UTF16LE:" so later stages can
    emit them as wide strings. Any extraction error is swallowed (best
    effort) and whatever was collected so far is returned.
    """
    # Final, de-duplicated string list
    cleaned_strings = []

    try:
        # ASCII runs of 6+ chars, plus a length-capped variant (-s) and
        # strings recovered from hex dumps
        strings_full = re.findall("[\x1f-\x7e]{6,}", fileData)
        strings_limited = re.findall("[\x1f-\x7e]{6,%d}" % args.s, fileData)
        strings_hex = extract_hex_strings(fileData)
        strings = list(set(strings_full) | set(strings_limited) | set(strings_hex))
        # UTF-16LE runs of 6+ chars, tagged with a marker prefix
        strings += [str("UTF16LE:%s" % ws.decode("utf-16le")) for ws in re.findall("(?:[\x1f-\x7e][\x00]){6,}", fileData)]

        # Escape strings
        for string in strings:
            if len(string) > 0:
                string = string.replace('\\', '\\\\')
                string = string.replace('"', '\\"')
                string = string.lstrip(" ")
                # BUGFIX: de-duplicate on the final (escaped, stripped)
                # form - previously the membership test used the
                # unstripped string while the stripped one was appended,
                # so entries differing only in leading spaces produced
                # duplicates in the result list.
                if string not in cleaned_strings:
                    cleaned_strings.append(string)
    except Exception as e:
        if args.debug:
            traceback.print_exc()
        pass

    return cleaned_strings
def extract_opcodes(fileData):
    """Extract opcode snippets from the entry-point section of a PE file.

    The section containing the entry point is located, its data is split
    on runs of three or more null bytes, and the first 16 bytes of each
    non-trivial part are hex-encoded. Returns a list of hex strings;
    returns an empty list for anything that pefile cannot parse.
    """
    # Collected hex-encoded opcode snippets
    opcodes = []

    # Read file data
    try:
        pe = pefile.PE(data=fileData)
        name = ""
        ep = pe.OPTIONAL_HEADER.AddressOfEntryPoint
        pos = 0
        # Find the section whose virtual address range contains the
        # entry point; its (null-stripped) name is remembered.
        for sec in pe.sections:
            if (ep >= sec.VirtualAddress) and \
               (ep < (sec.VirtualAddress + sec.Misc_VirtualSize)):
                name = sec.Name.replace('\x00', '')
                break
            else:
                pos += 1
        # Second pass: grab the data of the section found above.
        for section in pe.sections:
            if section.Name.rstrip("\x00") == name:
                text = section.get_data()
                # Split text into subs (runs of 3+ null bytes separate them)
                text_parts = re.split("[\x00]{3,}", text)
                # Now truncate and encode opcodes
                for text_part in text_parts:
                    if text_part == '' or len(text_part) < 8:
                        continue
                    # Python-2 'hex' codec: 16 raw bytes -> 32 hex chars
                    opcodes.append(text_part[:16].encode('hex'))
    except Exception as e:
        # Best effort: non-PE input or parsing errors yield no opcodes.
        #if args.debug:
        #    traceback.print_exc()
        pass

    return opcodes
def get_pe_info(fileData):
    """Return the (imphash, export name list) of a PE file.

    Non-PE data (no "MZ" magic) short-circuits to ("", []); any parsing
    failure likewise falls back to whatever was collected so far.
    """
    imphash = ""
    exports = []
    # Fast path: skip anything without the DOS "MZ" magic.
    if not fileData.startswith("MZ"):
        return imphash, exports
    try:
        if args.debug:
            print("Extracting PE information")
        pe = pefile.PE(data=fileData)
        imphash = pe.get_imphash()
        # Collect exported symbol names one by one so that a partial list
        # survives if the export directory is missing or truncated.
        for exported in pe.DIRECTORY_ENTRY_EXPORT.symbols:
            exports.append(exported.name)
    except Exception:
        # Files without an export directory land here; tolerate it.
        pass
    return imphash, exports
def sample_string_evaluation(string_stats, opcode_stats, file_info):
    """Evaluate collected string/opcode statistics for rule generation.

    Produces per-file string and opcode lists (strings/opcodes that are
    rare across the sample set), inverse-rule statistics (strings that
    appear in every sample sharing a basename), and "super rules" for
    string combinations shared across several files.

    Returns (file_strings, file_opcodes, combinations, super_rules,
    inverse_stats).
    """
    # Generate Stats -----------------------------------------------------------
    print("[+] Generating statistical data ...")
    file_strings = {}
    file_opcodes = {}
    combinations = {}
    inverse_stats = {}
    max_combi_count = 0
    super_rules = []

    # OPCODE EVALUATION --------------------------------------------------------
    for opcode in opcode_stats:
        # Only opcodes that occur rarely across the sample set are useful
        if opcode_stats[opcode]["count"] < 10:
            # Map the opcode back onto every file it was seen in
            for filePath in opcode_stats[opcode]["files"]:
                if filePath in file_opcodes:
                    # Append opcode
                    file_opcodes[filePath].append(opcode)
                else:
                    # Create list and then add the first opcode to the file
                    file_opcodes[filePath] = []
                    file_opcodes[filePath].append(opcode)

    # STRING EVALUATION -------------------------------------------------------

    # Iterate through strings found in malware files
    for string in string_stats:

        # Only strings that occur rarely across the sample set are useful
        if string_stats[string]["count"] < 10:
            # Map the string back onto every file it was seen in
            for filePath in string_stats[string]["files"]:
                if filePath in file_strings:
                    # Append string
                    file_strings[filePath].append(string)
                else:
                    # Create list and then add the first string to the file
                    file_strings[filePath] = []
                    file_strings[filePath].append(string)

            # INVERSE RULE GENERATION -------------------------------------
            # A string qualifies for an inverse rule if it appears in every
            # sample that shares the same basename.
            if args.inverse:
                for fileName in string_stats[string]["files_basename"]:
                    string_occurrance_count = string_stats[string]["files_basename"][fileName]
                    total_count_basename = file_info[fileName]["count"]
                    # print "string_occurance_count %s - total_count_basename %s" % ( string_occurance_count,
                    # total_count_basename )
                    if string_occurrance_count == total_count_basename:
                        if fileName not in inverse_stats:
                            inverse_stats[fileName] = []
                        if args.trace:
                            print("Appending %s to %s" % (string, fileName))
                        inverse_stats[fileName].append(string)

        # SUPER RULE GENERATION -----------------------------------------------
        if not nosuper and not args.inverse:

            # SUPER RULES GENERATOR - preliminary work
            # If a string occurs more than once in different files
            # print sample_string_stats[string]["count"]
            if string_stats[string]["count"] > 1:
                if args.debug:
                    print("OVERLAP Count: %s\nString: \"%s\"%s" % (string_stats[string]["count"], string,
                                                                   "\nFILE: ".join(string_stats[string]["files"])))
                # Create a combination string from the file set that matches to that string
                combi = ":".join(sorted(string_stats[string]["files"]))
                # print "STRING: " + string
                if args.debug:
                    print("COMBI: " + combi)
                # If combination not yet known
                if combi not in combinations:
                    combinations[combi] = {}
                    combinations[combi]["count"] = 1
                    combinations[combi]["strings"] = []
                    combinations[combi]["strings"].append(string)
                    combinations[combi]["files"] = string_stats[string]["files"]
                else:
                    combinations[combi]["count"] += 1
                    combinations[combi]["strings"].append(string)
                # Set the maximum combination count
                if combinations[combi]["count"] > max_combi_count:
                    max_combi_count = combinations[combi]["count"]
                    # print "Max Combi Count set to: %s" % max_combi_count

    print("[+] Generating Super Rules ... (a lot of foo magic)")
    # Walk combination sizes from largest to smallest so the biggest
    # shared string sets become super rules first.
    for combi_count in range(max_combi_count, 1, -1):
        for combi in combinations:
            if combi_count == combinations[combi]["count"]:
                # print "Count %s - Combi %s" % ( str(combinations[combi]["count"]), combi )
                # Filter the string set
                # print "BEFORE"
                # print len(combinations[combi]["strings"])
                # print combinations[combi]["strings"]
                string_set = combinations[combi]["strings"]
                combinations[combi]["strings"] = []
                combinations[combi]["strings"] = filter_string_set(string_set)
                # print combinations[combi]["strings"]
                # print "AFTER"
                # print len(combinations[combi]["strings"])
                # Combi String count after filtering
                # print "String count after filtering: %s" % str(len(combinations[combi]["strings"]))

                # If the string set of the combination has a required size (-w)
                if len(combinations[combi]["strings"]) >= int(args.w):
                    # Remove the files in the combi rule from the simple set
                    if args.nosimple:
                        for file in combinations[combi]["files"]:
                            if file in file_strings:
                                del file_strings[file]
                    # Add it as a super rule
                    print("[-] Adding Super Rule with %s strings." % str(len(combinations[combi]["strings"])))
                    # if args.debug:
                    # print "Rule Combi: %s" % combi
                    super_rules.append(combinations[combi])

    # Return all data
    return (file_strings, file_opcodes, combinations, super_rules, inverse_stats)
def filter_opcode_set(opcode_set):
    """Filter the opcode candidates of one sample.

    Opcodes known from the goodware database are dropped; opcodes whose
    formatted form contains a "preferred" pattern are moved to the front
    of the result, which is finally truncated to the -n limit.
    """
    # Patterns that make an opcode more interesting for detection
    pref_patterns = [' 34 ', 'ff ff ff ']
    preferred = []
    regular = []

    for opcode in opcode_set:
        # Anything seen in goodware is useless for detection
        if opcode in good_opcodes_db:
            continue
        formatted = get_opcode_string(opcode)
        # One entry per matching preferred pattern (mirrors the original
        # per-pattern append behaviour)
        hits = [p for p in pref_patterns if p in formatted]
        if hits:
            preferred.extend([formatted] * len(hits))
            continue
        regular.append(formatted)

    # Preferred opcodes first, then truncate to the "-n" limit
    return (preferred + regular)[:int(args.n)]
def filter_string_set(string_set):
# This is the only set we have - even if it's a weak one
useful_set = []
# Bayes Classificator (new method)
stringClassifier = Classifier(stringTrainer.data, tokenizer)
# Local string scores
localStringScores = {}
# Local UTF strings
utfstrings = []
for string in string_set:
# Goodware string marker
goodstring = False
goodcount = 0
# Goodware Strings
if string in good_strings_db:
goodstring = True
goodcount = good_strings_db[string]
# print "%s - %s" % ( goodstring, good_strings[string] )
if args.excludegood:
continue
# UTF
original_string = string
if string[:8] == "UTF16LE:":
# print "removed UTF16LE from %s" % string
string = string[8:]
utfstrings.append(string)
# Good string evaluation (after the UTF modification)
if goodstring:
# Reduce the score by the number of occurence in goodware files
localStringScores[string] = (goodcount * -1) + 5
else:
localStringScores[string] = 0
# PEStudio String Blacklist Evaluation
if pestudio_available:
(pescore, type) = get_pestudio_score(string)
# print("PE Match: %s" % string)
# Reset score of goodware files to 5 if blacklisted in PEStudio
if type != "":
pestudioMarker[string] = type
# Modify the PEStudio blacklisted strings with their goodware stats count
if goodstring:
pescore = pescore - (goodcount / 1000.0)
# print "%s - %s - %s" % (string, pescore, goodcount)
localStringScores[string] = pescore
if not goodstring:
# Bayes Classifier
classification = stringClassifier.classify(string)
if classification[0][1] == 0 and len(string) > 10:
# Try to split the string into words and then check again
modified_string = re.sub(r'[\\\/\-\.\_<>="\']', ' ', string).rstrip(" ").lstrip(" ")
# print "Checking instead: %s" % modified_string
classification = stringClassifier.classify(modified_string)
#if args.debug:
# print "[D] Bayes Score: %s %s" % (str(classification), string)
localStringScores[string] += classification[0][1]
# Length Score
#length = len(string)
#if length > int(args.y) and length < int(args.s):
# localStringScores[string] += round(len(string) / 8, 2)
#if length >= int(args.s):
# localStringScores[string] += 1
# Reduction
if ".." in string:
localStringScores[string] -= 5
if " " in string:
localStringScores[string] -= 5
# Packer Strings
if re.search(r'(WinRAR\\SFX)', string):
localStringScores[string] -= 4
# US ASCII char
if "\x1f" in string:
localStringScores[string] -= 4
# Chains of 00s
if string.count('0000000000') > 2:
localStringScores[string] -= 5
# Repeated characters
if re.search(r'(?!.* ([A-Fa-f0-9])\1{8,})', string):
localStringScores[string] -= 5
# Certain strings add-ons ----------------------------------------------
# Extensions - Drive
if re.search(r'[A-Za-z]:\\', string, re.IGNORECASE):
localStringScores[string] += 2
# Relevant file extensions
if re.search(r'(\.exe|\.pdb|\.scr|\.log|\.cfg|\.txt|\.dat|\.msi|\.com|\.bat|\.dll|\.pdb|\.vbs|'
r'\.tmp|\.sys|\.ps1|\.vbp|\.hta|\.lnk)', string, re.IGNORECASE):
localStringScores[string] += 4
# System keywords
if re.search(r'(cmd.exe|system32|users|Documents and|SystemRoot|Grant|hello|password|process|log)',
string, re.IGNORECASE):
localStringScores[string] += 5
# Protocol Keywords
if re.search(r'(ftp|irc|smtp|command|GET|POST|Agent|tor2web|HEAD)', string, re.IGNORECASE):
localStringScores[string] += 5
# Connection keywords
if re.search(r'(error|http|closed|fail|version|proxy)', string, re.IGNORECASE):
localStringScores[string] += 3
# Browser User Agents
if re.search(r'(Mozilla|MSIE|Windows NT|Macintosh|Gecko|Opera|User\-Agent)', string, re.IGNORECASE):
localStringScores[string] += 5
# Temp and Recycler
if re.search(r'(TEMP|Temporary|Appdata|Recycler)', string, re.IGNORECASE):
localStringScores[string] += 4
# Malicious keywords - hacktools
if re.search(r'(scan|sniff|poison|intercept|fake|spoof|sweep|dump|flood|inject|forward|scan|vulnerable|'
r'credentials|creds|coded|p0c|Content|host)', string, re.IGNORECASE):
localStringScores[string] += 5
# Network keywords
if re.search(r'(address|port|listen|remote|local|process|service|mutex|pipe|frame|key|lookup|connection)',
string, re.IGNORECASE):
localStringScores[string] += 3
# Drive
if re.search(r'([C-Zc-z]:\\)', string, re.IGNORECASE):
localStringScores[string] += 4
# IP
if re.search(
r'\b(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\b',
string, re.IGNORECASE): # IP Address
localStringScores[string] += 5
# Copyright Owner
if re.search(r'(coded | c0d3d |cr3w\b|Coded by |codedby)', string, re.IGNORECASE):
localStringScores[string] += 7
# Extension generic
if re.search(r'\.[a-zA-Z]{3}\b', string):
localStringScores[string] += 3
# All upper case
if re.search(r'^[A-Z]{6,}$', string):
localStringScores[string] += 2.5
# All lower case
if re.search(r'^[a-z]{6,}$', string):
localStringScores[string] += 2
# All lower with space
if re.search(r'^[a-z\s]{6,}$', string):
localStringScores[string] += 2
# All characters
if re.search(r'^[A-Z][a-z]{5,}$', string):
localStringScores[string] += 2
# URL
if re.search(r'(%[a-z][:\-,;]|\\\\%s|\\\\[A-Z0-9a-z%]+\\[A-Z0-9a-z%]+)', string):
localStringScores[string] += 2.5
# certificates
if re.search(r'(thawte|trustcenter|signing|class|crl|CA|certificate|assembly)', string, re.IGNORECASE):
localStringScores[string] -= 4
# Parameters
if re.search(r'( \-[a-z]{,2}[\s]?[0-9]?| /[a-z]+[\s]?[\w]*)', string, re.IGNORECASE):
localStringScores[string] += 4
# Directory
if re.search(r'([a-zA-Z]:|^|%)\\[A-Za-z]{4,30}\\', string):
localStringScores[string] += 4
# Executable - not in directory
if re.search(r'^[^\\]+\.(exe|com|scr|bat|sys)$', string, re.IGNORECASE):
localStringScores[string] += 4
# Date placeholders
if re.search(r'(yyyy|hh:mm|dd/mm|mm/dd|%s:%s:)', string, re.IGNORECASE):
localStringScores[string] += 3
# Placeholders
if re.search(r'[^A-Za-z](%s|%d|%i|%02d|%04d|%2d|%3s)[^A-Za-z]', string, re.IGNORECASE):
localStringScores[string] += 3
# String parts from file system elements
if re.search(r'(cmd|com|pipe|tmp|temp|recycle|bin|secret|private|AppData|driver|config)', string,
re.IGNORECASE):
localStringScores[string] += 3
# Programming
if re.search(r'(execute|run|system|shell|root|cimv2|login|exec|stdin|read|process|netuse|script|share)',
string, re.IGNORECASE):
localStringScores[string] += 3
# Credentials
if re.search(r'(user|pass|login|logon|token|cookie|creds|hash|ticket|NTLM|LMHASH|kerberos|spnego|session|'
r'identif|account|login|auth|privilege)', string, re.IGNORECASE):
localStringScores[string] += 3
# Malware
if re.search(r'(\.[a-z]/[^/]+\.txt|)', string, re.IGNORECASE):
localStringScores[string] += 3
# Variables
if re.search(r'%[A-Z_]+%', string, re.IGNORECASE):
localStringScores[string] += 4
# RATs / Malware
if re.search(r'(spy|logger|dark|cryptor|RAT\b|eye|comet|evil|xtreme|poison|meterpreter|metasploit|/veil|Blood)',
string, re.IGNORECASE):
localStringScores[string] += 5
# Missed user profiles
if re.search(r'[\\](users|profiles|username|benutzer|Documents and Settings|Utilisateurs|Utenti|'
r'Usuários)[\\]', string, re.IGNORECASE):
localStringScores[string] += 3
# Strings: Words ending with numbers
if re.search(r'^[A-Z][a-z]+[0-9]+$', string, re.IGNORECASE):
localStringScores[string] += 1
# Spying
if re.search(r'(implant)', string, re.IGNORECASE):
localStringScores[string] += 1
# Program Path - not Programs or Windows
if re.search(r'^[Cc]:\\\\[^PW]', string):
localStringScores[string] += 3
# Special strings
if re.search(r'(\\\\\.\\|kernel|.dll|usage|\\DosDevices\\)', string, re.IGNORECASE):
localStringScores[string] += 5
# Parameters
if re.search(r'( \-[a-z] | /[a-z] | \-[a-z]:[a-zA-Z]| \/[a-z]:[a-zA-Z])', string):
localStringScores[string] += 4
# File
if re.search(r'^[a-zA-Z0-9]{3,40}\.[a-zA-Z]{3}', string, re.IGNORECASE):
localStringScores[string] += 3
# Comment Line / Output Log
if re.search(r'^([\*\#]+ |\[[\*\-\+]\] |[\-=]> |\[[A-Za-z]\] )', string):
localStringScores[string] += 4
# Output typo / special expression
if re.search(r'(!\.$|!!!$| :\)$| ;\)$|fucked|[\w]\.\.\.\.$)', string):
localStringScores[string] += 4
# Base64
if re.search(r'^(?:[A-Za-z0-9+/]{4}){30,}(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$', string) and \
re.search(r'[A-Za-z]', string) and re.search(r'[0-9]', string):
localStringScores[string] += 6
# Base64 Executables
if re.search(r'(TVqQAAMAAAAEAAAA//8AALgAAAA|TVpQAAIAAAAEAA8A//8AALgAAAA|TVqAAAEAAAAEABAAAAAAAAAAAAA|'
r'TVoAAAAAAAAAAAAAAAAAAAAAAAA|TVpTAQEAAAAEAAAA//8AALgAAAA)', string):
localStringScores[string] += 5
# Malicious intent
if re.search(r'(loader|cmdline|ntlmhash|lmhash|infect|encrypt|exec|elevat|dump|target|victim|override|'
r'traverse|mutex|pawnde|exploited|shellcode|injected|spoofed|dllinjec|exeinj|reflective|'
r'payload|inject|back conn)',
string, re.IGNORECASE):
localStringScores[string] += 5
# Privileges
if re.search(r'(administrator|highest|system|debug|dbg|admin|adm|root) privilege', string, re.IGNORECASE):
localStringScores[string] += 4
# System file/process names
if re.search(r'(LSASS|SAM|lsass.exe|cmd.exe|LSASRV.DLL)', string):
localStringScores[string] += 4
# System file/process names
if re.search(r'(\.exe|\.dll|\.sys)$', string, re.IGNORECASE):
localStringScores[string] += 4
# Indicators that string is valid
if re.search(r'(^\\\\)', string, re.IGNORECASE):
localStringScores[string] += 1
# Compiler output directories
if re.search(r'(\\Release\\|\\Debug\\|\\bin|\\sbin)', string, re.IGNORECASE):
localStringScores[string] += 2
# Special - Malware related strings
if re.search(r'(Management Support Team1|/c rundll32|DTOPTOOLZ Co.|net start|Exec|taskkill)', string):
localStringScores[string] += 4
# Powershell
if re.search(r'(bypass|windowstyle | hidden |-command|IEX |Invoke-Expression|Net.Webclient|Invoke[A-Z]|'
r'Net.WebClient|-w hidden |-encoded'
r'-encodedcommand| -nop |MemoryLoadLibrary|FromBase64String|Download|EncodedCommand)', string, re.IGNORECASE):
localStringScores[string] += 4
# WMI
if re.search(r'( /c WMIC)', string, re.IGNORECASE):
localStringScores[string] += 3
# Windows Commands
if re.search(r'( net user | net group |ping |whoami |bitsadmin |rundll32.exe javascript:|'
r'schtasks.exe /create|/c start )',
string, re.IGNORECASE):
localStringScores[string] += 3
# JavaScript
if re.search(r'(new ActiveXObject\("WScript.Shell"\).Run|.Run\("cmd.exe|.Run\("%comspec%\)|'
r'.Run\("c:\\Windows|.RegisterXLL\()', string, re.IGNORECASE):
localStringScores[string] += 3
# Signing Certificates
if re.search(r'( Inc | Co.| Ltd.,| LLC| Limited)', string):
localStringScores[string] += 2
# Privilege escalation
if re.search(r'(sysprep|cryptbase|secur32)', string, re.IGNORECASE):
localStringScores[string] += 2
# Webshells
if re.search(r'(isset\($post\[|isset\($get\[|eval\(Request)', string, re.IGNORECASE):
localStringScores[string] += 2
# Suspicious words 1
if re.search(r'(impersonate|drop|upload|download|execute|shell|\bcmd\b|decode|rot13|decrypt)', string,
re.IGNORECASE):
localStringScores[string] += 2
# Suspicious words 1
if re.search(r'([+] |[-] |[*] |injecting|exploit|dumped|dumping|scanning|scanned|elevation|'
r'elevated|payload|vulnerable|payload|reverse connect|bind shell|reverse shell| dump | '
r'back connect |privesc|privilege escalat|debug privilege| inject |interactive shell|'
r'shell commands| spawning |] target |] Transmi|] Connect|] connect|] Dump|] command |'
r'] token|] Token |] Firing | hashes | etc/passwd| SAM | NTML|unsupported target|'
r'race condition|Token system |LoaderConfig| add user |ile upload |ile download |'
r'Attaching to |ser has been successfully added|target system |LSA Secrets|DefaultPassword|'
r'Password: |loading dll|.Execute\(|Shellcode|Loader|inject x86|inject x64|bypass|katz|'
r'sploit|ms[0-9][0-9][^0-9]|\bCVE[^a-zA-Z]|privilege::|lsadump|door)',
string, re.IGNORECASE):
localStringScores[string] += 4
# Mutex / Named Pipes
if re.search(r'(Mutex|NamedPipe|\\Global\\|\\pipe\\)', string, re.IGNORECASE):
localStringScores[string] += 3
# Usage
if re.search(r'(isset\($post\[|isset\($get\[)', string, re.IGNORECASE):
localStringScores[string] += 2
# Hash
if re.search(r'\b([a-f0-9]{32}|[a-f0-9]{40}|[a-f0-9]{64})\b', string, re.IGNORECASE):
localStringScores[string] += 2
# Persistence
if re.search(r'(sc.exe |schtasks|at \\\\|at [0-9]{2}:[0-9]{2})', string, re.IGNORECASE):
localStringScores[string] += 3
# Unix/Linux
if re.search(r'(;chmod |; chmod |sh -c|/dev/tcp/|/bin/telnet|selinux| shell| cp /bin/sh )', string,
re.IGNORECASE):
localStringScores[string] += 3
# Attack
if re.search(
r'(attacker|brute force|bruteforce|connecting back|EXHAUSTIVE|exhaustion| spawn| evil| elevated)',
string, re.IGNORECASE):
localStringScores[string] += 3
# Strings with less value
if re.search(r'(abcdefghijklmnopqsst|ABCDEFGHIJKLMNOPQRSTUVWXYZ|0123456789:;)', string, re.IGNORECASE):
localStringScores[string] -= 5
# VB Backdoors
if re.search(
r'(kill|wscript|plugins|svr32|Select |)',
string, re.IGNORECASE):
localStringScores[string] += 3
# Suspicious strings - combo / special characters
if re.search(
r'([a-z]{4,}[!\?]|\[[!+\-]\] |[a-zA-Z]{4,}...)',
string, re.IGNORECASE):
localStringScores[string] += 3
if re.search(
r'(-->|!!!| <<< | >>> )',
string, re.IGNORECASE):
localStringScores[string] += 5
# Swear words
if re.search(
r'\b(fuck|damn|shit|penis)\b',
string, re.IGNORECASE):
localStringScores[string] += 5
# Scripting Strings
if re.search(
r'(%APPDATA%|%USERPROFILE%|Public|Roaming|& del|& rm| && |script)',
string, re.IGNORECASE):
localStringScores[string] += 3
# UACME Bypass
if re.search(
r'(Elevation|pwnd|pawn|elevate to)',
string, re.IGNORECASE):
localStringScores[string] += 3
# ENCODING DETECTIONS --------------------------------------------------
try:
if len(string) > 8:
# Try different ways - fuzz string
# Base64
if args.trace:
print("Starting Base64 string analysis ...")
for m_string in (string, string[1:], string[1:] + "=", string + "=", string + "=="):
if is_base_64(m_string):
decoded_string = m_string.decode('base64')
# print decoded_string
if is_ascii_string(decoded_string, padding_allowed=True):
# print "match"
localStringScores[string] += 10
base64strings[string] = decoded_string
# Hex Encoded string
if args.trace:
print("Starting Hex encoded string analysis ...")
for m_string in ([string, re.sub('[^a-zA-Z0-9]', '', string)]):
#print m_string
if is_hex_encoded(m_string):
#print("^ is HEX")
decoded_string = m_string.decode('hex')
#print removeNonAsciiDrop(decoded_string)
if is_ascii_string(decoded_string, padding_allowed=True):
# not too many 00s
if '00' in m_string:
if len(m_string) / float(m_string.count('0')) <= 1.2:
continue
#print("^ is ASCII / WIDE")
localStringScores[string] += 8
hexEncStrings[string] = decoded_string
except Exception as e:
if args.debug:
traceback.print_exc()
pass
# Reversed String -----------------------------------------------------
if string[::-1] in good_strings_db:
localStringScores[string] += 10
reversedStrings[string] = string[::-1]
# Certain string reduce -----------------------------------------------
if re.search(r'(rundll32\.exe$|kernel\.dll$)', string, re.IGNORECASE):
localStringScores[string] -= 4
# Set the global string score
stringScores[original_string] = localStringScores[string]
if args.debug:
if string in utfstrings:
is_utf = True
else:
is_utf = False
# print "SCORE: %s\tUTF: %s\tSTRING: %s" % ( localStringScores[string], is_utf, string )
sorted_set = sorted(localStringScores.iteritems(), key=operator.itemgetter(1), reverse=True)
# Only the top X strings
c = 0
result_set = []
for string in sorted_set:
# Skip the one with a score lower than -z X
if not args.noscorefilter and not args.inverse:
if string[1] < int(args.z):
continue
if string[0] in utfstrings:
result_set.append("UTF16LE:%s" % string[0])
else:
result_set.append(string[0])
#c += 1
#if c > int(args.rc):
# break
if args.trace:
print("RESULT SET:")
print(result_set)
# return the filtered set
return result_set
def generate_general_condition(file_info):
    """
    Generates a general YARA condition (magic header, file size, imphash)
    shared by a set of files

    :param file_info: dict mapping file path -> info dict with optional
                      "magic", "size" and "imphash" fields
    :return: tuple (condition_string, pe_module_neccessary) where
             condition_string is the combined condition (may be "") and
             pe_module_neccessary indicates that the YARA 'pe' module
             is required (an imphash condition was added)
    """
    # Initialize the returned name up front - the original initialized a
    # misspelled variable, so the error path raised a NameError on return
    condition_string = ""
    conditions = []
    pe_module_neccessary = False
    # Different Magic Headers and File Sizes
    magic_headers = []
    file_sizes = []
    imphashes = []
    try:
        for filePath in file_info:
            # Short file name info used for inverse generation has no magic/size fields
            if "magic" not in file_info[filePath]:
                continue
            magic = file_info[filePath]["magic"]
            size = file_info[filePath]["size"]
            imphash = file_info[filePath]["imphash"]
            # Add them to the lists
            if magic not in magic_headers and magic != "":
                magic_headers.append(magic)
            if size not in file_sizes:
                file_sizes.append(size)
            if imphash not in imphashes and imphash != "":
                imphashes.append(imphash)
        # Magic header condition - only when 1 to 5 distinct headers exist
        # (guarding against an empty list avoids appending an empty string
        # that would yield a malformed " and "-joined condition)
        if 0 < len(magic_headers) <= 5:
            magic_string = " or ".join(get_uint_string(h) for h in magic_headers)
            if " or " in magic_string:
                conditions.append("( {0} )".format(magic_string))
            else:
                conditions.append("{0}".format(magic_string))
        # Biggest size multiplied with maxsize_multiplier
        if not args.nofilesize and len(file_sizes) > 0:
            conditions.append(get_file_range(max(file_sizes)))
        # Imphash is only usable when all files share exactly one imphash
        if len(imphashes) == 1:
            conditions.append("pe.imphash() == \"{0}\"".format(imphashes[0]))
            pe_module_neccessary = True
        # Combine all collected attributes
        condition_string = " and ".join(conditions)
    except Exception as e:
        # Print the warning first - the original called exit(1) in debug
        # mode before printing, so the message was never shown
        print("[E] ERROR while generating general condition - check the global rule and remove it if it's faulty")
        if args.debug:
            traceback.print_exc()
            exit(1)
    return condition_string, pe_module_neccessary
def generate_rules(file_strings, file_opcodes, super_rules, file_info, inverse_stats):
    """
    Generates the YARA rule set (simple rules, super rules and - with
    --inverse - inverse anomaly rules) from the filtered string and opcode
    sets and writes it to the output file given via -o.

    :param file_strings: dict file path -> list of extracted strings
    :param file_opcodes: dict file path -> list of extracted opcodes
    :param super_rules: list of super rule dicts with "files" and "strings"
    :param file_info: dict file path -> meta info (hash, magic, size, imphash, ...)
    :param inverse_stats: dict file name -> string set used for inverse rules
    :return: tuple (rule_count, inverse_rule_count, super_rule_count)
    """
    # Write to file ---------------------------------------------------
    if args.o:
        try:
            fh = open(args.o, 'w')
        except Exception as e:
            traceback.print_exc()
    # NOTE(review): fh is only bound when args.o is set (and open() succeeded),
    # but several fh.write() calls below run unconditionally - confirm that
    # -o is enforced upstream, otherwise these raise NameError
    # General Info
    general_info = "/*\n"
    general_info += " YARA Rule Set\n"
    general_info += " Author: {0}\n".format(args.a)
    general_info += " Date: {0}\n".format(get_timestamp_basic())
    general_info += " Identifier: {0}\n".format(identifier)
    general_info += " Reference: {0}\n".format(reference)
    if args.l != "":
        general_info += " License: {0}\n".format(args.l)
    general_info += "*/\n\n"
    fh.write(general_info)
    # GLOBAL RULES ----------------------------------------------------
    if args.globalrule:
        condition, pe_module_necessary = generate_general_condition(file_info)
        # Global Rule
        if condition != "":
            global_rule = "/* Global Rule -------------------------------------------------------------- */\n"
            global_rule += "/* Will be evaluated first, speeds up scanning process, remove at will */\n\n"
            global_rule += "global private rule gen_characteristics {\n"
            global_rule += " condition:\n"
            global_rule += " {0}\n".format(condition)
            global_rule += "}\n\n"
            # Write rule
            if args.o:
                fh.write(global_rule)
    # General vars
    rules = ""
    printed_rules = {}
    opcodes_to_add = []
    rule_count = 0
    inverse_rule_count = 0
    super_rule_count = 0
    pe_module_necessary = False
    if not args.inverse:
        # PROCESS SIMPLE RULES ----------------------------------------------------
        print("[+] Generating Simple Rules ...")
        # Apply intelligent filters
        print("[-] Applying intelligent filters to string findings ...")
        for filePath in file_strings:
            print("[-] Filtering string set for %s ..." % filePath)
            # Replace the original string set with the filtered one
            string_set = file_strings[filePath]
            file_strings[filePath] = []
            file_strings[filePath] = filter_string_set(string_set)
            # Replace the original string set with the filtered one
            if filePath not in file_opcodes:
                file_opcodes[filePath] = []
            else:
                print("[-] Filtering opcode set for %s ..." % filePath)
                opcode_set = file_opcodes[filePath]
                file_opcodes[filePath] = []
                file_opcodes[filePath] = filter_opcode_set(opcode_set)
        # GENERATE SIMPLE RULES -------------------------------------------
        fh.write("/* Rule Set ----------------------------------------------------------------- */\n\n")
        for filePath in file_strings:
            # Skip if there is nothing to do
            # NOTE(review): the elif branch below is unreachable - it repeats
            # the first condition (len == 0) that already triggered 'continue'
            if len(file_strings[filePath]) == 0:
                print("[W] Not enough high scoring strings to create a rule. "
                      "(Try -z 0 to reduce the min score or --opcodes to include opcodes) FILE: %s" % filePath)
                continue
            elif len(file_strings[filePath]) == 0 and len(file_opcodes[filePath]) == 0:
                print("[W] Not enough high scoring strings and opcodes to create a rule. " \
                      "(Try -z 0 to reduce the min score) FILE: %s" % filePath)
                continue
            # Create Rule
            try:
                rule = ""
                (path, file) = os.path.split(filePath)
                # Prepare name
                fileBase = os.path.splitext(file)[0]
                # Create a clean new name
                cleanedName = fileBase
                # Adapt length of rule name
                if len(fileBase) < 8: # if name is too short add part from path
                    cleanedName = path.split('\\')[-1:][0] + "_" + cleanedName
                # File name starts with a number
                if re.search(r'^[0-9]', cleanedName):
                    cleanedName = "sig_" + cleanedName
                # clean name from all characters that would cause errors
                cleanedName = re.sub('[^\w]', r'_', cleanedName)
                # Check if already printed
                if cleanedName in printed_rules:
                    printed_rules[cleanedName] += 1
                    cleanedName = cleanedName + "_" + str(printed_rules[cleanedName])
                else:
                    printed_rules[cleanedName] = 1
                # Print rule title ----------------------------------------
                rule += "rule %s {\n" % cleanedName
                # Meta data -----------------------------------------------
                rule += " meta:\n"
                rule += " description = \"%s - file %s\"\n" % (prefix, file)
                rule += " author = \"%s\"\n" % args.a
                rule += " reference = \"%s\"\n" % reference
                rule += " date = \"%s\"\n" % get_timestamp_basic()
                rule += " hash1 = \"%s\"\n" % file_info[filePath]["hash"]
                rule += " strings:\n"
                # Get the strings -----------------------------------------
                # Rule String generation
                (rule_strings, opcodes_included, string_rule_count, high_scoring_strings) = \
                    get_rule_strings(file_strings[filePath], file_opcodes[filePath])
                rule += rule_strings
                # Extract rul strings
                if args.strings:
                    strings = get_strings(file_strings[filePath])
                    write_strings(filePath, strings, args.e, args.score)
                # Condition -----------------------------------------------
                # Conditions list (will later be joined with 'or')
                conditions = [] # AND connected
                subconditions = [] # OR connected
                # Condition PE
                # Imphash and Exports - applicable to PE files only
                condition_pe = []
                condition_pe_part1 = []
                condition_pe_part2 = []
                if not args.noextras and file_info[filePath]["magic"] == "MZ":
                    # Add imphash - if certain conditions are met
                    if file_info[filePath]["imphash"] not in good_imphashes_db and file_info[filePath]["imphash"] != "":
                        # Comment to imphash
                        imphash = file_info[filePath]["imphash"]
                        comment = ""
                        if imphash in KNOWN_IMPHASHES:
                            comment = " /* {0} */".format(KNOWN_IMPHASHES[imphash])
                        # Add imphash to condition
                        condition_pe_part1.append("pe.imphash() == \"{0}\"{1}".format(imphash, comment))
                        pe_module_necessary = True
                    if file_info[filePath]["exports"]:
                        e_count = 0
                        for export in file_info[filePath]["exports"]:
                            if export not in good_exports_db:
                                condition_pe_part2.append("pe.exports(\"{0}\")".format(export))
                                e_count += 1
                                pe_module_necessary = True
                            if e_count > 5:
                                break
                # 1st Part of Condition 1
                basic_conditions = []
                # Filesize
                if not args.nofilesize:
                    basic_conditions.insert(0, get_file_range(file_info[filePath]["size"]))
                # Magic
                if file_info[filePath]["magic"] != "":
                    uint_string = get_uint_string(file_info[filePath]["magic"])
                    basic_conditions.insert(0, uint_string)
                # Basic Condition
                if len(basic_conditions):
                    conditions.append(" and ".join(basic_conditions))
                # Add extra PE conditions to condition 1
                pe_conditions_add = False
                if condition_pe_part1 or condition_pe_part2:
                    if len(condition_pe_part1) == 1:
                        condition_pe.append(condition_pe_part1[0])
                    elif len(condition_pe_part1) > 1:
                        condition_pe.append("( %s )" % " or ".join(condition_pe_part1))
                    if len(condition_pe_part2) == 1:
                        condition_pe.append(condition_pe_part2[0])
                    elif len(condition_pe_part2) > 1:
                        condition_pe.append("( %s )" % " and ".join(condition_pe_part2))
                    # Marker that PE conditions have been added
                    pe_conditions_add = True
                    # Add to sub condition
                    subconditions.append(" and ".join(condition_pe))
                # String combinations
                cond_op = "" # opcodes condition
                cond_hs = "" # high scoring strings condition
                cond_ls = "" # low scoring strings condition
                low_scoring_strings = (string_rule_count - high_scoring_strings)
                if high_scoring_strings > 0:
                    cond_hs = "1 of ($x*)"
                if low_scoring_strings > 0:
                    if low_scoring_strings > 10:
                        if high_scoring_strings > 0:
                            cond_ls = "4 of them"
                        else:
                            cond_ls = "8 of them"
                    else:
                        cond_ls = "all of them"
                # If low scoring and high scoring
                cond_combined = "all of them"
                needs_brackets = False
                if low_scoring_strings > 0 and high_scoring_strings > 0:
                    # If PE conditions have been added, don't be so strict with the strings
                    if pe_conditions_add:
                        cond_combined = "{0} or {1}".format(cond_hs, cond_ls)
                        needs_brackets = True
                    else:
                        cond_combined = "{0} and {1}".format(cond_hs, cond_ls)
                elif low_scoring_strings > 0 and not high_scoring_strings > 0:
                    cond_combined = "{0}".format(cond_ls)
                elif not low_scoring_strings > 0 and high_scoring_strings > 0:
                    cond_combined = "{0}".format(cond_hs)
                if opcodes_included:
                    cond_op = " and all of ($op*)"
                # Opcodes (if needed)
                if cond_op or needs_brackets:
                    subconditions.append("( {0}{1} )".format(cond_combined, cond_op))
                else:
                    subconditions.append(cond_combined)
                # Now add string condition to the conditions
                if len(subconditions) == 1:
                    conditions.append(subconditions[0])
                elif len(subconditions) > 1:
                    conditions.append("( %s )" % " or ".join(subconditions))
                # Create condition string
                condition_string = " and\n ".join(conditions)
                rule += " condition:\n"
                rule += " %s\n" % condition_string
                rule += "}\n\n"
                # Add to rules string
                rules += rule
                rule_count += 1
            except Exception as e:
                traceback.print_exc()
    # GENERATE SUPER RULES --------------------------------------------
    if not nosuper and not args.inverse:
        rules += "/* Super Rules ------------------------------------------------------------- */\n\n"
        super_rule_names = []
        print("[+] Generating Super Rules ...")
        printed_combi = {}
        for super_rule in super_rules:
            try:
                rule = ""
                # Prepare Name
                rule_name = ""
                file_list = []
                # Loop through files
                imphashes = Counter()
                for filePath in super_rule["files"]:
                    (path, file) = os.path.split(filePath)
                    file_list.append(file)
                    # Prepare name
                    fileBase = os.path.splitext(file)[0]
                    # Create a clean new name
                    cleanedName = fileBase
                    # Append it to the full name
                    rule_name += "_" + cleanedName
                    # Check if imphash of all files is equal
                    imphash = file_info[filePath]["imphash"]
                    if imphash != "-" and imphash != "":
                        imphashes.update([imphash])
                # Imphash usable
                # NOTE(review): imphashes.items()[0][0] only works on Python 2
                # (dict views are not indexable on Python 3) - verify target runtime
                if len(imphashes) == 1:
                    unique_imphash = imphashes.items()[0][0]
                    if unique_imphash in good_imphashes_db:
                        unique_imphash = ""
                # Shorten rule name
                rule_name = rule_name[:124]
                # Add count if rule name already taken
                if rule_name not in super_rule_names:
                    rule_name = "%s_%s" % (rule_name, super_rule_count)
                    super_rule_names.append(rule_name)
                # Create a list of files
                file_listing = ", ".join(file_list)
                # File name starts with a number
                if re.search(r'^[0-9]', rule_name):
                    rule_name = "sig_" + rule_name
                # clean name from all characters that would cause errors
                rule_name = re.sub('[^\w]', r'_', rule_name)
                # Check if already printed
                if rule_name in printed_rules:
                    printed_combi[rule_name] += 1
                    rule_name = rule_name + "_" + str(printed_combi[rule_name])
                else:
                    printed_combi[rule_name] = 1
                # Print rule title
                rule += "rule %s {\n" % rule_name
                rule += " meta:\n"
                rule += " description = \"%s - from files %s\"\n" % (prefix, file_listing)
                rule += " author = \"%s\"\n" % args.a
                rule += " reference = \"%s\"\n" % reference
                rule += " date = \"%s\"\n" % get_timestamp_basic()
                for i, filePath in enumerate(super_rule["files"]):
                    rule += " hash%s = \"%s\"\n" % (str(i + 1), file_info[filePath]["hash"])
                rule += " strings:\n"
                # Adding the strings
                # NOTE(review): filePath here is whatever the loop above left it as
                # (the last member file) - only that one file's opcode set is used
                if file_opcodes.get(filePath) is None:
                    tmp_file_opcodes = {}
                else:
                    tmp_file_opcodes = file_opcodes.get(filePath)
                (rule_strings, opcodes_included, string_rule_count, high_scoring_strings) = \
                    get_rule_strings(super_rule["strings"], tmp_file_opcodes)
                rule += rule_strings
                # Condition -----------------------------------------------
                # Conditions list (will later be joined with 'or')
                conditions = []
                # 1st condition
                # Evaluate the general characteristics
                file_info_super = {}
                for filePath in super_rule["files"]:
                    file_info_super[filePath] = file_info[filePath]
                condition_strings, pe_module_necessary_gen = generate_general_condition(file_info_super)
                if pe_module_necessary_gen:
                    pe_module_necessary = True
                # 2nd condition
                # String combinations
                cond_op = "" # opcodes condition
                cond_hs = "" # high scoring strings condition
                cond_ls = "" # low scoring strings condition
                low_scoring_strings = (string_rule_count - high_scoring_strings)
                if high_scoring_strings > 0:
                    cond_hs = "1 of ($x*)"
                if low_scoring_strings > 0:
                    if low_scoring_strings > 10:
                        if high_scoring_strings > 0:
                            cond_ls = "4 of them"
                        else:
                            cond_ls = "8 of them"
                    else:
                        cond_ls = "all of them"
                # If low scoring and high scoring
                cond_combined = "all of them"
                if low_scoring_strings > 0 and high_scoring_strings > 0:
                    cond_combined = "{0} and {1}".format(cond_hs, cond_ls)
                elif low_scoring_strings > 0 and not high_scoring_strings > 0:
                    cond_combined = "{0}".format(cond_ls)
                elif not low_scoring_strings > 0 and high_scoring_strings > 0:
                    cond_combined = "{0}".format(cond_hs)
                if opcodes_included:
                    cond_op = " and all of ($op*)"
                condition2 = "( {0} ){1}".format(cond_combined, cond_op)
                conditions.append(" and ".join([condition_strings, condition2]))
                # 3nd condition
                # In memory detection base condition (no magic, no filesize)
                condition_pe = "all of them"
                conditions.append(condition_pe)
                # Create condition string
                condition_string = "\n ) or ( ".join(conditions)
                rule += " condition:\n"
                rule += " ( %s )\n" % condition_string
                rule += "}\n\n"
                # print rule
                # Add to rules string
                rules += rule
                super_rule_count += 1
            except Exception as e:
                traceback.print_exc()
    try:
        # WRITING RULES TO FILE
        # PE Module -------------------------------------------------------
        if not args.noextras:
            if pe_module_necessary:
                fh.write('import "pe"\n\n')
        # RULES -----------------------------------------------------------
        if args.o:
            fh.write(rules)
    except Exception as e:
        traceback.print_exc()
    # PROCESS INVERSE RULES ---------------------------------------------------
    # print inverse_stats.keys()
    if args.inverse:
        print("[+] Generating inverse rules ...")
        inverse_rules = ""
        # Apply intelligent filters -------------------------------------------
        print("[+] Applying intelligent filters to string findings ...")
        for fileName in inverse_stats:
            print("[-] Filtering string set for %s ..." % fileName)
            # Replace the original string set with the filtered one
            string_set = inverse_stats[fileName]
            inverse_stats[fileName] = []
            inverse_stats[fileName] = filter_string_set(string_set)
            # Preset if empty
            if fileName not in file_opcodes:
                file_opcodes[fileName] = {}
        # GENERATE INVERSE RULES -------------------------------------------
        fh.write("/* Inverse Rules ------------------------------------------------------------- */\n\n")
        for fileName in inverse_stats:
            try:
                rule = ""
                # Create a clean new name
                cleanedName = fileName.replace(".", "_")
                # Add ANOMALY
                cleanedName += "_ANOMALY"
                # File name starts with a number
                if re.search(r'^[0-9]', cleanedName):
                    cleanedName = "sig_" + cleanedName
                # clean name from all characters that would cause errors
                cleanedName = re.sub('[^\w]', r'_', cleanedName)
                # Check if already printed
                if cleanedName in printed_rules:
                    printed_rules[cleanedName] += 1
                    cleanedName = cleanedName + "_" + str(printed_rules[cleanedName])
                else:
                    printed_rules[cleanedName] = 1
                # Print rule title ----------------------------------------
                rule += "rule %s {\n" % cleanedName
                # Meta data -----------------------------------------------
                rule += " meta:\n"
                rule += " description = \"%s for anomaly detection - file %s\"\n" % (prefix, fileName)
                rule += " author = \"%s\"\n" % args.a
                rule += " reference = \"%s\"\n" % reference
                rule += " date = \"%s\"\n" % get_timestamp_basic()
                for i, hash in enumerate(file_info[fileName]["hashes"]):
                    rule += " hash%s = \"%s\"\n" % (str(i + 1), hash)
                rule += " strings:\n"
                # Get the strings -----------------------------------------
                # Rule String generation
                (rule_strings, opcodes_included, string_rule_count, high_scoring_strings) = \
                    get_rule_strings(inverse_stats[fileName], file_opcodes[fileName])
                rule += rule_strings
                # Condition -----------------------------------------------
                folderNames = ""
                if not args.nodirname:
                    folderNames += "and ( filepath matches /"
                    folderNames += "$/ or filepath matches /".join(file_info[fileName]["folder_names"])
                    folderNames += "$/ )"
                condition = "filename == \"%s\" %s and not ( all of them )" % (fileName, folderNames)
                rule += " condition:\n"
                rule += " %s\n" % condition
                rule += "}\n\n"
                # print rule
                # Add to rules string
                inverse_rules += rule
            except Exception as e:
                traceback.print_exc()
        try:
            # Try to write rule to file
            # NOTE(review): inverse_rule_count counts successful write batches
            # (at most 1), not the number of inverse rules generated - confirm intent
            if args.o:
                fh.write(inverse_rules)
                inverse_rule_count += 1
        except Exception as e:
            traceback.print_exc()
    # Close the rules file --------------------------------------------
    if args.o:
        try:
            fh.close()
        except Exception as e:
            traceback.print_exc()
    # Print rules to command line -------------------------------------
    if args.debug:
        print(rules)
    return (rule_count, inverse_rule_count, super_rule_count)
def get_rule_strings(string_elements, opcode_elements):
    """
    Build the YARA 'strings:' section content for one rule.

    Emits one $s<i> definition per string — or $x<i> for strings whose score
    exceeds the global score_highly_specific threshold — annotated with inline
    comments (score, goodware occurrence count, base64/hex/reversed origin,
    PEStudio blacklist hit). Opcodes, when given, are appended as $op<i> hex
    string definitions.

    Relies on module-level state: stringScores, good_strings_db, base64strings,
    hexEncStrings, reversedStrings, pestudioMarker, score_highly_specific and
    the parsed command line options in `args`.

    :param string_elements: iterable of scored strings; wide (UTF-16) strings
                            carry a "UTF16LE:" prefix
    :param opcode_elements: list of opcode hex strings for $op entries
    :return: tuple (rule_strings, opcodes_included, string_rule_count,
             high_scoring_strings)
    """
    rule_strings = ""
    high_scoring_strings = 0
    string_rule_count = 0
    # Adding the strings --------------------------------------
    for i, string in enumerate(string_elements):
        # Collect the data
        is_fullword = True
        # Keep the unmodified string as key for the stringScores lookup below
        initial_string = string
        enc = " ascii"
        base64comment = ""
        hexEncComment = ""
        reversedComment = ""
        fullword = ""
        pestudio_comment = ""
        score_comment = ""
        goodware_comment = ""
        if string in good_strings_db:
            goodware_comment = " /* Goodware String - occured %s times */" % (good_strings_db[string])
        if string in stringScores:
            if args.score:
                score_comment += " /* score: '%.2f'*/" % (stringScores[string])
        else:
            print("NO SCORE: %s" % string)
        # "UTF16LE:" marks a wide string: strip the marker and switch the
        # YARA modifier from 'ascii' to 'wide'
        if string[:8] == "UTF16LE:":
            string = string[8:]
            enc = " wide"
        if string in base64strings:
            base64comment = " /* base64 encoded string '%s' */" % base64strings[string]
        if string in hexEncStrings:
            hexEncComment = " /* hex encoded string '%s' */" % removeNonAsciiDrop(hexEncStrings[string])
        if string in pestudioMarker and args.score:
            pestudio_comment = " /* PEStudio Blacklist: %s */" % pestudioMarker[string]
        if string in reversedStrings:
            reversedComment = " /* reversed goodware string '%s' */" % reversedStrings[string]
        # Extra checks: pure hex strings must not get the 'fullword' modifier
        if is_hex_encoded(string, check_length=False):
            is_fullword = False
        # Checking string length against the -s command line maximum
        if len(string) >= args.s:
            # cut string
            string = string[:args.s].rstrip("\\")
            # not fullword anymore
            is_fullword = False
        # Show as fullword
        if is_fullword:
            fullword = " fullword"
        # Now compose the rule line ($x = highly specific, $s = normal)
        if float(stringScores[initial_string]) > score_highly_specific:
            high_scoring_strings += 1
            rule_strings += " $x%s = \"%s\"%s%s%s%s%s%s%s%s\n" % (
                str(i + 1), string, fullword, enc, base64comment, reversedComment, pestudio_comment, score_comment,
                goodware_comment, hexEncComment)
        else:
            rule_strings += " $s%s = \"%s\"%s%s%s%s%s%s%s%s\n" % (
                str(i + 1), string, fullword, enc, base64comment, reversedComment, pestudio_comment, score_comment,
                goodware_comment, hexEncComment)
        # If too many string definitions found - cut it at the
        # count defined via command line param -rc
        if (i + 1) >= int(args.rc):
            break
        string_rule_count += 1
    # If too few strings - add opcodes
    # Adding the strings --------------------------------------
    opcodes_included = False
    if len(opcode_elements) > 0:
        rule_strings += "\n"
        for i, opcode in enumerate(opcode_elements):
            rule_strings += " $op%s = { %s }\n" % (str(i), opcode)
        opcodes_included = True
    else:
        if args.opcodes:
            print("[-] Not enough unique opcodes found to include them")
    return rule_strings, opcodes_included, string_rule_count, high_scoring_strings
def get_strings(string_elements):
    """
    Categorize extracted strings by their encoding/transformation type.

    :param string_elements: iterable of strings; wide (UTF-16) strings carry
                            a "UTF16LE:" prefix
    :return: dict mapping category name ("ascii", "wide", "base64 encoded",
             "hex encoded", "reversed") to a list of strings
    """
    categorized = {
        "ascii": [],
        "wide": [],
        "base64 encoded": [],
        "hex encoded": [],
        "reversed": []
    }
    for entry in string_elements:
        # Wide strings are detected by their marker prefix, which is stripped;
        # the remaining categories come from the module-level lookup dicts
        if entry.startswith("UTF16LE:"):
            categorized["wide"].append(entry[8:])
        elif entry in base64strings:
            categorized["base64 encoded"].append(entry)
        elif entry in hexEncStrings:
            categorized["hex encoded"].append(entry)
        elif entry in reversedStrings:
            categorized["reversed"].append(entry)
        else:
            categorized["ascii"].append(entry)
    return categorized
def write_strings(filePath, strings, output_dir, scores):
    """
    Writes string information to an output file, grouped by encoding category.

    :param filePath: path of the analyzed sample (its basename is reused for
                     the "<name>_strings.txt" output file name)
    :param strings: dict as returned by get_strings()
    :param output_dir: directory in which the output file is created
    :param scores: if True, prefix every string with its score as
                   "score;string" (reads the module-level stringScores dict)
    :return: None
    """
    SECTIONS = ["ascii", "wide", "base64 encoded", "hex encoded", "reversed"]
    # File
    filename = os.path.basename(filePath)
    strings_filename = os.path.join(output_dir, "%s_strings.txt" % filename)
    print("[+] Writing strings to file %s" % strings_filename)
    # Strings
    output_string = []
    for key in SECTIONS:
        # Skip empty sections
        if len(strings[key]) < 1:
            continue
        # Section header
        output_string.append("%s Strings" % key.upper())
        output_string.append("------------------------------------------------------------------------")
        for string in strings[key]:
            if scores:
                # Wide strings are stored under their "UTF16LE:" key in
                # the stringScores dict
                if key == "wide":
                    score = stringScores["UTF16LE:%s" % string]
                else:
                    score = stringScores[string]
                # BUGFIX: format arguments must be a tuple — the previous
                # code passed a second argument to list.append and raised a
                # TypeError whenever score output was requested
                output_string.append("%d;%s" % (score, string))
            else:
                output_string.append(string)
        # Empty line between sections
        output_string.append("\n")
    with open(strings_filename, "w") as fh:
        fh.write("\n".join(output_string))
def initialize_pestudio_strings():
    """
    Parse the PEStudio strings.xml black lists into per-category element lists.

    Reads the XML file at PE_STRINGS_FILE (resolved relative to the script
    directory) and collects the lxml elements of every known category; the
    result is consumed by get_pestudio_score().

    :return: dict mapping category name -> list of lxml elements
    """
    pestudio_strings = {}
    tree = etree.parse(get_abs_path(PE_STRINGS_FILE))
    pestudio_strings["strings"] = tree.findall(".//string")
    pestudio_strings["av"] = tree.findall(".//av")
    pestudio_strings["folder"] = tree.findall(".//folder")
    pestudio_strings["os"] = tree.findall(".//os")
    pestudio_strings["reg"] = tree.findall(".//reg")
    pestudio_strings["guid"] = tree.findall(".//guid")
    pestudio_strings["ssdl"] = tree.findall(".//ssdl")
    pestudio_strings["ext"] = tree.findall(".//ext")
    pestudio_strings["agent"] = tree.findall(".//agent")
    pestudio_strings["oid"] = tree.findall(".//oid")
    pestudio_strings["priv"] = tree.findall(".//priv")
    return pestudio_strings
def initialize_bayes_filter():
    """
    Create and train the naive Bayes string classifier with the known-good
    strings shipped in ./lib/good.txt.

    :return: trained naiveBayesClassifier Trainer instance
    """
    # BayesTrainer
    stringTrainer = Trainer(tokenizer)
    # Read the sample files and train the algorithm
    print("[-] Training filter with good strings from ./lib/good.txt")
    with open(get_abs_path("./lib/good.txt"), "r") as fh_goodstrings:
        for line in fh_goodstrings:
            stringTrainer.train(line.rstrip("\n"), "string")
            # Train a second variant with path/separator characters blanked
            # out so that the individual tokens are learned as well
            modified_line = re.sub(r'(\\\\|\/|\-|\.|\_)', ' ', line)
            stringTrainer.train(modified_line, "string")
    return stringTrainer
def get_pestudio_score(string):
    """
    Look up a string in the parsed PEStudio black lists.

    A full, case-insensitive match scores 5; the "ext" extension list is
    excluded from scoring. Relies on the module-level pestudio_strings dict.

    :param string: string to look up
    :return: tuple (score, matching list name) — (0, "") when not listed
    """
    lowered = string.lower()
    for list_name in pestudio_strings:
        # Exclude the "extension" black list for now
        if list_name == "ext":
            continue
        for elem in pestudio_strings[list_name]:
            # Full match only
            if elem.text.lower() == lowered:
                return 5, list_name
    return 0, ""
def get_opcode_string(opcode):
    """Format a hex opcode string as space-separated byte pairs ("aabb" -> "aa bb")."""
    byte_pairs = [opcode[pos:pos + 2] for pos in range(0, len(opcode), 2)]
    return ' '.join(byte_pairs)
def get_uint_string(magic):
    """
    Build a YARA uint16/uint32 magic-header condition from the first 2 or 4
    bytes of a file.

    The bytes are interpreted little-endian, matching YARA's uint16(0) /
    uint32(0) semantics (e.g. "MZ" -> "uint16(0) == 0x5a4d").

    :param magic: the 2 or 4 leading bytes of the sample
    :return: condition string, or "" for unsupported lengths
    """
    import struct
    # struct.unpack works on both Python 2 str and Python 3 bytes; the
    # previous str.encode('hex') approach was Python-2-only. The "<" formats
    # reproduce the old reversed-byte hex output exactly (lowercase,
    # zero-padded).
    if len(magic) == 2:
        return "uint16(0) == 0x%04x" % struct.unpack("<H", magic)[0]
    if len(magic) == 4:
        return "uint32(0) == 0x%08x" % struct.unpack("<I", magic)[0]
    return ""
def get_file_range(size):
    """
    Derive a YARA "filesize < XKB" condition from a sample's size.

    The size is multiplied by the -fm command line factor, floored at 1 KB,
    converted to KB and rounded to a coarser value depending on its number of
    digits. Returns an empty string if anything goes wrong.

    :param size: sample file size in bytes
    :return: condition string like "filesize < 300KB", or ""
    """
    size_string = ""
    try:
        # max sample size - args.fm times the original size
        max_size_b = size * args.fm
        # Minimum size
        if max_size_b < 1024:
            max_size_b = 1024
        # in KB
        max_size = max_size_b / 1024
        max_size_kb = max_size
        # Round depending on the magnitude of the KB value
        if len(str(max_size)) == 2:
            max_size = int(round(max_size, -1))
        elif len(str(max_size)) == 3:
            max_size = int(round(max_size, -2))
        elif len(str(max_size)) == 4:
            max_size = int(round(max_size, -3))
        elif len(str(max_size)) == 5:
            # NOTE(review): 5-digit sizes also round to thousands (-3) —
            # possibly intended to be -4; kept as-is
            max_size = int(round(max_size, -3))
        size_string = "filesize < {0}KB".format(max_size)
        if args.debug:
            print("File Size Eval: SampleSize (b): {0} SizeWithMultiplier (b/Kb): {1} / {2} RoundedSize: {3}".format(
                str(size), str(max_size_b), str(max_size_kb), str(max_size)))
    except Exception as e:
        if args.debug:
            traceback.print_exc()
        pass
    finally:
        # finally-return deliberately swallows any exception and always
        # yields the (possibly empty) condition string
        return size_string
def get_timestamp_basic(date_obj=None):
    """Return a date formatted as 'YYYY-MM-DD'; defaults to the current local date."""
    when = date_obj if date_obj else datetime.datetime.now()
    return when.strftime("%Y-%m-%d")
def is_ascii_char(b, padding_allowed=False):
    """
    Check whether a single character is printable ASCII (0x20..0x7e).

    :param b: single character
    :param padding_allowed: also accept a NUL byte (UTF-16 padding)
    :return: 1 if accepted, 0 otherwise
    """
    code = ord(b)
    if 31 < code < 127:
        return 1
    if padding_allowed and code == 0:
        return 1
    return 0
def is_ascii_string(string, padding_allowed=False):
    """
    Check whether every character of a string is printable ASCII (0x20..0x7e).

    :param string: string to test
    :param padding_allowed: additionally tolerate NUL bytes (UTF-16 padding)
    :return: 1 if all characters pass, 0 otherwise
    """
    for ch in string:
        code = ord(ch)
        printable = 31 < code < 127
        if padding_allowed:
            if not (printable or code == 0):
                return 0
        elif not printable:
            return 0
    return 1
def is_base_64(s):
    """
    Heuristically test whether a string looks like base64: length must be a
    multiple of four, characters from the base64 alphabet with up to two '='
    padding characters. Truthy result is a match object, falsy otherwise.
    """
    if len(s) % 4 != 0:
        return False
    return re.match('^[A-Za-z0-9+/]+[=]{0,2}$', s)
def is_hex_encoded(s, check_length=True):
    """
    Test whether a string consists only of hex digits; with check_length the
    digit count must also be even (i.e. whole bytes).

    :param s: string to test
    :param check_length: require an even number of digits
    :return: True / False
    """
    if not re.match('^[A-Fa-f0-9]+$', s):
        return False
    return (len(s) % 2 == 0) if check_length else True
def extract_hex_strings(s):
    """
    Extract candidate strings hidden inside long hex digit runs.

    Hex runs of 10+ digits are additionally split at NUL ("0000") and CRLF
    ("0d0a") byte markers and scanned for wide-character patterns. The unique
    fragments are then collected two ways: as ASCII-like pieces (split at
    single "00" bytes, length > 10) and as whole runs whose NUL-stripped
    decoded form looks like a printable (padded) string.

    NOTE: uses str.decode('hex') and is therefore Python-2-only.

    :param s: raw file data as a byte string
    :return: list of hex digit strings
    """
    strings = []
    hex_strings = re.findall("([a-fA-F0-9]{10,})", s)
    # Split each run at common separators and collect wide-char subsequences;
    # iterate over a copy because hex_strings grows inside the loop
    for string in list(hex_strings):
        hex_strings += string.split('0000')
        hex_strings += string.split('0d0a')
        hex_strings += re.findall(r'((?:0000|002[a-f0-9]|00[3-9a-f][0-9a-f]){6,})', string, re.IGNORECASE)
    hex_strings = list(set(hex_strings))
    # ASCII Encoded Strings
    for string in hex_strings:
        for x in string.split('00'):
            if len(x) > 10:
                strings.append(x)
    # WIDE Encoded Strings
    for string in hex_strings:
        try:
            if len(string) % 2 != 0 or len(string) < 8:
                continue
            # Strip the NUL padding bytes and decode the remaining hex digits
            dec = string.replace('00', '').decode('hex')
            if is_ascii_string(dec, padding_allowed=True):
                strings.append(string)
        except Exception as e:
            traceback.print_exc()
    return strings
def removeNonAsciiDrop(string):
    """
    Strip every non-printable / non-ASCII character from a string.

    :param string: input string
    :return: cleaned string, or "error" if processing failed
    """
    cleaned = "error"
    try:
        # Keep only printable ASCII characters (0x20..0x7e)
        cleaned = "".join(ch for ch in string if 31 < ord(ch) < 127)
    except Exception as e:
        traceback.print_exc()
    return cleaned
def save(object, filename, protocol=0):
    """
    Pickle an object and write it gzip-compressed to the given file.

    :param object: the object to serialize (parameter name kept for interface
                   compatibility although it shadows the builtin)
    :param filename: target file path
    :param protocol: pickle protocol version (default 0 for portability)
    :return: None
    """
    # Context manager guarantees the handle is closed (and the gzip trailer
    # written) even if pickling or writing raises — the previous explicit
    # close leaked the handle on error
    with gzip.GzipFile(filename, 'wb') as fh:
        fh.write(pickle.dumps(object, protocol))
def load(filename):
    """
    Read a gzip-compressed pickle file and return the deserialized object.

    :param filename: path to a file previously written by save()
    :return: the unpickled object
    """
    # Read in one go: the old `while 1: data = file.read()` loop never
    # iterated more than once anyway (read() returns everything), leaked the
    # handle on error, and its ""-initialized accumulator broke on bytes
    with gzip.GzipFile(filename, 'rb') as fh:
        return pickle.loads(fh.read())
def update_databases():
    """
    Download the current goodware string/opcode/imphash/export databases
    listed in REPO_URLS into the local ./dbs/ directory.

    Exits the process with status 1 when the directory cannot be created or
    a download fails.
    """
    # Preparations
    try:
        dbDir = './dbs/'
        if not os.path.exists(dbDir):
            os.makedirs(dbDir)
    except Exception as e:
        if args.debug:
            traceback.print_exc()
        print("Error while creating the database directory ./dbs")
        sys.exit(1)
    # Downloading current repository
    try:
        # NOTE: dict.iteritems() and urllib.URLopener are Python-2-only APIs
        for filename, repo_url in REPO_URLS.iteritems():
            print("Downloading %s from %s ..." % (filename, repo_url))
            fileDownloader = urllib.URLopener()
            fileDownloader.retrieve(repo_url, "./dbs/%s" % filename)
    except Exception as e:
        if args.debug:
            traceback.print_exc()
        print("Error while downloading the database file - check your Internet connection")
        print("Alterntive download link: https://drive.google.com/drive/folders/0B2S_IOa0MiOHS0xmekR6VWRhZ28")
        print("Download the files and place them into the ./dbs/ folder")
        sys.exit(1)
def processSampleDir(targetDir):
    """
    Processes samples in a given directory and creates a yara rule file.

    Runs the full pipeline: parse the samples, evaluate the extracted strings
    and opcodes, then generate rules into the output file given by -o.
    Relies on module-level `args` and `nosuper`.

    :param targetDir: directory containing the malware samples
    :return: None
    """
    # Special strings
    # NOTE(review): these assignments create *local* dicts that shadow the
    # module-level dicts of the same name — the globals read elsewhere are
    # not reset by this; confirm whether that is intended
    base64strings = {}
    hexEncStrings = {}
    reversedStrings = {}
    pestudioMarker = {}
    stringScores = {}
    # Extract all information
    (sample_string_stats, sample_opcode_stats, file_info) = \
        parse_sample_dir(targetDir, args.nr, generateInfo=True, onlyRelevantExtensions=args.oe)
    # Evaluate Strings
    (file_strings, file_opcodes, combinations, super_rules, inverse_stats) = \
        sample_string_evaluation(sample_string_stats, sample_opcode_stats, file_info)
    # Create Rule Files
    (rule_count, inverse_rule_count, super_rule_count) = \
        generate_rules(file_strings, file_opcodes, super_rules, file_info, inverse_stats)
    if args.inverse:
        print("[=] Generated %s INVERSE rules." % str(inverse_rule_count))
    else:
        print("[=] Generated %s SIMPLE rules." % str(rule_count))
    if not nosuper:
        print("[=] Generated %s SUPER rules." % str(super_rule_count))
    print("[=] All rules written to %s" % args.o)
def emptyFolder(dir):
    """
    Removes all regular files directly inside the given folder
    (subdirectories are left untouched).

    :param dir: directory to clean
    :return: None
    """
    for entry in os.listdir(dir):
        entry_path = os.path.join(dir, entry)
        try:
            if not os.path.isfile(entry_path):
                continue
            print("[!] Removing %s ..." % entry_path)
            os.unlink(entry_path)
        except Exception as e:
            # Best effort: report and keep going with the next entry
            print(e)
def getReference(ref):
    """
    Get a reference string - if the provided string is the path to a text
    file, then read the contents and return it as reference; otherwise the
    string itself is the reference.

    :param ref: reference string or path to a text file
    :return: the resolved reference
    """
    if not os.path.exists(ref):
        return ref
    reference = getFileContent(ref)
    print("[+] Read reference from file %s > %s" % (ref, reference))
    return reference
def getIdentifier(id, path):
    """
    Get an identifier string - if the provided value is the path to an
    existing text file, read the identifier from it; otherwise fall back to
    the last folder name of the given sample path.

    :param id: identifier value or path to a text file containing it
    :param path: sample directory (fallback source for the identifier)
    :return: the resolved identifier
    """
    if id != "not set" and os.path.exists(id):
        # Read identifier from file
        identifier = getFileContent(id)
        print("[+] Read identifier from file %s > %s" % (id, identifier))
        return identifier
    # Identifier is the highest folder name
    return os.path.basename(path.rstrip('/'))
def getPrefix(prefix, identifier):
    """
    Choose the rule description prefix: the identifier replaces the default
    "Auto-generated rule" placeholder; custom prefixes are kept as-is.

    :param prefix: prefix from the command line (-p)
    :param identifier: resolved rule set identifier
    :return: the prefix to use in rule descriptions
    """
    return identifier if prefix == "Auto-generated rule" else prefix
def getFileContent(file):
    """
    Gets the contents of a file (limited to 1024 characters).

    :param file: path of the file to read
    :return: up to 1024 characters of content, or "not found" on any error
    """
    try:
        with open(file) as fh:
            return fh.read(1024)
    except Exception as e:
        return "not found"
# CTRL+C Handler --------------------------------------------------------------
def signal_handler(signal_name, frame):
    """Graceful exit on SIGINT (CTRL+C); registered via signal_module.signal in main."""
    print("> yarGen's work has been interrupted")
    sys.exit(0)
def print_welcome():
    """Print the yarGen ASCII art banner with author and version information."""
    print("###############################################################################")
    print(" ______")
    print(" __ ______ ______/ ____/__ ____")
    print(" / / / / __ `/ ___/ / __/ _ \/ __ \\")
    print(" / /_/ / /_/ / / / /_/ / __/ / / /")
    print(" \__, /\__,_/_/ \____/\___/_/ /_/")
    print(" /____/")
    print(" ")
    print(" Yara Rule Generator by Florian Roth")
    print(" December 2018")
    print(" Version %s" % __version__)
    print(" ")
    print("###############################################################################")
# MAIN ################################################################
if __name__ == '__main__':
    # Signal handler for CTRL+C
    signal_module.signal(signal_module.SIGINT, signal_handler)
    # Parse Arguments
    parser = argparse.ArgumentParser(description='yarGen')
    group_creation = parser.add_argument_group('Rule Creation')
    group_creation.add_argument('-m', help='Path to scan for malware')
    group_creation.add_argument('-y', help='Minimum string length to consider (default=8)', metavar='min-size',
                                default=8)
    group_creation.add_argument('-z', help='Minimum score to consider (default=0)', metavar='min-score', default=0)
    group_creation.add_argument('-x', help='Score required to set string as \'highly specific string\' (default: 30)',
                                metavar='high-scoring', default=30)
    group_creation.add_argument('-w', help='Minimum number of strings that overlap to create a super rule (default: 5)',
                                metavar='superrule-overlap', default=5)
    group_creation.add_argument('-s', help='Maximum length to consider (default=128)', metavar='max-size', default=128)
    group_creation.add_argument('-rc', help='Maximum number of strings per rule (default=20, intelligent filtering '
                                            'will be applied)', metavar='maxstrings', default=20)
    group_creation.add_argument('--excludegood', help='Force the exclude all goodware strings', action='store_true',
                                default=False)
    group_output = parser.add_argument_group('Rule Output')
    group_output.add_argument('-o', help='Output rule file', metavar='output_rule_file', default='yargen_rules.yar')
    group_output.add_argument('-e', help='Output directory for string exports', metavar='output_dir_strings', default='')
    group_output.add_argument('-a', help='Author Name', metavar='author', default='yarGen Rule Generator')
    group_output.add_argument('-r', help='Reference (can be string or text file)', metavar='ref',
                              default='https://github.com/Neo23x0/yarGen')
    group_output.add_argument('-l', help='License', metavar='lic', default='')
    group_output.add_argument('-p', help='Prefix for the rule description', metavar='prefix',
                              default='Auto-generated rule')
    group_output.add_argument('-b', help='Text file from which the identifier is read (default: last folder name in '
                                         'the full path, e.g. "myRAT" if -m points to /mnt/mal/myRAT)',
                              metavar='identifier',
                              default='not set')
    group_output.add_argument('--score', help='Show the string scores as comments in the rules', action='store_true',
                              default=False)
    group_output.add_argument('--strings', help='Show the string scores as comments in the rules', action='store_true',
                              default=False)
    group_output.add_argument('--nosimple', help='Skip simple rule creation for files included in super rules',
                              action='store_true', default=False)
    group_output.add_argument('--nomagic', help='Don\'t include the magic header condition statement',
                              action='store_true', default=False)
    group_output.add_argument('--nofilesize', help='Don\'t include the filesize condition statement',
                              action='store_true', default=False)
    group_output.add_argument('-fm', help='Multiplier for the maximum \'filesize\' condition value (default: 3)',
                              default=3)
    group_output.add_argument('--globalrule', help='Create global rules (improved rule set speed)',
                              action='store_true', default=False)
    group_output.add_argument('--nosuper', action='store_true', default=False, help='Don\'t try to create super rules '
                                                                                    'that match against various files')
    group_db = parser.add_argument_group('Database Operations')
    group_db.add_argument('--update', action='store_true', default=False, help='Update the local strings and opcodes '
                                                                               'dbs from the online repository')
    group_db.add_argument('-g', help='Path to scan for goodware (dont use the database shipped with yaraGen)')
    group_db.add_argument('-u', action='store_true', default=False, help='Update local standard goodware database with '
                                                                         'a new analysis result (used with -g)')
    group_db.add_argument('-c', action='store_true', default=False, help='Create new local goodware database '
                                                                         '(use with -g and optionally -i "identifier")')
    group_db.add_argument('-i', default="", help='Specify an identifier for the newly created databases '
                                                 '(good-strings-identifier.db, good-opcodes-identifier.db)')
    group_general = parser.add_argument_group('General Options')
    group_general.add_argument('--dropzone', action='store_true', default=False,
                               help='Dropzone mode - monitors a directory [-m] for new samples to process'
                                    'WARNING: Processed files will be deleted!')
    group_general.add_argument('--nr', action='store_true', default=False, help='Do not recursively scan directories')
    group_general.add_argument('--oe', action='store_true', default=False, help='Only scan executable extensions EXE, '
                                                                                'DLL, ASP, JSP, PHP, BIN, INFECTED')
    group_general.add_argument('-fs', help='Max file size in MB to analyze (default=10)', metavar='size-in-MB',
                               default=10)
    group_general.add_argument('--noextras', action='store_true', default=False,
                               help='Don\'t use extras like Imphash or PE header specifics')
    group_general.add_argument('--debug', action='store_true', default=False, help='Debug output')
    group_general.add_argument('--trace', action='store_true', default=False, help='Trace output')
    group_opcode = parser.add_argument_group('Other Features')
    group_opcode.add_argument('--opcodes', action='store_true', default=False, help='Do use the OpCode feature '
                                                                                    '(use this if not enough high '
                                                                                    'scoring strings can be found)')
    group_opcode.add_argument('-n', help='Number of opcodes to add if not enough high scoring string could be found '
                                         '(default=3)', metavar='opcode-num', default=3)
    group_inverse = parser.add_argument_group('Inverse Mode (unstable)')
    group_inverse.add_argument('--inverse', help=argparse.SUPPRESS, action='store_true', default=False)
    group_inverse.add_argument('--nodirname', help=argparse.SUPPRESS, action='store_true', default=False)
    group_inverse.add_argument('--noscorefilter', help=argparse.SUPPRESS, action='store_true', default=False)
    args = parser.parse_args()
    # Print Welcome
    print_welcome()
    # At least one of the three operating modes must be selected
    if not args.update and not args.m and not args.g:
        parser.print_help()
        print("")
        print("[E] You have to select --update to update yarGens database or -m for signature generation or -g for the "
              "creation of goodware string collections "
              "(see https://github.com/Neo23x0/yarGen#examples for more details)")
        sys.exit(1)
    # Update mode: download the goodware databases and exit
    if args.update:
        update_databases()
        print("[+] Updated databases - you can now start creating YARA rules")
        sys.exit(0)
    # Typical input errors
    if args.m:
        if os.path.isfile(args.m):
            print("[E] Input is a file, please use a directory instead (-m path)")
            sys.exit(0)
    # Opcodes evaluation or not
    use_opcodes = False
    if args.opcodes:
        use_opcodes = True
    # Read PEStudio string list
    pestudio_strings = {}
    pestudio_available = False
    # Super Rule Generation
    nosuper = args.nosuper
    # Identifier (derived from -b or the goodware/malware source path)
    sourcepath = args.m
    if args.g:
        sourcepath = args.g
    identifier = getIdentifier(args.b, sourcepath)
    print("[+] Using identifier '%s'" % identifier)
    # Reference
    reference = getReference(args.r)
    print("[+] Using reference '%s'" % reference)
    # Prefix
    prefix = getPrefix(args.p, identifier)
    print("[+] Using prefix '%s'" % prefix)
    if os.path.isfile(get_abs_path(PE_STRINGS_FILE)) and lxml_available:
        print("[+] Processing PEStudio strings ...")
        pestudio_strings = initialize_pestudio_strings()
        pestudio_available = True
    else:
        if lxml_available:
            print("\nTo improve the analysis process please download the awesome PEStudio tool by marc @ochsenmeier " \
                  "from http://winitor.com and place the file 'strings.xml' in the ./3rdparty directory.\n")
            time.sleep(5)
    # Highly specific string score
    score_highly_specific = int(args.x)
    # Goodware mode: scan goodware files and create/update the databases
    if args.g:
        print("[+] Processing goodware files ...")
        good_strings_db, good_opcodes_db, good_imphashes_db, good_exports_db = \
            parse_good_dir(args.g, args.nr, args.oe)
        # Update existing databases (-u)
        if args.u:
            try:
                print("[+] Updating databases ...")
                # Evaluate the database identifiers
                db_identifier = ""
                if args.i != "":
                    db_identifier = "-%s" % args.i
                strings_db = "./dbs/good-strings%s.db" % db_identifier
                opcodes_db = "./dbs/good-opcodes%s.db" % db_identifier
                imphashes_db = "./dbs/good-imphashes%s.db" % db_identifier
                exports_db = "./dbs/good-exports%s.db" % db_identifier
                # Strings -----------------------------------------------------
                print("[+] Updating %s ..." % strings_db)
                good_pickle = load(get_abs_path(strings_db))
                print("Old string database entries: %s" % len(good_pickle))
                good_pickle.update(good_strings_db)
                print("New string database entries: %s" % len(good_pickle))
                save(good_pickle, strings_db)
                # Opcodes -----------------------------------------------------
                print("[+] Updating %s ..." % opcodes_db)
                good_opcode_pickle = load(get_abs_path(opcodes_db))
                print("Old opcode database entries: %s" % len(good_opcode_pickle))
                good_opcode_pickle.update(good_opcodes_db)
                print("New opcode database entries: %s" % len(good_opcode_pickle))
                save(good_opcode_pickle, opcodes_db)
                # Imphashes ---------------------------------------------------
                print("[+] Updating %s ..." % imphashes_db)
                good_imphashes_pickle = load(get_abs_path(imphashes_db))
                print("Old opcode database entries: %s" % len(good_imphashes_pickle))
                good_imphashes_pickle.update(good_imphashes_db)
                print("New opcode database entries: %s" % len(good_imphashes_pickle))
                save(good_imphashes_pickle, imphashes_db)
                # Exports -----------------------------------------------------
                print("[+] Updating %s ..." % exports_db)
                good_exports_pickle = load(get_abs_path(exports_db))
                print("Old opcode database entries: %s" % len(good_exports_pickle))
                good_exports_pickle.update(good_exports_db)
                print("New opcode database entries: %s" % len(good_exports_pickle))
                save(good_exports_pickle, exports_db)
            except Exception as e:
                traceback.print_exc()
        # Create new databases (-c)
        if args.c:
            print("[+] Creating local database ...")
            # Evaluate the database identifiers
            db_identifier = ""
            if args.i != "":
                db_identifier = "-%s" % args.i
            strings_db = "./dbs/good-strings%s.db" % db_identifier
            opcodes_db = "./dbs/good-opcodes%s.db" % db_identifier
            imphashes_db = "./dbs/good-imphashes%s.db" % db_identifier
            exports_db = "./dbs/good-exports%s.db" % db_identifier
            # Creating the databases
            print("[+] Using '%s' as filename for newly created strings database" % strings_db)
            print("[+] Using '%s' as filename for newly created opcodes database" % opcodes_db)
            print("[+] Using '%s' as filename for newly created opcodes database" % imphashes_db)
            print("[+] Using '%s' as filename for newly created opcodes database" % exports_db)
            try:
                # Overwrite confirmation for each existing database file
                if os.path.isfile(strings_db):
                    raw_input("File %s alread exists. Press enter to proceed or CTRL+C to exit." % strings_db)
                    os.remove(strings_db)
                if os.path.isfile(opcodes_db):
                    raw_input("File %s alread exists. Press enter to proceed or CTRL+C to exit." % opcodes_db)
                    os.remove(opcodes_db)
                if os.path.isfile(imphashes_db):
                    raw_input("File %s alread exists. Press enter to proceed or CTRL+C to exit." % imphashes_db)
                    os.remove(imphashes_db)
                if os.path.isfile(exports_db):
                    raw_input("File %s alread exists. Press enter to proceed or CTRL+C to exit." % exports_db)
                    os.remove(exports_db)
                # Strings
                good_pickle = Counter()
                good_pickle = good_strings_db
                # Opcodes
                good_op_pickle = Counter()
                good_op_pickle = good_opcodes_db
                # Imphashes
                good_imphashes_pickle = Counter()
                good_imphashes_pickle = good_imphashes_db
                # Exports
                good_exports_pickle = Counter()
                good_exports_pickle = good_exports_db
                # Save
                save(good_pickle, strings_db)
                save(good_op_pickle, opcodes_db)
                save(good_imphashes_pickle, imphashes_db)
                save(good_exports_pickle, exports_db)
                print("New database with %d string, %d opcode, %d imphash, %d export entries created. " \
                      "(remember to use --opcodes to extract opcodes from the samples and create the opcode databases)"\
                      % (len(good_strings_db), len(good_opcodes_db), len(good_imphashes_db), len(good_exports_db)))
            except Exception as e:
                traceback.print_exc()
    # Analyse malware samples and create rules
    else:
        if use_opcodes:
            print("[+] Reading goodware strings from database 'good-strings.db' and 'good-opcodes.db' ...")
            print(" (This could take some time and uses at least 6 GB of RAM)")
        else:
            print("[+] Reading goodware strings from database 'good-strings.db' ...")
            print(" (This could take some time and uses at least 3 GB of RAM)")
        good_strings_db = Counter()
        good_opcodes_db = Counter()
        good_imphashes_db = Counter()
        good_exports_db = Counter()
        opcodes_num = 0
        strings_num = 0
        imphash_num = 0
        exports_num = 0
        # Initialize all databases: merge every matching ./dbs/*.db file
        for file in os.listdir(get_abs_path("./dbs/")):
            if not file.endswith(".db"):
                continue
            filePath = os.path.join("./dbs/", file)
            # String databases
            if file.startswith("good-strings"):
                try:
                    print("[+] Loading %s ..." % filePath)
                    good_pickle = load(get_abs_path(filePath))
                    good_strings_db.update(good_pickle)
                    print("[+] Total: %s / Added %d entries" % (
                        len(good_strings_db), len(good_strings_db) - strings_num))
                    strings_num = len(good_strings_db)
                except Exception as e:
                    traceback.print_exc()
            # Opcode databases (only loaded when --opcodes is active)
            if file.startswith("good-opcodes"):
                try:
                    if use_opcodes:
                        print("[+] Loading %s ..." % filePath)
                        good_op_pickle = load(get_abs_path(filePath))
                        good_opcodes_db.update(good_op_pickle)
                        print("[+] Total: %s (removed duplicates) / Added %d entries" % (
                            len(good_opcodes_db), len(good_opcodes_db) - opcodes_num))
                        opcodes_num = len(good_opcodes_db)
                except Exception as e:
                    use_opcodes = False
                    traceback.print_exc()
            # Imphash databases
            if file.startswith("good-imphash"):
                try:
                    print("[+] Loading %s ..." % filePath)
                    good_imphashes_pickle = load(get_abs_path(filePath))
                    good_imphashes_db.update(good_imphashes_pickle)
                    print("[+] Total: %s / Added %d entries" % (
                        len(good_imphashes_db), len(good_imphashes_db) - imphash_num))
                    imphash_num = len(good_imphashes_db)
                except Exception as e:
                    traceback.print_exc()
            # Export databases
            if file.startswith("good-exports"):
                try:
                    print("[+] Loading %s ..." % filePath)
                    good_exports_pickle = load(get_abs_path(filePath))
                    good_exports_db.update(good_exports_pickle)
                    print("[+] Total: %s / Added %d entries" % (
                        len(good_exports_db), len(good_exports_db) - exports_num))
                    exports_num = len(good_exports_db)
                except Exception as e:
                    traceback.print_exc()
        # Sanity checks on the loaded databases
        if use_opcodes and len(good_opcodes_db) < 1:
            print("[E] Missing goodware opcode databases."
                  " Please run 'yarGen.py --update' to retrieve the newest database set.")
            use_opcodes = False
        if len(good_exports_db) < 1 and len(good_imphashes_db) < 1:
            print("[E] Missing goodware imphash/export databases. "
                  " Please run 'yarGen.py --update' to retrieve the newest database set.")
            use_opcodes = False
        if len(good_strings_db) < 1 and not args.c:
            print("[E] Error - no goodware databases found. "
                  " Please run 'yarGen.py --update' to retrieve the newest database set.")
            sys.exit(1)
    # If malware directory given
    if args.m:
        # Initialize Bayes Trainer (we will use the goodware string database for this)
        print("[+] Initializing Bayes Filter ...")
        stringTrainer = initialize_bayes_filter()
        # Deactivate super rule generation if there's only a single file in the folder
        if len(os.listdir(args.m)) < 2:
            nosuper = True
        # Special strings
        base64strings = {}
        reversedStrings = {}
        hexEncStrings = {}
        pestudioMarker = {}
        stringScores = {}
        # Dropzone mode: poll the folder and process (then delete) new samples
        if args.dropzone:
            # Monitoring folder for changes
            print("Monitoring %s for new sample files (processed samples will be removed)" % args.m)
            while(True):
                if len(os.listdir(args.m)) > 0:
                    # Deactivate super rule generation if there's only a single file in the folder
                    if len(os.listdir(args.m)) < 2:
                        nosuper = True
                    else:
                        nosuper = False
                    # Read a new identifier
                    identifier = getIdentifier(args.b, args.m)
                    # Read a new reference
                    reference = getReference(args.r)
                    # Generate a new description prefix
                    prefix = getPrefix(args.p, identifier)
                    # Process the samples
                    processSampleDir(args.m)
                    # Delete all samples from the dropzone folder
                    emptyFolder(args.m)
                time.sleep(1)
        else:
            # Scan malware files
            print("[+] Processing malware files ...")
            processSampleDir(args.m)
    print("[+] yarGen run finished")
__version__ = "0.22.0"
import os
import sys
if sys.version_info[0] > 2:
raise Exception("Some modules require Python 2, so please use that version instead of Python 3")
import argparse
import re
import traceback
import operator
import datetime
import time
import scandir
import pefile
import cPickle as pickle
import gzip
import urllib
from collections import Counter
from hashlib import sha256
from naiveBayesClassifier import tokenizer
from naiveBayesClassifier.trainer import Trainer
from naiveBayesClassifier.classifier import Classifier
import signal as signal_module
try:
from lxml import etree
lxml_available = True
except Exception as e:
print("[E] lxml not found - disabling PeStudio string check functionality")
lxml_available = False
RELEVANT_EXTENSIONS = [".asp", ".vbs", ".ps", ".ps1", ".tmp", ".bas", ".bat", ".cmd", ".com", ".cpl",
".crt", ".dll", ".exe", ".msc", ".scr", ".sys", ".vb", ".vbe", ".vbs", ".wsc",
".wsf", ".wsh", ".input", ".war", ".jsp", ".php", ".asp", ".aspx", ".psd1", ".psm1", ".py"]
REPO_URLS = {
'good-opcodes-part1.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part1.db',
'good-opcodes-part2.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part2.db',
'good-opcodes-part3.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part3.db',
'good-opcodes-part4.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part4.db',
'good-opcodes-part5.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part5.db',
'good-opcodes-part6.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part6.db',
'good-opcodes-part7.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part7.db',
'good-opcodes-part8.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part8.db',
'good-opcodes-part9.db': 'https://www.bsk-consulting.de/yargen/good-opcodes-part9.db',
'good-strings-part1.db': 'https://www.bsk-consulting.de/yargen/good-strings-part1.db',
'good-strings-part2.db': 'https://www.bsk-consulting.de/yargen/good-strings-part2.db',
'good-strings-part3.db': 'https://www.bsk-consulting.de/yargen/good-strings-part3.db',
'good-strings-part4.db': 'https://www.bsk-consulting.de/yargen/good-strings-part4.db',
'good-strings-part5.db': 'https://www.bsk-consulting.de/yargen/good-strings-part5.db',
'good-strings-part6.db': 'https://www.bsk-consulting.de/yargen/good-strings-part6.db',
'good-strings-part7.db': 'https://www.bsk-consulting.de/yargen/good-strings-part7.db',
'good-strings-part8.db': 'https://www.bsk-consulting.de/yargen/good-strings-part8.db',
'good-strings-part9.db': 'https://www.bsk-consulting.de/yargen/good-strings-part9.db',
'good-exports-part1.db': 'https://www.bsk-consulting.de/yargen/good-exports-part1.db',
'good-exports-part2.db': 'https://www.bsk-consulting.de/yargen/good-exports-part2.db',
'good-exports-part3.db': 'https://www.bsk-consulting.de/yargen/good-exports-part3.db',
'good-exports-part4.db': 'https://www.bsk-consulting.de/yargen/good-exports-part4.db',
'good-exports-part5.db': 'https://www.bsk-consulting.de/yargen/good-exports-part5.db',
'good-exports-part6.db': 'https://www.bsk-consulting.de/yargen/good-exports-part6.db',
'good-exports-part7.db': 'https://www.bsk-consulting.de/yargen/good-exports-part7.db',
'good-exports-part8.db': 'https://www.bsk-consulting.de/yargen/good-exports-part8.db',
'good-exports-part9.db': 'https://www.bsk-consulting.de/yargen/good-exports-part9.db',
'good-imphashes-part1.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part1.db',
'good-imphashes-part2.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part2.db',
'good-imphashes-part3.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part3.db',
'good-imphashes-part4.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part4.db',
'good-imphashes-part5.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part5.db',
'good-imphashes-part6.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part6.db',
'good-imphashes-part7.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part7.db',
'good-imphashes-part8.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part8.db',
'good-imphashes-part9.db': 'https://www.bsk-consulting.de/yargen/good-imphashes-part9.db',
}
PE_STRINGS_FILE = "./3rdparty/strings.xml"
KNOWN_IMPHASHES = {'a04dd9f5ee88d7774203e0a0cfa1b941': 'PsExec',
'2b8c9d9ab6fefc247adaf927e83dcea6': 'RAR SFX variant'}
def get_abs_path(filename):
    """Resolve *filename* relative to the directory containing this script."""
    script_dir = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(script_dir, filename)
def get_files(dir, notRecursive):
    """Yield the paths of all files below *dir*.

    With notRecursive set, only direct children are listed (sub-directories
    are skipped); otherwise the tree is walked without following symlinks.
    """
    if notRecursive:
        for entry in os.listdir(dir):
            candidate = os.path.join(dir, entry)
            if not os.path.isdir(candidate):
                yield candidate
    else:
        # scandir is provided at module level elsewhere in this file
        for root, _dirs, names in scandir.walk(dir, followlinks=False):
            for entry in names:
                yield os.path.join(root, entry)
def parse_sample_dir(dir, notRecursive=False, generateInfo=False, onlyRelevantExtensions=False):
    """Walk the malware sample directory and collect per-file statistics.

    Returns a tuple (string_stats, opcode_stats, file_info):
    string_stats / opcode_stats map each extracted string / opcode to its
    occurrence count, the file paths it appeared in and a per-basename
    counter; file_info maps file paths and basenames to hash / imphash /
    magic / size metadata (only filled when generateInfo is True).
    Relies on module globals: args, use_opcodes, RELEVANT_EXTENSIONS.
    """
    string_stats = {}
    opcode_stats = {}
    file_info = {}
    # SHA256 digests of already processed files (name kept from an older MD5/SHA1 version)
    known_sha1sums = []
    for filePath in get_files(dir, notRecursive):
        try:
            print("[+] Processing %s ..." % filePath)
            extension = os.path.splitext(filePath)[1].lower()
            if not extension in RELEVANT_EXTENSIONS and onlyRelevantExtensions:
                if args.debug:
                    print("[-] EXTENSION %s - Skipping file %s" % (extension, filePath))
                continue
            # Never process the goodware databases passed via -b / -r
            if os.path.basename(filePath) == os.path.basename(args.b) or \
                    os.path.basename(filePath) == os.path.basename(args.r):
                continue
            # Size check - args.fs is a limit in megabytes
            size = 0
            try:
                size = os.stat(filePath).st_size
                if size > (args.fs * 1024 * 1024):
                    if args.debug:
                        print("[-] File is to big - Skipping file %s (use -fs to adjust this behaviour)" % (filePath))
                    continue
            except Exception as e:
                pass
            # NOTE(review): on read failure there is no 'continue', so fileData
            # from the previous loop iteration may be reused - verify intent
            try:
                with open(filePath, 'rb') as f:
                    fileData = f.read()
            except Exception as e:
                print("[-] Cannot read file - skipping %s" % filePath)
            # Extract ASCII/UTF-16 strings (and opcodes when enabled)
            strings = extract_strings(fileData)
            opcodes = []
            if use_opcodes:
                print("[-] Extracting OpCodes: %s" % filePath)
                opcodes = extract_opcodes(fileData)
            if generateInfo:
                sha256sum = sha256(fileData).hexdigest()
                file_info[filePath] = {}
                file_info[filePath]["hash"] = sha256sum
                file_info[filePath]["imphash"], file_info[filePath]["exports"] = get_pe_info(fileData)
            # NOTE(review): sha256sum is only assigned when generateInfo is True;
            # with generateInfo=False this raises NameError into the outer except -
            # confirm all callers pass generateInfo=True
            # (message says "MD5" but the comparison uses SHA256)
            if sha256sum in known_sha1sums:
                print("[-] Skipping strings/opcodes from %s due to MD5 duplicate detection" % filePath)
                continue
            else:
                known_sha1sums.append(sha256sum)
            # First two bytes serve as the "magic" header marker for the global rule
            if not args.nomagic:
                file_info[filePath]["magic"] = fileData[:2]
            else:
                file_info[filePath]["magic"] = ""
            file_info[filePath]["size"] = os.stat(filePath).st_size
            # Aggregate per-basename info (used by the --inverse mode)
            fileName = os.path.basename(filePath)
            folderName = os.path.basename(os.path.dirname(filePath))
            if fileName not in file_info:
                file_info[fileName] = {}
                file_info[fileName]["count"] = 0
                file_info[fileName]["hashes"] = []
                file_info[fileName]["folder_names"] = []
            file_info[fileName]["count"] += 1
            file_info[fileName]["hashes"].append(sha256sum)
            if folderName not in file_info[fileName]["folder_names"]:
                file_info[fileName]["folder_names"].append(folderName)
            # Accumulate string occurrence statistics
            for string in strings:
                if string not in string_stats:
                    string_stats[string] = {}
                    string_stats[string]["count"] = 0
                    string_stats[string]["files"] = []
                    string_stats[string]["files_basename"] = {}
                string_stats[string]["count"] += 1
                if fileName not in string_stats[string]["files_basename"]:
                    string_stats[string]["files_basename"][fileName] = 0
                string_stats[string]["files_basename"][fileName] += 1
                string_stats[string]["files"].append(filePath)
            # Accumulate opcode occurrence statistics (same layout as strings)
            for opcode in opcodes:
                if opcode not in opcode_stats:
                    opcode_stats[opcode] = {}
                    opcode_stats[opcode]["count"] = 0
                    opcode_stats[opcode]["files"] = []
                    opcode_stats[opcode]["files_basename"] = {}
                opcode_stats[opcode]["count"] += 1
                if fileName not in opcode_stats[opcode]["files_basename"]:
                    opcode_stats[opcode]["files_basename"][fileName] = 0
                opcode_stats[opcode]["files_basename"][fileName] += 1
                opcode_stats[opcode]["files"].append(filePath)
            if args.debug:
                print("[+] Processed " + filePath + " Size: " + str(size) + " Strings: " + str(len(string_stats)) + \
                      " OpCodes: " + str(len(opcode_stats)) + " ... ")
        except Exception as e:
            traceback.print_exc()
            print("[E] ERROR reading file: %s" % filePath)
    return string_stats, opcode_stats, file_info
def parse_good_dir(dir, notRecursive=False, onlyRelevantExtensions=True):
    """Scan the goodware directory and count strings, opcodes, imphashes, exports.

    Returns four collections.Counter objects:
    (all_strings, all_opcodes, all_imphashes, all_exports).
    Relies on module globals: args, use_opcodes, RELEVANT_EXTENSIONS.
    """
    all_strings = Counter()
    all_opcodes = Counter()
    all_imphashes = Counter()
    all_exports = Counter()
    for filePath in get_files(dir, notRecursive):
        # Only process configured extensions unless disabled
        extension = os.path.splitext(filePath)[1].lower()
        if extension not in RELEVANT_EXTENSIONS and onlyRelevantExtensions:
            if args.debug:
                print("[-] EXTENSION %s - Skipping file %s" % (extension, filePath))
            continue
        # Size limit in megabytes (args.fs)
        size = 0
        try:
            size = os.stat(filePath).st_size
            if size > (args.fs * 1024 * 1024):
                continue
        except Exception as e:
            pass
        # NOTE(review): on read failure there is no 'continue', so fileData from
        # the previous iteration may be reused - verify intent
        try:
            with open(filePath, 'rb') as f:
                fileData = f.read()
        except Exception as e:
            print("[-] Cannot read file - skipping %s" % filePath)
        # Count extracted strings across the whole goodware set
        strings = extract_strings(fileData)
        all_strings.update(strings)
        opcodes = []
        if use_opcodes:
            print("[-] Extracting OpCodes: %s" % filePath)
            opcodes = extract_opcodes(fileData)
            all_opcodes.update(opcodes)
        # PE metadata (empty values for non-PE files)
        (imphash, exports) = get_pe_info(fileData)
        all_exports.update(exports)
        all_imphashes.update([imphash])
        if args.debug:
            print("[+] Processed %s - %d strings %d opcodes %d exports and imphash %s" % (filePath, len(strings),
                                                                                          len(opcodes), len(exports),
                                                                                          imphash))
    return all_strings, all_opcodes, all_imphashes, all_exports
def extract_strings(fileData):
    """Extract printable ASCII and UTF-16LE strings from raw file data.

    Backslashes and double quotes are escaped for later YARA rule output;
    UTF-16LE findings are prefixed with "UTF16LE:".
    Relies on module globals: args, extract_hex_strings.
    """
    cleaned_strings = []
    try:
        # Full-length and length-capped (args.s) printable ASCII runs
        strings_full = re.findall("[\x1f-\x7e]{6,}", fileData)
        strings_limited = re.findall("[\x1f-\x7e]{6,%d}" % args.s, fileData)
        strings_hex = extract_hex_strings(fileData)
        strings = list(set(strings_full) | set(strings_limited) | set(strings_hex))
        # Wide (UTF-16LE) strings, decoded and tagged with a marker prefix
        strings += [str("UTF16LE:%s" % ws.decode("utf-16le")) for ws in re.findall("(?:[\x1f-\x7e][\x00]){6,}", fileData)]
        for string in strings:
            if len(string) > 0:
                # Escape for YARA string syntax
                string = string.replace('\\', '\\\\')
                string = string.replace('"', '\\"')
                # NOTE(review): the membership test uses the un-stripped string
                # while the stripped version is appended - near-duplicates that
                # differ only in leading spaces may both end up in the result
                if string not in cleaned_strings:
                    cleaned_strings.append(string.lstrip(" "))
    except Exception as e:
        if args.debug:
            traceback.print_exc()
        pass
    return cleaned_strings
def extract_opcodes(fileData):
    """Extract hex-encoded 16-byte code snippets from a PE's entry section.

    Locates the section containing the entry point, splits its raw data on
    runs of three or more null bytes and returns the first 16 bytes of each
    chunk, hex-encoded. Returns an empty list for non-PE or broken input.
    Relies on the third-party pefile module (imported elsewhere in this file).
    """
    opcodes = []
    try:
        pe = pefile.PE(data=fileData)
        name = ""
        ep = pe.OPTIONAL_HEADER.AddressOfEntryPoint
        pos = 0
        # Find the section that contains the entry point
        for sec in pe.sections:
            if (ep >= sec.VirtualAddress) and \
               (ep < (sec.VirtualAddress + sec.Misc_VirtualSize)):
                name = sec.Name.replace('\x00', '')
                break
            else:
                pos += 1
        # Pull snippets from that section's raw data
        for section in pe.sections:
            if section.Name.rstrip("\x00") == name:
                text = section.get_data()
                text_parts = re.split("[\x00]{3,}", text)
                for text_part in text_parts:
                    if text_part == '' or len(text_part) < 8:
                        continue
                    opcodes.append(text_part[:16].encode('hex'))
    except Exception as e:
        pass
    return opcodes
def get_pe_info(fileData):
    """Return (imphash, export names) of a PE file, empty values otherwise.

    Non-MZ data, parsing errors and files without an export directory all
    yield ("", []) / partial results silently.
    Relies on module globals: args, pefile.
    """
    imphash = ""
    exports = []
    # Quick magic check before handing the data to pefile
    if fileData[:2] != "MZ":
        return imphash, exports
    try:
        if args.debug:
            print("Extracting PE information")
        p = pefile.PE(data=fileData)
        imphash = p.get_imphash()
        # DIRECTORY_ENTRY_EXPORT raises AttributeError when absent - caught below
        for exp in p.DIRECTORY_ENTRY_EXPORT.symbols:
            exports.append(exp.name)
    except Exception as e:
        pass
    return imphash, exports
def sample_string_evaluation(string_stats, opcode_stats, file_info):
    """Derive rule-building data from the raw string/opcode statistics.

    Returns (file_strings, file_opcodes, combinations, super_rules,
    inverse_stats): per-file lists of rare strings/opcodes (seen in fewer
    than 10 samples), string combinations shared across several files
    ("super rules"), and - in --inverse mode - strings present in every
    sample of the same basename.
    Relies on module globals: args, nosuper, filter_string_set.
    """
    print("[+] Generating statistical data ...")
    file_strings = {}
    file_opcodes = {}
    combinations = {}
    inverse_stats = {}
    max_combi_count = 0
    super_rules = []
    # Rare opcodes (count < 10) grouped per file
    for opcode in opcode_stats:
        if opcode_stats[opcode]["count"] < 10:
            for filePath in opcode_stats[opcode]["files"]:
                if filePath in file_opcodes:
                    file_opcodes[filePath].append(opcode)
                else:
                    file_opcodes[filePath] = []
                    file_opcodes[filePath].append(opcode)
    # Rare strings (count < 10) grouped per file
    for string in string_stats:
        if string_stats[string]["count"] < 10:
            for filePath in string_stats[string]["files"]:
                if filePath in file_strings:
                    file_strings[filePath].append(string)
                else:
                    file_strings[filePath] = []
                    file_strings[filePath].append(string)
        # Inverse mode: keep strings that occur in ALL samples of a basename
        if args.inverse:
            for fileName in string_stats[string]["files_basename"]:
                string_occurrance_count = string_stats[string]["files_basename"][fileName]
                total_count_basename = file_info[fileName]["count"]
                if string_occurrance_count == total_count_basename:
                    if fileName not in inverse_stats:
                        inverse_stats[fileName] = []
                    if args.trace:
                        print("Appending %s to %s" % (string, fileName))
                    inverse_stats[fileName].append(string)
        # Super rules: collect strings shared by the same set of files
        if not nosuper and not args.inverse:
            if string_stats[string]["count"] > 1:
                if args.debug:
                    print("OVERLAP Count: %s\nString: \"%s\"%s" % (string_stats[string]["count"], string,
                                                                   "\nFILE: ".join(string_stats[string]["files"])))
                # The sorted file list acts as the combination key
                combi = ":".join(sorted(string_stats[string]["files"]))
                if args.debug:
                    print("COMBI: " + combi)
                if combi not in combinations:
                    combinations[combi] = {}
                    combinations[combi]["count"] = 1
                    combinations[combi]["strings"] = []
                    combinations[combi]["strings"].append(string)
                    combinations[combi]["files"] = string_stats[string]["files"]
                else:
                    combinations[combi]["count"] += 1
                    combinations[combi]["strings"].append(string)
                if combinations[combi]["count"] > max_combi_count:
                    max_combi_count = combinations[combi]["count"]
    print("[+] Generating Super Rules ... (a lot of foo magic)")
    # Promote combinations with enough filtered strings (>= args.w) to super rules,
    # iterating from the largest string sets downwards
    for combi_count in range(max_combi_count, 1, -1):
        for combi in combinations:
            if combi_count == combinations[combi]["count"]:
                string_set = combinations[combi]["strings"]
                combinations[combi]["strings"] = []
                combinations[combi]["strings"] = filter_string_set(string_set)
                if len(combinations[combi]["strings"]) >= int(args.w):
                    # --nosimple: drop simple rules for files covered by a super rule
                    if args.nosimple:
                        for file in combinations[combi]["files"]:
                            if file in file_strings:
                                del file_strings[file]
                    print("[-] Adding Super Rule with %s strings." % str(len(combinations[combi]["strings"])))
                    super_rules.append(combinations[combi])
    return (file_strings, file_opcodes, combinations, super_rules, inverse_stats)
def filter_opcode_set(opcode_set):
    """Select up to args.n formatted opcode strings for rule output.

    Opcodes found in the goodware opcode DB are dropped; opcodes containing
    one of the preferred byte patterns are moved to the front of the result.
    Relies on module globals: good_opcodes_db, get_opcode_string, args.
    """
    # Byte patterns that make an opcode snippet preferred
    pref_opcodes = [' 34 ', 'ff ff ff ']
    useful_set = []
    pref_set = []
    for opcode in opcode_set:
        # Skip opcodes that also occur in goodware
        if opcode in good_opcodes_db:
            continue
        formatted_opcode = get_opcode_string(opcode)
        set_in_pref = False
        for pref in pref_opcodes:
            if pref in formatted_opcode:
                pref_set.append(formatted_opcode)
                set_in_pref = True
        if set_in_pref:
            continue
        useful_set.append(get_opcode_string(opcode))
    # Preferred opcodes first, then truncate to the configured maximum
    useful_set = pref_set + useful_set
    return useful_set[:int(args.n)]
def filter_string_set(string_set):
    """Score extracted strings and return the relevant ones, best first.

    Each string is scored via goodware-DB lookups, the optional PEStudio
    blacklist, a Bayes classifier and a long list of heuristic regex rules;
    strings below the minimum score (args.z) are dropped unless
    --noscorefilter or --inverse is active. UTF-16 strings keep their
    "UTF16LE:" prefix in the returned list.
    Relies on module globals: args, good_strings_db, stringTrainer,
    tokenizer, pestudio_available, stringScores, pestudioMarker,
    base64strings, hexEncStrings, reversedStrings.
    """
    useful_set = []
    # Bayes Classificator (new method)
    stringClassifier = Classifier(stringTrainer.data, tokenizer)
    # Local string scores
    localStringScores = {}
    # Local UTF strings
    utfstrings = []
    for string in string_set:
        # Goodware string marker
        goodstring = False
        goodcount = 0
        # Goodware Strings
        if string in good_strings_db:
            goodstring = True
            goodcount = good_strings_db[string]
            # print "%s - %s" % ( goodstring, good_strings[string] )
            if args.excludegood:
                continue
        # UTF
        original_string = string
        if string[:8] == "UTF16LE:":
            # print "removed UTF16LE from %s" % string
            string = string[8:]
            utfstrings.append(string)
        # Good string evaluation (after the UTF modification)
        if goodstring:
            # Reduce the score by the number of occurence in goodware files
            localStringScores[string] = (goodcount * -1) + 5
        else:
            localStringScores[string] = 0
        # PEStudio String Blacklist Evaluation
        if pestudio_available:
            (pescore, type) = get_pestudio_score(string)
            # print("PE Match: %s" % string)
            # Reset score of goodware files to 5 if blacklisted in PEStudio
            if type != "":
                pestudioMarker[string] = type
                # Modify the PEStudio blacklisted strings with their goodware stats count
                if goodstring:
                    pescore = pescore - (goodcount / 1000.0)
                    # print "%s - %s - %s" % (string, pescore, goodcount)
                localStringScores[string] = pescore
        if not goodstring:
            # Bayes Classifier
            classification = stringClassifier.classify(string)
            if classification[0][1] == 0 and len(string) > 10:
                # Try to split the string into words and then check again
                modified_string = re.sub(r'[\\\/\-\.\_<>="\']', ' ', string).rstrip(" ").lstrip(" ")
                # print "Checking instead: %s" % modified_string
                classification = stringClassifier.classify(modified_string)
            #if args.debug:
            #    print "[D] Bayes Score: %s %s" % (str(classification), string)
            localStringScores[string] += classification[0][1]
        # Length Score
        #length = len(string)
        #if length > int(args.y) and length < int(args.s):
        #    localStringScores[string] += round(len(string) / 8, 2)
        #if length >= int(args.s):
        #    localStringScores[string] += 1
        # Reduction
        if ".." in string:
            localStringScores[string] -= 5
        if " " in string:
            localStringScores[string] -= 5
        # Packer Strings
        if re.search(r'(WinRAR\\SFX)', string):
            localStringScores[string] -= 4
        # US ASCII char
        if "\x1f" in string:
            localStringScores[string] -= 4
        # Chains of 00s
        if string.count('0000000000') > 2:
            localStringScores[string] -= 5
        # Repeated characters
        if re.search(r'(?!.* ([A-Fa-f0-9])\1{8,})', string):
            localStringScores[string] -= 5
        # Certain strings add-ons ----------------------------------------------
        # Extensions - Drive
        if re.search(r'[A-Za-z]:\\', string, re.IGNORECASE):
            localStringScores[string] += 2
        # Relevant file extensions
        if re.search(r'(\.exe|\.pdb|\.scr|\.log|\.cfg|\.txt|\.dat|\.msi|\.com|\.bat|\.dll|\.pdb|\.vbs|'
                     r'\.tmp|\.sys|\.ps1|\.vbp|\.hta|\.lnk)', string, re.IGNORECASE):
            localStringScores[string] += 4
        # System keywords
        if re.search(r'(cmd.exe|system32|users|Documents and|SystemRoot|Grant|hello|password|process|log)',
                     string, re.IGNORECASE):
            localStringScores[string] += 5
        # Protocol Keywords
        if re.search(r'(ftp|irc|smtp|command|GET|POST|Agent|tor2web|HEAD)', string, re.IGNORECASE):
            localStringScores[string] += 5
        # Connection keywords
        if re.search(r'(error|http|closed|fail|version|proxy)', string, re.IGNORECASE):
            localStringScores[string] += 3
        # Browser User Agents
        if re.search(r'(Mozilla|MSIE|Windows NT|Macintosh|Gecko|Opera|User\-Agent)', string, re.IGNORECASE):
            localStringScores[string] += 5
        # Temp and Recycler
        if re.search(r'(TEMP|Temporary|Appdata|Recycler)', string, re.IGNORECASE):
            localStringScores[string] += 4
        # Malicious keywords - hacktools
        if re.search(r'(scan|sniff|poison|intercept|fake|spoof|sweep|dump|flood|inject|forward|scan|vulnerable|'
                     r'credentials|creds|coded|p0c|Content|host)', string, re.IGNORECASE):
            localStringScores[string] += 5
        # Network keywords
        if re.search(r'(address|port|listen|remote|local|process|service|mutex|pipe|frame|key|lookup|connection)',
                     string, re.IGNORECASE):
            localStringScores[string] += 3
        # Drive
        if re.search(r'([C-Zc-z]:\\)', string, re.IGNORECASE):
            localStringScores[string] += 4
        # IP
        if re.search(
                r'\b(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\b',
                string, re.IGNORECASE):  # IP Address
            localStringScores[string] += 5
        # Copyright Owner
        if re.search(r'(coded | c0d3d |cr3w\b|Coded by |codedby)', string, re.IGNORECASE):
            localStringScores[string] += 7
        # Extension generic
        if re.search(r'\.[a-zA-Z]{3}\b', string):
            localStringScores[string] += 3
        # All upper case
        if re.search(r'^[A-Z]{6,}$', string):
            localStringScores[string] += 2.5
        # All lower case
        if re.search(r'^[a-z]{6,}$', string):
            localStringScores[string] += 2
        # All lower with space
        if re.search(r'^[a-z\s]{6,}$', string):
            localStringScores[string] += 2
        # All characters
        if re.search(r'^[A-Z][a-z]{5,}$', string):
            localStringScores[string] += 2
        # URL
        if re.search(r'(%[a-z][:\-,;]|\\\\%s|\\\\[A-Z0-9a-z%]+\\[A-Z0-9a-z%]+)', string):
            localStringScores[string] += 2.5
        # certificates
        if re.search(r'(thawte|trustcenter|signing|class|crl|CA|certificate|assembly)', string, re.IGNORECASE):
            localStringScores[string] -= 4
        # Parameters
        if re.search(r'( \-[a-z]{,2}[\s]?[0-9]?| /[a-z]+[\s]?[\w]*)', string, re.IGNORECASE):
            localStringScores[string] += 4
        # Directory
        if re.search(r'([a-zA-Z]:|^|%)\\[A-Za-z]{4,30}\\', string):
            localStringScores[string] += 4
        # Executable - not in directory
        if re.search(r'^[^\\]+\.(exe|com|scr|bat|sys)$', string, re.IGNORECASE):
            localStringScores[string] += 4
        # Date placeholders
        if re.search(r'(yyyy|hh:mm|dd/mm|mm/dd|%s:%s:)', string, re.IGNORECASE):
            localStringScores[string] += 3
        # Placeholders
        if re.search(r'[^A-Za-z](%s|%d|%i|%02d|%04d|%2d|%3s)[^A-Za-z]', string, re.IGNORECASE):
            localStringScores[string] += 3
        # String parts from file system elements
        if re.search(r'(cmd|com|pipe|tmp|temp|recycle|bin|secret|private|AppData|driver|config)', string,
                     re.IGNORECASE):
            localStringScores[string] += 3
        # Programming
        if re.search(r'(execute|run|system|shell|root|cimv2|login|exec|stdin|read|process|netuse|script|share)',
                     string, re.IGNORECASE):
            localStringScores[string] += 3
        # Credentials
        if re.search(r'(user|pass|login|logon|token|cookie|creds|hash|ticket|NTLM|LMHASH|kerberos|spnego|session|'
                     r'identif|account|login|auth|privilege)', string, re.IGNORECASE):
            localStringScores[string] += 3
        # Malware
        if re.search(r'(\.[a-z]/[^/]+\.txt|)', string, re.IGNORECASE):
            localStringScores[string] += 3
        # Variables
        if re.search(r'%[A-Z_]+%', string, re.IGNORECASE):
            localStringScores[string] += 4
        # RATs / Malware
        if re.search(r'(spy|logger|dark|cryptor|RAT\b|eye|comet|evil|xtreme|poison|meterpreter|metasploit|/veil|Blood)',
                     string, re.IGNORECASE):
            localStringScores[string] += 5
        # Missed user profiles
        if re.search(r'[\\](users|profiles|username|benutzer|Documents and Settings|Utilisateurs|Utenti|'
                     r'Usuários)[\\]', string, re.IGNORECASE):
            localStringScores[string] += 3
        # Strings: Words ending with numbers
        if re.search(r'^[A-Z][a-z]+[0-9]+$', string, re.IGNORECASE):
            localStringScores[string] += 1
        # Spying
        if re.search(r'(implant)', string, re.IGNORECASE):
            localStringScores[string] += 1
        # Program Path - not Programs or Windows
        if re.search(r'^[Cc]:\\\\[^PW]', string):
            localStringScores[string] += 3
        # Special strings
        if re.search(r'(\\\\\.\\|kernel|.dll|usage|\\DosDevices\\)', string, re.IGNORECASE):
            localStringScores[string] += 5
        # Parameters
        if re.search(r'( \-[a-z] | /[a-z] | \-[a-z]:[a-zA-Z]| \/[a-z]:[a-zA-Z])', string):
            localStringScores[string] += 4
        # File
        if re.search(r'^[a-zA-Z0-9]{3,40}\.[a-zA-Z]{3}', string, re.IGNORECASE):
            localStringScores[string] += 3
        # Comment Line / Output Log
        if re.search(r'^([\*\#]+ |\[[\*\-\+]\] |[\-=]> |\[[A-Za-z]\] )', string):
            localStringScores[string] += 4
        # Output typo / special expression
        if re.search(r'(!\.$|!!!$| :\)$| ;\)$|fucked|[\w]\.\.\.\.$)', string):
            localStringScores[string] += 4
        # Base64
        if re.search(r'^(?:[A-Za-z0-9+/]{4}){30,}(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$', string) and \
                re.search(r'[A-Za-z]', string) and re.search(r'[0-9]', string):
            localStringScores[string] += 6
        # Base64 Executables
        if re.search(r'(TVqQAAMAAAAEAAAA//8AALgAAAA|TVpQAAIAAAAEAA8A//8AALgAAAA|TVqAAAEAAAAEABAAAAAAAAAAAAA|'
                     r'TVoAAAAAAAAAAAAAAAAAAAAAAAA|TVpTAQEAAAAEAAAA//8AALgAAAA)', string):
            localStringScores[string] += 5
        # Malicious intent
        if re.search(r'(loader|cmdline|ntlmhash|lmhash|infect|encrypt|exec|elevat|dump|target|victim|override|'
                     r'traverse|mutex|pawnde|exploited|shellcode|injected|spoofed|dllinjec|exeinj|reflective|'
                     r'payload|inject|back conn)',
                     string, re.IGNORECASE):
            localStringScores[string] += 5
        # Privileges
        if re.search(r'(administrator|highest|system|debug|dbg|admin|adm|root) privilege', string, re.IGNORECASE):
            localStringScores[string] += 4
        # System file/process names
        if re.search(r'(LSASS|SAM|lsass.exe|cmd.exe|LSASRV.DLL)', string):
            localStringScores[string] += 4
        # System file/process names
        if re.search(r'(\.exe|\.dll|\.sys)$', string, re.IGNORECASE):
            localStringScores[string] += 4
        # Indicators that string is valid
        if re.search(r'(^\\\\)', string, re.IGNORECASE):
            localStringScores[string] += 1
        # Compiler output directories
        if re.search(r'(\\Release\\|\\Debug\\|\\bin|\\sbin)', string, re.IGNORECASE):
            localStringScores[string] += 2
        # Special - Malware related strings
        if re.search(r'(Management Support Team1|/c rundll32|DTOPTOOLZ Co.|net start|Exec|taskkill)', string):
            localStringScores[string] += 4
        # Powershell
        if re.search(r'(bypass|windowstyle | hidden |-command|IEX |Invoke-Expression|Net.Webclient|Invoke[A-Z]|'
                     r'Net.WebClient|-w hidden |-encoded'
                     r'-encodedcommand| -nop |MemoryLoadLibrary|FromBase64String|Download|EncodedCommand)', string, re.IGNORECASE):
            localStringScores[string] += 4
        # WMI
        if re.search(r'( /c WMIC)', string, re.IGNORECASE):
            localStringScores[string] += 3
        # Windows Commands
        if re.search(r'( net user | net group |ping |whoami |bitsadmin |rundll32.exe javascript:|'
                     r'schtasks.exe /create|/c start )',
                     string, re.IGNORECASE):
            localStringScores[string] += 3
        # JavaScript
        if re.search(r'(new ActiveXObject\("WScript.Shell"\).Run|.Run\("cmd.exe|.Run\("%comspec%\)|'
                     r'.Run\("c:\\Windows|.RegisterXLL\()', string, re.IGNORECASE):
            localStringScores[string] += 3
        # Company names (often found in legitimate version info)
        if re.search(r'( Inc | Co.| Ltd.,| LLC| Limited)', string):
            localStringScores[string] += 2
        if re.search(r'(sysprep|cryptbase|secur32)', string, re.IGNORECASE):
            localStringScores[string] += 2
        if re.search(r'(isset\($post\[|isset\($get\[|eval\(Request)', string, re.IGNORECASE):
            localStringScores[string] += 2
        if re.search(r'(impersonate|drop|upload|download|execute|shell|\bcmd\b|decode|rot13|decrypt)', string,
                     re.IGNORECASE):
            localStringScores[string] += 2
        # Hacktool output markers and offensive tooling vocabulary
        if re.search(r'([+] |[-] |[*] |injecting|exploit|dumped|dumping|scanning|scanned|elevation|'
                     r'elevated|payload|vulnerable|payload|reverse connect|bind shell|reverse shell| dump | '
                     r'back connect |privesc|privilege escalat|debug privilege| inject |interactive shell|'
                     r'shell commands| spawning |] target |] Transmi|] Connect|] connect|] Dump|] command |'
                     r'] token|] Token |] Firing | hashes | etc/passwd| SAM | NTML|unsupported target|'
                     r'race condition|Token system |LoaderConfig| add user |ile upload |ile download |'
                     r'Attaching to |ser has been successfully added|target system |LSA Secrets|DefaultPassword|'
                     r'Password: |loading dll|.Execute\(|Shellcode|Loader|inject x86|inject x64|bypass|katz|'
                     r'sploit|ms[0-9][0-9][^0-9]|\bCVE[^a-zA-Z]|privilege::|lsadump|door)',
                     string, re.IGNORECASE):
            localStringScores[string] += 4
        if re.search(r'(Mutex|NamedPipe|\\Global\\|\\pipe\\)', string, re.IGNORECASE):
            localStringScores[string] += 3
        if re.search(r'(isset\($post\[|isset\($get\[)', string, re.IGNORECASE):
            localStringScores[string] += 2
        # Hash-like hex strings (MD5 / SHA1 / SHA256 length)
        if re.search(r'\b([a-f0-9]{32}|[a-f0-9]{40}|[a-f0-9]{64})\b', string, re.IGNORECASE):
            localStringScores[string] += 2
        if re.search(r'(sc.exe |schtasks|at \\\\|at [0-9]{2}:[0-9]{2})', string, re.IGNORECASE):
            localStringScores[string] += 3
        if re.search(r'(;chmod |; chmod |sh -c|/dev/tcp/|/bin/telnet|selinux| shell| cp /bin/sh )', string,
                     re.IGNORECASE):
            localStringScores[string] += 3
        if re.search(
                r'(attacker|brute force|bruteforce|connecting back|EXHAUSTIVE|exhaustion| spawn| evil| elevated)',
                string, re.IGNORECASE):
            localStringScores[string] += 3
        if re.search(r'(abcdefghijklmnopqsst|ABCDEFGHIJKLMNOPQRSTUVWXYZ|0123456789:;)', string, re.IGNORECASE):
            localStringScores[string] -= 5
        if re.search(
                r'(kill|wscript|plugins|svr32|Select |)',
                string, re.IGNORECASE):
            localStringScores[string] += 3
        if re.search(
                r'([a-z]{4,}[!\?]|\[[!+\-]\] |[a-zA-Z]{4,}...)',
                string, re.IGNORECASE):
            localStringScores[string] += 3
        if re.search(
                r'(-->|!!!| <<< | >>> )',
                string, re.IGNORECASE):
            localStringScores[string] += 5
        if re.search(
                r'\b(fuck|damn|shit|penis)\b',
                string, re.IGNORECASE):
            localStringScores[string] += 5
        if re.search(
                r'(%APPDATA%|%USERPROFILE%|Public|Roaming|& del|& rm| && |script)',
                string, re.IGNORECASE):
            localStringScores[string] += 3
        if re.search(
                r'(Elevation|pwnd|pawn|elevate to)',
                string, re.IGNORECASE):
            localStringScores[string] += 3
        # Encoded strings: boost strings that decode from Base64 or hex to ASCII
        try:
            if len(string) > 8:
                if args.trace:
                    print("Starting Base64 string analysis ...")
                for m_string in (string, string[1:], string[1:] + "=", string + "=", string + "=="):
                    if is_base_64(m_string):
                        decoded_string = m_string.decode('base64')
                        if is_ascii_string(decoded_string, padding_allowed=True):
                            localStringScores[string] += 10
                            base64strings[string] = decoded_string
                if args.trace:
                    print("Starting Hex encoded string analysis ...")
                for m_string in ([string, re.sub('[^a-zA-Z0-9]', '', string)]):
                    if is_hex_encoded(m_string):
                        decoded_string = m_string.decode('hex')
                        if is_ascii_string(decoded_string, padding_allowed=True):
                            # Skip hex strings that are mostly zeroes
                            if '00' in m_string:
                                if len(m_string) / float(m_string.count('0')) <= 1.2:
                                    continue
                            localStringScores[string] += 8
                            hexEncStrings[string] = decoded_string
        except Exception as e:
            if args.debug:
                traceback.print_exc()
            pass
        # Reversed goodware strings are suspicious
        if string[::-1] in good_strings_db:
            localStringScores[string] += 10
            reversedStrings[string] = string[::-1]
        if re.search(r'(rundll32\.exe$|kernel\.dll$)', string, re.IGNORECASE):
            localStringScores[string] -= 4
        # Publish the score under the original (possibly UTF16LE-prefixed) key
        stringScores[original_string] = localStringScores[string]
        # NOTE(review): is_utf is computed but never used - presumably a
        # leftover of a removed debug print; verify before relying on it
        if args.debug:
            if string in utfstrings:
                is_utf = True
            else:
                is_utf = False
    # Sort by score, best first; entries are (string, score) tuples
    sorted_set = sorted(localStringScores.iteritems(), key=operator.itemgetter(1), reverse=True)
    # NOTE(review): c is never incremented or read - apparent dead code
    c = 0
    result_set = []
    for string in sorted_set:
        # Apply the minimum score filter (args.z) unless disabled
        if not args.noscorefilter and not args.inverse:
            if string[1] < int(args.z):
                continue
        # Restore the UTF16LE marker for wide strings
        if string[0] in utfstrings:
            result_set.append("UTF16LE:%s" % string[0])
        else:
            result_set.append(string[0])
    if args.trace:
        print("RESULT SET:")
        print(result_set)
    return result_set
def generate_general_condition(file_info):
    """Build the condition of the global YARA rule from common sample traits.

    Combines up to three clauses: the magic header(s) shared by the samples
    (only if there are 1-5 distinct headers), a file size upper bound based
    on the biggest sample, and - when all samples share a single imphash -
    a pe.imphash() check.

    Returns a tuple (condition_string, pe_module_neccessary); the flag tells
    the caller that the rule file must import the 'pe' module.
    Relies on module globals: args, get_uint_string, get_file_range.
    """
    # Initialized up front so the except path cannot hit a NameError at return
    # (the original initialized a misspelled, unused 'conditions_string')
    condition_string = ""
    conditions = []
    pe_module_neccessary = False
    # Distinct values observed across all samples
    magic_headers = []
    file_sizes = []
    imphashes = []
    try:
        for filePath in file_info:
            # Entries without magic info (e.g. basename aggregates) are skipped
            if "magic" not in file_info[filePath]:
                continue
            magic = file_info[filePath]["magic"]
            size = file_info[filePath]["size"]
            imphash = file_info[filePath]["imphash"]
            if magic not in magic_headers and magic != "":
                magic_headers.append(magic)
            if size not in file_sizes:
                file_sizes.append(size)
            if imphash not in imphashes and imphash != "":
                imphashes.append(imphash)
        # Magic header clause - only when the set is small enough to be useful
        # and non-empty (the original appended an empty clause for 0 headers)
        if 0 < len(magic_headers) <= 5:
            magic_string = " or ".join(get_uint_string(h) for h in magic_headers)
            if " or " in magic_string:
                conditions.append("( {0} )".format(magic_string))
            else:
                conditions.append("{0}".format(magic_string))
        # File size clause based on the biggest sample (disabled via --nofilesize)
        if not args.nofilesize and len(file_sizes) > 0:
            conditions.append(get_file_range(max(file_sizes)))
        # Imphash clause only when ALL samples share one imphash
        if len(imphashes) == 1:
            conditions.append("pe.imphash() == \"{0}\"".format(imphashes[0]))
            pe_module_neccessary = True
        condition_string = " and ".join(conditions)
    except Exception as e:
        # Report before terminating - in the original the message followed
        # exit(1) and was therefore unreachable
        print("[E] ERROR while generating general condition - check the global rule and remove it if it's faulty")
        if args.debug:
            traceback.print_exc()
        exit(1)
    return condition_string, pe_module_neccessary
def generate_rules(file_strings, file_opcodes, super_rules, file_info, inverse_stats):
    """Write the final YARA rule file and return rule counts.

    Emits, in order: a header comment, an optional global rule, simple
    per-sample rules, super rules (for string sets shared by several
    samples) and - in inverse mode - anomaly rules. Returns the tuple
    (rule_count, inverse_rule_count, super_rule_count).

    NOTE(review): `fh` is only opened when args.o is truthy, yet some
    writes below (e.g. the "Rule Set" header) use it unconditionally;
    this only works because args.o defaults to 'yargen_rules.yar' -
    confirm before changing that default.
    """
    # Write to file ---------------------------------------------------
    if args.o:
        try:
            fh = open(args.o, 'w')
        except Exception as e:
            traceback.print_exc()
        # General Info
        general_info = "/*\n"
        general_info += " YARA Rule Set\n"
        general_info += " Author: {0}\n".format(args.a)
        general_info += " Date: {0}\n".format(get_timestamp_basic())
        general_info += " Identifier: {0}\n".format(identifier)
        general_info += " Reference: {0}\n".format(reference)
        if args.l != "":
            general_info += " License: {0}\n".format(args.l)
        general_info += "*/\n\n"
        fh.write(general_info)
    # GLOBAL RULES ----------------------------------------------------
    if args.globalrule:
        condition, pe_module_necessary = generate_general_condition(file_info)
        # Global Rule
        if condition != "":
            global_rule = "/* Global Rule -------------------------------------------------------------- */\n"
            global_rule += "/* Will be evaluated first, speeds up scanning process, remove at will */\n\n"
            global_rule += "global private rule gen_characteristics {\n"
            global_rule += " condition:\n"
            global_rule += " {0}\n".format(condition)
            global_rule += "}\n\n"
            # Write rule
            if args.o:
                fh.write(global_rule)
    # General vars
    rules = ""
    printed_rules = {}
    opcodes_to_add = []
    rule_count = 0
    inverse_rule_count = 0
    super_rule_count = 0
    pe_module_necessary = False
    if not args.inverse:
        # PROCESS SIMPLE RULES ----------------------------------------------------
        print("[+] Generating Simple Rules ...")
        # Apply intelligent filters
        print("[-] Applying intelligent filters to string findings ...")
        for filePath in file_strings:
            print("[-] Filtering string set for %s ..." % filePath)
            # Replace the original string set with the filtered one
            string_set = file_strings[filePath]
            file_strings[filePath] = []
            file_strings[filePath] = filter_string_set(string_set)
            # Replace the original string set with the filtered one
            if filePath not in file_opcodes:
                file_opcodes[filePath] = []
            else:
                print("[-] Filtering opcode set for %s ..." % filePath)
                opcode_set = file_opcodes[filePath]
                file_opcodes[filePath] = []
                file_opcodes[filePath] = filter_opcode_set(opcode_set)
        # GENERATE SIMPLE RULES -------------------------------------------
        fh.write("/* Rule Set ----------------------------------------------------------------- */\n\n")
        for filePath in file_strings:
            # Skip if there is nothing to do
            if len(file_strings[filePath]) == 0:
                print("[W] Not enough high scoring strings to create a rule. "
                      "(Try -z 0 to reduce the min score or --opcodes to include opcodes) FILE: %s" % filePath)
                continue
            # NOTE(review): this branch is unreachable - the previous `if`
            # already consumed every case with an empty string set.
            elif len(file_strings[filePath]) == 0 and len(file_opcodes[filePath]) == 0:
                print("[W] Not enough high scoring strings and opcodes to create a rule. " \
                      "(Try -z 0 to reduce the min score) FILE: %s" % filePath)
                continue
            # Create Rule
            try:
                rule = ""
                (path, file) = os.path.split(filePath)
                # Prepare name
                fileBase = os.path.splitext(file)[0]
                # Create a clean new name
                cleanedName = fileBase
                # Adapt length of rule name
                if len(fileBase) < 8:  # if name is too short add part from path
                    cleanedName = path.split('\\')[-1:][0] + "_" + cleanedName
                # File name starts with a number
                if re.search(r'^[0-9]', cleanedName):
                    cleanedName = "sig_" + cleanedName
                # clean name from all characters that would cause errors
                cleanedName = re.sub('[^\w]', r'_', cleanedName)
                # Check if already printed
                if cleanedName in printed_rules:
                    printed_rules[cleanedName] += 1
                    cleanedName = cleanedName + "_" + str(printed_rules[cleanedName])
                else:
                    printed_rules[cleanedName] = 1
                # Print rule title ----------------------------------------
                rule += "rule %s {\n" % cleanedName
                # Meta data -----------------------------------------------
                rule += " meta:\n"
                rule += " description = \"%s - file %s\"\n" % (prefix, file)
                rule += " author = \"%s\"\n" % args.a
                rule += " reference = \"%s\"\n" % reference
                rule += " date = \"%s\"\n" % get_timestamp_basic()
                rule += " hash1 = \"%s\"\n" % file_info[filePath]["hash"]
                rule += " strings:\n"
                # Get the strings -----------------------------------------
                # Rule String generation
                (rule_strings, opcodes_included, string_rule_count, high_scoring_strings) = \
                    get_rule_strings(file_strings[filePath], file_opcodes[filePath])
                rule += rule_strings
                # Extract rul strings
                if args.strings:
                    strings = get_strings(file_strings[filePath])
                    write_strings(filePath, strings, args.e, args.score)
                # Condition -----------------------------------------------
                # Conditions list (will later be joined with 'or')
                conditions = []  # AND connected
                subconditions = []  # OR connected
                # Condition PE
                # Imphash and Exports - applicable to PE files only
                condition_pe = []
                condition_pe_part1 = []
                condition_pe_part2 = []
                if not args.noextras and file_info[filePath]["magic"] == "MZ":
                    # Add imphash - if certain conditions are met
                    if file_info[filePath]["imphash"] not in good_imphashes_db and file_info[filePath]["imphash"] != "":
                        # Comment to imphash
                        imphash = file_info[filePath]["imphash"]
                        comment = ""
                        if imphash in KNOWN_IMPHASHES:
                            comment = " /* {0} */".format(KNOWN_IMPHASHES[imphash])
                        # Add imphash to condition
                        condition_pe_part1.append("pe.imphash() == \"{0}\"{1}".format(imphash, comment))
                        pe_module_necessary = True
                    if file_info[filePath]["exports"]:
                        e_count = 0
                        # Collect up to 6 exports not known from goodware
                        for export in file_info[filePath]["exports"]:
                            if export not in good_exports_db:
                                condition_pe_part2.append("pe.exports(\"{0}\")".format(export))
                                e_count += 1
                                pe_module_necessary = True
                            if e_count > 5:
                                break
                # 1st Part of Condition 1
                basic_conditions = []
                # Filesize
                if not args.nofilesize:
                    basic_conditions.insert(0, get_file_range(file_info[filePath]["size"]))
                # Magic
                if file_info[filePath]["magic"] != "":
                    uint_string = get_uint_string(file_info[filePath]["magic"])
                    basic_conditions.insert(0, uint_string)
                # Basic Condition
                if len(basic_conditions):
                    conditions.append(" and ".join(basic_conditions))
                # Add extra PE conditions to condition 1
                pe_conditions_add = False
                if condition_pe_part1 or condition_pe_part2:
                    if len(condition_pe_part1) == 1:
                        condition_pe.append(condition_pe_part1[0])
                    elif len(condition_pe_part1) > 1:
                        condition_pe.append("( %s )" % " or ".join(condition_pe_part1))
                    if len(condition_pe_part2) == 1:
                        condition_pe.append(condition_pe_part2[0])
                    elif len(condition_pe_part2) > 1:
                        condition_pe.append("( %s )" % " and ".join(condition_pe_part2))
                    # Marker that PE conditions have been added
                    pe_conditions_add = True
                    # Add to sub condition
                    subconditions.append(" and ".join(condition_pe))
                # String combinations
                cond_op = ""  # opcodes condition
                cond_hs = ""  # high scoring strings condition
                cond_ls = ""  # low scoring strings condition
                low_scoring_strings = (string_rule_count - high_scoring_strings)
                if high_scoring_strings > 0:
                    cond_hs = "1 of ($x*)"
                if low_scoring_strings > 0:
                    if low_scoring_strings > 10:
                        if high_scoring_strings > 0:
                            cond_ls = "4 of them"
                        else:
                            cond_ls = "8 of them"
                    else:
                        cond_ls = "all of them"
                # If low scoring and high scoring
                cond_combined = "all of them"
                needs_brackets = False
                if low_scoring_strings > 0 and high_scoring_strings > 0:
                    # If PE conditions have been added, don't be so strict with the strings
                    if pe_conditions_add:
                        cond_combined = "{0} or {1}".format(cond_hs, cond_ls)
                        needs_brackets = True
                    else:
                        cond_combined = "{0} and {1}".format(cond_hs, cond_ls)
                elif low_scoring_strings > 0 and not high_scoring_strings > 0:
                    cond_combined = "{0}".format(cond_ls)
                elif not low_scoring_strings > 0 and high_scoring_strings > 0:
                    cond_combined = "{0}".format(cond_hs)
                if opcodes_included:
                    cond_op = " and all of ($op*)"
                if cond_op or needs_brackets:
                    subconditions.append("( {0}{1} )".format(cond_combined, cond_op))
                else:
                    subconditions.append(cond_combined)
                # Join subconditions (OR) into the conditions list (AND)
                if len(subconditions) == 1:
                    conditions.append(subconditions[0])
                elif len(subconditions) > 1:
                    conditions.append("( %s )" % " or ".join(subconditions))
                condition_string = " and\n ".join(conditions)
                rule += " condition:\n"
                rule += " %s\n" % condition_string
                rule += "}\n\n"
                rules += rule
                rule_count += 1
            except Exception as e:
                traceback.print_exc()
    if not nosuper and not args.inverse:
        rules += "/* Super Rules ------------------------------------------------------------- */\n\n"
        super_rule_names = []
        print("[+] Generating Super Rules ...")
        printed_combi = {}
        for super_rule in super_rules:
            try:
                rule = ""
                rule_name = ""
                file_list = []
                imphashes = Counter()
                # Build the rule name from all member file names; collect imphashes
                for filePath in super_rule["files"]:
                    (path, file) = os.path.split(filePath)
                    file_list.append(file)
                    fileBase = os.path.splitext(file)[0]
                    cleanedName = fileBase
                    rule_name += "_" + cleanedName
                    imphash = file_info[filePath]["imphash"]
                    if imphash != "-" and imphash != "":
                        imphashes.update([imphash])
                # NOTE(review): Counter.items()[0] is Python 2 only (Python 3
                # returns a non-indexable view); `unique_imphash` is also never
                # used afterwards - confirm intent.
                if len(imphashes) == 1:
                    unique_imphash = imphashes.items()[0][0]
                    if unique_imphash in good_imphashes_db:
                        unique_imphash = ""
                # Shorten rule name; leaves room for the "_<count>" suffixes below
                rule_name = rule_name[:124]
                if rule_name not in super_rule_names:
                    rule_name = "%s_%s" % (rule_name, super_rule_count)
                super_rule_names.append(rule_name)
                file_listing = ", ".join(file_list)
                if re.search(r'^[0-9]', rule_name):
                    rule_name = "sig_" + rule_name
                rule_name = re.sub('[^\w]', r'_', rule_name)
                # NOTE(review): membership is checked in printed_rules but the
                # counter incremented is printed_combi - a name clash with a
                # simple rule would raise KeyError here.
                if rule_name in printed_rules:
                    printed_combi[rule_name] += 1
                    rule_name = rule_name + "_" + str(printed_combi[rule_name])
                else:
                    printed_combi[rule_name] = 1
                rule += "rule %s {\n" % rule_name
                rule += " meta:\n"
                rule += " description = \"%s - from files %s\"\n" % (prefix, file_listing)
                rule += " author = \"%s\"\n" % args.a
                rule += " reference = \"%s\"\n" % reference
                rule += " date = \"%s\"\n" % get_timestamp_basic()
                for i, filePath in enumerate(super_rule["files"]):
                    rule += " hash%s = \"%s\"\n" % (str(i + 1), file_info[filePath]["hash"])
                rule += " strings:\n"
                # NOTE(review): `filePath` is whatever the loop above left
                # behind, i.e. opcodes come from the *last* member file only -
                # confirm this is intended.
                if file_opcodes.get(filePath) is None:
                    tmp_file_opcodes = {}
                else:
                    tmp_file_opcodes = file_opcodes.get(filePath)
                (rule_strings, opcodes_included, string_rule_count, high_scoring_strings) = \
                    get_rule_strings(super_rule["strings"], tmp_file_opcodes)
                rule += rule_strings
                conditions = []
                # General condition over all member files
                file_info_super = {}
                for filePath in super_rule["files"]:
                    file_info_super[filePath] = file_info[filePath]
                condition_strings, pe_module_necessary_gen = generate_general_condition(file_info_super)
                if pe_module_necessary_gen:
                    pe_module_necessary = True
                # String combinations (same scheme as for simple rules)
                cond_op = ""
                cond_hs = ""
                cond_ls = ""
                low_scoring_strings = (string_rule_count - high_scoring_strings)
                if high_scoring_strings > 0:
                    cond_hs = "1 of ($x*)"
                if low_scoring_strings > 0:
                    if low_scoring_strings > 10:
                        if high_scoring_strings > 0:
                            cond_ls = "4 of them"
                        else:
                            cond_ls = "8 of them"
                    else:
                        cond_ls = "all of them"
                cond_combined = "all of them"
                if low_scoring_strings > 0 and high_scoring_strings > 0:
                    cond_combined = "{0} and {1}".format(cond_hs, cond_ls)
                elif low_scoring_strings > 0 and not high_scoring_strings > 0:
                    cond_combined = "{0}".format(cond_ls)
                elif not low_scoring_strings > 0 and high_scoring_strings > 0:
                    cond_combined = "{0}".format(cond_hs)
                if opcodes_included:
                    cond_op = " and all of ($op*)"
                condition2 = "( {0} ){1}".format(cond_combined, cond_op)
                conditions.append(" and ".join([condition_strings, condition2]))
                condition_pe = "all of them"
                conditions.append(condition_pe)
                condition_string = "\n ) or ( ".join(conditions)
                rule += " condition:\n"
                rule += " ( %s )\n" % condition_string
                rule += "}\n\n"
                rules += rule
                super_rule_count += 1
            except Exception as e:
                traceback.print_exc()
    try:
        # The "pe" module import is required by imphash/exports conditions
        if not args.noextras:
            if pe_module_necessary:
                fh.write('import "pe"\n\n')
        if args.o:
            fh.write(rules)
    except Exception as e:
        traceback.print_exc()
    if args.inverse:
        print("[+] Generating inverse rules ...")
        inverse_rules = ""
        print("[+] Applying intelligent filters to string findings ...")
        for fileName in inverse_stats:
            print("[-] Filtering string set for %s ..." % fileName)
            # Replace the original string set with the filtered one
            string_set = inverse_stats[fileName]
            inverse_stats[fileName] = []
            inverse_stats[fileName] = filter_string_set(string_set)
            if fileName not in file_opcodes:
                file_opcodes[fileName] = {}
        fh.write("/* Inverse Rules ------------------------------------------------------------- */\n\n")
        for fileName in inverse_stats:
            try:
                rule = ""
                # Derive a clean rule name from the file name
                cleanedName = fileName.replace(".", "_")
                cleanedName += "_ANOMALY"
                if re.search(r'^[0-9]', cleanedName):
                    cleanedName = "sig_" + cleanedName
                cleanedName = re.sub('[^\w]', r'_', cleanedName)
                if cleanedName in printed_rules:
                    printed_rules[cleanedName] += 1
                    cleanedName = cleanedName + "_" + str(printed_rules[cleanedName])
                else:
                    printed_rules[cleanedName] = 1
                rule += "rule %s {\n" % cleanedName
                rule += " meta:\n"
                rule += " description = \"%s for anomaly detection - file %s\"\n" % (prefix, fileName)
                rule += " author = \"%s\"\n" % args.a
                rule += " reference = \"%s\"\n" % reference
                rule += " date = \"%s\"\n" % get_timestamp_basic()
                for i, hash in enumerate(file_info[fileName]["hashes"]):
                    rule += " hash%s = \"%s\"\n" % (str(i + 1), hash)
                rule += " strings:\n"
                (rule_strings, opcodes_included, string_rule_count, high_scoring_strings) = \
                    get_rule_strings(inverse_stats[fileName], file_opcodes[fileName])
                rule += rule_strings
                # Condition: a file with the right name/location that does NOT
                # contain the expected strings is an anomaly
                folderNames = ""
                if not args.nodirname:
                    folderNames += "and ( filepath matches /"
                    folderNames += "$/ or filepath matches /".join(file_info[fileName]["folder_names"])
                    folderNames += "$/ )"
                condition = "filename == \"%s\" %s and not ( all of them )" % (fileName, folderNames)
                rule += " condition:\n"
                rule += " %s\n" % condition
                rule += "}\n\n"
                inverse_rules += rule
            except Exception as e:
                traceback.print_exc()
        try:
            if args.o:
                fh.write(inverse_rules)
                # NOTE(review): this counter sits after the loop and inside the
                # args.o guard, so it is incremented at most once - it probably
                # belongs in the per-file loop above.
                inverse_rule_count += 1
        except Exception as e:
            traceback.print_exc()
    # Close the rule file ---------------------------------------------
    if args.o:
        try:
            fh.close()
        except Exception as e:
            traceback.print_exc()
    # Print rules to stdout in debug mode -----------------------------
    if args.debug:
        print(rules)
    return (rule_count, inverse_rule_count, super_rule_count)
def get_rule_strings(string_elements, opcode_elements):
    """Build the YARA 'strings:' section lines from scored strings and opcodes.

    Strings whose score exceeds score_highly_specific become $x* variables,
    the rest $s*; opcodes become $op* hex strings. Reads the module-level
    lookup dicts (stringScores, good_strings_db, base64strings, hexEncStrings,
    pestudioMarker, reversedStrings) and CLI args. Returns the tuple
    (rule_strings, opcodes_included, string_rule_count, high_scoring_strings).
    """
    rule_strings = ""
    high_scoring_strings = 0
    string_rule_count = 0
    for i, string in enumerate(string_elements):
        # Collect per-string annotations before emitting the rule line
        is_fullword = True
        initial_string = string
        enc = " ascii"
        base64comment = ""
        hexEncComment = ""
        reversedComment = ""
        fullword = ""
        pestudio_comment = ""
        score_comment = ""
        goodware_comment = ""
        if string in good_strings_db:
            goodware_comment = " /* Goodware String - occured %s times */" % (good_strings_db[string])
        if string in stringScores:
            if args.score:
                score_comment += " /* score: '%.2f'*/" % (stringScores[string])
        else:
            print("NO SCORE: %s" % string)
        # UTF16LE-prefixed strings are emitted as 'wide'
        if string[:8] == "UTF16LE:":
            string = string[8:]
            enc = " wide"
        if string in base64strings:
            base64comment = " /* base64 encoded string '%s' */" % base64strings[string]
        if string in hexEncStrings:
            hexEncComment = " /* hex encoded string '%s' */" % removeNonAsciiDrop(hexEncStrings[string])
        if string in pestudioMarker and args.score:
            pestudio_comment = " /* PEStudio Blacklist: %s */" % pestudioMarker[string]
        if string in reversedStrings:
            reversedComment = " /* reversed goodware string '%s' */" % reversedStrings[string]
        # Hex-encoded payloads are substring matches, not full words
        if is_hex_encoded(string, check_length=False):
            is_fullword = False
        # Truncate oversized strings; a truncated string can't be fullword
        if len(string) >= args.s:
            string = string[:args.s].rstrip("\\")
            is_fullword = False
        if is_fullword:
            fullword = " fullword"
        # $x* = highly specific string, $s* = ordinary string
        if float(stringScores[initial_string]) > score_highly_specific:
            high_scoring_strings += 1
            rule_strings += " $x%s = \"%s\"%s%s%s%s%s%s%s%s\n" % (
                str(i + 1), string, fullword, enc, base64comment, reversedComment, pestudio_comment, score_comment,
                goodware_comment, hexEncComment)
        else:
            rule_strings += " $s%s = \"%s\"%s%s%s%s%s%s%s%s\n" % (
                str(i + 1), string, fullword, enc, base64comment, reversedComment, pestudio_comment, score_comment,
                goodware_comment, hexEncComment)
        # Stop at the maximum number of strings per rule (-rc)
        if (i + 1) >= int(args.rc):
            break
        string_rule_count += 1
    opcodes_included = False
    if len(opcode_elements) > 0:
        rule_strings += "\n"
        for i, opcode in enumerate(opcode_elements):
            rule_strings += " $op%s = { %s }\n" % (str(i), opcode)
            opcodes_included = True
    else:
        if args.opcodes:
            print("[-] Not enough unique opcodes found to include them")
    return rule_strings, opcodes_included, string_rule_count, high_scoring_strings
def get_strings(string_elements):
    """Categorize strings by their encoding markers.

    "UTF16LE:"-prefixed strings go to 'wide' (prefix stripped); strings
    present in the module-level base64strings / hexEncStrings /
    reversedStrings lookups go to their respective buckets; everything
    else is plain 'ascii'.
    """
    strings = {
        "ascii": [],
        "wide": [],
        "base64 encoded": [],
        "hex encoded": [],
        "reversed": []
    }
    # IDIOM: the original used enumerate() but never read the index
    for string in string_elements:
        if string[:8] == "UTF16LE:":
            strings["wide"].append(string[8:])
        elif string in base64strings:
            strings["base64 encoded"].append(string)
        elif string in hexEncStrings:
            strings["hex encoded"].append(string)
        elif string in reversedStrings:
            strings["reversed"].append(string)
        else:
            strings["ascii"].append(string)
    return strings
def write_strings(filePath, strings, output_dir, scores):
    """Write a sample's extracted strings to <output_dir>/<name>_strings.txt.

    strings: dict as returned by get_strings(); scores: if True, prefix each
    string with its yarGen score (read from the module-level stringScores).
    """
    SECTIONS = ["ascii", "wide", "base64 encoded", "hex encoded", "reversed"]
    filename = os.path.basename(filePath)
    strings_filename = os.path.join(output_dir, "%s_strings.txt" % filename)
    print("[+] Writing strings to file %s" % strings_filename)
    output_string = []
    for key in SECTIONS:
        # Skip empty sections
        if len(strings[key]) < 1:
            continue
        output_string.append("%s Strings" % key.upper())
        output_string.append("------------------------------------------------------------------------")
        for string in strings[key]:
            if scores:
                # wide strings are stored with their UTF16LE: prefix in stringScores
                if key == "wide":
                    score = stringScores["UTF16LE:%s" % string]
                else:
                    score = stringScores[string]
                # BUG FIX: append() was called with two arguments and the %
                # operator applied only to `score` - a TypeError at runtime
                output_string.append("%d;%s" % (score, string))
            else:
                output_string.append(string)
        output_string.append("\n")
    with open(strings_filename, "w") as fh:
        fh.write("\n".join(output_string))
def initialize_pestudio_strings():
    """Load the PEStudio blacklist XML into a dict of element lists, keyed
    by category ("strings", "av", "folder", ...)."""
    tree = etree.parse(get_abs_path(PE_STRINGS_FILE))
    # (dict key, XML tag) pairs - note the "strings" key maps to the
    # singular <string> tag
    categories = [
        ("strings", "string"), ("av", "av"), ("folder", "folder"),
        ("os", "os"), ("reg", "reg"), ("guid", "guid"), ("ssdl", "ssdl"),
        ("ext", "ext"), ("agent", "agent"), ("oid", "oid"), ("priv", "priv"),
    ]
    pestudio_strings = {}
    for key, tag in categories:
        pestudio_strings[key] = tree.findall(".//" + tag)
    return pestudio_strings
def initialize_bayes_filter():
    """Train the naive-bayes string classifier on the known-good strings
    shipped in ./lib/good.txt and return the trained Trainer."""
    stringTrainer = Trainer(tokenizer)
    print("[-] Training filter with good strings from ./lib/good.txt")
    with open(get_abs_path("./lib/good.txt"), "r") as fh_goodstrings:
        for line in fh_goodstrings:
            stringTrainer.train(line.rstrip("\n"), "string")
            # Also train on a variant with separators/punctuation spaced out
            spaced_variant = re.sub(r'(\\\\|\/|\-|\.|\_)', ' ', line)
            stringTrainer.train(spaced_variant, "string")
    return stringTrainer
def get_pestudio_score(string):
    """Look up *string* in the PEStudio blacklists (case-insensitive).

    Returns (5, category) on a match in any category except "ext";
    (0, "") otherwise.
    """
    needle = string.lower()
    for category in pestudio_strings:
        for elem in pestudio_strings[category]:
            if elem.text.lower() == needle:
                # extension matches are intentionally not scored
                if category != "ext":
                    return 5, category
    return 0, ""
def get_opcode_string(opcode):
    """Format a hex opcode string as space-separated byte pairs."""
    pairs = []
    pos = 0
    while pos < len(opcode):
        pairs.append(opcode[pos:pos + 2])
        pos += 2
    return ' '.join(pairs)
def get_uint_string(magic):
    """Translate a 2- or 4-byte magic header into a YARA uintN(0) condition.

    Bytes are emitted reversed because YARA's uint16/uint32 read
    little-endian. Returns "" for any other header length.
    NOTE: relies on Python 2's str.encode('hex').
    """
    if len(magic) == 2:
        hex_bytes = [magic[0].encode('hex'), magic[1].encode('hex')]
        return "uint16(0) == 0x{0}{1}".format(hex_bytes[1], hex_bytes[0])
    if len(magic) == 4:
        hex_bytes = [magic[0].encode('hex'), magic[1].encode('hex'),
                     magic[2].encode('hex'), magic[3].encode('hex')]
        return "uint32(0) == 0x{0}{1}{2}{3}".format(
            hex_bytes[3], hex_bytes[2], hex_bytes[1], hex_bytes[0])
    return ""
def get_file_range(size):
    """Return a YARA "filesize < XKB" condition for a sample of *size* bytes.

    The sample size is multiplied by the CLI multiplier args.fm, floored at
    1024 bytes, converted to KB and rounded to a "nice" value based on its
    digit count. Returns "" on any error.
    """
    size_string = ""
    try:
        # Maximum - args.fm times the sample size
        max_size_b = size * args.fm
        # Minimum: 1 KB
        if max_size_b < 1024:
            max_size_b = 1024
        # In KB (integer division under Python 2 - this codebase targets
        # Python 2, cf. iteritems/encode('hex') elsewhere in the file)
        max_size = max_size_b / 1024
        max_size_kb = max_size
        # Round to a clean value depending on the magnitude
        if len(str(max_size)) == 2:
            max_size = int(round(max_size, -1))
        elif len(str(max_size)) == 3:
            max_size = int(round(max_size, -2))
        elif len(str(max_size)) == 4:
            max_size = int(round(max_size, -3))
        elif len(str(max_size)) == 5:
            max_size = int(round(max_size, -3))
        size_string = "filesize < {0}KB".format(max_size)
        if args.debug:
            print("File Size Eval: SampleSize (b): {0} SizeWithMultiplier (b/Kb): {1} / {2} RoundedSize: {3}".format(
                str(size), str(max_size_b), str(max_size_kb), str(max_size)))
    except Exception as e:
        if args.debug:
            traceback.print_exc()
        pass
    finally:
        # NOTE: `return` inside `finally` swallows any in-flight exception,
        # so this function never raises
        return size_string
def get_timestamp_basic(date_obj=None):
    """Format *date_obj* (default: now) as an ISO date string YYYY-MM-DD."""
    if not date_obj:
        date_obj = datetime.datetime.now()
    return date_obj.strftime("%Y-%m-%d")
def is_ascii_char(b, padding_allowed=False):
    """Return 1 if *b* is a printable ASCII char (0x20-0x7e), else 0.

    With padding_allowed=True, a NUL byte also counts (UTF-16LE padding).
    """
    code = ord(b)
    if 31 < code < 127:
        return 1
    if padding_allowed and code == 0:
        return 1
    return 0
def is_ascii_string(string, padding_allowed=False):
    """Return 1 if every char of *string* is printable ASCII, else 0.

    With padding_allowed=True, NUL bytes are tolerated as well (so that
    UTF-16LE-decoded data passes). An empty string returns 1.
    """
    for ch in string:
        code = ord(ch)
        acceptable = (31 < code < 127) or (padding_allowed and code == 0)
        if not acceptable:
            return 0
    return 1
def is_base_64(s):
    """Check whether *s* looks like base64: length divisible by 4 and only
    base64 alphabet chars plus up to two '=' padding chars.

    Returns False on a bad length, otherwise the re.match result (a match
    object or None) - i.e. a truthy/falsy value, not a strict bool.
    """
    if len(s) % 4 != 0:
        return False
    return re.match('^[A-Za-z0-9+/]+[=]{0,2}$', s)
def is_hex_encoded(s, check_length=True):
    """Check whether *s* consists only of hex digits.

    With check_length=True (default) the length must also be even, so the
    string can be decoded to whole bytes.
    """
    if not re.match('^[A-Fa-f0-9]+$', s):
        return False
    if check_length and len(s) % 2 != 0:
        return False
    return True
def extract_hex_strings(s):
    """Extract candidate hex-encoded strings (>= 10 hex chars) from *s*.

    Long hex runs are additionally split on 0000/0d0a delimiters, and chunks
    that decode (Python 2 str.decode('hex')) to printable ASCII are kept.
    The result may contain duplicates.
    """
    strings = []
    hex_strings = re.findall("([a-fA-F0-9]{10,})", s)
    # Split the raw runs on NUL (0000) and CRLF (0d0a) delimiters and pick
    # out UTF-16LE-looking sequences (00-interleaved printable ASCII)
    for string in list(hex_strings):
        hex_strings += string.split('0000')
        hex_strings += string.split('0d0a')
        hex_strings += re.findall(r'((?:0000|002[a-f0-9]|00[3-9a-f][0-9a-f]){6,})', string, re.IGNORECASE)
    # De-duplicate (NOTE: set() makes the result ordering non-deterministic)
    hex_strings = list(set(hex_strings))
    # Keep sub-parts between 00 padding bytes
    for string in hex_strings:
        for x in string.split('00'):
            if len(x) > 10:
                strings.append(x)
    # Keep whole runs whose non-padding bytes decode to printable ASCII
    for string in hex_strings:
        try:
            # Skip strings that can't be grouped into whole byte pairs
            if len(string) % 2 != 0 or len(string) < 8:
                continue
            # Python 2 only: str.decode('hex')
            dec = string.replace('00', '').decode('hex')
            if is_ascii_string(dec, padding_allowed=True):
                strings.append(string)
        except Exception as e:
            traceback.print_exc()
    return strings
def removeNonAsciiDrop(string):
    """Return *string* with every non-printable/non-ASCII char dropped.

    Returns the literal string "error" if filtering raises unexpectedly.
    """
    result = "error"
    try:
        result = "".join(ch for ch in string if 31 < ord(ch) < 127)
    except Exception as e:
        traceback.print_exc()
    return result
def save(object, filename, protocol=0):
    """Serialize *object* with pickle and write it gzip-compressed to *filename*.

    protocol: pickle protocol number (default 0, ASCII-compatible).
    """
    # Context manager guarantees the gzip stream is flushed and closed even
    # if pickling or the write fails (the original leaked the handle on error)
    with gzip.GzipFile(filename, 'wb') as fh:
        fh.write(pickle.dumps(object, protocol))
def load(filename):
    """Read a gzip-compressed pickle (as written by save()) and return the object."""
    # The original read in a `while 1` loop accumulating into a str with +=
    # (quadratic, and broken on Python 3 where read() yields bytes) and
    # leaked the handle on error. GzipFile.read() already returns the full
    # decompressed payload.
    with gzip.GzipFile(filename, 'rb') as fh:
        return pickle.loads(fh.read())
def update_databases():
    """Download the goodware string/imphash/export databases into ./dbs/.

    Exits the program (sys.exit(1)) if the directory can't be created or a
    download fails. Uses Python 2 APIs (dict.iteritems, urllib.URLopener).
    """
    # Preparation: make sure the target directory exists
    try:
        dbDir = './dbs/'
        if not os.path.exists(dbDir):
            os.makedirs(dbDir)
    except Exception as e:
        if args.debug:
            traceback.print_exc()
        print("Error while creating the database directory ./dbs")
        sys.exit(1)
    # Download every database file listed in REPO_URLS
    try:
        for filename, repo_url in REPO_URLS.iteritems():
            print("Downloading %s from %s ..." % (filename, repo_url))
            fileDownloader = urllib.URLopener()
            fileDownloader.retrieve(repo_url, "./dbs/%s" % filename)
    except Exception as e:
        if args.debug:
            traceback.print_exc()
        print("Error while downloading the database file - check your Internet connection")
        print("Alterntive download link: https://drive.google.com/drive/folders/0B2S_IOa0MiOHS0xmekR6VWRhZ28")
        print("Download the files and place them into the ./dbs/ folder")
        sys.exit(1)
def processSampleDir(targetDir):
    """Run the full yarGen pipeline on a sample directory: parse the files,
    evaluate strings/opcodes, generate the rule file and print a summary."""
    # NOTE(review): these assignments only create *locals* that are never
    # read in this function; they look like they were meant to reset the
    # module-level dicts of the same names (a `global` declaration seems to
    # be missing) - confirm intent.
    base64strings = {}
    hexEncStrings = {}
    reversedStrings = {}
    pestudioMarker = {}
    stringScores = {}
    # Extract strings/opcodes and file metadata from all samples
    (sample_string_stats, sample_opcode_stats, file_info) = \
        parse_sample_dir(targetDir, args.nr, generateInfo=True, onlyRelevantExtensions=args.oe)
    # Evaluate the string statistics (scoring, combinations, super rules)
    (file_strings, file_opcodes, combinations, super_rules, inverse_stats) = \
        sample_string_evaluation(sample_string_stats, sample_opcode_stats, file_info)
    # Generate and write the YARA rules
    (rule_count, inverse_rule_count, super_rule_count) = \
        generate_rules(file_strings, file_opcodes, super_rules, file_info, inverse_stats)
    if args.inverse:
        print("[=] Generated %s INVERSE rules." % str(inverse_rule_count))
    else:
        print("[=] Generated %s SIMPLE rules." % str(rule_count))
        if not nosuper:
            print("[=] Generated %s SUPER rules." % str(super_rule_count))
        print("[=] All rules written to %s" % args.o)
def emptyFolder(dir):
    """Delete all regular files directly inside *dir* (subdirectories are
    left untouched); errors are printed, not raised."""
    for entry in os.listdir(dir):
        target = os.path.join(dir, entry)
        try:
            if os.path.isfile(target):
                print("[!] Removing %s ..." % target)
                os.unlink(target)
        except Exception as e:
            print(e)
def getReference(ref):
    """Return *ref* itself, or - when *ref* is an existing file path - the
    (truncated) file contents."""
    if not os.path.exists(ref):
        return ref
    reference = getFileContent(ref)
    print("[+] Read reference from file %s > %s" % (ref, reference))
    return reference
def getIdentifier(id, path):
    """Return the rule-set identifier: the contents of the file *id* when it
    exists, otherwise the base name of the sample directory *path*."""
    if id != "not set" and os.path.exists(id):
        identifier = getFileContent(id)
        print("[+] Read identifier from file %s > %s" % (id, identifier))
        return identifier
    # Default: derive the identifier from the sample directory name
    return os.path.basename(path.rstrip('/'))
def getPrefix(prefix, identifier):
    """Return the rule description prefix, falling back to *identifier*
    when the caller kept the default prefix."""
    return identifier if prefix == "Auto-generated rule" else prefix
def getFileContent(file):
    """Return up to the first 1024 characters of *file*, or the string
    "not found" when the file can't be read."""
    try:
        with open(file) as fh:
            return fh.read(1024)
    except Exception as e:
        return "not found"
def signal_handler(signal_name, frame):
    """SIGINT handler: announce the interruption and exit cleanly (code 0)."""
    print("> yarGen's work has been interrupted")
    sys.exit(0)
def print_welcome():
    """Print the yarGen ASCII-art banner, author credit and version
    (reads the module-level ``__version__``)."""
    print("###############################################################################")
    print(" ______")
    print(" __ ______ ______/ ____/__ ____")
    print(" / / / / __ `/ ___/ / __/ _ \/ __ \\")
    print(" / /_/ / /_/ / / / /_/ / __/ / / /")
    print(" \__, /\__,_/_/ \____/\___/_/ /_/")
    print(" /____/")
    print(" ")
    print(" Yara Rule Generator by Florian Roth")
    print(" December 2018")
    print(" Version %s" % __version__)
    print(" ")
    print("###############################################################################")
# MAIN ################################################################
if __name__ == '__main__':

    # Signal handler for CTRL+C
    signal_module.signal(signal_module.SIGINT, signal_handler)

    # Parse Arguments
    parser = argparse.ArgumentParser(description='yarGen')

    group_creation = parser.add_argument_group('Rule Creation')
    group_creation.add_argument('-m', help='Path to scan for malware')
    group_creation.add_argument('-y', help='Minimum string length to consider (default=8)', metavar='min-size',
                                default=8)
    group_creation.add_argument('-z', help='Minimum score to consider (default=0)', metavar='min-score', default=0)
    group_creation.add_argument('-x', help='Score required to set string as \'highly specific string\' (default: 30)',
                                metavar='high-scoring', default=30)
    group_creation.add_argument('-w', help='Minimum number of strings that overlap to create a super rule (default: 5)',
                                metavar='superrule-overlap', default=5)
    group_creation.add_argument('-s', help='Maximum length to consider (default=128)', metavar='max-size', default=128)
    group_creation.add_argument('-rc', help='Maximum number of strings per rule (default=20, intelligent filtering '
                                            'will be applied)', metavar='maxstrings', default=20)
    group_creation.add_argument('--excludegood', help='Force the exclude all goodware strings', action='store_true',
                                default=False)

    group_output = parser.add_argument_group('Rule Output')
    group_output.add_argument('-o', help='Output rule file', metavar='output_rule_file', default='yargen_rules.yar')
    group_output.add_argument('-e', help='Output directory for string exports', metavar='output_dir_strings', default='')
    group_output.add_argument('-a', help='Author Name', metavar='author', default='yarGen Rule Generator')
    group_output.add_argument('-r', help='Reference (can be string or text file)', metavar='ref',
                              default='https://github.com/Neo23x0/yarGen')
    group_output.add_argument('-l', help='License', metavar='lic', default='')
    group_output.add_argument('-p', help='Prefix for the rule description', metavar='prefix',
                              default='Auto-generated rule')
    group_output.add_argument('-b', help='Text file from which the identifier is read (default: last folder name in '
                                         'the full path, e.g. "myRAT" if -m points to /mnt/mal/myRAT)',
                              metavar='identifier',
                              default='not set')
    group_output.add_argument('--score', help='Show the string scores as comments in the rules', action='store_true',
                              default=False)
    # NOTE(review): help text below duplicates the one of --score — probably
    # meant to describe showing the matched strings instead.
    group_output.add_argument('--strings', help='Show the string scores as comments in the rules', action='store_true',
                              default=False)
    group_output.add_argument('--nosimple', help='Skip simple rule creation for files included in super rules',
                              action='store_true', default=False)
    group_output.add_argument('--nomagic', help='Don\'t include the magic header condition statement',
                              action='store_true', default=False)
    group_output.add_argument('--nofilesize', help='Don\'t include the filesize condition statement',
                              action='store_true', default=False)
    group_output.add_argument('-fm', help='Multiplier for the maximum \'filesize\' condition value (default: 3)',
                              default=3)
    group_output.add_argument('--globalrule', help='Create global rules (improved rule set speed)',
                              action='store_true', default=False)
    group_output.add_argument('--nosuper', action='store_true', default=False, help='Don\'t try to create super rules '
                                                                                    'that match against various files')

    group_db = parser.add_argument_group('Database Operations')
    group_db.add_argument('--update', action='store_true', default=False, help='Update the local strings and opcodes '
                                                                               'dbs from the online repository')
    group_db.add_argument('-g', help='Path to scan for goodware (dont use the database shipped with yaraGen)')
    group_db.add_argument('-u', action='store_true', default=False, help='Update local standard goodware database with '
                                                                         'a new analysis result (used with -g)')
    group_db.add_argument('-c', action='store_true', default=False, help='Create new local goodware database '
                                                                         '(use with -g and optionally -i "identifier")')
    group_db.add_argument('-i', default="", help='Specify an identifier for the newly created databases '
                                                 '(good-strings-identifier.db, good-opcodes-identifier.db)')

    group_general = parser.add_argument_group('General Options')
    # NOTE(review): help text concatenation below is missing a space before
    # 'WARNING'.
    group_general.add_argument('--dropzone', action='store_true', default=False,
                               help='Dropzone mode - monitors a directory [-m] for new samples to process'
                                    'WARNING: Processed files will be deleted!')
    group_general.add_argument('--nr', action='store_true', default=False, help='Do not recursively scan directories')
    group_general.add_argument('--oe', action='store_true', default=False, help='Only scan executable extensions EXE, '
                                                                                'DLL, ASP, JSP, PHP, BIN, INFECTED')
    group_general.add_argument('-fs', help='Max file size in MB to analyze (default=10)', metavar='size-in-MB',
                               default=10)
    group_general.add_argument('--noextras', action='store_true', default=False,
                               help='Don\'t use extras like Imphash or PE header specifics')
    group_general.add_argument('--debug', action='store_true', default=False, help='Debug output')
    group_general.add_argument('--trace', action='store_true', default=False, help='Trace output')

    group_opcode = parser.add_argument_group('Other Features')
    group_opcode.add_argument('--opcodes', action='store_true', default=False, help='Do use the OpCode feature '
                                                                                    '(use this if not enough high '
                                                                                    'scoring strings can be found)')
    group_opcode.add_argument('-n', help='Number of opcodes to add if not enough high scoring string could be found '
                                         '(default=3)', metavar='opcode-num', default=3)

    group_inverse = parser.add_argument_group('Inverse Mode (unstable)')
    group_inverse.add_argument('--inverse', help=argparse.SUPPRESS, action='store_true', default=False)
    group_inverse.add_argument('--nodirname', help=argparse.SUPPRESS, action='store_true', default=False)
    group_inverse.add_argument('--noscorefilter', help=argparse.SUPPRESS, action='store_true', default=False)

    args = parser.parse_args()

    # Print Welcome
    print_welcome()

    # At least one mode of operation (--update, -m or -g) must be selected.
    if not args.update and not args.m and not args.g:
        parser.print_help()
        print("")
        print("[E] You have to select --update to update yarGens database or -m for signature generation or -g for the "
              "creation of goodware string collections "
              "(see https://github.com/Neo23x0/yarGen#examples for more details)")
        sys.exit(1)

    # Update
    if args.update:
        update_databases()
        print("[+] Updated databases - you can now start creating YARA rules")
        sys.exit(0)

    # Typical input errors
    if args.m:
        if os.path.isfile(args.m):
            print("[E] Input is a file, please use a directory instead (-m path)")
            sys.exit(0)

    # Opcodes evaluation or not
    use_opcodes = False
    if args.opcodes:
        use_opcodes = True

    # Read PEStudio string list
    pestudio_strings = {}
    pestudio_available = False

    # Super Rule Generation
    nosuper = args.nosuper

    # Identifier
    sourcepath = args.m
    if args.g:
        sourcepath = args.g
    identifier = getIdentifier(args.b, sourcepath)
    print("[+] Using identifier '%s'" % identifier)

    # Reference
    reference = getReference(args.r)
    print("[+] Using reference '%s'" % reference)

    # Prefix
    prefix = getPrefix(args.p, identifier)
    print("[+] Using prefix '%s'" % prefix)

    # PEStudio strings improve scoring when available (needs lxml).
    if os.path.isfile(get_abs_path(PE_STRINGS_FILE)) and lxml_available:
        print("[+] Processing PEStudio strings ...")
        pestudio_strings = initialize_pestudio_strings()
        pestudio_available = True
    else:
        if lxml_available:
            print("\nTo improve the analysis process please download the awesome PEStudio tool by marc @ochsenmeier " \
                  "from http://winitor.com and place the file 'strings.xml' in the ./3rdparty directory.\n")
            time.sleep(5)

    # Highly specific string score
    score_highly_specific = int(args.x)

    # Scan goodware files
    if args.g:
        print("[+] Processing goodware files ...")
        good_strings_db, good_opcodes_db, good_imphashes_db, good_exports_db = \
            parse_good_dir(args.g, args.nr, args.oe)

        # Update existing databases
        if args.u:
            try:
                print("[+] Updating databases ...")

                # Evaluate the database identifiers
                db_identifier = ""
                if args.i != "":
                    db_identifier = "-%s" % args.i
                strings_db = "./dbs/good-strings%s.db" % db_identifier
                opcodes_db = "./dbs/good-opcodes%s.db" % db_identifier
                imphashes_db = "./dbs/good-imphashes%s.db" % db_identifier
                exports_db = "./dbs/good-exports%s.db" % db_identifier

                # Strings -----------------------------------------------------
                print("[+] Updating %s ..." % strings_db)
                good_pickle = load(get_abs_path(strings_db))
                print("Old string database entries: %s" % len(good_pickle))
                good_pickle.update(good_strings_db)
                print("New string database entries: %s" % len(good_pickle))
                save(good_pickle, strings_db)

                # Opcodes -----------------------------------------------------
                print("[+] Updating %s ..." % opcodes_db)
                good_opcode_pickle = load(get_abs_path(opcodes_db))
                print("Old opcode database entries: %s" % len(good_opcode_pickle))
                good_opcode_pickle.update(good_opcodes_db)
                print("New opcode database entries: %s" % len(good_opcode_pickle))
                save(good_opcode_pickle, opcodes_db)

                # Imphashes ---------------------------------------------------
                # NOTE(review): the progress messages below say "opcode"
                # although they report imphash/export entry counts.
                print("[+] Updating %s ..." % imphashes_db)
                good_imphashes_pickle = load(get_abs_path(imphashes_db))
                print("Old opcode database entries: %s" % len(good_imphashes_pickle))
                good_imphashes_pickle.update(good_imphashes_db)
                print("New opcode database entries: %s" % len(good_imphashes_pickle))
                save(good_imphashes_pickle, imphashes_db)

                # Exports -----------------------------------------------------
                print("[+] Updating %s ..." % exports_db)
                good_exports_pickle = load(get_abs_path(exports_db))
                print("Old opcode database entries: %s" % len(good_exports_pickle))
                good_exports_pickle.update(good_exports_db)
                print("New opcode database entries: %s" % len(good_exports_pickle))
                save(good_exports_pickle, exports_db)

            except Exception as e:
                traceback.print_exc()

        # Create new databases
        if args.c:
            print("[+] Creating local database ...")
            # Evaluate the database identifiers
            db_identifier = ""
            if args.i != "":
                db_identifier = "-%s" % args.i
            strings_db = "./dbs/good-strings%s.db" % db_identifier
            opcodes_db = "./dbs/good-opcodes%s.db" % db_identifier
            imphashes_db = "./dbs/good-imphashes%s.db" % db_identifier
            exports_db = "./dbs/good-exports%s.db" % db_identifier

            # Creating the databases
            print("[+] Using '%s' as filename for newly created strings database" % strings_db)
            print("[+] Using '%s' as filename for newly created opcodes database" % opcodes_db)
            print("[+] Using '%s' as filename for newly created opcodes database" % imphashes_db)
            print("[+] Using '%s' as filename for newly created opcodes database" % exports_db)

            try:
                # NOTE(review): raw_input only exists on Python 2 — this block
                # would raise NameError on Python 3 (use input() there).
                if os.path.isfile(strings_db):
                    raw_input("File %s alread exists. Press enter to proceed or CTRL+C to exit." % strings_db)
                    os.remove(strings_db)
                if os.path.isfile(opcodes_db):
                    raw_input("File %s alread exists. Press enter to proceed or CTRL+C to exit." % opcodes_db)
                    os.remove(opcodes_db)
                if os.path.isfile(imphashes_db):
                    raw_input("File %s alread exists. Press enter to proceed or CTRL+C to exit." % imphashes_db)
                    os.remove(imphashes_db)
                if os.path.isfile(exports_db):
                    raw_input("File %s alread exists. Press enter to proceed or CTRL+C to exit." % exports_db)
                    os.remove(exports_db)

                # Strings
                good_pickle = Counter()
                good_pickle = good_strings_db
                # Opcodes
                good_op_pickle = Counter()
                good_op_pickle = good_opcodes_db
                # Imphashes
                good_imphashes_pickle = Counter()
                good_imphashes_pickle = good_imphashes_db
                # Exports
                good_exports_pickle = Counter()
                good_exports_pickle = good_exports_db

                # Save
                save(good_pickle, strings_db)
                save(good_op_pickle, opcodes_db)
                save(good_imphashes_pickle, imphashes_db)
                save(good_exports_pickle, exports_db)

                print("New database with %d string, %d opcode, %d imphash, %d export entries created. " \
                      "(remember to use --opcodes to extract opcodes from the samples and create the opcode databases)"\
                      % (len(good_strings_db), len(good_opcodes_db), len(good_imphashes_db), len(good_exports_db)))
            except Exception as e:
                traceback.print_exc()

    # Analyse malware samples and create rules
    else:
        if use_opcodes:
            print("[+] Reading goodware strings from database 'good-strings.db' and 'good-opcodes.db' ...")
            print(" (This could take some time and uses at least 6 GB of RAM)")
        else:
            print("[+] Reading goodware strings from database 'good-strings.db' ...")
            print(" (This could take some time and uses at least 3 GB of RAM)")

        good_strings_db = Counter()
        good_opcodes_db = Counter()
        good_imphashes_db = Counter()
        good_exports_db = Counter()

        opcodes_num = 0
        strings_num = 0
        imphash_num = 0
        exports_num = 0

        # Initialize all databases
        for file in os.listdir(get_abs_path("./dbs/")):
            if not file.endswith(".db"):
                continue
            filePath = os.path.join("./dbs/", file)

            # String databases
            if file.startswith("good-strings"):
                try:
                    print("[+] Loading %s ..." % filePath)
                    good_pickle = load(get_abs_path(filePath))
                    good_strings_db.update(good_pickle)
                    print("[+] Total: %s / Added %d entries" % (
                        len(good_strings_db), len(good_strings_db) - strings_num))
                    strings_num = len(good_strings_db)
                except Exception as e:
                    traceback.print_exc()

            # Opcode databases
            if file.startswith("good-opcodes"):
                try:
                    if use_opcodes:
                        print("[+] Loading %s ..." % filePath)
                        good_op_pickle = load(get_abs_path(filePath))
                        good_opcodes_db.update(good_op_pickle)
                        print("[+] Total: %s (removed duplicates) / Added %d entries" % (
                            len(good_opcodes_db), len(good_opcodes_db) - opcodes_num))
                        opcodes_num = len(good_opcodes_db)
                except Exception as e:
                    # A broken opcode database disables the opcode feature.
                    use_opcodes = False
                    traceback.print_exc()

            # Imphash databases
            if file.startswith("good-imphash"):
                try:
                    print("[+] Loading %s ..." % filePath)
                    good_imphashes_pickle = load(get_abs_path(filePath))
                    good_imphashes_db.update(good_imphashes_pickle)
                    print("[+] Total: %s / Added %d entries" % (
                        len(good_imphashes_db), len(good_imphashes_db) - imphash_num))
                    imphash_num = len(good_imphashes_db)
                except Exception as e:
                    traceback.print_exc()

            # Export databases
            if file.startswith("good-exports"):
                try:
                    print("[+] Loading %s ..." % filePath)
                    good_exports_pickle = load(get_abs_path(filePath))
                    good_exports_db.update(good_exports_pickle)
                    print("[+] Total: %s / Added %d entries" % (
                        len(good_exports_db), len(good_exports_db) - exports_num))
                    exports_num = len(good_exports_db)
                except Exception as e:
                    traceback.print_exc()

        if use_opcodes and len(good_opcodes_db) < 1:
            print("[E] Missing goodware opcode databases."
                  " Please run 'yarGen.py --update' to retrieve the newest database set.")
            use_opcodes = False

        if len(good_exports_db) < 1 and len(good_imphashes_db) < 1:
            # NOTE(review): disabling use_opcodes for missing imphash/export
            # data looks copy-pasted — confirm that is the intended flag.
            print("[E] Missing goodware imphash/export databases. "
                  " Please run 'yarGen.py --update' to retrieve the newest database set.")
            use_opcodes = False

        if len(good_strings_db) < 1 and not args.c:
            print("[E] Error - no goodware databases found. "
                  " Please run 'yarGen.py --update' to retrieve the newest database set.")
            sys.exit(1)

        # If malware directory given
        if args.m:
            # Initialize Bayes Trainer (we will use the goodware string database for this)
            print("[+] Initializing Bayes Filter ...")
            stringTrainer = initialize_bayes_filter()

            # Deactivate super rule generation if there's only a single file in the folder
            if len(os.listdir(args.m)) < 2:
                nosuper = True

            # Global caches consumed during rule generation.
            base64strings = {}
            reversedStrings = {}
            hexEncStrings = {}
            pestudioMarker = {}
            stringScores = {}

            if args.dropzone:
                # Dropzone mode: poll the folder forever; each batch of
                # samples is processed and then deleted.
                print("Monitoring %s for new sample files (processed samples will be removed)" % args.m)
                while(True):
                    if len(os.listdir(args.m)) > 0:
                        # Super rules need at least two samples in the batch.
                        if len(os.listdir(args.m)) < 2:
                            nosuper = True
                        else:
                            nosuper = False
                        # Read a new identifier
                        identifier = getIdentifier(args.b, args.m)
                        # Read a new reference
                        reference = getReference(args.r)
                        # Generate a new description prefix
                        prefix = getPrefix(args.p, identifier)
                        # Process the samples
                        processSampleDir(args.m)
                        # Delete all samples from the dropzone folder
                        emptyFolder(args.m)
                    time.sleep(1)
            else:
                # Scan malware files
                print("[+] Processing malware files ...")
                processSampleDir(args.m)

    print("[+] yarGen run finished")
1c2f6afbc8e9ef22187334b1035854ca8b2f15e1 | 392 | py | Python | tests/abilities/test_health.py | vishwatejharer/warriorpy | 57a8dccfe23796ef213288972b20a7e883e79b24 | [
"MIT"
] | 46 | 2015-02-02T19:32:05.000Z | 2022-03-16T16:24:14.000Z | tests/abilities/test_health.py | vishwatejharer/warriorpy | 57a8dccfe23796ef213288972b20a7e883e79b24 | [
"MIT"
] | 5 | 2015-07-06T15:26:43.000Z | 2020-04-21T08:12:04.000Z | tests/abilities/test_health.py | vishwatejharer/warriorpy | 57a8dccfe23796ef213288972b20a7e883e79b24 | [
"MIT"
] | 10 | 2015-02-10T02:56:22.000Z | 2021-05-15T22:37:38.000Z | import unittest
from pythonwarrior.units.warrior import Warrior
from pythonwarrior.abilities.health import Health
class TestHealth(unittest.TestCase):
    """Unit tests for the warrior's Health ability."""

    def setUp(self):
        # Fresh warrior with a Health ability attached for every test.
        self.warrior = Warrior()
        self.health = Health(self.warrior)

    def test_should_return_the_amount_of_health(self):
        # perform() reports the warrior's current health value.
        self.warrior.health = 10
        self.assertEqual(10, self.health.perform())
| 26.133333 | 54 | 0.732143 | import unittest
from pythonwarrior.units.warrior import Warrior
from pythonwarrior.abilities.health import Health
class TestHealth(unittest.TestCase):
def setUp(self):
self.warrior = Warrior()
self.health = Health(self.warrior)
def test_should_return_the_amount_of_health(self):
self.warrior.health = 10
self.assertEqual(10, self.health.perform())
| true | true |
1c2f6b8f8a0f1859eee2807fbf87a4134a1b435c | 1,886 | py | Python | reflex_behaviors/src/turn_detector.py | ehwa009/social_mind | 0507f47b29371ef167b30b60bdd446fde118b626 | [
"Apache-2.0"
] | 7 | 2018-08-09T02:40:58.000Z | 2022-02-07T03:32:23.000Z | reflex_behaviors/src/turn_detector.py | ehwa009/social_mind | 0507f47b29371ef167b30b60bdd446fde118b626 | [
"Apache-2.0"
] | null | null | null | reflex_behaviors/src/turn_detector.py | ehwa009/social_mind | 0507f47b29371ef167b30b60bdd446fde118b626 | [
"Apache-2.0"
] | 2 | 2018-08-09T02:41:06.000Z | 2018-08-27T03:54:10.000Z | #!/usr/bin/python
#-*- encoding: utf8 -*-
import rospy
from std_msgs.msg import Bool, String, Empty
from mind_msgs.msg import RaisingEvents, SetIdleMotion
class TurnDetectorNode:
    """ROS node that toggles speech recognition and idle motion depending
    on whose conversational turn it is (robot speaking vs. user speaking).
    """

    def __init__(self):
        # Events raised elsewhere in the system; the handler is a no-op.
        rospy.Subscriber('raising_events', RaisingEvents, self.handle_raising_events)
        # True while the robot is talking, False when it has finished.
        rospy.Subscriber('robot_is_saying', Bool, self.handle_robot_is_saying)

        self.pub_start_speech_recognition = rospy.Publisher('sp_speech_recognizer/start', Empty, queue_size=10)
        self.pub_stop_speech_recognition = rospy.Publisher('sp_speech_recognizer/stop', Empty, queue_size=10)
        self.pub_set_idle_motion = rospy.Publisher('set_enable_idle_motion', SetIdleMotion, queue_size=10)

        # Initial state: idle motion on, no leaning forward.
        # NOTE(review): publishing right after advertising can be dropped
        # before subscribers connect — confirm this initial message arrives.
        msg = SetIdleMotion()
        msg.enabled = True
        msg.with_leaning_forward = False
        self.pub_set_idle_motion.publish(msg)

        rospy.loginfo('\033[92m[%s]\033[0m initialized...'%rospy.get_name())

    def handle_raising_events(self, msg):
        # Intentionally a no-op for now.
        pass

    def handle_robot_is_saying(self, msg):
        """Switch turn-taking state; msg.data is True while the robot speaks."""
        if msg.data:
            # Robot started saying: freeze idle motion and stop the speech
            # recognizer so the robot does not hear itself.
            rospy.loginfo('\033[92m[%s]\033[0m Robot\'s Turn...'%rospy.get_name())
            msg = SetIdleMotion()
            msg.enabled = False
            msg.with_leaning_forward = False
            self.pub_set_idle_motion.publish(msg)
            self.pub_stop_speech_recognition.publish()
        else:
            # Robot completed saying: resume idle motion (leaning toward the
            # user) and restart speech recognition after a short settle delay.
            rospy.loginfo('\033[92m[%s]\033[0m User\'s Turn...'%rospy.get_name())
            msg = SetIdleMotion()
            msg.enabled = True
            msg.with_leaning_forward = True
            self.pub_set_idle_motion.publish(msg)
            rospy.sleep(0.1)
            self.pub_start_speech_recognition.publish()
if __name__ == '__main__':
    # Standard ROS node entry point: register, instantiate and spin forever.
    rospy.init_node('turn_detector', anonymous=False)
    m = TurnDetectorNode()
    rospy.spin()
import rospy
from std_msgs.msg import Bool, String, Empty
from mind_msgs.msg import RaisingEvents, SetIdleMotion
class TurnDetectorNode:
def __init__(self):
rospy.Subscriber('raising_events', RaisingEvents, self.handle_raising_events)
rospy.Subscriber('robot_is_saying', Bool, self.handle_robot_is_saying)
self.pub_start_speech_recognition = rospy.Publisher('sp_speech_recognizer/start', Empty, queue_size=10)
self.pub_stop_speech_recognition = rospy.Publisher('sp_speech_recognizer/stop', Empty, queue_size=10)
self.pub_set_idle_motion = rospy.Publisher('set_enable_idle_motion', SetIdleMotion, queue_size=10)
msg = SetIdleMotion()
msg.enabled = True
msg.with_leaning_forward = False
self.pub_set_idle_motion.publish(msg)
rospy.loginfo('\033[92m[%s]\033[0m initialized...'%rospy.get_name())
def handle_raising_events(self, msg):
pass
def handle_robot_is_saying(self, msg):
if msg.data:
rospy.loginfo('\033[92m[%s]\033[0m Robot\'s Turn...'%rospy.get_name())
msg = SetIdleMotion()
msg.enabled = False
msg.with_leaning_forward = False
self.pub_set_idle_motion.publish(msg)
self.pub_stop_speech_recognition.publish()
else:
# Robot completed saying
rospy.loginfo('\033[92m[%s]\033[0m User\'s Turn...'%rospy.get_name())
msg = SetIdleMotion()
msg.enabled = True
msg.with_leaning_forward = True
self.pub_set_idle_motion.publish(msg)
rospy.sleep(0.1)
self.pub_start_speech_recognition.publish()
if __name__ == '__main__':
rospy.init_node('turn_detector', anonymous=False)
m = TurnDetectorNode()
rospy.spin() | true | true |
1c2f6caf6f6e11179416b9ab70168e3b0a53bc9c | 9,281 | py | Python | dsl_parser/tests/test_deployment_update.py | mistio/cloudify-common | 3b706ba31a3371052fbdd12486d4a0befbcf491b | [
"Apache-2.0"
] | 6 | 2018-10-13T20:36:40.000Z | 2021-07-04T17:19:13.000Z | dsl_parser/tests/test_deployment_update.py | mistio/cloudify-common | 3b706ba31a3371052fbdd12486d4a0befbcf491b | [
"Apache-2.0"
] | 97 | 2018-05-25T12:10:19.000Z | 2022-03-30T10:16:40.000Z | dsl_parser/tests/test_deployment_update.py | mistio/cloudify-common | 3b706ba31a3371052fbdd12486d4a0befbcf491b | [
"Apache-2.0"
] | 15 | 2018-10-13T20:36:42.000Z | 2021-09-06T15:19:11.000Z | ########
# Copyright (c) 2018 Cloudify Platform Ltd. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
from dsl_parser import constants
from dsl_parser.multi_instance import modify_deployment
from dsl_parser.tests.abstract_test_parser import AbstractTestParser
class BaseDeploymentUpdateTest(AbstractTestParser):
    """Shared helper base for deployment-update tests."""

    @staticmethod
    def modify_multi(plan, modified_nodes):
        # Re-run multi-instance modification with *modified_nodes* as the new
        # node set against the plan's previous nodes/instances; no explicit
        # instance modifications and no scaling groups are supplied.
        return modify_deployment(
            nodes=modified_nodes,
            previous_nodes=plan['nodes'],
            previous_node_instances=plan['node_instances'],
            modified_nodes=(),
            scaling_groups={})
class TestDeploymentUpdate(BaseDeploymentUpdateTest):
    """Tests the added/removed/extended/reduced node-instance buckets that
    modify_deployment produces for node and relationship changes.
    """

    # NOTE(review): the YAML indentation inside these literals was
    # reconstructed from the flattened source — verify against the original.
    BASE_BLUEPRINT = """
node_types:
    cloudify.nodes.Compute:
        properties:
            x:
                default: y
    db: {}
    webserver: {}
    db_dependent: {}
    type: {}
    network: {}
relationships:
    cloudify.relationships.depends_on:
        properties:
            connection_type:
                default: 'all_to_all'
    cloudify.relationships.contained_in:
        derived_from: cloudify.relationships.depends_on
    cloudify.relationships.connected_to:
        derived_from: cloudify.relationships.depends_on

node_templates:
"""

    BASE_NODES = """
    without_rel:
        type: type
    with_rel:
        type: type
"""

    def test_add_node(self):
        """Adding a node with a relationship yields one added instance plus
        one related (the relationship target)."""
        blueprint = self.BASE_BLUEPRINT + self.BASE_NODES
        plan = self.parse_multi(blueprint)
        plan['nodes'].append({
            'name': 'new_node',
            'id': 'new_node',
            'type': 'new_type',
            'number_of_instances': 1,
            'deploy_number_of_instances': 1,
            'min_number_of_instances': 1,
            'max_number_of_instances': 1,
            'relationships': [
                {'type': 'cloudify.relationships.connected_to',
                 'target_id': 'without_rel',
                 'type_hierarchy': ['cloudify.relationships.connected_to'],
                 'properties': {
                     'connection_type': 'all_to_all'
                 },
                 }
            ]
        })

        modified_nodes = plan['nodes']
        node_instances = self.modify_multi(plan, modified_nodes=modified_nodes)

        self.assertEqual(len(node_instances[constants.ADDED_AND_RELATED]), 2)
        # Instances carrying a 'modification' key are the actually added
        # ones; the rest are merely related.
        added_and_related = node_instances[constants.ADDED_AND_RELATED]
        added = [n for n in added_and_related if 'modification' in n]
        related = [n for n in added_and_related if n not in added]
        self.assertEqual(len(added), 1)
        self.assertEqual(len(related), 1)
        self.assertEqual(len(node_instances[constants.REMOVED_AND_RELATED]),
                         0)
        self.assertEqual(len(node_instances[constants.EXTENDED_AND_RELATED]),
                         0)
        self.assertEqual(len(node_instances[constants.REDUCED_AND_RELATED]),
                         0)

    def test_remove_node(self):
        """Removing a relationship target removes its instance and reduces
        the dependent instance."""
        blueprint = self.BASE_BLUEPRINT + self.BASE_NODES + """
        relationships:
            - type: cloudify.relationships.connected_to
              target: without_rel
"""
        plan = self.parse_multi(blueprint)
        # Drop the 'without_rel' node and strip the relationship pointing at
        # it from the surviving node.
        nodes = \
            copy.deepcopy(
                [n for n in plan['nodes'] if n['id'] != 'without_rel'])
        with_rel_node = nodes[0]
        with_rel_node['relationships'] = [r for r in
                                          with_rel_node['relationships']
                                          if r['target_id'] != 'without_rel']
        node_instances = self.modify_multi(plan, modified_nodes=nodes)

        self.assertEqual(len(node_instances[constants.ADDED_AND_RELATED]), 0)
        self.assertEqual(len(node_instances[constants.REMOVED_AND_RELATED]), 2)
        removed_and_related = node_instances[constants.REMOVED_AND_RELATED]
        removed = [n for n in removed_and_related if 'modification' in n]
        related = [n for n in removed_and_related if n not in removed]
        self.assertEqual(len(removed), 1)
        self.assertEqual(len(related), 1)
        self.assertEqual(len(node_instances[constants.EXTENDED_AND_RELATED]),
                         0)
        self.assertEqual(len(node_instances[constants.REDUCED_AND_RELATED]), 1)
        reduced_and_related = node_instances[constants.REDUCED_AND_RELATED]
        reduced = [n for n in reduced_and_related if 'modification' in n]
        self.assertEqual(len(reduced), 1)

    def test_add_relationship(self):
        """Adding a relationship between two existing nodes extends the
        source instance and marks the target as related."""
        blueprint = self.BASE_BLUEPRINT + self.BASE_NODES

        rel_type = 'cloudify.relationships.connected_to'
        plan = self.parse_multi(blueprint)

        with_rel = [n for n in plan['nodes'] if n['id'] == 'with_rel'][0]
        without_rel = [n for n in plan['nodes'] if n['id'] == 'without_rel'][0]
        with_rel['relationships'] = \
            [{'type': rel_type,
              'type_hierarchy': [rel_type],
              'target_id': without_rel['id'],
              'source_interface': {
                  'cloudify.interfaces.relationship_lifecycle': {
                      'preconfigure': 'scripts/increment.sh',
                      'establish': 'scripts/increment.sh',
                      'postconfigure': 'scripts/increment.sh'
                  }
              },
              'properties': {
                  'connection_type': 'all_to_all'
              }}]
        modified_nodes = [with_rel, without_rel]
        node_instances = self.modify_multi(plan, modified_nodes=modified_nodes)

        self.assertEqual(len(node_instances[constants.ADDED_AND_RELATED]), 0)
        self.assertEqual(len(node_instances[constants.REMOVED_AND_RELATED]),
                         0)
        self.assertEqual(len(node_instances[constants.EXTENDED_AND_RELATED]),
                         2)
        extended_and_related = node_instances[constants.EXTENDED_AND_RELATED]
        extended = [n for n in extended_and_related if 'modification' in n]
        related = [n for n in extended_and_related if n not in extended]
        self.assertEqual(len(extended), 1)
        self.assertEqual(len(related), 1)
        self.assertEqual(len(node_instances[constants.REDUCED_AND_RELATED]),
                         0)

    def test_remove_relationship(self):
        """Removing a relationship (nodes kept) reduces the source instance
        and marks the former target as related."""
        blueprint = self.BASE_BLUEPRINT + self.BASE_NODES + """
        relationships:
            - type: cloudify.relationships.connected_to
              target: without_rel
"""
        plan = self.parse_multi(blueprint)

        nodes = copy.deepcopy(plan['nodes'])
        node_with_rel = [n for n in nodes if n['id'] == 'with_rel'][0]
        relationships = [r for r in node_with_rel['relationships']
                         if r['target_id'] != 'without_rel']
        node_with_rel['relationships'] = relationships

        node_instances = self.modify_multi(plan, modified_nodes=nodes)

        self.assertEqual(len(node_instances[constants.ADDED_AND_RELATED]), 0)
        self.assertEqual(len(node_instances[constants.REMOVED_AND_RELATED]),
                         0)
        self.assertEqual(len(node_instances[constants.EXTENDED_AND_RELATED]),
                         0)
        self.assertEqual(len(node_instances[constants.REDUCED_AND_RELATED]),
                         2)
        reduced_and_related = node_instances[constants.REDUCED_AND_RELATED]
        reduced = [n for n in reduced_and_related if 'modification' in n]
        related = [n for n in reduced_and_related if n not in reduced]
        self.assertEqual(len(reduced), 1)
        self.assertEqual(len(related), 1)

    def test_add_node_templates_to_empty_blueprint(self):
        """Adding a relationship-less node to an empty blueprint yields a
        single added instance and nothing related."""
        empty_blueprint = self.BASIC_VERSION_SECTION_DSL_1_3
        plan = self.parse_multi(empty_blueprint)
        plan['nodes'].append({
            'name': 'new_node',
            'id': 'new_node',
            'type': 'new_type',
            'number_of_instances': 1,
            'deploy_number_of_instances': 1,
            'min_number_of_instances': 1,
            'max_number_of_instances': 1
        })
        modified_nodes = plan['nodes']
        node_instances = self.modify_multi(plan, modified_nodes=modified_nodes)
        self.assertEqual(len(node_instances[constants.ADDED_AND_RELATED]), 1)
        added_and_related = node_instances[constants.ADDED_AND_RELATED]
        added = [n for n in added_and_related if 'modification' in n]
        self.assertEqual(len(added), 1)
        self.assertEqual(len(node_instances[constants.REMOVED_AND_RELATED]),
                         0)
        self.assertEqual(len(node_instances[constants.EXTENDED_AND_RELATED]),
                         0)
        self.assertEqual(len(node_instances[constants.REDUCED_AND_RELATED]),
                         0)
| 40.352174 | 79 | 0.620623 | from dsl_parser import constants
from dsl_parser.multi_instance import modify_deployment
from dsl_parser.tests.abstract_test_parser import AbstractTestParser
class BaseDeploymentUpdateTest(AbstractTestParser):
@staticmethod
def modify_multi(plan, modified_nodes):
return modify_deployment(
nodes=modified_nodes,
previous_nodes=plan['nodes'],
previous_node_instances=plan['node_instances'],
modified_nodes=(),
scaling_groups={})
class TestDeploymentUpdate(BaseDeploymentUpdateTest):
BASE_BLUEPRINT = """
node_types:
cloudify.nodes.Compute:
properties:
x:
default: y
db: {}
webserver: {}
db_dependent: {}
type: {}
network: {}
relationships:
cloudify.relationships.depends_on:
properties:
connection_type:
default: 'all_to_all'
cloudify.relationships.contained_in:
derived_from: cloudify.relationships.depends_on
cloudify.relationships.connected_to:
derived_from: cloudify.relationships.depends_on
node_templates:
"""
BASE_NODES = """
without_rel:
type: type
with_rel:
type: type
"""
def test_add_node(self):
blueprint = self.BASE_BLUEPRINT + self.BASE_NODES
plan = self.parse_multi(blueprint)
plan['nodes'].append({
'name': 'new_node',
'id': 'new_node',
'type': 'new_type',
'number_of_instances': 1,
'deploy_number_of_instances': 1,
'min_number_of_instances': 1,
'max_number_of_instances': 1,
'relationships': [
{'type': 'cloudify.relationships.connected_to',
'target_id': 'without_rel',
'type_hierarchy': ['cloudify.relationships.connected_to'],
'properties': {
'connection_type': 'all_to_all'
},
}
]
})
modified_nodes = plan['nodes']
node_instances = self.modify_multi(plan, modified_nodes=modified_nodes)
self.assertEqual(len(node_instances[constants.ADDED_AND_RELATED]), 2)
added_and_related = node_instances[constants.ADDED_AND_RELATED]
added = [n for n in added_and_related if 'modification' in n]
related = [n for n in added_and_related if n not in added]
self.assertEqual(len(added), 1)
self.assertEqual(len(related), 1)
self.assertEqual(len(node_instances[constants.REMOVED_AND_RELATED]),
0)
self.assertEqual(len(node_instances[constants.EXTENDED_AND_RELATED]),
0)
self.assertEqual(len(node_instances[constants.REDUCED_AND_RELATED]),
0)
def test_remove_node(self):
blueprint = self.BASE_BLUEPRINT + self.BASE_NODES + """
relationships:
- type: cloudify.relationships.connected_to
target: without_rel
"""
plan = self.parse_multi(blueprint)
nodes = \
copy.deepcopy(
[n for n in plan['nodes'] if n['id'] != 'without_rel'])
with_rel_node = nodes[0]
with_rel_node['relationships'] = [r for r in
with_rel_node['relationships']
if r['target_id'] != 'without_rel']
node_instances = self.modify_multi(plan, modified_nodes=nodes)
self.assertEqual(len(node_instances[constants.ADDED_AND_RELATED]), 0)
self.assertEqual(len(node_instances[constants.REMOVED_AND_RELATED]), 2)
removed_and_related = node_instances[constants.REMOVED_AND_RELATED]
removed = [n for n in removed_and_related if 'modification' in n]
related = [n for n in removed_and_related if n not in removed]
self.assertEqual(len(removed), 1)
self.assertEqual(len(related), 1)
self.assertEqual(len(node_instances[constants.EXTENDED_AND_RELATED]),
0)
self.assertEqual(len(node_instances[constants.REDUCED_AND_RELATED]), 1)
reduced_and_related = node_instances[constants.REDUCED_AND_RELATED]
reduced = [n for n in reduced_and_related if 'modification' in n]
self.assertEqual(len(reduced), 1)
def test_add_relationship(self):
blueprint = self.BASE_BLUEPRINT + self.BASE_NODES
rel_type = 'cloudify.relationships.connected_to'
plan = self.parse_multi(blueprint)
with_rel = [n for n in plan['nodes'] if n['id'] == 'with_rel'][0]
without_rel = [n for n in plan['nodes'] if n['id'] == 'without_rel'][0]
with_rel['relationships'] = \
[{'type': rel_type,
'type_hierarchy': [rel_type],
'target_id': without_rel['id'],
'source_interface': {
'cloudify.interfaces.relationship_lifecycle': {
'preconfigure': 'scripts/increment.sh',
'establish': 'scripts/increment.sh',
'postconfigure': 'scripts/increment.sh'
}
},
'properties': {
'connection_type': 'all_to_all'
}}]
modified_nodes = [with_rel, without_rel]
node_instances = self.modify_multi(plan, modified_nodes=modified_nodes)
self.assertEqual(len(node_instances[constants.ADDED_AND_RELATED]), 0)
self.assertEqual(len(node_instances[constants.REMOVED_AND_RELATED]),
0)
self.assertEqual(len(node_instances[constants.EXTENDED_AND_RELATED]),
2)
extended_and_related = node_instances[constants.EXTENDED_AND_RELATED]
extended = [n for n in extended_and_related if 'modification' in n]
related = [n for n in extended_and_related if n not in extended]
self.assertEqual(len(extended), 1)
self.assertEqual(len(related), 1)
self.assertEqual(len(node_instances[constants.REDUCED_AND_RELATED]),
0)
def test_remove_relationship(self):
blueprint = self.BASE_BLUEPRINT + self.BASE_NODES + """
relationships:
- type: cloudify.relationships.connected_to
target: without_rel
"""
plan = self.parse_multi(blueprint)
nodes = copy.deepcopy(plan['nodes'])
node_with_rel = [n for n in nodes if n['id'] == 'with_rel'][0]
relationships = [r for r in node_with_rel['relationships']
if r['target_id'] != 'without_rel']
node_with_rel['relationships'] = relationships
node_instances = self.modify_multi(plan, modified_nodes=nodes)
self.assertEqual(len(node_instances[constants.ADDED_AND_RELATED]), 0)
self.assertEqual(len(node_instances[constants.REMOVED_AND_RELATED]),
0)
self.assertEqual(len(node_instances[constants.EXTENDED_AND_RELATED]),
0)
self.assertEqual(len(node_instances[constants.REDUCED_AND_RELATED]),
2)
reduced_and_related = node_instances[constants.REDUCED_AND_RELATED]
reduced = [n for n in reduced_and_related if 'modification' in n]
related = [n for n in reduced_and_related if n not in reduced]
self.assertEqual(len(reduced), 1)
self.assertEqual(len(related), 1)
def test_add_node_templates_to_empty_blueprint(self):
empty_blueprint = self.BASIC_VERSION_SECTION_DSL_1_3
plan = self.parse_multi(empty_blueprint)
plan['nodes'].append({
'name': 'new_node',
'id': 'new_node',
'type': 'new_type',
'number_of_instances': 1,
'deploy_number_of_instances': 1,
'min_number_of_instances': 1,
'max_number_of_instances': 1
})
modified_nodes = plan['nodes']
node_instances = self.modify_multi(plan, modified_nodes=modified_nodes)
self.assertEqual(len(node_instances[constants.ADDED_AND_RELATED]), 1)
added_and_related = node_instances[constants.ADDED_AND_RELATED]
added = [n for n in added_and_related if 'modification' in n]
self.assertEqual(len(added), 1)
self.assertEqual(len(node_instances[constants.REMOVED_AND_RELATED]),
0)
self.assertEqual(len(node_instances[constants.EXTENDED_AND_RELATED]),
0)
self.assertEqual(len(node_instances[constants.REDUCED_AND_RELATED]),
0)
| true | true |
1c2f6d30e79eec7063eef49c70ac7be3368730fc | 16,385 | py | Python | Fairness_attack/defenses.py | Ninarehm/attack | 0d5a6b842d4e81484540151d879036e9fe2184f1 | [
"MIT"
] | 8 | 2021-03-08T17:13:42.000Z | 2022-03-31T00:57:53.000Z | Fairness_attack/defenses.py | lutai14/attack | 773024c7b86be112521a2243f2f809a54891c81f | [
"MIT"
] | null | null | null | Fairness_attack/defenses.py | lutai14/attack | 773024c7b86be112521a2243f2f809a54891c81f | [
"MIT"
] | 1 | 2022-02-10T22:36:11.000Z | 2022-02-10T22:36:11.000Z | from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import copy
import sys
import numpy as np
from sklearn import metrics, model_selection, neighbors
import scipy.linalg as slin
import scipy.sparse as sparse
import upper_bounds
import data_utils as data
def remove_quantile(X, Y, dists, frac_to_remove):
"""
Removes the frac_to_remove points from X and Y with the highest value in dists.
This works separately for each class.
"""
if len(dists.shape) == 2: # Accept column vectors but reshape
assert dists.shape[1] == 1
dists = np.reshape(dists, -1)
assert len(dists.shape) == 1
assert X.shape[0] == Y.shape[0]
assert X.shape[0] == len(dists)
assert 0 <= frac_to_remove
assert frac_to_remove <= 1
frac_to_keep = 1.0 - frac_to_remove
num_removed_by_class = {}
idx_to_keep = []
for y in set(Y):
num_to_keep = int(np.round(frac_to_keep * np.sum(Y == y)))
num_removed_by_class[str(y)] = int(np.round(np.sum(Y == y))) - num_to_keep
idx_to_keep.append(
np.where(Y == y)[0][np.argsort(dists[Y == y])[:num_to_keep]])
idx_to_keep = np.concatenate(idx_to_keep)
X_def = X[idx_to_keep, :]
Y_def = Y[idx_to_keep]
return X_def, Y_def, idx_to_keep, num_removed_by_class
def compute_dists_under_Q(
X, Y,
Q,
subtract_from_l2=False, #If this is true, computes ||x - mu|| - ||Q(x - mu)||
centroids=None,
class_map=None,
norm=2):
"""
Computes ||Q(x - mu)|| in the corresponding norm.
Returns a vector of length num_examples (X.shape[0]).
If centroids is not specified, calculate it from the data.
If Q has dimension 3, then each class gets its own Q.
"""
if (centroids is not None) or (class_map is not None):
assert (centroids is not None) and (class_map is not None)
if subtract_from_l2:
assert Q is not None
if Q is not None and len(Q.shape) == 3:
assert class_map is not None
assert Q.shape[0] == len(class_map)
if norm == 1:
metric = 'manhattan'
elif norm == 2:
metric = 'euclidean'
else:
raise ValueError('norm must be 1 or 2')
Q_dists = np.zeros(X.shape[0])
if subtract_from_l2:
L2_dists = np.zeros(X.shape[0])
for y in set(Y):
if centroids is not None:
mu = centroids[class_map[y], :]
else:
mu = np.mean(X[Y == y, :], axis=0)
mu = mu.reshape(1, -1)
if Q is None: # assume Q = identity
Q_dists[Y == y] = metrics.pairwise.pairwise_distances(
X[Y == y, :],
mu,
metric=metric).reshape(-1)
else:
if len(Q.shape) == 3:
current_Q = Q[class_map[y], ...]
else:
current_Q = Q
if sparse.issparse(X):
XQ = X[Y == y, :].dot(current_Q.T)
else:
XQ = current_Q.dot(X[Y == y, :].T).T
muQ = current_Q.dot(mu.T).T
Q_dists[Y == y] = metrics.pairwise.pairwise_distances(
XQ,
muQ,
metric=metric).reshape(-1)
if subtract_from_l2:
L2_dists[Y == y] = metrics.pairwise.pairwise_distances(
X[Y == y, :],
mu,
metric=metric).reshape(-1)
Q_dists[Y == y] = np.sqrt(np.square(L2_dists[Y == y]) - np.square(Q_dists[Y == y]))
return Q_dists
def find_feasible_label_flips_in_sphere(X, Y, percentile):
class_map, centroids, centroid_vec, sphere_radii, slab_radii = data.get_data_params(
X,
Y,
percentile=percentile)
sphere_dists_flip = compute_dists_under_Q(
X, -Y,
Q=None,
subtract_from_l2=False,
centroids=centroids,
class_map=class_map,
norm=2)
feasible_flipped_mask = np.zeros(X.shape[0], dtype=bool)
for y in set(Y):
class_idx_flip = class_map[-y]
sphere_radius_flip = sphere_radii[class_idx_flip]
feasible_flipped_mask[Y == y] = (sphere_dists_flip[Y == y] <= sphere_radius_flip)
return feasible_flipped_mask
class DataDef(object):
def __init__(self, X_modified, Y_modified, X_test, Y_test, idx_train, idx_poison):
self.X_modified = X_modified
self.Y_modified = Y_modified
self.X_test = X_test
self.Y_test = Y_test
self.idx_train = idx_train
self.idx_poison = idx_poison
self.X_train = X_modified[idx_train, :]
self.Y_train = Y_modified[idx_train]
self.X_poison = X_modified[idx_poison, :]
self.Y_poison = Y_modified[idx_poison]
self.class_map = data.get_class_map()
self.emp_centroids = data.get_centroids(self.X_modified, self.Y_modified, self.class_map)
self.true_centroids = data.get_centroids(self.X_train, self.Y_train, self.class_map)
self.emp_centroid_vec = data.get_centroid_vec(self.emp_centroids)
self.true_centroid_vec = data.get_centroid_vec(self.true_centroids)
# Fraction of bad data / good data (so in total, there's 1+epsilon * good data )
self.epsilon = self.X_poison.shape[0] / self.X_train.shape[0]
def compute_dists_under_Q_over_dataset(
self,
Q,
subtract_from_l2=False, #If this is true, plots ||x - mu|| - ||Q(x - mu)||
use_emp_centroids=False,
norm=2):
if use_emp_centroids:
centroids = self.emp_centroids
else:
centroids = self.true_centroids
dists = compute_dists_under_Q(
self.X_modified, self.Y_modified,
Q,
subtract_from_l2=subtract_from_l2,
centroids=centroids,
class_map=self.class_map,
norm=norm)
return dists
def get_losses(self, w, b):
# This removes the max term from the hinge, so you can get negative loss if it's fit well
losses = 1 - self.Y_modified * (self.X_modified.dot(w) + b)
return losses
def get_sqrt_inv_covs(self, use_emp=False):
if use_emp:
sqrt_inv_covs = data.get_sqrt_inv_cov(self.X_modified, self.Y_modified, self.class_map)
else:
sqrt_inv_covs = data.get_sqrt_inv_cov(self.X_train, self.Y_train, self.class_map)
return sqrt_inv_covs
def get_knn_dists(self, num_neighbors, use_emp=False):
metric = 'euclidean'
if use_emp:
nbrs = neighbors.NearestNeighbors(
n_neighbors=num_neighbors,
metric=metric).fit(
self.X_modified)
else:
nbrs = neighbors.NearestNeighbors(
n_neighbors=num_neighbors,
metric=metric).fit(
self.X_train)
# Regardless of whether you use emp, we still want distances to the whole (modified) dataset.
dists_to_each_neighbor, _ = nbrs.kneighbors(self.X_modified)
return np.sum(dists_to_each_neighbor, axis=1)
# Might be able to speed up; is svds actually performant on dense matrices?
def project_to_low_rank(
self,
k,
use_emp=False,
get_projected_data=False):
"""
Projects to the rank (k+2) subspace defined by the top k SVs, mu_pos, and mu_neg.
If k is None, it tries to find a good k by taking the top 1000 SVs and seeing if we can
find some k such that sigma_k / sigma_1 < 0.1. If we can, we take the smallest such k.
If not, we take k = 1000 or d-1. (but when we add 2 back, this seems bad?)
Square root of the sum of squares is Frobenius norm.
"""
if use_emp:
X = self.X_modified
Y = self.Y_modified
else:
X = self.X_train
Y = self.Y_train
if sparse.issparse(X):
sq_fro_norm = sparse.linalg.norm(X, 'fro') ** 2
else:
sq_fro_norm = np.linalg.norm(X, 'fro') ** 2
if k is not None:
assert k > 0
assert k < self.X_train.shape[1]
U, S, V = sparse.linalg.svds(X, k=k, which='LM')
# If k is not specified, try to automatically find a good value
# This is a bit confusing because svds returns eigenvalues in increasing order
# so the meaning of k is reversed
else:
search_k = min(1000, X.shape[1] - 1)
target_sv_ratio = 0.95
U, S, V = sparse.linalg.svds(X, k=search_k, which='LM')
# Make sure it's sorted in the order we think it is...
sort_idx = np.argsort(S)[::-1]
S = S[sort_idx]
V = V[sort_idx, :]
max_sv = np.max(S)
assert S[0] == max_sv
sq_sv_cumsum = np.cumsum(np.power(S, 2))
assert np.all(sq_sv_cumsum < sq_fro_norm)
sv_ratios = sq_sv_cumsum / sq_fro_norm
if sv_ratios[-1] > target_sv_ratio:
k = np.where(sv_ratios > target_sv_ratio)[0][0]
else:
print(' Giving up -- max ratio was %s' % np.max(sv_ratios))
k = -1
V = V[:k, :]
S = S[:k]
mu_pos = np.array(np.mean(X[Y == 1, :], axis=0)).reshape(1, -1)
mu_neg = np.array(np.mean(X[Y == -1, :], axis=0)).reshape(1, -1)
V_mu = np.concatenate((V, mu_pos, mu_neg), axis=0)
P = slin.orth(V_mu.T).T
achieved_sv_ratio = np.sum(np.power(S, 2)) / sq_fro_norm
if get_projected_data:
PX_modified = self.X_modified.dot(P.T)
PX_train = self.X_train.dot(P.T)
PX_poison = self.X_poison.dot(P.T)
return P, achieved_sv_ratio, PX_modified, PX_train, PX_poison
else:
return P, achieved_sv_ratio
def find_num_points_kept(self, idx_to_keep):
good_mask = np.zeros(self.X_modified.shape[0], dtype=bool)
good_mask[self.idx_train] = True
bad_mask = np.zeros(self.X_modified.shape[0], dtype=bool)
bad_mask[self.idx_poison] = True
keep_mask = np.zeros(self.X_modified.shape[0], dtype=bool)
keep_mask[idx_to_keep] = True
frac_of_good_points_kept = np.mean(keep_mask & good_mask) / np.mean(good_mask)
frac_of_bad_points_kept = np.mean(keep_mask & bad_mask) / np.mean(bad_mask)
num_bad_points_removed_by_class = {}
for y in set(self.Y_modified):
num_bad_points_removed_by_class[str(y)] = np.sum(~keep_mask & bad_mask & (self.Y_modified == y))
return frac_of_good_points_kept, frac_of_bad_points_kept, num_bad_points_removed_by_class
# Because this needs to handle weight decay
# this actually creates a copy of model and changes its C
def remove_and_retrain(
self,
dists,
model,
weight_decay,
frac_to_remove,
num_folds=5):
X_def, Y_def, idx_to_keep, num_removed_by_class = remove_quantile(
self.X_modified,
self.Y_modified,
dists=dists,
frac_to_remove=frac_to_remove)
frac_of_good_points_kept, frac_of_bad_points_kept, num_bad_points_removed_by_class = self.find_num_points_kept(idx_to_keep)
num_bad_points_by_class = {}
for y in set(self.Y_poison):
num_bad_points_by_class[str(y)] = int(np.round(np.sum(self.Y_poison == y)))
model_def = copy.deepcopy(model)
model_def.C = 1.0 / (X_def.shape[0] * weight_decay)
mean_cv_score = None
if num_folds is not None:
k_fold = model_selection.KFold(n_splits=num_folds, shuffle=True, random_state=2)
cv_scores = model_selection.cross_val_score(
model_def,
X_def, Y_def,
cv=k_fold,
n_jobs=np.min((num_folds, 8)))
mean_cv_score = np.mean(cv_scores)
model_def.fit(X_def, Y_def)
params_def = np.reshape(model_def.coef_, -1)
bias_def = model_def.intercept_[0]
train_acc = model_def.score(X_def, Y_def)
test_acc = model_def.score(self.X_test, self.Y_test)
train_loss_overall = upper_bounds.hinge_loss(params_def, bias_def, X_def, Y_def)
train_loss_clean = upper_bounds.hinge_loss(params_def, bias_def, self.X_train, self.Y_train)
train_loss_poison = upper_bounds.hinge_loss(params_def, bias_def, self.X_poison, self.Y_poison)
test_loss = upper_bounds.hinge_loss(params_def, bias_def, self.X_test, self.Y_test)
results = {}
results['train_acc'] = train_acc
results['val_acc'] = mean_cv_score
results['test_acc'] = test_acc
results['train_loss_overall'] = train_loss_overall
results['train_loss_clean'] = train_loss_clean
results['train_loss_poison'] = train_loss_poison
results['test_loss'] = test_loss
results['frac_of_good_points_kept'] = frac_of_good_points_kept
results['frac_of_bad_points_kept'] = frac_of_bad_points_kept
results['num_removed_by_class'] = num_removed_by_class
results['num_bad_points_by_class'] = num_bad_points_by_class
results['num_bad_points_removed_by_class'] = num_bad_points_removed_by_class
return results
def eval_model(self, ScikitModel, weight_decay, fit_intercept, max_iter, frac_to_remove,
intercept_scaling=1,
use_slab=False,
use_loss=False,
verbose=True):
"""
Runs sphere, slab, loss
"""
def report_test_acc(dists, def_str):
retrain_results = self.remove_and_retrain(
dists,
model_def,
weight_decay,
frac_to_remove,
num_folds=None)
test_acc = retrain_results['test_acc']
if verbose:
train_acc = retrain_results['train_acc']
frac_of_good_points_kept = retrain_results['frac_of_good_points_kept']
frac_of_bad_points_kept = retrain_results['frac_of_bad_points_kept']
print()
print('After defending (%s):' % def_str)
print('Train (clean+poi): %.3f' % train_acc)
print('Test (overall or targeted) : %.3f' % test_acc)
print('Good points kept : %.3f%%' % (frac_of_good_points_kept*100))
print('Bad points kept : %.3f%%' % (frac_of_bad_points_kept*100))
return test_acc
C = 1.0 / (self.X_modified.shape[0] * weight_decay)
model_round = ScikitModel(
C=C,
tol=1e-8,
fit_intercept=fit_intercept,
intercept_scaling=intercept_scaling,
random_state=24,
max_iter=max_iter,
verbose=True)
model_round.fit(self.X_modified, self.Y_modified)
test_acc_before_defense = model_round.score(self.X_test, self.Y_test)
print()
print('With our attack, no defenses:')
print('Train (clean) : %.3f' % model_round.score(self.X_train, self.Y_train))
print('Train (clean+poi): %.3f' % model_round.score(self.X_modified, self.Y_modified))
print('Test (overall) : %.3f' % test_acc_before_defense)
model_def = ScikitModel(
C=C,
tol=1e-8,
fit_intercept=fit_intercept,
intercept_scaling=intercept_scaling,
random_state=24,
max_iter=max_iter,
verbose=True)
# L2 defense
dists = self.compute_dists_under_Q_over_dataset(
Q=None,
use_emp_centroids=True,
norm=2)
highest_test_acc = report_test_acc(dists, 'L2')
# Loss defense
if use_loss:
dists = self.get_losses(model_round.coef_.reshape(-1), model_round.intercept_)
highest_test_acc = max(highest_test_acc, report_test_acc(dists, 'loss'))
# Slab defense
if use_slab:
dists = self.compute_dists_under_Q_over_dataset(
Q=self.emp_centroid_vec,
use_emp_centroids=True,
norm=2)
highest_test_acc = max(highest_test_acc, report_test_acc(dists, 'slab'))
return test_acc_before_defense, highest_test_acc
| 34.936034 | 131 | 0.603967 | from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import copy
import sys
import numpy as np
from sklearn import metrics, model_selection, neighbors
import scipy.linalg as slin
import scipy.sparse as sparse
import upper_bounds
import data_utils as data
def remove_quantile(X, Y, dists, frac_to_remove):
if len(dists.shape) == 2:
assert dists.shape[1] == 1
dists = np.reshape(dists, -1)
assert len(dists.shape) == 1
assert X.shape[0] == Y.shape[0]
assert X.shape[0] == len(dists)
assert 0 <= frac_to_remove
assert frac_to_remove <= 1
frac_to_keep = 1.0 - frac_to_remove
num_removed_by_class = {}
idx_to_keep = []
for y in set(Y):
num_to_keep = int(np.round(frac_to_keep * np.sum(Y == y)))
num_removed_by_class[str(y)] = int(np.round(np.sum(Y == y))) - num_to_keep
idx_to_keep.append(
np.where(Y == y)[0][np.argsort(dists[Y == y])[:num_to_keep]])
idx_to_keep = np.concatenate(idx_to_keep)
X_def = X[idx_to_keep, :]
Y_def = Y[idx_to_keep]
return X_def, Y_def, idx_to_keep, num_removed_by_class
def compute_dists_under_Q(
X, Y,
Q,
subtract_from_l2=False,
centroids=None,
class_map=None,
norm=2):
if (centroids is not None) or (class_map is not None):
assert (centroids is not None) and (class_map is not None)
if subtract_from_l2:
assert Q is not None
if Q is not None and len(Q.shape) == 3:
assert class_map is not None
assert Q.shape[0] == len(class_map)
if norm == 1:
metric = 'manhattan'
elif norm == 2:
metric = 'euclidean'
else:
raise ValueError('norm must be 1 or 2')
Q_dists = np.zeros(X.shape[0])
if subtract_from_l2:
L2_dists = np.zeros(X.shape[0])
for y in set(Y):
if centroids is not None:
mu = centroids[class_map[y], :]
else:
mu = np.mean(X[Y == y, :], axis=0)
mu = mu.reshape(1, -1)
if Q is None:
Q_dists[Y == y] = metrics.pairwise.pairwise_distances(
X[Y == y, :],
mu,
metric=metric).reshape(-1)
else:
if len(Q.shape) == 3:
current_Q = Q[class_map[y], ...]
else:
current_Q = Q
if sparse.issparse(X):
XQ = X[Y == y, :].dot(current_Q.T)
else:
XQ = current_Q.dot(X[Y == y, :].T).T
muQ = current_Q.dot(mu.T).T
Q_dists[Y == y] = metrics.pairwise.pairwise_distances(
XQ,
muQ,
metric=metric).reshape(-1)
if subtract_from_l2:
L2_dists[Y == y] = metrics.pairwise.pairwise_distances(
X[Y == y, :],
mu,
metric=metric).reshape(-1)
Q_dists[Y == y] = np.sqrt(np.square(L2_dists[Y == y]) - np.square(Q_dists[Y == y]))
return Q_dists
def find_feasible_label_flips_in_sphere(X, Y, percentile):
class_map, centroids, centroid_vec, sphere_radii, slab_radii = data.get_data_params(
X,
Y,
percentile=percentile)
sphere_dists_flip = compute_dists_under_Q(
X, -Y,
Q=None,
subtract_from_l2=False,
centroids=centroids,
class_map=class_map,
norm=2)
feasible_flipped_mask = np.zeros(X.shape[0], dtype=bool)
for y in set(Y):
class_idx_flip = class_map[-y]
sphere_radius_flip = sphere_radii[class_idx_flip]
feasible_flipped_mask[Y == y] = (sphere_dists_flip[Y == y] <= sphere_radius_flip)
return feasible_flipped_mask
class DataDef(object):
def __init__(self, X_modified, Y_modified, X_test, Y_test, idx_train, idx_poison):
self.X_modified = X_modified
self.Y_modified = Y_modified
self.X_test = X_test
self.Y_test = Y_test
self.idx_train = idx_train
self.idx_poison = idx_poison
self.X_train = X_modified[idx_train, :]
self.Y_train = Y_modified[idx_train]
self.X_poison = X_modified[idx_poison, :]
self.Y_poison = Y_modified[idx_poison]
self.class_map = data.get_class_map()
self.emp_centroids = data.get_centroids(self.X_modified, self.Y_modified, self.class_map)
self.true_centroids = data.get_centroids(self.X_train, self.Y_train, self.class_map)
self.emp_centroid_vec = data.get_centroid_vec(self.emp_centroids)
self.true_centroid_vec = data.get_centroid_vec(self.true_centroids)
self.epsilon = self.X_poison.shape[0] / self.X_train.shape[0]
def compute_dists_under_Q_over_dataset(
self,
Q,
subtract_from_l2=False, #If this is true, plots ||x - mu|| - ||Q(x - mu)||
use_emp_centroids=False,
norm=2):
if use_emp_centroids:
centroids = self.emp_centroids
else:
centroids = self.true_centroids
dists = compute_dists_under_Q(
self.X_modified, self.Y_modified,
Q,
subtract_from_l2=subtract_from_l2,
centroids=centroids,
class_map=self.class_map,
norm=norm)
return dists
def get_losses(self, w, b):
# This removes the max term from the hinge, so you can get negative loss if it's fit well
losses = 1 - self.Y_modified * (self.X_modified.dot(w) + b)
return losses
def get_sqrt_inv_covs(self, use_emp=False):
if use_emp:
sqrt_inv_covs = data.get_sqrt_inv_cov(self.X_modified, self.Y_modified, self.class_map)
else:
sqrt_inv_covs = data.get_sqrt_inv_cov(self.X_train, self.Y_train, self.class_map)
return sqrt_inv_covs
def get_knn_dists(self, num_neighbors, use_emp=False):
metric = 'euclidean'
if use_emp:
nbrs = neighbors.NearestNeighbors(
n_neighbors=num_neighbors,
metric=metric).fit(
self.X_modified)
else:
nbrs = neighbors.NearestNeighbors(
n_neighbors=num_neighbors,
metric=metric).fit(
self.X_train)
dists_to_each_neighbor, _ = nbrs.kneighbors(self.X_modified)
return np.sum(dists_to_each_neighbor, axis=1)
def project_to_low_rank(
self,
k,
use_emp=False,
get_projected_data=False):
if use_emp:
X = self.X_modified
Y = self.Y_modified
else:
X = self.X_train
Y = self.Y_train
if sparse.issparse(X):
sq_fro_norm = sparse.linalg.norm(X, 'fro') ** 2
else:
sq_fro_norm = np.linalg.norm(X, 'fro') ** 2
if k is not None:
assert k > 0
assert k < self.X_train.shape[1]
U, S, V = sparse.linalg.svds(X, k=k, which='LM')
else:
search_k = min(1000, X.shape[1] - 1)
target_sv_ratio = 0.95
U, S, V = sparse.linalg.svds(X, k=search_k, which='LM')
sort_idx = np.argsort(S)[::-1]
S = S[sort_idx]
V = V[sort_idx, :]
max_sv = np.max(S)
assert S[0] == max_sv
sq_sv_cumsum = np.cumsum(np.power(S, 2))
assert np.all(sq_sv_cumsum < sq_fro_norm)
sv_ratios = sq_sv_cumsum / sq_fro_norm
if sv_ratios[-1] > target_sv_ratio:
k = np.where(sv_ratios > target_sv_ratio)[0][0]
else:
print(' Giving up -- max ratio was %s' % np.max(sv_ratios))
k = -1
V = V[:k, :]
S = S[:k]
mu_pos = np.array(np.mean(X[Y == 1, :], axis=0)).reshape(1, -1)
mu_neg = np.array(np.mean(X[Y == -1, :], axis=0)).reshape(1, -1)
V_mu = np.concatenate((V, mu_pos, mu_neg), axis=0)
P = slin.orth(V_mu.T).T
achieved_sv_ratio = np.sum(np.power(S, 2)) / sq_fro_norm
if get_projected_data:
PX_modified = self.X_modified.dot(P.T)
PX_train = self.X_train.dot(P.T)
PX_poison = self.X_poison.dot(P.T)
return P, achieved_sv_ratio, PX_modified, PX_train, PX_poison
else:
return P, achieved_sv_ratio
def find_num_points_kept(self, idx_to_keep):
good_mask = np.zeros(self.X_modified.shape[0], dtype=bool)
good_mask[self.idx_train] = True
bad_mask = np.zeros(self.X_modified.shape[0], dtype=bool)
bad_mask[self.idx_poison] = True
keep_mask = np.zeros(self.X_modified.shape[0], dtype=bool)
keep_mask[idx_to_keep] = True
frac_of_good_points_kept = np.mean(keep_mask & good_mask) / np.mean(good_mask)
frac_of_bad_points_kept = np.mean(keep_mask & bad_mask) / np.mean(bad_mask)
num_bad_points_removed_by_class = {}
for y in set(self.Y_modified):
num_bad_points_removed_by_class[str(y)] = np.sum(~keep_mask & bad_mask & (self.Y_modified == y))
return frac_of_good_points_kept, frac_of_bad_points_kept, num_bad_points_removed_by_class
# Because this needs to handle weight decay
# this actually creates a copy of model and changes its C
def remove_and_retrain(
self,
dists,
model,
weight_decay,
frac_to_remove,
num_folds=5):
X_def, Y_def, idx_to_keep, num_removed_by_class = remove_quantile(
self.X_modified,
self.Y_modified,
dists=dists,
frac_to_remove=frac_to_remove)
frac_of_good_points_kept, frac_of_bad_points_kept, num_bad_points_removed_by_class = self.find_num_points_kept(idx_to_keep)
num_bad_points_by_class = {}
for y in set(self.Y_poison):
num_bad_points_by_class[str(y)] = int(np.round(np.sum(self.Y_poison == y)))
model_def = copy.deepcopy(model)
model_def.C = 1.0 / (X_def.shape[0] * weight_decay)
mean_cv_score = None
if num_folds is not None:
k_fold = model_selection.KFold(n_splits=num_folds, shuffle=True, random_state=2)
cv_scores = model_selection.cross_val_score(
model_def,
X_def, Y_def,
cv=k_fold,
n_jobs=np.min((num_folds, 8)))
mean_cv_score = np.mean(cv_scores)
model_def.fit(X_def, Y_def)
params_def = np.reshape(model_def.coef_, -1)
bias_def = model_def.intercept_[0]
train_acc = model_def.score(X_def, Y_def)
test_acc = model_def.score(self.X_test, self.Y_test)
train_loss_overall = upper_bounds.hinge_loss(params_def, bias_def, X_def, Y_def)
train_loss_clean = upper_bounds.hinge_loss(params_def, bias_def, self.X_train, self.Y_train)
train_loss_poison = upper_bounds.hinge_loss(params_def, bias_def, self.X_poison, self.Y_poison)
test_loss = upper_bounds.hinge_loss(params_def, bias_def, self.X_test, self.Y_test)
results = {}
results['train_acc'] = train_acc
results['val_acc'] = mean_cv_score
results['test_acc'] = test_acc
results['train_loss_overall'] = train_loss_overall
results['train_loss_clean'] = train_loss_clean
results['train_loss_poison'] = train_loss_poison
results['test_loss'] = test_loss
results['frac_of_good_points_kept'] = frac_of_good_points_kept
results['frac_of_bad_points_kept'] = frac_of_bad_points_kept
results['num_removed_by_class'] = num_removed_by_class
results['num_bad_points_by_class'] = num_bad_points_by_class
results['num_bad_points_removed_by_class'] = num_bad_points_removed_by_class
return results
def eval_model(self, ScikitModel, weight_decay, fit_intercept, max_iter, frac_to_remove,
intercept_scaling=1,
use_slab=False,
use_loss=False,
verbose=True):
def report_test_acc(dists, def_str):
retrain_results = self.remove_and_retrain(
dists,
model_def,
weight_decay,
frac_to_remove,
num_folds=None)
test_acc = retrain_results['test_acc']
if verbose:
train_acc = retrain_results['train_acc']
frac_of_good_points_kept = retrain_results['frac_of_good_points_kept']
frac_of_bad_points_kept = retrain_results['frac_of_bad_points_kept']
print()
print('After defending (%s):' % def_str)
print('Train (clean+poi): %.3f' % train_acc)
print('Test (overall or targeted) : %.3f' % test_acc)
print('Good points kept : %.3f%%' % (frac_of_good_points_kept*100))
print('Bad points kept : %.3f%%' % (frac_of_bad_points_kept*100))
return test_acc
C = 1.0 / (self.X_modified.shape[0] * weight_decay)
model_round = ScikitModel(
C=C,
tol=1e-8,
fit_intercept=fit_intercept,
intercept_scaling=intercept_scaling,
random_state=24,
max_iter=max_iter,
verbose=True)
model_round.fit(self.X_modified, self.Y_modified)
test_acc_before_defense = model_round.score(self.X_test, self.Y_test)
print()
print('With our attack, no defenses:')
print('Train (clean) : %.3f' % model_round.score(self.X_train, self.Y_train))
print('Train (clean+poi): %.3f' % model_round.score(self.X_modified, self.Y_modified))
print('Test (overall) : %.3f' % test_acc_before_defense)
model_def = ScikitModel(
C=C,
tol=1e-8,
fit_intercept=fit_intercept,
intercept_scaling=intercept_scaling,
random_state=24,
max_iter=max_iter,
verbose=True)
# L2 defense
dists = self.compute_dists_under_Q_over_dataset(
Q=None,
use_emp_centroids=True,
norm=2)
highest_test_acc = report_test_acc(dists, 'L2')
# Loss defense
if use_loss:
dists = self.get_losses(model_round.coef_.reshape(-1), model_round.intercept_)
highest_test_acc = max(highest_test_acc, report_test_acc(dists, 'loss'))
# Slab defense
if use_slab:
dists = self.compute_dists_under_Q_over_dataset(
Q=self.emp_centroid_vec,
use_emp_centroids=True,
norm=2)
highest_test_acc = max(highest_test_acc, report_test_acc(dists, 'slab'))
return test_acc_before_defense, highest_test_acc
| true | true |
1c2f6d943cfc6ffe509c5db07d44559569799c6f | 71 | py | Python | PycharmProjects/PythonExercicios/ex001.py | RodrigoMASRamos/Projects.py | ed15981b320914c9667305dcd5fb5b7906fd9b00 | [
"MIT"
] | null | null | null | PycharmProjects/PythonExercicios/ex001.py | RodrigoMASRamos/Projects.py | ed15981b320914c9667305dcd5fb5b7906fd9b00 | [
"MIT"
] | null | null | null | PycharmProjects/PythonExercicios/ex001.py | RodrigoMASRamos/Projects.py | ed15981b320914c9667305dcd5fb5b7906fd9b00 | [
"MIT"
] | null | null | null | #Crie um programa que escreva "Olá,Mundo!" na tela.
print('Olá,mundo!') | 35.5 | 51 | 0.71831 |
print('Olá,mundo!') | true | true |
1c2f6ed35f058502d9614cd6295c3e6bd26ed8f5 | 399 | py | Python | coursegroup/asgi.py | Samyak-jain09/CourseRoom | 2ba951bea983e1bd60b873a0b07db9bac9e30531 | [
"MIT"
] | null | null | null | coursegroup/asgi.py | Samyak-jain09/CourseRoom | 2ba951bea983e1bd60b873a0b07db9bac9e30531 | [
"MIT"
] | null | null | null | coursegroup/asgi.py | Samyak-jain09/CourseRoom | 2ba951bea983e1bd60b873a0b07db9bac9e30531 | [
"MIT"
] | null | null | null | """
ASGI config for coursegroup project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/4.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'coursegroup.settings')
application = get_asgi_application()
| 23.470588 | 78 | 0.789474 |
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'coursegroup.settings')
application = get_asgi_application()
| true | true |
1c2f6ed365f1583d7cb3505faa954aa708e16799 | 3,114 | py | Python | tests/releases/test_cancel_release.py | kids-first/kf-api-study-creator | 93a79b108b6474f9b4135ace06c89ddcf63dd257 | [
"Apache-2.0"
] | 3 | 2019-05-04T02:07:28.000Z | 2020-10-16T17:47:44.000Z | tests/releases/test_cancel_release.py | kids-first/kf-api-study-creator | 93a79b108b6474f9b4135ace06c89ddcf63dd257 | [
"Apache-2.0"
] | 604 | 2019-02-21T18:14:51.000Z | 2022-02-10T08:13:54.000Z | tests/releases/test_cancel_release.py | kids-first/kf-api-study-creator | 93a79b108b6474f9b4135ace06c89ddcf63dd257 | [
"Apache-2.0"
] | null | null | null | import pytest
from graphql_relay import to_global_id
from creator.releases.models import Release
from creator.releases.factories import ReleaseFactory
CANCEL_RELEASE = """
mutation ($release: ID!) {
cancelRelease(release: $release) {
release {
id
state
}
}
}
"""
@pytest.mark.parametrize(
"user_group,allowed",
[
("Administrators", True),
("Services", False),
("Developers", False),
("Investigators", False),
("Bioinformatics", False),
(None, False),
],
)
def test_cancel_release(db, clients, user_group, allowed):
"""
Test the cancel mutation.
"""
client = clients.get(user_group)
release = ReleaseFactory(state="running")
resp = client.post(
"/graphql",
data={
"query": CANCEL_RELEASE,
"variables": {"release": to_global_id("ReleaseNode", release.pk)},
},
content_type="application/json",
)
if allowed:
assert resp.json()["data"]["cancelRelease"]["release"] is not None
assert (
resp.json()["data"]["cancelRelease"]["release"]["state"]
== "canceling"
)
else:
assert resp.json()["errors"][0]["message"] == "Not allowed"
@pytest.mark.parametrize(
"state,allowed",
[
("waiting", True),
("initializing", True),
("running", True),
("staged", True),
("publishing", True),
("published", False),
("failed", False),
("canceled", False),
],
)
def test_cancel_allowed_states(db, clients, state, allowed):
"""
Test that canceling may only occur from valid states with appropriate
end dates
"""
client = clients.get("Administrators")
release = ReleaseFactory(state=state)
assert release.ended_at is None if allowed else not None
resp = client.post(
"/graphql",
data={
"query": CANCEL_RELEASE,
"variables": {"release": to_global_id("ReleaseNode", release.pk)},
},
content_type="application/json",
)
release.refresh_from_db()
if allowed:
assert resp.json()["data"]["cancelRelease"]["release"] is not None
assert (
resp.json()["data"]["cancelRelease"]["release"]["state"]
== "canceling"
)
assert release.state == "canceled"
assert release.ended_at is not None
else:
assert f"Can't switch from state '{state}'" in (
resp.json()["errors"][0]["message"]
)
def test_cancel_release_does_not_exist(db, clients):
"""
Test that a release that does not exist cannot be canceled
"""
client = clients.get("Administrators")
release = ReleaseFactory(state="running")
resp = client.post(
"/graphql",
data={
"query": CANCEL_RELEASE,
"variables": {"release": to_global_id("ReleaseNode", "ABC")},
},
content_type="application/json",
)
assert resp.json()["errors"][0]["message"] == "Release ABC does not exist"
| 25.112903 | 78 | 0.569685 | import pytest
from graphql_relay import to_global_id
from creator.releases.models import Release
from creator.releases.factories import ReleaseFactory
CANCEL_RELEASE = """
mutation ($release: ID!) {
cancelRelease(release: $release) {
release {
id
state
}
}
}
"""
@pytest.mark.parametrize(
"user_group,allowed",
[
("Administrators", True),
("Services", False),
("Developers", False),
("Investigators", False),
("Bioinformatics", False),
(None, False),
],
)
def test_cancel_release(db, clients, user_group, allowed):
client = clients.get(user_group)
release = ReleaseFactory(state="running")
resp = client.post(
"/graphql",
data={
"query": CANCEL_RELEASE,
"variables": {"release": to_global_id("ReleaseNode", release.pk)},
},
content_type="application/json",
)
if allowed:
assert resp.json()["data"]["cancelRelease"]["release"] is not None
assert (
resp.json()["data"]["cancelRelease"]["release"]["state"]
== "canceling"
)
else:
assert resp.json()["errors"][0]["message"] == "Not allowed"
@pytest.mark.parametrize(
"state,allowed",
[
("waiting", True),
("initializing", True),
("running", True),
("staged", True),
("publishing", True),
("published", False),
("failed", False),
("canceled", False),
],
)
def test_cancel_allowed_states(db, clients, state, allowed):
client = clients.get("Administrators")
release = ReleaseFactory(state=state)
assert release.ended_at is None if allowed else not None
resp = client.post(
"/graphql",
data={
"query": CANCEL_RELEASE,
"variables": {"release": to_global_id("ReleaseNode", release.pk)},
},
content_type="application/json",
)
release.refresh_from_db()
if allowed:
assert resp.json()["data"]["cancelRelease"]["release"] is not None
assert (
resp.json()["data"]["cancelRelease"]["release"]["state"]
== "canceling"
)
assert release.state == "canceled"
assert release.ended_at is not None
else:
assert f"Can't switch from state '{state}'" in (
resp.json()["errors"][0]["message"]
)
def test_cancel_release_does_not_exist(db, clients):
client = clients.get("Administrators")
release = ReleaseFactory(state="running")
resp = client.post(
"/graphql",
data={
"query": CANCEL_RELEASE,
"variables": {"release": to_global_id("ReleaseNode", "ABC")},
},
content_type="application/json",
)
assert resp.json()["errors"][0]["message"] == "Release ABC does not exist"
| true | true |
1c2f7158c00e6dd0245bf303c5fa068571a21cd0 | 7,507 | py | Python | hummingbot/wallet/ethereum/watcher/websocket_watcher.py | csdenboer/hummingbot | 8a799675a325ebdbb74d76b2a44472cdbf74d691 | [
"Apache-2.0"
] | 37 | 2020-07-08T03:44:26.000Z | 2022-01-16T12:35:26.000Z | hummingbot/wallet/ethereum/watcher/websocket_watcher.py | csdenboer/hummingbot | 8a799675a325ebdbb74d76b2a44472cdbf74d691 | [
"Apache-2.0"
] | 13 | 2021-02-16T01:57:23.000Z | 2021-02-16T03:50:03.000Z | hummingbot/wallet/ethereum/watcher/websocket_watcher.py | csdenboer/hummingbot | 8a799675a325ebdbb74d76b2a44472cdbf74d691 | [
"Apache-2.0"
] | 17 | 2021-04-07T21:29:46.000Z | 2022-02-03T02:01:04.000Z | import websockets
from web3 import Web3
from web3.exceptions import BlockNotFound
from websockets.exceptions import ConnectionClosed
import logging
import ujson
import asyncio
from hexbytes import HexBytes
from web3.datastructures import AttributeDict
from cachetools import TTLCache
from typing import Optional, Dict, AsyncIterable, Any
from contextlib import suppress
from hummingbot.core.utils.async_utils import safe_ensure_future
from hummingbot.wallet.ethereum.watcher.base_watcher import BaseWatcher
from hummingbot.logger import HummingbotLogger
from hummingbot.core.event.events import NewBlocksWatcherEvent
class WSNewBlocksWatcher(BaseWatcher):
MESSAGE_TIMEOUT = 30.0
PING_TIMEOUT = 10.0
def __init__(self, w3: Web3, websocket_url):
super().__init__(w3)
self._network_on = False
self._nonce: int = 0
self._current_block_number: int = -1
self._websocket_url = websocket_url
self._node_address = None
self._client: Optional[websockets.WebSocketClientProtocol] = None
self._fetch_new_blocks_task: Optional[asyncio.Task] = None
self._block_cache = TTLCache(maxsize=10, ttl=120)
_nbw_logger: Optional[HummingbotLogger] = None
@classmethod
def logger(cls) -> HummingbotLogger:
if cls._nbw_logger is None:
cls._nbw_logger = logging.getLogger(__name__)
return cls._nbw_logger
@property
def block_number(self) -> int:
return self._current_block_number
@property
def block_cache(self) -> Dict[HexBytes, AttributeDict]:
cache_dict: Dict[HexBytes, AttributeDict] = dict([(key, self._block_cache[key])
for key in self._block_cache.keys()])
return cache_dict
async def start_network(self):
if self._fetch_new_blocks_task is not None:
await self.stop_network()
else:
try:
self._current_block_number = await self.call_async(getattr, self._w3.eth, "blockNumber")
except asyncio.CancelledError:
raise
except Exception:
self.logger().network("Error fetching newest Ethereum block number.",
app_warning_msg="Error fetching newest Ethereum block number. "
"Check Ethereum node connection",
exc_info=True)
await self.connect()
await self.subscribe(["newHeads"])
self._fetch_new_blocks_task: asyncio.Task = safe_ensure_future(self.fetch_new_blocks_loop())
self._network_on = True
async def stop_network(self):
if self._fetch_new_blocks_task is not None:
await self.disconnect()
self._fetch_new_blocks_task.cancel()
self._fetch_new_blocks_task = None
self._network_on = False
async def connect(self):
try:
self._client = await websockets.connect(uri=self._websocket_url)
return self._client
except Exception as e:
self.logger().network(f"ERROR in connection: {e}")
async def disconnect(self):
try:
await self._client.close()
self._client = None
except Exception as e:
self.logger().network(f"ERROR in disconnection: {e}")
async def _send(self, emit_data) -> int:
self._nonce += 1
emit_data["id"] = self._nonce
await self._client.send(ujson.dumps(emit_data))
return self._nonce
async def subscribe(self, params) -> bool:
emit_data = {
"method": "eth_subscribe",
"params": params
}
nonce = await self._send(emit_data)
raw_message = await self._client.recv()
if raw_message is not None:
resp = ujson.loads(raw_message)
if resp.get("id", None) == nonce:
self._node_address = resp.get("result")
return True
return False
async def _messages(self) -> AsyncIterable[Any]:
try:
while True:
try:
raw_msg_str: str = await asyncio.wait_for(self._client.recv(), self.MESSAGE_TIMEOUT)
yield raw_msg_str
except asyncio.TimeoutError:
try:
pong_waiter = await self._client.ping()
await asyncio.wait_for(pong_waiter, timeout=self.PING_TIMEOUT)
except asyncio.TimeoutError:
raise
except asyncio.TimeoutError:
self.logger().warning("WebSocket ping timed out. Going to reconnect...")
return
except ConnectionClosed:
return
finally:
await self.disconnect()
# Reconnect and subscribe in case a disconnect happens
await self.connect()
await self.subscribe(["newHeads"])
async def fetch_new_blocks_loop(self):
while True:
try:
async for raw_message in self._messages():
message_json = ujson.loads(raw_message) if raw_message is not None else None
if message_json.get("method", None) == "eth_subscription":
subscription_result_params = message_json.get("params", None)
incoming_block = subscription_result_params.get("result", None) \
if subscription_result_params is not None else None
if incoming_block is not None:
with suppress(BlockNotFound):
new_block: AttributeDict = await self.call_async(self._w3.eth.getBlock,
incoming_block.get("hash"), True)
self._current_block_number = new_block.get("number")
self._block_cache[new_block.get("hash")] = new_block
self.trigger_event(NewBlocksWatcherEvent.NewBlocks, [new_block])
except asyncio.TimeoutError:
self.logger().network("Timed out fetching new block.", exc_info=True,
app_warning_msg="Timed out fetching new block. "
"Check wallet network connection")
except asyncio.CancelledError:
raise
except Exception as e:
self.logger().network(f"Error fetching new block: {e}", exc_info=True,
app_warning_msg="Error fetching new block. "
"Check wallet network connection")
await asyncio.sleep(30.0)
async def get_timestamp_for_block(self, block_hash: HexBytes, max_tries: Optional[int] = 10) -> int:
counter = 0
block: AttributeDict = None
if block_hash in self._block_cache.keys():
block = self._block_cache.get(block_hash)
else:
while block is None:
if counter == max_tries:
raise ValueError(f"Block hash {block_hash.hex()} does not exist.")
counter += 1
block = self._block_cache.get(block_hash)
await asyncio.sleep(0.5)
return block.get("timestamp")
| 41.705556 | 114 | 0.582257 | import websockets
from web3 import Web3
from web3.exceptions import BlockNotFound
from websockets.exceptions import ConnectionClosed
import logging
import ujson
import asyncio
from hexbytes import HexBytes
from web3.datastructures import AttributeDict
from cachetools import TTLCache
from typing import Optional, Dict, AsyncIterable, Any
from contextlib import suppress
from hummingbot.core.utils.async_utils import safe_ensure_future
from hummingbot.wallet.ethereum.watcher.base_watcher import BaseWatcher
from hummingbot.logger import HummingbotLogger
from hummingbot.core.event.events import NewBlocksWatcherEvent
class WSNewBlocksWatcher(BaseWatcher):
MESSAGE_TIMEOUT = 30.0
PING_TIMEOUT = 10.0
def __init__(self, w3: Web3, websocket_url):
super().__init__(w3)
self._network_on = False
self._nonce: int = 0
self._current_block_number: int = -1
self._websocket_url = websocket_url
self._node_address = None
self._client: Optional[websockets.WebSocketClientProtocol] = None
self._fetch_new_blocks_task: Optional[asyncio.Task] = None
self._block_cache = TTLCache(maxsize=10, ttl=120)
_nbw_logger: Optional[HummingbotLogger] = None
@classmethod
def logger(cls) -> HummingbotLogger:
if cls._nbw_logger is None:
cls._nbw_logger = logging.getLogger(__name__)
return cls._nbw_logger
@property
def block_number(self) -> int:
return self._current_block_number
@property
def block_cache(self) -> Dict[HexBytes, AttributeDict]:
cache_dict: Dict[HexBytes, AttributeDict] = dict([(key, self._block_cache[key])
for key in self._block_cache.keys()])
return cache_dict
async def start_network(self):
if self._fetch_new_blocks_task is not None:
await self.stop_network()
else:
try:
self._current_block_number = await self.call_async(getattr, self._w3.eth, "blockNumber")
except asyncio.CancelledError:
raise
except Exception:
self.logger().network("Error fetching newest Ethereum block number.",
app_warning_msg="Error fetching newest Ethereum block number. "
"Check Ethereum node connection",
exc_info=True)
await self.connect()
await self.subscribe(["newHeads"])
self._fetch_new_blocks_task: asyncio.Task = safe_ensure_future(self.fetch_new_blocks_loop())
self._network_on = True
async def stop_network(self):
if self._fetch_new_blocks_task is not None:
await self.disconnect()
self._fetch_new_blocks_task.cancel()
self._fetch_new_blocks_task = None
self._network_on = False
async def connect(self):
try:
self._client = await websockets.connect(uri=self._websocket_url)
return self._client
except Exception as e:
self.logger().network(f"ERROR in connection: {e}")
async def disconnect(self):
try:
await self._client.close()
self._client = None
except Exception as e:
self.logger().network(f"ERROR in disconnection: {e}")
async def _send(self, emit_data) -> int:
self._nonce += 1
emit_data["id"] = self._nonce
await self._client.send(ujson.dumps(emit_data))
return self._nonce
async def subscribe(self, params) -> bool:
emit_data = {
"method": "eth_subscribe",
"params": params
}
nonce = await self._send(emit_data)
raw_message = await self._client.recv()
if raw_message is not None:
resp = ujson.loads(raw_message)
if resp.get("id", None) == nonce:
self._node_address = resp.get("result")
return True
return False
async def _messages(self) -> AsyncIterable[Any]:
try:
while True:
try:
raw_msg_str: str = await asyncio.wait_for(self._client.recv(), self.MESSAGE_TIMEOUT)
yield raw_msg_str
except asyncio.TimeoutError:
try:
pong_waiter = await self._client.ping()
await asyncio.wait_for(pong_waiter, timeout=self.PING_TIMEOUT)
except asyncio.TimeoutError:
raise
except asyncio.TimeoutError:
self.logger().warning("WebSocket ping timed out. Going to reconnect...")
return
except ConnectionClosed:
return
finally:
await self.disconnect()
await self.connect()
await self.subscribe(["newHeads"])
async def fetch_new_blocks_loop(self):
while True:
try:
async for raw_message in self._messages():
message_json = ujson.loads(raw_message) if raw_message is not None else None
if message_json.get("method", None) == "eth_subscription":
subscription_result_params = message_json.get("params", None)
incoming_block = subscription_result_params.get("result", None) \
if subscription_result_params is not None else None
if incoming_block is not None:
with suppress(BlockNotFound):
new_block: AttributeDict = await self.call_async(self._w3.eth.getBlock,
incoming_block.get("hash"), True)
self._current_block_number = new_block.get("number")
self._block_cache[new_block.get("hash")] = new_block
self.trigger_event(NewBlocksWatcherEvent.NewBlocks, [new_block])
except asyncio.TimeoutError:
self.logger().network("Timed out fetching new block.", exc_info=True,
app_warning_msg="Timed out fetching new block. "
"Check wallet network connection")
except asyncio.CancelledError:
raise
except Exception as e:
self.logger().network(f"Error fetching new block: {e}", exc_info=True,
app_warning_msg="Error fetching new block. "
"Check wallet network connection")
await asyncio.sleep(30.0)
async def get_timestamp_for_block(self, block_hash: HexBytes, max_tries: Optional[int] = 10) -> int:
counter = 0
block: AttributeDict = None
if block_hash in self._block_cache.keys():
block = self._block_cache.get(block_hash)
else:
while block is None:
if counter == max_tries:
raise ValueError(f"Block hash {block_hash.hex()} does not exist.")
counter += 1
block = self._block_cache.get(block_hash)
await asyncio.sleep(0.5)
return block.get("timestamp")
| true | true |
1c2f7270fe582a03985d45be13dfa2f5b03ec50e | 3,445 | py | Python | utils/measure/light_controller/hue.py | KrzysztofHajdamowicz/homeassistant-powercalc | 374312e21d1c21b28984990442ec56fc52177f4e | [
"MIT"
] | 219 | 2021-06-12T20:55:57.000Z | 2022-03-30T07:56:43.000Z | utils/measure/light_controller/hue.py | KrzysztofHajdamowicz/homeassistant-powercalc | 374312e21d1c21b28984990442ec56fc52177f4e | [
"MIT"
] | 420 | 2021-06-09T20:22:03.000Z | 2022-03-31T15:35:45.000Z | utils/measure/light_controller/hue.py | KrzysztofHajdamowicz/homeassistant-powercalc | 374312e21d1c21b28984990442ec56fc52177f4e | [
"MIT"
] | 84 | 2021-06-09T18:18:03.000Z | 2022-03-29T09:28:06.000Z | from __future__ import annotations
import os
from phue import Bridge, PhueRegistrationException
from PyInquirer import Separator
from .controller import LightController, LightInfo
from .errors import LightControllerError, ModelNotDiscoveredError
NAME = "hue"
TYPE_LIGHT = "light"
TYPE_GROUP = "group"
class HueLightController(LightController):
def __init__(self, bridge_ip: str):
self.bridge = self.initialize_hue_bridge(bridge_ip)
def change_light_state(self, color_mode: str, on: bool = True, **kwargs):
kwargs["on"] = on
if self.is_group:
self.bridge.set_group(self.light_id, kwargs)
else:
self.bridge.set_light(self.light_id, kwargs)
def get_light_info(self) -> LightInfo:
if self.is_group:
model_id = self.find_group_model(self.light_id)
return LightInfo(
model_id=model_id,
)
# Individual light information
light = self.bridge.get_light(self.light_id)
lightinfo = LightInfo(
model_id=light["modelid"],
)
if "ct" in light["capabilities"]["control"]:
lightinfo.min_mired = light["capabilities"]["control"]["ct"]["min"]
lightinfo.max_mired = light["capabilities"]["control"]["ct"]["max"]
return lightinfo
def find_group_model(self, group_id: str) -> str:
model_ids = set()
for light_id in self.bridge.get_group(group_id, "lights"):
light = self.bridge.get_light(int(light_id))
model_id = light["modelid"]
model_ids.add(model_id)
if len(model_ids) == 0:
raise ModelNotDiscoveredError("Could not find a model id for the group")
if len(model_ids) > 1:
raise LightControllerError("The Hue group contains lights of multiple models, this is not supported")
return model_ids.pop()
def initialize_hue_bridge(self, bridge_ip: str) -> Bridge:
config_file_path = os.path.join(os.path.dirname(__file__), "../.persistent/.python_hue")
try:
bridge = Bridge(ip=bridge_ip, config_file_path=config_file_path)
except PhueRegistrationException as err:
print("Please click the link button on the bridge, than hit enter..")
input()
bridge = Bridge(ip=bridge_ip, config_file_path=config_file_path)
return bridge
def get_questions(self) -> list[dict]:
def get_light_list(answers):
light_list = []
for light in self.bridge.lights:
light_list.append(
{"value": f"{TYPE_LIGHT}:{light.light_id}", "name": light.name}
)
if answers["multiple_lights"]:
light_list.append(Separator())
for group in self.bridge.groups:
light_list.append(
{"value": f"{TYPE_GROUP}:{group.group_id}", "name": group.name}
)
return light_list
return [
{
"type": "list",
"name": "light",
"message": "Select the light?",
"choices": get_light_list,
},
]
def process_answers(self, answers):
light_type, light_id = answers["light"].split(":")
self.is_group = light_type == TYPE_GROUP
self.light_id = int(light_id)
| 33.77451 | 113 | 0.593033 | from __future__ import annotations
import os
from phue import Bridge, PhueRegistrationException
from PyInquirer import Separator
from .controller import LightController, LightInfo
from .errors import LightControllerError, ModelNotDiscoveredError
NAME = "hue"
TYPE_LIGHT = "light"
TYPE_GROUP = "group"
class HueLightController(LightController):
def __init__(self, bridge_ip: str):
self.bridge = self.initialize_hue_bridge(bridge_ip)
def change_light_state(self, color_mode: str, on: bool = True, **kwargs):
kwargs["on"] = on
if self.is_group:
self.bridge.set_group(self.light_id, kwargs)
else:
self.bridge.set_light(self.light_id, kwargs)
def get_light_info(self) -> LightInfo:
if self.is_group:
model_id = self.find_group_model(self.light_id)
return LightInfo(
model_id=model_id,
)
light = self.bridge.get_light(self.light_id)
lightinfo = LightInfo(
model_id=light["modelid"],
)
if "ct" in light["capabilities"]["control"]:
lightinfo.min_mired = light["capabilities"]["control"]["ct"]["min"]
lightinfo.max_mired = light["capabilities"]["control"]["ct"]["max"]
return lightinfo
def find_group_model(self, group_id: str) -> str:
model_ids = set()
for light_id in self.bridge.get_group(group_id, "lights"):
light = self.bridge.get_light(int(light_id))
model_id = light["modelid"]
model_ids.add(model_id)
if len(model_ids) == 0:
raise ModelNotDiscoveredError("Could not find a model id for the group")
if len(model_ids) > 1:
raise LightControllerError("The Hue group contains lights of multiple models, this is not supported")
return model_ids.pop()
def initialize_hue_bridge(self, bridge_ip: str) -> Bridge:
config_file_path = os.path.join(os.path.dirname(__file__), "../.persistent/.python_hue")
try:
bridge = Bridge(ip=bridge_ip, config_file_path=config_file_path)
except PhueRegistrationException as err:
print("Please click the link button on the bridge, than hit enter..")
input()
bridge = Bridge(ip=bridge_ip, config_file_path=config_file_path)
return bridge
def get_questions(self) -> list[dict]:
def get_light_list(answers):
light_list = []
for light in self.bridge.lights:
light_list.append(
{"value": f"{TYPE_LIGHT}:{light.light_id}", "name": light.name}
)
if answers["multiple_lights"]:
light_list.append(Separator())
for group in self.bridge.groups:
light_list.append(
{"value": f"{TYPE_GROUP}:{group.group_id}", "name": group.name}
)
return light_list
return [
{
"type": "list",
"name": "light",
"message": "Select the light?",
"choices": get_light_list,
},
]
def process_answers(self, answers):
light_type, light_id = answers["light"].split(":")
self.is_group = light_type == TYPE_GROUP
self.light_id = int(light_id)
| true | true |
1c2f73592f4593515d1f7796a421c2402e8aaa17 | 176 | py | Python | src/setup.py | mountain/self | 189e00e810d4d719fa6b37b400eef17d2521a64c | [
"MIT"
] | 5 | 2019-12-30T16:18:06.000Z | 2022-03-22T17:36:09.000Z | src/setup.py | mountain/self | 189e00e810d4d719fa6b37b400eef17d2521a64c | [
"MIT"
] | 2 | 2019-12-30T10:09:15.000Z | 2020-01-03T02:48:18.000Z | src/setup.py | mountain/self | 189e00e810d4d719fa6b37b400eef17d2521a64c | [
"MIT"
] | 3 | 2019-12-30T06:10:04.000Z | 2021-04-23T09:37:48.000Z | # -*- coding: utf-8 -*-
from setuptools import setup
setup(name='gym_selfx',
version='0.0.1',
install_requires=['gym'] # And any other dependencies foo needs
)
| 17.6 | 70 | 0.642045 |
from setuptools import setup
setup(name='gym_selfx',
version='0.0.1',
install_requires=['gym']
)
| true | true |
1c2f736dee638ce5325945c68f2abacc6aa6182c | 2,036 | py | Python | haystack/utils/geo.py | cbows/django-haystack | 80c154b7b11fdcf99dd2ef0e82342ed13e26053a | [
"BSD-3-Clause"
] | 2,021 | 2015-02-06T07:45:08.000Z | 2022-03-30T12:26:39.000Z | haystack/utils/geo.py | cbows/django-haystack | 80c154b7b11fdcf99dd2ef0e82342ed13e26053a | [
"BSD-3-Clause"
] | 787 | 2015-02-03T20:06:04.000Z | 2022-03-30T09:00:38.000Z | haystack/utils/geo.py | cbows/django-haystack | 80c154b7b11fdcf99dd2ef0e82342ed13e26053a | [
"BSD-3-Clause"
] | 878 | 2015-02-04T15:29:50.000Z | 2022-03-28T16:51:44.000Z | from haystack.constants import WGS_84_SRID
from haystack.exceptions import SpatialError
def ensure_geometry(geom):
"""
Makes sure the parameter passed in looks like a GEOS ``GEOSGeometry``.
"""
if not hasattr(geom, "geom_type"):
raise SpatialError("Point '%s' doesn't appear to be a GEOS geometry." % geom)
return geom
def ensure_point(geom):
"""
Makes sure the parameter passed in looks like a GEOS ``Point``.
"""
ensure_geometry(geom)
if geom.geom_type != "Point":
raise SpatialError("Provided geometry '%s' is not a 'Point'." % geom)
return geom
def ensure_wgs84(point):
"""
Ensures the point passed in is a GEOS ``Point`` & returns that point's
data is in the WGS-84 spatial reference.
"""
ensure_point(point)
# Clone it so we don't alter the original, in case they're using it for
# something else.
new_point = point.clone()
if not new_point.srid:
# It has no spatial reference id. Assume WGS-84.
new_point.srid = WGS_84_SRID
elif new_point.srid != WGS_84_SRID:
# Transform it to get to the right system.
new_point.transform(WGS_84_SRID)
return new_point
def ensure_distance(dist):
"""
Makes sure the parameter passed in is a 'Distance' object.
"""
try:
# Since we mostly only care about the ``.km`` attribute, make sure
# it's there.
dist.km
except AttributeError:
raise SpatialError("'%s' does not appear to be a 'Distance' object." % dist)
return dist
def generate_bounding_box(bottom_left, top_right):
"""
Takes two opposite corners of a bounding box (order matters!) & generates
a two-tuple of the correct coordinates for the bounding box.
The two-tuple is in the form ``((min_lat, min_lng), (max_lat, max_lng))``.
"""
west, lat_1 = bottom_left.coords
east, lat_2 = top_right.coords
min_lat, max_lat = min(lat_1, lat_2), max(lat_1, lat_2)
return ((min_lat, west), (max_lat, east))
| 28.277778 | 85 | 0.655697 | from haystack.constants import WGS_84_SRID
from haystack.exceptions import SpatialError
def ensure_geometry(geom):
if not hasattr(geom, "geom_type"):
raise SpatialError("Point '%s' doesn't appear to be a GEOS geometry." % geom)
return geom
def ensure_point(geom):
ensure_geometry(geom)
if geom.geom_type != "Point":
raise SpatialError("Provided geometry '%s' is not a 'Point'." % geom)
return geom
def ensure_wgs84(point):
ensure_point(point)
# Clone it so we don't alter the original, in case they're using it for
# something else.
new_point = point.clone()
if not new_point.srid:
# It has no spatial reference id. Assume WGS-84.
new_point.srid = WGS_84_SRID
elif new_point.srid != WGS_84_SRID:
# Transform it to get to the right system.
new_point.transform(WGS_84_SRID)
return new_point
def ensure_distance(dist):
try:
# Since we mostly only care about the ``.km`` attribute, make sure
# it's there.
dist.km
except AttributeError:
raise SpatialError("'%s' does not appear to be a 'Distance' object." % dist)
return dist
def generate_bounding_box(bottom_left, top_right):
west, lat_1 = bottom_left.coords
east, lat_2 = top_right.coords
min_lat, max_lat = min(lat_1, lat_2), max(lat_1, lat_2)
return ((min_lat, west), (max_lat, east))
| true | true |
1c2f75d353f6511ae9439906228dcaac9f661c2c | 316 | py | Python | catkin_ws/src/ominibot_car/setup.py | CIRCUSPi/ROSKY | d0328e19cb65416b9353a1faabc725c1ac01c9f6 | [
"MIT"
] | 3 | 2021-08-03T07:08:24.000Z | 2021-09-12T14:33:41.000Z | catkin_ws/src/ominibot_car/setup.py | kjoelovelife/ROSKY | d0328e19cb65416b9353a1faabc725c1ac01c9f6 | [
"MIT"
] | 1 | 2021-09-03T13:27:33.000Z | 2021-09-06T03:28:16.000Z | catkin_ws/src/ominibot_car/setup.py | kjoelovelife/ROSKY | d0328e19cb65416b9353a1faabc725c1ac01c9f6 | [
"MIT"
] | 4 | 2020-12-28T08:20:07.000Z | 2021-08-10T06:55:30.000Z | ## ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
# fetch values from package.xml
setup_args = generate_distutils_setup(
packages=['ominibot_car'],
package_dir={'': 'include'},
)
setup(**setup_args)
| 24.307692 | 61 | 0.765823 | tup import generate_distutils_setup
setup_args = generate_distutils_setup(
packages=['ominibot_car'],
package_dir={'': 'include'},
)
setup(**setup_args)
| true | true |
1c2f760f7cf4ccd458cfcf6a6dae6d4d18c5a94f | 24,454 | py | Python | pysph/tools/geometry.py | suhasjains/pysph | df4e882ac790b2790ccd23fb1aaa66a049c2f7b1 | [
"BSD-3-Clause"
] | 3 | 2021-01-06T03:01:18.000Z | 2022-03-21T03:02:55.000Z | docker/water/sph/tags/pysph/pysph/tools/geometry.py | liujiamingustc/phd | 4f815a738abad43531d02ac66f5bd0d9a1def52a | [
"Apache-2.0"
] | 1 | 2018-11-17T15:39:11.000Z | 2018-11-17T15:39:11.000Z | docker/water/sph/tags/pysph/pysph/tools/geometry.py | liujiamingustc/phd | 4f815a738abad43531d02ac66f5bd0d9a1def52a | [
"Apache-2.0"
] | 1 | 2020-06-21T08:42:07.000Z | 2020-06-21T08:42:07.000Z | from __future__ import division
import numpy as np
import copy
from pysph.base.nnps import LinkedListNNPS
from pysph.base.utils import get_particle_array, get_particle_array_wcsph
from pyzoltan.core.carray import UIntArray
from numpy.linalg import norm
def distance(point1, point2=np.array([0.0, 0.0, 0.0])):
return np.sqrt(sum((point1 - point2) * (point1 - point2)))
def distance_2d(point1, point2=np.array([0.0, 0.0])):
return np.sqrt(sum((point1 - point2) * (point1 - point2)))
def matrix_exp(matrix):
"""
Exponential of a matrix.
Finds the exponential of a square matrix of any order using the
formula exp(A) = I + (A/1!) + (A**2/2!) + (A**3/3!) + .........
Parameters
----------
matrix : numpy matrix of order nxn (square) filled with numbers
Returns
-------
result : numpy matrix of the same order
Examples
--------
>>>A = np.matrix([[1, 2],[2, 3]])
>>>matrix_exp(A)
matrix([[19.68002699, 30.56514746],
[30.56514746, 50.24517445]])
>>>B = np.matrix([[0, 0],[0, 0]])
>>>matrix_exp(B)
matrix([[1., 0.],
[0., 1.]])
"""
matrix = np.asmatrix(matrix)
tol = 1.0e-16
result = matrix**(0)
n = 1
condition = True
while condition:
adding = matrix**(n) / (1.0 * np.math.factorial(n))
result += adding
residue = np.sqrt(np.sum(np.square(adding)) /
np.sum(np.square(result)))
condition = (residue > tol)
n += 1
return result
def extrude(x, y, dx=0.01, extrude_dist=1.0, z_center=0.0):
"""
Extrudes a 2d geometry.
Takes a 2d geometry with x, y values and extrudes it in z direction by the
amount extrude_dist with z_center as center
Parameters
----------
x : 1d array object with numbers
y : 1d array object with numbers
dx : a number
extrude_dist : a number
z_center : a number
x, y should be of the same length and no x, y pair should be the same
Returns
-------
x_new : 1d numpy array object with new x values
y_new : 1d numpy array object with new y values
z_new : 1d numpy array object with z values
x_new, y_new, z_new are of the same length
Examples
--------
>>>x = np.array([0.0])
>>>y = np.array([0.0])
>>>extrude(x, y, 0.1, 0.2, 0.0)
(array([ 0., 0., 0.]),
array([ 0., 0., 0.]),
array([-0.1, 0., 0.1]))
"""
z = np.arange(z_center - extrude_dist / 2.,
z_center + (extrude_dist + dx) / 2., dx)
x_new = np.tile(np.asarray(x), len(z))
y_new = np.tile(np.asarray(y), len(z))
z_new = np.repeat(z, len(x))
return x_new, y_new, z_new
def translate(x, y, z, x_translate=0.0, y_translate=0.0, z_translate=0.0):
"""
Translates set of points in 3d cartisean space.
Takes set of points and translates each and every point by some
mentioned amount in all the 3 directions.
Parameters
----------
x : 1d array object with numbers
y : 1d array object with numbers
z : 1d array object with numbers
x_translate : a number
y_translate : a number
z_translate : a number
Returns
-------
x_new : 1d numpy array object with new x values
y_new : 1d numpy array object with new y values
z_new : 1d numpy array object with new z values
Examples
--------
>>>x = np.array([0.0, 1.0, 2.0])
>>>y = np.array([-1.0, 0.0, 1.5])
>>>z = np.array([0.5, -1.5, 0.0])
>>>translate(x, y, z, 1.0, -0.5, 2.0)
(array([ 1., 2., 3.]), array([-1.5, -0.5, 1.]), array([2.5, 0.5, 2.]))
"""
x_new = np.asarray(x) + x_translate
y_new = np.asarray(y) + y_translate
z_new = np.asarray(z) + z_translate
return x_new, y_new, z_new
def rotate(x, y, z, axis=np.array([0.0, 0.0, 1.0]), angle=90.0):
"""
Rotates set of points in 3d cartisean space.
Takes set of points and rotates each point with some angle w.r.t
a mentioned axis.
Parameters
----------
x : 1d array object with numbers
y : 1d array object with numbers
z : 1d array object with numbers
axis : 1d array with 3 numbers
angle(in degrees) : number
Returns
-------
x_new : 1d numpy array object with new x values
y_new : 1d numpy array object with new y values
z_new : 1d numpy array object with new z values
Examples
--------
>>>x = np.array([0.0, 1.0, 2.0])
>>>y = np.array([-1.0, 0.0, 1.5])
>>>z = np.array([0.5, -1.5, 0.0])
>>>axis = np.array([0.0, 0.0, 1.0])
>>>rotate(x, y, z, axis, 90.0)
(array([ 0.29212042, -0.5, 2.31181936]),
array([ -0.5, 3.31047738, 11.12095476]),
array([-0.5, -0.5, 3.5]))
"""
theta = angle * np.pi / 180.0
unit_vector = np.asarray(axis) / norm(np.asarray(axis))
matrix = np.cross(np.eye(3), unit_vector * theta)
rotation_matrix = matrix_exp(np.matrix(matrix))
new_points = []
for xi, yi, zi in zip(np.asarray(x), np.asarray(y), np.asarray(z)):
point = np.array([xi, yi, zi])
new = np.dot(rotation_matrix, point)
new_points.append(np.asarray(new)[0])
new_points = np.array(new_points)
x_new = new_points[:, 0]
y_new = new_points[:, 1]
z_new = new_points[:, 2]
return x_new, y_new, z_new
def get_2d_wall(dx=0.01, center=np.array([0.0, 0.0]), length=1.0,
                num_layers=1, up=True):
    """
    Generates a 2d wall parallel to the x-axis.

    Use ``rotate`` to orient the wall along another axis, or ``extrude``
    to turn it into a 3d wall.

    Parameters
    ----------
    dx : a number which is the spacing required
    center : 1d array like object which is the center of wall
    length : a number which is the length of the wall
    num_layers : Number of layers for the wall
    up : True if the extra layers go above the base wall, False for below

    Returns
    -------
    x : 1d numpy array with x coordinates of the wall
    y : 1d numpy array with y coordinates of the wall
    """
    base_x = np.arange(-length / 2., length / 2. + dx, dx) + center[0]
    sign = 1 if up else -1
    # One row of y values per layer, each offset by a multiple of dx.
    layer_ys = [np.full_like(base_x, center[1] + sign * k * dx)
                for k in range(num_layers)]
    return np.tile(base_x, num_layers), np.concatenate(layer_ys)
def get_2d_tank(dx=0.001, base_center=np.array([0.0, 0.0]), length=1.0,
                height=1.0, num_layers=1, outside=True):
    """
    Generates an open 2d tank: a base parallel to the x-axis plus two side
    walls parallel to the y-axis. Use ``rotate`` to reorient it and
    ``extrude`` to build a 3d tank.

    Parameters
    ----------
    dx : a number which is the spacing required
    base_center : 1d array like object which is the center of base wall
    length : a number which is the length of the base
    height : a number which is the length of the side wall
    num_layers : Number of layers for the tank
    outside : if True the extra layers grow outward, otherwise inward

    Returns
    -------
    x : 1d numpy array with x coordinates of the tank
    y : 1d numpy array with y coordinates of the tank
    """
    wall_y = np.arange(dx, height + dx, dx)
    base_x = np.arange(-length / 2., length / 2. + dx, dx)
    # Particle order: left wall, base, right wall.
    x = np.concatenate([np.full_like(wall_y, -length / 2.),
                        base_x,
                        np.full_like(wall_y, length / 2.)])
    y = np.concatenate([wall_y, np.zeros_like(base_x), wall_y])
    sign = 1 if outside else -1
    # Each extra layer is a slightly larger (or smaller) tank shifted down.
    for layer in range(1, num_layers):
        shift = layer * dx * sign
        x_l, y_l = get_2d_tank(dx, np.array([0.0, -shift]),
                               length + 2.0 * shift, height + shift)
        x = np.concatenate([x, x_l])
        y = np.concatenate([y, y_l])
    return x + base_center[0], y + base_center[1]
def get_2d_circle(dx=0.01, r=0.5, center=np.array([0.0, 0.0])):
    """
    Generates a completely filled 2d circular area.

    Parameters
    ----------
    dx : a number which is the spacing required
    r : a number which is the radius of the circle
    center : 1d array like object which is the center of the circle

    Returns
    -------
    x : 1d numpy array with x coordinates of the circle particles
    y : 1d numpy array with y coordinates of the circle particles
    """
    N = int(2.0 * r / dx) + 1
    x, y = np.mgrid[-r:r:N * 1j, -r:r:N * 1j]
    x, y = np.ravel(x), np.ravel(y)
    condition = (x * x + y * y <= r * r)
    x, y = x[condition], y[condition]
    # Bug fix: y must be shifted by center[1]; the original shifted both
    # coordinates by center[0], mislocating circles with off-axis centers.
    return x + center[0], y + center[1]
def get_2d_hollow_circle(dx=0.01, r=1.0, center=np.array([0.0, 0.0]),
                         num_layers=2, inside=True):
    """
    Generates a hollow 2d circle with some number of layers either on the
    inside or on the outside of the body which is taken as an argument.

    Parameters
    ----------
    dx : a number which is the spacing required
    r : a number which is the radius of the circle
    center : 1d array like object which is the center of the circle
    num_layers : a number (int)
    inside : boolean (True or False). If this is True then the layers
             are generated inside the circle

    Returns
    -------
    x : 1d numpy array with x coordinates of the circle particles
    y : 1d numpy array with y coordinates of the circle particles
    """
    r_grid = r + dx * num_layers
    N = int(2.0 * r_grid / dx) + 1
    x, y = np.mgrid[-r_grid:r_grid:N * 1j, -r_grid:r_grid:N * 1j]
    x, y = np.ravel(x), np.ravel(y)
    # Keep only points in the annulus between r and r -/+ num_layers * dx.
    if inside:
        cond1 = (x * x + y * y <= r * r)
        cond2 = (x * x + y * y >= (r - num_layers * dx)**2)
    else:
        cond1 = (x * x + y * y >= r * r)
        cond2 = (x * x + y * y <= (r + num_layers * dx)**2)
    cond = cond1 & cond2
    x, y = x[cond], y[cond]
    # Bug fix: y must be shifted by center[1]; the original shifted both
    # coordinates by center[0], mislocating circles with off-axis centers.
    return x + center[0], y + center[1]
def get_3d_hollow_cylinder(dx=0.01, r=0.5, length=1.0,
                           center=np.array([0.0, 0.0, 0.0]),
                           num_layers=2, inside=True):
    """
    Generates a 3d hollow cylinder with a closed base: the extrusion of a
    hollow 2d circle along z, plus a filled disc at the bottom.

    Parameters
    ----------
    dx : a number which is the spacing required
    r : a number which is the radius of the cylinder
    length : a number which is the length of the cylinder
    center : 1d array like object which is the center of the cylinder
    num_layers : a number (int)
    inside : boolean (True or False). If this is True then the layers
             are generated inside the cylinder

    Returns
    -------
    x : 1d numpy array with x coordinates of the cylinder particles
    y : 1d numpy array with y coordinates of the cylinder particles
    z : 1d numpy array with z coordinates of the cylinder particles
    """
    shell_x, shell_y = get_2d_hollow_circle(dx, r, center[:2], num_layers,
                                            inside)
    # The shell starts dx/2 above the base and spans length - dx.
    x, y, z = extrude(shell_x, shell_y, dx, length - dx, center[2] + dx / 2.)
    cap_x, cap_y = get_2d_circle(dx, r, center[:2])
    cap_z = np.full_like(cap_x, center[2] - length / 2.)
    return (np.concatenate([x, cap_x]),
            np.concatenate([y, cap_y]),
            np.concatenate([z, cap_z]))
def get_2d_block(dx=0.01, length=1.0, height=1.0, center=np.array([0., 0.])):
    """
    Generates a filled 2d rectangular block of particles whose sides are
    parallel to the coordinate axes.

    Parameters
    ----------
    dx : a number which is the spacing required
    length : a number which is the length of the block (x direction)
    height : a number which is the height of the block (y direction)
    center : 1d array like object which is the center of the block

    Returns
    -------
    x : 1d numpy array with x coordinates of the block particles
    y : 1d numpy array with y coordinates of the block particles
    """
    nx = int(length / dx) + 1
    ny = int(height / dx) + 1
    xs = np.linspace(-length / 2., length / 2., nx)
    ys = np.linspace(-height / 2., height / 2., ny)
    # 'ij' indexing reproduces np.mgrid's ordering after ravel.
    grid_x, grid_y = np.meshgrid(xs, ys, indexing='ij')
    return grid_x.ravel() + center[0], grid_y.ravel() + center[1]
def get_3d_sphere(dx=0.01, r=0.5, center=np.array([0.0, 0.0, 0.0])):
    """
    Generates a filled 3d sphere of particles.

    Parameters
    ----------
    dx : a number which is the spacing required
    r : a number which is the radius of the sphere
    center : 1d array like object which is the center of the sphere

    Returns
    -------
    x : 1d numpy array with x coordinates of the sphere particles
    y : 1d numpy array with y coordinates of the sphere particles
    z : 1d numpy array with z coordinates of the sphere particles
    """
    n = int(2.0 * r / dx) + 1
    axis_pts = np.linspace(-r, r, n)
    # 'ij' indexing reproduces np.mgrid's ordering after ravel.
    gx, gy, gz = np.meshgrid(axis_pts, axis_pts, axis_pts, indexing='ij')
    gx, gy, gz = gx.ravel(), gy.ravel(), gz.ravel()
    keep = gx * gx + gy * gy + gz * gz <= r * r
    return gx[keep] + center[0], gy[keep] + center[1], gz[keep] + center[2]
def get_3d_block(dx=0.01, length=1.0, height=1.0, depth=1.0,
                 center=np.array([0., 0., 0.])):
    """
    Generates a filled 3d block of particles; length, height and depth run
    along the x, y and z axes respectively.

    Parameters
    ----------
    dx : a number which is the spacing required
    length : a number which is the length of the block
    height : a number which is the height of the block
    depth : a number which is the depth of the block
    center : 1d array like object which is the center of the block

    Returns
    -------
    x : 1d numpy array with x coordinates of the block particles
    y : 1d numpy array with y coordinates of the block particles
    z : 1d numpy array with z coordinates of the block particles
    """
    nx = int(length / dx) + 1
    ny = int(height / dx) + 1
    nz = int(depth / dx) + 1
    xs = np.linspace(-length / 2., length / 2., nx)
    ys = np.linspace(-height / 2., height / 2., ny)
    zs = np.linspace(-depth / 2., depth / 2., nz)
    # 'ij' indexing reproduces np.mgrid's ordering after ravel.
    gx, gy, gz = np.meshgrid(xs, ys, zs, indexing='ij')
    return (gx.ravel() + center[0], gy.ravel() + center[1],
            gz.ravel() + center[2])
def get_4digit_naca_airfoil(dx=0.01, airfoil='0012', c=1.0):
    """
    Generates a 4 digit series NACA airfoil. For a 4 digit series airfoil,
    the first digit is the (maximum camber / chord) * 100, second digit is
    (location of maximum camber / chord) * 10 and the third and fourth digits
    are the (maximum thickness / chord) * 100. The particles generated
    using this function will form a solid 2d airfoil.

    Parameters
    ----------
    dx : a number which is the spacing required
    airfoil : a string of 4 characters which is the airfoil name
    c : a number which is the chord of the airfoil

    Returns
    -------
    x : 1d numpy array with x coordinates of the airfoil particles
    y : 1d numpy array with y coordinates of the airfoil particles

    References
    ----------
    https://en.wikipedia.org/wiki/NACA_airfoil
    """
    n = int(c / dx) + 1
    x, y = np.mgrid[0:c:n * 1j, -c / 2.:c / 2.:n * 1j]
    x = np.ravel(x)
    y = np.ravel(y)
    x_naca = []
    y_naca = []
    # Maximum thickness: last two digits are (thickness / chord) * 100.
    t = float(airfoil[2:]) * 0.01 * c

    def half_thickness(xi):
        # NACA 4-digit half-thickness distribution at chordwise station xi.
        # Previously this polynomial was duplicated in both branches below.
        xc = xi / c
        return 5.0 * t * (0.2969 * np.sqrt(xc) - 0.1260 * xc -
                          0.3516 * (xc**2.) + 0.2843 * (xc**3.) -
                          0.1015 * (xc**4.))

    if airfoil[:2] == '00':
        # Symmetric airfoil: keep grid points inside the thickness envelope.
        for xi, yi in zip(x, y):
            if abs(yi) <= half_thickness(xi):
                x_naca.append(xi)
                y_naca.append(yi)
    else:
        m = 0.01 * float(airfoil[0])   # maximum camber / chord
        p = 0.1 * float(airfoil[1])    # location of maximum camber / chord
        for xi, yi in zip(x, y):
            yt = half_thickness(xi)
            # Mean camber line and its slope, ahead of / behind max camber.
            if xi <= p * c:
                yc = (m / (p * p)) * (2. * p * (xi / c) - (xi / c)**2.)
                dydx = (2. * m / (p * p)) * (p - xi / c) / c
            else:
                yc = (m / ((1. - p) * (1. - p))) * \
                    (1. - 2. * p + 2. * p * (xi / c) - (xi / c)**2.)
                dydx = (2. * m / ((1. - p) * (1. - p))) * (p - xi / c) / c
            theta = np.arctan(dydx)
            if yi >= 0.0:
                # Upper surface: keep points below the upper envelope.
                yu = yc + yt * np.cos(theta)
                if yi <= yu:
                    x_naca.append(xi - yt * np.sin(theta))
                    y_naca.append(yi)
            else:
                # Lower surface: keep points above the lower envelope.
                yl = yc - yt * np.cos(theta)
                if yi >= yl:
                    x_naca.append(xi + yt * np.sin(theta))
                    y_naca.append(yi)
    return np.array(x_naca), np.array(y_naca)
def _get_m_k(series):
if series == '210':
return 0.058, 361.4
elif series == '220':
return 0.126, 51.64
elif series == '230':
return 0.2025, 15.957
elif series == '240':
return 0.290, 6.643
elif series == '250':
return 0.391, 3.23
elif series == '221':
return 0.130, 51.99
elif series == '231':
return 0.217, 15.793
elif series == '241':
return 0.318, 6.52
elif series == '251':
return 0.441, 3.191
def get_5digit_naca_airfoil(dx=0.01, airfoil='23112', c=1.0):
    """
    Generates a 5 digit series NACA airfoil. For a 5 digit series airfoil,
    the first digit is the design lift coefficient * 20 / 3, second digit is
    (location of maximum camber / chord) * 20, third digit indicates the
    reflexitivity of the camber and the fourth and fifth digits are the
    (maximum thickness / chord) * 100. The particles generated using this
    function will form a solid 2d airfoil.
    Parameters
    ----------
    dx : a number which is the spacing required
    airfoil : a string of 5 characters which is the airfoil name
    c : a number which is the chord of the airfoil
    Returns
    -------
    x : 1d numpy array with x coordinates of the airfoil particles
    y : 1d numpy array with y coordinates of the airfoil particles
    References
    ----------
    https://en.wikipedia.org/wiki/NACA_airfoil
    http://www.aerospaceweb.org/question/airfoils/q0041.shtml
    """
    # Candidate particles: a rectangular grid covering the chord in x and
    # half a chord above/below in y; points outside the foil are discarded.
    n = int(c / dx) + 1
    x, y = np.mgrid[0:c:n * 1j, -c / 2.:c / 2.:n * 1j]
    x = np.ravel(x)
    y = np.ravel(y)
    x_naca = []
    y_naca = []
    # Maximum thickness: last two digits are (thickness / chord) * 100.
    t = 0.01 * float(airfoil[3:])
    # Camber-line constants (m, k1) looked up from the leading 3 digits.
    series = airfoil[:3]
    m, k = _get_m_k(series)
    for xi, yi in zip(x, y):
        # Half-thickness distribution at chordwise station xi.
        yt = 5.0 * t * (0.2969 * np.sqrt(xi / c) - 0.1260 * (xi / c) -
                        0.3516 * ((xi / c)**2.) + 0.2843 * ((xi / c)**3.)
                        - 0.1015 * ((xi / c)**4.))
        xn = xi / c
        # Mean camber line yc and its slope dydx: cubic ahead of the
        # max-camber location m, linear behind it.
        if xn <= m:
            yc = c * (k / 6.) * (xn**3. - 3. * m *
                                 xn * xn + m * m * (3. - m) * xn)
            dydx = (k / 6.) * (3. * xn * xn - 6. * m * xn + m * m * (3. - m))
        else:
            yc = c * (k * (m**3.) / 6.) * (1. - xn)
            dydx = -(k * (m**3.) / 6.)
        theta = np.arctan(dydx)
        if yi >= 0.0:
            # Upper surface: keep points below the upper envelope; the x
            # coordinate is shifted normal to the camber line.
            yu = yc + yt * np.cos(theta)
            if yi <= yu:
                xu = xi - yt * np.sin(theta)
                x_naca.append(xu)
                y_naca.append(yi)
        else:
            # Lower surface: keep points above the lower envelope.
            yl = yc - yt * np.cos(theta)
            if yi >= yl:
                xl = xi + yt * np.sin(theta)
                x_naca.append(xl)
                y_naca.append(yi)
    x_naca = np.array(x_naca)
    y_naca = np.array(y_naca)
    return x_naca, y_naca
def get_naca_wing(dx=0.01, airfoil='0012', span=1.0, chord=1.0):
    """
    Generates a wing using a NACA 4 or 5 digit series airfoil. This will
    generate only a rectangular wing.

    Parameters
    ----------
    dx : a number which is the spacing required
    airfoil : a string of 4 or 5 characters which is the airfoil name
    span : a number which is the span of the wing
    chord : a number which is the chord of the wing

    Returns
    -------
    x : 1d numpy array with x coordinates of the airfoil particles
    y : 1d numpy array with y coordinates of the airfoil particles
    z : 1d numpy array with z coordinates of the airfoil particles

    Raises
    ------
    ValueError : if the airfoil name is not 4 or 5 characters long
        (previously this produced a confusing NameError on x and y).
    """
    if len(airfoil) == 4:
        x, y = get_4digit_naca_airfoil(dx, airfoil, chord)
    elif len(airfoil) == 5:
        x, y = get_5digit_naca_airfoil(dx, airfoil, chord)
    else:
        raise ValueError('airfoil name must have 4 or 5 digits, got %r'
                         % (airfoil,))
    # Extrude the 2d section along z to build the rectangular wing.
    return extrude(x, y, dx, span)
def find_overlap_particles(fluid_parray, solid_parray, dx_solid, dim=3):
    """Return indices of particles in ``fluid_parray`` that lie within
    ``dx_solid`` of some particle of ``solid_parray``.

    Because everything within the dx_solid vicinity is matched, particles
    sitting right at the outer surface of the solid may also be returned.
    The particle arrays should at least contain x, y and h values for a 2d
    case and at least x, y, z and h values for a 3d case.

    Parameters
    ----------
    fluid_parray : a pysph particle array object
    solid_parray : a pysph particle array object
    dx_solid : a number which is the dx of the second particle array
    dim : dimensionality of the problem

    Returns
    -------
    list of particle indices to remove from the first array.
    """
    fx, fy, fz = fluid_parray.x, fluid_parray.y, fluid_parray.z
    sx, sy, sz = solid_parray.x, solid_parray.y, solid_parray.z
    if dim == 2:
        # Ignore any z data in 2d; distances are computed in the plane.
        fz = np.zeros_like(fx)
        sz = np.zeros_like(sx)
    nnps = LinkedListNNPS(dim, [fluid_parray, solid_parray])
    # Small relative slack so exact-dx_solid neighbors are not matched.
    threshold = dx_solid * (1.0 - 1.0e-07)
    to_remove = []
    for i in range(len(fx)):
        nbrs = UIntArray()
        # Neighbors of fluid particle i (dest array 1) in the solid (src 0).
        nnps.get_nearest_particles(1, 0, i, nbrs)
        candidates = nbrs.get_npy_array()
        if len(candidates) == 0:
            continue
        point = np.array([fx[i], fy[i], fz[i]])
        nearest = min(distance(point, [sx[j], sy[j], sz[j]])
                      for j in candidates)
        if nearest < threshold:
            to_remove.append(i)
    return to_remove
def remove_overlap_particles(fluid_parray, solid_parray, dx_solid, dim=3):
    """
    This function will take 2 particle arrays as input and will remove all
    the particles of the first particle array which are in the vicinity of
    the particles from second particle array. The function will remove all
    the particles within the dx_solid vicinity so some particles are removed
    at the outer surface of the particles from the second particle array.
    The particle arrays should atleast contain x, y and h values for a 2d case
    and atleast x, y, z and h values for a 3d case
    Parameters
    ----------
    fluid_parray : a pysph particle array object
    solid_parray : a pysph particle array object
    dx_solid : a number which is the dx of the second particle array
    dim : dimensionality of the problem
    Returns
    -------
    None
    """
    # Matching criterion lives in find_overlap_particles; this mutates
    # fluid_parray in place and returns nothing.
    idx = find_overlap_particles(fluid_parray, solid_parray, dx_solid, dim)
    fluid_parray.remove_particles(idx)
def show_2d(points, **kw):
    """Plot two-dimensional geometry data with matplotlib.

    ``points`` is a tuple of x, y (and optionally z) arrays; extra keyword
    arguments are forwarded to ``plt.scatter``.
    """
    import matplotlib.pyplot as plt
    xs, ys = points[0], points[1]
    plt.scatter(xs, ys, **kw)
    plt.xlabel('X')
    plt.ylabel('Y')
def show_3d(points, **kw):
    """Show three-dimensional geometry data.

    (Docstring fix: this previously claimed to show two-dimensional data.)
    The `points` are a tuple of x, y, z values, the extra keyword arguments
    are passed along to the `mlab.points3d` function.
    """
    from mayavi import mlab
    mlab.points3d(points[0], points[1], points[2], **kw)
    mlab.axes(xlabel='X', ylabel='Y', zlabel='Z')
| 32.692513 | 79 | 0.567637 | from __future__ import division
import numpy as np
import copy
from pysph.base.nnps import LinkedListNNPS
from pysph.base.utils import get_particle_array, get_particle_array_wcsph
from pyzoltan.core.carray import UIntArray
from numpy.linalg import norm
def distance(point1, point2=np.array([0.0, 0.0, 0.0])):
    """Euclidean distance between two 3d points (default: the origin)."""
    delta = point1 - point2
    return np.sqrt(np.sum(delta * delta))
def distance_2d(point1, point2=np.array([0.0, 0.0])):
    """Euclidean distance between two 2d points (default: the origin)."""
    delta = point1 - point2
    return np.sqrt(np.sum(delta * delta))
def matrix_exp(matrix):
    """Compute the matrix exponential via its power series,
    exp(A) = sum_n A**n / n!.

    The series term is updated incrementally (term_n = term_{n-1} * A / n)
    instead of recomputing ``A**n / n!`` from scratch each iteration. This
    is cheaper and avoids ``np.math.factorial`` -- the ``np.math`` alias
    was removed in NumPy 2.0.

    Parameters
    ----------
    matrix : 2d square array like object

    Returns
    -------
    result : ``np.matrix`` holding exp(matrix) (same return type as the
        original implementation).
    """
    matrix = np.asmatrix(matrix)
    tol = 1.0e-16
    result = matrix**0          # identity of matching shape/dtype
    term = matrix**0
    n = 1
    condition = True
    while condition:
        term = term * matrix / n
        result += term
        # Relative magnitude of the newest term decides convergence.
        residue = np.sqrt(np.sum(np.square(term)) /
                          np.sum(np.square(result)))
        condition = (residue > tol)
        n += 1
    return result
def extrude(x, y, dx=0.01, extrude_dist=1.0, z_center=0.0):
    """Extrude a 2d particle set along the z-axis.

    Every (x, y) point is replicated at each z station; the stations span
    ``extrude_dist`` in steps of ``dx``, centred on ``z_center``.
    """
    z_vals = np.arange(z_center - extrude_dist / 2.,
                       z_center + (extrude_dist + dx) / 2., dx)
    x_arr = np.asarray(x)
    y_arr = np.asarray(y)
    n_stations = len(z_vals)
    return (np.tile(x_arr, n_stations), np.tile(y_arr, n_stations),
            np.repeat(z_vals, len(x_arr)))
def translate(x, y, z, x_translate=0.0, y_translate=0.0, z_translate=0.0):
    """Shift a set of 3d points by the given offset along each axis."""
    offsets = (x_translate, y_translate, z_translate)
    return tuple(np.asarray(coord) + delta
                 for coord, delta in zip((x, y, z), offsets))
def rotate(x, y, z, axis=np.array([0.0, 0.0, 1.0]), angle=90.0):
theta = angle * np.pi / 180.0
unit_vector = np.asarray(axis) / norm(np.asarray(axis))
matrix = np.cross(np.eye(3), unit_vector * theta)
rotation_matrix = matrix_exp(np.matrix(matrix))
new_points = []
for xi, yi, zi in zip(np.asarray(x), np.asarray(y), np.asarray(z)):
point = np.array([xi, yi, zi])
new = np.dot(rotation_matrix, point)
new_points.append(np.asarray(new)[0])
new_points = np.array(new_points)
x_new = new_points[:, 0]
y_new = new_points[:, 1]
z_new = new_points[:, 2]
return x_new, y_new, z_new
def get_2d_wall(dx=0.01, center=np.array([0.0, 0.0]), length=1.0,
num_layers=1, up=True):
x = np.arange(-length / 2., length / 2. + dx, dx) + center[0]
y = np.ones_like(x) * center[1]
value = 1 if up else -1
for i in range(1, num_layers):
y1 = np.ones_like(x) * center[1] + value * i * dx
y = np.concatenate([y, y1])
return np.tile(x, num_layers), y
def get_2d_tank(dx=0.001, base_center=np.array([0.0, 0.0]), length=1.0,
height=1.0, num_layers=1, outside=True):
base = np.arange(-length / 2., length / 2. + dx, dx) * (1.0 + 0.0j)
left_wall = np.arange(dx, height + dx, dx) * (1.0j) - length / 2.
right_wall = np.arange(dx, height + dx, dx) * (1.0j) + length / 2.
particles = np.concatenate([left_wall, base, right_wall])
x = particles.real
y = particles.imag
value = 1 if outside else -1
for i in range(1, num_layers):
x1, y1 = get_2d_tank(dx, np.array(
[0.0, -value * i * dx]), length + 2.0 * i * value * dx,
height + i * value * dx)
x = np.concatenate([x, x1])
y = np.concatenate([y, y1])
return x + base_center[0], y + base_center[1]
def get_2d_circle(dx=0.01, r=0.5, center=np.array([0.0, 0.0])):
    """Generate a completely filled 2d circular patch of particles.

    Grid points with spacing ``dx`` inside the circle of radius ``r``
    around ``center`` are returned as x and y coordinate arrays.
    """
    N = int(2.0 * r / dx) + 1
    x, y = np.mgrid[-r:r:N * 1j, -r:r:N * 1j]
    x, y = np.ravel(x), np.ravel(y)
    condition = (x * x + y * y <= r * r)
    x, y = x[condition], y[condition]
    # Bug fix: y must be offset by center[1], not center[0].
    return x + center[0], y + center[1]
def get_2d_hollow_circle(dx=0.01, r=1.0, center=np.array([0.0, 0.0]),
                         num_layers=2, inside=True):
    """Generate a hollow 2d circle of particles.

    ``num_layers`` rings of thickness ``dx`` are kept either just inside
    (``inside=True``) or just outside the circle of radius ``r``.
    """
    r_grid = r + dx * num_layers
    N = int(2.0 * r_grid / dx) + 1
    x, y = np.mgrid[-r_grid:r_grid:N * 1j, -r_grid:r_grid:N * 1j]
    x, y = np.ravel(x), np.ravel(y)
    if inside:
        cond1 = (x * x + y * y <= r * r)
        cond2 = (x * x + y * y >= (r - num_layers * dx)**2)
    else:
        cond1 = (x * x + y * y >= r * r)
        cond2 = (x * x + y * y <= (r + num_layers * dx)**2)
    cond = cond1 & cond2
    x, y = x[cond], y[cond]
    # Bug fix: y must be offset by center[1], not center[0].
    return x + center[0], y + center[1]
def get_3d_hollow_cylinder(dx=0.01, r=0.5, length=1.0,
center=np.array([0.0, 0.0, 0.0]),
num_layers=2, inside=True):
x_2d, y_2d = get_2d_hollow_circle(dx, r, center[:2], num_layers, inside)
x, y, z = extrude(x_2d, y_2d, dx, length - dx, center[2] + dx / 2.)
x_circle, y_circle = get_2d_circle(dx, r, center[:2])
z_circle = np.ones_like(x_circle) * (center[2] - length / 2.)
x = np.concatenate([x, x_circle])
y = np.concatenate([y, y_circle])
z = np.concatenate([z, z_circle])
return x, y, z
def get_2d_block(dx=0.01, length=1.0, height=1.0, center=np.array([0., 0.])):
n1 = int(length / dx) + 1
n2 = int(height / dx) + 1
x, y = np.mgrid[-length / 2.:length / 2.:n1 *
1j, -height / 2.:height / 2.:n2 * 1j]
x, y = np.ravel(x), np.ravel(y)
return x + center[0], y + center[1]
def get_3d_sphere(dx=0.01, r=0.5, center=np.array([0.0, 0.0, 0.0])):
N = int(2.0 * r / dx) + 1
x, y, z = np.mgrid[-r:r:N * 1j, -r:r:N * 1j, -r:r:N * 1j]
x, y, z = np.ravel(x), np.ravel(y), np.ravel(z)
cond = (x * x + y * y + z * z <= r * r)
x, y, z = x[cond], y[cond], z[cond]
return x + center[0], y + center[1], z + center[2]
def get_3d_block(dx=0.01, length=1.0, height=1.0, depth=1.0,
center=np.array([0., 0., 0.])):
n1 = int(length / dx) + 1
n2 = int(height / dx) + 1
n3 = int(depth / dx) + 1
x, y, z = np.mgrid[-length / 2.:length / 2.:n1 * 1j, -height /
2.:height / 2.:n2 * 1j, -depth / 2.:depth / 2.:n3 * 1j]
x, y, z = np.ravel(x), np.ravel(y), np.ravel(z)
return x + center[0], y + center[1], z + center[2]
def get_4digit_naca_airfoil(dx=0.01, airfoil='0012', c=1.0):
n = int(c / dx) + 1
x, y = np.mgrid[0:c:n * 1j, -c / 2.:c / 2.:n * 1j]
x = np.ravel(x)
y = np.ravel(y)
x_naca = []
y_naca = []
t = float(airfoil[2:]) * 0.01 * c
if airfoil[:2] == '00':
for xi, yi in zip(x, y):
yt = 5.0 * t * (0.2969 * np.sqrt(xi / c) - 0.1260 * (xi / c) -
0.3516 * ((xi / c)**2.) + 0.2843 * ((xi / c)**3.)
- 0.1015 * ((xi / c)**4.))
if abs(yi) <= yt:
x_naca.append(xi)
y_naca.append(yi)
else:
m = 0.01 * float(airfoil[0])
p = 0.1 * float(airfoil[1])
for xi, yi in zip(x, y):
yt = 5.0 * t * (0.2969 * np.sqrt(xi / c) - 0.1260 * (xi / c) -
0.3516 * ((xi / c)**2.) + 0.2843 * ((xi / c)**3.)
- 0.1015 * ((xi / c)**4.))
if xi <= p * c:
yc = (m / (p * p)) * (2. * p * (xi / c) - (xi / c)**2.)
dydx = (2. * m / (p * p)) * (p - xi / c) / c
else:
yc = (m / ((1. - p) * (1. - p))) * \
(1. - 2. * p + 2. * p * (xi / c) - (xi / c)**2.)
dydx = (2. * m / ((1. - p) * (1. - p))) * (p - xi / c) / c
theta = np.arctan(dydx)
if yi >= 0.0:
yu = yc + yt * np.cos(theta)
if yi <= yu:
xu = xi - yt * np.sin(theta)
x_naca.append(xu)
y_naca.append(yi)
else:
yl = yc - yt * np.cos(theta)
if yi >= yl:
xl = xi + yt * np.sin(theta)
x_naca.append(xl)
y_naca.append(yi)
x_naca = np.array(x_naca)
y_naca = np.array(y_naca)
return x_naca, y_naca
def _get_m_k(series):
if series == '210':
return 0.058, 361.4
elif series == '220':
return 0.126, 51.64
elif series == '230':
return 0.2025, 15.957
elif series == '240':
return 0.290, 6.643
elif series == '250':
return 0.391, 3.23
elif series == '221':
return 0.130, 51.99
elif series == '231':
return 0.217, 15.793
elif series == '241':
return 0.318, 6.52
elif series == '251':
return 0.441, 3.191
def get_5digit_naca_airfoil(dx=0.01, airfoil='23112', c=1.0):
n = int(c / dx) + 1
x, y = np.mgrid[0:c:n * 1j, -c / 2.:c / 2.:n * 1j]
x = np.ravel(x)
y = np.ravel(y)
x_naca = []
y_naca = []
t = 0.01 * float(airfoil[3:])
series = airfoil[:3]
m, k = _get_m_k(series)
for xi, yi in zip(x, y):
yt = 5.0 * t * (0.2969 * np.sqrt(xi / c) - 0.1260 * (xi / c) -
0.3516 * ((xi / c)**2.) + 0.2843 * ((xi / c)**3.)
- 0.1015 * ((xi / c)**4.))
xn = xi / c
if xn <= m:
yc = c * (k / 6.) * (xn**3. - 3. * m *
xn * xn + m * m * (3. - m) * xn)
dydx = (k / 6.) * (3. * xn * xn - 6. * m * xn + m * m * (3. - m))
else:
yc = c * (k * (m**3.) / 6.) * (1. - xn)
dydx = -(k * (m**3.) / 6.)
theta = np.arctan(dydx)
if yi >= 0.0:
yu = yc + yt * np.cos(theta)
if yi <= yu:
xu = xi - yt * np.sin(theta)
x_naca.append(xu)
y_naca.append(yi)
else:
yl = yc - yt * np.cos(theta)
if yi >= yl:
xl = xi + yt * np.sin(theta)
x_naca.append(xl)
y_naca.append(yi)
x_naca = np.array(x_naca)
y_naca = np.array(y_naca)
return x_naca, y_naca
def get_naca_wing(dx=0.01, airfoil='0012', span=1.0, chord=1.0):
if len(airfoil) == 4:
x, y = get_4digit_naca_airfoil(dx, airfoil, chord)
elif len(airfoil) == 5:
x, y = get_5digit_naca_airfoil(dx, airfoil, chord)
return extrude(x, y, dx, span)
def find_overlap_particles(fluid_parray, solid_parray, dx_solid, dim=3):
x = fluid_parray.x
x1 = solid_parray.x
y = fluid_parray.y
y1 = solid_parray.y
z = fluid_parray.z
z1 = solid_parray.z
if dim == 2:
z = np.zeros_like(x)
z1 = np.zeros_like(x1)
to_remove = []
ll_nnps = LinkedListNNPS(dim, [fluid_parray, solid_parray])
for i in range(len(x)):
nbrs = UIntArray()
ll_nnps.get_nearest_particles(1, 0, i, nbrs)
point_i = np.array([x[i], y[i], z[i]])
near_points = nbrs.get_npy_array()
distances = []
for ind in near_points:
dest = [x1[ind], y1[ind], z1[ind]]
distances.append(distance(point_i, dest))
if len(distances) == 0:
continue
elif min(distances) < (dx_solid * (1.0 - 1.0e-07)):
to_remove.append(i)
return to_remove
def remove_overlap_particles(fluid_parray, solid_parray, dx_solid, dim=3):
idx = find_overlap_particles(fluid_parray, solid_parray, dx_solid, dim)
fluid_parray.remove_particles(idx)
def show_2d(points, **kw):
import matplotlib.pyplot as plt
plt.scatter(points[0], points[1], **kw)
plt.xlabel('X')
plt.ylabel('Y')
def show_3d(points, **kw):
from mayavi import mlab
mlab.points3d(points[0], points[1], points[2], **kw)
mlab.axes(xlabel='X', ylabel='Y', zlabel='Z')
| true | true |
1c2f763199c907a4bd8ad18f5c4993d0bb39d63b | 367 | py | Python | lib/config_helper.py | ebeuerle/server_count | de701e8868ce7374a52ba6a5bd9bdcd5b1d1aa69 | [
"Unlicense"
] | null | null | null | lib/config_helper.py | ebeuerle/server_count | de701e8868ce7374a52ba6a5bd9bdcd5b1d1aa69 | [
"Unlicense"
] | null | null | null | lib/config_helper.py | ebeuerle/server_count | de701e8868ce7374a52ba6a5bd9bdcd5b1d1aa69 | [
"Unlicense"
] | null | null | null | import os
import yaml
PORTAL_CONFIG = os.path.join(os.path.dirname(__file__), '../configs/configs.yml')
# Load the portal configuration once at import time. ``yaml.safe_load``
# replaces ``yaml.load`` (which is unsafe on untrusted input and requires an
# explicit Loader in modern PyYAML), and the Python-2-only ``file()`` builtin
# is replaced with a context-managed ``open()``.
with open(PORTAL_CONFIG, 'r') as config_file:
    CONFIG = yaml.safe_load(config_file)


class ConfigHelper(object):
    """Expose the CloudPassage Halo credentials from the YAML config."""

    def __init__(self):
        # Halo API credentials and endpoint, read from the 'halo' section.
        self.halo_key = CONFIG["halo"]["api_key"]
        self.halo_secret = CONFIG["halo"]["api_secret_key"]
        self.halo_url = CONFIG["halo"]["url"]
| 30.583333 | 81 | 0.659401 | import os
import yaml
PORTAL_CONFIG = os.path.join(os.path.dirname(__file__), '../configs/configs.yml')
CONFIG = yaml.load(file(PORTAL_CONFIG, 'r'))
class ConfigHelper(object):
def __init__(self):
self.halo_key = CONFIG["halo"]["api_key"]
self.halo_secret = CONFIG["halo"]["api_secret_key"]
self.halo_url = CONFIG["halo"]["url"]
| true | true |
1c2f76a2e472d9aec97545d33462dff9f8433f07 | 7,401 | py | Python | imcls/datasets/ssdg_pacs.py | KaiyangZhou/mixstyle-release | 2a6bfe8016ab0f33d751f585b810bc27310096aa | [
"MIT"
] | 160 | 2021-01-13T07:17:27.000Z | 2022-03-30T14:43:54.000Z | DomainGeneralization/imcls/datasets/ssdg_pacs.py | YBZh/EFDM | 0baf35e26d22d0ab7d68a5f6cd7ed2541bdf8e4a | [
"MIT"
] | 15 | 2021-03-23T22:30:20.000Z | 2022-03-29T03:04:33.000Z | imcls/datasets/ssdg_pacs.py | KaiyangZhou/mixstyle-release | 2a6bfe8016ab0f33d751f585b810bc27310096aa | [
"MIT"
] | 22 | 2021-01-18T06:37:45.000Z | 2022-03-16T07:37:15.000Z | import os.path as osp
import random
from collections import defaultdict
from dassl.data.datasets import DATASET_REGISTRY, Datum, DatasetBase
from dassl.utils import mkdir_if_missing, read_json, write_json
@DATASET_REGISTRY.register()
class SSDGPACS(DatasetBase):
    """PACS.
    Statistics:
        - 4 domains: Photo (1,670), Art (2,048), Cartoon
        (2,344), Sketch (3,929).
        - 7 categories: dog, elephant, giraffe, guitar, horse,
        house and person.
    Reference:
        - Li et al. Deeper, broader and artier domain generalization.
        ICCV 2017.
        - Zhou et al. Semi-Supervised Domain Generalization with
        Stochastic StyleMatch. ArXiv preprint, 2021.
    """
    dataset_dir = 'pacs'
    domains = ['art_painting', 'cartoon', 'photo', 'sketch']
    data_url = 'https://drive.google.com/uc?id=1m4X4fROCCXMO0lRLrr6Zz9Vb3974NWhE'
    # the following images contain errors and should be ignored
    _error_paths = ['sketch/dog/n02103406_4068-1.png']
    def __init__(self, cfg):
        # Resolve the dataset folders under the configured root.
        root = osp.abspath(osp.expanduser(cfg.DATASET.ROOT))
        self.dataset_dir = osp.join(root, self.dataset_dir)
        self.image_dir = osp.join(self.dataset_dir, 'images')
        self.split_dir = osp.join(self.dataset_dir, 'splits')
        self.split_ssdg_dir = osp.join(self.dataset_dir, 'splits_ssdg')
        mkdir_if_missing(self.split_ssdg_dir)
        # Download and unpack the dataset on first use.
        if not osp.exists(self.dataset_dir):
            dst = osp.join(root, 'pacs.zip')
            self.download_data(self.data_url, dst, from_gdrive=True)
        self.check_input_domains(
            cfg.DATASET.SOURCE_DOMAINS, cfg.DATASET.TARGET_DOMAINS
        )
        seed = cfg.SEED
        num_labeled = cfg.DATASET.NUM_LABELED
        src_domains = cfg.DATASET.SOURCE_DOMAINS
        tgt_domain = cfg.DATASET.TARGET_DOMAINS[0]
        # The labeled/unlabeled split is randomized, so it is cached on disk
        # keyed by target domain, labeling budget and seed (reproducibility).
        split_ssdg_path = osp.join(self.split_ssdg_dir, f'{tgt_domain}_nlab{num_labeled}_seed{seed}.json')
        if not osp.exists(split_ssdg_path):
            train_x, train_u = self._read_data_train(cfg.DATASET.SOURCE_DOMAINS, num_labeled)
            self.write_json_train(split_ssdg_path, src_domains, self.image_dir, train_x, train_u)
        else:
            train_x, train_u = self.read_json_train(split_ssdg_path, src_domains, self.image_dir)
        # Validation uses the source domains' crossval folds; the test set
        # is the whole target domain (train + crossval folds).
        val = self._read_data_test(cfg.DATASET.SOURCE_DOMAINS, 'crossval')
        test = self._read_data_test(cfg.DATASET.TARGET_DOMAINS, 'all')
        if cfg.DATASET.ALL_AS_UNLABELED:
            # Optionally reuse the labeled images as unlabeled data as well.
            train_u = train_u + train_x
        super().__init__(train_x=train_x, train_u=train_u, val=val, test=test)
    @staticmethod
    def read_json_train(filepath, src_domains, image_dir):
        """Load a cached labeled/unlabeled split written by write_json_train.
        The latest office_home_dg dataset's class folders have
        been changed to only contain the class names, e.g.,
        000_Alarm_Clock/ is changed to Alarm_Clock/.
        """
        def _convert_to_datums(items):
            # Turn (relative path, label, domain name) triplets into Datums,
            # keeping only entries from the requested source domains.
            out = []
            for impath, label, dname in items:
                if dname not in src_domains:
                    continue
                domain = src_domains.index(dname)
                impath2 = osp.join(image_dir, impath)
                if not osp.exists(impath2):
                    # Fall back to the renamed class folder (numeric prefix
                    # such as '000_' stripped) when the stored path is stale.
                    impath = impath.split('/')
                    if impath[-2].startswith('0'):
                        impath[-2] = impath[-2][4:]
                    impath = '/'.join(impath)
                    impath2 = osp.join(image_dir, impath)
                item = Datum(impath=impath2, label=int(label), domain=domain)
                out.append(item)
            return out
        print(f'Reading split from "{filepath}"')
        split = read_json(filepath)
        train_x = _convert_to_datums(split['train_x'])
        train_u = _convert_to_datums(split['train_u'])
        return train_x, train_u
    @staticmethod
    def write_json_train(filepath, src_domains, image_dir, train_x, train_u):
        """Persist the labeled/unlabeled split as (relative path, label,
        domain name) triplets, reloadable via read_json_train."""
        def _convert_to_list(items):
            out = []
            for item in items:
                impath = item.impath
                label = item.label
                domain = item.domain
                dname = src_domains[domain]
                # Store image paths relative to image_dir for portability.
                impath = impath.replace(image_dir, '')
                if impath.startswith('/'):
                    impath = impath[1:]
                out.append((impath, label, dname))
            return out
        train_x = _convert_to_list(train_x)
        train_u = _convert_to_list(train_u)
        output = {
            'train_x': train_x,
            'train_u': train_u
        }
        write_json(output, filepath)
        print(f'Saved the split to "{filepath}"')
    def _read_data_train(self, input_domains, num_labeled):
        """Randomly split the source-domain training images into a labeled
        set (roughly num_labeled images spread evenly over domains and
        classes) and an unlabeled set holding the remainder."""
        # Budget per class; computed once the first domain's class count is
        # known. NOTE(review): this may be fractional (true division).
        num_labeled_per_class = None
        num_domains = len(input_domains)
        items_x, items_u = [], []
        for domain, dname in enumerate(input_domains):
            file = osp.join(
                self.split_dir, dname + '_train_kfold.txt'
            )
            impath_label_list = self._read_split_pacs(file)
            # Group image paths by class label.
            impath_label_dict = defaultdict(list)
            for impath, label in impath_label_list:
                impath_label_dict[label].append((impath, label))
            labels = list(impath_label_dict.keys())
            if num_labeled_per_class is None:
                num_labeled_per_class = num_labeled / (num_domains * len(labels))
            for label in labels:
                pairs = impath_label_dict[label]
                assert len(pairs) >= num_labeled_per_class
                # Shuffle so the labeled subset is a random sample.
                random.shuffle(pairs)
                for i, (impath, label) in enumerate(pairs):
                    item = Datum(impath=impath, label=label, domain=domain)
                    if (i + 1) <= num_labeled_per_class:
                        items_x.append(item)
                    else:
                        items_u.append(item)
        return items_x, items_u
    def _read_data_test(self, input_domains, split):
        """Read evaluation data; ``split`` is 'crossval', 'train' or 'all'
        ('all' concatenates the train and crossval folds)."""
        items = []
        for domain, dname in enumerate(input_domains):
            if split == 'all':
                file_train = osp.join(
                    self.split_dir, dname + '_train_kfold.txt'
                )
                impath_label_list = self._read_split_pacs(file_train)
                file_val = osp.join(
                    self.split_dir, dname + '_crossval_kfold.txt'
                )
                impath_label_list += self._read_split_pacs(file_val)
            else:
                file = osp.join(
                    self.split_dir, dname + '_' + split + '_kfold.txt'
                )
                impath_label_list = self._read_split_pacs(file)
            for impath, label in impath_label_list:
                item = Datum(impath=impath, label=label, domain=domain)
                items.append(item)
        return items
    def _read_split_pacs(self, split_file):
        """Parse an official PACS split file into (absolute image path,
        0-based label) tuples, skipping known-broken images."""
        items = []
        with open(split_file, 'r') as f:
            lines = f.readlines()
            for line in lines:
                line = line.strip()
                impath, label = line.split(' ')
                if impath in self._error_paths:
                    continue
                impath = osp.join(self.image_dir, impath)
                # Labels in the split files are 1-based.
                label = int(label) - 1
                items.append((impath, label))
        return items
| 37.005 | 106 | 0.576409 | import os.path as osp
import random
from collections import defaultdict
from dassl.data.datasets import DATASET_REGISTRY, Datum, DatasetBase
from dassl.utils import mkdir_if_missing, read_json, write_json
@DATASET_REGISTRY.register()
class SSDGPACS(DatasetBase):
dataset_dir = 'pacs'
domains = ['art_painting', 'cartoon', 'photo', 'sketch']
data_url = 'https://drive.google.com/uc?id=1m4X4fROCCXMO0lRLrr6Zz9Vb3974NWhE'
_error_paths = ['sketch/dog/n02103406_4068-1.png']
def __init__(self, cfg):
root = osp.abspath(osp.expanduser(cfg.DATASET.ROOT))
self.dataset_dir = osp.join(root, self.dataset_dir)
self.image_dir = osp.join(self.dataset_dir, 'images')
self.split_dir = osp.join(self.dataset_dir, 'splits')
self.split_ssdg_dir = osp.join(self.dataset_dir, 'splits_ssdg')
mkdir_if_missing(self.split_ssdg_dir)
if not osp.exists(self.dataset_dir):
dst = osp.join(root, 'pacs.zip')
self.download_data(self.data_url, dst, from_gdrive=True)
self.check_input_domains(
cfg.DATASET.SOURCE_DOMAINS, cfg.DATASET.TARGET_DOMAINS
)
seed = cfg.SEED
num_labeled = cfg.DATASET.NUM_LABELED
src_domains = cfg.DATASET.SOURCE_DOMAINS
tgt_domain = cfg.DATASET.TARGET_DOMAINS[0]
split_ssdg_path = osp.join(self.split_ssdg_dir, f'{tgt_domain}_nlab{num_labeled}_seed{seed}.json')
if not osp.exists(split_ssdg_path):
train_x, train_u = self._read_data_train(cfg.DATASET.SOURCE_DOMAINS, num_labeled)
self.write_json_train(split_ssdg_path, src_domains, self.image_dir, train_x, train_u)
else:
train_x, train_u = self.read_json_train(split_ssdg_path, src_domains, self.image_dir)
val = self._read_data_test(cfg.DATASET.SOURCE_DOMAINS, 'crossval')
test = self._read_data_test(cfg.DATASET.TARGET_DOMAINS, 'all')
if cfg.DATASET.ALL_AS_UNLABELED:
train_u = train_u + train_x
super().__init__(train_x=train_x, train_u=train_u, val=val, test=test)
@staticmethod
def read_json_train(filepath, src_domains, image_dir):
def _convert_to_datums(items):
out = []
for impath, label, dname in items:
if dname not in src_domains:
continue
domain = src_domains.index(dname)
impath2 = osp.join(image_dir, impath)
if not osp.exists(impath2):
impath = impath.split('/')
if impath[-2].startswith('0'):
impath[-2] = impath[-2][4:]
impath = '/'.join(impath)
impath2 = osp.join(image_dir, impath)
item = Datum(impath=impath2, label=int(label), domain=domain)
out.append(item)
return out
print(f'Reading split from "{filepath}"')
split = read_json(filepath)
train_x = _convert_to_datums(split['train_x'])
train_u = _convert_to_datums(split['train_u'])
return train_x, train_u
@staticmethod
def write_json_train(filepath, src_domains, image_dir, train_x, train_u):
def _convert_to_list(items):
out = []
for item in items:
impath = item.impath
label = item.label
domain = item.domain
dname = src_domains[domain]
impath = impath.replace(image_dir, '')
if impath.startswith('/'):
impath = impath[1:]
out.append((impath, label, dname))
return out
train_x = _convert_to_list(train_x)
train_u = _convert_to_list(train_u)
output = {
'train_x': train_x,
'train_u': train_u
}
write_json(output, filepath)
print(f'Saved the split to "{filepath}"')
def _read_data_train(self, input_domains, num_labeled):
num_labeled_per_class = None
num_domains = len(input_domains)
items_x, items_u = [], []
for domain, dname in enumerate(input_domains):
file = osp.join(
self.split_dir, dname + '_train_kfold.txt'
)
impath_label_list = self._read_split_pacs(file)
impath_label_dict = defaultdict(list)
for impath, label in impath_label_list:
impath_label_dict[label].append((impath, label))
labels = list(impath_label_dict.keys())
if num_labeled_per_class is None:
num_labeled_per_class = num_labeled / (num_domains * len(labels))
for label in labels:
pairs = impath_label_dict[label]
assert len(pairs) >= num_labeled_per_class
random.shuffle(pairs)
for i, (impath, label) in enumerate(pairs):
item = Datum(impath=impath, label=label, domain=domain)
if (i + 1) <= num_labeled_per_class:
items_x.append(item)
else:
items_u.append(item)
return items_x, items_u
def _read_data_test(self, input_domains, split):
items = []
for domain, dname in enumerate(input_domains):
if split == 'all':
file_train = osp.join(
self.split_dir, dname + '_train_kfold.txt'
)
impath_label_list = self._read_split_pacs(file_train)
file_val = osp.join(
self.split_dir, dname + '_crossval_kfold.txt'
)
impath_label_list += self._read_split_pacs(file_val)
else:
file = osp.join(
self.split_dir, dname + '_' + split + '_kfold.txt'
)
impath_label_list = self._read_split_pacs(file)
for impath, label in impath_label_list:
item = Datum(impath=impath, label=label, domain=domain)
items.append(item)
return items
def _read_split_pacs(self, split_file):
items = []
with open(split_file, 'r') as f:
lines = f.readlines()
for line in lines:
line = line.strip()
impath, label = line.split(' ')
if impath in self._error_paths:
continue
impath = osp.join(self.image_dir, impath)
label = int(label) - 1
items.append((impath, label))
return items
| true | true |
1c2f77808aaa6541d49b4da9726f01bf5798e7fe | 2,874 | py | Python | EventFilter/SiPixelRawToDigi/test/runRawToDigi_cfg.py | Hemida93/cmssw | 75a37059fc69b625a5e985f4f2e684cdebeeb8b5 | [
"Apache-2.0"
] | 1 | 2020-05-27T10:52:33.000Z | 2020-05-27T10:52:33.000Z | EventFilter/SiPixelRawToDigi/test/runRawToDigi_cfg.py | Hemida93/cmssw | 75a37059fc69b625a5e985f4f2e684cdebeeb8b5 | [
"Apache-2.0"
] | 28 | 2019-08-15T15:21:11.000Z | 2021-12-29T14:13:18.000Z | EventFilter/SiPixelRawToDigi/test/runRawToDigi_cfg.py | Hemida93/cmssw | 75a37059fc69b625a5e985f4f2e684cdebeeb8b5 | [
"Apache-2.0"
] | 1 | 2020-08-18T10:29:49.000Z | 2020-08-18T10:29:49.000Z | import FWCore.ParameterSet.Config as cms
process = cms.Process("MyRawToDigi")
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
#process.load("Configuration.StandardSequences.MagneticField_38T_cff")
process.load('Configuration.StandardSequences.MagneticField_cff')
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.load("Configuration.StandardSequences.Services_cff")
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(100))
process.source = cms.Source("PoolSource",
# fileNames = cms.untracked.vstring('file:rawdata.root')
fileNames = cms.untracked.vstring(
"rfio:/castor/cern.ch/cms/store/data/Run2012D/MinimumBias/RAW/v1/000/205/217/2EF61B7D-F216-E211-98C3-001D09F28D54.root",
# "rfio:/castor/cern.ch/cms/store/data/Run2012D/MinimumBias/RAW/v1/000/208/686/A88F66A0-393F-E211-9287-002481E0D524.root",
# "file:/afs/cern.ch/work/d/dkotlins/public/MC/mu/pt100_71_pre7/raw/raw2.root"
)
)
# Cabling
# include "CalibTracker/Configuration/data/Tracker_FakeConditions.cff"
#process.load("CalibTracker.Configuration.SiPixel_FakeConditions_cff")
#process.load("CalibTracker.Configuration.SiPixelCabling.SiPixelCabling_SQLite_cff")
#process.GlobalTag.connect = "frontier://FrontierProd/CMS_COND_21X_GLOBALTAG"
#process.GlobalTag.globaltag = "CRAFT_V3P::All"
#process.es_prefer_GlobalTag = cms.ESPrefer('PoolDBESSource','GlobalTag')
#process.siPixelCabling.connect = 'sqlite_file:cabling.db'
#process.siPixelCabling.toGet = cms.VPSet(cms.PSet(
# record = cms.string('SiPixelFedCablingMapRcd'),
# tag = cms.string('SiPixelFedCablingMap_v14')
#))
# Choose the global tag here:
#process.GlobalTag.globaltag = "GR_P_V40::All"
# for data in V7
# process.GlobalTag.globaltag = "GR_R_71_V1::All"
# for MC
process.GlobalTag.globaltag = "MC_71_V1::All"
process.load("EventFilter.SiPixelRawToDigi.SiPixelRawToDigi_cfi")
# for simultaions
process.siPixelDigis.InputLabel = 'siPixelRawData'
# for data
#process.siPixelDigis.InputLabel = 'source'
#process.siPixelDigis.InputLabel = 'rawDataCollector'
process.siPixelDigis.IncludeErrors = True
process.siPixelDigis.Timing = False
#process.siPixelDigis.UseCablingTree = True
process.MessageLogger = cms.Service("MessageLogger",
debugModules = cms.untracked.vstring('siPixelDigis'),
destinations = cms.untracked.vstring('r2d'),
r2d = cms.untracked.PSet( threshold = cms.untracked.string('WARNING'))
)
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string('file:digis.root'),
# fileName = cms.untracked.string('file:/afs/cern.ch/work/d/dkotlins/public/data/digis/digis_1k.root'),
outputCommands = cms.untracked.vstring("drop *","keep *_siPixelDigis_*_*")
)
process.p = cms.Path(process.siPixelDigis)
process.ep = cms.EndPath(process.out)
| 42.264706 | 123 | 0.787404 | import FWCore.ParameterSet.Config as cms
process = cms.Process("MyRawToDigi")
process.load("FWCore.MessageLogger.MessageLogger_cfi")
process.load('Configuration.StandardSequences.GeometryRecoDB_cff')
process.load('Configuration.StandardSequences.MagneticField_cff')
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
process.load("Configuration.StandardSequences.Services_cff")
process.maxEvents = cms.untracked.PSet( input = cms.untracked.int32(100))
process.source = cms.Source("PoolSource",
fileNames = cms.untracked.vstring(
"rfio:/castor/cern.ch/cms/store/data/Run2012D/MinimumBias/RAW/v1/000/205/217/2EF61B7D-F216-E211-98C3-001D09F28D54.root",
)
)
process.GlobalTag.globaltag = "MC_71_V1::All"
process.load("EventFilter.SiPixelRawToDigi.SiPixelRawToDigi_cfi")
process.siPixelDigis.InputLabel = 'siPixelRawData'
process.siPixelDigis.IncludeErrors = True
process.siPixelDigis.Timing = False
process.MessageLogger = cms.Service("MessageLogger",
debugModules = cms.untracked.vstring('siPixelDigis'),
destinations = cms.untracked.vstring('r2d'),
r2d = cms.untracked.PSet( threshold = cms.untracked.string('WARNING'))
)
process.out = cms.OutputModule("PoolOutputModule",
fileName = cms.untracked.string('file:digis.root'),
outputCommands = cms.untracked.vstring("drop *","keep *_siPixelDigis_*_*")
)
process.p = cms.Path(process.siPixelDigis)
process.ep = cms.EndPath(process.out)
| true | true |
1c2f77956e5fa37d1365479ad014e939c953bc44 | 376 | py | Python | app-flask/app-flask/app.py | benfab/mesos-demo-apps | e8da8cca8a1f916ea482ce21a6a3155d0ce56243 | [
"Apache-2.0"
] | null | null | null | app-flask/app-flask/app.py | benfab/mesos-demo-apps | e8da8cca8a1f916ea482ce21a6a3155d0ce56243 | [
"Apache-2.0"
] | null | null | null | app-flask/app-flask/app.py | benfab/mesos-demo-apps | e8da8cca8a1f916ea482ce21a6a3155d0ce56243 | [
"Apache-2.0"
] | null | null | null | from flask import Flask, render_template
import socket
app = Flask(__name__)
ip_address = socket.gethostbyname(socket.gethostname())
docker_hostname = socket.gethostname()
@app.route('/')
def hello_world():
return render_template('index.html', hostname=docker_hostname,myip=ip_address)
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0', port=int(8359))
| 26.857143 | 82 | 0.75 | from flask import Flask, render_template
import socket
app = Flask(__name__)
ip_address = socket.gethostbyname(socket.gethostname())
docker_hostname = socket.gethostname()
@app.route('/')
def hello_world():
return render_template('index.html', hostname=docker_hostname,myip=ip_address)
if __name__ == "__main__":
app.run(debug=True, host='0.0.0.0', port=int(8359))
| true | true |
1c2f77e32e620dacd00397ce78b7c62f84f3f36d | 6,947 | py | Python | build/scripts-3.7/startproject.py | lixiaopi1985/agrimet_scraper | eb566d2bbb6f1882656a1bc5319e9f35ad7dc5df | [
"MIT"
] | null | null | null | build/scripts-3.7/startproject.py | lixiaopi1985/agrimet_scraper | eb566d2bbb6f1882656a1bc5319e9f35ad7dc5df | [
"MIT"
] | null | null | null | build/scripts-3.7/startproject.py | lixiaopi1985/agrimet_scraper | eb566d2bbb6f1882656a1bc5319e9f35ad7dc5df | [
"MIT"
] | null | null | null | #!python
"""This module is used to run at command line to initialize project
myproject
| | | |_ db
| | |___ config
| |_____ log
|_______ station info
"""
import argparse
import os
import sys
import sqlite3
from configparser import RawConfigParser
import shutil
from agrimetscraper.utils.configurations import basic_configs
from agrimetscraper.utils.configreader import Configtuner
from agrimetscraper.utils.stationinfo import Stationinfo
from agrimetscraper.utils.mylogger import Setlog
from agrimetscraper.template import pipeline, runproject
from agrimetscraper.utils.mongoSetup import Mongosetup
from agrimetscraper.utils.mongoDB import get_db
import getpass
def main():
try:
parser = argparse.ArgumentParser(
prog="startproject",
usage="startproject.py -p myproject -t dbtype"
)
parser.add_argument("-p", dest="project", nargs="?", type=str, help="<string> name of your project")
parser.add_argument("-t", dest="dbtype", nargs="?", default="sql", choices=['sql', 'mongodb', 'atlas'], help="<string> store data type: sql or mongodb or to the atlas cloud")
args = parser.parse_args()
project = args.project
dbtype = args.dbtype
except argparse.ArgumentError as argerror:
print(argerror)
sys.exit(1)
print("""
Starting a new agrimetscraper project
""")
main_path = os.getcwd()
project_path = os.path.join(main_path, project)
if not os.path.exists(project_path):
os.makedirs(project_path)
else:
raise FileExistsError(f"{project} existed")
dbdir = os.path.join(project_path, f"{project}-database")
logdir = os.path.join(project_path, f"{project}-log")
configdir = os.path.join(project_path, f"{project}-config")
stationdir = os.path.join(project_path, f"{project}-stations")
if not os.path.exists(dbdir):
os.makedirs(dbdir)
if not os.path.exists(logdir):
os.makedirs(logdir)
if not os.path.exists(configdir):
os.makedirs(configdir)
if not os.path.exists(stationdir):
os.makedirs(stationdir)
# initialize file names in each directories
dbname = project + '.db'
dbfilepath = os.path.join(dbdir, dbname)
logfilename = project + ".log"
logfilepath = os.path.join(logdir, logfilename)
configfilename = project + ".ini"
configfilepath = os.path.join(configdir, configfilename)
stationfilename = "stations.csv"
stationfilepath = os.path.join(stationdir, stationfilename)
global_settings = basic_configs
# add new settings to config file
global_settings['PROJECT_SETTINGS']['project_name']=project
global_settings['PROJECT_SETTINGS']['project_path']=project_path
global_settings['PROJECT_SETTINGS']['project_setting_path']=configfilepath
global_settings['DB_SETTINGS']['database_path']=dbfilepath
global_settings['DB_SETTINGS']['database_type']=dbtype
global_settings['DB_SETTINGS']['database_name']=(dbname)
global_settings['LOG_SETTINGS']['logfile_path']=logfilepath
global_settings['LOG_SETTINGS']['logfile_name']=logfilename
global_settings['LOG_SETTINGS']['logfile_format'] = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
global_settings['LOG_SETTINGS']['logfile_datetimefmt'] = '%Y-%m-%d %H:%M:%S'
global_settings['STATION_SETTINGS']['station_dir'] = stationfilepath
config = RawConfigParser()
config.read_dict(global_settings)
print(f"\ninitializing config file: {configfilename}")
with open(configfilepath, 'w') as config_handle:
config.write(config_handle)
# create log file
print(f"making an empty log file: {logfilename}")
with open(logfilepath, 'a') as log_handle:
pass
# create stations.csv
print("retrieving stations information as csv")
config = Configtuner(configfilepath)
url = config.getconfig('STATION_SETTINGS', 'station_url')
station = Stationinfo(url, stationfilepath)
station_df = station.querysites()
config.setconfig("DB_SETTINGS", "database_tables", "StationInfo")
logger = Setlog(configfilepath, "startproject")
connect_string = config.getconfig("DB_SETTINGS", "connect_string")
if dbtype == 'sql':
# create db file
print(f"making an database: {dbname}")
logger.info(f"making an SQL database: {dbname}")
conn = sqlite3.connect(dbfilepath)
station_df.save2sql("StationInfo", conn)
conn.commit()
conn.close()
elif dbtype == 'mongodb':
if connect_string != "localhost":
logger.exception("host selected not match database type. Choose mongodb for local storage in your ini file")
raise ValueError("dbtype is not matching to the host type. Choose mongodb for local storage in your ini file")
print(f"making an database: {dbname}")
logger.info(f"making a mongo database: {dbname}")
# create collection from panda
df = station_df.df_filtered
data = df.to_dict(orient='records')
mongo_conn = Mongosetup(dbdir, logger)
mongo_conn.start_mongodb()
db, _ = get_db(project, connect_string)
db = db['StationInfo'] # collection
db.insert_many(data) # no need to consider update, once the project is setup, this collection will stand alone
elif dbtype == "atlas":
print(f"connecting to Mongo Atlas: database name: {dbname}")
logger.info(f"connecting to Mongo Atlas: database name: {dbname}")
connect_string = input("\nInput your connect string to atlas: ")
password = getpass.getpass("\nPassword: ")
connect_string = connect_string.replace('<password>', password)
if not connect_string.startswith("mongodb+srv://"):
logger.exception("host selected not match database type. Choose atlas for cloud storage in your ini file")
raise ValueError("dbtype is not matching to the host type. Choose atlas for cloud storage in your ini file")
config.setconfig("DB_SETTINGS", "connect_string", connect_string)
# create collection from panda
df = station_df.df_filtered
data = df.to_dict(orient='records')
db, _ = get_db(project, connect_string)
db = db['StationInfo']
db.insert_many(data) # no need to consider update, once the project is setup, this collection will stand alone
logger.info(f"{project} finished initialization.")
# copy files to local project location
runprojectpath = os.path.realpath(runproject.__file__)
pipelinepath = os.path.realpath(pipeline.__file__)
shutil.copy2(runprojectpath, project_path)
shutil.copy2(pipelinepath, project_path)
print(f"\n{project} finished initialization.\nYou can modify your local '.ini' file in the config folder to schedule scrape time and then run RunProject!\n")
if __name__ == "__main__":
main() | 35.085859 | 182 | 0.690226 |
import argparse
import os
import sys
import sqlite3
from configparser import RawConfigParser
import shutil
from agrimetscraper.utils.configurations import basic_configs
from agrimetscraper.utils.configreader import Configtuner
from agrimetscraper.utils.stationinfo import Stationinfo
from agrimetscraper.utils.mylogger import Setlog
from agrimetscraper.template import pipeline, runproject
from agrimetscraper.utils.mongoSetup import Mongosetup
from agrimetscraper.utils.mongoDB import get_db
import getpass
def main():
try:
parser = argparse.ArgumentParser(
prog="startproject",
usage="startproject.py -p myproject -t dbtype"
)
parser.add_argument("-p", dest="project", nargs="?", type=str, help="<string> name of your project")
parser.add_argument("-t", dest="dbtype", nargs="?", default="sql", choices=['sql', 'mongodb', 'atlas'], help="<string> store data type: sql or mongodb or to the atlas cloud")
args = parser.parse_args()
project = args.project
dbtype = args.dbtype
except argparse.ArgumentError as argerror:
print(argerror)
sys.exit(1)
print("""
Starting a new agrimetscraper project
""")
main_path = os.getcwd()
project_path = os.path.join(main_path, project)
if not os.path.exists(project_path):
os.makedirs(project_path)
else:
raise FileExistsError(f"{project} existed")
dbdir = os.path.join(project_path, f"{project}-database")
logdir = os.path.join(project_path, f"{project}-log")
configdir = os.path.join(project_path, f"{project}-config")
stationdir = os.path.join(project_path, f"{project}-stations")
if not os.path.exists(dbdir):
os.makedirs(dbdir)
if not os.path.exists(logdir):
os.makedirs(logdir)
if not os.path.exists(configdir):
os.makedirs(configdir)
if not os.path.exists(stationdir):
os.makedirs(stationdir)
dbname = project + '.db'
dbfilepath = os.path.join(dbdir, dbname)
logfilename = project + ".log"
logfilepath = os.path.join(logdir, logfilename)
configfilename = project + ".ini"
configfilepath = os.path.join(configdir, configfilename)
stationfilename = "stations.csv"
stationfilepath = os.path.join(stationdir, stationfilename)
global_settings = basic_configs
global_settings['PROJECT_SETTINGS']['project_name']=project
global_settings['PROJECT_SETTINGS']['project_path']=project_path
global_settings['PROJECT_SETTINGS']['project_setting_path']=configfilepath
global_settings['DB_SETTINGS']['database_path']=dbfilepath
global_settings['DB_SETTINGS']['database_type']=dbtype
global_settings['DB_SETTINGS']['database_name']=(dbname)
global_settings['LOG_SETTINGS']['logfile_path']=logfilepath
global_settings['LOG_SETTINGS']['logfile_name']=logfilename
global_settings['LOG_SETTINGS']['logfile_format'] = '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
global_settings['LOG_SETTINGS']['logfile_datetimefmt'] = '%Y-%m-%d %H:%M:%S'
global_settings['STATION_SETTINGS']['station_dir'] = stationfilepath
config = RawConfigParser()
config.read_dict(global_settings)
print(f"\ninitializing config file: {configfilename}")
with open(configfilepath, 'w') as config_handle:
config.write(config_handle)
print(f"making an empty log file: {logfilename}")
with open(logfilepath, 'a') as log_handle:
pass
print("retrieving stations information as csv")
config = Configtuner(configfilepath)
url = config.getconfig('STATION_SETTINGS', 'station_url')
station = Stationinfo(url, stationfilepath)
station_df = station.querysites()
config.setconfig("DB_SETTINGS", "database_tables", "StationInfo")
logger = Setlog(configfilepath, "startproject")
connect_string = config.getconfig("DB_SETTINGS", "connect_string")
if dbtype == 'sql':
print(f"making an database: {dbname}")
logger.info(f"making an SQL database: {dbname}")
conn = sqlite3.connect(dbfilepath)
station_df.save2sql("StationInfo", conn)
conn.commit()
conn.close()
elif dbtype == 'mongodb':
if connect_string != "localhost":
logger.exception("host selected not match database type. Choose mongodb for local storage in your ini file")
raise ValueError("dbtype is not matching to the host type. Choose mongodb for local storage in your ini file")
print(f"making an database: {dbname}")
logger.info(f"making a mongo database: {dbname}")
df = station_df.df_filtered
data = df.to_dict(orient='records')
mongo_conn = Mongosetup(dbdir, logger)
mongo_conn.start_mongodb()
db, _ = get_db(project, connect_string)
db = db['StationInfo']
db.insert_many(data)
elif dbtype == "atlas":
print(f"connecting to Mongo Atlas: database name: {dbname}")
logger.info(f"connecting to Mongo Atlas: database name: {dbname}")
connect_string = input("\nInput your connect string to atlas: ")
password = getpass.getpass("\nPassword: ")
connect_string = connect_string.replace('<password>', password)
if not connect_string.startswith("mongodb+srv://"):
logger.exception("host selected not match database type. Choose atlas for cloud storage in your ini file")
raise ValueError("dbtype is not matching to the host type. Choose atlas for cloud storage in your ini file")
config.setconfig("DB_SETTINGS", "connect_string", connect_string)
df = station_df.df_filtered
data = df.to_dict(orient='records')
db, _ = get_db(project, connect_string)
db = db['StationInfo']
db.insert_many(data)
logger.info(f"{project} finished initialization.")
runprojectpath = os.path.realpath(runproject.__file__)
pipelinepath = os.path.realpath(pipeline.__file__)
shutil.copy2(runprojectpath, project_path)
shutil.copy2(pipelinepath, project_path)
print(f"\n{project} finished initialization.\nYou can modify your local '.ini' file in the config folder to schedule scrape time and then run RunProject!\n")
if __name__ == "__main__":
main() | true | true |
1c2f7840f420a76ee14e7ae813ac5048f0aeb1fa | 11,631 | py | Python | gym_trading/envs/Testing_Env.py | Deion14/mlp3 | cab1a18b36114f49622e3a5fc8650efda5205e01 | [
"MIT"
] | null | null | null | gym_trading/envs/Testing_Env.py | Deion14/mlp3 | cab1a18b36114f49622e3a5fc8650efda5205e01 | [
"MIT"
] | 1 | 2018-03-04T13:43:10.000Z | 2018-03-04T13:43:10.000Z | gym_trading/envs/Testing_Env.py | Deion14/mlp3 | cab1a18b36114f49622e3a5fc8650efda5205e01 | [
"MIT"
] | null | null | null |
import gym
from gym import error, spaces, utils
from gym.utils import seeding
from collections import Counter
import quandl
import numpy as np
from numpy import random
import pandas as pd
import logging
import pdb
from sklearn import preprocessing
import tempfile
log = logging.getLogger(__name__)
log.info('%s logger started.',__name__)
class QuandlEnvSrcTest(object):
'''
Quandl-based implementation of a TradingEnv's data source.
Pulls data from Quandl, preps for use by TradingEnv and then
acts as data provider for each new episode.
'''
q_api_key = "bB4wp5--7XrkpGZ7-gxJ"
quandl.ApiConfig.api_key = q_api_key
MinPercentileDays = 100
QuandlAuthToken = "" # not necessary, but can be used if desired
Name = "TSE/9994" # https://www.quandl.com/search (use 'Free' filter)
def __init__(self, days=252, name=Name, auth=QuandlAuthToken, scale=True ):
self.name = name
self.auth = auth
self.days = days+1
log.info('getting data for %s from quandl...',QuandlEnvSrcTest.Name)
Stocks=['GE', 'AMD', 'F', 'AAPL', 'AIG', 'CHK', 'MU', 'MSFT', 'CSCO', 'T']
self.NumberOfStocks=len(Stocks)
df = quandl.get_table('WIKI/PRICES', ticker=Stocks, qopts = { 'columns': ['ticker', 'volume','adj_close'] }, date = { 'gte': '2015-10-25', 'lte': '2017-12-29' }, paginate=False)
self.NumberOfStocks=len(Stocks)
df = df[ ~np.isnan(df.volume)][['ticker','volume', 'adj_close']]
# we calculate returns and percentiles, then kill nans
df = df[['ticker','adj_close','volume']]
self.Dimension=len(list(df))
df.volume.replace(0,1,inplace=True) # days shouldn't have zero volume..
df['Return'] = (df.adj_close-df.adj_close.shift())/df.adj_close.shift()
#df['Return2Day'] = (df.adj_close-df.adj_close.shift(periods=2))/df.adj_close.shift(periods=2)
#df['Return5Day'] = (df.adj_close-df.adj_close.shift(periods=5))/df.adj_close.shift(periods=5)
#df['Return10Day'] = (df.adj_close-df.adj_close.shift(periods=10))/df.adj_close.shift(periods=10)
pctrank = lambda x: pd.Series(x).rank(pct=True).iloc[-1]
names=["Stock"+str(i) for i in range(1,len(Stocks)+1)]
for i ,j in enumerate(Stocks):
if i==0:
stock1=df[df['ticker'] == Stocks[i]].drop("ticker", axis=1 )
stock1= stock1.set_index(np.arange(0,len(stock1)))
DF=stock1
elif i==1:
stock1=df[df['ticker'] == Stocks[i]].drop("ticker", axis=1 )
stock1= stock1.set_index(np.arange(0,len(stock1)))
DF=DF.join(stock1, lsuffix='Stock1', rsuffix='Stock2')
else:
stock1=df[df['ticker'] == Stocks[i]].drop("ticker", axis=1 )
stock1= stock1.set_index(np.arange(0,len(stock1)))
DF=DF.join(stock1, rsuffix=names[i])
DF=DF.iloc[1:] # remove 1st 10
colNames=list(DF)
#removeRetCols = ["ReturnStock"+str(i) for i in range(1,3)]
colNames = [i for j, i in enumerate(colNames) if j not in range(self.Dimension-1,self.NumberOfStocks*self.Dimension,self.Dimension)]
DF[colNames] = DF[colNames].apply(lambda x: (x - x.mean()) / (x.var()))
df=DF
self.min_values = df.min(axis=0)
self.max_values = df.max(axis=0)
self.data = df
self.step = 0
def reset(self):
# automatically starts at first since its test
self.idx = 252
self.step = 0
def _step(self):
obs = self.data.iloc[(self.idx-252):self.idx].as_matrix()
self.idx += 1
self.step += 1
done = self.step >= self.days
#pdb.set_trace()
retAllStocks=list(np.arange(self.Dimension-1,self.Dimension*self.NumberOfStocks,self.Dimension ))
returns=self.data.iloc[:self.idx,retAllStocks] #past returns of stocks
return obs,done,returns
############################# #########################################
############################# #########################################
class TradingSim(object) :
""" Implements core trading simulator for single-instrument univ """
def __init__(self, steps, trading_cost_bps = 1e-3, time_cost_bps = 1e-4,NumberOfStocks=2):
# invariant for object life
self.NumberOfStocks =NumberOfStocks
self.trading_cost_bps = trading_cost_bps
self.time_cost_bps = time_cost_bps
self.steps = steps
# change every step
self.step = 0
self.actions = np.zeros((self.steps,self.NumberOfStocks))
self.navs = np.ones(self.steps)
self.mkt_nav = np.ones(self.steps)
self.strat_retrns = np.ones(self.steps)
self.posns = np.zeros(self.steps)
self.costs = np.zeros(self.steps)
self.trades = np.zeros(self.steps)
self.mkt_retrns = np.zeros((self.steps,1))
self.total_returns = 0
self.negative_returns = [0]
def reset(self):
self.step = 0
self.actions.fill(0)
self.navs.fill(1)
self.mkt_nav.fill(1)
self.strat_retrns.fill(0)
self.posns.fill(0)
self.costs.fill(0)
self.trades.fill(0)
self.mkt_retrns.fill(0)
self.total_returns = 0
self.negative_returns = [0]
def _step(self, action, retrn ):
""" Given an action and return for prior period, calculates costs, navs,
etc and returns the reward and a summary of the day's activity. """
#bod_posn = 0.0 if self.step == 0 else self.posns[self.step-1]
#bod_nav = 1.0 if self.step == 0 else self.navs[self.step-1]
#mkt_nav = 1.0 if self.step == 0 else self.mkt_nav[self.step-1]
self.actions[self.step,:] = action
#self.posns[self.step] = action - 1
#self.trades[self.step] = self.posns[self.step] - bod_posn
tradecosts = np.empty_like(action)
tradecosts.fill(.0001)
costs = np.dot(tradecosts,abs(action.reshape(-1,1)))
trade_costs_pct = abs(self.trades[self.step]) * self.trading_cost_bps
self.costs[self.step] = costs
#reward= np.dot(retrn, action.reshape(-1,1))-self.costs[self.step]
reward= np.dot(retrn, action.reshape(-1,1))-costs
nominal_reward = np.dot(retrn, action.reshape(-1,1)) - self.costs[self.step]
self.total_returns = self.total_returns + nominal_reward
oldsort = self.mkt_retrns[self.step-1,:]
newsort = 0
sortchange = 0
stdev_neg_returns = 0
if nominal_reward < 0:
self.negative_returns = np.append(self.negative_returns, nominal_reward)
stdev_neg_returns = np.sqrt(np.std(self.negative_returns))
else:
stdev_neg_returns = np.sqrt(np.std(self.negative_returns))
if stdev_neg_returns == 0:
newsort = self.total_returns / .1
else:
newsort = self.total_returns / stdev_neg_returns
if oldsort == 0:
sortchange = newsort
else:
sortchange = (newsort - oldsort)/oldsort
self.mkt_retrns[self.step,:] = newsort
#reward = ( (bod_posn * retrn) - self.costs[self.step] )
#pdb.set_trace()
# self.strat_retrns[self.step] = sortchange
#if self.step != 0 :
# self.navs[self.step] = bod_nav * (1 + self.strat_retrns[self.step-1])
# self.mkt_nav[self.step] = mkt_nav * (1 + self.mkt_retrns[self.step-1])
#info = { 'reward': reward, 'nav':self.navs[self.step], 'costs':self.costs[self.step] }
info = { 'reward': reward, 'costs':self.costs[self.step] ,'nominal_reward':nominal_reward}
self.step += 1
return sortchange, newsort, info
def to_df(self):
"""returns internal state in new dataframe """
cols = ['action', 'bod_nav', 'mkt_nav','mkt_return','sim_return',
'position','costs', 'trade' ]
rets = _prices2returns(self.navs)
#pdb.set_trace()
df = pd.DataFrame( )
"""
{'action': self.actions, # today's action (from agent)
'bod_nav': self.navs, # BOD Net Asset Value (NAV)
'mkt_nav': self.mkt_nav,
'mkt_return': self.mkt_retrns,
'sim_return': self.strat_retrns,
'position': self.posns, # EOD position
'costs': self.costs, # eod costs
'trade': self.trades },# eod trade
columns=cols)
"""
return df
############################# #########################################
############################# #########################################
############################# #########################################
############################# #########################################
class TestingEnv(gym.Env):
    """This gym implements a simple trading environment for reinforcement learning.

    Wraps a :class:`QuandlEnvSrcTest` observation stream and a
    :class:`TradingSim` reward bookkeeper behind the (legacy) gym.Env API.
    """
    metadata = {'render.modes': ['human']}

    def __init__(self):
        self.days = 252
        self.src = QuandlEnvSrcTest(days=self.days)
        self.sim = TradingSim(steps=self.days, trading_cost_bps=1e-3,
                              time_cost_bps=1e-4, NumberOfStocks=self.src.NumberOfStocks)
        # One continuous weight per stock, each in [-1, 1].
        self.action_space = spaces.Box(low=-1, high=1, shape=(self.src.NumberOfStocks,))
        self.observation_space = spaces.Box(self.src.min_values,
                                            self.src.max_values)
        self._reset()

    def _configure(self, display=None):
        self.display = display

    def _seed(self, seed=None):
        self.np_random, seed = seeding.np_random(seed)
        return [seed]

    def _step(self, action):
        """Advance one market day.

        Returns a 6-tuple (observation, reward, done, sortino, info,
        past_returns).
        """
        observation, done, Returns = self.src._step()
        # Column indices of the per-stock "Return" feature inside the
        # observation matrix (last column of each stock's feature group).
        retAllStocks = list(np.arange(self.src.Dimension - 1,
                                      self.src.Dimension * self.src.NumberOfStocks,
                                      self.src.Dimension))
        yret = observation[-1, retAllStocks]
        reward, sort, info = self.sim._step(action, yret)
        return observation, reward, done, sort, info, Returns

    def _reset(self):
        self.src.reset()
        self.sim.reset()
        out = self.src._step()
        # Return (first observation, past returns).
        return out[0], out[2]

    def _render(self, mode='human', close=False):
        # Rendering is not implemented for this environment.
        pass

    # some convenience functions:
    def run_strat(self, strategy, return_df=True):
        """Run provided strategy for one episode, optionally returning a df.

        NOTE(review): ``self.step()`` forwards ``_step()``'s 6-tuple while
        this loop unpacks 4 values — confirm against the gym version in use.
        """
        observation = self.reset()
        done = False
        while not done:
            action = strategy(observation, self)  # call strategy
            observation, reward, done, info = self.step(action)
        return self.sim.to_df() if return_df else None

    def run_strats(self, strategy, episodes=1, write_log=True, return_df=True):
        """Run provided strategy the specified # of times, possibly
        writing a log and possibly returning a dataframe summarizing activity.

        Note that writing the log is expensive and returning the df is moreso.
        For training purposes, you might not want to set both.
        """
        logfile = None
        if write_log:
            logfile = tempfile.NamedTemporaryFile(delete=False)
            log.info('writing log to %s', logfile.name)
        need_df = write_log or return_df
        alldf = None
        for i in range(episodes):
            df = self.run_strat(strategy, return_df=need_df)
            if write_log:
                # Fix: the original left this branch with only a commented-out
                # statement as its body, which is a syntax error; restore the
                # intended log write (need_df guarantees df is a DataFrame here).
                df.to_csv(logfile, mode='a')
            if return_df:
                alldf = df if alldf is None else pd.concat([alldf, df], axis=0)
        return alldf
| 33.326648 | 182 | 0.590491 |
import gym
from gym import error, spaces, utils
from gym.utils import seeding
from collections import Counter
import quandl
import numpy as np
from numpy import random
import pandas as pd
import logging
import pdb
from sklearn import preprocessing
import tempfile
# Module-level logger for the trading environment.
log = logging.getLogger(__name__)
log.info('%s logger started.',__name__)
class QuandlEnvSrcTest(object):
    """Market-data source for the testing environment.

    Downloads daily volume/adjusted-close data for a fixed basket of tickers
    from Quandl's WIKI/PRICES table, joins the per-ticker frames column-wise,
    adds a daily "Return" column per stock and normalizes the non-return
    columns.  `_step()` then serves a sliding 252-row observation window.
    """
    # SECURITY NOTE(review): hard-coded Quandl API key checked into source —
    # should come from configuration/environment instead.
    q_api_key = "bB4wp5--7XrkpGZ7-gxJ"
    quandl.ApiConfig.api_key = q_api_key
    MinPercentileDays = 100
    QuandlAuthToken = ""
    Name = "TSE/9994"

    def __init__(self, days=252, name=Name, auth=QuandlAuthToken, scale=True ):
        """Fetch and pre-process the data; *days* is the episode length."""
        self.name = name
        self.auth = auth
        self.days = days+1
        log.info('getting data for %s from quandl...',QuandlEnvSrcTest.Name)
        Stocks=['GE', 'AMD', 'F', 'AAPL', 'AIG', 'CHK', 'MU', 'MSFT', 'CSCO', 'T']
        self.NumberOfStocks=len(Stocks)
        df = quandl.get_table('WIKI/PRICES', ticker=Stocks, qopts = { 'columns': ['ticker', 'volume','adj_close'] }, date = { 'gte': '2015-10-25', 'lte': '2017-12-29' }, paginate=False)
        # NOTE(review): duplicate of the assignment above — harmless but redundant.
        self.NumberOfStocks=len(Stocks)
        # Drop rows with NaN volume; keep only the columns we use.
        df = df[ ~np.isnan(df.volume)][['ticker','volume', 'adj_close']]
        df = df[['ticker','adj_close','volume']]
        # Features per stock (ticker is dropped later, Return added later).
        self.Dimension=len(list(df))
        # Avoid zero volume so later normalization cannot divide by zero.
        df.volume.replace(0,1,inplace=True)
        # Daily simple return from adjusted close.
        df['Return'] = (df.adj_close-df.adj_close.shift())/df.adj_close.shift()
        #df['Return2Day'] = (df.adj_close-df.adj_close.shift(periods=2))/df.adj_close.shift(periods=2)
        #df['Return5Day'] = (df.adj_close-df.adj_close.shift(periods=5))/df.adj_close.shift(periods=5)
        #df['Return10Day'] = (df.adj_close-df.adj_close.shift(periods=10))/df.adj_close.shift(periods=10)
        pctrank = lambda x: pd.Series(x).rank(pct=True).iloc[-1]
        names=["Stock"+str(i) for i in range(1,len(Stocks)+1)]
        # Join each ticker's frame side by side, suffixing column names so
        # the wide frame has one feature group per stock.
        for i ,j in enumerate(Stocks):
            if i==0:
                stock1=df[df['ticker'] == Stocks[i]].drop("ticker", axis=1 )
                stock1= stock1.set_index(np.arange(0,len(stock1)))
                DF=stock1
            elif i==1:
                stock1=df[df['ticker'] == Stocks[i]].drop("ticker", axis=1 )
                stock1= stock1.set_index(np.arange(0,len(stock1)))
                DF=DF.join(stock1, lsuffix='Stock1', rsuffix='Stock2')
            else:
                stock1=df[df['ticker'] == Stocks[i]].drop("ticker", axis=1 )
                stock1= stock1.set_index(np.arange(0,len(stock1)))
                DF=DF.join(stock1, rsuffix=names[i])
        DF=DF.iloc[1:] # remove 1st 10
        colNames=list(DF)
        #removeRetCols = ["ReturnStock"+str(i) for i in range(1,3)]
        # Exclude the per-stock Return columns from normalization.
        colNames = [i for j, i in enumerate(colNames) if j not in range(self.Dimension-1,self.NumberOfStocks*self.Dimension,self.Dimension)]
        # NOTE(review): divides by the variance, not the standard deviation —
        # confirm whether z-scoring was intended.
        DF[colNames] = DF[colNames].apply(lambda x: (x - x.mean()) / (x.var()))
        df=DF
        self.min_values = df.min(axis=0)
        self.max_values = df.max(axis=0)
        self.data = df
        self.step = 0

    def reset(self):
        # automatically starts at first since its test
        # idx is the exclusive end of the 252-row observation window.
        self.idx = 252
        self.step = 0

    def _step(self):
        """Return (observation window, done flag, past per-stock returns)."""
        # NOTE(review): DataFrame.as_matrix() was removed in pandas 1.0 —
        # `.values` / `.to_numpy()` is the modern equivalent.
        obs = self.data.iloc[(self.idx-252):self.idx].as_matrix()
        self.idx += 1
        self.step += 1
        done = self.step >= self.days
        #pdb.set_trace()
        retAllStocks=list(np.arange(self.Dimension-1,self.Dimension*self.NumberOfStocks,self.Dimension ))
        returns=self.data.iloc[:self.idx,retAllStocks] #past returns of stocks
        return obs,done,returns
############################# #########################################
############################# #########################################
class TradingSim(object):
    """Step-by-step bookkeeping of a trading strategy.

    Tracks the agent's actions, per-step transaction costs, and a running
    Sortino-style ratio (cumulative nominal return divided by a spread of the
    negative returns) that serves as the learning signal.
    """

    def __init__(self, steps, trading_cost_bps=1e-3, time_cost_bps=1e-4,
                 NumberOfStocks=2):
        # invariant for object life
        self.NumberOfStocks = NumberOfStocks
        self.trading_cost_bps = trading_cost_bps
        self.time_cost_bps = time_cost_bps
        self.steps = steps
        # change every step
        self.step = 0
        self.actions = np.zeros((self.steps, self.NumberOfStocks))
        self.navs = np.ones(self.steps)
        self.mkt_nav = np.ones(self.steps)
        self.strat_retrns = np.ones(self.steps)
        self.posns = np.zeros(self.steps)
        self.costs = np.zeros(self.steps)
        self.trades = np.zeros(self.steps)
        # One Sortino value recorded per step.
        self.mkt_retrns = np.zeros((self.steps, 1))
        self.total_returns = 0
        self.negative_returns = [0]

    def reset(self):
        """Zero out all per-episode state."""
        self.step = 0
        self.actions.fill(0)
        self.navs.fill(1)
        self.mkt_nav.fill(1)
        self.strat_retrns.fill(0)
        self.posns.fill(0)
        self.costs.fill(0)
        self.trades.fill(0)
        self.mkt_retrns.fill(0)
        self.total_returns = 0
        self.negative_returns = [0]

    def _step(self, action, retrn):
        """Record one step and return (sortino_change, sortino, info).

        action -- per-stock position weights for the day;
        retrn  -- per-stock returns realized that day.
        """
        self.actions[self.step, :] = action
        # Flat 1bp cost per unit of absolute position.
        tradecosts = np.empty_like(action)
        tradecosts.fill(.0001)
        costs = np.dot(tradecosts, abs(action.reshape(-1, 1)))
        self.costs[self.step] = costs
        reward = np.dot(retrn, action.reshape(-1, 1)) - costs
        nominal_reward = np.dot(retrn, action.reshape(-1, 1)) - self.costs[self.step]
        self.total_returns = self.total_returns + nominal_reward
        # Previous step's Sortino (row -1 is all zeros on the first step).
        oldsort = self.mkt_retrns[self.step - 1, :]
        if nominal_reward < 0:
            self.negative_returns = np.append(self.negative_returns, nominal_reward)
        # NOTE(review): sqrt of the std (not the std itself), kept as-is.
        stdev_neg_returns = np.sqrt(np.std(self.negative_returns))
        if stdev_neg_returns == 0:
            # Fall back to a fixed 0.1 denominator when no spread exists yet.
            newsort = self.total_returns / .1
        else:
            newsort = self.total_returns / stdev_neg_returns
        if oldsort == 0:
            sortchange = newsort
        else:
            sortchange = (newsort - oldsort) / oldsort
        self.mkt_retrns[self.step, :] = newsort
        info = {'reward': reward, 'costs': self.costs[self.step],
                'nominal_reward': nominal_reward}
        self.step += 1
        return sortchange, newsort, info

    def to_df(self):
        """Return internal state as a (currently empty) dataframe.

        Fix: the original referenced an undefined helper
        (``_prices2returns``), so any call raised NameError; the dead
        computation is removed and the empty frame the original built is
        returned.  Intended columns: action, bod_nav, mkt_nav, mkt_return,
        sim_return, position, costs, trade.
        """
        return pd.DataFrame()
############################# #########################################
############################# #########################################
############################# #########################################
############################# #########################################
class TestingEnv(gym.Env):
    """Simple trading environment for reinforcement learning (test data).

    Pairs a QuandlEnvSrcTest observation stream with a TradingSim reward
    bookkeeper behind the legacy gym.Env interface.
    """
    metadata = {'render.modes': ['human']}

    def __init__(self):
        self.days = 252
        self.src = QuandlEnvSrcTest(days=self.days)
        self.sim = TradingSim(steps=self.days, trading_cost_bps=1e-3,
                              time_cost_bps=1e-4, NumberOfStocks=self.src.NumberOfStocks)
        # One continuous weight per stock, each in [-1, 1].
        self.action_space = spaces.Box(low=-1, high=1, shape=(self.src.NumberOfStocks,))
        self.observation_space = spaces.Box(self.src.min_values,
                                            self.src.max_values)
        self._reset()

    def _configure(self, display=None):
        self.display = display

    def _seed(self, seed=None):
        self.np_random, seed = seeding.np_random(seed)
        return [seed]

    def _step(self, action):
        """Advance one day; returns (obs, reward, done, sortino, info, returns)."""
        #assert self.action_space.contains(action), "%r (%s) invalid"%(action, type(action))
        observation, done, Returns = self.src._step()
        # Column indices of each stock's "Return" feature in the observation.
        retAllStocks = list(np.arange(self.src.Dimension - 1,
                                      self.src.Dimension * self.src.NumberOfStocks,
                                      self.src.Dimension))
        yret = observation[-1, retAllStocks]
        reward, sort, info = self.sim._step(action, yret)
        return observation, reward, done, sort, info, Returns

    def _reset(self):
        self.src.reset()
        self.sim.reset()
        out = self.src._step()
        # Return (first observation, past returns).
        return out[0], out[2]

    def _render(self, mode='human', close=False):
        # Rendering is not implemented.
        pass

    # some convenience functions:
    def run_strat(self, strategy, return_df=True):
        """Run *strategy* for one episode; return the summary df if asked.

        NOTE(review): self.step() forwards _step()'s 6-tuple while this loop
        unpacks 4 values — confirm against the gym version in use.
        """
        observation = self.reset()
        done = False
        while not done:
            action = strategy(observation, self)  # call strategy
            observation, reward, done, info = self.step(action)
        return self.sim.to_df() if return_df else None

    def run_strats(self, strategy, episodes=1, write_log=True, return_df=True):
        """Run *strategy* *episodes* times, optionally logging/aggregating.

        Writing the log and concatenating dataframes are both costly; disable
        whichever outputs you do not need.
        """
        logfile = None
        if write_log:
            logfile = tempfile.NamedTemporaryFile(delete=False)
            log.info('writing log to %s', logfile.name)
        need_df = write_log or return_df
        alldf = None
        for i in range(episodes):
            df = self.run_strat(strategy, return_df=need_df)
            if write_log:
                # Fix: the original left only a commented-out statement as
                # this branch's body (a syntax error); restore the log write.
                df.to_csv(logfile, mode='a')
            if return_df:
                alldf = df if alldf is None else pd.concat([alldf, df], axis=0)
        return alldf
| true | true |
1c2f787ce2eac863bb77dd9a5d921cb975743311 | 428 | py | Python | array/0283_move_zeroes/0283_move_zeroes.py | zdyxry/LeetCode | 33371285d0f3302158230f46e8b1b63b9f4639c4 | [
"Xnet",
"X11"
] | 6 | 2019-09-16T01:50:44.000Z | 2020-09-17T08:52:25.000Z | array/0283_move_zeroes/0283_move_zeroes.py | zdyxry/LeetCode | 33371285d0f3302158230f46e8b1b63b9f4639c4 | [
"Xnet",
"X11"
] | null | null | null | array/0283_move_zeroes/0283_move_zeroes.py | zdyxry/LeetCode | 33371285d0f3302158230f46e8b1b63b9f4639c4 | [
"Xnet",
"X11"
] | 4 | 2020-02-07T12:43:16.000Z | 2021-04-11T06:38:55.000Z | # -*- coding: utf-8 -*-
class Solution(object):
    def moveZeroes(self, nums):
        """
        :type nums: List[int]
        :rtype: None Do not return anything, modify nums in-place instead.

        Single pass: every non-zero element is swapped down to the next
        write slot, leaving all zeroes packed at the tail.
        """
        write_idx = 0
        for read_idx, value in enumerate(nums):
            if value:
                nums[read_idx], nums[write_idx] = nums[write_idx], nums[read_idx]
                write_idx += 1
# Quick manual check: zeroes move to the tail while the relative order of
# non-zero elements is preserved (expected output: [1, 3, 12, 0, 0]).
nums = [0,1,0,3,12]
Solution().moveZeroes(nums)
print(nums)
class Solution(object):
    def moveZeroes(self, nums):
        """Shift all zeroes of *nums* to the end in place, keeping the
        relative order of the non-zero elements (one-pass swap scheme)."""
        insert_at = 0
        for cur in range(len(nums)):
            if nums[cur] != 0:
                nums[cur], nums[insert_at] = nums[insert_at], nums[cur]
                insert_at += 1
# Manual smoke test of Solution.moveZeroes (expected: [1, 3, 12, 0, 0]).
nums = [0,1,0,3,12]
Solution().moveZeroes(nums)
print(nums)
1c2f7a621aca507291ac0ddb3de1ae98acabf9a0 | 4,878 | py | Python | tests/integration/states/test_nsxt_manager.py | jain-prerna/salt-ext-modules-vmware-old | 89ea6dd77c6d5a35dc55c23adbdc361949a63057 | [
"Apache-2.0"
] | 1 | 2021-11-02T20:24:19.000Z | 2021-11-02T20:24:19.000Z | tests/integration/states/test_nsxt_manager.py | cmcmarrow/salt-ext-modules-vmware | c546a9f9ae121b7399dabae82f714117d0ab558d | [
"Apache-2.0"
] | null | null | null | tests/integration/states/test_nsxt_manager.py | cmcmarrow/salt-ext-modules-vmware | c546a9f9ae121b7399dabae82f714117d0ab558d | [
"Apache-2.0"
] | 1 | 2021-12-15T02:46:59.000Z | 2021-12-15T02:46:59.000Z | """
Integration Tests for nsxt_manager state module
"""
import logging
import pytest
import requests
from requests.auth import HTTPBasicAuth
# Module logger and the NSX-T manager-config REST endpoint template
# (formatted with the manager hostname).
log = logging.getLogger(__name__)
BASE_URL = "https://{}/api/v1/configs/management"
def _get_manager_config_from_nsxt(nsxt_config):
    """GET the current NSX-T manager configuration as a parsed JSON dict."""
    url = BASE_URL.format(nsxt_config["hostname"])
    credentials = HTTPBasicAuth(nsxt_config["username"], nsxt_config["password"])
    # "cert" doubles as requests' `verify`: a CA bundle path, or False
    # (the default) to skip TLS verification on lab appliances.
    tls_verify = nsxt_config.get("cert", False)
    return requests.get(url=url, auth=credentials, verify=tls_verify).json()
def _set_manager_config_to_nsxt(nsxt_config, data):
    """PUT *data* (a manager-config dict) back to NSX-T; return parsed JSON.

    Fix: the original passed the dict via ``data=`` together with a manual
    JSON Content-Type header — requests form-encodes a dict given as
    ``data``, so the body did not match the header.  ``json=`` serializes
    the dict as a JSON body and sets the header itself.
    """
    hostname = nsxt_config["hostname"]
    username = nsxt_config["username"]
    password = nsxt_config["password"]
    # "cert" doubles as `verify`: CA bundle path or False (default).
    cert = nsxt_config.get("cert", False)
    return requests.put(
        url=BASE_URL.format(hostname),
        auth=HTTPBasicAuth(username, password),
        verify=cert,
        json=data,
    ).json()
@pytest.fixture
def publish_fqdns(nsxt_config):
    """Yield the manager's current publish_fqdns flag; restore it on teardown."""
    # get current config
    current_manager_config = _get_manager_config_from_nsxt(nsxt_config)
    publish_fqdns = current_manager_config["publish_fqdns"]
    log.info("Initial publish_fqdns value %s", publish_fqdns)
    # yield the current publish_fqdns
    yield publish_fqdns
    # get current config for latest revision number after tests ran
    current_manager_config = _get_manager_config_from_nsxt(nsxt_config)
    current_manager_config["publish_fqdns"] = publish_fqdns
    log.info("Final publish_fqdns value %s", publish_fqdns)
    # restore the config state to original
    _set_manager_config_to_nsxt(nsxt_config, current_manager_config)
def test_nsxt_manager(nsxt_config, salt_call_cli, publish_fqdns):
    """
    Tests NSX-T Manager State module to verify publish_fqdns_enabled/publish_fqdns_disabled
    when it is enabled/disabled in NSX-T Manager.

    The original had two mirrored 12-assertion branches (one per starting
    value of the flag); they are deduplicated into one transition sequence:
    no-op, flip, no-op on the flipped value, flip back.
    """
    initial = bool(publish_fqdns)
    ops = {
        True: lambda: _execute_publish_fqdns_enabled(nsxt_config, salt_call_cli),
        False: lambda: _execute_publish_fqdns_disabled(nsxt_config, salt_call_cli),
    }

    def assert_noop(value):
        # Applying the state the flag already has must report no changes.
        changes, comment = ops[value]()
        assert not changes
        assert comment == "publish_fqdns is already set to {}".format(value)

    def assert_toggle(value):
        # Flipping the flag must report old/new values and a change comment.
        changes, comment = ops[value]()
        assert dict(changes)["new"]["publish_fqdns"] is value
        assert dict(changes)["old"]["publish_fqdns"] is (not value)
        assert comment == "publish_fqdns has been set to {}".format(value)

    assert_noop(initial)
    assert_toggle(not initial)
    assert_noop(not initial)
    assert_toggle(initial)
def _execute_publish_fqdns_enabled(nsxt_config, salt_call_cli):
    """Apply the publish_fqdns_enabled state and return (changes, comment)."""
    response = salt_call_cli.run(
        "state.single",
        "nsxt_manager.publish_fqdns_enabled",
        name="publish_fqdns_enabled",
        hostname=nsxt_config["hostname"],
        username=nsxt_config["username"],
        password=nsxt_config["password"],
        verify_ssl=False,
    ).json
    # The salt result is a single-entry mapping keyed by the state id.
    state_result = dict(next(iter(response.values())))
    return state_result.get("changes"), state_result.get("comment")
def _execute_publish_fqdns_disabled(nsxt_config, salt_call_cli):
    """Apply the publish_fqdns_disabled state and return (changes, comment)."""
    response = salt_call_cli.run(
        "state.single",
        "nsxt_manager.publish_fqdns_disabled",
        name="publish_fqdns_disabled",
        hostname=nsxt_config["hostname"],
        username=nsxt_config["username"],
        password=nsxt_config["password"],
        verify_ssl=False,
    ).json
    # The salt result is a single-entry mapping keyed by the state id.
    state_result = dict(next(iter(response.values())))
    return state_result.get("changes"), state_result.get("comment")
| 35.093525 | 91 | 0.708282 | import logging
import pytest
import requests
from requests.auth import HTTPBasicAuth
log = logging.getLogger(__name__)
BASE_URL = "https://{}/api/v1/configs/management"
def _get_manager_config_from_nsxt(nsxt_config):
hostname = nsxt_config["hostname"]
username = nsxt_config["username"]
password = nsxt_config["password"]
cert = nsxt_config.get("cert", False)
return requests.get(
url=BASE_URL.format(hostname), auth=HTTPBasicAuth(username, password), verify=cert
).json()
def _set_manager_config_to_nsxt(nsxt_config, data):
    """PUT *data* (a manager-config dict) back to NSX-T; return parsed JSON.

    Fix: the original passed the dict via ``data=`` together with a manual
    JSON Content-Type header — requests form-encodes a dict given as
    ``data``, so the body did not match the header.  ``json=`` serializes
    the dict as a JSON body and sets the header itself.
    """
    hostname = nsxt_config["hostname"]
    username = nsxt_config["username"]
    password = nsxt_config["password"]
    # "cert" doubles as `verify`: CA bundle path or False (default).
    cert = nsxt_config.get("cert", False)
    return requests.put(
        url=BASE_URL.format(hostname),
        auth=HTTPBasicAuth(username, password),
        verify=cert,
        json=data,
    ).json()
@pytest.fixture
def publish_fqdns(nsxt_config):
    """Yield the manager's current publish_fqdns flag; restore it on teardown."""
    # Read the current config to learn the flag's starting value.
    current_manager_config = _get_manager_config_from_nsxt(nsxt_config)
    publish_fqdns = current_manager_config["publish_fqdns"]
    log.info("Initial publish_fqdns value %s", publish_fqdns)
    yield publish_fqdns
    # Teardown: re-fetch for the latest revision, then restore the flag.
    current_manager_config = _get_manager_config_from_nsxt(nsxt_config)
    current_manager_config["publish_fqdns"] = publish_fqdns
    log.info("Final publish_fqdns value %s", publish_fqdns)
    _set_manager_config_to_nsxt(nsxt_config, current_manager_config)
def test_nsxt_manager(nsxt_config, salt_call_cli, publish_fqdns):
    """Verify publish_fqdns_enabled/publish_fqdns_disabled state behavior.

    The original had two mirrored 12-assertion branches (one per starting
    value of the flag); they are deduplicated into one transition sequence:
    no-op, flip, no-op on the flipped value, flip back.
    """
    initial = bool(publish_fqdns)
    ops = {
        True: lambda: _execute_publish_fqdns_enabled(nsxt_config, salt_call_cli),
        False: lambda: _execute_publish_fqdns_disabled(nsxt_config, salt_call_cli),
    }

    def assert_noop(value):
        # Applying the state the flag already has must report no changes.
        changes, comment = ops[value]()
        assert not changes
        assert comment == "publish_fqdns is already set to {}".format(value)

    def assert_toggle(value):
        # Flipping the flag must report old/new values and a change comment.
        changes, comment = ops[value]()
        assert dict(changes)["new"]["publish_fqdns"] is value
        assert dict(changes)["old"]["publish_fqdns"] is (not value)
        assert comment == "publish_fqdns has been set to {}".format(value)

    assert_noop(initial)
    assert_toggle(not initial)
    assert_noop(not initial)
    assert_toggle(initial)
def _execute_publish_fqdns_enabled(nsxt_config, salt_call_cli):
    """Apply the publish_fqdns_enabled state and return (changes, comment)."""
    hostname = nsxt_config["hostname"]
    username = nsxt_config["username"]
    password = nsxt_config["password"]
    response = salt_call_cli.run(
        "state.single",
        "nsxt_manager.publish_fqdns_enabled",
        name="publish_fqdns_enabled",
        hostname=hostname,
        username=username,
        password=password,
        verify_ssl=False,
    ).json
    # The salt result is a single-entry mapping keyed by the state id;
    # extract that entry's changes/comment.
    result = dict(list(response.values())[0])
    return result.get("changes"), result.get("comment")
def _execute_publish_fqdns_disabled(nsxt_config, salt_call_cli):
    """Apply the publish_fqdns_disabled state and return (changes, comment)."""
    hostname = nsxt_config["hostname"]
    username = nsxt_config["username"]
    password = nsxt_config["password"]
    response = salt_call_cli.run(
        "state.single",
        "nsxt_manager.publish_fqdns_disabled",
        name="publish_fqdns_disabled",
        hostname=hostname,
        username=username,
        password=password,
        verify_ssl=False,
    ).json
    # The salt result is a single-entry mapping keyed by the state id;
    # extract that entry's changes/comment.
    result = dict(list(response.values())[0])
    return result.get("changes"), result.get("comment")
| true | true |
1c2f7a87173d99f850ebdd8cc9bb86dfe639a434 | 130,630 | py | Python | cvat/apps/engine/tests/test_rest_api.py | eegml/cvat | e7808cfb0322c1adcf61e7955b8b4a8c2badd0d2 | [
"MIT"
] | null | null | null | cvat/apps/engine/tests/test_rest_api.py | eegml/cvat | e7808cfb0322c1adcf61e7955b8b4a8c2badd0d2 | [
"MIT"
] | null | null | null | cvat/apps/engine/tests/test_rest_api.py | eegml/cvat | e7808cfb0322c1adcf61e7955b8b4a8c2badd0d2 | [
"MIT"
] | null | null | null | # Copyright (C) 2018 Intel Corporation
#
# SPDX-License-Identifier: MIT
import os
import shutil
from PIL import Image
from io import BytesIO
from enum import Enum
import random
from rest_framework.test import APITestCase, APIClient
from rest_framework import status
from django.conf import settings
from django.contrib.auth.models import User, Group
from cvat.apps.engine.models import (Task, Segment, Job, StatusChoice,
AttributeType, Project, Data)
from unittest import mock
import io
import xml.etree.ElementTree as ET
from collections import defaultdict
import zipfile
from pycocotools import coco as coco_loader
import tempfile
import av
import numpy as np
def create_db_users(cls):
    """Create the standard users/groups for the REST API tests and attach
    them to *cls* (admin, owner/user1, assignee/user2, annotator/user3,
    observer/user4, user/user5)."""
    groups = {
        name: Group.objects.get_or_create(name=name)[0]
        for name in ("admin", "user", "annotator", "observer")
    }

    def _make_user(username, group):
        # Ordinary account whose password equals its username.
        new_user = User.objects.create_user(username=username, password=username)
        new_user.groups.add(group)
        return new_user

    user_admin = User.objects.create_superuser(username="admin", email="",
        password="admin")
    user_admin.groups.add(groups["admin"])
    user_owner = _make_user("user1", groups["user"])
    user_assignee = _make_user("user2", groups["annotator"])
    user_annotator = _make_user("user3", groups["annotator"])
    user_observer = _make_user("user4", groups["observer"])
    user_dummy = _make_user("user5", groups["user"])

    cls.admin = user_admin
    cls.owner = cls.user1 = user_owner
    cls.assignee = cls.user2 = user_assignee
    cls.annotator = cls.user3 = user_annotator
    cls.observer = cls.user4 = user_observer
    cls.user = cls.user5 = user_dummy
def create_db_task(data):
    """Create a Task plus its backing Data, Segments and Jobs from a spec dict.

    The "size" and "image_quality" keys are popped out of *data* (they belong
    to the Data model); everything remaining becomes Task fields.  Work
    directories for both models are recreated from scratch.
    """
    db_data = Data.objects.create(size=data.pop("size"),
                                  image_quality=data.pop("image_quality"))
    # Start from empty data/upload directories.
    shutil.rmtree(db_data.get_data_dirname(), ignore_errors=True)
    os.makedirs(db_data.get_data_dirname())
    os.makedirs(db_data.get_upload_dirname())

    db_task = Task.objects.create(**data)
    shutil.rmtree(db_task.get_task_dirname(), ignore_errors=True)
    os.makedirs(db_task.get_task_dirname())
    os.makedirs(db_task.get_task_logs_dirname())
    os.makedirs(db_task.get_task_artifacts_dirname())
    db_task.data = db_data
    db_task.save()

    # One Segment (with one Job each) per segment_size-sized chunk of frames;
    # the final segment may be shorter.
    step = db_task.segment_size
    for seg_start in range(0, db_task.data.size, step):
        seg_stop = min(seg_start + step - 1, db_task.data.size - 1)
        db_segment = Segment(task=db_task, start_frame=seg_start,
                             stop_frame=seg_stop)
        db_segment.save()
        Job(segment=db_segment).save()

    return db_task
def create_dummy_db_tasks(obj, project=None):
    """Create the four standard test tasks (optionally inside *project*) and
    return them in creation order."""
    task_specs = [
        {"name": "my task #1", "owner": obj.owner, "assignee": obj.assignee,
         "overlap": 0, "segment_size": 100, "z_order": False,
         "image_quality": 75, "size": 100, "project": project},
        {"name": "my multijob task", "owner": obj.user, "overlap": 0,
         "segment_size": 100, "z_order": True, "image_quality": 50,
         "size": 200, "project": project},
        {"name": "my task #2", "owner": obj.owner, "assignee": obj.assignee,
         "overlap": 0, "segment_size": 100, "z_order": False,
         "image_quality": 75, "size": 100, "project": project},
        {"name": "super task", "owner": obj.admin, "overlap": 0,
         "segment_size": 50, "z_order": False, "image_quality": 95,
         "size": 50, "project": project},
    ]
    return [create_db_task(spec) for spec in task_specs]
def create_dummy_db_projects(obj):
    """Create the five standard test projects; all but the first also get the
    standard set of dummy tasks.  Returns projects in creation order."""
    project_specs = [
        ({"name": "my empty project", "owner": obj.owner,
          "assignee": obj.assignee}, False),
        ({"name": "my project without assignee", "owner": obj.user}, True),
        ({"name": "my big project", "owner": obj.owner,
          "assignee": obj.assignee}, True),
        ({"name": "public project"}, True),
        ({"name": "super project", "owner": obj.admin,
          "assignee": obj.assignee}, True),
    ]
    projects = []
    for data, with_tasks in project_specs:
        db_project = Project.objects.create(**data)
        if with_tasks:
            create_dummy_db_tasks(obj, db_project)
        projects.append(db_project)
    return projects
class ForceLogin:
    """Context manager that logs *user* into *client* on entry and logs out
    on exit.  A falsy user (e.g. None for anonymous requests) makes both
    steps no-ops."""

    def __init__(self, user, client):
        self.user = user
        self.client = client

    def __enter__(self):
        user = self.user
        if user:
            self.client.force_login(
                user, backend='django.contrib.auth.backends.ModelBackend')
        return self

    def __exit__(self, exception_type, exception_value, traceback):
        if self.user:
            self.client.logout()
class JobGetAPITestCase(APITestCase):
    """GET /api/v1/jobs/{id}: permission matrix per user role.

    Admin, owner, annotator (the job's assignee) and observer may read the
    job; an unrelated user gets 403; anonymous gets 401.  A non-existent id
    yields 404 for authorized users.
    """
    def setUp(self):
        self.client = APIClient()

    @classmethod
    def setUpTestData(cls):
        # Shared fixture: users, one task, and its first job assigned to
        # the annotator.
        create_db_users(cls)
        cls.task = create_dummy_db_tasks(cls)[0]
        cls.job = Job.objects.filter(segment__task_id=cls.task.id).first()
        cls.job.assignee = cls.annotator
        cls.job.save()

    def _run_api_v1_jobs_id(self, jid, user):
        # Issue the GET while logged in as *user* (None => anonymous).
        with ForceLogin(user, self.client):
            response = self.client.get('/api/v1/jobs/{}'.format(jid))
        return response

    def _check_request(self, response):
        # A successful read echoes the job's id, status and frame range.
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data["id"], self.job.id)
        self.assertEqual(response.data["status"], StatusChoice.ANNOTATION)
        self.assertEqual(response.data["start_frame"], self.job.segment.start_frame)
        self.assertEqual(response.data["stop_frame"], self.job.segment.stop_frame)

    def test_api_v1_jobs_id_admin(self):
        response = self._run_api_v1_jobs_id(self.job.id, self.admin)
        self._check_request(response)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.admin)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_owner(self):
        response = self._run_api_v1_jobs_id(self.job.id, self.owner)
        self._check_request(response)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.owner)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_annotator(self):
        response = self._run_api_v1_jobs_id(self.job.id, self.annotator)
        self._check_request(response)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.annotator)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_observer(self):
        response = self._run_api_v1_jobs_id(self.job.id, self.observer)
        self._check_request(response)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.observer)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_user(self):
        # An unrelated user may not read the job at all.
        response = self._run_api_v1_jobs_id(self.job.id, self.user)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.user)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_no_auth(self):
        # Anonymous requests are rejected before any object lookup.
        response = self._run_api_v1_jobs_id(self.job.id, None)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        response = self._run_api_v1_jobs_id(self.job.id + 10, None)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class JobUpdateAPITestCase(APITestCase):
    """PUT /api/v1/jobs/{id}: permission matrix for full job updates.

    Admin, owner and the assigned annotator may update status/assignee;
    observer and unrelated users get 403; anonymous gets 401.
    """
    def setUp(self):
        # The task/job are created per test (not in setUpTestData) because
        # each test mutates the job.
        self.client = APIClient()
        self.task = create_dummy_db_tasks(self)[0]
        self.job = Job.objects.filter(segment__task_id=self.task.id).first()
        self.job.assignee = self.annotator
        self.job.save()

    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)

    def _run_api_v1_jobs_id(self, jid, user, data):
        # Issue a full-update PUT while logged in as *user* (None => anonymous).
        with ForceLogin(user, self.client):
            response = self.client.put('/api/v1/jobs/{}'.format(jid), data=data, format='json')
        return response

    def _check_request(self, response, data):
        # The response must reflect the submitted fields, falling back to
        # the job's pre-existing values for anything not in *data*.
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data["id"], self.job.id)
        self.assertEqual(response.data["status"], data.get('status', self.job.status))
        assignee = self.job.assignee.id if self.job.assignee else None
        self.assertEqual(response.data["assignee"], data.get('assignee', assignee))
        self.assertEqual(response.data["start_frame"], self.job.segment.start_frame)
        self.assertEqual(response.data["stop_frame"], self.job.segment.stop_frame)

    def test_api_v1_jobs_id_admin(self):
        data = {"status": StatusChoice.COMPLETED, "assignee": self.owner.id}
        response = self._run_api_v1_jobs_id(self.job.id, self.admin, data)
        self._check_request(response, data)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.admin, data)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_owner(self):
        data = {"status": StatusChoice.VALIDATION, "assignee": self.annotator.id}
        response = self._run_api_v1_jobs_id(self.job.id, self.owner, data)
        self._check_request(response, data)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.owner, data)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_annotator(self):
        data = {"status": StatusChoice.ANNOTATION, "assignee": self.user.id}
        response = self._run_api_v1_jobs_id(self.job.id, self.annotator, data)
        self._check_request(response, data)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.annotator, data)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_observer(self):
        # Observers have read-only access: updates are forbidden.
        data = {"status": StatusChoice.ANNOTATION, "assignee": self.admin.id}
        response = self._run_api_v1_jobs_id(self.job.id, self.observer, data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.observer, data)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_user(self):
        data = {"status": StatusChoice.ANNOTATION, "assignee": self.user.id}
        response = self._run_api_v1_jobs_id(self.job.id, self.user, data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.user, data)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_no_auth(self):
        data = {"status": StatusChoice.ANNOTATION, "assignee": self.user.id}
        response = self._run_api_v1_jobs_id(self.job.id, None, data)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        response = self._run_api_v1_jobs_id(self.job.id + 10, None, data)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class JobPartialUpdateAPITestCase(JobUpdateAPITestCase):
    """PATCH /api/v1/jobs/<id>: partial updates, reusing the PUT checks."""
    def _run_api_v1_jobs_id(self, jid, user, data):
        # Override the parent's PUT with a PATCH; everything else is shared.
        with ForceLogin(user, self.client):
            return self.client.patch(f'/api/v1/jobs/{jid}', data=data, format='json')
    def test_api_v1_jobs_id_annotator_partial(self):
        data = {"status": StatusChoice.VALIDATION}
        self._check_request(self._run_api_v1_jobs_id(self.job.id, self.owner, data), data)
    def test_api_v1_jobs_id_admin_partial(self):
        data = {"assignee": self.user.id}
        self._check_request(self._run_api_v1_jobs_id(self.job.id, self.owner, data), data)
class ServerAboutAPITestCase(APITestCase):
    """GET /api/v1/server/about: available to any authenticated user."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
    def _run_api_v1_server_about(self, user):
        with ForceLogin(user, self.client):
            return self.client.get('/api/v1/server/about')
    def _check_request(self, response):
        # The endpoint must report the server identity fields.
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        for field in ("name", "description", "version"):
            self.assertIsNotNone(response.data.get(field, None))
    def test_api_v1_server_about_admin(self):
        self._check_request(self._run_api_v1_server_about(self.admin))
    def test_api_v1_server_about_user(self):
        self._check_request(self._run_api_v1_server_about(self.user))
    def test_api_v1_server_about_no_auth(self):
        response = self._run_api_v1_server_about(None)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class ServerExceptionAPITestCase(APITestCase):
    """POST /api/v1/server/exception: any authenticated user may report a
    client-side exception; anonymous requests are rejected."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        # A representative client-exception payload; these tests only need
        # the endpoint to accept it, not to interpret it.
        cls.data = {
            "system": "Linux",
            "client": "rest_framework.APIClient",
            "time": "2019-01-29T12:34:56.000000Z",
            "task_id": 1,
            "job_id": 1,
            "proj_id": 2,
            "client_id": 12321235123,
            "message": "just test message",
            "filename": "http://localhost/my_file.js",
            "line": 1,
            "column": 1,
            "stack": ""
        }
    def _run_api_v1_server_exception(self, user):
        """POST the payload as *user* with the client logger mocked out so
        nothing is written to the real log backend."""
        with ForceLogin(user, self.client):
            # The mock object is never inspected, so do not bind it to a
            # name; this also removes the pylint unused-variable suppression.
            with mock.patch("cvat.apps.engine.views.clogger"):
                response = self.client.post('/api/v1/server/exception',
                    self.data, format='json')
        return response
    def test_api_v1_server_exception_admin(self):
        response = self._run_api_v1_server_exception(self.admin)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    def test_api_v1_server_exception_user(self):
        response = self._run_api_v1_server_exception(self.user)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    def test_api_v1_server_exception_no_auth(self):
        response = self._run_api_v1_server_exception(None)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class ServerLogsAPITestCase(APITestCase):
    """POST /api/v1/server/logs: any authenticated user may push client log
    events; anonymous requests are rejected."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        # Two sample events: one fully populated, one with only the
        # required fields.
        cls.data = [
            {
                "time": "2019-01-29T12:34:56.000000Z",
                "task_id": 1,
                "job_id": 1,
                "proj_id": 2,
                "client_id": 12321235123,
                "message": "just test message",
                "name": "add point",
                "is_active": True,
                "payload": {"count": 1}
            },
            {
                "time": "2019-02-24T12:34:56.000000Z",
                "client_id": 12321235123,
                "name": "add point",
                "is_active": True,
            }]
    def _run_api_v1_server_logs(self, user):
        """POST the events as *user* with the client logger mocked out."""
        with ForceLogin(user, self.client):
            # The mock object is never inspected, so do not bind it to a
            # name; this also removes the pylint unused-variable suppression.
            with mock.patch("cvat.apps.engine.views.clogger"):
                response = self.client.post('/api/v1/server/logs',
                    self.data, format='json')
        return response
    def test_api_v1_server_logs_admin(self):
        response = self._run_api_v1_server_logs(self.admin)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    def test_api_v1_server_logs_user(self):
        response = self._run_api_v1_server_logs(self.user)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    def test_api_v1_server_logs_no_auth(self):
        response = self._run_api_v1_server_logs(None)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class UserAPITestCase(APITestCase):
    """Shared fixtures and field-level checks for the /users endpoints."""
    def setUp(self):
        self.client = APIClient()
        create_db_users(self)
    def _check_response(self, user, response, is_full=True):
        """Assert a 200 reply whose payload describes *user*."""
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self._check_data(user, response.data, is_full)
    def _check_data(self, user, data, is_full):
        """Compare serialized *data* against the DB record. The privileged
        fields must be present exactly when *is_full* is true."""
        self.assertEqual(data["id"], user.id)
        self.assertEqual(data["username"], user.username)
        self.assertEqual(data["first_name"], user.first_name)
        self.assertEqual(data["last_name"], user.last_name)
        self.assertEqual(data["email"], user.email)
        extra_check = self.assertIn if is_full else self.assertNotIn
        for field in ("groups", "is_staff", "is_superuser", "is_active",
                "last_login", "date_joined"):
            extra_check(field, data)
class UserListAPITestCase(UserAPITestCase):
    """GET /api/v1/users: anyone authenticated may list, but only the admin
    receives the privileged fields."""
    def _run_api_v1_users(self, user):
        with ForceLogin(user, self.client):
            return self.client.get('/api/v1/users')
    def _check_response(self, user, response, is_full):
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # Each result row is matched to the fixture user of the same name.
        for entry in response.data['results']:
            db_user = getattr(self, entry['username'])
            self._check_data(db_user, entry, is_full)
    def test_api_v1_users_admin(self):
        self._check_response(self.admin, self._run_api_v1_users(self.admin), True)
    def test_api_v1_users_user(self):
        self._check_response(self.user, self._run_api_v1_users(self.user), False)
    def test_api_v1_users_annotator(self):
        self._check_response(self.annotator, self._run_api_v1_users(self.annotator), False)
    def test_api_v1_users_observer(self):
        self._check_response(self.observer, self._run_api_v1_users(self.observer), False)
    def test_api_v1_users_no_auth(self):
        response = self._run_api_v1_users(None)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class UserSelfAPITestCase(UserAPITestCase):
    """GET /api/v1/users/self returns the caller's own full record."""
    def _run_api_v1_users_self(self, user):
        with ForceLogin(user, self.client):
            return self.client.get('/api/v1/users/self')
    def test_api_v1_users_self_admin(self):
        self._check_response(self.admin, self._run_api_v1_users_self(self.admin))
    def test_api_v1_users_self_user(self):
        self._check_response(self.user, self._run_api_v1_users_self(self.user))
    def test_api_v1_users_self_annotator(self):
        self._check_response(self.annotator, self._run_api_v1_users_self(self.annotator))
    def test_api_v1_users_self_observer(self):
        self._check_response(self.observer, self._run_api_v1_users_self(self.observer))
    def test_api_v1_users_self_no_auth(self):
        response = self._run_api_v1_users_self(None)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class UserGetAPITestCase(UserAPITestCase):
    """GET /api/v1/users/<id>: full payload for self and admin, trimmed
    payload when reading another account."""
    def _run_api_v1_users_id(self, user, user_id):
        with ForceLogin(user, self.client):
            return self.client.get(f'/api/v1/users/{user_id}')
    def test_api_v1_users_id_admin(self):
        # Admin gets the complete record for every account.
        for target in (self.user, self.admin, self.owner):
            response = self._run_api_v1_users_id(self.admin, target.id)
            self._check_response(target, response, True)
    def test_api_v1_users_id_user(self):
        response = self._run_api_v1_users_id(self.user, self.user.id)
        self._check_response(self.user, response, True)
        response = self._run_api_v1_users_id(self.user, self.owner.id)
        self._check_response(self.owner, response, False)
    def test_api_v1_users_id_annotator(self):
        response = self._run_api_v1_users_id(self.annotator, self.annotator.id)
        self._check_response(self.annotator, response, True)
        response = self._run_api_v1_users_id(self.annotator, self.user.id)
        self._check_response(self.user, response, False)
    def test_api_v1_users_id_observer(self):
        response = self._run_api_v1_users_id(self.observer, self.observer.id)
        self._check_response(self.observer, response, True)
        response = self._run_api_v1_users_id(self.observer, self.user.id)
        self._check_response(self.user, response, False)
    def test_api_v1_users_id_no_auth(self):
        response = self._run_api_v1_users_id(None, self.user.id)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class UserPartialUpdateAPITestCase(UserAPITestCase):
    """PATCH /api/v1/users/<id>: self-service edits are limited to plain
    profile fields; privilege flags and other accounts are off limits."""
    def _run_api_v1_users_id(self, user, user_id, data):
        with ForceLogin(user, self.client):
            return self.client.patch(f'/api/v1/users/{user_id}', data=data)
    def _check_response_with_data(self, user, response, data, is_full):
        # Re-read the user so the comparison sees the patched DB state.
        user = User.objects.get(id=user.id)
        for field, value in data.items():
            self.assertEqual(response.data[field], value)
        self._check_response(user, response, is_full)
    def test_api_v1_users_id_admin_partial(self):
        data = {"username": "user09", "last_name": "my last name"}
        response = self._run_api_v1_users_id(self.admin, self.user.id, data)
        self._check_response_with_data(self.user, response, data, True)
    def test_api_v1_users_id_user_partial(self):
        data = {"username": "user10", "first_name": "my name"}
        response = self._run_api_v1_users_id(self.user, self.user.id, data)
        self._check_response_with_data(self.user, response, data, False)
        # Attempts to touch privilege flags must be rejected as bad requests.
        for forbidden in (
                {"is_staff": True},
                {"username": "admin", "is_superuser": True},
                {"username": "non_active", "is_active": False}):
            response = self._run_api_v1_users_id(self.user, self.user.id, forbidden)
            self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        # Editing somebody else's account is forbidden outright.
        data = {"username": "annotator01", "first_name": "slave"}
        response = self._run_api_v1_users_id(self.user, self.annotator.id, data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    def test_api_v1_users_id_no_auth_partial(self):
        data = {"username": "user12"}
        response = self._run_api_v1_users_id(None, self.user.id, data)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class UserDeleteAPITestCase(UserAPITestCase):
    """DELETE /api/v1/users/<id>: users may delete themselves; only the
    admin may delete other accounts."""
    def _run_api_v1_users_id(self, user, user_id):
        with ForceLogin(user, self.client):
            return self.client.delete(f'/api/v1/users/{user_id}')
    def _assert_delete(self, actor, target, expected_status):
        # Helper: issue the delete as *actor* and check the outcome.
        response = self._run_api_v1_users_id(actor, target.id)
        self.assertEqual(response.status_code, expected_status)
    def test_api_v1_users_id_admin(self):
        self._assert_delete(self.admin, self.user, status.HTTP_204_NO_CONTENT)
        self._assert_delete(self.admin, self.admin, status.HTTP_204_NO_CONTENT)
    def test_api_v1_users_id_user(self):
        self._assert_delete(self.user, self.owner, status.HTTP_403_FORBIDDEN)
        self._assert_delete(self.user, self.user, status.HTTP_204_NO_CONTENT)
    def test_api_v1_users_id_annotator(self):
        self._assert_delete(self.annotator, self.user, status.HTTP_403_FORBIDDEN)
        self._assert_delete(self.annotator, self.annotator, status.HTTP_204_NO_CONTENT)
    def test_api_v1_users_id_observer(self):
        self._assert_delete(self.observer, self.user, status.HTTP_403_FORBIDDEN)
        self._assert_delete(self.observer, self.observer, status.HTTP_204_NO_CONTENT)
    def test_api_v1_users_id_no_auth(self):
        self._assert_delete(None, self.user, status.HTTP_401_UNAUTHORIZED)
class ProjectListAPITestCase(APITestCase):
    """GET /api/v1/projects: project list visibility per role."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.projects = create_dummy_db_projects(cls)
    def _run_api_v1_projects(self, user, params=""):
        with ForceLogin(user, self.client):
            return self.client.get(f'/api/v1/projects{params}')
    def _assert_names(self, response, expected_names):
        # Compare the returned project names order-insensitively.
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        actual = sorted(entry["name"] for entry in response.data["results"])
        self.assertListEqual(sorted(expected_names), actual)
    def test_api_v1_projects_admin(self):
        response = self._run_api_v1_projects(self.admin)
        self._assert_names(response, [project.name for project in self.projects])
    def test_api_v1_projects_user(self):
        # A plain user does not see projects they have no relation to.
        response = self._run_api_v1_projects(self.user)
        visible = [project.name for project in self.projects
            if project.name != 'my empty project']
        self._assert_names(response, visible)
    def test_api_v1_projects_observer(self):
        response = self._run_api_v1_projects(self.observer)
        self._assert_names(response, [project.name for project in self.projects])
    def test_api_v1_projects_no_auth(self):
        response = self._run_api_v1_projects(None)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class ProjectGetAPITestCase(APITestCase):
    """GET /api/v1/projects/<id>: payload contents and access control."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.projects = create_dummy_db_projects(cls)
    def _run_api_v1_projects_id(self, pid, user):
        with ForceLogin(user, self.client):
            return self.client.get(f'/api/v1/projects/{pid}')
    def _check_response(self, response, db_project):
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        expected = {
            "name": db_project.name,
            "owner": db_project.owner.id if db_project.owner else None,
            "assignee": db_project.assignee.id if db_project.assignee else None,
            "status": db_project.status,
        }
        for field, value in expected.items():
            self.assertEqual(response.data[field], value)
    def _check_api_v1_projects_id(self, user):
        # Expected outcome depends on the caller's per-project permission.
        for db_project in self.projects:
            response = self._run_api_v1_projects_id(db_project.id, user)
            if user is None:
                self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
            elif user.has_perm("engine.project.access", db_project):
                self._check_response(response, db_project)
            else:
                self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    def test_api_v1_projects_id_admin(self):
        self._check_api_v1_projects_id(self.admin)
    def test_api_v1_projects_id_user(self):
        self._check_api_v1_projects_id(self.user)
    def test_api_v1_projects_id_observer(self):
        self._check_api_v1_projects_id(self.observer)
    def test_api_v1_projects_id_no_auth(self):
        self._check_api_v1_projects_id(None)
class ProjectDeleteAPITestCase(APITestCase):
    """DELETE /api/v1/projects/<id>, guarded by the project.delete permission."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.projects = create_dummy_db_projects(cls)
    def _run_api_v1_projects_id(self, pid, user):
        with ForceLogin(user, self.client):
            return self.client.delete(f'/api/v1/projects/{pid}', format="json")
    def _check_api_v1_projects_id(self, user):
        for db_project in self.projects:
            response = self._run_api_v1_projects_id(db_project.id, user)
            if user is None:
                self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
            elif user.has_perm("engine.project.delete", db_project):
                self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
            else:
                self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    def test_api_v1_projects_id_admin(self):
        self._check_api_v1_projects_id(self.admin)
    def test_api_v1_projects_id_user(self):
        self._check_api_v1_projects_id(self.user)
    def test_api_v1_projects_id_observer(self):
        self._check_api_v1_projects_id(self.observer)
    def test_api_v1_projects_id_no_auth(self):
        self._check_api_v1_projects_id(None)
class ProjectCreateAPITestCase(APITestCase):
    """POST /api/v1/projects: project creation per role."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
    def _run_api_v1_projects(self, user, data):
        with ForceLogin(user, self.client):
            return self.client.post('/api/v1/projects', data=data, format="json")
    def _check_response(self, response, user, data):
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        expected = {
            "name": data["name"],
            # The creator becomes the owner unless one was given explicitly.
            "owner": data.get("owner", user.id),
            "assignee": data.get("assignee"),
            "bug_tracker": data.get("bug_tracker", ""),
            "status": StatusChoice.ANNOTATION,
        }
        for field, value in expected.items():
            self.assertEqual(response.data[field], value)
    def _check_api_v1_projects(self, user, data):
        response = self._run_api_v1_projects(user, data)
        if user is None:
            self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        elif user.has_perm("engine.project.create"):
            self._check_response(response, user, data)
        else:
            self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    def test_api_v1_projects_admin(self):
        data = {
            "name": "new name for the project",
            "bug_tracker": "http://example.com"
        }
        self._check_api_v1_projects(self.admin, data)
        data = {
            "owner": self.owner.id,
            "assignee": self.assignee.id,
            "name": "new name for the project"
        }
        self._check_api_v1_projects(self.admin, data)
        data = {
            "owner": self.admin.id,
            "name": "2"
        }
        self._check_api_v1_projects(self.admin, data)
    def test_api_v1_projects_user(self):
        data = {
            "name": "Dummy name",
            "bug_tracker": "it is just text"
        }
        self._check_api_v1_projects(self.user, data)
        data = {
            "owner": self.owner.id,
            "assignee": self.assignee.id,
            "name": "My import project with data"
        }
        self._check_api_v1_projects(self.user, data)
    def test_api_v1_projects_observer(self):
        data = {
            "name": "My Project #1",
            "owner": self.owner.id,
            "assignee": self.assignee.id
        }
        self._check_api_v1_projects(self.observer, data)
    def test_api_v1_projects_no_auth(self):
        data = {
            "name": "My Project #2",
            "owner": self.admin.id,
        }
        self._check_api_v1_projects(None, data)
class ProjectPartialUpdateAPITestCase(APITestCase):
    """PATCH /api/v1/projects/<id>: partial project updates per role."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.projects = create_dummy_db_projects(cls)
    def _run_api_v1_projects_id(self, pid, user, data):
        with ForceLogin(user, self.client):
            return self.client.patch(f'/api/v1/projects/{pid}',
                data=data, format="json")
    def _check_response(self, response, db_project, data):
        """Each field must match the patch payload, or the original DB
        value when the field was absent from the patch."""
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data["name"], data.get("name", db_project.name))
        db_owner = db_project.owner.id if db_project.owner else None
        self.assertEqual(response.data["owner"], data.get("owner", db_owner))
        db_assignee = db_project.assignee.id if db_project.assignee else None
        self.assertEqual(response.data["assignee"], data.get("assignee", db_assignee))
        self.assertEqual(response.data["status"], db_project.status)
    def _check_api_v1_projects_id(self, user, data):
        for db_project in self.projects:
            response = self._run_api_v1_projects_id(db_project.id, user, data)
            if user is None:
                self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
            elif user.has_perm("engine.project.change", db_project):
                self._check_response(response, db_project, data)
            else:
                self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    def test_api_v1_projects_id_admin(self):
        data = {
            "name": "new name for the project",
            "owner": self.owner.id,
        }
        self._check_api_v1_projects_id(self.admin, data)
    def test_api_v1_projects_id_user(self):
        data = {
            "name": "new name for the project",
            "owner": self.assignee.id,
        }
        self._check_api_v1_projects_id(self.user, data)
    def test_api_v1_projects_id_observer(self):
        data = {
            "name": "new name for the project",
        }
        self._check_api_v1_projects_id(self.observer, data)
    def test_api_v1_projects_id_no_auth(self):
        data = {
            "name": "new name for the project",
        }
        self._check_api_v1_projects_id(None, data)
class ProjectListOfTasksAPITestCase(APITestCase):
    """GET /api/v1/projects/<id>/tasks: which project tasks each role sees."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.projects = create_dummy_db_projects(cls)
    def _run_api_v1_projects_id_tasks(self, user, pid):
        with ForceLogin(user, self.client):
            return self.client.get(f'/api/v1/projects/{pid}/tasks')
    def _assert_task_names(self, response, expected_names):
        # Compare the returned task names order-insensitively.
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        actual = sorted(entry["name"] for entry in response.data["results"])
        self.assertListEqual(sorted(expected_names), actual)
    def test_api_v1_projects_id_tasks_admin(self):
        project = self.projects[1]
        response = self._run_api_v1_projects_id_tasks(self.admin, project.id)
        self._assert_task_names(response, [task.name for task in project.tasks.all()])
    def test_api_v1_projects_id_tasks_user(self):
        # A plain user sees only tasks they own/are assigned to, or tasks
        # with no owner/assignee at all.
        project = self.projects[1]
        response = self._run_api_v1_projects_id_tasks(self.user, project.id)
        visible = [task.name for task in project.tasks.all()
            if task.owner in [None, self.user] or
                task.assignee in [None, self.user]]
        self._assert_task_names(response, visible)
    def test_api_v1_projects_id_tasks_observer(self):
        project = self.projects[1]
        response = self._run_api_v1_projects_id_tasks(self.observer, project.id)
        self._assert_task_names(response, [task.name for task in project.tasks.all()])
    def test_api_v1_projects_id_tasks_no_auth(self):
        project = self.projects[1]
        response = self._run_api_v1_projects_id_tasks(None, project.id)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class TaskListAPITestCase(APITestCase):
    """GET /api/v1/tasks: task list visibility per role."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.tasks = create_dummy_db_tasks(cls)
    def _run_api_v1_tasks(self, user, params=""):
        """GET the task list as *user*; *params* is an optional query string."""
        with ForceLogin(user, self.client):
            response = self.client.get('/api/v1/tasks{}'.format(params))
        return response
    def test_api_v1_tasks_admin(self):
        # Admin sees every task.
        response = self._run_api_v1_tasks(self.admin)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertListEqual(
            sorted([task.name for task in self.tasks]),
            sorted([res["name"] for res in response.data["results"]]))
    def test_api_v1_tasks_user(self):
        # A plain user sees only tasks they own, or unassigned tasks.
        response = self._run_api_v1_tasks(self.user)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertListEqual(
            sorted([task.name for task in self.tasks
                # PEP 8: compare against None with "is", not "==".
                if (task.owner == self.user or task.assignee is None)]),
            sorted([res["name"] for res in response.data["results"]]))
    def test_api_v1_tasks_observer(self):
        # Observers have read access to every task.
        response = self._run_api_v1_tasks(self.observer)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertListEqual(
            sorted([task.name for task in self.tasks]),
            sorted([res["name"] for res in response.data["results"]]))
    def test_api_v1_tasks_no_auth(self):
        response = self._run_api_v1_tasks(None)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class TaskGetAPITestCase(APITestCase):
    """GET /api/v1/tasks/<id>: payload contents and access control."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.tasks = create_dummy_db_tasks(cls)
    def _run_api_v1_tasks_id(self, tid, user):
        with ForceLogin(user, self.client):
            return self.client.get(f'/api/v1/tasks/{tid}')
    def _check_response(self, response, db_task):
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        expected = {
            "name": db_task.name,
            "size": db_task.data.size,
            "mode": db_task.mode,
            "owner": db_task.owner.id if db_task.owner else None,
            "assignee": db_task.assignee.id if db_task.assignee else None,
            "overlap": db_task.overlap,
            "segment_size": db_task.segment_size,
            "z_order": db_task.z_order,
            "image_quality": db_task.data.image_quality,
            "status": db_task.status,
        }
        for field, value in expected.items():
            self.assertEqual(response.data[field], value)
        self.assertListEqual(
            [label.name for label in db_task.label_set.all()],
            [label["name"] for label in response.data["labels"]]
        )
    def _check_api_v1_tasks_id(self, user):
        for db_task in self.tasks:
            response = self._run_api_v1_tasks_id(db_task.id, user)
            if user is None:
                self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
            elif user.has_perm("engine.task.access", db_task):
                self._check_response(response, db_task)
            else:
                self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    def test_api_v1_tasks_id_admin(self):
        self._check_api_v1_tasks_id(self.admin)
    def test_api_v1_tasks_id_user(self):
        self._check_api_v1_tasks_id(self.user)
    def test_api_v1_tasks_id_observer(self):
        self._check_api_v1_tasks_id(self.observer)
    def test_api_v1_tasks_id_no_auth(self):
        self._check_api_v1_tasks_id(None)
class TaskDeleteAPITestCase(APITestCase):
    """DELETE /api/v1/tasks/<id>, guarded by the task.delete permission."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.tasks = create_dummy_db_tasks(cls)
    def _run_api_v1_tasks_id(self, tid, user):
        with ForceLogin(user, self.client):
            return self.client.delete(f'/api/v1/tasks/{tid}', format="json")
    def _check_api_v1_tasks_id(self, user):
        for db_task in self.tasks:
            response = self._run_api_v1_tasks_id(db_task.id, user)
            if user is None:
                self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
            elif user.has_perm("engine.task.delete", db_task):
                self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
            else:
                self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    def test_api_v1_tasks_id_admin(self):
        self._check_api_v1_tasks_id(self.admin)
    def test_api_v1_tasks_id_user(self):
        self._check_api_v1_tasks_id(self.user)
    def test_api_v1_tasks_id_observer(self):
        self._check_api_v1_tasks_id(self.observer)
    def test_api_v1_tasks_id_no_auth(self):
        self._check_api_v1_tasks_id(None)
class TaskUpdateAPITestCase(APITestCase):
    """Tests for PUT /api/v1/tasks/<id> (full task update) across user roles."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        # Shared fixtures: the standard users plus a set of dummy tasks.
        create_db_users(cls)
        cls.tasks = create_dummy_db_tasks(cls)
    def _run_api_v1_tasks_id(self, tid, user, data):
        """PUT *data* to task *tid* while authenticated as *user*."""
        with ForceLogin(user, self.client):
            response = self.client.put('/api/v1/tasks/{}'.format(tid),
                data=data, format="json")
        return response
    def _check_response(self, response, db_task, data):
        """Verify the updated task: each field must equal the value sent in
        *data*, falling back to the pre-update DB value when absent."""
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        name = data.get("name", db_task.name)
        self.assertEqual(response.data["name"], name)
        # "size" is derived from the task data and must be untouched.
        self.assertEqual(response.data["size"], db_task.data.size)
        mode = data.get("mode", db_task.mode)
        self.assertEqual(response.data["mode"], mode)
        owner = db_task.owner.id if db_task.owner else None
        owner = data.get("owner", owner)
        self.assertEqual(response.data["owner"], owner)
        assignee = db_task.assignee.id if db_task.assignee else None
        assignee = data.get("assignee", assignee)
        self.assertEqual(response.data["assignee"], assignee)
        # Chunking parameters are fixed at creation time.
        self.assertEqual(response.data["overlap"], db_task.overlap)
        self.assertEqual(response.data["segment_size"], db_task.segment_size)
        z_order = data.get("z_order", db_task.z_order)
        self.assertEqual(response.data["z_order"], z_order)
        image_quality = data.get("image_quality", db_task.data.image_quality)
        self.assertEqual(response.data["image_quality"], image_quality)
        self.assertEqual(response.data["status"], db_task.status)
        # Labels: submitted ones must replace the originals; otherwise the
        # original label set is expected back.
        if data.get("labels"):
            self.assertListEqual(
                [label["name"] for label in data.get("labels")],
                [label["name"] for label in response.data["labels"]]
            )
        else:
            self.assertListEqual(
                [label.name for label in db_task.label_set.all()],
                [label["name"] for label in response.data["labels"]]
            )
    def _check_api_v1_tasks_id(self, user, data):
        """Run the update against every fixture task and assert the outcome
        expected for *user*'s permissions (200 / 403 / 401)."""
        for db_task in self.tasks:
            response = self._run_api_v1_tasks_id(db_task.id, user, data)
            if user and user.has_perm("engine.task.change", db_task):
                self._check_response(response, db_task, data)
            elif user:
                self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
            else:
                self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    def test_api_v1_tasks_id_admin(self):
        data = {
            "name": "new name for the task",
            "owner": self.owner.id,
            "labels": [{
                "name": "non-vehicle",
                "attributes": [{
                    "name": "my_attribute",
                    "mutable": True,
                    "input_type": AttributeType.CHECKBOX,
                    "default_value": "true"
                }]
            }]
        }
        self._check_api_v1_tasks_id(self.admin, data)
    def test_api_v1_tasks_id_user(self):
        data = {
            "name": "new name for the task",
            "owner": self.assignee.id,
            "labels": [{
                "name": "car",
                "attributes": [{
                    "name": "color",
                    "mutable": False,
                    "input_type": AttributeType.SELECT,
                    "default_value": "white",
                    "values": ["white", "yellow", "green", "red"]
                }]
            }]
        }
        self._check_api_v1_tasks_id(self.user, data)
    def test_api_v1_tasks_id_observer(self):
        data = {
            "name": "new name for the task",
            "labels": [{
                "name": "test",
            }]
        }
        self._check_api_v1_tasks_id(self.observer, data)
    def test_api_v1_tasks_id_no_auth(self):
        data = {
            "name": "new name for the task",
            "labels": [{
                "name": "test",
            }]
        }
        self._check_api_v1_tasks_id(None, data)
class TaskPartialUpdateAPITestCase(TaskUpdateAPITestCase):
    """PATCH /api/v1/tasks/<id>: partial updates, reusing the PUT checks."""
    def _run_api_v1_tasks_id(self, tid, user, data):
        # Override the parent's PUT with a PATCH; everything else is shared.
        with ForceLogin(user, self.client):
            return self.client.patch(f'/api/v1/tasks/{tid}',
                data=data, format="json")
    def test_api_v1_tasks_id_admin_partial(self):
        data = {
            "name": "new name for the task #2",
        }
        self._check_api_v1_tasks_id(self.admin, data)
        data = {
            "name": "new name for the task",
            "owner": self.owner.id
        }
        self._check_api_v1_tasks_id(self.admin, data)
        # The second update changed the owner in the DB while the cached
        # task fixtures are now stale, so any later check in this class
        # must include "owner" in its payload.
    def test_api_v1_tasks_id_user_partial(self):
        data = {
            "labels": [{
                "name": "car",
                "attributes": [{
                    "name": "color",
                    "mutable": False,
                    "input_type": AttributeType.SELECT,
                    "default_value": "white",
                    "values": ["white", "yellow", "green", "red"]
                }]
            }]
        }
        self._check_api_v1_tasks_id(self.user, data)
        data = {
            "owner": self.observer.id,
            "assignee": self.annotator.id
        }
        self._check_api_v1_tasks_id(self.user, data)
    def test_api_v1_tasks_id_observer(self):
        data = {
            "name": "my task #3"
        }
        self._check_api_v1_tasks_id(self.observer, data)
    def test_api_v1_tasks_id_no_auth(self):
        data = {
            "name": "new name for the task",
            "labels": [{
                "name": "test",
            }]
        }
        self._check_api_v1_tasks_id(None, data)
class TaskCreateAPITestCase(APITestCase):
    """POST /api/v1/tasks: task creation per role."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
    def _run_api_v1_tasks(self, user, data):
        with ForceLogin(user, self.client):
            return self.client.post('/api/v1/tasks', data=data, format="json")
    def _check_response(self, response, user, data):
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        expected = {
            "name": data["name"],
            # A freshly created task has no data attached yet: empty mode.
            "mode": "",
            # The creator becomes the owner unless one was given explicitly.
            "owner": data.get("owner", user.id),
            "assignee": data.get("assignee"),
            "bug_tracker": data.get("bug_tracker", ""),
            "overlap": data.get("overlap", None),
            "segment_size": data.get("segment_size", 0),
            "z_order": data.get("z_order", False),
            "status": StatusChoice.ANNOTATION,
        }
        for field, value in expected.items():
            self.assertEqual(response.data[field], value)
        self.assertListEqual(
            [label["name"] for label in data.get("labels")],
            [label["name"] for label in response.data["labels"]]
        )
    def _check_api_v1_tasks(self, user, data):
        response = self._run_api_v1_tasks(user, data)
        if user is None:
            self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        elif user.has_perm("engine.task.create"):
            self._check_response(response, user, data)
        else:
            self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    def test_api_v1_tasks_admin(self):
        data = {
            "name": "new name for the task",
            "labels": [{
                "name": "non-vehicle",
                "attributes": [{
                    "name": "my_attribute",
                    "mutable": True,
                    "input_type": AttributeType.CHECKBOX,
                    "default_value": "true"
                }]
            }]
        }
        self._check_api_v1_tasks(self.admin, data)
    def test_api_v1_tasks_user(self):
        data = {
            "name": "new name for the task",
            "owner": self.assignee.id,
            "labels": [{
                "name": "car",
                "attributes": [{
                    "name": "color",
                    "mutable": False,
                    "input_type": AttributeType.SELECT,
                    "default_value": "white",
                    "values": ["white", "yellow", "green", "red"]
                }]
            }]
        }
        self._check_api_v1_tasks(self.user, data)
    def test_api_v1_tasks_observer(self):
        data = {
            "name": "new name for the task",
            "labels": [{
                "name": "test",
            }]
        }
        self._check_api_v1_tasks(self.observer, data)
    def test_api_v1_tasks_no_auth(self):
        data = {
            "name": "new name for the task",
            "labels": [{
                "name": "test",
            }]
        }
        self._check_api_v1_tasks(None, data)
def generate_image_file(filename):
    """Create an in-memory JPEG of random size.

    Returns ((width, height), file-like object) with the buffer rewound and
    named *filename* so it can stand in for an uploaded file.
    """
    width = random.randint(100, 800)
    height = random.randint(100, 800)
    buf = BytesIO()
    Image.new('RGB', size=(width, height)).save(buf, 'jpeg')
    buf.name = filename
    buf.seek(0)
    return (width, height), buf
def generate_image_files(*args):
    """Generate one in-memory JPEG per given filename.

    Returns (list of (width, height) sizes, list of file-like objects),
    both in the order the names were supplied.
    """
    pairs = [generate_image_file(name) for name in args]
    image_sizes = [size for size, _ in pairs]
    images = [fobj for _, fobj in pairs]
    return image_sizes, images
def generate_video_file(filename, width=1920, height=1080, duration=1, fps=25):
    """Encode a short synthetic MP4 entirely in memory.

    Each frame is a solid colour whose RGB channels cycle through sine phases
    over the clip (the pixel values depend only on the frame index, not on
    pixel position).

    Returns ([(width, height)] * frame_count, rewound file-like object named
    *filename*).
    """
    f = BytesIO()
    total_frames = duration * fps
    container = av.open(f, mode='w', format='mp4')
    stream = container.add_stream('mpeg4', rate=fps)
    stream.width = width
    stream.height = height
    stream.pix_fmt = 'yuv420p'
    for frame_i in range(total_frames):
        # Fix: an 'rgb24' ndarray frame is laid out (height, width, 3).
        # The original built (width, height, 3), producing a transposed frame
        # that the encoder had to rescale back to the stream dimensions.
        img = np.empty((stream.height, stream.width, 3))
        img[:, :, 0] = 0.5 + 0.5 * np.sin(2 * np.pi * (0 / 3 + frame_i / total_frames))
        img[:, :, 1] = 0.5 + 0.5 * np.sin(2 * np.pi * (1 / 3 + frame_i / total_frames))
        img[:, :, 2] = 0.5 + 0.5 * np.sin(2 * np.pi * (2 / 3 + frame_i / total_frames))
        # Clip before the uint8 cast so out-of-range floats saturate instead of
        # wrapping (the original clipped after the cast, which was a no-op).
        img = np.clip(np.round(255 * img), 0, 255).astype(np.uint8)

        frame = av.VideoFrame.from_ndarray(img, format='rgb24')
        for packet in stream.encode(frame):
            container.mux(packet)

    # Flush any frames still buffered in the encoder.
    for packet in stream.encode():
        container.mux(packet)

    # Close the file
    container.close()
    f.name = filename
    f.seek(0)
    return [(width, height)] * total_frames, f
def generate_zip_archive_file(filename, count):
    """Build an in-memory ZIP holding *count* generated JPEGs.

    Returns (list of (width, height) sizes in entry order, rewound buffer
    named *filename*).
    """
    sizes = []
    archive = BytesIO()
    with zipfile.ZipFile(archive, 'w') as bundle:
        for index in range(count):
            # NOTE(review): "{:6d}" space-pads the index; "{:06d}" (zero-padded)
            # may have been intended — confirm before changing generated names.
            entry_name = "image_{:6d}.jpg".format(index)
            dims, payload = generate_image_file(entry_name)
            sizes.append(dims)
            bundle.writestr(entry_name, payload.getvalue())
    archive.name = filename
    archive.seek(0)
    return sizes, archive
class TaskDataAPITestCase(APITestCase):
    """Tests for /api/v1/tasks/{id}/data: media upload plus preview/chunk retrieval."""

    # Maps a fixture file name (relative to SHARE_ROOT) to its frame size(s):
    # a single (w, h) tuple for images, a list of tuples for multi-frame media.
    _image_sizes = {}

    class ChunkType(str, Enum):
        IMAGESET = 'imageset'
        VIDEO = 'video'

        def __str__(self):
            return self.value

    def setUp(self):
        self.client = APIClient()

    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)

    @classmethod
    def setUpClass(cls):
        """Materialize shared fixture files (images, videos, an archive) in SHARE_ROOT."""
        super().setUpClass()
        filename = "test_1.jpg"
        path = os.path.join(settings.SHARE_ROOT, filename)
        img_size, data = generate_image_file(filename)
        with open(path, "wb") as image:
            image.write(data.read())
        cls._image_sizes[filename] = img_size

        filename = "test_2.jpg"
        path = os.path.join(settings.SHARE_ROOT, filename)
        img_size, data = generate_image_file(filename)
        with open(path, "wb") as image:
            image.write(data.read())
        cls._image_sizes[filename] = img_size

        filename = "test_3.jpg"
        path = os.path.join(settings.SHARE_ROOT, filename)
        img_size, data = generate_image_file(filename)
        with open(path, "wb") as image:
            image.write(data.read())
        cls._image_sizes[filename] = img_size

        # An image inside a subdirectory of the share.
        filename = os.path.join("data", "test_3.jpg")
        path = os.path.join(settings.SHARE_ROOT, filename)
        os.makedirs(os.path.dirname(path))
        img_size, data = generate_image_file(filename)
        with open(path, "wb") as image:
            image.write(data.read())
        cls._image_sizes[filename] = img_size

        filename = "test_video_1.mp4"
        path = os.path.join(settings.SHARE_ROOT, filename)
        img_sizes, data = generate_video_file(filename, width=1280, height=720)
        with open(path, "wb") as video:
            video.write(data.read())
        cls._image_sizes[filename] = img_sizes

        # A video inside a subdirectory of the share.
        filename = os.path.join("videos", "test_video_1.mp4")
        path = os.path.join(settings.SHARE_ROOT, filename)
        os.makedirs(os.path.dirname(path))
        img_sizes, data = generate_video_file(filename, width=1280, height=720)
        with open(path, "wb") as video:
            video.write(data.read())
        cls._image_sizes[filename] = img_sizes

        filename = os.path.join("test_archive_1.zip")
        path = os.path.join(settings.SHARE_ROOT, filename)
        img_sizes, data = generate_zip_archive_file(filename, count=5)
        with open(path, "wb") as zip_archive:
            zip_archive.write(data.read())
        cls._image_sizes[filename] = img_sizes

    @classmethod
    def tearDownClass(cls):
        """Remove every fixture file created in setUpClass."""
        super().tearDownClass()
        path = os.path.join(settings.SHARE_ROOT, "test_1.jpg")
        os.remove(path)

        path = os.path.join(settings.SHARE_ROOT, "test_2.jpg")
        os.remove(path)

        path = os.path.join(settings.SHARE_ROOT, "test_3.jpg")
        os.remove(path)

        path = os.path.join(settings.SHARE_ROOT, "data", "test_3.jpg")
        os.remove(path)

        path = os.path.join(settings.SHARE_ROOT, "test_video_1.mp4")
        os.remove(path)

        path = os.path.join(settings.SHARE_ROOT, "videos", "test_video_1.mp4")
        os.remove(path)

        # Fix: the archive fixture was created in setUpClass but never removed,
        # leaking test_archive_1.zip into SHARE_ROOT between test runs.
        path = os.path.join(settings.SHARE_ROOT, "test_archive_1.zip")
        os.remove(path)

    def _run_api_v1_tasks_id_data_post(self, tid, user, data):
        """POST media *data* to /api/v1/tasks/{tid}/data as *user*."""
        with ForceLogin(user, self.client):
            response = self.client.post('/api/v1/tasks/{}/data'.format(tid),
                data=data)
        return response

    def _create_task(self, user, data):
        """POST a task spec to /api/v1/tasks as *user*."""
        with ForceLogin(user, self.client):
            response = self.client.post('/api/v1/tasks', data=data, format="json")
        return response

    def _get_task(self, user, tid):
        """GET the task resource for *tid* as *user*."""
        with ForceLogin(user, self.client):
            return self.client.get("/api/v1/tasks/{}".format(tid))

    def _run_api_v1_task_id_data_get(self, tid, user, data_type, data_quality=None, data_number=None):
        """GET task data of the given type/quality/number (query params are optional)."""
        url = '/api/v1/tasks/{}/data?type={}'.format(tid, data_type)
        if data_quality is not None:
            url += '&quality={}'.format(data_quality)
        if data_number is not None:
            url += '&number={}'.format(data_number)
        with ForceLogin(user, self.client):
            return self.client.get(url)

    def _get_preview(self, tid, user):
        return self._run_api_v1_task_id_data_get(tid, user, "preview")

    def _get_compressed_chunk(self, tid, user, number):
        return self._run_api_v1_task_id_data_get(tid, user, "chunk", "compressed", number)

    def _get_original_chunk(self, tid, user, number):
        return self._run_api_v1_task_id_data_get(tid, user, "chunk", "original", number)

    def _get_compressed_frame(self, tid, user, number):
        return self._run_api_v1_task_id_data_get(tid, user, "frame", "compressed", number)

    def _get_original_frame(self, tid, user, number):
        return self._run_api_v1_task_id_data_get(tid, user, "frame", "original", number)

    @staticmethod
    def _extract_zip_chunk(chunk_buffer):
        """Return the images inside a ZIP chunk, sorted by entry name."""
        chunk = zipfile.ZipFile(chunk_buffer, mode='r')
        return [Image.open(BytesIO(chunk.read(f))) for f in sorted(chunk.namelist())]

    @staticmethod
    def _extract_video_chunk(chunk_buffer):
        """Decode every frame of a video chunk into PIL images."""
        container = av.open(chunk_buffer)
        stream = container.streams.video[0]
        return [f.to_image() for f in container.decode(stream)]

    def _test_api_v1_tasks_id_data_spec(self, user, spec, data, expected_compressed_type, expected_original_type, image_sizes):
        """Create a task from *spec*, upload *data*, then verify chunk types,
        sizes, preview and (for image sets) bit-exact original content."""
        # create task
        response = self._create_task(user, spec)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        task_id = response.data["id"]

        # post data for the task
        response = self._run_api_v1_tasks_id_data_post(task_id, user, data)
        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)

        response = self._get_task(user, task_id)

        # A plain user may not read a task owned/assigned entirely to others.
        expected_status_code = status.HTTP_200_OK
        if user == self.user and "owner" in spec and spec["owner"] != user.id and \
            "assignee" in spec and spec["assignee"] != user.id:
            expected_status_code = status.HTTP_403_FORBIDDEN
        self.assertEqual(response.status_code, expected_status_code)

        if expected_status_code == status.HTTP_200_OK:
            task = response.json()
            self.assertEqual(expected_compressed_type, task["data_compressed_chunk_type"])
            self.assertEqual(expected_original_type, task["data_original_chunk_type"])
            self.assertEqual(len(image_sizes), task["size"])

        # check preview
        response = self._get_preview(task_id, user)
        self.assertEqual(response.status_code, expected_status_code)
        if expected_status_code == status.HTTP_200_OK:
            preview = Image.open(io.BytesIO(b"".join(response.streaming_content)))
            self.assertEqual(preview.size, image_sizes[0])

        # check compressed chunk
        response = self._get_compressed_chunk(task_id, user, 0)
        self.assertEqual(response.status_code, expected_status_code)
        if expected_status_code == status.HTTP_200_OK:
            compressed_chunk = io.BytesIO(b"".join(response.streaming_content))
            if task["data_compressed_chunk_type"] == self.ChunkType.IMAGESET:
                images = self._extract_zip_chunk(compressed_chunk)
            else:
                images = self._extract_video_chunk(compressed_chunk)

            self.assertEqual(len(images), min(task["data_chunk_size"], len(image_sizes)))
            for image_idx, image in enumerate(images):
                self.assertEqual(image.size, image_sizes[image_idx])

        # check original chunk
        response = self._get_original_chunk(task_id, user, 0)
        self.assertEqual(response.status_code, expected_status_code)
        if expected_status_code == status.HTTP_200_OK:
            original_chunk = io.BytesIO(b"".join(response.streaming_content))
            if task["data_original_chunk_type"] == self.ChunkType.IMAGESET:
                images = self._extract_zip_chunk(original_chunk)
            else:
                images = self._extract_video_chunk(original_chunk)

            for image_idx, image in enumerate(images):
                self.assertEqual(image.size, image_sizes[image_idx])

            self.assertEqual(len(images), min(task["data_chunk_size"], len(image_sizes)))

            if task["data_original_chunk_type"] == self.ChunkType.IMAGESET:
                server_files = [img for key, img in data.items() if key.startswith("server_files")]
                client_files = [img for key, img in data.items() if key.startswith("client_files")]

                if server_files:
                    source_files = [os.path.join(settings.SHARE_ROOT, f) for f in sorted(server_files)]
                else:
                    source_files = [f for f in sorted(client_files, key=lambda e: e.name)]

                source_images = []
                for f in source_files:
                    if zipfile.is_zipfile(f):
                        source_images.extend(self._extract_zip_chunk(f))
                    else:
                        source_images.append(Image.open(f))

                # The original chunk must reproduce the source pixels exactly.
                for img_idx, image in enumerate(images):
                    server_image = np.array(image)
                    source_image = np.array(source_images[img_idx])
                    self.assertTrue(np.array_equal(source_image, server_image))

    def _test_api_v1_tasks_id_data(self, user):
        """Exercise every upload flavour: client images, server images,
        client/server videos, zipped video chunks, and ZIP archives."""
        task_spec = {
            "name": "my task #1",
            "owner": self.owner.id,
            "assignee": self.assignee.id,
            "overlap": 0,
            "segment_size": 100,
            "z_order": False,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }

        image_sizes, images = generate_image_files("test_1.jpg", "test_2.jpg", "test_3.jpg")
        task_data = {
            "client_files[0]": images[0],
            "client_files[1]": images[1],
            "client_files[2]": images[2],
            "image_quality": 75,
        }

        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET, image_sizes)

        task_spec = {
            "name": "my task #2",
            "overlap": 0,
            "segment_size": 0,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }

        task_data = {
            "server_files[0]": "test_1.jpg",
            "server_files[1]": "test_2.jpg",
            "server_files[2]": "test_3.jpg",
            "server_files[3]": os.path.join("data", "test_3.jpg"),
            "image_quality": 75,
        }
        # Server files are sorted by name, so "data/test_3.jpg" comes first.
        image_sizes = [
            self._image_sizes[task_data["server_files[3]"]],
            self._image_sizes[task_data["server_files[0]"]],
            self._image_sizes[task_data["server_files[1]"]],
            self._image_sizes[task_data["server_files[2]"]],
        ]

        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET, image_sizes)

        task_spec = {
            "name": "my video task #1",
            "overlap": 0,
            "segment_size": 100,
            "z_order": False,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }
        image_sizes, video = generate_video_file(filename="test_video_1.mp4", width=1280, height=720)
        task_data = {
            "client_files[0]": video,
            "image_quality": 43,
        }

        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self.ChunkType.VIDEO, image_sizes)

        task_spec = {
            "name": "my video task #2",
            "overlap": 0,
            "segment_size": 5,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }

        task_data = {
            "server_files[0]": "test_video_1.mp4",
            "image_quality": 57,
        }
        image_sizes = self._image_sizes[task_data["server_files[0]"]]

        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self.ChunkType.VIDEO, image_sizes)

        task_spec = {
            "name": "my video task #3",
            "overlap": 0,
            "segment_size": 0,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }

        task_data = {
            "server_files[0]": os.path.join("videos", "test_video_1.mp4"),
            "image_quality": 57,
        }
        image_sizes = self._image_sizes[task_data["server_files[0]"]]

        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self.ChunkType.VIDEO, image_sizes)

        task_spec = {
            "name": "my video task #4",
            "overlap": 0,
            "segment_size": 5,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }

        # use_zip_chunks stores compressed chunks as image sets even for video input.
        task_data = {
            "server_files[0]": "test_video_1.mp4",
            "image_quality": 12,
            "use_zip_chunks": True,
        }
        image_sizes = self._image_sizes[task_data["server_files[0]"]]

        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.VIDEO, image_sizes)

        task_spec = {
            "name": "my archive task #6",
            "overlap": 0,
            "segment_size": 0,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }
        task_data = {
            "server_files[0]": "test_archive_1.zip",
            "image_quality": 88,
        }
        image_sizes = self._image_sizes[task_data["server_files[0]"]]

        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET, image_sizes)

        task_spec = {
            "name": "my archive task #7",
            "overlap": 0,
            "segment_size": 0,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }
        image_sizes, archive = generate_zip_archive_file("test_archive_2.zip", 7)
        task_data = {
            "client_files[0]": archive,
            "image_quality": 100,
        }

        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET, image_sizes)

    def test_api_v1_tasks_id_data_admin(self):
        self._test_api_v1_tasks_id_data(self.admin)

    def test_api_v1_tasks_id_data_owner(self):
        self._test_api_v1_tasks_id_data(self.owner)

    def test_api_v1_tasks_id_data_user(self):
        self._test_api_v1_tasks_id_data(self.user)

    def test_api_v1_tasks_id_data_no_auth(self):
        """Task creation without authentication must fail with 401."""
        data = {
            "name": "my task #3",
            "owner": self.owner.id,
            "assignee": self.assignee.id,
            "overlap": 0,
            "segment_size": 100,
            "z_order": False,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }
        response = self._create_task(None, data)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def compare_objects(self, obj1, obj2, ignore_keys, fp_tolerance=.001):
    """Recursively assert that *obj1* equals *obj2* using TestCase assertions on *self*.

    Dict keys listed in *ignore_keys* are skipped at every nesting level; floats
    are compared with assertAlmostEqual using *fp_tolerance* as the allowed delta.
    Raises AssertionError (via *self*'s assert methods) on the first mismatch.
    """
    if isinstance(obj1, dict):
        self.assertTrue(isinstance(obj2, dict), "{} != {}".format(obj1, obj2))
        for k in obj1.keys():
            if k in ignore_keys:
                continue
            # Bug fix: propagate fp_tolerance into the recursion; previously
            # nested floats were always compared with the default 0.001 delta
            # regardless of the tolerance requested by the caller.
            compare_objects(self, obj1[k], obj2.get(k), ignore_keys,
                fp_tolerance=fp_tolerance)
    elif isinstance(obj1, list):
        self.assertTrue(isinstance(obj2, list), "{} != {}".format(obj1, obj2))
        self.assertEqual(len(obj1), len(obj2), "{} != {}".format(obj1, obj2))
        for v1, v2 in zip(obj1, obj2):
            compare_objects(self, v1, v2, ignore_keys, fp_tolerance=fp_tolerance)
    else:
        if isinstance(obj1, float) or isinstance(obj2, float):
            self.assertAlmostEqual(obj1, obj2, delta=fp_tolerance)
        else:
            self.assertEqual(obj1, obj2)
class JobAnnotationAPITestCase(APITestCase):
    """Tests for /api/v1/jobs/{id}/annotations: PUT/GET/PATCH/DELETE round trips."""

    def setUp(self):
        self.client = APIClient()

    @classmethod
    def setUpTestData(cls):
        # Presumably creates the role users referenced below (admin, user,
        # assignee, observer, ...) — defined elsewhere in this file.
        create_db_users(cls)

    def _create_task(self, owner, assignee):
        """Create a task with two labels (one carrying select/checkbox attributes),
        upload three images, and return (task payload, list of job payloads)."""
        data = {
            "name": "my task #1",
            "owner": owner.id,
            "assignee": assignee.id,
            "overlap": 0,
            "segment_size": 100,
            "z_order": False,
            "labels": [
                {
                    "name": "car",
                    "attributes": [
                        {
                            "name": "model",
                            "mutable": False,
                            "input_type": "select",
                            "default_value": "mazda",
                            "values": ["bmw", "mazda", "renault"]
                        },
                        {
                            "name": "parked",
                            "mutable": True,
                            "input_type": "checkbox",
                            "default_value": False
                        },
                    ]
                },
                {"name": "person"},
            ]
        }

        with ForceLogin(owner, self.client):
            response = self.client.post('/api/v1/tasks', data=data, format="json")
            assert response.status_code == status.HTTP_201_CREATED
            tid = response.data["id"]

            images = {
                "client_files[0]": generate_image_file("test_1.jpg")[1],
                "client_files[1]": generate_image_file("test_2.jpg")[1],
                "client_files[2]": generate_image_file("test_3.jpg")[1],
                "image_quality": 75,
            }
            response = self.client.post("/api/v1/tasks/{}/data".format(tid), data=images)
            assert response.status_code == status.HTTP_202_ACCEPTED

            response = self.client.get("/api/v1/tasks/{}".format(tid))
            task = response.data

            response = self.client.get("/api/v1/tasks/{}/jobs".format(tid))
            jobs = response.data

        return (task, jobs)

    @staticmethod
    def _get_default_attr_values(task):
        """Build, per label id, the default attribute values grouped into
        "mutable", "immutable" and "all" lists (as {spec_id, value} dicts)."""
        default_attr_values = {}
        for label in task["labels"]:
            default_attr_values[label["id"]] = {
                "mutable": [],
                "immutable": [],
                "all": [],
            }
            for attr in label["attributes"]:
                default_value = {
                    "spec_id": attr["id"],
                    "value": attr["default_value"],
                }
                if attr["mutable"]:
                    default_attr_values[label["id"]]["mutable"].append(default_value)
                else:
                    default_attr_values[label["id"]]["immutable"].append(default_value)
                default_attr_values[label["id"]]["all"].append(default_value)
        return default_attr_values

    def _put_api_v1_jobs_id_data(self, jid, user, data):
        """PUT the full annotation set for job *jid* as *user*."""
        with ForceLogin(user, self.client):
            response = self.client.put("/api/v1/jobs/{}/annotations".format(jid),
                data=data, format="json")
        return response

    def _get_api_v1_jobs_id_data(self, jid, user):
        """GET the annotations of job *jid* as *user*."""
        with ForceLogin(user, self.client):
            response = self.client.get("/api/v1/jobs/{}/annotations".format(jid))
        return response

    def _delete_api_v1_jobs_id_data(self, jid, user):
        """DELETE all annotations of job *jid* as *user*."""
        with ForceLogin(user, self.client):
            response = self.client.delete("/api/v1/jobs/{}/annotations".format(jid),
                format="json")
        return response

    def _patch_api_v1_jobs_id_data(self, jid, user, action, data):
        """PATCH annotations of job *jid* with the given *action* query parameter."""
        with ForceLogin(user, self.client):
            response = self.client.patch(
                "/api/v1/jobs/{}/annotations?action={}".format(jid, action),
                data=data, format="json")
        return response

    def _check_response(self, response, data):
        """Compare payloads unless the request was rejected for auth reasons."""
        if not response.status_code in [
            status.HTTP_403_FORBIDDEN, status.HTTP_401_UNAUTHORIZED]:
            compare_objects(self, data, response.data, ignore_keys=["id"])

    def _run_api_v1_jobs_id_annotations(self, owner, assignee, annotator):
        """Full PUT/GET/DELETE/PATCH annotation round trip as *annotator*.

        When *annotator* is None, every expected status collapses to 401.
        """
        task, jobs = self._create_task(owner, assignee)
        if annotator:
            HTTP_200_OK = status.HTTP_200_OK
            HTTP_204_NO_CONTENT = status.HTTP_204_NO_CONTENT
            HTTP_400_BAD_REQUEST = status.HTTP_400_BAD_REQUEST
        else:
            HTTP_200_OK = status.HTTP_401_UNAUTHORIZED
            HTTP_204_NO_CONTENT = status.HTTP_401_UNAUTHORIZED
            HTTP_400_BAD_REQUEST = status.HTTP_401_UNAUTHORIZED

        job = jobs[0]

        # PUT an empty annotation set first.
        data = {
            "version": 0,
            "tags": [],
            "shapes": [],
            "tracks": []
        }
        response = self._put_api_v1_jobs_id_data(job["id"], annotator, data)
        self.assertEqual(response.status_code, HTTP_200_OK)

        # A full annotation set: one tag, two shapes, two tracks.
        data = {
            "version": 1,
            "tags": [
                {
                    "frame": 0,
                    "label_id": task["labels"][0]["id"],
                    "group": None,
                    "attributes": []
                }
            ],
            "shapes": [
                {
                    "frame": 0,
                    "label_id": task["labels"][0]["id"],
                    "group": None,
                    "attributes": [
                        {
                            "spec_id": task["labels"][0]["attributes"][0]["id"],
                            "value": task["labels"][0]["attributes"][0]["values"][0]
                        },
                        {
                            "spec_id": task["labels"][0]["attributes"][1]["id"],
                            "value": task["labels"][0]["attributes"][1]["default_value"]
                        }
                    ],
                    "points": [1.0, 2.1, 100, 300.222],
                    "type": "rectangle",
                    "occluded": False
                },
                {
                    "frame": 1,
                    "label_id": task["labels"][1]["id"],
                    "group": None,
                    "attributes": [],
                    "points": [2.0, 2.1, 100, 300.222, 400, 500, 1, 3],
                    "type": "polygon",
                    "occluded": False
                },
            ],
            "tracks": [
                {
                    "frame": 0,
                    "label_id": task["labels"][0]["id"],
                    "group": None,
                    "attributes": [
                        {
                            "spec_id": task["labels"][0]["attributes"][0]["id"],
                            "value": task["labels"][0]["attributes"][0]["values"][0]
                        },
                    ],
                    "shapes": [
                        {
                            "frame": 0,
                            "points": [1.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": False,
                            "outside": False,
                            "attributes": [
                                {
                                    "spec_id": task["labels"][0]["attributes"][1]["id"],
                                    "value": task["labels"][0]["attributes"][1]["default_value"]
                                },
                            ]
                        },
                        {
                            "frame": 1,
                            "attributes": [],
                            "points": [2.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": True,
                            "outside": True
                        },
                    ]
                },
                {
                    "frame": 1,
                    "label_id": task["labels"][1]["id"],
                    "group": None,
                    "attributes": [],
                    "shapes": [
                        {
                            "frame": 1,
                            "attributes": [],
                            "points": [1.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": False,
                            "outside": False
                        }
                    ]
                },
            ]
        }
        default_attr_values = self._get_default_attr_values(task)
        response = self._put_api_v1_jobs_id_data(job["id"], annotator, data)
        data["version"] += 1  # the server bumps the annotation version on every write
        self.assertEqual(response.status_code, HTTP_200_OK)
        self._check_response(response, data)

        response = self._get_api_v1_jobs_id_data(job["id"], annotator)
        self.assertEqual(response.status_code, HTTP_200_OK)
        # the server fills in default attribute values missing from the uploaded data
        data["tags"][0]["attributes"] = default_attr_values[data["tags"][0]["label_id"]]["all"]
        data["tracks"][0]["shapes"][1]["attributes"] = default_attr_values[data["tracks"][0]["label_id"]]["mutable"]
        self._check_response(response, data)

        response = self._delete_api_v1_jobs_id_data(job["id"], annotator)
        data["version"] += 1  # the server bumps the annotation version on every write
        self.assertEqual(response.status_code, HTTP_204_NO_CONTENT)

        # After DELETE the job must report an empty annotation set.
        data = {
            "version": data["version"],
            "tags": [],
            "shapes": [],
            "tracks": []
        }
        response = self._get_api_v1_jobs_id_data(job["id"], annotator)
        self.assertEqual(response.status_code, HTTP_200_OK)
        self._check_response(response, data)

        # Re-create the same annotations via PATCH action=create.
        data = {
            "version": data["version"],
            "tags": [
                {
                    "frame": 0,
                    "label_id": task["labels"][0]["id"],
                    "group": None,
                    "attributes": []
                }
            ],
            "shapes": [
                {
                    "frame": 0,
                    "label_id": task["labels"][0]["id"],
                    "group": None,
                    "attributes": [
                        {
                            "spec_id": task["labels"][0]["attributes"][0]["id"],
                            "value": task["labels"][0]["attributes"][0]["values"][0]
                        },
                        {
                            "spec_id": task["labels"][0]["attributes"][1]["id"],
                            "value": task["labels"][0]["attributes"][1]["default_value"]
                        }
                    ],
                    "points": [1.0, 2.1, 100, 300.222],
                    "type": "rectangle",
                    "occluded": False
                },
                {
                    "frame": 1,
                    "label_id": task["labels"][1]["id"],
                    "group": None,
                    "attributes": [],
                    "points": [2.0, 2.1, 100, 300.222, 400, 500, 1, 3],
                    "type": "polygon",
                    "occluded": False
                },
            ],
            "tracks": [
                {
                    "frame": 0,
                    "label_id": task["labels"][0]["id"],
                    "group": None,
                    "attributes": [
                        {
                            "spec_id": task["labels"][0]["attributes"][0]["id"],
                            "value": task["labels"][0]["attributes"][0]["values"][0]
                        },
                    ],
                    "shapes": [
                        {
                            "frame": 0,
                            "points": [1.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": False,
                            "outside": False,
                            "attributes": [
                                {
                                    "spec_id": task["labels"][0]["attributes"][1]["id"],
                                    "value": task["labels"][0]["attributes"][1]["default_value"]
                                },
                            ]
                        },
                        {
                            "frame": 1,
                            "attributes": [],
                            "points": [2.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": True,
                            "outside": True
                        },
                    ]
                },
                {
                    "frame": 1,
                    "label_id": task["labels"][1]["id"],
                    "group": None,
                    "attributes": [],
                    "shapes": [
                        {
                            "frame": 1,
                            "attributes": [],
                            "points": [1.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": False,
                            "outside": False
                        }
                    ]
                },
            ]
        }
        response = self._patch_api_v1_jobs_id_data(job["id"], annotator,
            "create", data)
        data["version"] += 1
        self.assertEqual(response.status_code, HTTP_200_OK)
        self._check_response(response, data)

        response = self._get_api_v1_jobs_id_data(job["id"], annotator)
        self.assertEqual(response.status_code, HTTP_200_OK)
        # the server fills in default attribute values missing from the uploaded data
        data["tags"][0]["attributes"] = default_attr_values[data["tags"][0]["label_id"]]["all"]
        data["tracks"][0]["shapes"][1]["attributes"] = default_attr_values[data["tracks"][0]["label_id"]]["mutable"]
        self._check_response(response, data)

        # Mutate a few fields of the stored annotations and PATCH action=update.
        data = response.data
        if not response.status_code in [
            status.HTTP_403_FORBIDDEN, status.HTTP_401_UNAUTHORIZED]:
            data["tags"][0]["label_id"] = task["labels"][0]["id"]
            data["shapes"][0]["points"] = [1, 2, 3.0, 100, 120, 1, 2, 4.0]
            data["shapes"][0]["type"] = "polygon"
            data["tracks"][0]["group"] = 10
            data["tracks"][0]["shapes"][0]["outside"] = False
            data["tracks"][0]["shapes"][0]["occluded"] = False

        response = self._patch_api_v1_jobs_id_data(job["id"], annotator,
            "update", data)
        # data may be an error body here (no "version" key), hence the .get()
        data["version"] = data.get("version", 0) + 1  # the server bumps the version
        self.assertEqual(response.status_code, HTTP_200_OK)
        self._check_response(response, data)

        response = self._get_api_v1_jobs_id_data(job["id"], annotator)
        self.assertEqual(response.status_code, HTTP_200_OK)
        self._check_response(response, data)

        # PATCH action=delete removes the submitted annotations.
        response = self._patch_api_v1_jobs_id_data(job["id"], annotator,
            "delete", data)
        data["version"] += 1  # the server bumps the annotation version on every write
        self.assertEqual(response.status_code, HTTP_200_OK)
        self._check_response(response, data)

        data = {
            "version": data["version"],
            "tags": [],
            "shapes": [],
            "tracks": []
        }
        response = self._get_api_v1_jobs_id_data(job["id"], annotator)
        self.assertEqual(response.status_code, HTTP_200_OK)
        self._check_response(response, data)

        # Payload with bogus label_ids/spec_ids must be rejected with 400.
        data = {
            "version": data["version"],
            "tags": [
                {
                    "frame": 0,
                    "label_id": 11010101,
                    "group": None,
                    "attributes": []
                }
            ],
            "shapes": [
                {
                    "frame": 0,
                    "label_id": task["labels"][0]["id"],
                    "group": None,
                    "attributes": [
                        {
                            "spec_id": 32234234,
                            "value": task["labels"][0]["attributes"][0]["values"][0]
                        },
                        {
                            "spec_id": task["labels"][0]["attributes"][1]["id"],
                            "value": task["labels"][0]["attributes"][0]["default_value"]
                        }
                    ],
                    "points": [1.0, 2.1, 100, 300.222],
                    "type": "rectangle",
                    "occluded": False
                },
                {
                    "frame": 1,
                    "label_id": 1212121,
                    "group": None,
                    "attributes": [],
                    "points": [2.0, 2.1, 100, 300.222, 400, 500, 1, 3],
                    "type": "polygon",
                    "occluded": False
                },
            ],
            "tracks": [
                {
                    "frame": 0,
                    "label_id": 0,
                    "group": None,
                    "attributes": [],
                    "shapes": [
                        {
                            "frame": 0,
                            "points": [1.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": False,
                            "outside": False,
                            "attributes": [
                                {
                                    "spec_id": 10000,
                                    "value": task["labels"][0]["attributes"][0]["values"][0]
                                },
                                {
                                    "spec_id": task["labels"][0]["attributes"][1]["id"],
                                    "value": task["labels"][0]["attributes"][1]["default_value"]
                                }
                            ]
                        },
                        {
                            "frame": 1,
                            "attributes": [],
                            "points": [2.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": True,
                            "outside": True
                        },
                    ]
                },
                {
                    "frame": 1,
                    "label_id": task["labels"][1]["id"],
                    "group": None,
                    "attributes": [],
                    "shapes": [
                        {
                            "frame": 1,
                            "attributes": [],
                            "points": [1.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": False,
                            "outside": False
                        }
                    ]
                },
            ]
        }
        response = self._patch_api_v1_jobs_id_data(job["id"], annotator,
            "create", data)
        self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST)

    def test_api_v1_jobs_id_annotations_admin(self):
        self._run_api_v1_jobs_id_annotations(self.admin, self.assignee,
            self.assignee)

    def test_api_v1_jobs_id_annotations_user(self):
        self._run_api_v1_jobs_id_annotations(self.user, self.assignee,
            self.assignee)

    def test_api_v1_jobs_id_annotations_observer(self):
        """An observer may read annotations but every write must be forbidden."""
        _, jobs = self._create_task(self.user, self.assignee)
        job = jobs[0]
        data = {
            "version": 0,
            "tags": [],
            "shapes": [],
            "tracks": []
        }

        response = self._get_api_v1_jobs_id_data(job["id"], self.observer)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        response = self._put_api_v1_jobs_id_data(job["id"], self.observer, data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        response = self._patch_api_v1_jobs_id_data(job["id"], self.observer, "create", data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        response = self._delete_api_v1_jobs_id_data(job["id"], self.observer)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_api_v1_jobs_id_annotations_no_auth(self):
        self._run_api_v1_jobs_id_annotations(self.user, self.assignee, None)
class TaskAnnotationAPITestCase(JobAnnotationAPITestCase):
def _put_api_v1_tasks_id_annotations(self, pk, user, data):
    """PUT the full annotation set for task *pk* as *user*."""
    url = "/api/v1/tasks/{}/annotations".format(pk)
    with ForceLogin(user, self.client):
        return self.client.put(url, data=data, format="json")
def _get_api_v1_tasks_id_annotations(self, pk, user):
    """GET all annotations of task *pk* as *user*."""
    url = "/api/v1/tasks/{}/annotations".format(pk)
    with ForceLogin(user, self.client):
        return self.client.get(url)
def _delete_api_v1_tasks_id_annotations(self, pk, user):
    """DELETE all annotations of task *pk* as *user*."""
    url = "/api/v1/tasks/{}/annotations".format(pk)
    with ForceLogin(user, self.client):
        return self.client.delete(url, format="json")
def _dump_api_v1_tasks_id_annotations(self, pk, user, query_params=""):
with ForceLogin(user, self.client):
response = self.client.get(
"/api/v1/tasks/{0}/annotations/my_task_{0}?{1}".format(pk, query_params))
return response
def _patch_api_v1_tasks_id_annotations(self, pk, user, action, data):
    """PATCH annotations of task *pk* with the given *action* query parameter."""
    url = "/api/v1/tasks/{}/annotations?action={}".format(pk, action)
    with ForceLogin(user, self.client):
        return self.client.patch(url, data=data, format="json")
def _upload_api_v1_tasks_id_annotations(self, pk, user, data, query_params=""):
with ForceLogin(user, self.client):
response = self.client.put(
path="/api/v1/tasks/{0}/annotations?{1}".format(pk, query_params),
data=data,
format="multipart",
)
return response
def _get_annotation_formats(self, user):
    """GET the list of annotation formats supported by the server."""
    with ForceLogin(user, self.client):
        return self.client.get(path="/api/v1/server/annotation/formats")
def _check_response(self, response, data):
    """Deep-compare *data* with the response body unless the request was
    rejected for authentication/authorization reasons."""
    rejected = response.status_code in (
        status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN)
    if not rejected:
        compare_objects(self, data, response.data, ignore_keys=["id"])
def _run_api_v1_tasks_id_annotations(self, owner, assignee, annotator):
    """Exercise the full task-annotation CRUD cycle acting as ``annotator``.

    Creates a task for ``owner``/``assignee``, then PUTs, GETs, DELETEs
    and PATCHes (create/update/delete) annotations, checking status codes
    and payloads at every step.  With ``annotator=None`` every call must
    come back 401 UNAUTHORIZED.
    """
    task, _ = self._create_task(owner, assignee)
    if annotator:
        HTTP_200_OK = status.HTTP_200_OK
        HTTP_204_NO_CONTENT = status.HTTP_204_NO_CONTENT
        HTTP_400_BAD_REQUEST = status.HTTP_400_BAD_REQUEST
    else:
        # Anonymous access: every expected status collapses to 401.
        HTTP_200_OK = status.HTTP_401_UNAUTHORIZED
        HTTP_204_NO_CONTENT = status.HTTP_401_UNAUTHORIZED
        HTTP_400_BAD_REQUEST = status.HTTP_401_UNAUTHORIZED
    # PUT an empty annotation set; each successful write bumps "version".
    data = {
        "version": 0,
        "tags": [],
        "shapes": [],
        "tracks": []
    }
    response = self._put_api_v1_tasks_id_annotations(task["id"], annotator, data)
    data["version"] += 1
    self.assertEqual(response.status_code, HTTP_200_OK)
    # PUT a non-trivial annotation set: one tag, two shapes, two tracks.
    data = {
        "version": data["version"],
        "tags": [
            {
                "frame": 0,
                "label_id": task["labels"][0]["id"],
                "group": None,
                "attributes": []
            }
        ],
        "shapes": [
            {
                "frame": 0,
                "label_id": task["labels"][0]["id"],
                "group": None,
                "attributes": [
                    {
                        "spec_id": task["labels"][0]["attributes"][0]["id"],
                        "value": task["labels"][0]["attributes"][0]["values"][0]
                    },
                    {
                        "spec_id": task["labels"][0]["attributes"][1]["id"],
                        "value": task["labels"][0]["attributes"][0]["default_value"]
                    }
                ],
                "points": [1.0, 2.1, 100, 300.222],
                "type": "rectangle",
                "occluded": False
            },
            {
                "frame": 1,
                "label_id": task["labels"][1]["id"],
                "group": None,
                "attributes": [],
                "points": [2.0, 2.1, 100, 300.222, 400, 500, 1, 3],
                "type": "polygon",
                "occluded": False
            },
        ],
        "tracks": [
            {
                "frame": 0,
                "label_id": task["labels"][0]["id"],
                "group": None,
                "attributes": [
                    {
                        "spec_id": task["labels"][0]["attributes"][0]["id"],
                        "value": task["labels"][0]["attributes"][0]["values"][0]
                    },
                ],
                "shapes": [
                    {
                        "frame": 0,
                        "points": [1.0, 2.1, 100, 300.222],
                        "type": "rectangle",
                        "occluded": False,
                        "outside": False,
                        "attributes": [
                            {
                                "spec_id": task["labels"][0]["attributes"][1]["id"],
                                "value": task["labels"][0]["attributes"][1]["default_value"]
                            }
                        ]
                    },
                    {
                        "frame": 1,
                        "attributes": [],
                        "points": [2.0, 2.1, 100, 300.222],
                        "type": "rectangle",
                        "occluded": True,
                        "outside": True
                    },
                ]
            },
            {
                "frame": 1,
                "label_id": task["labels"][1]["id"],
                "group": None,
                "attributes": [],
                "shapes": [
                    {
                        "frame": 1,
                        "attributes": [],
                        "points": [1.0, 2.1, 100, 300.222],
                        "type": "rectangle",
                        "occluded": False,
                        "outside": False
                    }
                ]
            },
        ]
    }
    response = self._put_api_v1_tasks_id_annotations(task["id"], annotator, data)
    data["version"] += 1
    self.assertEqual(response.status_code, HTTP_200_OK)
    self._check_response(response, data)
    default_attr_values = self._get_default_attr_values(task)
    response = self._get_api_v1_tasks_id_annotations(task["id"], annotator)
    # The server must fill in default attribute values that the PUT
    # payload omitted.
    data["tags"][0]["attributes"] = default_attr_values[data["tags"][0]["label_id"]]["all"]
    data["tracks"][0]["shapes"][1]["attributes"] = default_attr_values[data["tracks"][0]["label_id"]]["mutable"]
    self.assertEqual(response.status_code, HTTP_200_OK)
    self._check_response(response, data)
    # DELETE everything; a subsequent GET must return empty sets.
    response = self._delete_api_v1_tasks_id_annotations(task["id"], annotator)
    data["version"] += 1
    self.assertEqual(response.status_code, HTTP_204_NO_CONTENT)
    data = {
        "version": data["version"],
        "tags": [],
        "shapes": [],
        "tracks": []
    }
    response = self._get_api_v1_tasks_id_annotations(task["id"], annotator)
    self.assertEqual(response.status_code, HTTP_200_OK)
    self._check_response(response, data)
    # PATCH action=create with the same payload as the PUT above.
    data = {
        "version": data["version"],
        "tags": [
            {
                "frame": 0,
                "label_id": task["labels"][0]["id"],
                "group": None,
                "attributes": []
            }
        ],
        "shapes": [
            {
                "frame": 0,
                "label_id": task["labels"][0]["id"],
                "group": None,
                "attributes": [
                    {
                        "spec_id": task["labels"][0]["attributes"][0]["id"],
                        "value": task["labels"][0]["attributes"][0]["values"][0]
                    },
                    {
                        "spec_id": task["labels"][0]["attributes"][1]["id"],
                        "value": task["labels"][0]["attributes"][0]["default_value"]
                    }
                ],
                "points": [1.0, 2.1, 100, 300.222],
                "type": "rectangle",
                "occluded": False
            },
            {
                "frame": 1,
                "label_id": task["labels"][1]["id"],
                "group": None,
                "attributes": [],
                "points": [2.0, 2.1, 100, 300.222, 400, 500, 1, 3],
                "type": "polygon",
                "occluded": False
            },
        ],
        "tracks": [
            {
                "frame": 0,
                "label_id": task["labels"][0]["id"],
                "group": None,
                "attributes": [
                    {
                        "spec_id": task["labels"][0]["attributes"][0]["id"],
                        "value": task["labels"][0]["attributes"][0]["values"][0]
                    },
                ],
                "shapes": [
                    {
                        "frame": 0,
                        "points": [1.0, 2.1, 100, 300.222],
                        "type": "rectangle",
                        "occluded": False,
                        "outside": False,
                        "attributes": [
                            {
                                "spec_id": task["labels"][0]["attributes"][1]["id"],
                                "value": task["labels"][0]["attributes"][1]["default_value"]
                            }
                        ]
                    },
                    {
                        "frame": 1,
                        "attributes": [],
                        "points": [2.0, 2.1, 100, 300.222],
                        "type": "rectangle",
                        "occluded": True,
                        "outside": True
                    },
                ]
            },
            {
                "frame": 1,
                "label_id": task["labels"][1]["id"],
                "group": None,
                "attributes": [],
                "shapes": [
                    {
                        "frame": 1,
                        "attributes": [],
                        "points": [1.0, 2.1, 100, 300.222],
                        "type": "rectangle",
                        "occluded": False,
                        "outside": False
                    }
                ]
            },
        ]
    }
    response = self._patch_api_v1_tasks_id_annotations(task["id"], annotator,
        "create", data)
    data["version"] += 1
    self.assertEqual(response.status_code, HTTP_200_OK)
    self._check_response(response, data)
    response = self._get_api_v1_tasks_id_annotations(task["id"], annotator)
    # Again, the server adds default attribute values the payload omitted.
    data["tags"][0]["attributes"] = default_attr_values[data["tags"][0]["label_id"]]["all"]
    data["tracks"][0]["shapes"][1]["attributes"] = default_attr_values[data["tracks"][0]["label_id"]]["mutable"]
    self.assertEqual(response.status_code, HTTP_200_OK)
    self._check_response(response, data)
    # PATCH action=update: mutate a few fields of the server's own payload.
    data = response.data
    if not response.status_code in [
        status.HTTP_403_FORBIDDEN, status.HTTP_401_UNAUTHORIZED]:
        data["tags"][0]["label_id"] = task["labels"][0]["id"]
        data["shapes"][0]["points"] = [1, 2, 3.0, 100, 120, 1, 2, 4.0]
        data["shapes"][0]["type"] = "polygon"
        data["tracks"][0]["group"] = 10
        data["tracks"][0]["shapes"][0]["outside"] = False
        data["tracks"][0]["shapes"][0]["occluded"] = False
    response = self._patch_api_v1_tasks_id_annotations(task["id"], annotator,
        "update", data)
    # For anonymous access ``data`` is the 401 error body, hence the
    # defensive .get() instead of a plain key access.
    data["version"] = data.get("version", 0) + 1
    self.assertEqual(response.status_code, HTTP_200_OK)
    self._check_response(response, data)
    response = self._get_api_v1_tasks_id_annotations(task["id"], annotator)
    self.assertEqual(response.status_code, HTTP_200_OK)
    self._check_response(response, data)
    # PATCH action=delete removes the listed objects; GET must be empty.
    response = self._patch_api_v1_tasks_id_annotations(task["id"], annotator,
        "delete", data)
    data["version"] += 1
    self.assertEqual(response.status_code, HTTP_200_OK)
    self._check_response(response, data)
    data = {
        "version": data["version"],
        "tags": [],
        "shapes": [],
        "tracks": []
    }
    response = self._get_api_v1_tasks_id_annotations(task["id"], annotator)
    self.assertEqual(response.status_code, HTTP_200_OK)
    self._check_response(response, data)
    # PATCH action=create with unknown label/spec ids must be rejected
    # with 400 BAD REQUEST.
    data = {
        "version": data["version"],
        "tags": [
            {
                "frame": 0,
                "label_id": 11010101,
                "group": None,
                "attributes": []
            }
        ],
        "shapes": [
            {
                "frame": 0,
                "label_id": task["labels"][0]["id"],
                "group": None,
                "attributes": [
                    {
                        "spec_id": 32234234,
                        "value": task["labels"][0]["attributes"][0]["values"][0]
                    },
                    {
                        "spec_id": task["labels"][0]["attributes"][1]["id"],
                        "value": task["labels"][0]["attributes"][0]["default_value"]
                    }
                ],
                "points": [1.0, 2.1, 100, 300.222],
                "type": "rectangle",
                "occluded": False
            },
            {
                "frame": 1,
                "label_id": 1212121,
                "group": None,
                "attributes": [],
                "points": [2.0, 2.1, 100, 300.222, 400, 500, 1, 3],
                "type": "polygon",
                "occluded": False
            },
        ],
        "tracks": [
            {
                "frame": 0,
                "label_id": 0,
                "group": None,
                "attributes": [],
                "shapes": [
                    {
                        "frame": 0,
                        "points": [1.0, 2.1, 100, 300.222],
                        "type": "rectangle",
                        "occluded": False,
                        "outside": False,
                        "attributes": [
                            {
                                "spec_id": 10000,
                                "value": task["labels"][0]["attributes"][0]["values"][0]
                            },
                            {
                                "spec_id": task["labels"][0]["attributes"][1]["id"],
                                "value": task["labels"][0]["attributes"][0]["default_value"]
                            }
                        ]
                    },
                    {
                        "frame": 1,
                        "attributes": [],
                        "points": [2.0, 2.1, 100, 300.222],
                        "type": "rectangle",
                        "occluded": True,
                        "outside": True
                    },
                ]
            },
            {
                "frame": 1,
                "label_id": task["labels"][1]["id"],
                "group": None,
                "attributes": [],
                "shapes": [
                    {
                        "frame": 1,
                        "attributes": [],
                        "points": [1.0, 2.1, 100, 300.222],
                        "type": "rectangle",
                        "occluded": False,
                        "outside": False
                    }
                ]
            },
        ]
    }
    response = self._patch_api_v1_tasks_id_annotations(task["id"], annotator,
        "create", data)
    self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST)
def _run_api_v1_tasks_id_annotations_dump_load(self, owner, assignee, annotator):
    """Round-trip annotations through every registered dump/load format.

    For each dumper: create a task, PUT known annotations, dump them
    (async request, completion poll, download), sanity-check the dump,
    wipe the task, and re-upload the dump through each loader, expecting
    the original annotations back.
    """
    if annotator:
        HTTP_200_OK = status.HTTP_200_OK
        HTTP_204_NO_CONTENT = status.HTTP_204_NO_CONTENT
        HTTP_202_ACCEPTED = status.HTTP_202_ACCEPTED
        HTTP_201_CREATED = status.HTTP_201_CREATED
    else:
        # Anonymous access: everything must come back 401.
        HTTP_200_OK = status.HTTP_401_UNAUTHORIZED
        HTTP_204_NO_CONTENT = status.HTTP_401_UNAUTHORIZED
        HTTP_202_ACCEPTED = status.HTTP_401_UNAUTHORIZED
        HTTP_201_CREATED = status.HTTP_401_UNAUTHORIZED
    def _get_initial_annotation(annotation_format):
        # Build a payload restricted to what the given format can
        # represent (e.g. YOLO has no tracks, PASCAL VOC no attributes).
        rectangle_tracks_with_attrs = [{
            "frame": 0,
            "label_id": task["labels"][0]["id"],
            "group": 0,
            "attributes": [
                {
                    "spec_id": task["labels"][0]["attributes"][0]["id"],
                    "value": task["labels"][0]["attributes"][0]["values"][0]
                },
            ],
            "shapes": [
                {
                    "frame": 0,
                    "points": [1.0, 2.1, 50.1, 30.22],
                    "type": "rectangle",
                    "occluded": False,
                    "outside": False,
                    "attributes": [
                        {
                            "spec_id": task["labels"][0]["attributes"][1]["id"],
                            "value": task["labels"][0]["attributes"][1]["default_value"]
                        }
                    ]
                },
                {
                    "frame": 1,
                    "points": [2.0, 2.1, 77.2, 36.22],
                    "type": "rectangle",
                    "occluded": True,
                    "outside": True,
                    "attributes": [
                        {
                            "spec_id": task["labels"][0]["attributes"][1]["id"],
                            "value": task["labels"][0]["attributes"][1]["default_value"]
                        }
                    ]
                },
            ]
        }]
        rectangle_tracks_wo_attrs = [{
            "frame": 1,
            "label_id": task["labels"][1]["id"],
            "group": 0,
            "attributes": [],
            "shapes": [
                {
                    "frame": 1,
                    "attributes": [],
                    "points": [1.0, 2.1, 50.2, 36.6],
                    "type": "rectangle",
                    "occluded": False,
                    "outside": False
                },
                {
                    "frame": 2,
                    "attributes": [],
                    "points": [1.0, 2.1, 51, 36.6],
                    "type": "rectangle",
                    "occluded": False,
                    "outside": True
                }
            ]
        }]
        rectangle_shapes_with_attrs = [{
            "frame": 0,
            "label_id": task["labels"][0]["id"],
            "group": 0,
            "attributes": [
                {
                    "spec_id": task["labels"][0]["attributes"][0]["id"],
                    "value": task["labels"][0]["attributes"][0]["values"][0]
                },
                {
                    "spec_id": task["labels"][0]["attributes"][1]["id"],
                    "value": task["labels"][0]["attributes"][1]["default_value"]
                }
            ],
            "points": [1.0, 2.1, 10.6, 53.22],
            "type": "rectangle",
            "occluded": False
        }]
        rectangle_shapes_wo_attrs = [{
            "frame": 1,
            "label_id": task["labels"][1]["id"],
            "group": 0,
            "attributes": [],
            "points": [2.0, 2.1, 40, 50.7],
            "type": "rectangle",
            "occluded": False
        }]
        polygon_shapes_wo_attrs = [{
            "frame": 1,
            "label_id": task["labels"][1]["id"],
            "group": 0,
            "attributes": [],
            "points": [2.0, 2.1, 100, 30.22, 40, 77, 1, 3],
            "type": "polygon",
            "occluded": False
        }]
        polygon_shapes_with_attrs = [{
            "frame": 2,
            "label_id": task["labels"][0]["id"],
            "group": 1,
            "attributes": [
                {
                    "spec_id": task["labels"][0]["attributes"][0]["id"],
                    "value": task["labels"][0]["attributes"][0]["values"][1]
                },
                {
                    "spec_id": task["labels"][0]["attributes"][1]["id"],
                    "value": task["labels"][0]["attributes"][1]["default_value"]
                }
            ],
            "points": [20.0, 0.1, 10, 3.22, 4, 7, 10, 30, 1, 2, 4.44, 5.55],
            "type": "polygon",
            "occluded": True
        },
        {
            "frame": 2,
            "label_id": task["labels"][1]["id"],
            "group": 1,
            "attributes": [],
            "points": [4, 7, 10, 30, 4, 5.55],
            "type": "polygon",
            "occluded": False
        }]
        tags_wo_attrs = [{
            "frame": 2,
            "label_id": task["labels"][1]["id"],
            "group": 0,
            "attributes": []
        }]
        tags_with_attrs = [{
            "frame": 1,
            "label_id": task["labels"][0]["id"],
            "group": 3,
            "attributes": [
                {
                    "spec_id": task["labels"][0]["attributes"][0]["id"],
                    "value": task["labels"][0]["attributes"][0]["values"][1]
                },
                {
                    "spec_id": task["labels"][0]["attributes"][1]["id"],
                    "value": task["labels"][0]["attributes"][1]["default_value"]
                }
            ],
        }]
        annotations = {
            "version": 0,
            "tags": [],
            "shapes": [],
            "tracks": [],
        }
        # Select only the annotation kinds each format supports.
        if annotation_format == "CVAT XML 1.1 for videos":
            annotations["tracks"] = rectangle_tracks_with_attrs + rectangle_tracks_wo_attrs
        elif annotation_format == "CVAT XML 1.1 for images":
            annotations["shapes"] = rectangle_shapes_with_attrs + rectangle_shapes_wo_attrs \
                + polygon_shapes_wo_attrs + polygon_shapes_with_attrs
            annotations["tags"] = tags_with_attrs + tags_wo_attrs
        elif annotation_format == "PASCAL VOC ZIP 1.1":
            annotations["shapes"] = rectangle_shapes_wo_attrs
            annotations["tags"] = tags_wo_attrs
        elif annotation_format == "YOLO ZIP 1.1" or \
            annotation_format == "TFRecord ZIP 1.0":
            annotations["shapes"] = rectangle_shapes_wo_attrs
        elif annotation_format == "COCO JSON 1.0":
            annotations["shapes"] = polygon_shapes_wo_attrs
        elif annotation_format == "MASK ZIP 1.1":
            annotations["shapes"] = rectangle_shapes_wo_attrs + polygon_shapes_wo_attrs
            annotations["tracks"] = rectangle_tracks_wo_attrs
        elif annotation_format == "MOT CSV 1.0":
            annotations["tracks"] = rectangle_tracks_wo_attrs
        elif annotation_format == "LabelMe ZIP 3.0 for images":
            annotations["shapes"] = rectangle_shapes_with_attrs + \
                rectangle_shapes_wo_attrs + \
                polygon_shapes_wo_attrs + \
                polygon_shapes_with_attrs
        return annotations
    response = self._get_annotation_formats(annotator)
    self.assertEqual(response.status_code, HTTP_200_OK)
    if annotator is not None:
        supported_formats = response.data
    else:
        # Anonymous users cannot query formats; fall back to one known
        # format so the 401 paths below are still exercised.
        supported_formats = [{
            "name": "CVAT",
            "dumpers": [{
                "display_name": "CVAT XML 1.1 for images"
            }],
            "loaders": [{
                "display_name": "CVAT XML 1.1"
            }]
        }]
    self.assertTrue(isinstance(supported_formats, list) and supported_formats)
    for annotation_format in supported_formats:
        for dumper in annotation_format["dumpers"]:
            # 1. create task
            task, jobs = self._create_task(owner, assignee)
            # 2. add annotation
            data = _get_initial_annotation(dumper["display_name"])
            response = self._put_api_v1_tasks_id_annotations(task["id"], annotator, data)
            data["version"] += 1
            self.assertEqual(response.status_code, HTTP_200_OK)
            self._check_response(response, data)
            # 3. download annotation: first request starts the async dump
            # (202), the second reports completion (201), the third with
            # action=download streams the file (200).
            response = self._dump_api_v1_tasks_id_annotations(task["id"], annotator,
                "format={}".format(dumper["display_name"]))
            self.assertEqual(response.status_code, HTTP_202_ACCEPTED)
            response = self._dump_api_v1_tasks_id_annotations(task["id"], annotator,
                "format={}".format(dumper["display_name"]))
            self.assertEqual(response.status_code, HTTP_201_CREATED)
            response = self._dump_api_v1_tasks_id_annotations(task["id"], annotator,
                "action=download&format={}".format(dumper["display_name"]))
            self.assertEqual(response.status_code, HTTP_200_OK)
            # 4. check downloaded data
            if response.status_code == status.HTTP_200_OK:
                self.assertTrue(response.streaming)
                content = io.BytesIO(b"".join(response.streaming_content))
                self._check_dump_content(content, task, jobs, data, annotation_format["name"])
                content.seek(0)
            # 5. remove annotation form the task
            response = self._delete_api_v1_tasks_id_annotations(task["id"], annotator)
            data["version"] += 1
            self.assertEqual(response.status_code, HTTP_204_NO_CONTENT)
            # 6. upload annotation and check annotation
            uploaded_data = {
                "annotation_file": content,
            }
            for loader in annotation_format["loaders"]:
                if loader["display_name"] == "MASK ZIP 1.1":
                    continue # can't really predict the result and check
                response = self._upload_api_v1_tasks_id_annotations(task["id"], annotator, uploaded_data, "format={}".format(loader["display_name"]))
                self.assertEqual(response.status_code, HTTP_202_ACCEPTED)
                response = self._upload_api_v1_tasks_id_annotations(task["id"], annotator, {}, "format={}".format(loader["display_name"]))
                self.assertEqual(response.status_code, HTTP_201_CREATED)
                response = self._get_api_v1_tasks_id_annotations(task["id"], annotator)
                self.assertEqual(response.status_code, HTTP_200_OK)
                data["version"] += 2 # upload is delete + put
                self._check_response(response, data)
def _check_dump_content(self, content, task, jobs, data, annotation_format_name):
    """Sanity-check a dumped annotation file for the given format family."""
    def etree_to_dict(t):
        # Recursively convert an ElementTree node into nested dicts;
        # XML attributes are stored under '@'-prefixed keys, repeated
        # child tags become lists.
        d = {t.tag: {} if t.attrib else None}
        children = list(t)
        if children:
            dd = defaultdict(list)
            for dc in map(etree_to_dict, children):
                for k, v in dc.items():
                    dd[k].append(v)
            d = {t.tag: {k: v[0] if len(v) == 1 else v
                for k, v in dd.items()}}
        if t.attrib:
            d[t.tag].update(('@' + k, v) for k, v in t.attrib.items())
        if t.text:
            text = t.text.strip()
            if not (children or t.attrib):
                d[t.tag] = text
        return d
    if annotation_format_name == "CVAT":
        # XML dump: root must be <annotations> with exactly one <meta>
        # element carrying the task name.
        xmldump = ET.fromstring(content.read())
        self.assertEqual(xmldump.tag, "annotations")
        tags = xmldump.findall("./meta")
        self.assertEqual(len(tags), 1)
        meta = etree_to_dict(tags[0])["meta"]
        self.assertEqual(meta["task"]["name"], task["name"])
    elif annotation_format_name == "PASCAL VOC":
        self.assertTrue(zipfile.is_zipfile(content))
    elif annotation_format_name == "YOLO":
        self.assertTrue(zipfile.is_zipfile(content))
    elif annotation_format_name == "COCO":
        # JSON dump: must parse with pycocotools and contain annotations.
        with tempfile.NamedTemporaryFile() as tmp_file:
            tmp_file.write(content.read())
            tmp_file.flush()
            coco = coco_loader.COCO(tmp_file.name)
            self.assertTrue(coco.getAnnIds())
    elif annotation_format_name == "TFRecord":
        self.assertTrue(zipfile.is_zipfile(content))
    elif annotation_format_name == "MASK":
        self.assertTrue(zipfile.is_zipfile(content))
def _run_coco_annotation_upload_test(self, user):
    """Upload a minimal hand-written COCO JSON payload and expect success."""
    def generate_coco_anno():
        # One image, two categories, one bbox annotation.
        return b"""{
        "categories": [
            {
                "id": 1,
                "name": "car",
                "supercategory": ""
            },
            {
                "id": 2,
                "name": "person",
                "supercategory": ""
            }
        ],
        "images": [
            {
                "coco_url": "",
                "date_captured": "",
                "flickr_url": "",
                "license": 0,
                "id": 0,
                "file_name": "test_1.jpg",
                "height": 720,
                "width": 1280
            }
        ],
        "annotations": [
            {
                "category_id": 1,
                "id": 1,
                "image_id": 0,
                "iscrowd": 0,
                "segmentation": [
                    []
                ],
                "area": 17702.0,
                "bbox": [
                    574.0,
                    407.0,
                    167.0,
                    106.0
                ]
            }
        ]
    }"""
    response = self._get_annotation_formats(user)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    supported_formats = response.data
    self.assertTrue(isinstance(supported_formats, list) and supported_formats)
    # Find the COCO format among the registered ones.
    coco_format = None
    for f in response.data:
        if f["name"] == "COCO":
            coco_format = f
            break
    self.assertTrue(coco_format)
    loader = coco_format["loaders"][0]
    task, _ = self._create_task(user, user)
    content = io.BytesIO(generate_coco_anno())
    content.seek(0)
    uploaded_data = {
        "annotation_file": content,
    }
    # First PUT starts the async import (202); second polls completion (201).
    response = self._upload_api_v1_tasks_id_annotations(task["id"], user, uploaded_data, "format={}".format(loader["display_name"]))
    self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
    response = self._upload_api_v1_tasks_id_annotations(task["id"], user, {}, "format={}".format(loader["display_name"]))
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    response = self._get_api_v1_tasks_id_annotations(task["id"], user)
    self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_api_v1_tasks_id_annotations_admin(self):
    """Annotation CRUD on an admin-owned task, performed by the assignee."""
    self._run_api_v1_tasks_id_annotations(self.admin, self.assignee,
        self.assignee)
def test_api_v1_tasks_id_annotations_user(self):
    """Annotation CRUD on a user-owned task, performed by the assignee."""
    self._run_api_v1_tasks_id_annotations(self.user, self.assignee,
        self.assignee)
def test_api_v1_tasks_id_annotations_no_auth(self):
    """Anonymous (annotator=None) task-annotation requests must be rejected."""
    self._run_api_v1_tasks_id_annotations(self.user, self.assignee, None)
def test_api_v1_tasks_id_annotations_dump_load_admin(self):
    """Dump/load round trip on an admin-owned task, run by the assignee."""
    self._run_api_v1_tasks_id_annotations_dump_load(self.admin, self.assignee,
        self.assignee)
def test_api_v1_tasks_id_annotations_dump_load_user(self):
    """Dump/load round trip on a user-owned task, run by the assignee."""
    self._run_api_v1_tasks_id_annotations_dump_load(self.user, self.assignee,
        self.assignee)
def test_api_v1_tasks_id_annotations_dump_load_no_auth(self):
    """Anonymous dump/load requests must all be rejected with 401."""
    self._run_api_v1_tasks_id_annotations_dump_load(self.user, self.assignee, None)
def test_api_v1_tasks_id_annotations_upload_coco_user(self):
    """A regular user can import an externally produced COCO file."""
    self._run_coco_annotation_upload_test(self.user)
class ServerShareAPITestCase(APITestCase):
    """Directory listing via GET /api/v1/server/share for every user role."""

    def setUp(self):
        self.client = APIClient()

    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)

    @classmethod
    def setUpClass(cls):
        """Populate the share root with a small fixture tree.

        Layout: file0.txt, test1/file1.txt, test1/test3/ (empty dir),
        test2/file2.txt.
        """
        super().setUpClass()
        path = os.path.join(settings.SHARE_ROOT, "file0.txt")
        # Fix: the original used open(path, "w").write(...), which leaks
        # the file handle; context managers close deterministically.
        with open(path, "w") as stream:
            stream.write("test string")
        path = os.path.join(settings.SHARE_ROOT, "test1")
        os.makedirs(path)
        path = os.path.join(path, "file1.txt")
        with open(path, "w") as stream:
            stream.write("test string")
        directory = os.path.join(settings.SHARE_ROOT, "test1", "test3")
        os.makedirs(directory)
        path = os.path.join(settings.SHARE_ROOT, "test2")
        os.makedirs(path)
        path = os.path.join(path, "file2.txt")
        with open(path, "w") as stream:
            stream.write("test string")

    @classmethod
    def tearDownClass(cls):
        """Remove the fixture tree created in setUpClass."""
        super().tearDownClass()
        os.remove(os.path.join(settings.SHARE_ROOT, "file0.txt"))
        shutil.rmtree(os.path.join(settings.SHARE_ROOT, "test1"))
        shutil.rmtree(os.path.join(settings.SHARE_ROOT, "test2"))

    def _run_api_v1_server_share(self, user, directory):
        """List ``directory`` of the share acting as ``user`` (None = anonymous)."""
        with ForceLogin(user, self.client):
            response = self.client.get(
                '/api/v1/server/share?directory={}'.format(directory))
        return response

    def _test_api_v1_server_share(self, user):
        """Check that ``user`` sees the expected listing for every directory."""
        def _check(directory, expected):
            # Entries come back unordered, so compare sorted by name.
            response = self._run_api_v1_server_share(user, directory)
            self.assertEqual(response.status_code, status.HTTP_200_OK)
            compare_objects(
                self=self,
                obj1=sorted(expected, key=lambda d: d["name"]),
                obj2=sorted(response.data, key=lambda d: d["name"]),
                ignore_keys=[]
            )

        _check("/", [
            {"name": "test1", "type": "DIR"},
            {"name": "test2", "type": "DIR"},
            {"name": "file0.txt", "type": "REG"},
        ])
        _check("/test1", [
            {"name": "file1.txt", "type": "REG"},
            {"name": "test3", "type": "DIR"},
        ])
        _check("/test1/test3", [])
        _check("/test2", [
            {"name": "file2.txt", "type": "REG"},
        ])
        # A directory that does not exist on the share is a client error.
        response = self._run_api_v1_server_share(user, "/test4")
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

    def test_api_v1_server_share_admin(self):
        self._test_api_v1_server_share(self.admin)

    def test_api_v1_server_share_owner(self):
        self._test_api_v1_server_share(self.owner)

    def test_api_v1_server_share_assignee(self):
        self._test_api_v1_server_share(self.assignee)

    def test_api_v1_server_share_user(self):
        self._test_api_v1_server_share(self.user)

    def test_api_v1_server_share_annotator(self):
        self._test_api_v1_server_share(self.annotator)

    def test_api_v1_server_share_observer(self):
        self._test_api_v1_server_share(self.observer)

    def test_api_v1_server_share_no_auth(self):
        # Anonymous access is rejected outright.
        response = self._run_api_v1_server_share(None, "/")
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
| 38.443202 | 157 | 0.544194 |
import os
import shutil
from PIL import Image
from io import BytesIO
from enum import Enum
import random
from rest_framework.test import APITestCase, APIClient
from rest_framework import status
from django.conf import settings
from django.contrib.auth.models import User, Group
from cvat.apps.engine.models import (Task, Segment, Job, StatusChoice,
AttributeType, Project, Data)
from unittest import mock
import io
import xml.etree.ElementTree as ET
from collections import defaultdict
import zipfile
from pycocotools import coco as coco_loader
import tempfile
import av
import numpy as np
def create_db_users(cls):
    """Create the standard groups and six test users and attach them to *cls*.

    Exposes them both by role (cls.admin, cls.owner, ...) and by username
    alias (cls.user1 ... cls.user5).
    """
    group_admin, _ = Group.objects.get_or_create(name="admin")
    group_user, _ = Group.objects.get_or_create(name="user")
    group_annotator, _ = Group.objects.get_or_create(name="annotator")
    group_observer, _ = Group.objects.get_or_create(name="observer")

    user_admin = User.objects.create_superuser(username="admin", email="",
        password="admin")
    user_admin.groups.add(group_admin)

    # The five regular accounts: (username == password, group).
    regular = []
    for name, group in (("user1", group_user), ("user2", group_annotator),
                        ("user3", group_annotator), ("user4", group_observer),
                        ("user5", group_user)):
        account = User.objects.create_user(username=name, password=name)
        account.groups.add(group)
        regular.append(account)
    user_owner, user_assignee, user_annotator, user_observer, user_dummy = regular

    cls.admin = user_admin
    cls.owner = cls.user1 = user_owner
    cls.assignee = cls.user2 = user_assignee
    cls.annotator = cls.user3 = user_annotator
    cls.observer = cls.user4 = user_observer
    cls.user = cls.user5 = user_dummy
def create_db_task(data):
    """Create a Task with backing Data, work directories, segments and jobs.

    *data* is consumed: "size" and "image_quality" are popped for the Data
    record; the remaining keys become Task fields.  Returns the saved Task.
    """
    db_data = Data.objects.create(
        size=data.pop("size"),
        image_quality=data.pop("image_quality"),
    )
    shutil.rmtree(db_data.get_data_dirname(), ignore_errors=True)
    os.makedirs(db_data.get_data_dirname())
    os.makedirs(db_data.get_upload_dirname())

    db_task = Task.objects.create(**data)
    shutil.rmtree(db_task.get_task_dirname(), ignore_errors=True)
    os.makedirs(db_task.get_task_dirname())
    os.makedirs(db_task.get_task_logs_dirname())
    os.makedirs(db_task.get_task_artifacts_dirname())
    db_task.data = db_data
    db_task.save()

    # One segment (with one job) per segment_size-frame slice of the data.
    for start_frame in range(0, db_task.data.size, db_task.segment_size):
        stop_frame = min(start_frame + db_task.segment_size - 1,
                         db_task.data.size - 1)
        db_segment = Segment(task=db_task, start_frame=start_frame,
                             stop_frame=stop_frame)
        db_segment.save()
        db_job = Job(segment=db_segment)
        db_job.save()

    return db_task
def create_dummy_db_tasks(obj, project=None):
    """Create four canned tasks (various owners and sizes) and return them.

    ``obj`` must expose .owner, .assignee, .user and .admin (see
    create_db_users); every task is attached to ``project``.
    """
    specs = [
        {
            "name": "my task #1",
            "owner": obj.owner,
            "assignee": obj.assignee,
            "overlap": 0,
            "segment_size": 100,
            "z_order": False,
            "image_quality": 75,
            "size": 100,
        },
        {
            "name": "my multijob task",
            "owner": obj.user,
            "overlap": 0,
            "segment_size": 100,
            "z_order": True,
            "image_quality": 50,
            "size": 200,
        },
        {
            "name": "my task #2",
            "owner": obj.owner,
            "assignee": obj.assignee,
            "overlap": 0,
            "segment_size": 100,
            "z_order": False,
            "image_quality": 75,
            "size": 100,
        },
        {
            "name": "super task",
            "owner": obj.admin,
            "overlap": 0,
            "segment_size": 50,
            "z_order": False,
            "image_quality": 95,
            "size": 50,
        },
    ]
    tasks = []
    for spec in specs:
        spec["project"] = project
        tasks.append(create_db_task(spec))
    return tasks
def create_dummy_db_projects(obj):
    """Create five canned projects and return them.

    All projects except the first ("my empty project") are populated with
    the standard dummy tasks.
    """
    specs = [
        ({"name": "my empty project", "owner": obj.owner,
          "assignee": obj.assignee}, False),
        ({"name": "my project without assignee", "owner": obj.user}, True),
        ({"name": "my big project", "owner": obj.owner,
          "assignee": obj.assignee}, True),
        ({"name": "public project"}, True),
        ({"name": "super project", "owner": obj.admin,
          "assignee": obj.assignee}, True),
    ]
    projects = []
    for fields, with_tasks in specs:
        db_project = Project.objects.create(**fields)
        if with_tasks:
            create_dummy_db_tasks(obj, db_project)
        projects.append(db_project)
    return projects
class ForceLogin:
    """Context manager that force-logs *user* into *client* for its scope.

    A falsy *user* makes both entry and exit no-ops, which lets tests
    drive anonymous requests through the same code path.
    """

    def __init__(self, user, client):
        self.user = user
        self.client = client

    def __enter__(self):
        if not self.user:
            return self
        self.client.force_login(self.user,
            backend='django.contrib.auth.backends.ModelBackend')
        return self

    def __exit__(self, exception_type, exception_value, traceback):
        if not self.user:
            return
        self.client.logout()
class JobGetAPITestCase(APITestCase):
    """Read-access rules for GET /api/v1/jobs/<id> per user role."""

    def setUp(self):
        self.client = APIClient()

    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        # One dummy task; its first job is assigned to the annotator.
        cls.task = create_dummy_db_tasks(cls)[0]
        cls.job = Job.objects.filter(segment__task_id=cls.task.id).first()
        cls.job.assignee = cls.annotator
        cls.job.save()

    def _run_api_v1_jobs_id(self, jid, user):
        # Perform the GET as ``user`` (None = anonymous).
        with ForceLogin(user, self.client):
            response = self.client.get('/api/v1/jobs/{}'.format(jid))
        return response

    def _check_request(self, response):
        # A permitted GET returns the job with its segment frame range.
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data["id"], self.job.id)
        self.assertEqual(response.data["status"], StatusChoice.ANNOTATION)
        self.assertEqual(response.data["start_frame"], self.job.segment.start_frame)
        self.assertEqual(response.data["stop_frame"], self.job.segment.stop_frame)

    def test_api_v1_jobs_id_admin(self):
        response = self._run_api_v1_jobs_id(self.job.id, self.admin)
        self._check_request(response)
        # Unknown job ids must 404 even for privileged users.
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.admin)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_owner(self):
        response = self._run_api_v1_jobs_id(self.job.id, self.owner)
        self._check_request(response)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.owner)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_annotator(self):
        response = self._run_api_v1_jobs_id(self.job.id, self.annotator)
        self._check_request(response)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.annotator)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_observer(self):
        response = self._run_api_v1_jobs_id(self.job.id, self.observer)
        self._check_request(response)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.observer)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_user(self):
        # An unrelated user may not read the job.
        response = self._run_api_v1_jobs_id(self.job.id, self.user)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.user)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_no_auth(self):
        # Anonymous access is rejected for any job id.
        response = self._run_api_v1_jobs_id(self.job.id, None)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        response = self._run_api_v1_jobs_id(self.job.id + 10, None)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class JobUpdateAPITestCase(APITestCase):
    """Write-access rules for PUT /api/v1/jobs/<id> per user role."""

    def setUp(self):
        self.client = APIClient()
        # Tasks are re-created per test (not in setUpTestData) because the
        # tests mutate the job.
        self.task = create_dummy_db_tasks(self)[0]
        self.job = Job.objects.filter(segment__task_id=self.task.id).first()
        self.job.assignee = self.annotator
        self.job.save()

    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)

    def _run_api_v1_jobs_id(self, jid, user, data):
        # Perform the PUT as ``user`` (None = anonymous).
        with ForceLogin(user, self.client):
            response = self.client.put('/api/v1/jobs/{}'.format(jid), data=data, format='json')
        return response

    def _check_request(self, response, data):
        # A permitted update echoes the new status/assignee; the segment
        # frame range stays untouched.
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data["id"], self.job.id)
        self.assertEqual(response.data["status"], data.get('status', self.job.status))
        assignee = self.job.assignee.id if self.job.assignee else None
        self.assertEqual(response.data["assignee"], data.get('assignee', assignee))
        self.assertEqual(response.data["start_frame"], self.job.segment.start_frame)
        self.assertEqual(response.data["stop_frame"], self.job.segment.stop_frame)

    def test_api_v1_jobs_id_admin(self):
        data = {"status": StatusChoice.COMPLETED, "assignee": self.owner.id}
        response = self._run_api_v1_jobs_id(self.job.id, self.admin, data)
        self._check_request(response, data)
        # Unknown job ids must 404.
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.admin, data)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_owner(self):
        data = {"status": StatusChoice.VALIDATION, "assignee": self.annotator.id}
        response = self._run_api_v1_jobs_id(self.job.id, self.owner, data)
        self._check_request(response, data)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.owner, data)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_annotator(self):
        data = {"status": StatusChoice.ANNOTATION, "assignee": self.user.id}
        response = self._run_api_v1_jobs_id(self.job.id, self.annotator, data)
        self._check_request(response, data)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.annotator, data)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_observer(self):
        # Observers have read-only access: updates are forbidden.
        data = {"status": StatusChoice.ANNOTATION, "assignee": self.admin.id}
        response = self._run_api_v1_jobs_id(self.job.id, self.observer, data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.observer, data)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_user(self):
        # An unrelated user may not update the job.
        data = {"status": StatusChoice.ANNOTATION, "assignee": self.user.id}
        response = self._run_api_v1_jobs_id(self.job.id, self.user, data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        response = self._run_api_v1_jobs_id(self.job.id + 10, self.user, data)
        self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)

    def test_api_v1_jobs_id_no_auth(self):
        # Anonymous access is rejected for any job id.
        data = {"status": StatusChoice.ANNOTATION, "assignee": self.user.id}
        response = self._run_api_v1_jobs_id(self.job.id, None, data)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        response = self._run_api_v1_jobs_id(self.job.id + 10, None, data)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class JobPartialUpdateAPITestCase(JobUpdateAPITestCase):
    """Re-runs the inherited job update suite over PATCH instead of PUT,
    plus two partial-payload cases."""

    def _run_api_v1_jobs_id(self, jid, user, data):
        # Same endpoint as the parent class, but a partial update.
        url = '/api/v1/jobs/{}'.format(jid)
        with ForceLogin(user, self.client):
            result = self.client.patch(url, data=data, format='json')
        return result

    def test_api_v1_jobs_id_annotator_partial(self):
        """The owner may PATCH only the job status."""
        payload = {"status": StatusChoice.VALIDATION}
        reply = self._run_api_v1_jobs_id(self.job.id, self.owner, payload)
        self._check_request(reply, payload)

    def test_api_v1_jobs_id_admin_partial(self):
        """The owner may PATCH only the assignee."""
        payload = {"assignee": self.user.id}
        reply = self._run_api_v1_jobs_id(self.job.id, self.owner, payload)
        self._check_request(reply, payload)
class ServerAboutAPITestCase(APITestCase):
    """Access control and payload sanity for GET /api/v1/server/about."""

    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)

    def setUp(self):
        self.client = APIClient()

    def _run_api_v1_server_about(self, user):
        with ForceLogin(user, self.client):
            result = self.client.get('/api/v1/server/about')
        return result

    def _check_request(self, response):
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # Every advertised field must be present and non-null.
        for field in ("name", "description", "version"):
            self.assertIsNotNone(response.data.get(field, None))

    def test_api_v1_server_about_admin(self):
        self._check_request(self._run_api_v1_server_about(self.admin))

    def test_api_v1_server_about_user(self):
        self._check_request(self._run_api_v1_server_about(self.user))

    def test_api_v1_server_about_no_auth(self):
        # Anonymous requests are rejected.
        reply = self._run_api_v1_server_about(None)
        self.assertEqual(reply.status_code, status.HTTP_401_UNAUTHORIZED)
class ServerExceptionAPITestCase(APITestCase):
    """POST /api/v1/server/exception accepts client-side crash reports."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        # Payload mirroring what the UI sends when it catches an exception.
        cls.data = {
            "system": "Linux",
            "client": "rest_framework.APIClient",
            "time": "2019-01-29T12:34:56.000000Z",
            "task_id": 1,
            "job_id": 1,
            "proj_id": 2,
            "client_id": 12321235123,
            "message": "just test message",
            "filename": "http://localhost/my_file.js",
            "line": 1,
            "column": 1,
            "stack": ""
        }
    def _run_api_v1_server_exception(self, user):
        """POST the canned report as *user*; the client logger is mocked so
        nothing is actually written during the test."""
        with ForceLogin(user, self.client):
            with mock.patch("cvat.apps.engine.views.clogger") as clogger:
                response = self.client.post('/api/v1/server/exception',
                    self.data, format='json')
        return response
    def test_api_v1_server_exception_admin(self):
        response = self._run_api_v1_server_exception(self.admin)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    def test_api_v1_server_exception_user(self):
        response = self._run_api_v1_server_exception(self.user)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    def test_api_v1_server_exception_no_auth(self):
        # Anonymous reports are rejected.
        response = self._run_api_v1_server_exception(None)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class ServerLogsAPITestCase(APITestCase):
    """POST /api/v1/server/logs ingests batched client telemetry events."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        # Two events: one fully populated, one with only the minimal keys.
        cls.data = [
            {
                "time": "2019-01-29T12:34:56.000000Z",
                "task_id": 1,
                "job_id": 1,
                "proj_id": 2,
                "client_id": 12321235123,
                "message": "just test message",
                "name": "add point",
                "is_active": True,
                "payload": {"count": 1}
            },
            {
                "time": "2019-02-24T12:34:56.000000Z",
                "client_id": 12321235123,
                "name": "add point",
                "is_active": True,
            }]
    def _run_api_v1_server_logs(self, user):
        """POST the canned events as *user*; the client logger is mocked so
        nothing is actually written during the test."""
        with ForceLogin(user, self.client):
            with mock.patch("cvat.apps.engine.views.clogger") as clogger:
                response = self.client.post('/api/v1/server/logs',
                    self.data, format='json')
        return response
    def test_api_v1_server_logs_admin(self):
        response = self._run_api_v1_server_logs(self.admin)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    def test_api_v1_server_logs_user(self):
        response = self._run_api_v1_server_logs(self.user)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    def test_api_v1_server_logs_no_auth(self):
        # Anonymous submissions are rejected.
        response = self._run_api_v1_server_logs(None)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class UserAPITestCase(APITestCase):
    """Shared fixtures and assertions for the /api/v1/users test cases.

    ``is_full`` selects between the full (staff-visible) and abbreviated
    serializations of a user record.
    """

    def setUp(self):
        self.client = APIClient()
        create_db_users(self)

    def _check_response(self, user, response, is_full=True):
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self._check_data(user, response.data, is_full)

    def _check_data(self, user, data, is_full):
        # Basic identity fields are serialized in both variants.
        for field in ("id", "username", "first_name", "last_name", "email"):
            self.assertEqual(data[field], getattr(user, field))
        # Privileged fields appear only in the full serialization.
        extra_check = self.assertIn if is_full else self.assertNotIn
        for field in ("groups", "is_staff", "is_superuser",
                      "is_active", "last_login", "date_joined"):
            extra_check(field, data)
class UserListAPITestCase(UserAPITestCase):
    """GET /api/v1/users: any authenticated user may list accounts, but
    only the admin receives the full serialization."""
    def _run_api_v1_users(self, user):
        with ForceLogin(user, self.client):
            response = self.client.get('/api/v1/users')
        return response
    def _check_response(self, user, response, is_full):
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        # Match every returned row against its fixture counterpart; fixture
        # attribute names on the test case equal the usernames.
        for user_info in response.data['results']:
            db_user = getattr(self, user_info['username'])
            self._check_data(db_user, user_info, is_full)
    def test_api_v1_users_admin(self):
        response = self._run_api_v1_users(self.admin)
        self._check_response(self.admin, response, True)
    def test_api_v1_users_user(self):
        response = self._run_api_v1_users(self.user)
        self._check_response(self.user, response, False)
    def test_api_v1_users_annotator(self):
        response = self._run_api_v1_users(self.annotator)
        self._check_response(self.annotator, response, False)
    def test_api_v1_users_observer(self):
        response = self._run_api_v1_users(self.observer)
        self._check_response(self.observer, response, False)
    def test_api_v1_users_no_auth(self):
        # Anonymous listing is rejected.
        response = self._run_api_v1_users(None)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class UserSelfAPITestCase(UserAPITestCase):
    """GET /api/v1/users/self returns the requester's own full record."""

    def _run_api_v1_users_self(self, user):
        with ForceLogin(user, self.client):
            result = self.client.get('/api/v1/users/self')
        return result

    def _assert_sees_own_record(self, user):
        # Any authenticated account gets its own full serialization back.
        self._check_response(user, self._run_api_v1_users_self(user))

    def test_api_v1_users_self_admin(self):
        self._assert_sees_own_record(self.admin)

    def test_api_v1_users_self_user(self):
        self._assert_sees_own_record(self.user)

    def test_api_v1_users_self_annotator(self):
        self._assert_sees_own_record(self.annotator)

    def test_api_v1_users_self_observer(self):
        self._assert_sees_own_record(self.observer)

    def test_api_v1_users_self_no_auth(self):
        # Anonymous access to /self is rejected.
        reply = self._run_api_v1_users_self(None)
        self.assertEqual(reply.status_code, status.HTTP_401_UNAUTHORIZED)
class UserGetAPITestCase(UserAPITestCase):
    """GET /api/v1/users/<id>: one's own record is full; other accounts are
    abbreviated unless the requester is an admin."""
    def _run_api_v1_users_id(self, user, user_id):
        with ForceLogin(user, self.client):
            response = self.client.get('/api/v1/users/{}'.format(user_id))
        return response
    def test_api_v1_users_id_admin(self):
        # Admin sees the full serialization of any account.
        response = self._run_api_v1_users_id(self.admin, self.user.id)
        self._check_response(self.user, response, True)
        response = self._run_api_v1_users_id(self.admin, self.admin.id)
        self._check_response(self.admin, response, True)
        response = self._run_api_v1_users_id(self.admin, self.owner.id)
        self._check_response(self.owner, response, True)
    def test_api_v1_users_id_user(self):
        # Full view of self, abbreviated view of everybody else.
        response = self._run_api_v1_users_id(self.user, self.user.id)
        self._check_response(self.user, response, True)
        response = self._run_api_v1_users_id(self.user, self.owner.id)
        self._check_response(self.owner, response, False)
    def test_api_v1_users_id_annotator(self):
        response = self._run_api_v1_users_id(self.annotator, self.annotator.id)
        self._check_response(self.annotator, response, True)
        response = self._run_api_v1_users_id(self.annotator, self.user.id)
        self._check_response(self.user, response, False)
    def test_api_v1_users_id_observer(self):
        response = self._run_api_v1_users_id(self.observer, self.observer.id)
        self._check_response(self.observer, response, True)
        response = self._run_api_v1_users_id(self.observer, self.user.id)
        self._check_response(self.user, response, False)
    def test_api_v1_users_id_no_auth(self):
        response = self._run_api_v1_users_id(None, self.user.id)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class UserPartialUpdateAPITestCase(UserAPITestCase):
    """PATCH /api/v1/users/<id>: self-service edits are limited to
    non-privileged fields; editing other accounts requires admin rights."""
    def _run_api_v1_users_id(self, user, user_id, data):
        with ForceLogin(user, self.client):
            response = self.client.patch('/api/v1/users/{}'.format(user_id), data=data)
        return response
    def _check_response_with_data(self, user, response, data, is_full):
        # Re-read the record so assertions run against the updated state.
        user = User.objects.get(id=user.id)
        for k,v in data.items():
            self.assertEqual(response.data[k], v)
        self._check_response(user, response, is_full)
    def test_api_v1_users_id_admin_partial(self):
        data = {"username": "user09", "last_name": "my last name"}
        response = self._run_api_v1_users_id(self.admin, self.user.id, data)
        self._check_response_with_data(self.user, response, data, True)
    def test_api_v1_users_id_user_partial(self):
        # Harmless fields may be changed by the user themselves.
        data = {"username": "user10", "first_name": "my name"}
        response = self._run_api_v1_users_id(self.user, self.user.id, data)
        self._check_response_with_data(self.user, response, data, False)
        # Privilege-escalation attempts must be rejected with 400.
        data = {"is_staff": True}
        response = self._run_api_v1_users_id(self.user, self.user.id, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        data = {"username": "admin", "is_superuser": True}
        response = self._run_api_v1_users_id(self.user, self.user.id, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        data = {"username": "non_active", "is_active": False}
        response = self._run_api_v1_users_id(self.user, self.user.id, data)
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
        # Editing a different account is forbidden for a plain user.
        data = {"username": "annotator01", "first_name": "slave"}
        response = self._run_api_v1_users_id(self.user, self.annotator.id, data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
    def test_api_v1_users_id_no_auth_partial(self):
        data = {"username": "user12"}
        response = self._run_api_v1_users_id(None, self.user.id, data)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class UserDeleteAPITestCase(UserAPITestCase):
    """DELETE /api/v1/users/<id>: every user may delete their own account;
    deleting somebody else requires admin rights."""
    def _run_api_v1_users_id(self, user, user_id):
        with ForceLogin(user, self.client):
            response = self.client.delete('/api/v1/users/{}'.format(user_id))
        return response
    def test_api_v1_users_id_admin(self):
        # Admin can delete anyone, including themselves.
        response = self._run_api_v1_users_id(self.admin, self.user.id)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        response = self._run_api_v1_users_id(self.admin, self.admin.id)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
    def test_api_v1_users_id_user(self):
        # Other accounts are off limits; own account may be removed.
        response = self._run_api_v1_users_id(self.user, self.owner.id)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        response = self._run_api_v1_users_id(self.user, self.user.id)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
    def test_api_v1_users_id_annotator(self):
        response = self._run_api_v1_users_id(self.annotator, self.user.id)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        response = self._run_api_v1_users_id(self.annotator, self.annotator.id)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
    def test_api_v1_users_id_observer(self):
        response = self._run_api_v1_users_id(self.observer, self.user.id)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        response = self._run_api_v1_users_id(self.observer, self.observer.id)
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
    def test_api_v1_users_id_no_auth(self):
        response = self._run_api_v1_users_id(None, self.user.id)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class ProjectListAPITestCase(APITestCase):
    """Visibility rules for GET /api/v1/projects per user role."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.projects = create_dummy_db_projects(cls)
    def _run_api_v1_projects(self, user, params=""):
        """GET the project list as *user*; *params* is an optional query string."""
        with ForceLogin(user, self.client):
            response = self.client.get('/api/v1/projects{}'.format(params))
        return response
    def test_api_v1_projects_admin(self):
        # Admin sees every project.
        response = self._run_api_v1_projects(self.admin)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertListEqual(
            sorted([project.name for project in self.projects]),
            sorted([res["name"] for res in response.data["results"]]))
    def test_api_v1_projects_user(self):
        # 'my empty project' is expected to be hidden from this user
        # (fixture-defined ownership — see create_dummy_db_projects).
        response = self._run_api_v1_projects(self.user)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertListEqual(
            sorted([project.name for project in self.projects
                if 'my empty project' != project.name]),
            sorted([res["name"] for res in response.data["results"]]))
    def test_api_v1_projects_observer(self):
        # Observers have read access to everything.
        response = self._run_api_v1_projects(self.observer)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertListEqual(
            sorted([project.name for project in self.projects]),
            sorted([res["name"] for res in response.data["results"]]))
    def test_api_v1_projects_no_auth(self):
        response = self._run_api_v1_projects(None)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class ProjectGetAPITestCase(APITestCase):
    """GET /api/v1/projects/<id> honors the 'engine.project.access' permission."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.projects = create_dummy_db_projects(cls)
    def _run_api_v1_projects_id(self, pid, user):
        with ForceLogin(user, self.client):
            response = self.client.get('/api/v1/projects/{}'.format(pid))
        return response
    def _check_response(self, response, db_project):
        """Assert a 200 reply matching the database record field by field."""
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data["name"], db_project.name)
        owner = db_project.owner.id if db_project.owner else None
        self.assertEqual(response.data["owner"], owner)
        assignee = db_project.assignee.id if db_project.assignee else None
        self.assertEqual(response.data["assignee"], assignee)
        self.assertEqual(response.data["status"], db_project.status)
    def _check_api_v1_projects_id(self, user):
        # Expected outcome is derived from the per-object permission.
        for db_project in self.projects:
            response = self._run_api_v1_projects_id(db_project.id, user)
            if user and user.has_perm("engine.project.access", db_project):
                self._check_response(response, db_project)
            elif user:
                self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
            else:
                self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    def test_api_v1_projects_id_admin(self):
        self._check_api_v1_projects_id(self.admin)
    def test_api_v1_projects_id_user(self):
        self._check_api_v1_projects_id(self.user)
    def test_api_v1_projects_id_observer(self):
        self._check_api_v1_projects_id(self.observer)
    def test_api_v1_projects_id_no_auth(self):
        self._check_api_v1_projects_id(None)
class ProjectDeleteAPITestCase(APITestCase):
    """DELETE /api/v1/projects/<id> honors the 'engine.project.delete'
    permission."""

    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.projects = create_dummy_db_projects(cls)

    def setUp(self):
        self.client = APIClient()

    def _run_api_v1_projects_id(self, pid, user):
        url = '/api/v1/projects/{}'.format(pid)
        with ForceLogin(user, self.client):
            result = self.client.delete(url, format="json")
        return result

    def _check_api_v1_projects_id(self, user):
        # Walk every fixture project and match the reply to the permission.
        for project in self.projects:
            reply = self._run_api_v1_projects_id(project.id, user)
            if user and user.has_perm("engine.project.delete", project):
                self.assertEqual(reply.status_code, status.HTTP_204_NO_CONTENT)
            elif user:
                self.assertEqual(reply.status_code, status.HTTP_403_FORBIDDEN)
            else:
                self.assertEqual(reply.status_code, status.HTTP_401_UNAUTHORIZED)

    def test_api_v1_projects_id_admin(self):
        self._check_api_v1_projects_id(self.admin)

    def test_api_v1_projects_id_user(self):
        self._check_api_v1_projects_id(self.user)

    def test_api_v1_projects_id_observer(self):
        self._check_api_v1_projects_id(self.observer)

    def test_api_v1_projects_id_no_auth(self):
        self._check_api_v1_projects_id(None)
class ProjectCreateAPITestCase(APITestCase):
    """Permission matrix for creating projects via POST /api/v1/projects."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
    def _run_api_v1_projects(self, user, data):
        """POST *data* as *user* and return the raw response."""
        with ForceLogin(user, self.client):
            response = self.client.post('/api/v1/projects', data=data, format="json")
        return response
    def _check_response(self, response, user, data):
        """Assert a 201 reply whose fields echo *data* (defaults otherwise)."""
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(response.data["name"], data["name"])
        # Owner defaults to the creator when not supplied explicitly.
        self.assertEqual(response.data["owner"], data.get("owner", user.id))
        self.assertEqual(response.data["assignee"], data.get("assignee"))
        self.assertEqual(response.data["bug_tracker"], data.get("bug_tracker", ""))
        self.assertEqual(response.data["status"], StatusChoice.ANNOTATION)
    def _check_api_v1_projects(self, user, data):
        # Expected outcome depends solely on the project.create permission.
        response = self._run_api_v1_projects(user, data)
        if user and user.has_perm("engine.project.create"):
            self._check_response(response, user, data)
        elif user:
            self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
        else:
            self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    def test_api_v1_projects_admin(self):
        data = {
            "name": "new name for the project",
            "bug_tracker": "http://example.com"
        }
        self._check_api_v1_projects(self.admin, data)
        # Admin may create a project on behalf of other users.
        data = {
            "owner": self.owner.id,
            "assignee": self.assignee.id,
            "name": "new name for the project"
        }
        self._check_api_v1_projects(self.admin, data)
        data = {
            "owner": self.admin.id,
            "name": "2"
        }
        self._check_api_v1_projects(self.admin, data)
    def test_api_v1_projects_user(self):
        data = {
            "name": "Dummy name",
            "bug_tracker": "it is just text"
        }
        self._check_api_v1_projects(self.user, data)
        data = {
            "owner": self.owner.id,
            "assignee": self.assignee.id,
            "name": "My import project with data"
        }
        self._check_api_v1_projects(self.user, data)
    def test_api_v1_projects_observer(self):
        # Observers are read-only and cannot create projects.
        data = {
            "name": "My Project #1",
            "owner": self.owner.id,
            "assignee": self.assignee.id
        }
        self._check_api_v1_projects(self.observer, data)
    def test_api_v1_projects_no_auth(self):
        data = {
            "name": "My Project #2",
            "owner": self.admin.id,
        }
        self._check_api_v1_projects(None, data)
class ProjectPartialUpdateAPITestCase(APITestCase):
    """PATCH /api/v1/projects/<id> honors the 'engine.project.change'
    permission."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.projects = create_dummy_db_projects(cls)
    def _run_api_v1_projects_id(self, pid, user, data):
        with ForceLogin(user, self.client):
            response = self.client.patch('/api/v1/projects/{}'.format(pid),
                data=data, format="json")
        return response
    def _check_response(self, response, db_project, data):
        """Assert a 200 reply: patched fields come from *data*, the rest
        must still match the database record."""
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        name = data.get("name", db_project.name)
        self.assertEqual(response.data["name"], name)
        owner = db_project.owner.id if db_project.owner else None
        owner = data.get("owner", owner)
        self.assertEqual(response.data["owner"], owner)
        assignee = db_project.assignee.id if db_project.assignee else None
        assignee = data.get("assignee", assignee)
        self.assertEqual(response.data["assignee"], assignee)
        self.assertEqual(response.data["status"], db_project.status)
    def _check_api_v1_projects_id(self, user, data):
        # Expected outcome is derived from the per-object permission.
        for db_project in self.projects:
            response = self._run_api_v1_projects_id(db_project.id, user, data)
            if user and user.has_perm("engine.project.change", db_project):
                self._check_response(response, db_project, data)
            elif user:
                self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
            else:
                self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    def test_api_v1_projects_id_admin(self):
        data = {
            "name": "new name for the project",
            "owner": self.owner.id,
        }
        self._check_api_v1_projects_id(self.admin, data)
    def test_api_v1_projects_id_user(self):
        data = {
            "name": "new name for the project",
            "owner": self.assignee.id,
        }
        self._check_api_v1_projects_id(self.user, data)
    def test_api_v1_projects_id_observer(self):
        data = {
            "name": "new name for the project",
        }
        self._check_api_v1_projects_id(self.observer, data)
    def test_api_v1_projects_id_no_auth(self):
        data = {
            "name": "new name for the project",
        }
        self._check_api_v1_projects_id(None, data)
class ProjectListOfTasksAPITestCase(APITestCase):
    """GET /api/v1/projects/<id>/tasks filters the tasks by requester role."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.projects = create_dummy_db_projects(cls)
    def _run_api_v1_projects_id_tasks(self, user, pid):
        with ForceLogin(user, self.client):
            response = self.client.get('/api/v1/projects/{}/tasks'.format(pid))
        return response
    def test_api_v1_projects_id_tasks_admin(self):
        # Admin sees every task of the project.
        project = self.projects[1]
        response = self._run_api_v1_projects_id_tasks(self.admin, project.id)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertListEqual(
            sorted([task.name for task in project.tasks.all()]),
            sorted([res["name"] for res in response.data["results"]]))
    def test_api_v1_projects_id_tasks_user(self):
        # A user only sees tasks they own or are assigned to, or unowned ones.
        project = self.projects[1]
        response = self._run_api_v1_projects_id_tasks(self.user, project.id)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertListEqual(
            sorted([task.name for task in project.tasks.all()
                if task.owner in [None, self.user] or
                task.assignee in [None, self.user]]),
            sorted([res["name"] for res in response.data["results"]]))
    def test_api_v1_projects_id_tasks_observer(self):
        # Observers have read access to everything.
        project = self.projects[1]
        response = self._run_api_v1_projects_id_tasks(self.observer, project.id)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertListEqual(
            sorted([task.name for task in project.tasks.all()]),
            sorted([res["name"] for res in response.data["results"]]))
    def test_api_v1_projects_id_tasks_no_auth(self):
        project = self.projects[1]
        response = self._run_api_v1_projects_id_tasks(None, project.id)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class TaskListAPITestCase(APITestCase):
    """Visibility rules for GET /api/v1/tasks per user role."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.tasks = create_dummy_db_tasks(cls)
    def _run_api_v1_tasks(self, user, params=""):
        """GET the task list as *user*; *params* is an optional query string."""
        with ForceLogin(user, self.client):
            response = self.client.get('/api/v1/tasks{}'.format(params))
        return response
    def test_api_v1_tasks_admin(self):
        # Admin sees every task.
        response = self._run_api_v1_tasks(self.admin)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertListEqual(
            sorted([task.name for task in self.tasks]),
            sorted([res["name"] for res in response.data["results"]]))
    def test_api_v1_tasks_user(self):
        # A plain user sees only tasks they own or unassigned tasks.
        response = self._run_api_v1_tasks(self.user)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertListEqual(
            sorted([task.name for task in self.tasks
                # PEP 8: compare to the None singleton with `is`, not `==`.
                if (task.owner == self.user or task.assignee is None)]),
            sorted([res["name"] for res in response.data["results"]]))
    def test_api_v1_tasks_observer(self):
        # Observers have read access to everything.
        response = self._run_api_v1_tasks(self.observer)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertListEqual(
            sorted([task.name for task in self.tasks]),
            sorted([res["name"] for res in response.data["results"]]))
    def test_api_v1_tasks_no_auth(self):
        response = self._run_api_v1_tasks(None)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
class TaskGetAPITestCase(APITestCase):
    """GET /api/v1/tasks/<id> honors the 'engine.task.access' permission."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.tasks = create_dummy_db_tasks(cls)
    def _run_api_v1_tasks_id(self, tid, user):
        with ForceLogin(user, self.client):
            response = self.client.get('/api/v1/tasks/{}'.format(tid))
        return response
    def _check_response(self, response, db_task):
        """Assert a 200 reply matching the database record field by field."""
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(response.data["name"], db_task.name)
        self.assertEqual(response.data["size"], db_task.data.size)
        self.assertEqual(response.data["mode"], db_task.mode)
        owner = db_task.owner.id if db_task.owner else None
        self.assertEqual(response.data["owner"], owner)
        assignee = db_task.assignee.id if db_task.assignee else None
        self.assertEqual(response.data["assignee"], assignee)
        self.assertEqual(response.data["overlap"], db_task.overlap)
        self.assertEqual(response.data["segment_size"], db_task.segment_size)
        self.assertEqual(response.data["z_order"], db_task.z_order)
        self.assertEqual(response.data["image_quality"], db_task.data.image_quality)
        self.assertEqual(response.data["status"], db_task.status)
        self.assertListEqual(
            [label.name for label in db_task.label_set.all()],
            [label["name"] for label in response.data["labels"]]
        )
    def _check_api_v1_tasks_id(self, user):
        # Expected outcome is derived from the per-object permission.
        for db_task in self.tasks:
            response = self._run_api_v1_tasks_id(db_task.id, user)
            if user and user.has_perm("engine.task.access", db_task):
                self._check_response(response, db_task)
            elif user:
                self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
            else:
                self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    def test_api_v1_tasks_id_admin(self):
        self._check_api_v1_tasks_id(self.admin)
    def test_api_v1_tasks_id_user(self):
        self._check_api_v1_tasks_id(self.user)
    def test_api_v1_tasks_id_observer(self):
        self._check_api_v1_tasks_id(self.observer)
    def test_api_v1_tasks_id_no_auth(self):
        self._check_api_v1_tasks_id(None)
class TaskDeleteAPITestCase(APITestCase):
    """DELETE /api/v1/tasks/<id> honors the 'engine.task.delete' permission."""

    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.tasks = create_dummy_db_tasks(cls)

    def setUp(self):
        self.client = APIClient()

    def _run_api_v1_tasks_id(self, tid, user):
        url = '/api/v1/tasks/{}'.format(tid)
        with ForceLogin(user, self.client):
            result = self.client.delete(url, format="json")
        return result

    def _check_api_v1_tasks_id(self, user):
        # Walk every fixture task and match the reply to the permission.
        for task in self.tasks:
            reply = self._run_api_v1_tasks_id(task.id, user)
            if user and user.has_perm("engine.task.delete", task):
                self.assertEqual(reply.status_code, status.HTTP_204_NO_CONTENT)
            elif user:
                self.assertEqual(reply.status_code, status.HTTP_403_FORBIDDEN)
            else:
                self.assertEqual(reply.status_code, status.HTTP_401_UNAUTHORIZED)

    def test_api_v1_tasks_id_admin(self):
        self._check_api_v1_tasks_id(self.admin)

    def test_api_v1_tasks_id_user(self):
        self._check_api_v1_tasks_id(self.user)

    def test_api_v1_tasks_id_observer(self):
        self._check_api_v1_tasks_id(self.observer)

    def test_api_v1_tasks_id_no_auth(self):
        self._check_api_v1_tasks_id(None)
class TaskUpdateAPITestCase(APITestCase):
    """Permission matrix for full updates (PUT) of /api/v1/tasks/<id>."""
    def setUp(self):
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)
        cls.tasks = create_dummy_db_tasks(cls)
    def _run_api_v1_tasks_id(self, tid, user, data):
        """PUT *data* to the task *tid* while logged in as *user*."""
        with ForceLogin(user, self.client):
            response = self.client.put('/api/v1/tasks/{}'.format(tid),
                data=data, format="json")
        return response
    def _check_response(self, response, db_task, data):
        """Assert a 200 reply: updated fields come from *data*, the rest
        must still match the database record."""
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        name = data.get("name", db_task.name)
        self.assertEqual(response.data["name"], name)
        self.assertEqual(response.data["size"], db_task.data.size)
        mode = data.get("mode", db_task.mode)
        self.assertEqual(response.data["mode"], mode)
        owner = db_task.owner.id if db_task.owner else None
        owner = data.get("owner", owner)
        self.assertEqual(response.data["owner"], owner)
        assignee = db_task.assignee.id if db_task.assignee else None
        assignee = data.get("assignee", assignee)
        self.assertEqual(response.data["assignee"], assignee)
        # Overlap and segment size must be untouched by the update.
        self.assertEqual(response.data["overlap"], db_task.overlap)
        self.assertEqual(response.data["segment_size"], db_task.segment_size)
        z_order = data.get("z_order", db_task.z_order)
        self.assertEqual(response.data["z_order"], z_order)
        image_quality = data.get("image_quality", db_task.data.image_quality)
        self.assertEqual(response.data["image_quality"], image_quality)
        self.assertEqual(response.data["status"], db_task.status)
        # Labels are replaced when provided, otherwise left as-is.
        if data.get("labels"):
            self.assertListEqual(
                [label["name"] for label in data.get("labels")],
                [label["name"] for label in response.data["labels"]]
            )
        else:
            self.assertListEqual(
                [label.name for label in db_task.label_set.all()],
                [label["name"] for label in response.data["labels"]]
            )
    def _check_api_v1_tasks_id(self, user, data):
        # Expected outcome is derived from the per-object permission.
        for db_task in self.tasks:
            response = self._run_api_v1_tasks_id(db_task.id, user, data)
            if user and user.has_perm("engine.task.change", db_task):
                self._check_response(response, db_task, data)
            elif user:
                self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
            else:
                self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
    def test_api_v1_tasks_id_admin(self):
        data = {
            "name": "new name for the task",
            "owner": self.owner.id,
            "labels": [{
                "name": "non-vehicle",
                "attributes": [{
                    "name": "my_attribute",
                    "mutable": True,
                    "input_type": AttributeType.CHECKBOX,
                    "default_value": "true"
                }]
            }]
        }
        self._check_api_v1_tasks_id(self.admin, data)
    def test_api_v1_tasks_id_user(self):
        data = {
            "name": "new name for the task",
            "owner": self.assignee.id,
            "labels": [{
                "name": "car",
                "attributes": [{
                    "name": "color",
                    "mutable": False,
                    "input_type": AttributeType.SELECT,
                    "default_value": "white",
                    "values": ["white", "yellow", "green", "red"]
                }]
            }]
        }
        self._check_api_v1_tasks_id(self.user, data)
    def test_api_v1_tasks_id_observer(self):
        data = {
            "name": "new name for the task",
            "labels": [{
                "name": "test",
            }]
        }
        self._check_api_v1_tasks_id(self.observer, data)
    def test_api_v1_tasks_id_no_auth(self):
        data = {
            "name": "new name for the task",
            "labels": [{
                "name": "test",
            }]
        }
        self._check_api_v1_tasks_id(None, data)
class TaskPartialUpdateAPITestCase(TaskUpdateAPITestCase):
    """Re-runs the inherited task update suite over PATCH instead of PUT,
    plus partial-payload specific cases."""
    def _run_api_v1_tasks_id(self, tid, user, data):
        with ForceLogin(user, self.client):
            response = self.client.patch('/api/v1/tasks/{}'.format(tid),
                data=data, format="json")
        return response
    def test_api_v1_tasks_id_admin_partial(self):
        data = {
            "name": "new name for the task #2",
        }
        self._check_api_v1_tasks_id(self.admin, data)
        data = {
            "name": "new name for the task",
            "owner": self.owner.id
        }
        self._check_api_v1_tasks_id(self.admin, data)
    def test_api_v1_tasks_id_user_partial(self):
        # Labels alone can be patched.
        data = {
            "labels": [{
                "name": "car",
                "attributes": [{
                    "name": "color",
                    "mutable": False,
                    "input_type": AttributeType.SELECT,
                    "default_value": "white",
                    "values": ["white", "yellow", "green", "red"]
                }]
            }]
        }
        self._check_api_v1_tasks_id(self.user, data)
        # Ownership fields alone can be patched, too.
        data = {
            "owner": self.observer.id,
            "assignee": self.annotator.id
        }
        self._check_api_v1_tasks_id(self.user, data)
    def test_api_v1_tasks_id_observer(self):
        # Overrides the inherited test with a PATCH-specific payload.
        data = {
            "name": "my task #3"
        }
        self._check_api_v1_tasks_id(self.observer, data)
    def test_api_v1_tasks_id_no_auth(self):
        data = {
            "name": "new name for the task",
            "labels": [{
                "name": "test",
            }]
        }
        self._check_api_v1_tasks_id(None, data)
class TaskCreateAPITestCase(APITestCase):
    def setUp(self):
        # Fresh unauthenticated client for every test.
        self.client = APIClient()
    @classmethod
    def setUpTestData(cls):
        # Shared role fixtures (admin, user, observer, ...) for the class.
        create_db_users(cls)
    def _run_api_v1_tasks(self, user, data):
        """POST *data* to /api/v1/tasks while logged in as *user*."""
        with ForceLogin(user, self.client):
            response = self.client.post('/api/v1/tasks', data=data, format="json")
        return response
def _check_response(self, response, user, data):
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertEqual(response.data["name"], data["name"])
self.assertEqual(response.data["mode"], "")
self.assertEqual(response.data["owner"], data.get("owner", user.id))
self.assertEqual(response.data["assignee"], data.get("assignee"))
self.assertEqual(response.data["bug_tracker"], data.get("bug_tracker", ""))
self.assertEqual(response.data["overlap"], data.get("overlap", None))
self.assertEqual(response.data["segment_size"], data.get("segment_size", 0))
self.assertEqual(response.data["z_order"], data.get("z_order", False))
self.assertEqual(response.data["status"], StatusChoice.ANNOTATION)
self.assertListEqual(
[label["name"] for label in data.get("labels")],
[label["name"] for label in response.data["labels"]]
)
def _check_api_v1_tasks(self, user, data):
response = self._run_api_v1_tasks(user, data)
if user and user.has_perm("engine.task.create"):
self._check_response(response, user, data)
elif user:
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
else:
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_api_v1_tasks_admin(self):
data = {
"name": "new name for the task",
"labels": [{
"name": "non-vehicle",
"attributes": [{
"name": "my_attribute",
"mutable": True,
"input_type": AttributeType.CHECKBOX,
"default_value": "true"
}]
}]
}
self._check_api_v1_tasks(self.admin, data)
def test_api_v1_tasks_user(self):
data = {
"name": "new name for the task",
"owner": self.assignee.id,
"labels": [{
"name": "car",
"attributes": [{
"name": "color",
"mutable": False,
"input_type": AttributeType.SELECT,
"default_value": "white",
"values": ["white", "yellow", "green", "red"]
}]
}]
}
self._check_api_v1_tasks(self.user, data)
def test_api_v1_tasks_observer(self):
data = {
"name": "new name for the task",
"labels": [{
"name": "test",
}]
}
self._check_api_v1_tasks(self.observer, data)
def test_api_v1_tasks_no_auth(self):
data = {
"name": "new name for the task",
"labels": [{
"name": "test",
}]
}
self._check_api_v1_tasks(None, data)
def generate_image_file(filename):
    """Create an in-memory JPEG with random dimensions.

    Returns ((width, height), file-like buffer); the buffer carries a
    ``name`` attribute so it can stand in for an uploaded file.
    """
    width, height = random.randint(100, 800), random.randint(100, 800)
    buf = BytesIO()
    Image.new('RGB', size=(width, height)).save(buf, 'jpeg')
    buf.name = filename
    buf.seek(0)
    return (width, height), buf
def generate_image_files(*args):
    """Generate one in-memory JPEG per given filename.

    Returns (list of (width, height), list of file-like buffers), in the
    same order as the arguments.
    """
    generated = [generate_image_file(name) for name in args]
    image_sizes = [size for size, _ in generated]
    images = [image for _, image in generated]
    return image_sizes, images
def generate_video_file(filename, width=1920, height=1080, duration=1, fps=25):
    """Encode an in-memory MP4 of ``duration`` seconds at ``fps``.

    Frames are a smooth RGB color sweep so the stream compresses quickly.
    Returns ([(width, height)] * total_frames, file-like buffer); the buffer
    carries a ``name`` attribute so it can stand in for an uploaded file.
    """
    f = BytesIO()
    total_frames = duration * fps
    container = av.open(f, mode='w', format='mp4')

    stream = container.add_stream('mpeg4', rate=fps)
    stream.width = width
    stream.height = height
    stream.pix_fmt = 'yuv420p'

    for frame_i in range(total_frames):
        # Bug fix: image ndarrays are (rows, cols, channels) == (height, width, 3).
        # The previous (stream.width, stream.height, 3) allocation produced a
        # transposed frame that did not match the stream geometry.
        img = np.empty((stream.height, stream.width, 3))
        img[:, :, 0] = 0.5 + 0.5 * np.sin(2 * np.pi * (0 / 3 + frame_i / total_frames))
        img[:, :, 1] = 0.5 + 0.5 * np.sin(2 * np.pi * (1 / 3 + frame_i / total_frames))
        img[:, :, 2] = 0.5 + 0.5 * np.sin(2 * np.pi * (2 / 3 + frame_i / total_frames))

        img = np.round(255 * img).astype(np.uint8)
        img = np.clip(img, 0, 255)

        frame = av.VideoFrame.from_ndarray(img, format='rgb24')
        for packet in stream.encode(frame):
            container.mux(packet)

    # Flush any packets still buffered inside the encoder
    for packet in stream.encode():
        container.mux(packet)

    # Close the container so the MP4 trailer is written into the buffer
    container.close()
    f.name = filename
    f.seek(0)
    return [(width, height)] * total_frames, f
def generate_zip_archive_file(filename, count):
    """Build an in-memory ZIP archive containing ``count`` random JPEGs.

    Returns ([(width, height), ...] per member, file-like buffer); the buffer
    carries a ``name`` attribute so it can stand in for an uploaded file.
    """
    image_sizes = []
    zip_buf = BytesIO()
    with zipfile.ZipFile(zip_buf, 'w') as zip_chunk:
        for idx in range(count):
            # Bug fix: "{:6d}" pads with spaces, yielding member names such as
            # "image_     0.jpg". Zero-pad instead; both paddings keep
            # sorted(namelist()) in numeric order, so consumers are unaffected.
            image_name = "image_{:06d}.jpg".format(idx)
            size, image_buf = generate_image_file(image_name)
            image_sizes.append(size)
            zip_chunk.writestr(image_name, image_buf.getvalue())
    zip_buf.name = filename
    zip_buf.seek(0)
    return image_sizes, zip_buf
class TaskDataAPITestCase(APITestCase):
    """Tests for /api/v1/tasks/{id}/data: uploading media (images, video,
    archives) and retrieving previews, chunks and frames."""

    # Maps a shared-storage filename to its (width, height), or to a list of
    # per-frame sizes for multi-frame media. Populated in setUpClass.
    _image_sizes = {}

    class ChunkType(str, Enum):
        IMAGESET = 'imageset'
        VIDEO = 'video'

        def __str__(self):
            return self.value

    def setUp(self):
        self.client = APIClient()

    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)

    @classmethod
    def setUpClass(cls):
        """Populate SHARE_ROOT with the media files the tests reference."""
        super().setUpClass()
        filename = "test_1.jpg"
        path = os.path.join(settings.SHARE_ROOT, filename)
        img_size, data = generate_image_file(filename)
        with open(path, "wb") as image:
            image.write(data.read())
        cls._image_sizes[filename] = img_size

        filename = "test_2.jpg"
        path = os.path.join(settings.SHARE_ROOT, filename)
        img_size, data = generate_image_file(filename)
        with open(path, "wb") as image:
            image.write(data.read())
        cls._image_sizes[filename] = img_size

        filename = "test_3.jpg"
        path = os.path.join(settings.SHARE_ROOT, filename)
        img_size, data = generate_image_file(filename)
        with open(path, "wb") as image:
            image.write(data.read())
        cls._image_sizes[filename] = img_size

        filename = os.path.join("data", "test_3.jpg")
        path = os.path.join(settings.SHARE_ROOT, filename)
        os.makedirs(os.path.dirname(path))
        img_size, data = generate_image_file(filename)
        with open(path, "wb") as image:
            image.write(data.read())
        cls._image_sizes[filename] = img_size

        filename = "test_video_1.mp4"
        path = os.path.join(settings.SHARE_ROOT, filename)
        img_sizes, data = generate_video_file(filename, width=1280, height=720)
        with open(path, "wb") as video:
            video.write(data.read())
        cls._image_sizes[filename] = img_sizes

        filename = os.path.join("videos", "test_video_1.mp4")
        path = os.path.join(settings.SHARE_ROOT, filename)
        os.makedirs(os.path.dirname(path))
        img_sizes, data = generate_video_file(filename, width=1280, height=720)
        with open(path, "wb") as video:
            video.write(data.read())
        cls._image_sizes[filename] = img_sizes

        filename = os.path.join("test_archive_1.zip")
        path = os.path.join(settings.SHARE_ROOT, filename)
        img_sizes, data = generate_zip_archive_file(filename, count=5)
        with open(path, "wb") as zip_archive:
            zip_archive.write(data.read())
        cls._image_sizes[filename] = img_sizes

    @classmethod
    def tearDownClass(cls):
        """Remove every file created in setUpClass."""
        super().tearDownClass()
        path = os.path.join(settings.SHARE_ROOT, "test_1.jpg")
        os.remove(path)

        path = os.path.join(settings.SHARE_ROOT, "test_2.jpg")
        os.remove(path)

        path = os.path.join(settings.SHARE_ROOT, "test_3.jpg")
        os.remove(path)

        path = os.path.join(settings.SHARE_ROOT, "data", "test_3.jpg")
        os.remove(path)

        path = os.path.join(settings.SHARE_ROOT, "test_video_1.mp4")
        os.remove(path)

        path = os.path.join(settings.SHARE_ROOT, "videos", "test_video_1.mp4")
        os.remove(path)

        # Bug fix: test_archive_1.zip is created in setUpClass but was never
        # removed, leaking into SHARE_ROOT between test runs.
        path = os.path.join(settings.SHARE_ROOT, "test_archive_1.zip")
        os.remove(path)

    def _run_api_v1_tasks_id_data_post(self, tid, user, data):
        with ForceLogin(user, self.client):
            response = self.client.post('/api/v1/tasks/{}/data'.format(tid),
                data=data)
        return response

    def _create_task(self, user, data):
        with ForceLogin(user, self.client):
            response = self.client.post('/api/v1/tasks', data=data, format="json")
        return response

    def _get_task(self, user, tid):
        with ForceLogin(user, self.client):
            return self.client.get("/api/v1/tasks/{}".format(tid))

    def _run_api_v1_task_id_data_get(self, tid, user, data_type, data_quality=None, data_number=None):
        # data_type is one of "preview", "chunk", "frame"; quality is
        # "compressed" or "original"; number selects the chunk/frame index.
        url = '/api/v1/tasks/{}/data?type={}'.format(tid, data_type)
        if data_quality is not None:
            url += '&quality={}'.format(data_quality)
        if data_number is not None:
            url += '&number={}'.format(data_number)
        with ForceLogin(user, self.client):
            return self.client.get(url)

    def _get_preview(self, tid, user):
        return self._run_api_v1_task_id_data_get(tid, user, "preview")

    def _get_compressed_chunk(self, tid, user, number):
        return self._run_api_v1_task_id_data_get(tid, user, "chunk", "compressed", number)

    def _get_original_chunk(self, tid, user, number):
        return self._run_api_v1_task_id_data_get(tid, user, "chunk", "original", number)

    def _get_compressed_frame(self, tid, user, number):
        return self._run_api_v1_task_id_data_get(tid, user, "frame", "compressed", number)

    def _get_original_frame(self, tid, user, number):
        return self._run_api_v1_task_id_data_get(tid, user, "frame", "original", number)

    @staticmethod
    def _extract_zip_chunk(chunk_buffer):
        """Return the images of a zip chunk as PIL images, in name order."""
        chunk = zipfile.ZipFile(chunk_buffer, mode='r')
        return [Image.open(BytesIO(chunk.read(f))) for f in sorted(chunk.namelist())]

    @staticmethod
    def _extract_video_chunk(chunk_buffer):
        """Decode a video chunk into a list of PIL images, one per frame."""
        container = av.open(chunk_buffer)
        stream = container.streams.video[0]
        return [f.to_image() for f in container.decode(stream)]

    def _test_api_v1_tasks_id_data_spec(self, user, spec, data, expected_compressed_type, expected_original_type, image_sizes):
        """Create a task from ``spec``, upload ``data`` and verify the preview,
        compressed chunk and original chunk against ``image_sizes``."""
        # create task
        response = self._create_task(user, spec)
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        task_id = response.data["id"]

        # post data for the task
        response = self._run_api_v1_tasks_id_data_post(task_id, user, data)
        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)

        response = self._get_task(user, task_id)

        expected_status_code = status.HTTP_200_OK
        # A plain user may not read a task owned and assigned to others.
        if user == self.user and "owner" in spec and spec["owner"] != user.id and \
           "assignee" in spec and spec["assignee"] != user.id:
            expected_status_code = status.HTTP_403_FORBIDDEN
        self.assertEqual(response.status_code, expected_status_code)

        if expected_status_code == status.HTTP_200_OK:
            task = response.json()
            self.assertEqual(expected_compressed_type, task["data_compressed_chunk_type"])
            self.assertEqual(expected_original_type, task["data_original_chunk_type"])
            self.assertEqual(len(image_sizes), task["size"])

        # check preview
        response = self._get_preview(task_id, user)
        self.assertEqual(response.status_code, expected_status_code)
        if expected_status_code == status.HTTP_200_OK:
            preview = Image.open(io.BytesIO(b"".join(response.streaming_content)))
            self.assertEqual(preview.size, image_sizes[0])

        # check compressed chunk
        response = self._get_compressed_chunk(task_id, user, 0)
        self.assertEqual(response.status_code, expected_status_code)
        if expected_status_code == status.HTTP_200_OK:
            compressed_chunk = io.BytesIO(b"".join(response.streaming_content))
            if task["data_compressed_chunk_type"] == self.ChunkType.IMAGESET:
                images = self._extract_zip_chunk(compressed_chunk)
            else:
                images = self._extract_video_chunk(compressed_chunk)

            self.assertEqual(len(images), min(task["data_chunk_size"], len(image_sizes)))

            for image_idx, image in enumerate(images):
                self.assertEqual(image.size, image_sizes[image_idx])

        # check original chunk
        response = self._get_original_chunk(task_id, user, 0)
        self.assertEqual(response.status_code, expected_status_code)
        if expected_status_code == status.HTTP_200_OK:
            original_chunk = io.BytesIO(b"".join(response.streaming_content))
            if task["data_original_chunk_type"] == self.ChunkType.IMAGESET:
                images = self._extract_zip_chunk(original_chunk)
            else:
                images = self._extract_video_chunk(original_chunk)

            for image_idx, image in enumerate(images):
                self.assertEqual(image.size, image_sizes[image_idx])

            self.assertEqual(len(images), min(task["data_chunk_size"], len(image_sizes)))

            if task["data_original_chunk_type"] == self.ChunkType.IMAGESET:
                # For image sets, the original chunk must be pixel-identical
                # to the uploaded sources.
                server_files = [img for key, img in data.items() if key.startswith("server_files")]
                client_files = [img for key, img in data.items() if key.startswith("client_files")]

                if server_files:
                    source_files = [os.path.join(settings.SHARE_ROOT, f) for f in sorted(server_files)]
                else:
                    source_files = [f for f in sorted(client_files, key=lambda e: e.name)]

                source_images = []
                for f in source_files:
                    if zipfile.is_zipfile(f):
                        source_images.extend(self._extract_zip_chunk(f))
                    else:
                        source_images.append(Image.open(f))

                for img_idx, image in enumerate(images):
                    server_image = np.array(image)
                    source_image = np.array(source_images[img_idx])
                    self.assertTrue(np.array_equal(source_image, server_image))

    def _test_api_v1_tasks_id_data(self, user):
        """Run the spec check across client images, server images, videos
        (whole-task and segmented), zipped video chunks and archives."""
        task_spec = {
            "name": "my task #1",
            "owner": self.owner.id,
            "assignee": self.assignee.id,
            "overlap": 0,
            "segment_size": 100,
            "z_order": False,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }

        image_sizes, images = generate_image_files("test_1.jpg", "test_2.jpg", "test_3.jpg")
        task_data = {
            "client_files[0]": images[0],
            "client_files[1]": images[1],
            "client_files[2]": images[2],
            "image_quality": 75,
        }

        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET, image_sizes)

        task_spec = {
            "name": "my task #2",
            "overlap": 0,
            "segment_size": 0,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }

        task_data = {
            "server_files[0]": "test_1.jpg",
            "server_files[1]": "test_2.jpg",
            "server_files[2]": "test_3.jpg",
            "server_files[3]": os.path.join("data", "test_3.jpg"),
            "image_quality": 75,
        }
        # server files are sorted by name, so "data/test_3.jpg" comes first
        image_sizes = [
            self._image_sizes[task_data["server_files[3]"]],
            self._image_sizes[task_data["server_files[0]"]],
            self._image_sizes[task_data["server_files[1]"]],
            self._image_sizes[task_data["server_files[2]"]],
        ]

        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET, image_sizes)

        task_spec = {
            "name": "my video task #1",
            "overlap": 0,
            "segment_size": 100,
            "z_order": False,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }
        image_sizes, video = generate_video_file(filename="test_video_1.mp4", width=1280, height=720)
        task_data = {
            "client_files[0]": video,
            "image_quality": 43,
        }

        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self.ChunkType.VIDEO, image_sizes)

        task_spec = {
            "name": "my video task #2",
            "overlap": 0,
            "segment_size": 5,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }

        task_data = {
            "server_files[0]": "test_video_1.mp4",
            "image_quality": 57,
        }
        image_sizes = self._image_sizes[task_data["server_files[0]"]]

        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self.ChunkType.VIDEO, image_sizes)

        task_spec = {
            "name": "my video task #3",
            "overlap": 0,
            "segment_size": 0,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }

        task_data = {
            "server_files[0]": os.path.join("videos", "test_video_1.mp4"),
            "image_quality": 57,
        }
        image_sizes = self._image_sizes[task_data["server_files[0]"]]

        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.VIDEO, self.ChunkType.VIDEO, image_sizes)

        task_spec = {
            "name": "my video task #4",
            "overlap": 0,
            "segment_size": 5,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }

        task_data = {
            "server_files[0]": "test_video_1.mp4",
            "image_quality": 12,
            # compressed chunks become image sets, originals stay video
            "use_zip_chunks": True,
        }
        image_sizes = self._image_sizes[task_data["server_files[0]"]]

        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.VIDEO, image_sizes)

        task_spec = {
            "name": "my archive task #6",
            "overlap": 0,
            "segment_size": 0,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }

        task_data = {
            "server_files[0]": "test_archive_1.zip",
            "image_quality": 88,
        }
        image_sizes = self._image_sizes[task_data["server_files[0]"]]

        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET, image_sizes)

        task_spec = {
            "name": "my archive task #7",
            "overlap": 0,
            "segment_size": 0,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }
        image_sizes, archive = generate_zip_archive_file("test_archive_2.zip", 7)
        task_data = {
            "client_files[0]": archive,
            "image_quality": 100,
        }

        self._test_api_v1_tasks_id_data_spec(user, task_spec, task_data, self.ChunkType.IMAGESET, self.ChunkType.IMAGESET, image_sizes)

    def test_api_v1_tasks_id_data_admin(self):
        self._test_api_v1_tasks_id_data(self.admin)

    def test_api_v1_tasks_id_data_owner(self):
        self._test_api_v1_tasks_id_data(self.owner)

    def test_api_v1_tasks_id_data_user(self):
        self._test_api_v1_tasks_id_data(self.user)

    def test_api_v1_tasks_id_data_no_auth(self):
        data = {
            "name": "my task #3",
            "owner": self.owner.id,
            "assignee": self.assignee.id,
            "overlap": 0,
            "segment_size": 100,
            "z_order": False,
            "labels": [
                {"name": "car"},
                {"name": "person"},
            ]
        }
        response = self._create_task(None, data)
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def compare_objects(self, obj1, obj2, ignore_keys, fp_tolerance=.001):
    """Recursively assert that two annotation structures are equal.

    ``self`` is a TestCase instance providing the assert* methods. Keys listed
    in ``ignore_keys`` are skipped at every dict level; floats are compared
    with ``fp_tolerance`` as the allowed absolute difference.

    Bug fix: ``fp_tolerance`` is now propagated into the recursive calls —
    previously it was dropped, so nested floats were always compared with the
    default tolerance regardless of what the caller passed.
    """
    if isinstance(obj1, dict):
        self.assertTrue(isinstance(obj2, dict), "{} != {}".format(obj1, obj2))
        for k in obj1.keys():
            if k in ignore_keys:
                continue
            compare_objects(self, obj1[k], obj2.get(k), ignore_keys, fp_tolerance)
    elif isinstance(obj1, list):
        self.assertTrue(isinstance(obj2, list), "{} != {}".format(obj1, obj2))
        self.assertEqual(len(obj1), len(obj2), "{} != {}".format(obj1, obj2))
        for v1, v2 in zip(obj1, obj2):
            compare_objects(self, v1, v2, ignore_keys, fp_tolerance)
    else:
        if isinstance(obj1, float) or isinstance(obj2, float):
            self.assertAlmostEqual(obj1, obj2, delta=fp_tolerance)
        else:
            self.assertEqual(obj1, obj2)
class JobAnnotationAPITestCase(APITestCase):
    """Tests for /api/v1/jobs/{id}/annotations: PUT, GET, DELETE and PATCH
    (create/update/delete actions), including permission checks and
    server-side default attribute handling."""

    def setUp(self):
        self.client = APIClient()

    @classmethod
    def setUpTestData(cls):
        create_db_users(cls)

    def _create_task(self, owner, assignee):
        """Create a 3-image task with two labels ("car" with a select and a
        checkbox attribute, and a bare "person") and return (task, jobs)."""
        data = {
            "name": "my task #1",
            "owner": owner.id,
            "assignee": assignee.id,
            "overlap": 0,
            "segment_size": 100,
            "z_order": False,
            "labels": [
                {
                    "name": "car",
                    "attributes": [
                        {
                            "name": "model",
                            "mutable": False,
                            "input_type": "select",
                            "default_value": "mazda",
                            "values": ["bmw", "mazda", "renault"]
                        },
                        {
                            "name": "parked",
                            "mutable": True,
                            "input_type": "checkbox",
                            "default_value": False
                        },
                    ]
                },
                {"name": "person"},
            ]
        }

        with ForceLogin(owner, self.client):
            response = self.client.post('/api/v1/tasks', data=data, format="json")
            assert response.status_code == status.HTTP_201_CREATED
            tid = response.data["id"]

            images = {
                "client_files[0]": generate_image_file("test_1.jpg")[1],
                "client_files[1]": generate_image_file("test_2.jpg")[1],
                "client_files[2]": generate_image_file("test_3.jpg")[1],
                "image_quality": 75,
            }
            response = self.client.post("/api/v1/tasks/{}/data".format(tid), data=images)
            assert response.status_code == status.HTTP_202_ACCEPTED

            response = self.client.get("/api/v1/tasks/{}".format(tid))
            task = response.data

            response = self.client.get("/api/v1/tasks/{}/jobs".format(tid))
            jobs = response.data

        return (task, jobs)

    @staticmethod
    def _get_default_attr_values(task):
        """Build {label_id: {"mutable": [...], "immutable": [...], "all": [...]}}
        of default attribute values, as the server is expected to fill them in."""
        default_attr_values = {}
        for label in task["labels"]:
            default_attr_values[label["id"]] = {
                "mutable": [],
                "immutable": [],
                "all": [],
            }
            for attr in label["attributes"]:
                default_value = {
                    "spec_id": attr["id"],
                    "value": attr["default_value"],
                }
                if attr["mutable"]:
                    default_attr_values[label["id"]]["mutable"].append(default_value)
                else:
                    default_attr_values[label["id"]]["immutable"].append(default_value)
                default_attr_values[label["id"]]["all"].append(default_value)
        return default_attr_values

    def _put_api_v1_jobs_id_data(self, jid, user, data):
        with ForceLogin(user, self.client):
            response = self.client.put("/api/v1/jobs/{}/annotations".format(jid),
                data=data, format="json")
        return response

    def _get_api_v1_jobs_id_data(self, jid, user):
        with ForceLogin(user, self.client):
            response = self.client.get("/api/v1/jobs/{}/annotations".format(jid))
        return response

    def _delete_api_v1_jobs_id_data(self, jid, user):
        with ForceLogin(user, self.client):
            response = self.client.delete("/api/v1/jobs/{}/annotations".format(jid),
                format="json")
        return response

    def _patch_api_v1_jobs_id_data(self, jid, user, action, data):
        # action is one of "create", "update", "delete"
        with ForceLogin(user, self.client):
            response = self.client.patch(
                "/api/v1/jobs/{}/annotations?action={}".format(jid, action),
                data=data, format="json")
        return response

    def _check_response(self, response, data):
        # Only compare payloads for requests that were actually authorized.
        if not response.status_code in [
            status.HTTP_403_FORBIDDEN, status.HTTP_401_UNAUTHORIZED]:
            compare_objects(self, data, response.data, ignore_keys=["id"])

    def _run_api_v1_jobs_id_annotations(self, owner, assignee, annotator):
        """Full PUT/GET/DELETE/PATCH annotation round-trip as ``annotator``;
        with annotator=None every call must return 401."""
        task, jobs = self._create_task(owner, assignee)
        if annotator:
            HTTP_200_OK = status.HTTP_200_OK
            HTTP_204_NO_CONTENT = status.HTTP_204_NO_CONTENT
            HTTP_400_BAD_REQUEST = status.HTTP_400_BAD_REQUEST
        else:
            HTTP_200_OK = status.HTTP_401_UNAUTHORIZED
            HTTP_204_NO_CONTENT = status.HTTP_401_UNAUTHORIZED
            HTTP_400_BAD_REQUEST = status.HTTP_401_UNAUTHORIZED

        job = jobs[0]
        # PUT an empty annotation set
        data = {
            "version": 0,
            "tags": [],
            "shapes": [],
            "tracks": []
        }
        response = self._put_api_v1_jobs_id_data(job["id"], annotator, data)
        self.assertEqual(response.status_code, HTTP_200_OK)

        # PUT a full annotation set: one tag, two shapes, two tracks
        data = {
            "version": 1,
            "tags": [
                {
                    "frame": 0,
                    "label_id": task["labels"][0]["id"],
                    "group": None,
                    "attributes": []
                }
            ],
            "shapes": [
                {
                    "frame": 0,
                    "label_id": task["labels"][0]["id"],
                    "group": None,
                    "attributes": [
                        {
                            "spec_id": task["labels"][0]["attributes"][0]["id"],
                            "value": task["labels"][0]["attributes"][0]["values"][0]
                        },
                        {
                            "spec_id": task["labels"][0]["attributes"][1]["id"],
                            "value": task["labels"][0]["attributes"][1]["default_value"]
                        }
                    ],
                    "points": [1.0, 2.1, 100, 300.222],
                    "type": "rectangle",
                    "occluded": False
                },
                {
                    "frame": 1,
                    "label_id": task["labels"][1]["id"],
                    "group": None,
                    "attributes": [],
                    "points": [2.0, 2.1, 100, 300.222, 400, 500, 1, 3],
                    "type": "polygon",
                    "occluded": False
                },
            ],
            "tracks": [
                {
                    "frame": 0,
                    "label_id": task["labels"][0]["id"],
                    "group": None,
                    "attributes": [
                        {
                            "spec_id": task["labels"][0]["attributes"][0]["id"],
                            "value": task["labels"][0]["attributes"][0]["values"][0]
                        },
                    ],
                    "shapes": [
                        {
                            "frame": 0,
                            "points": [1.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": False,
                            "outside": False,
                            "attributes": [
                                {
                                    "spec_id": task["labels"][0]["attributes"][1]["id"],
                                    "value": task["labels"][0]["attributes"][1]["default_value"]
                                },
                            ]
                        },
                        {
                            "frame": 1,
                            "attributes": [],
                            "points": [2.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": True,
                            "outside": True
                        },
                    ]
                },
                {
                    "frame": 1,
                    "label_id": task["labels"][1]["id"],
                    "group": None,
                    "attributes": [],
                    "shapes": [
                        {
                            "frame": 1,
                            "attributes": [],
                            "points": [1.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": False,
                            "outside": False
                        }
                    ]
                },
            ]
        }

        default_attr_values = self._get_default_attr_values(task)
        response = self._put_api_v1_jobs_id_data(job["id"], annotator, data)
        data["version"] += 1 # need to update the version
        self.assertEqual(response.status_code, HTTP_200_OK)
        self._check_response(response, data)

        response = self._get_api_v1_jobs_id_data(job["id"], annotator)
        self.assertEqual(response.status_code, HTTP_200_OK)
        # server should add default attribute values if the PUT data doesn't contain them
        data["tags"][0]["attributes"] = default_attr_values[data["tags"][0]["label_id"]]["all"]
        data["tracks"][0]["shapes"][1]["attributes"] = default_attr_values[data["tracks"][0]["label_id"]]["mutable"]
        self._check_response(response, data)

        # DELETE everything, then GET must return an empty set
        response = self._delete_api_v1_jobs_id_data(job["id"], annotator)
        data["version"] += 1
        self.assertEqual(response.status_code, HTTP_204_NO_CONTENT)

        data = {
            "version": data["version"],
            "tags": [],
            "shapes": [],
            "tracks": []
        }
        response = self._get_api_v1_jobs_id_data(job["id"], annotator)
        self.assertEqual(response.status_code, HTTP_200_OK)
        self._check_response(response, data)

        # PATCH action=create with the same full annotation set
        data = {
            "version": data["version"],
            "tags": [
                {
                    "frame": 0,
                    "label_id": task["labels"][0]["id"],
                    "group": None,
                    "attributes": []
                }
            ],
            "shapes": [
                {
                    "frame": 0,
                    "label_id": task["labels"][0]["id"],
                    "group": None,
                    "attributes": [
                        {
                            "spec_id": task["labels"][0]["attributes"][0]["id"],
                            "value": task["labels"][0]["attributes"][0]["values"][0]
                        },
                        {
                            "spec_id": task["labels"][0]["attributes"][1]["id"],
                            "value": task["labels"][0]["attributes"][1]["default_value"]
                        }
                    ],
                    "points": [1.0, 2.1, 100, 300.222],
                    "type": "rectangle",
                    "occluded": False
                },
                {
                    "frame": 1,
                    "label_id": task["labels"][1]["id"],
                    "group": None,
                    "attributes": [],
                    "points": [2.0, 2.1, 100, 300.222, 400, 500, 1, 3],
                    "type": "polygon",
                    "occluded": False
                },
            ],
            "tracks": [
                {
                    "frame": 0,
                    "label_id": task["labels"][0]["id"],
                    "group": None,
                    "attributes": [
                        {
                            "spec_id": task["labels"][0]["attributes"][0]["id"],
                            "value": task["labels"][0]["attributes"][0]["values"][0]
                        },
                    ],
                    "shapes": [
                        {
                            "frame": 0,
                            "points": [1.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": False,
                            "outside": False,
                            "attributes": [
                                {
                                    "spec_id": task["labels"][0]["attributes"][1]["id"],
                                    "value": task["labels"][0]["attributes"][1]["default_value"]
                                },
                            ]
                        },
                        {
                            "frame": 1,
                            "attributes": [],
                            "points": [2.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": True,
                            "outside": True
                        },
                    ]
                },
                {
                    "frame": 1,
                    "label_id": task["labels"][1]["id"],
                    "group": None,
                    "attributes": [],
                    "shapes": [
                        {
                            "frame": 1,
                            "attributes": [],
                            "points": [1.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": False,
                            "outside": False
                        }
                    ]
                },
            ]
        }
        response = self._patch_api_v1_jobs_id_data(job["id"], annotator,
            "create", data)
        data["version"] += 1
        self.assertEqual(response.status_code, HTTP_200_OK)
        self._check_response(response, data)

        response = self._get_api_v1_jobs_id_data(job["id"], annotator)
        self.assertEqual(response.status_code, HTTP_200_OK)
        data["tags"][0]["attributes"] = default_attr_values[data["tags"][0]["label_id"]]["all"]
        data["tracks"][0]["shapes"][1]["attributes"] = default_attr_values[data["tracks"][0]["label_id"]]["mutable"]
        self._check_response(response, data)

        # PATCH action=update with modified fields (only when authorized,
        # since response.data otherwise has no annotation payload)
        data = response.data
        if not response.status_code in [
            status.HTTP_403_FORBIDDEN, status.HTTP_401_UNAUTHORIZED]:
            data["tags"][0]["label_id"] = task["labels"][0]["id"]
            data["shapes"][0]["points"] = [1, 2, 3.0, 100, 120, 1, 2, 4.0]
            data["shapes"][0]["type"] = "polygon"
            data["tracks"][0]["group"] = 10
            data["tracks"][0]["shapes"][0]["outside"] = False
            data["tracks"][0]["shapes"][0]["occluded"] = False

        response = self._patch_api_v1_jobs_id_data(job["id"], annotator,
            "update", data)
        data["version"] = data.get("version", 0) + 1 # need to update the version
        self.assertEqual(response.status_code, HTTP_200_OK)
        self._check_response(response, data)

        response = self._get_api_v1_jobs_id_data(job["id"], annotator)
        self.assertEqual(response.status_code, HTTP_200_OK)
        self._check_response(response, data)

        # PATCH action=delete, then GET must return an empty set again
        response = self._patch_api_v1_jobs_id_data(job["id"], annotator,
            "delete", data)
        data["version"] += 1 # need to update the version
        self.assertEqual(response.status_code, HTTP_200_OK)
        self._check_response(response, data)

        data = {
            "version": data["version"],
            "tags": [],
            "shapes": [],
            "tracks": []
        }
        response = self._get_api_v1_jobs_id_data(job["id"], annotator)
        self.assertEqual(response.status_code, HTTP_200_OK)
        self._check_response(response, data)

        # PATCH action=create with invalid label_ids/spec_ids must be rejected
        data = {
            "version": data["version"],
            "tags": [
                {
                    "frame": 0,
                    "label_id": 11010101,
                    "group": None,
                    "attributes": []
                }
            ],
            "shapes": [
                {
                    "frame": 0,
                    "label_id": task["labels"][0]["id"],
                    "group": None,
                    "attributes": [
                        {
                            "spec_id": 32234234,
                            "value": task["labels"][0]["attributes"][0]["values"][0]
                        },
                        {
                            "spec_id": task["labels"][0]["attributes"][1]["id"],
                            "value": task["labels"][0]["attributes"][0]["default_value"]
                        }
                    ],
                    "points": [1.0, 2.1, 100, 300.222],
                    "type": "rectangle",
                    "occluded": False
                },
                {
                    "frame": 1,
                    "label_id": 1212121,
                    "group": None,
                    "attributes": [],
                    "points": [2.0, 2.1, 100, 300.222, 400, 500, 1, 3],
                    "type": "polygon",
                    "occluded": False
                },
            ],
            "tracks": [
                {
                    "frame": 0,
                    "label_id": 0,
                    "group": None,
                    "attributes": [],
                    "shapes": [
                        {
                            "frame": 0,
                            "points": [1.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": False,
                            "outside": False,
                            "attributes": [
                                {
                                    "spec_id": 10000,
                                    "value": task["labels"][0]["attributes"][0]["values"][0]
                                },
                                {
                                    "spec_id": task["labels"][0]["attributes"][1]["id"],
                                    "value": task["labels"][0]["attributes"][1]["default_value"]
                                }
                            ]
                        },
                        {
                            "frame": 1,
                            "attributes": [],
                            "points": [2.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": True,
                            "outside": True
                        },
                    ]
                },
                {
                    "frame": 1,
                    "label_id": task["labels"][1]["id"],
                    "group": None,
                    "attributes": [],
                    "shapes": [
                        {
                            "frame": 1,
                            "attributes": [],
                            "points": [1.0, 2.1, 100, 300.222],
                            "type": "rectangle",
                            "occluded": False,
                            "outside": False
                        }
                    ]
                },
            ]
        }
        response = self._patch_api_v1_jobs_id_data(job["id"], annotator,
            "create", data)
        self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST)

    def test_api_v1_jobs_id_annotations_admin(self):
        self._run_api_v1_jobs_id_annotations(self.admin, self.assignee,
            self.assignee)

    def test_api_v1_jobs_id_annotations_user(self):
        self._run_api_v1_jobs_id_annotations(self.user, self.assignee,
            self.assignee)

    def test_api_v1_jobs_id_annotations_observer(self):
        # Observers may read annotations but every write must be forbidden.
        _, jobs = self._create_task(self.user, self.assignee)
        job = jobs[0]
        data = {
            "version": 0,
            "tags": [],
            "shapes": [],
            "tracks": []
        }

        response = self._get_api_v1_jobs_id_data(job["id"], self.observer)
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        response = self._put_api_v1_jobs_id_data(job["id"], self.observer, data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        response = self._patch_api_v1_jobs_id_data(job["id"], self.observer, "create", data)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        response = self._delete_api_v1_jobs_id_data(job["id"], self.observer)
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

    def test_api_v1_jobs_id_annotations_no_auth(self):
        self._run_api_v1_jobs_id_annotations(self.user, self.assignee, None)
class TaskAnnotationAPITestCase(JobAnnotationAPITestCase):
def _put_api_v1_tasks_id_annotations(self, pk, user, data):
with ForceLogin(user, self.client):
response = self.client.put("/api/v1/tasks/{}/annotations".format(pk),
data=data, format="json")
return response
def _get_api_v1_tasks_id_annotations(self, pk, user):
with ForceLogin(user, self.client):
response = self.client.get("/api/v1/tasks/{}/annotations".format(pk))
return response
def _delete_api_v1_tasks_id_annotations(self, pk, user):
with ForceLogin(user, self.client):
response = self.client.delete("/api/v1/tasks/{}/annotations".format(pk),
format="json")
return response
def _dump_api_v1_tasks_id_annotations(self, pk, user, query_params=""):
with ForceLogin(user, self.client):
response = self.client.get(
"/api/v1/tasks/{0}/annotations/my_task_{0}?{1}".format(pk, query_params))
return response
def _patch_api_v1_tasks_id_annotations(self, pk, user, action, data):
with ForceLogin(user, self.client):
response = self.client.patch(
"/api/v1/tasks/{}/annotations?action={}".format(pk, action),
data=data, format="json")
return response
def _upload_api_v1_tasks_id_annotations(self, pk, user, data, query_params=""):
with ForceLogin(user, self.client):
response = self.client.put(
path="/api/v1/tasks/{0}/annotations?{1}".format(pk, query_params),
data=data,
format="multipart",
)
return response
def _get_annotation_formats(self, user):
with ForceLogin(user, self.client):
response = self.client.get(
path="/api/v1/server/annotation/formats"
)
return response
def _check_response(self, response, data):
if not response.status_code in [
status.HTTP_401_UNAUTHORIZED, status.HTTP_403_FORBIDDEN]:
compare_objects(self, data, response.data, ignore_keys=["id"])
def _run_api_v1_tasks_id_annotations(self, owner, assignee, annotator):
# End-to-end CRUD scenario for /api/v1/tasks/<id>/annotations, executed as
# ``annotator``. When ``annotator`` is None the requests are anonymous, so
# every expected success status collapses to 401 UNAUTHORIZED below.
task, _ = self._create_task(owner, assignee)
if annotator:
HTTP_200_OK = status.HTTP_200_OK
HTTP_204_NO_CONTENT = status.HTTP_204_NO_CONTENT
HTTP_400_BAD_REQUEST = status.HTTP_400_BAD_REQUEST
else:
HTTP_200_OK = status.HTTP_401_UNAUTHORIZED
HTTP_204_NO_CONTENT = status.HTTP_401_UNAUTHORIZED
HTTP_400_BAD_REQUEST = status.HTTP_401_UNAUTHORIZED
# Step 1: PUT an empty annotation set. Every successful write is expected
# to bump the server-side version, mirrored in data["version"] locally.
data = {
"version": 0,
"tags": [],
"shapes": [],
"tracks": []
}
response = self._put_api_v1_tasks_id_annotations(task["id"], annotator, data)
data["version"] += 1
self.assertEqual(response.status_code, HTTP_200_OK)
# Step 2: PUT a full annotation set (tags, shapes and tracks, with and
# without attributes) and verify the round trip.
data = {
"version": data["version"],
"tags": [
{
"frame": 0,
"label_id": task["labels"][0]["id"],
"group": None,
"attributes": []
}
],
"shapes": [
{
"frame": 0,
"label_id": task["labels"][0]["id"],
"group": None,
"attributes": [
{
"spec_id": task["labels"][0]["attributes"][0]["id"],
"value": task["labels"][0]["attributes"][0]["values"][0]
},
{
"spec_id": task["labels"][0]["attributes"][1]["id"],
"value": task["labels"][0]["attributes"][0]["default_value"]
}
],
"points": [1.0, 2.1, 100, 300.222],
"type": "rectangle",
"occluded": False
},
{
"frame": 1,
"label_id": task["labels"][1]["id"],
"group": None,
"attributes": [],
"points": [2.0, 2.1, 100, 300.222, 400, 500, 1, 3],
"type": "polygon",
"occluded": False
},
],
"tracks": [
{
"frame": 0,
"label_id": task["labels"][0]["id"],
"group": None,
"attributes": [
{
"spec_id": task["labels"][0]["attributes"][0]["id"],
"value": task["labels"][0]["attributes"][0]["values"][0]
},
],
"shapes": [
{
"frame": 0,
"points": [1.0, 2.1, 100, 300.222],
"type": "rectangle",
"occluded": False,
"outside": False,
"attributes": [
{
"spec_id": task["labels"][0]["attributes"][1]["id"],
"value": task["labels"][0]["attributes"][1]["default_value"]
}
]
},
{
"frame": 1,
"attributes": [],
"points": [2.0, 2.1, 100, 300.222],
"type": "rectangle",
"occluded": True,
"outside": True
},
]
},
{
"frame": 1,
"label_id": task["labels"][1]["id"],
"group": None,
"attributes": [],
"shapes": [
{
"frame": 1,
"attributes": [],
"points": [1.0, 2.1, 100, 300.222],
"type": "rectangle",
"occluded": False,
"outside": False
}
]
},
]
}
response = self._put_api_v1_tasks_id_annotations(task["id"], annotator, data)
data["version"] += 1
self.assertEqual(response.status_code, HTTP_200_OK)
self._check_response(response, data)
default_attr_values = self._get_default_attr_values(task)
response = self._get_api_v1_tasks_id_annotations(task["id"], annotator)
# the server should add default attribute values if the PUT data doesn't contain them
data["tags"][0]["attributes"] = default_attr_values[data["tags"][0]["label_id"]]["all"]
data["tracks"][0]["shapes"][1]["attributes"] = default_attr_values[data["tracks"][0]["label_id"]]["mutable"]
self.assertEqual(response.status_code, HTTP_200_OK)
self._check_response(response, data)
# Step 3: DELETE wipes all annotations (204) and also bumps the version.
response = self._delete_api_v1_tasks_id_annotations(task["id"], annotator)
data["version"] += 1
self.assertEqual(response.status_code, HTTP_204_NO_CONTENT)
data = {
"version": data["version"],
"tags": [],
"shapes": [],
"tracks": []
}
response = self._get_api_v1_tasks_id_annotations(task["id"], annotator)
self.assertEqual(response.status_code, HTTP_200_OK)
self._check_response(response, data)
# Step 4: PATCH with action=create re-creates the same annotation set.
data = {
"version": data["version"],
"tags": [
{
"frame": 0,
"label_id": task["labels"][0]["id"],
"group": None,
"attributes": []
}
],
"shapes": [
{
"frame": 0,
"label_id": task["labels"][0]["id"],
"group": None,
"attributes": [
{
"spec_id": task["labels"][0]["attributes"][0]["id"],
"value": task["labels"][0]["attributes"][0]["values"][0]
},
{
"spec_id": task["labels"][0]["attributes"][1]["id"],
"value": task["labels"][0]["attributes"][0]["default_value"]
}
],
"points": [1.0, 2.1, 100, 300.222],
"type": "rectangle",
"occluded": False
},
{
"frame": 1,
"label_id": task["labels"][1]["id"],
"group": None,
"attributes": [],
"points": [2.0, 2.1, 100, 300.222, 400, 500, 1, 3],
"type": "polygon",
"occluded": False
},
],
"tracks": [
{
"frame": 0,
"label_id": task["labels"][0]["id"],
"group": None,
"attributes": [
{
"spec_id": task["labels"][0]["attributes"][0]["id"],
"value": task["labels"][0]["attributes"][0]["values"][0]
},
],
"shapes": [
{
"frame": 0,
"points": [1.0, 2.1, 100, 300.222],
"type": "rectangle",
"occluded": False,
"outside": False,
"attributes": [
{
"spec_id": task["labels"][0]["attributes"][1]["id"],
"value": task["labels"][0]["attributes"][1]["default_value"]
}
]
},
{
"frame": 1,
"attributes": [],
"points": [2.0, 2.1, 100, 300.222],
"type": "rectangle",
"occluded": True,
"outside": True
},
]
},
{
"frame": 1,
"label_id": task["labels"][1]["id"],
"group": None,
"attributes": [],
"shapes": [
{
"frame": 1,
"attributes": [],
"points": [1.0, 2.1, 100, 300.222],
"type": "rectangle",
"occluded": False,
"outside": False
}
]
},
]
}
response = self._patch_api_v1_tasks_id_annotations(task["id"], annotator,
"create", data)
data["version"] += 1
self.assertEqual(response.status_code, HTTP_200_OK)
self._check_response(response, data)
response = self._get_api_v1_tasks_id_annotations(task["id"], annotator)
data["tags"][0]["attributes"] = default_attr_values[data["tags"][0]["label_id"]]["all"]
data["tracks"][0]["shapes"][1]["attributes"] = default_attr_values[data["tracks"][0]["label_id"]]["mutable"]
self.assertEqual(response.status_code, HTTP_200_OK)
self._check_response(response, data)
# Step 5: PATCH with action=update; the server copy returned above is
# mutated locally (only when the GET actually succeeded) and re-sent.
data = response.data
if not response.status_code in [
status.HTTP_403_FORBIDDEN, status.HTTP_401_UNAUTHORIZED]:
data["tags"][0]["label_id"] = task["labels"][0]["id"]
data["shapes"][0]["points"] = [1, 2, 3.0, 100, 120, 1, 2, 4.0]
data["shapes"][0]["type"] = "polygon"
data["tracks"][0]["group"] = 10
data["tracks"][0]["shapes"][0]["outside"] = False
data["tracks"][0]["shapes"][0]["occluded"] = False
response = self._patch_api_v1_tasks_id_annotations(task["id"], annotator,
"update", data)
# In the unauthenticated case ``data`` is the error body, hence .get().
data["version"] = data.get("version", 0) + 1
self.assertEqual(response.status_code, HTTP_200_OK)
self._check_response(response, data)
response = self._get_api_v1_tasks_id_annotations(task["id"], annotator)
self.assertEqual(response.status_code, HTTP_200_OK)
self._check_response(response, data)
# Step 6: PATCH with action=delete removes the listed objects again.
response = self._patch_api_v1_tasks_id_annotations(task["id"], annotator,
"delete", data)
data["version"] += 1
self.assertEqual(response.status_code, HTTP_200_OK)
self._check_response(response, data)
data = {
"version": data["version"],
"tags": [],
"shapes": [],
"tracks": []
}
response = self._get_api_v1_tasks_id_annotations(task["id"], annotator)
self.assertEqual(response.status_code, HTTP_200_OK)
self._check_response(response, data)
# Step 7: PATCH action=create with bogus label ids / spec ids must be
# rejected with 400 BAD REQUEST.
data = {
"version": data["version"],
"tags": [
{
"frame": 0,
"label_id": 11010101,
"group": None,
"attributes": []
}
],
"shapes": [
{
"frame": 0,
"label_id": task["labels"][0]["id"],
"group": None,
"attributes": [
{
"spec_id": 32234234,
"value": task["labels"][0]["attributes"][0]["values"][0]
},
{
"spec_id": task["labels"][0]["attributes"][1]["id"],
"value": task["labels"][0]["attributes"][0]["default_value"]
}
],
"points": [1.0, 2.1, 100, 300.222],
"type": "rectangle",
"occluded": False
},
{
"frame": 1,
"label_id": 1212121,
"group": None,
"attributes": [],
"points": [2.0, 2.1, 100, 300.222, 400, 500, 1, 3],
"type": "polygon",
"occluded": False
},
],
"tracks": [
{
"frame": 0,
"label_id": 0,
"group": None,
"attributes": [],
"shapes": [
{
"frame": 0,
"points": [1.0, 2.1, 100, 300.222],
"type": "rectangle",
"occluded": False,
"outside": False,
"attributes": [
{
"spec_id": 10000,
"value": task["labels"][0]["attributes"][0]["values"][0]
},
{
"spec_id": task["labels"][0]["attributes"][1]["id"],
"value": task["labels"][0]["attributes"][0]["default_value"]
}
]
},
{
"frame": 1,
"attributes": [],
"points": [2.0, 2.1, 100, 300.222],
"type": "rectangle",
"occluded": True,
"outside": True
},
]
},
{
"frame": 1,
"label_id": task["labels"][1]["id"],
"group": None,
"attributes": [],
"shapes": [
{
"frame": 1,
"attributes": [],
"points": [1.0, 2.1, 100, 300.222],
"type": "rectangle",
"occluded": False,
"outside": False
}
]
},
]
}
response = self._patch_api_v1_tasks_id_annotations(task["id"], annotator,
"create", data)
self.assertEqual(response.status_code, HTTP_400_BAD_REQUEST)
def _run_api_v1_tasks_id_annotations_dump_load(self, owner, assignee, annotator):
# Dump/load round trip: for every supported dumper, create a task, write a
# format-appropriate annotation set, download the dump, delete, re-upload,
# and check the annotations survived. Anonymous annotator -> all 401s.
if annotator:
HTTP_200_OK = status.HTTP_200_OK
HTTP_204_NO_CONTENT = status.HTTP_204_NO_CONTENT
HTTP_202_ACCEPTED = status.HTTP_202_ACCEPTED
HTTP_201_CREATED = status.HTTP_201_CREATED
else:
HTTP_200_OK = status.HTTP_401_UNAUTHORIZED
HTTP_204_NO_CONTENT = status.HTTP_401_UNAUTHORIZED
HTTP_202_ACCEPTED = status.HTTP_401_UNAUTHORIZED
HTTP_201_CREATED = status.HTTP_401_UNAUTHORIZED
def _get_initial_annotation(annotation_format):
# Build an annotation payload containing only the primitives the named
# dump format can represent (e.g. YOLO: rectangles without attributes).
# NOTE(review): ``task`` is read from the enclosing scope and is bound
# later, inside the dumper loop, before this closure is first called.
rectangle_tracks_with_attrs = [{
"frame": 0,
"label_id": task["labels"][0]["id"],
"group": 0,
"attributes": [
{
"spec_id": task["labels"][0]["attributes"][0]["id"],
"value": task["labels"][0]["attributes"][0]["values"][0]
},
],
"shapes": [
{
"frame": 0,
"points": [1.0, 2.1, 50.1, 30.22],
"type": "rectangle",
"occluded": False,
"outside": False,
"attributes": [
{
"spec_id": task["labels"][0]["attributes"][1]["id"],
"value": task["labels"][0]["attributes"][1]["default_value"]
}
]
},
{
"frame": 1,
"points": [2.0, 2.1, 77.2, 36.22],
"type": "rectangle",
"occluded": True,
"outside": True,
"attributes": [
{
"spec_id": task["labels"][0]["attributes"][1]["id"],
"value": task["labels"][0]["attributes"][1]["default_value"]
}
]
},
]
}]
rectangle_tracks_wo_attrs = [{
"frame": 1,
"label_id": task["labels"][1]["id"],
"group": 0,
"attributes": [],
"shapes": [
{
"frame": 1,
"attributes": [],
"points": [1.0, 2.1, 50.2, 36.6],
"type": "rectangle",
"occluded": False,
"outside": False
},
{
"frame": 2,
"attributes": [],
"points": [1.0, 2.1, 51, 36.6],
"type": "rectangle",
"occluded": False,
"outside": True
}
]
}]
rectangle_shapes_with_attrs = [{
"frame": 0,
"label_id": task["labels"][0]["id"],
"group": 0,
"attributes": [
{
"spec_id": task["labels"][0]["attributes"][0]["id"],
"value": task["labels"][0]["attributes"][0]["values"][0]
},
{
"spec_id": task["labels"][0]["attributes"][1]["id"],
"value": task["labels"][0]["attributes"][1]["default_value"]
}
],
"points": [1.0, 2.1, 10.6, 53.22],
"type": "rectangle",
"occluded": False
}]
rectangle_shapes_wo_attrs = [{
"frame": 1,
"label_id": task["labels"][1]["id"],
"group": 0,
"attributes": [],
"points": [2.0, 2.1, 40, 50.7],
"type": "rectangle",
"occluded": False
}]
polygon_shapes_wo_attrs = [{
"frame": 1,
"label_id": task["labels"][1]["id"],
"group": 0,
"attributes": [],
"points": [2.0, 2.1, 100, 30.22, 40, 77, 1, 3],
"type": "polygon",
"occluded": False
}]
polygon_shapes_with_attrs = [{
"frame": 2,
"label_id": task["labels"][0]["id"],
"group": 1,
"attributes": [
{
"spec_id": task["labels"][0]["attributes"][0]["id"],
"value": task["labels"][0]["attributes"][0]["values"][1]
},
{
"spec_id": task["labels"][0]["attributes"][1]["id"],
"value": task["labels"][0]["attributes"][1]["default_value"]
}
],
"points": [20.0, 0.1, 10, 3.22, 4, 7, 10, 30, 1, 2, 4.44, 5.55],
"type": "polygon",
"occluded": True
},
{
"frame": 2,
"label_id": task["labels"][1]["id"],
"group": 1,
"attributes": [],
"points": [4, 7, 10, 30, 4, 5.55],
"type": "polygon",
"occluded": False
}]
tags_wo_attrs = [{
"frame": 2,
"label_id": task["labels"][1]["id"],
"group": 0,
"attributes": []
}]
tags_with_attrs = [{
"frame": 1,
"label_id": task["labels"][0]["id"],
"group": 3,
"attributes": [
{
"spec_id": task["labels"][0]["attributes"][0]["id"],
"value": task["labels"][0]["attributes"][0]["values"][1]
},
{
"spec_id": task["labels"][0]["attributes"][1]["id"],
"value": task["labels"][0]["attributes"][1]["default_value"]
}
],
}]
annotations = {
"version": 0,
"tags": [],
"shapes": [],
"tracks": [],
}
if annotation_format == "CVAT XML 1.1 for videos":
annotations["tracks"] = rectangle_tracks_with_attrs + rectangle_tracks_wo_attrs
elif annotation_format == "CVAT XML 1.1 for images":
annotations["shapes"] = rectangle_shapes_with_attrs + rectangle_shapes_wo_attrs \
+ polygon_shapes_wo_attrs + polygon_shapes_with_attrs
annotations["tags"] = tags_with_attrs + tags_wo_attrs
elif annotation_format == "PASCAL VOC ZIP 1.1":
annotations["shapes"] = rectangle_shapes_wo_attrs
annotations["tags"] = tags_wo_attrs
elif annotation_format == "YOLO ZIP 1.1" or \
annotation_format == "TFRecord ZIP 1.0":
annotations["shapes"] = rectangle_shapes_wo_attrs
elif annotation_format == "COCO JSON 1.0":
annotations["shapes"] = polygon_shapes_wo_attrs
elif annotation_format == "MASK ZIP 1.1":
annotations["shapes"] = rectangle_shapes_wo_attrs + polygon_shapes_wo_attrs
annotations["tracks"] = rectangle_tracks_wo_attrs
elif annotation_format == "MOT CSV 1.0":
annotations["tracks"] = rectangle_tracks_wo_attrs
elif annotation_format == "LabelMe ZIP 3.0 for images":
annotations["shapes"] = rectangle_shapes_with_attrs + \
rectangle_shapes_wo_attrs + \
polygon_shapes_wo_attrs + \
polygon_shapes_with_attrs
return annotations
response = self._get_annotation_formats(annotator)
self.assertEqual(response.status_code, HTTP_200_OK)
if annotator is not None:
supported_formats = response.data
else:
# Anonymous request cannot list formats; use a minimal stand-in so the
# loop below still exercises the 401 path once.
supported_formats = [{
"name": "CVAT",
"dumpers": [{
"display_name": "CVAT XML 1.1 for images"
}],
"loaders": [{
"display_name": "CVAT XML 1.1"
}]
}]
self.assertTrue(isinstance(supported_formats, list) and supported_formats)
for annotation_format in supported_formats:
for dumper in annotation_format["dumpers"]:
# 1. create task
task, jobs = self._create_task(owner, assignee)
# 2. add annotation
data = _get_initial_annotation(dumper["display_name"])
response = self._put_api_v1_tasks_id_annotations(task["id"], annotator, data)
data["version"] += 1
self.assertEqual(response.status_code, HTTP_200_OK)
self._check_response(response, data)
# 3. download annotation
# The dump is asynchronous: first request queues it (202), second
# reports completion (201), and ?action=download streams it (200).
response = self._dump_api_v1_tasks_id_annotations(task["id"], annotator,
"format={}".format(dumper["display_name"]))
self.assertEqual(response.status_code, HTTP_202_ACCEPTED)
response = self._dump_api_v1_tasks_id_annotations(task["id"], annotator,
"format={}".format(dumper["display_name"]))
self.assertEqual(response.status_code, HTTP_201_CREATED)
response = self._dump_api_v1_tasks_id_annotations(task["id"], annotator,
"action=download&format={}".format(dumper["display_name"]))
self.assertEqual(response.status_code, HTTP_200_OK)
# 4. check downloaded data
if response.status_code == status.HTTP_200_OK:
self.assertTrue(response.streaming)
content = io.BytesIO(b"".join(response.streaming_content))
self._check_dump_content(content, task, jobs, data, annotation_format["name"])
content.seek(0)
# 5. remove annotation from the task
response = self._delete_api_v1_tasks_id_annotations(task["id"], annotator)
data["version"] += 1
self.assertEqual(response.status_code, HTTP_204_NO_CONTENT)
# 6. upload annotation and check annotation
uploaded_data = {
"annotation_file": content,
}
for loader in annotation_format["loaders"]:
if loader["display_name"] == "MASK ZIP 1.1":
continue # can't really predict the result and check
response = self._upload_api_v1_tasks_id_annotations(task["id"], annotator, uploaded_data, "format={}".format(loader["display_name"]))
self.assertEqual(response.status_code, HTTP_202_ACCEPTED)
response = self._upload_api_v1_tasks_id_annotations(task["id"], annotator, {}, "format={}".format(loader["display_name"]))
self.assertEqual(response.status_code, HTTP_201_CREATED)
response = self._get_api_v1_tasks_id_annotations(task["id"], annotator)
self.assertEqual(response.status_code, HTTP_200_OK)
# Upload performs two version bumps (clear + import).
data["version"] += 2
self._check_response(response, data)
def _check_dump_content(self, content, task, jobs, data, annotation_format_name):
# Sanity-check a downloaded dump. Only the CVAT XML dump is inspected in
# detail; the zip-based formats are merely verified to be valid archives.
def etree_to_dict(t):
# Recursively convert an ElementTree node into nested dicts; XML
# attributes become '@'-prefixed keys, repeated children become lists.
d = {t.tag: {} if t.attrib else None}
children = list(t)
if children:
dd = defaultdict(list)
for dc in map(etree_to_dict, children):
for k, v in dc.items():
dd[k].append(v)
d = {t.tag: {k: v[0] if len(v) == 1 else v
for k, v in dd.items()}}
if t.attrib:
d[t.tag].update(('@' + k, v) for k, v in t.attrib.items())
if t.text:
text = t.text.strip()
if not (children or t.attrib):
d[t.tag] = text
return d
if annotation_format_name == "CVAT":
xmldump = ET.fromstring(content.read())
self.assertEqual(xmldump.tag, "annotations")
tags = xmldump.findall("./meta")
self.assertEqual(len(tags), 1)
meta = etree_to_dict(tags[0])["meta"]
self.assertEqual(meta["task"]["name"], task["name"])
elif annotation_format_name == "PASCAL VOC":
self.assertTrue(zipfile.is_zipfile(content))
elif annotation_format_name == "YOLO":
self.assertTrue(zipfile.is_zipfile(content))
elif annotation_format_name == "COCO":
# COCO's loader wants a real file path, so spill the buffer to disk.
with tempfile.NamedTemporaryFile() as tmp_file:
tmp_file.write(content.read())
tmp_file.flush()
coco = coco_loader.COCO(tmp_file.name)
self.assertTrue(coco.getAnnIds())
elif annotation_format_name == "TFRecord":
self.assertTrue(zipfile.is_zipfile(content))
elif annotation_format_name == "MASK":
self.assertTrue(zipfile.is_zipfile(content))
def _run_coco_annotation_upload_test(self, user):
# Upload a hand-written minimal COCO JSON and verify the two-step upload
# protocol (202 queued, then 201 created) followed by a readable GET.
def generate_coco_anno():
# Minimal COCO payload: two categories, one image, one bbox annotation.
return b"""{
"categories": [
{
"id": 1,
"name": "car",
"supercategory": ""
},
{
"id": 2,
"name": "person",
"supercategory": ""
}
],
"images": [
{
"coco_url": "",
"date_captured": "",
"flickr_url": "",
"license": 0,
"id": 0,
"file_name": "test_1.jpg",
"height": 720,
"width": 1280
}
],
"annotations": [
{
"category_id": 1,
"id": 1,
"image_id": 0,
"iscrowd": 0,
"segmentation": [
[]
],
"area": 17702.0,
"bbox": [
574.0,
407.0,
167.0,
106.0
]
}
]
}"""
response = self._get_annotation_formats(user)
self.assertEqual(response.status_code, status.HTTP_200_OK)
supported_formats = response.data
self.assertTrue(isinstance(supported_formats, list) and supported_formats)
coco_format = None
for f in response.data:
if f["name"] == "COCO":
coco_format = f
break
self.assertTrue(coco_format)
loader = coco_format["loaders"][0]
task, _ = self._create_task(user, user)
content = io.BytesIO(generate_coco_anno())
content.seek(0)
uploaded_data = {
"annotation_file": content,
}
response = self._upload_api_v1_tasks_id_annotations(task["id"], user, uploaded_data, "format={}".format(loader["display_name"]))
self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
response = self._upload_api_v1_tasks_id_annotations(task["id"], user, {}, "format={}".format(loader["display_name"]))
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
response = self._get_api_v1_tasks_id_annotations(task["id"], user)
self.assertEqual(response.status_code, status.HTTP_200_OK)
# Test entry points: run the CRUD and dump/load scenarios for each role.
# Passing None as the annotator exercises the unauthenticated (401) paths.
def test_api_v1_tasks_id_annotations_admin(self):
self._run_api_v1_tasks_id_annotations(self.admin, self.assignee,
self.assignee)
def test_api_v1_tasks_id_annotations_user(self):
self._run_api_v1_tasks_id_annotations(self.user, self.assignee,
self.assignee)
def test_api_v1_tasks_id_annotations_no_auth(self):
self._run_api_v1_tasks_id_annotations(self.user, self.assignee, None)
def test_api_v1_tasks_id_annotations_dump_load_admin(self):
self._run_api_v1_tasks_id_annotations_dump_load(self.admin, self.assignee,
self.assignee)
def test_api_v1_tasks_id_annotations_dump_load_user(self):
self._run_api_v1_tasks_id_annotations_dump_load(self.user, self.assignee,
self.assignee)
def test_api_v1_tasks_id_annotations_dump_load_no_auth(self):
self._run_api_v1_tasks_id_annotations_dump_load(self.user, self.assignee, None)
def test_api_v1_tasks_id_annotations_upload_coco_user(self):
self._run_coco_annotation_upload_test(self.user)
# Tests for the /api/v1/server/share directory-browsing endpoint.
class ServerShareAPITestCase(APITestCase):
def setUp(self):
self.client = APIClient()
@classmethod
def setUpTestData(cls):
# Populate self.admin/self.owner/self.assignee/... used by the tests.
create_db_users(cls)
@classmethod
def setUpClass(cls):
    """Create the share-root fixture tree.

    Layout under ``settings.SHARE_ROOT``::

        file0.txt
        test1/file1.txt
        test1/test3/          (empty directory)
        test2/file2.txt
    """
    super().setUpClass()

    def write_file(path):
        # Context manager guarantees the handle is flushed and closed;
        # the previous ``open(path, "w").write(...)`` leaked the file
        # object and relied on GC to close it.
        with open(path, "w") as f:
            f.write("test string")

    write_file(os.path.join(settings.SHARE_ROOT, "file0.txt"))

    path = os.path.join(settings.SHARE_ROOT, "test1")
    os.makedirs(path)
    write_file(os.path.join(path, "file1.txt"))

    os.makedirs(os.path.join(settings.SHARE_ROOT, "test1", "test3"))

    path = os.path.join(settings.SHARE_ROOT, "test2")
    os.makedirs(path)
    write_file(os.path.join(path, "file2.txt"))
@classmethod
def tearDownClass(cls):
    """Remove the fixture tree created in :meth:`setUpClass`."""
    super().tearDownClass()
    os.remove(os.path.join(settings.SHARE_ROOT, "file0.txt"))
    for name in ("test1", "test2"):
        shutil.rmtree(os.path.join(settings.SHARE_ROOT, name))
def _run_api_v1_server_share(self, user, directory):
    """GET the share listing of ``directory`` while logged in as ``user``."""
    target = '/api/v1/server/share?directory={}'.format(directory)
    with ForceLogin(user, self.client):
        result = self.client.get(target)
    return result
def _test_api_v1_server_share(self, user):
    """Exercise share browsing for ``user`` over the fixture tree.

    Each known directory must list exactly the expected entries, and an
    unknown path must be rejected with 400.
    """
    def check(directory, expected):
        # One round trip: list ``directory`` and compare entries sorted by
        # name, so the server's ordering is irrelevant. (Replaces four
        # copy-pasted request/compare stanzas.)
        response = self._run_api_v1_server_share(user, directory)
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        compare_objects(
            self=self,
            obj1=sorted(expected, key=lambda d: d["name"]),
            obj2=sorted(response.data, key=lambda d: d["name"]),
            ignore_keys=[]
        )

    check("/", [
        {"name": "test1", "type": "DIR"},
        {"name": "test2", "type": "DIR"},
        {"name": "file0.txt", "type": "REG"},
    ])
    check("/test1", [
        {"name": "file1.txt", "type": "REG"},
        {"name": "test3", "type": "DIR"},
    ])
    check("/test1/test3", [])  # empty directory
    check("/test2", [
        {"name": "file2.txt", "type": "REG"},
    ])

    # A directory that does not exist on the share is a client error.
    response = self._run_api_v1_server_share(user, "/test4")
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
# Entry points: the share endpoint behaves identically for every
# authenticated role, and rejects anonymous access with 401.
def test_api_v1_server_share_admin(self):
self._test_api_v1_server_share(self.admin)
def test_api_v1_server_share_owner(self):
self._test_api_v1_server_share(self.owner)
def test_api_v1_server_share_assignee(self):
self._test_api_v1_server_share(self.assignee)
def test_api_v1_server_share_user(self):
self._test_api_v1_server_share(self.user)
def test_api_v1_server_share_annotator(self):
self._test_api_v1_server_share(self.annotator)
def test_api_v1_server_share_observer(self):
self._test_api_v1_server_share(self.observer)
def test_api_v1_server_share_no_auth(self):
response = self._run_api_v1_server_share(None, "/")
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
| true | true |
1c2f7ce2cbbf216deda2ff16d6295d70fd720a2c | 1,589 | py | Python | nipype/interfaces/semtools/brains/tests/test_auto_BRAINSPosteriorToContinuousClass.py | mfalkiewicz/nipype | 775e21b78fb1ffa2ff9cb12e6f052868bd44d052 | [
"Apache-2.0"
] | 1 | 2015-01-19T13:12:27.000Z | 2015-01-19T13:12:27.000Z | nipype/interfaces/semtools/brains/tests/test_auto_BRAINSPosteriorToContinuousClass.py | bpinsard/nipype | 373bdddba9f675ef153951afa368729e2d8950d2 | [
"Apache-2.0"
] | null | null | null | nipype/interfaces/semtools/brains/tests/test_auto_BRAINSPosteriorToContinuousClass.py | bpinsard/nipype | 373bdddba9f675ef153951afa368729e2d8950d2 | [
"Apache-2.0"
] | null | null | null | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from __future__ import unicode_literals
from ..classify import BRAINSPosteriorToContinuousClass
# Auto-generated spec test (see the file's AUTO-GENERATED header): asserts
# every declared input trait metadata value on the interface's input_spec.
def test_BRAINSPosteriorToContinuousClass_inputs():
input_map = dict(args=dict(argstr='%s',
),
environ=dict(nohash=True,
usedefault=True,
),
ignore_exception=dict(deprecated='1.0.0',
nohash=True,
usedefault=True,
),
inputBasalGmVolume=dict(argstr='--inputBasalGmVolume %s',
),
inputCrblGmVolume=dict(argstr='--inputCrblGmVolume %s',
),
inputCrblWmVolume=dict(argstr='--inputCrblWmVolume %s',
),
inputCsfVolume=dict(argstr='--inputCsfVolume %s',
),
inputSurfaceGmVolume=dict(argstr='--inputSurfaceGmVolume %s',
),
inputVbVolume=dict(argstr='--inputVbVolume %s',
),
inputWhiteVolume=dict(argstr='--inputWhiteVolume %s',
),
outputVolume=dict(argstr='--outputVolume %s',
hash_files=False,
),
terminal_output=dict(deprecated='1.0.0',
nohash=True,
),
)
inputs = BRAINSPosteriorToContinuousClass.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
# Auto-generated companion check for the interface's output_spec traits.
def test_BRAINSPosteriorToContinuousClass_outputs():
output_map = dict(outputVolume=dict(),
)
outputs = BRAINSPosteriorToContinuousClass.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
| 30.557692 | 67 | 0.684078 |
from __future__ import unicode_literals
from ..classify import BRAINSPosteriorToContinuousClass
def test_BRAINSPosteriorToContinuousClass_inputs():
input_map = dict(args=dict(argstr='%s',
),
environ=dict(nohash=True,
usedefault=True,
),
ignore_exception=dict(deprecated='1.0.0',
nohash=True,
usedefault=True,
),
inputBasalGmVolume=dict(argstr='--inputBasalGmVolume %s',
),
inputCrblGmVolume=dict(argstr='--inputCrblGmVolume %s',
),
inputCrblWmVolume=dict(argstr='--inputCrblWmVolume %s',
),
inputCsfVolume=dict(argstr='--inputCsfVolume %s',
),
inputSurfaceGmVolume=dict(argstr='--inputSurfaceGmVolume %s',
),
inputVbVolume=dict(argstr='--inputVbVolume %s',
),
inputWhiteVolume=dict(argstr='--inputWhiteVolume %s',
),
outputVolume=dict(argstr='--outputVolume %s',
hash_files=False,
),
terminal_output=dict(deprecated='1.0.0',
nohash=True,
),
)
inputs = BRAINSPosteriorToContinuousClass.input_spec()
for key, metadata in list(input_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(inputs.traits()[key], metakey) == value
def test_BRAINSPosteriorToContinuousClass_outputs():
output_map = dict(outputVolume=dict(),
)
outputs = BRAINSPosteriorToContinuousClass.output_spec()
for key, metadata in list(output_map.items()):
for metakey, value in list(metadata.items()):
assert getattr(outputs.traits()[key], metakey) == value
| true | true |
1c2f7e3777a18d0dc93765a0705a860de8b9f484 | 2,818 | py | Python | tests/unit/test_checker_manager.py | AnarchyCrew/flake9 | ee2920d775df18481d638c2da084d229d56f95b9 | [
"MIT"
] | null | null | null | tests/unit/test_checker_manager.py | AnarchyCrew/flake9 | ee2920d775df18481d638c2da084d229d56f95b9 | [
"MIT"
] | null | null | null | tests/unit/test_checker_manager.py | AnarchyCrew/flake9 | ee2920d775df18481d638c2da084d229d56f95b9 | [
"MIT"
] | null | null | null | """Tests for the Manager object for FileCheckers."""
import errno
import mock
import pytest
from flake8 import checker
def style_guide_mock(**kwargs):
    """Build a mock StyleGuide whose ``options`` carries the given settings."""
    # Defaults used by the Manager tests unless overridden by the caller.
    kwargs.setdefault('diff', False)
    kwargs.setdefault('jobs', '4')
    guide = mock.Mock()
    guide.options = mock.Mock(**kwargs)
    return guide
def _parallel_checker_manager():
    """Call Manager.run() and return the number of calls to `run_serial`."""
    manager = checker.Manager(style_guide_mock(), [], [])
    # Parallel mode is only attempted when more than one checker exists.
    manager.checkers = [mock.Mock(), mock.Mock()]
    return manager
def test_oserrors_cause_serial_fall_back():
    """Verify that OSErrors will cause the Manager to fallback to serial."""
    no_space = OSError(errno.ENOSPC, 'Ominous message about spaceeeeee')
    with mock.patch('_multiprocessing.SemLock', side_effect=no_space):
        manager = _parallel_checker_manager()
        with mock.patch.object(manager, 'run_serial') as serial:
            manager.run()
        # The parallel attempt failed, so exactly one serial run happened.
        assert serial.call_count == 1
@mock.patch('flake8.checker._multiprocessing_is_fork', return_value=True)
def test_oserrors_are_reraised(is_fork):
    """Verify that unexpected OSErrors will cause the Manager to reraise.

    ``_multiprocessing_is_fork`` is patched True to force the parallel code
    path; an EAGAIN is not in the "fall back to serial" set and must
    propagate to the caller.
    """
    # NOTE(review): the injected mock was previously named ``is_windows``,
    # which did not match the patched target; renamed for consistency.
    # pytest fills the argument positionally, so no callers are affected.
    err = OSError(errno.EAGAIN, 'Ominous message')
    with mock.patch('_multiprocessing.SemLock', side_effect=err):
        manager = _parallel_checker_manager()
        with mock.patch.object(manager, 'run_serial') as serial:
            with pytest.raises(OSError):
                manager.run()
            # The error was re-raised, so the serial fallback never ran.
            assert serial.call_count == 0
def test_multiprocessing_is_disabled():
    """Verify not being able to import multiprocessing forces jobs to 0."""
    guide = style_guide_mock()
    with mock.patch('flake8.checker.multiprocessing', None):
        # jobs is resolved at construction time, while the patch is active.
        assert checker.Manager(guide, [], []).jobs == 0
def test_make_checkers():
"""Verify that we create a list of FileChecker instances."""
style_guide = style_guide_mock()
files = ['file1', 'file2']
checkplugins = mock.Mock()
checkplugins.to_dictionary.return_value = {
'ast_plugins': [],
'logical_line_plugins': [],
'physical_line_plugins': [],
}
with mock.patch('flake8.checker.multiprocessing', None):
manager = checker.Manager(style_guide, files, checkplugins)
# side_effect order matters: one list per input filename, consumed as the
# Manager expands each entry of ``files`` into concrete filenames.
with mock.patch('flake8.utils.filenames_from') as filenames_from:
filenames_from.side_effect = [['file1'], ['file2']]
with mock.patch('flake8.utils.fnmatch', return_value=True):
with mock.patch('flake8.processor.FileProcessor'):
manager.make_checkers()
for file_checker in manager.checkers:
assert file_checker.filename in files
import mock
import pytest
from flake8 import checker
def style_guide_mock(**kwargs):
kwargs.setdefault('diff', False)
kwargs.setdefault('jobs', '4')
style_guide = mock.Mock()
style_guide.options = mock.Mock(**kwargs)
return style_guide
def _parallel_checker_manager():
style_guide = style_guide_mock()
manager = checker.Manager(style_guide, [], [])
manager.checkers = [mock.Mock(), mock.Mock()]
return manager
def test_oserrors_cause_serial_fall_back():
err = OSError(errno.ENOSPC, 'Ominous message about spaceeeeee')
with mock.patch('_multiprocessing.SemLock', side_effect=err):
manager = _parallel_checker_manager()
with mock.patch.object(manager, 'run_serial') as serial:
manager.run()
assert serial.call_count == 1
@mock.patch('flake8.checker._multiprocessing_is_fork', return_value=True)
def test_oserrors_are_reraised(is_windows):
err = OSError(errno.EAGAIN, 'Ominous message')
with mock.patch('_multiprocessing.SemLock', side_effect=err):
manager = _parallel_checker_manager()
with mock.patch.object(manager, 'run_serial') as serial:
with pytest.raises(OSError):
manager.run()
assert serial.call_count == 0
def test_multiprocessing_is_disabled():
style_guide = style_guide_mock()
with mock.patch('flake8.checker.multiprocessing', None):
manager = checker.Manager(style_guide, [], [])
assert manager.jobs == 0
def test_make_checkers():
style_guide = style_guide_mock()
files = ['file1', 'file2']
checkplugins = mock.Mock()
checkplugins.to_dictionary.return_value = {
'ast_plugins': [],
'logical_line_plugins': [],
'physical_line_plugins': [],
}
with mock.patch('flake8.checker.multiprocessing', None):
manager = checker.Manager(style_guide, files, checkplugins)
with mock.patch('flake8.utils.filenames_from') as filenames_from:
filenames_from.side_effect = [['file1'], ['file2']]
with mock.patch('flake8.utils.fnmatch', return_value=True):
with mock.patch('flake8.processor.FileProcessor'):
manager.make_checkers()
for file_checker in manager.checkers:
assert file_checker.filename in files
| true | true |
1c2f7e74de4507242df96dc94fae80731d17c36f | 996 | py | Python | segs/deeplabv3_detection_common.py | Mooonside/SEGS | 93bb66d9979d9beefab9cfd1a146d6e7369f5d86 | [
"MIT"
] | 5 | 2018-10-22T06:56:22.000Z | 2018-10-29T13:03:39.000Z | segs/deeplabv3_detection_common.py | Mooonside/SEGS | 93bb66d9979d9beefab9cfd1a146d6e7369f5d86 | [
"MIT"
] | null | null | null | segs/deeplabv3_detection_common.py | Mooonside/SEGS | 93bb66d9979d9beefab9cfd1a146d6e7369f5d86 | [
"MIT"
] | 2 | 2018-03-17T06:55:35.000Z | 2018-04-13T12:12:44.000Z | """
IN YOLOv3, three detection layers are used, at downsample factors 32, 16 and 8.
IN DEEPLABV3+, the network's output stride is 16, so an extra block must be added to generate the downsample-32 features!
"""
import numpy as np
detection_feature_layers = [
# downsample 8
'xception_65/entry_flow/block2/unit_1/xception_module/add:0',
# downsample 16
'xception_65/middle_flow/block1/unit_16/xception_module/add:0',
# downsample 32
'xception_65/detection_branch/exit_flow/block3/unit_1/xception_module/separable_conv3/pointwise_conv/Relu:0'
]
detection_feature_strides = np.asarray([
8,
16,
32
])
detection_anchors = np.asarray([
[
[0.02403846, 0.03125],
[0.03846154, 0.07211538],
[0.07932692, 0.05528846]
],
[
[0.07211538, 0.14663462],
[0.14903846, 0.10817308],
[0.14182692, 0.28605769]
],
[
[0.27884615, 0.21634615],
[0.375, 0.47596154],
[0.89663462, 0.78365385]
]
])
| 25.538462 | 112 | 0.653614 | import numpy as np
detection_feature_layers = [
'xception_65/entry_flow/block2/unit_1/xception_module/add:0',
'xception_65/middle_flow/block1/unit_16/xception_module/add:0',
'xception_65/detection_branch/exit_flow/block3/unit_1/xception_module/separable_conv3/pointwise_conv/Relu:0'
]
detection_feature_strides = np.asarray([
8,
16,
32
])
detection_anchors = np.asarray([
[
[0.02403846, 0.03125],
[0.03846154, 0.07211538],
[0.07932692, 0.05528846]
],
[
[0.07211538, 0.14663462],
[0.14903846, 0.10817308],
[0.14182692, 0.28605769]
],
[
[0.27884615, 0.21634615],
[0.375, 0.47596154],
[0.89663462, 0.78365385]
]
])
| true | true |
1c2f800b44d6020a3330de2d51454b6d74ce0244 | 13,786 | py | Python | birdy/client/base.py | generic-ci-org/birdy | 63c2d0aacad67569d8d8fc25c9a702d80c69fcd0 | [
"Apache-2.0"
] | null | null | null | birdy/client/base.py | generic-ci-org/birdy | 63c2d0aacad67569d8d8fc25c9a702d80c69fcd0 | [
"Apache-2.0"
] | null | null | null | birdy/client/base.py | generic-ci-org/birdy | 63c2d0aacad67569d8d8fc25c9a702d80c69fcd0 | [
"Apache-2.0"
] | null | null | null | import types
from collections import OrderedDict
from textwrap import dedent
from boltons.funcutils import FunctionBuilder
import requests
import requests.auth
import owslib
from owslib.util import ServiceException
from owslib.wps import WPS_DEFAULT_VERSION, WebProcessingService, SYNC, ASYNC, ComplexData
from birdy.exceptions import UnauthorizedException
from birdy.client import utils
from birdy.utils import sanitize, fix_url, embed, guess_type
from birdy.client import notebook
from birdy.client.outputs import WPSResult
import logging
# TODO: Support passing ComplexInput's data using POST.
class WPSClient(object):
    """Returns a class where every public method is a WPS process available at
    the given url.

    Example:
        >>> emu = WPSClient(url='<server url>')
        >>> emu.hello('stranger')
        'Hello stranger'
    """

    def __init__(
        self,
        url,
        processes=None,
        converters=None,
        username=None,
        password=None,
        headers=None,
        auth=None,
        verify=True,
        cert=None,
        verbose=False,
        progress=False,
        version=WPS_DEFAULT_VERSION,
        caps_xml=None,
        desc_xml=None,
        language=None,
    ):
        """
        Args:
            url (str): Link to WPS provider.
            processes: Specify a subset of processes to bind. Defaults to all
                processes.
            converters (dict): Correspondence of {mimetype: class} to convert
                this mimetype to a python object.
            username (str): passed to :class:`owslib.wps.WebProcessingService`
            password (str): passed to :class:`owslib.wps.WebProcessingService`
            headers (str): passed to :class:`owslib.wps.WebProcessingService`
            auth (requests.auth.AuthBase): requests-style auth class to authenticate,
                see https://2.python-requests.org/en/master/user/authentication/
            verify (bool): passed to :class:`owslib.wps.WebProcessingService`
            cert (str): passed to :class:`owslib.wps.WebProcessingService`
            verbose (str): passed to :class:`owslib.wps.WebProcessingService`
            progress (bool): If True, enable interactive user mode.
            version (str): WPS version to use.
            caps_xml: pre-fetched GetCapabilities response, forwarded to
                ``getcapabilities(xml=...)``.
            desc_xml: pre-fetched DescribeProcess response, forwarded to
                ``describeprocess(..., xml=...)``.
            language (str): passed to :class:`owslib.wps.WebProcessingService`
                ex: 'fr-CA', 'en_US'.
        """
        self._converters = converters
        self._interactive = progress
        # Progress mode implies asynchronous execution so status can be polled.
        self._mode = ASYNC if progress else SYNC
        self._notebook = notebook.is_notebook()
        self._inputs = {}
        self._outputs = {}

        if not verify:
            import urllib3

            urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

        if headers is None:
            headers = {}

        if auth is not None:
            if isinstance(auth, tuple) and len(auth) == 2:
                # special-case basic HTTP auth
                auth = requests.auth.HTTPBasicAuth(*auth)

            # We only need some headers from the requests.auth.AuthBase implementation
            # We prepare a dummy request, call the auth object with it, and get its headers
            # NOTE(review): only header-based auth schemes survive this
            # extraction; auth objects that mutate the URL or body are lost.
            dummy_request = requests.Request("get", "http://localhost")
            r = auth(dummy_request.prepare())
            auth_headers = ["Authorization", "Proxy-Authorization", "Cookie"]
            headers.update({h: r.headers[h] for h in auth_headers if h in r.headers})

        self._wps = WebProcessingService(
            url,
            version=version,
            username=username,
            password=password,
            verbose=verbose,
            headers=headers,
            verify=verify,
            cert=cert,
            skip_caps=True,
            language=language
        )

        try:
            self._wps.getcapabilities(xml=caps_xml)
        except ServiceException as e:
            # Translate the server's access error into a birdy exception.
            if "AccessForbidden" in str(e):
                raise UnauthorizedException(
                    "You are not authorized to do a request of type: GetCapabilities"
                )
            raise

        self._processes = self._get_process_description(processes, xml=desc_xml)

        # Build the methods: one bound method per WPS process identifier.
        for pid in self._processes:
            setattr(self, sanitize(pid), types.MethodType(self._method_factory(pid), self))

        self.logger = logging.getLogger('WPSClient')
        if progress:
            self._setup_logging()

        self.__doc__ = utils.build_wps_client_doc(self._wps, self._processes)

    @property
    def language(self):
        # Language actually negotiated with the WPS server.
        return self._wps.language

    @language.setter
    def language(self, value):
        self._wps.language = value

    @property
    def languages(self):
        # Languages advertised by the WPS server.
        return self._wps.languages

    def _get_process_description(self, processes=None, xml=None):
        """Return the description for each process.

        Sends the server a `describeProcess` request for each process.

        Parameters
        ----------
        processes: str, list, None
          A process name, a list of process names or None (for all processes).

        Returns
        -------
        OrderedDict
          A dictionary keyed by the process identifier of process descriptions.
        """
        all_wps_processes = [p.identifier for p in self._wps.processes]

        if processes is None:
            if owslib.__version__ > '0.17.0':
                # Get the description for all processes in one request.
                ps = self._wps.describeprocess('all', xml=xml)
                return OrderedDict((p.identifier, p) for p in ps)
            else:
                processes = all_wps_processes

        # Check for invalid process names, i.e. not matching the getCapabilities response.
        process_names, missing = utils.filter_case_insensitive(
            processes, all_wps_processes)

        if missing:
            message = "These process names were not found on the WPS server: {}"
            raise ValueError(message.format(", ".join(missing)))

        # Get the description for each process.
        ps = [self._wps.describeprocess(pid, xml=xml) for pid in process_names]

        return OrderedDict((p.identifier, p) for p in ps)

    def _setup_logging(self):
        # Mirror progress messages to stdout in interactive mode.
        self.logger.setLevel(logging.INFO)
        import sys
        fh = logging.StreamHandler(sys.stdout)
        fh.setFormatter(logging.Formatter('%(asctime)s: %(message)s'))
        self.logger.addHandler(fh)

    def _method_factory(self, pid):
        """Create a custom function signature with docstring, instantiate it and
        pass it to a wrapper which will actually call the process.

        Parameters
        ----------
        pid: str
          Identifier of the WPS process.

        Returns
        -------
        func
          A Python function calling the remote process, complete with docstring and signature.
        """
        process = self._processes[pid]

        required_inputs_first = sorted(process.dataInputs, key=sort_inputs_key)

        input_names = []
        # defaults will be set to the function's __defaults__:
        # A tuple containing default argument values for those arguments that have defaults,
        # or None if no arguments have a default value.
        defaults = []
        for inpt in required_inputs_first:
            input_names.append(sanitize(inpt.identifier))
            if inpt.minOccurs == 0 or inpt.defaultValue is not None:
                default = inpt.defaultValue if inpt.dataType != "ComplexData" else None
                defaults.append(utils.from_owslib(default, inpt.dataType))
        defaults = tuple(defaults) if defaults else None

        # The generated function forwards its keyword arguments to _execute.
        body = dedent("""
            inputs = locals()
            inputs.pop('self')
            return self._execute('{pid}', **inputs)
        """).format(pid=pid)

        func_builder = FunctionBuilder(
            name=sanitize(pid),
            doc=utils.build_process_doc(process),
            args=["self"] + input_names,
            defaults=defaults,
            body=body,
            filename=__file__,
            module=self.__module__,
        )

        self._inputs[pid] = {}
        if hasattr(process, "dataInputs"):
            self._inputs[pid] = OrderedDict(
                (i.identifier, i) for i in process.dataInputs
            )

        self._outputs[pid] = {}
        if hasattr(process, "processOutputs"):
            self._outputs[pid] = OrderedDict(
                (o.identifier, o) for o in process.processOutputs
            )

        func = func_builder.get_func()

        return func

    def _build_inputs(self, pid, **kwargs):
        """Build the input sequence from the function arguments."""
        wps_inputs = []
        for name, input_param in list(self._inputs[pid].items()):
            arg = kwargs.get(sanitize(name))
            if arg is None:
                continue

            values = [arg, ] if not isinstance(arg, (list, tuple)) else arg
            supported_mimetypes = [v.mimeType for v in input_param.supportedValues]

            for value in values:
                # if input_param.dataType == "ComplexData": seems simpler
                if isinstance(input_param.defaultValue, ComplexData):

                    # Guess the mimetype of the input value
                    mimetype, encoding = guess_type(value, supported_mimetypes)

                    if encoding is None:
                        encoding = input_param.defaultValue.encoding

                    if isinstance(value, ComplexData):
                        inp = value

                    # Either embed the file content or just the reference.
                    else:
                        if utils.is_embedded_in_request(self._wps.url, value):
                            # If encoding is None, this will return the actual encoding used (utf-8 or base64).
                            value, encoding = embed(value, mimetype, encoding=encoding)
                        else:
                            value = fix_url(str(value))

                        inp = utils.to_owslib(value,
                                              data_type=input_param.dataType,
                                              encoding=encoding,
                                              mimetype=mimetype)

                else:
                    inp = utils.to_owslib(value, data_type=input_param.dataType)

                wps_inputs.append((name, inp))

        return wps_inputs

    def _execute(self, pid, **kwargs):
        """Execute the process."""
        wps_inputs = self._build_inputs(pid, **kwargs)

        # For each output: request a reference when it is ComplexData.
        wps_outputs = [
            (o.identifier, "ComplexData" in o.dataType)
            for o in list(self._outputs[pid].values())
        ]

        # Fall back to synchronous execution when the server cannot store status.
        mode = self._mode if self._processes[pid].storeSupported else SYNC

        try:
            wps_response = self._wps.execute(
                pid, inputs=wps_inputs, output=wps_outputs, mode=mode
            )

            if self._interactive and self._processes[pid].statusSupported:
                if self._notebook:
                    notebook.monitor(wps_response, sleep=.2)
                else:
                    self._console_monitor(wps_response)

        except ServiceException as e:
            if "AccessForbidden" in str(e):
                raise UnauthorizedException(
                    "You are not authorized to do a request of type: Execute"
                )
            raise

        # Add the convenience methods of WPSResult to the WPSExecution class. This adds a `get` method.
        utils.extend_instance(wps_response, WPSResult)
        wps_response.attach(wps_outputs=self._outputs[pid], converters=self._converters)
        return wps_response

    def _console_monitor(self, execution, sleep=3):
        """Monitor the execution of a process.

        Parameters
        ----------
        execution : WPSExecution instance
          The execute response to monitor.
        sleep: float
          Number of seconds to wait before each status check.
        """
        import signal

        # Intercept CTRL-C
        def sigint_handler(signum, frame):
            # NOTE(review): WPSClient defines no `cancel` method anywhere in
            # this class, so pressing CTRL-C here would raise AttributeError.
            # Confirm the intended cancellation API.
            self.cancel()
        signal.signal(signal.SIGINT, sigint_handler)

        while not execution.isComplete():
            execution.checkStatus(sleepSecs=sleep)
            self.logger.info("{} [{}/100] - {} ".format(
                execution.process.identifier,
                execution.percentCompleted,
                execution.statusMessage[:50],))

        if execution.isSucceded():
            self.logger.info("{} done.".format(execution.process.identifier))
        else:
            self.logger.info("{} failed.".format(execution.process.identifier))
def sort_inputs_key(i):
    """Key function used when sorting process inputs.

    The resulting order is:
    - inputs with minOccurs >= 1 and no usable default value,
    - inputs with minOccurs >= 1 and a default value,
    - every other input.

    Parameters
    ----------
    i: owslib.wps.Input
      An owslib Input

    Notes
    -----
    The defaultValue for ComplexData is a ComplexData instance that only
    carries mimetype, encoding and schema, so it does not count as a default.
    """
    # One flag per priority bucket; negating makes True sort first
    # (False < True), which preserves the bucket order above.
    required_without_default = i.minOccurs >= 1 and (
        i.defaultValue is None or isinstance(i.defaultValue, ComplexData)
    )
    required = i.minOccurs >= 1
    optional = i.minOccurs == 0
    return [not required_without_default, not required, not optional]
def nb_form(wps, pid):
    """Return a Notebook form to enter input values and launch process.

    Returns None when the client is not running inside a notebook.
    """
    if not wps._notebook:
        return None
    return notebook.interact(
        func=getattr(wps, sanitize(pid)),
        inputs=list(wps._inputs[pid].items()))
| 35.07888 | 111 | 0.592268 | import types
from collections import OrderedDict
from textwrap import dedent
from boltons.funcutils import FunctionBuilder
import requests
import requests.auth
import owslib
from owslib.util import ServiceException
from owslib.wps import WPS_DEFAULT_VERSION, WebProcessingService, SYNC, ASYNC, ComplexData
from birdy.exceptions import UnauthorizedException
from birdy.client import utils
from birdy.utils import sanitize, fix_url, embed, guess_type
from birdy.client import notebook
from birdy.client.outputs import WPSResult
import logging
class WPSClient(object):
def __init__(
self,
url,
processes=None,
converters=None,
username=None,
password=None,
headers=None,
auth=None,
verify=True,
cert=None,
verbose=False,
progress=False,
version=WPS_DEFAULT_VERSION,
caps_xml=None,
desc_xml=None,
language=None,
):
self._converters = converters
self._interactive = progress
self._mode = ASYNC if progress else SYNC
self._notebook = notebook.is_notebook()
self._inputs = {}
self._outputs = {}
if not verify:
import urllib3
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
if headers is None:
headers = {}
if auth is not None:
if isinstance(auth, tuple) and len(auth) == 2:
# special-case basic HTTP auth
auth = requests.auth.HTTPBasicAuth(*auth)
# We only need some headers from the requests.auth.AuthBase implementation
# We prepare a dummy request, call the auth object with it, and get its headers
dummy_request = requests.Request("get", "http://localhost")
r = auth(dummy_request.prepare())
auth_headers = ["Authorization", "Proxy-Authorization", "Cookie"]
headers.update({h: r.headers[h] for h in auth_headers if h in r.headers})
self._wps = WebProcessingService(
url,
version=version,
username=username,
password=password,
verbose=verbose,
headers=headers,
verify=verify,
cert=cert,
skip_caps=True,
language=language
)
try:
self._wps.getcapabilities(xml=caps_xml)
except ServiceException as e:
if "AccessForbidden" in str(e):
raise UnauthorizedException(
"You are not authorized to do a request of type: GetCapabilities"
)
raise
self._processes = self._get_process_description(processes, xml=desc_xml)
# Build the methods
for pid in self._processes:
setattr(self, sanitize(pid), types.MethodType(self._method_factory(pid), self))
self.logger = logging.getLogger('WPSClient')
if progress:
self._setup_logging()
self.__doc__ = utils.build_wps_client_doc(self._wps, self._processes)
@property
def language(self):
return self._wps.language
@language.setter
def language(self, value):
self._wps.language = value
@property
def languages(self):
return self._wps.languages
def _get_process_description(self, processes=None, xml=None):
all_wps_processes = [p.identifier for p in self._wps.processes]
if processes is None:
if owslib.__version__ > '0.17.0':
# Get the description for all processes in one request.
ps = self._wps.describeprocess('all', xml=xml)
return OrderedDict((p.identifier, p) for p in ps)
else:
processes = all_wps_processes
# Check for invalid process names, i.e. not matching the getCapabilities response.
process_names, missing = utils.filter_case_insensitive(
processes, all_wps_processes)
if missing:
message = "These process names were not found on the WPS server: {}"
raise ValueError(message.format(", ".join(missing)))
# Get the description for each process.
ps = [self._wps.describeprocess(pid, xml=xml) for pid in process_names]
return OrderedDict((p.identifier, p) for p in ps)
def _setup_logging(self):
self.logger.setLevel(logging.INFO)
import sys
fh = logging.StreamHandler(sys.stdout)
fh.setFormatter(logging.Formatter('%(asctime)s: %(message)s'))
self.logger.addHandler(fh)
def _method_factory(self, pid):
process = self._processes[pid]
required_inputs_first = sorted(process.dataInputs, key=sort_inputs_key)
input_names = []
# defaults will be set to the function's __defaults__:
defaults = []
for inpt in required_inputs_first:
input_names.append(sanitize(inpt.identifier))
if inpt.minOccurs == 0 or inpt.defaultValue is not None:
default = inpt.defaultValue if inpt.dataType != "ComplexData" else None
defaults.append(utils.from_owslib(default, inpt.dataType))
defaults = tuple(defaults) if defaults else None
body = dedent("""
inputs = locals()
inputs.pop('self')
return self._execute('{pid}', **inputs)
""").format(pid=pid)
func_builder = FunctionBuilder(
name=sanitize(pid),
doc=utils.build_process_doc(process),
args=["self"] + input_names,
defaults=defaults,
body=body,
filename=__file__,
module=self.__module__,
)
self._inputs[pid] = {}
if hasattr(process, "dataInputs"):
self._inputs[pid] = OrderedDict(
(i.identifier, i) for i in process.dataInputs
)
self._outputs[pid] = {}
if hasattr(process, "processOutputs"):
self._outputs[pid] = OrderedDict(
(o.identifier, o) for o in process.processOutputs
)
func = func_builder.get_func()
return func
def _build_inputs(self, pid, **kwargs):
wps_inputs = []
for name, input_param in list(self._inputs[pid].items()):
arg = kwargs.get(sanitize(name))
if arg is None:
continue
values = [arg, ] if not isinstance(arg, (list, tuple)) else arg
supported_mimetypes = [v.mimeType for v in input_param.supportedValues]
for value in values:
if isinstance(input_param.defaultValue, ComplexData):
mimetype, encoding = guess_type(value, supported_mimetypes)
if encoding is None:
encoding = input_param.defaultValue.encoding
if isinstance(value, ComplexData):
inp = value
else:
if utils.is_embedded_in_request(self._wps.url, value):
value, encoding = embed(value, mimetype, encoding=encoding)
else:
value = fix_url(str(value))
inp = utils.to_owslib(value,
data_type=input_param.dataType,
encoding=encoding,
mimetype=mimetype)
else:
inp = utils.to_owslib(value, data_type=input_param.dataType)
wps_inputs.append((name, inp))
return wps_inputs
def _execute(self, pid, **kwargs):
wps_inputs = self._build_inputs(pid, **kwargs)
wps_outputs = [
(o.identifier, "ComplexData" in o.dataType)
for o in list(self._outputs[pid].values())
]
mode = self._mode if self._processes[pid].storeSupported else SYNC
try:
wps_response = self._wps.execute(
pid, inputs=wps_inputs, output=wps_outputs, mode=mode
)
if self._interactive and self._processes[pid].statusSupported:
if self._notebook:
notebook.monitor(wps_response, sleep=.2)
else:
self._console_monitor(wps_response)
except ServiceException as e:
if "AccessForbidden" in str(e):
raise UnauthorizedException(
"You are not authorized to do a request of type: Execute"
)
raise
utils.extend_instance(wps_response, WPSResult)
wps_response.attach(wps_outputs=self._outputs[pid], converters=self._converters)
return wps_response
def _console_monitor(self, execution, sleep=3):
import signal
def sigint_handler(signum, frame):
self.cancel()
signal.signal(signal.SIGINT, sigint_handler)
while not execution.isComplete():
execution.checkStatus(sleepSecs=sleep)
self.logger.info("{} [{}/100] - {} ".format(
execution.process.identifier,
execution.percentCompleted,
execution.statusMessage[:50],))
if execution.isSucceded():
self.logger.info("{} done.".format(execution.process.identifier))
else:
self.logger.info("{} failed.".format(execution.process.identifier))
def sort_inputs_key(i):
conditions = [
i.minOccurs >= 1 and (i.defaultValue is None or isinstance(i.defaultValue, ComplexData)),
i.minOccurs >= 1,
i.minOccurs == 0,
]
return [not c for c in conditions]
def nb_form(wps, pid):
if wps._notebook:
return notebook.interact(
func=getattr(wps, sanitize(pid)),
inputs=list(wps._inputs[pid].items()))
else:
return None
| true | true |
1c2f8026b9f6d2175d9d4b63b9ddda989b227350 | 1,070 | py | Python | bff/__init__.py | rudolfhohn/bff | 0d5a8da62f2250bef78dffe0aee8bab2475014c2 | [
"MIT"
] | null | null | null | bff/__init__.py | rudolfhohn/bff | 0d5a8da62f2250bef78dffe0aee8bab2475014c2 | [
"MIT"
] | null | null | null | bff/__init__.py | rudolfhohn/bff | 0d5a8da62f2250bef78dffe0aee8bab2475014c2 | [
"MIT"
] | null | null | null | """All of bff' functions."""
import logging
from ._version import get_versions
# Import submodules.
from . import plot
from .fancy import (
avg_dicts,
cast_to_category_pd,
concat_with_categories,
get_peaks,
idict,
kwargs_2_list,
log_df,
mem_usage_pd,
normalization_pd,
parse_date,
pipe_multiprocessing_pd,
read_sql_by_chunks,
size_2_square,
sliding_window,
value_2_list,
)
from .config import FancyConfig
# Public object of the module.
__all__ = [
'avg_dicts',
'cast_to_category_pd',
'concat_with_categories',
'get_peaks',
'idict',
'kwargs_2_list',
'log_df',
'mem_usage_pd',
'normalization_pd',
'parse_date',
'pipe_multiprocessing_pd',
'plot',
'read_sql_by_chunks',
'size_2_square',
'sliding_window',
'FancyConfig',
'value_2_list',
]
# Logging configuration.
FORMAT = '%(asctime)s [%(levelname)-7s] %(name)s: %(message)s'
logging.basicConfig(format=FORMAT, level=logging.INFO)
__version__ = get_versions()['version']
del get_versions
| 18.77193 | 62 | 0.680374 | import logging
from ._version import get_versions
from . import plot
from .fancy import (
avg_dicts,
cast_to_category_pd,
concat_with_categories,
get_peaks,
idict,
kwargs_2_list,
log_df,
mem_usage_pd,
normalization_pd,
parse_date,
pipe_multiprocessing_pd,
read_sql_by_chunks,
size_2_square,
sliding_window,
value_2_list,
)
from .config import FancyConfig
__all__ = [
'avg_dicts',
'cast_to_category_pd',
'concat_with_categories',
'get_peaks',
'idict',
'kwargs_2_list',
'log_df',
'mem_usage_pd',
'normalization_pd',
'parse_date',
'pipe_multiprocessing_pd',
'plot',
'read_sql_by_chunks',
'size_2_square',
'sliding_window',
'FancyConfig',
'value_2_list',
]
FORMAT = '%(asctime)s [%(levelname)-7s] %(name)s: %(message)s'
logging.basicConfig(format=FORMAT, level=logging.INFO)
__version__ = get_versions()['version']
del get_versions
| true | true |
1c2f80cffbc9bb996fcf769dd532a8c769b4d15f | 1,837 | py | Python | ImageProcessing/make_makefile.py | Datamuseum-DK/R1000.HwDoc | cb0841540a4ac184a08957daac1a470b6916a663 | [
"BSD-2-Clause"
] | null | null | null | ImageProcessing/make_makefile.py | Datamuseum-DK/R1000.HwDoc | cb0841540a4ac184a08957daac1a470b6916a663 | [
"BSD-2-Clause"
] | null | null | null | ImageProcessing/make_makefile.py | Datamuseum-DK/R1000.HwDoc | cb0841540a4ac184a08957daac1a470b6916a663 | [
"BSD-2-Clause"
] | null | null | null |
import glob
import os
def gen_makefile():
    """Write a ``Makefile`` driving every step_*.py script over every raw image.

    For each ``../<BOARD>/rawimg/*-<NBR>.pgm`` image a work directory
    ``<BOARD>/<NBR>`` is created, and one make target per processing step is
    emitted.  Each step target depends on the previous step's ``_ok`` marker,
    logs to ``NNN_log``, and renames the log to ``NNN_ok`` on success or
    ``NNN_error`` on failure.  The final ``all`` target aggregates every
    image's last step.
    """
    # The step scripts are the same for every image; glob them once instead
    # of once per image (loop-invariant hoisted out of the outer loop).
    step_progs = sorted(glob.glob("step*.py"))
    # "with" guarantees the Makefile is flushed and closed even on error;
    # the previous version leaked the open file handle.
    with open("Makefile", "w") as fo:
        fo.write("default: all\n")
        for img in sorted(glob.glob("../[A-Z]*/rawimg/*.pgm")):
            fo.write("\n")
            brd = img.split('/')[1]
            # Idiomatic replacement for try: os.mkdir / except FileExistsError.
            os.makedirs(brd, exist_ok=True)
            nbr = img.split('.')[-2]
            nbr = nbr.split('-')[-1]
            wd = brd + "/" + nbr
            prev_ok = ""
            for step_prog in step_progs:
                # step_<NNN>_*.py -> integer step number.
                step_no = step_prog.split("step_")[1]
                step_no = step_no.split("_")[0]
                step_no = int(step_no, 10)
                ok = wd + "/%03d_ok" % step_no
                log = wd + "/%03d_log" % step_no
                err = wd + "/%03d_error" % step_no
                fo.write("\n")
                fo.write(ok + ": " + step_prog)
                if prev_ok:
                    fo.write(" " + prev_ok)
                fo.write("\n")
                if not prev_ok:
                    # First step: start from a clean work directory.
                    fo.write("\t@mkdir -p " + wd + "\n")
                    fo.write("\t@rm -rf " + wd + "/*\n")
                else:
                    # Later steps: only clear this step's previous markers.
                    fo.write("\t@rm -f " + wd + "/%03d_*\n" % step_no)
                fo.write("\t@echo " + step_prog + " " + brd + " " + nbr + "\n")
                # Run the step; rename the log to _ok on success, _error on failure.
                fo.write("\t@(python3 -u " + step_prog + " " + brd + " " + nbr)
                fo.write(" > " + log + " 2>&1 && \\\n")
                fo.write("\t mv " + log + " " + ok)
                fo.write(") || (mv " + log + " " + err + " && false)")
                fo.write("\n")
                prev_ok = ok
            fo.write("ALLTGT += " + prev_ok + "\n")
            fo.write("\n" + brd + "_" + nbr + ": " + prev_ok + "\n")
            # NOTE(review): error files are named "NNN_error", so the "_err*"
            # glob below (and in the "all" recipe) never matches them —
            # probably meant "*_err*"; confirm intent before changing output.
            fo.write("\t@${MAKE} " + prev_ok + "|| cat " + brd + "/" + nbr + "/_err*\n")
        fo.write("\nall: ${ALLTGT}\n\n")
        fo.write("\t-ls */????/_err*\n")
        fo.write("\t-sh summary.sh\n")
gen_makefile()
| 30.114754 | 84 | 0.408819 |
import glob
import os
def gen_makefile():
fo = open("Makefile", "w")
fo.write("default: all\n")
for img in sorted(glob.glob("../[A-Z]*/rawimg/*.pgm")):
fo.write("\n")
brd = img.split('/')[1]
try:
os.mkdir(brd)
except FileExistsError:
pass
nbr = img.split('.')[-2]
nbr = nbr.split('-')[-1]
wd = brd + "/" + nbr
prev_ok = ""
for step_prog in sorted(glob.glob("step*.py")):
step_no = step_prog.split("step_")[1]
step_no = step_no.split("_")[0]
step_no = int(step_no, 10)
ok = wd + "/%03d_ok" % step_no
log = wd + "/%03d_log" % step_no
err = wd + "/%03d_error" % step_no
fo.write("\n")
fo.write(ok + ": " + step_prog)
if prev_ok:
fo.write(" " + prev_ok)
fo.write("\n")
if not prev_ok:
fo.write("\t@mkdir -p " + wd + "\n")
fo.write("\t@rm -rf " + wd + "/*\n")
else:
fo.write("\t@rm -f " + wd + "/%03d_*\n" % step_no)
fo.write("\t@echo " + step_prog + " " + brd + " " + nbr + "\n")
fo.write("\t@(python3 -u " + step_prog + " " + brd + " " + nbr)
fo.write(" > " + log + " 2>&1 && \\\n")
fo.write("\t mv " + log + " " + ok)
fo.write(") || (mv " + log + " " + err + " && false)")
fo.write("\n")
prev_ok = ok
fo.write("ALLTGT += " + prev_ok + "\n")
fo.write("\n" + brd + "_" + nbr + ": " + prev_ok + "\n")
fo.write("\t@${MAKE} " + prev_ok + "|| cat " + brd + "/" + nbr + "/_err*\n")
fo.write("\nall: ${ALLTGT}\n\n")
fo.write("\t-ls */????/_err*\n")
fo.write("\t-sh summary.sh\n")
gen_makefile()
| true | true |
1c2f816ad4038bf454b58b4c571eeeb66838c133 | 6,999 | py | Python | homeassistant/components/discovery/__init__.py | SAABoholic/core | 25b093e69e9939c131f4dc83566a9571929803df | [
"Apache-2.0"
] | 4 | 2020-07-29T17:47:10.000Z | 2020-09-16T13:39:13.000Z | homeassistant/components/discovery/__init__.py | SAABoholic/core | 25b093e69e9939c131f4dc83566a9571929803df | [
"Apache-2.0"
] | 6 | 2020-11-08T19:40:10.000Z | 2022-03-01T11:11:07.000Z | homeassistant/components/discovery/__init__.py | SAABoholic/core | 25b093e69e9939c131f4dc83566a9571929803df | [
"Apache-2.0"
] | 3 | 2016-10-03T20:14:06.000Z | 2019-04-19T15:56:56.000Z | """
Starts a service to scan in intervals for new devices.
Will emit EVENT_PLATFORM_DISCOVERED whenever a new service has been discovered.
Knows which components handle certain types, will make sure they are
loaded before the EVENT_PLATFORM_DISCOVERED is fired.
"""
from datetime import timedelta
import json
import logging
from netdisco.discovery import NetworkDiscovery
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import EVENT_HOMEASSISTANT_START
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import async_discover, async_load_platform
from homeassistant.helpers.event import async_track_point_in_utc_time
import homeassistant.util.dt as dt_util
DOMAIN = "discovery"
SCAN_INTERVAL = timedelta(seconds=300)
SERVICE_APPLE_TV = "apple_tv"
SERVICE_DAIKIN = "daikin"
SERVICE_DLNA_DMR = "dlna_dmr"
SERVICE_ENIGMA2 = "enigma2"
SERVICE_FREEBOX = "freebox"
SERVICE_HASS_IOS_APP = "hass_ios"
SERVICE_HASSIO = "hassio"
SERVICE_HEOS = "heos"
SERVICE_KONNECTED = "konnected"
SERVICE_MOBILE_APP = "hass_mobile_app"
SERVICE_NETGEAR = "netgear_router"
SERVICE_OCTOPRINT = "octoprint"
SERVICE_SABNZBD = "sabnzbd"
SERVICE_SAMSUNG_PRINTER = "samsung_printer"
SERVICE_TELLDUSLIVE = "tellstick"
SERVICE_YEELIGHT = "yeelight"
SERVICE_WEMO = "belkin_wemo"
SERVICE_WINK = "wink"
SERVICE_XIAOMI_GW = "xiaomi_gw"
CONFIG_ENTRY_HANDLERS = {
SERVICE_DAIKIN: "daikin",
SERVICE_TELLDUSLIVE: "tellduslive",
"logitech_mediaserver": "squeezebox",
}
SERVICE_HANDLERS = {
SERVICE_MOBILE_APP: ("mobile_app", None),
SERVICE_HASS_IOS_APP: ("ios", None),
SERVICE_NETGEAR: ("device_tracker", None),
SERVICE_HASSIO: ("hassio", None),
SERVICE_APPLE_TV: ("apple_tv", None),
SERVICE_ENIGMA2: ("media_player", "enigma2"),
SERVICE_WINK: ("wink", None),
SERVICE_SABNZBD: ("sabnzbd", None),
SERVICE_SAMSUNG_PRINTER: ("sensor", "syncthru"),
SERVICE_KONNECTED: ("konnected", None),
SERVICE_OCTOPRINT: ("octoprint", None),
SERVICE_FREEBOX: ("freebox", None),
SERVICE_YEELIGHT: ("yeelight", None),
"yamaha": ("media_player", "yamaha"),
"frontier_silicon": ("media_player", "frontier_silicon"),
"openhome": ("media_player", "openhome"),
"bose_soundtouch": ("media_player", "soundtouch"),
"bluesound": ("media_player", "bluesound"),
"kodi": ("media_player", "kodi"),
"volumio": ("media_player", "volumio"),
"lg_smart_device": ("media_player", "lg_soundbar"),
"nanoleaf_aurora": ("light", "nanoleaf"),
}
OPTIONAL_SERVICE_HANDLERS = {SERVICE_DLNA_DMR: ("media_player", "dlna_dmr")}
MIGRATED_SERVICE_HANDLERS = [
"axis",
"deconz",
"denonavr",
"esphome",
"google_cast",
SERVICE_HEOS,
"harmony",
"homekit",
"ikea_tradfri",
"philips_hue",
"sonos",
"songpal",
SERVICE_WEMO,
SERVICE_XIAOMI_GW,
]
DEFAULT_ENABLED = (
list(CONFIG_ENTRY_HANDLERS) + list(SERVICE_HANDLERS) + MIGRATED_SERVICE_HANDLERS
)
DEFAULT_DISABLED = list(OPTIONAL_SERVICE_HANDLERS) + MIGRATED_SERVICE_HANDLERS
CONF_IGNORE = "ignore"
CONF_ENABLE = "enable"
CONFIG_SCHEMA = vol.Schema(
{
vol.Optional(DOMAIN): vol.Schema(
{
vol.Optional(CONF_IGNORE, default=[]): vol.All(
cv.ensure_list, [vol.In(DEFAULT_ENABLED)]
),
vol.Optional(CONF_ENABLE, default=[]): vol.All(
cv.ensure_list, [vol.In(DEFAULT_DISABLED + DEFAULT_ENABLED)]
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
    """Start a discovery service."""
    logger = logging.getLogger(__name__)
    netdisco = NetworkDiscovery()
    # Hashes of (service, info) pairs already dispatched; prevents re-firing
    # the same discovery on every scan.
    already_discovered = set()

    # Disable zeroconf logging, it spams
    logging.getLogger("zeroconf").setLevel(logging.CRITICAL)

    if DOMAIN in config:
        # Platforms ignore by config
        ignored_platforms = config[DOMAIN][CONF_IGNORE]

        # Optional platforms enabled by config
        enabled_platforms = config[DOMAIN][CONF_ENABLE]
    else:
        ignored_platforms = []
        enabled_platforms = []

    for platform in enabled_platforms:
        if platform in DEFAULT_ENABLED:
            logger.warning(
                "Please remove %s from your discovery.enable configuration "
                "as it is now enabled by default",
                platform,
            )

    async def new_service_found(service, info):
        """Handle a new service if one is found."""
        if service in MIGRATED_SERVICE_HANDLERS:
            return

        if service in ignored_platforms:
            logger.info("Ignoring service: %s %s", service, info)
            return

        # Canonical JSON of the pair serves as the de-duplication key.
        discovery_hash = json.dumps([service, info], sort_keys=True)
        if discovery_hash in already_discovered:
            logger.debug("Already discovered service %s %s.", service, info)
            return

        already_discovered.add(discovery_hash)

        if service in CONFIG_ENTRY_HANDLERS:
            await hass.config_entries.flow.async_init(
                CONFIG_ENTRY_HANDLERS[service],
                context={"source": config_entries.SOURCE_DISCOVERY},
                data=info,
            )
            return

        comp_plat = SERVICE_HANDLERS.get(service)

        # Optional handlers only apply when explicitly enabled by the user.
        if not comp_plat and service in enabled_platforms:
            comp_plat = OPTIONAL_SERVICE_HANDLERS[service]

        # We do not know how to handle this service.
        if not comp_plat:
            logger.info("Unknown service discovered: %s %s", service, info)
            return

        logger.info("Found new service: %s %s", service, info)

        component, platform = comp_plat

        if platform is None:
            await async_discover(hass, service, info, component, config)
        else:
            await async_load_platform(hass, component, platform, info, config)

    async def scan_devices(now):
        """Scan for devices."""
        try:
            # netdisco scanning is blocking; run it in the executor.
            results = await hass.async_add_job(_discover, netdisco)

            for result in results:
                hass.async_create_task(new_service_found(*result))
        except OSError:
            logger.error("Network is unreachable")

        # Re-arm: schedule the next scan one interval from now.
        async_track_point_in_utc_time(
            hass, scan_devices, dt_util.utcnow() + SCAN_INTERVAL
        )

    @callback
    def schedule_first(event):
        """Schedule the first discovery when Home Assistant starts up."""
        async_track_point_in_utc_time(hass, scan_devices, dt_util.utcnow())

    hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, schedule_first)

    return True
def _discover(netdisco):
"""Discover devices."""
results = []
try:
netdisco.scan()
for disc in netdisco.discover():
for service in netdisco.get_info(disc):
results.append((disc, service))
finally:
netdisco.stop()
return results
| 30.298701 | 84 | 0.667667 | from datetime import timedelta
import json
import logging
from netdisco.discovery import NetworkDiscovery
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import EVENT_HOMEASSISTANT_START
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.discovery import async_discover, async_load_platform
from homeassistant.helpers.event import async_track_point_in_utc_time
import homeassistant.util.dt as dt_util
DOMAIN = "discovery"
SCAN_INTERVAL = timedelta(seconds=300)
SERVICE_APPLE_TV = "apple_tv"
SERVICE_DAIKIN = "daikin"
SERVICE_DLNA_DMR = "dlna_dmr"
SERVICE_ENIGMA2 = "enigma2"
SERVICE_FREEBOX = "freebox"
SERVICE_HASS_IOS_APP = "hass_ios"
SERVICE_HASSIO = "hassio"
SERVICE_HEOS = "heos"
SERVICE_KONNECTED = "konnected"
SERVICE_MOBILE_APP = "hass_mobile_app"
SERVICE_NETGEAR = "netgear_router"
SERVICE_OCTOPRINT = "octoprint"
SERVICE_SABNZBD = "sabnzbd"
SERVICE_SAMSUNG_PRINTER = "samsung_printer"
SERVICE_TELLDUSLIVE = "tellstick"
SERVICE_YEELIGHT = "yeelight"
SERVICE_WEMO = "belkin_wemo"
SERVICE_WINK = "wink"
SERVICE_XIAOMI_GW = "xiaomi_gw"
CONFIG_ENTRY_HANDLERS = {
SERVICE_DAIKIN: "daikin",
SERVICE_TELLDUSLIVE: "tellduslive",
"logitech_mediaserver": "squeezebox",
}
SERVICE_HANDLERS = {
SERVICE_MOBILE_APP: ("mobile_app", None),
SERVICE_HASS_IOS_APP: ("ios", None),
SERVICE_NETGEAR: ("device_tracker", None),
SERVICE_HASSIO: ("hassio", None),
SERVICE_APPLE_TV: ("apple_tv", None),
SERVICE_ENIGMA2: ("media_player", "enigma2"),
SERVICE_WINK: ("wink", None),
SERVICE_SABNZBD: ("sabnzbd", None),
SERVICE_SAMSUNG_PRINTER: ("sensor", "syncthru"),
SERVICE_KONNECTED: ("konnected", None),
SERVICE_OCTOPRINT: ("octoprint", None),
SERVICE_FREEBOX: ("freebox", None),
SERVICE_YEELIGHT: ("yeelight", None),
"yamaha": ("media_player", "yamaha"),
"frontier_silicon": ("media_player", "frontier_silicon"),
"openhome": ("media_player", "openhome"),
"bose_soundtouch": ("media_player", "soundtouch"),
"bluesound": ("media_player", "bluesound"),
"kodi": ("media_player", "kodi"),
"volumio": ("media_player", "volumio"),
"lg_smart_device": ("media_player", "lg_soundbar"),
"nanoleaf_aurora": ("light", "nanoleaf"),
}
OPTIONAL_SERVICE_HANDLERS = {SERVICE_DLNA_DMR: ("media_player", "dlna_dmr")}
MIGRATED_SERVICE_HANDLERS = [
"axis",
"deconz",
"denonavr",
"esphome",
"google_cast",
SERVICE_HEOS,
"harmony",
"homekit",
"ikea_tradfri",
"philips_hue",
"sonos",
"songpal",
SERVICE_WEMO,
SERVICE_XIAOMI_GW,
]
DEFAULT_ENABLED = (
list(CONFIG_ENTRY_HANDLERS) + list(SERVICE_HANDLERS) + MIGRATED_SERVICE_HANDLERS
)
DEFAULT_DISABLED = list(OPTIONAL_SERVICE_HANDLERS) + MIGRATED_SERVICE_HANDLERS
CONF_IGNORE = "ignore"
CONF_ENABLE = "enable"
CONFIG_SCHEMA = vol.Schema(
{
vol.Optional(DOMAIN): vol.Schema(
{
vol.Optional(CONF_IGNORE, default=[]): vol.All(
cv.ensure_list, [vol.In(DEFAULT_ENABLED)]
),
vol.Optional(CONF_ENABLE, default=[]): vol.All(
cv.ensure_list, [vol.In(DEFAULT_DISABLED + DEFAULT_ENABLED)]
),
}
)
},
extra=vol.ALLOW_EXTRA,
)
async def async_setup(hass, config):
logger = logging.getLogger(__name__)
netdisco = NetworkDiscovery()
already_discovered = set()
logging.getLogger("zeroconf").setLevel(logging.CRITICAL)
if DOMAIN in config:
ignored_platforms = config[DOMAIN][CONF_IGNORE]
enabled_platforms = config[DOMAIN][CONF_ENABLE]
else:
ignored_platforms = []
enabled_platforms = []
for platform in enabled_platforms:
if platform in DEFAULT_ENABLED:
logger.warning(
"Please remove %s from your discovery.enable configuration "
"as it is now enabled by default",
platform,
)
async def new_service_found(service, info):
if service in MIGRATED_SERVICE_HANDLERS:
return
if service in ignored_platforms:
logger.info("Ignoring service: %s %s", service, info)
return
discovery_hash = json.dumps([service, info], sort_keys=True)
if discovery_hash in already_discovered:
logger.debug("Already discovered service %s %s.", service, info)
return
already_discovered.add(discovery_hash)
if service in CONFIG_ENTRY_HANDLERS:
await hass.config_entries.flow.async_init(
CONFIG_ENTRY_HANDLERS[service],
context={"source": config_entries.SOURCE_DISCOVERY},
data=info,
)
return
comp_plat = SERVICE_HANDLERS.get(service)
if not comp_plat and service in enabled_platforms:
comp_plat = OPTIONAL_SERVICE_HANDLERS[service]
if not comp_plat:
logger.info("Unknown service discovered: %s %s", service, info)
return
logger.info("Found new service: %s %s", service, info)
component, platform = comp_plat
if platform is None:
await async_discover(hass, service, info, component, config)
else:
await async_load_platform(hass, component, platform, info, config)
async def scan_devices(now):
try:
results = await hass.async_add_job(_discover, netdisco)
for result in results:
hass.async_create_task(new_service_found(*result))
except OSError:
logger.error("Network is unreachable")
async_track_point_in_utc_time(
hass, scan_devices, dt_util.utcnow() + SCAN_INTERVAL
)
@callback
def schedule_first(event):
async_track_point_in_utc_time(hass, scan_devices, dt_util.utcnow())
hass.bus.async_listen_once(EVENT_HOMEASSISTANT_START, schedule_first)
return True
def _discover(netdisco):
results = []
try:
netdisco.scan()
for disc in netdisco.discover():
for service in netdisco.get_info(disc):
results.append((disc, service))
finally:
netdisco.stop()
return results
| true | true |
1c2f81a36eb04fce59a539821083dbe39c1341bd | 18,007 | py | Python | anti_sybil/utils.py | BrightID/BrightID-Anti-Sybil | 8f31a73bae05a61b66ef0fd8a68bfb255127a943 | [
"0BSD"
] | 32 | 2019-01-10T17:44:54.000Z | 2022-03-17T21:15:07.000Z | anti_sybil/utils.py | BrightID/BrightID-Anti-Sybil | 8f31a73bae05a61b66ef0fd8a68bfb255127a943 | [
"0BSD"
] | 2 | 2019-01-25T07:54:21.000Z | 2021-02-06T09:01:24.000Z | anti_sybil/utils.py | BrightID/BrightID-Anti-Sybil | 8f31a73bae05a61b66ef0fd8a68bfb255127a943 | [
"0BSD"
] | 7 | 2019-11-04T01:55:35.000Z | 2022-01-22T17:03:59.000Z | from arango import ArangoClient
from bisect import bisect
import networkx as nx
import numpy as np
import zipfile
import tarfile
import requests
import json
import csv
import os
GRAPH_TEMPLATE = GRAPH_3D_TEMPLATE = COMPARE_GRAPH_TEMPLATE = None
BACKUP_URL = 'https://storage.googleapis.com/brightid-backups/brightid.tar.gz'
class Node:
    """A BrightID graph node (user) as consumed by the anti-sybil rankers."""
    def __init__(self, name, node_type, groups=None, init_rank=0, raw_rank=0, rank=None, created_at=None, verifications=None):
        self.name = name
        self.node_type = node_type
        # Final (possibly redistributed) score; None until a ranker runs.
        self.rank = rank
        # Group membership; usually a dict of group id -> 'Seed'/'NonSeed',
        # but some callers pass a set (see stupid_sybil_border). Falsy -> {}.
        self.groups = groups if groups else {}
        self.init_rank = init_rank
        self.raw_rank = raw_rank
        self.created_at = created_at
        self.verifications = verifications if verifications else []
    def __repr__(self):
        return 'Node: {}'.format(self.name)
def write_output_file(outputs, file_name):
    """Write algorithm outputs as a CSV comparison table.

    Columns are the algorithms (each dict's 'name'); rows are the metric
    titles taken from the first output dict.

    Args:
        outputs: list of result dicts from ``generate_output``; when empty
            nothing is written.
        file_name: destination CSV path; parent directories are created.
    """
    if not outputs:
        return
    dir_name = os.path.dirname(file_name)
    # Guard the dirname: makedirs('') raises when file_name has no directory.
    if dir_name and not os.path.exists(dir_name):
        os.makedirs(dir_name)
    rows = [['Results'] + [output['name'] for output in outputs]]
    for title in outputs[0]:
        if title != 'name':
            rows.append([title] + [output.get(title, '')
                                   for output in outputs])
    # newline='' is required by the csv module; without it Windows output
    # gets an extra blank line after every row.
    with open(file_name, 'w', newline='') as f:
        writer = csv.writer(f)
        writer.writerows(rows)
def find_border(graph):
    """Pick the score threshold that best separates honests from sybils.

    Returns the largest integer in [0, 99] maximizing the geometric mean of
    (#Honest/Seed nodes ranked above it) and (#sybil nodes ranked below it).
    """
    honest_types = ('Honest', 'Seed')
    sybil_types = ('Sybil', 'Non Bridge Sybil', 'Bridge Sybil')
    best_border = best_score = 0
    for candidate in range(100):
        above = sum(1 for node in graph
                    if node.node_type in honest_types and node.rank > candidate)
        below = sum(1 for node in graph
                    if node.node_type in sybil_types and node.rank < candidate)
        score = (above * below) ** .5
        # >= keeps the *last* candidate achieving the best score.
        if score >= best_score:
            best_score = score
            best_border = candidate
    return best_border
def calculate_successful_sybils(ranks_dic):
    """Measure how far the best sybil climbs among honest scores.

    Returns {'better_than_pct': share of honest scores <= the highest
    sybil score}. When sybils exist, zero/None honest scores are dropped
    and the honest list is sorted before the bisect.
    """
    sybil_types = ('Sybil', 'Non Bridge Sybil', 'Bridge Sybil')
    honest_types = ('Seed', 'Honest')
    sybils = [r for cat in ranks_dic if cat in sybil_types
              for r in ranks_dic[cat]]
    honests = [r for cat in ranks_dic if cat in honest_types
               for r in ranks_dic[cat]]
    if sybils:
        honests = sorted(h for h in honests if h)
    top_sybil = max(sybils) if sybils else 0
    return {'better_than_pct': bisect(honests, top_sybil) / len(honests)}
def calculate_successful_honest(ranks_dic):
    """Count honest/seed nodes that score above the average sybil score.

    Returns {'no': count, 'percent': count / #honests * 100}.
    """
    sybil_types = ('Sybil', 'Non Bridge Sybil', 'Bridge Sybil')
    honest_types = ('Seed', 'Honest')
    sybils = [r for cat, ranks in ranks_dic.items() if cat in sybil_types
              for r in ranks]
    honests = [r for cat, ranks in ranks_dic.items() if cat in honest_types
               for r in ranks]
    sybil_avg = sum(sybils) / len(sybils) if sybils else 0
    winners = sum(1 for h in honests if h > sybil_avg)
    return {'no': winners, 'percent': winners / len(honests) * 100}
def generate_output(graph, name=''):
    """Summarize ranking quality for a ranked graph.

    Builds the metrics dict consumed by ``write_output_file``: per-category
    min/avg/max ranks, honest/sybil success rates and the separation border.

    Args:
        graph: iterable of ranked nodes (rank None is treated as 0).
        name: label of the algorithm/run, stored under 'name'.
    """
    categories = set(node.node_type for node in graph)
    ranks_dic = {
        category: [node.rank if node.rank else 0
                   for node in graph if node.node_type == category]
        for category in categories
    }
    output = {'name': name}
    successful_sybils = calculate_successful_sybils(ranks_dic)
    successful_honests = calculate_successful_honest(ranks_dic)
    output['No. Successful Honests'] = successful_honests['no']
    output['Successful Honests Percent'] = successful_honests['percent']
    output['Sybils scored >= %'] = successful_sybils['better_than_pct']
    output['Avg Honest - Avg Sybil'] = None
    view_order = ('Seed', 'Honest', 'Attacker',
                  'Bridge Sybil', 'Non Bridge Sybil', 'Sybil')
    for category in view_order:
        if category not in categories:
            continue
        ranks = ranks_dic[category]
        for parameter in ('Max', 'Avg', 'Min'):
            if not ranks:
                v = '__'
            elif parameter == 'Min':
                v = min(ranks)
            elif parameter == 'Avg':
                v = sum(ranks) / len(ranks)
            else:  # 'Max'
                v = max(ranks)
            output['{0} {1}'.format(parameter, category)] = v
    # BUG FIX: this subtraction used to run inside the loop above, raising
    # KeyError on the first category (e.g. 'Seed') before 'Avg Honest' was
    # computed. It now runs once, after all averages exist.
    output['Avg Honest - Avg Sybil'] = output.get('Avg Honest', 0) - \
        output.get('Avg Sybil', output.get('Avg Bridge Sybil', 0))
    output['Border'] = find_border(graph)
    return output
def save_graph(file_name, graph):
    """Serialize ``graph`` with ``to_json`` and write it to ``file_name``."""
    serialized = to_json(graph)
    with open(file_name, 'w') as out:
        out.write(serialized)
def to_json(graph):
    """Serialize a node/edge graph to a JSON string.

    Each node keeps name/node_type/groups/rank plus an optional cluster id;
    each edge becomes a (from_name, to_name, weight) triple, weight
    defaulting to 1.
    """
    nodes = [{
        'name': node.name,
        'node_type': node.node_type,
        'groups': node.groups,
        'rank': node.rank,
        'cluster': node.clusters.get('graph', None) if hasattr(node, 'clusters') else None
    } for node in graph]
    edges = []
    for u, v in graph.edges:
        edges.append((u.name, v.name, graph[u][v].get('weight', 1)))
    return json.dumps({'nodes': nodes, 'edges': edges})
def load_graph(file_name):
    """Read a JSON graph file and rebuild it via ``from_json``."""
    with open(file_name) as handle:
        return from_json(handle.read())
def from_json(data):
    """Rebuild a networkx graph of ``Node`` objects from a JSON string.

    Inverse of ``to_json``. BUG FIX: keys that ``to_json`` does not emit
    (init_rank, created_at, verifications) now fall back to defaults via
    ``.get`` so a save_graph/load_graph round-trip no longer raises
    KeyError; dumps that do carry those keys behave exactly as before.
    """
    data = json.loads(data)
    graph = nx.Graph()
    nodes = {}
    for node in data['nodes']:
        groups = node['groups'] if node['groups'] else None
        nodes[node['name']] = Node(
            node['name'], node['node_type'], groups,
            node.get('init_rank', 0), 0, node.get('rank'),
            node.get('created_at'), node.get('verifications'))
        graph.add_node(nodes[node['name']])
    # Edge entries may carry a trailing weight (to_json emits triples);
    # only the two endpoint names are used here.
    graph.add_edges_from([(nodes[edge[0]], nodes[edge[1]])
                          for edge in data['edges']])
    return graph
def zip2dict(f, table):
    """Extract one arangodump collection from a zip archive into a dict.

    Locates the first ``*/<table>_*.data.json`` member, replays its
    insert (type 2300) and remove (type 2302) markers, and returns
    {key without the '<table>/' prefix: document}.
    """
    archive = zipfile.ZipFile(f)
    marker = '/{}_'.format(table)
    member = [name for name in archive.namelist()
              if name.endswith('.data.json') and marker in name][0]
    text = archive.open(member).read().decode('utf-8')
    docs = {}
    for line in text.split('\n'):
        if not line.strip():
            continue
        entry = json.loads(line)
        if entry['type'] == 2300:
            docs[entry['data']['_key']] = entry['data']
        elif entry['type'] == 2302 and entry['data']['_key'] in docs:
            del docs[entry['data']['_key']]
    prefix = table + '/'
    return {doc['_id'].replace(prefix, ''): doc for doc in docs.values()}
def from_dump(f):
    """Build the JSON graph string from an arangodump zip archive.

    Reads the users/groups/connections/verifications collections, marks
    seed-group members, and keeps only mutually confirmed connections
    ('already known' or 'recovery' in both directions) as edges.
    Returns the same JSON shape that ``from_json`` consumes.
    """
    user_groups = zip2dict(f, 'usersInGroups')
    users = zip2dict(f, 'users')
    groups = zip2dict(f, 'groups')
    connections = zip2dict(f, 'connections')
    verifications = zip2dict(f, 'verifications')
    ret = {'nodes': [], 'edges': []}
    # Replace each raw user document with the node dict used downstream.
    for u in users:
        users[u] = {'node_type': 'Honest', 'init_rank': 0, 'rank': 0, 'name': u,
                    'groups': {}, 'created_at': users[u]['createdAt'], 'verifications': []}
        ret['nodes'].append(users[u])
    for v in verifications.values():
        users[v['user']]['verifications'].append(v['name'])
    user_groups = [(
        user_group['_from'].replace('users/', ''),
        user_group['_to'].replace('groups/', '')
    ) for user_group in user_groups.values()]
    # First pass: collect the members of every seed group ...
    seed_groups_members = {}
    for u, g in user_groups:
        if groups[g].get('seed', False):
            if g not in seed_groups_members:
                seed_groups_members[g] = set()
            seed_groups_members[g].add(u)
    # ... second pass: label memberships and split each seed group's initial
    # rank evenly among its members.
    for u, g in user_groups:
        users[u]['groups'][g] = 'Seed' if g in seed_groups_members else 'NonSeed'
        if g in seed_groups_members:
            users[u]['node_type'] = 'Seed'
            users[u]['init_rank'] += 1 / len(seed_groups_members[g])
    # Cap the accumulated seed rank at 0.3 per user.
    for u in users:
        users[u]['init_rank'] = min(.3, users[u]['init_rank'])
    connections_dic = {}
    for c in connections.values():
        connections_dic[f"{c['_from']}_{c['_to']}"] = c['level']
    # NOTE(review): this loop rebinds `f`, shadowing the input file argument
    # (harmless here because zip2dict calls are done, but easy to trip on).
    for c in connections.values():
        f = c['_from'].replace('users/', '')
        t = c['_to'].replace('users/', '')
        from_to = connections_dic.get(f"{c['_from']}_{c['_to']}") in [
            'already known', 'recovery']
        to_from = connections_dic.get(f"{c['_to']}_{c['_from']}") in [
            'already known', 'recovery']
        if from_to and to_from and (t, f) not in ret['edges']:
            ret['edges'].append((f, t))
    # Stable sort: name ascending, then creation time descending.
    ret['nodes'] = sorted(ret['nodes'], key=lambda i: i['name'])
    ret['nodes'] = sorted(
        ret['nodes'], key=lambda i: i['created_at'], reverse=True)
    return json.dumps(ret)
def from_db(arango_server, db_name):
    """Build the JSON graph string directly from a live ArangoDB instance.

    Mirrors ``from_dump`` but queries the database: seed groups and their
    sizes, per-user verifications, seed-based init ranks, and mutually
    confirmed connections as edges.
    """
    db = ArangoClient(hosts=arango_server).db(db_name)
    # Map seed group key -> member count (used to split init rank evenly).
    seed_groups = {}
    for seed_group in db['groups'].find({'seed': True}):
        c = db['usersInGroups'].find({'_to': seed_group['_id']})
        seed_groups[seed_group['_key']] = c.count()
    nodes = {}
    for u in db['users']:
        verifications = [v['name']
                         for v in db['verifications'].find({'user': u['_key']})]
        nodes[u['_key']] = {'node_type': 'Honest', 'init_rank': 0, 'rank': 0,
                            'name': u['_key'], 'groups': {}, 'created_at': u['createdAt'], 'verifications': verifications}
    for ug in db['usersInGroups']:
        u = ug['_from'].replace('users/', '')
        g = ug['_to'].replace('groups/', '')
        # Skip memberships referencing users absent from the users collection.
        if u not in nodes:
            continue
        nodes[u]['groups'][g] = 'Seed' if g in seed_groups else 'NonSeed'
        if g in seed_groups:
            nodes[u]['node_type'] = 'Seed'
            nodes[u]['init_rank'] += 1 / seed_groups[g]
    # Cap the accumulated seed rank at 0.3 per user.
    for n in nodes:
        nodes[n]['init_rank'] = min(.3, nodes[n]['init_rank'])
    ret = {'edges': []}
    connections = {f"{c['_from']}_{c['_to']}": c['level']
                   for c in db['connections']}
    # Keep an edge only when both directions are 'already known'/'recovery'.
    for c in db['connections']:
        f = c['_from'].replace('users/', '')
        t = c['_to'].replace('users/', '')
        from_to = connections.get(
            f"{c['_from']}_{c['_to']}") in ['already known', 'recovery']
        to_from = connections.get(
            f"{c['_to']}_{c['_from']}") in ['already known', 'recovery']
        if from_to and to_from and (t, f) not in ret['edges']:
            ret['edges'].append((f, t))
    ret['nodes'] = nodes.values()
    # Stable sort: name ascending, then creation time descending.
    ret['nodes'] = sorted(ret['nodes'], key=lambda i: i['name'])
    ret['nodes'] = sorted(
        ret['nodes'], key=lambda i: i['created_at'], reverse=True)
    return json.dumps(ret)
def draw_graph(graph, file_name):
    """Render ``graph`` into the HTML visualization template.

    Lazily caches templates/graph.html in the module-level GRAPH_TEMPLATE,
    substitutes the serialized graph for the JSON_GRAPH placeholder, writes
    the page to ``file_name`` (creating parent dirs) and returns the HTML.
    """
    global GRAPH_TEMPLATE
    if not GRAPH_TEMPLATE:
        template_dir = os.path.dirname(os.path.abspath(__file__))
        with open(os.path.join(template_dir, 'templates/graph.html')) as tpl:
            GRAPH_TEMPLATE = tpl.read()
    out_dir = os.path.dirname(file_name)
    if out_dir and not os.path.exists(out_dir):
        os.makedirs(out_dir)
    html = GRAPH_TEMPLATE.replace('JSON_GRAPH', to_json(graph))
    with open(file_name, 'w') as out:
        out.write(html)
    return html
def draw_compare_graph(graph1, graph2, file_name):
    """Render two rankings of the same node set into one comparison page.

    WARNING: mutates ``graph1`` in place -- each node's rank becomes the
    string 'rank1-rank2'. Writes the HTML to ``file_name`` and returns it.
    """
    global COMPARE_GRAPH_TEMPLATE
    # Lazily cache the HTML template at module level.
    if not COMPARE_GRAPH_TEMPLATE:
        abspath = os.path.abspath(__file__)
        dname = os.path.dirname(abspath)
        with open(os.path.join(dname, 'templates/compare_graph.html')) as f:
            COMPARE_GRAPH_TEMPLATE = f.read()
    dname = os.path.dirname(file_name)
    if dname and not os.path.exists(dname):
        os.makedirs(dname)
    # Pair each node with its counterpart in graph2 by name; raises
    # StopIteration if a name is missing from graph2.
    for node in graph1.nodes:
        node2 = next(filter(lambda n: n.name == node.name, graph2.nodes))
        node.rank = '{0}-{1}'.format(int(node.rank), int(node2.rank))
    graph_json = to_json(graph1)
    edited_string = COMPARE_GRAPH_TEMPLATE.replace('JSON_GRAPH', graph_json)
    with open(file_name, 'w') as output_file:
        output_file.write(edited_string)
    return edited_string
def draw_3d_graph(attacks, algorithms, file_name):
    """Render the 3D attacks/algorithms comparison page.

    Caches templates/graph3d.html in the module-level GRAPH_3D_TEMPLATE,
    injects the attacks data and algorithm list into their placeholders,
    writes the page to ``file_name`` (creating parent dirs) and returns
    the HTML.
    """
    global GRAPH_3D_TEMPLATE
    if not GRAPH_3D_TEMPLATE:
        template_dir = os.path.dirname(os.path.abspath(__file__))
        with open(os.path.join(template_dir, 'templates/graph3d.html')) as tpl:
            GRAPH_3D_TEMPLATE = tpl.read()
    out_dir = os.path.dirname(file_name)
    if out_dir and not os.path.exists(out_dir):
        os.makedirs(out_dir)
    html = GRAPH_3D_TEMPLATE.replace('JSON_GRAPH', json.dumps(attacks))
    html = html.replace('ALGORITHMS', json.dumps(algorithms))
    with open(file_name, 'w') as out:
        out.write(html)
    return html
def reset_ranks(graph):
    """Zero out the rank of every node in ``graph`` (in place)."""
    for member in graph:
        member.rank = 0
def tar_to_zip(fin, fout):
    """Repack a gzipped tarball as a zip archive.

    Any existing ``fout`` is replaced first. Members without an extractable
    file object (e.g. directories) are skipped.

    Args:
        fin: path to the source .tar.gz file.
        fout: path of the zip file to create.
    """
    if os.path.exists(fout):
        os.remove(fout)
    # Context managers guarantee both archives are closed even when an
    # exception occurs mid-copy (the original leaked both handles).
    with tarfile.open(fin, mode='r|gz') as tarf, \
            zipfile.ZipFile(fout, mode='a',
                            compression=zipfile.ZIP_DEFLATED) as zipf:
        for m in tarf:
            f = tarf.extractfile(m)
            if f:
                zipf.writestr(m.name, f.read())
def load_brightid_graph(data):
    """Download the latest BrightID backup and build its graph.

    Fetches BACKUP_URL into data['file_path'], converts the tarball to a
    zip (``zip2dict`` needs random access), parses it with ``from_dump``
    and returns the resulting networkx graph. Requires network access.
    """
    if not os.path.exists(data['file_path']):
        os.makedirs(data['file_path'])
    rar_addr = os.path.join(data['file_path'], 'brightid.tar.gz')
    zip_addr = os.path.join(data['file_path'], 'brightid.zip')
    backup = requests.get(BACKUP_URL)
    with open(rar_addr, 'wb') as f:
        f.write(backup.content)
    tar_to_zip(rar_addr, zip_addr)
    json_graph = from_dump(zip_addr)
    graph = from_json(json_graph)
    return graph
def stupid_sybil_border(graph):
    """Estimate the score a trivial sybil attack can reach on this graph.

    For each attacker candidate (best-ranked first), attach two fresh
    sybils via a throwaway group, re-rank, and return the higher of the
    two sybils' raw ranks. The graph is restored (sybils removed, ranks
    reset) before returning. Returns the first non-zero border found;
    implicitly returns None if every candidate yields a falsy border.
    """
    from . import algorithms
    border = 0
    reset_ranks(graph)
    ranker = algorithms.GroupSybilRank(graph)
    ranker.rank()
    # Try the strongest (highest-ranked) nodes as attackers first.
    attackers = sorted(graph.nodes, key=lambda n: n.rank, reverse=True)
    for attacker in attackers:
        attacker.groups['stupid_sybil'] = 'NonSeed'
        sybil1 = Node('stupid_sybil_1', 'Sybil', set(['stupid_sybil']))
        sybil2 = Node('stupid_sybil_2', 'Sybil', set(['stupid_sybil']))
        graph.add_edge(attacker, sybil1)
        graph.add_edge(attacker, sybil2)
        reset_ranks(graph)
        ranker = algorithms.GroupSybilRank(graph)
        ranker.rank()
        border = max(sybil1.raw_rank, sybil2.raw_rank)
        # Undo the attack before returning or trying the next attacker.
        graph.remove_nodes_from([sybil1, sybil2])
        del attacker.groups['stupid_sybil']
        reset_ranks(graph)
        print('attacker: {}\t type: {}\t border: {}'.format(
            attacker, attacker.node_type, border))
        if border:
            return border
def nonlinear_distribution(graph, ratio, df, dt):
    """Piecewise-linearly redistribute node ranks onto a 0-100 scale.

    Ranks are first scaled by a power of ten derived from their average
    exponent, then split into three bands around the trimmed mean: below
    the band -> [0, df), inside -> [df, dt), above -> [dt, 100]. Mutates
    each node's rank in place and returns the graph.

    Args:
        graph: iterable of nodes with numeric ``rank`` attributes.
        ratio: fraction of values the central band must contain.
        df, dt: output boundaries of the central band.
    """
    ranks = [(n, n.rank) for n in graph]
    # Average base-10 exponent of the ranks (via scientific notation).
    avg_floating_points = sum(
        [int(('%E' % rank[1]).split('E')[1]) for rank in ranks]) / len(ranks)
    multiplier = 10 ** (-1 * (avg_floating_points - 1))
    nums = [rank[1] * multiplier for rank in ranks]
    counts = {}
    for num in nums:
        counts[int(num)] = counts.get(int(num), 0) + 1
    # Trimmed mean: drop the lowest and highest 10% before averaging.
    f = int(len(nums) / 10)
    t = int(-1 * len(nums) / 10)
    navg = sum(sorted(nums)[f:t]) / (.8 * len(nums))
    navg = int(navg)
    max_num = max(nums)
    # Grow a window around the trimmed mean until it covers `ratio` of
    # the values.
    distance = 0
    while True:
        distance += 1
        count = sum([counts.get(i, 0)
                     for i in range(navg - distance, navg + distance)])
        if count > len(nums) * ratio:
            break
    # NOTE: f and t are reused here as the window bounds (they previously
    # held the trim indices above).
    f, t = navg - distance, navg + distance
    ret = []
    for num in nums:
        if 0 <= num < f:
            num = num * df / f
        elif f <= num < t:
            num = df + (((num - f) / (t - f)) * (dt - df))
        else:
            num = dt + (((num - t) / (max_num - t)) * (100 - dt))
        ret.append(round(num, 2))
    # Write the redistributed scores back onto the nodes.
    for i, r in enumerate(ranks):
        r[0].rank = ret[i]
    return graph
def linear_distribution(graph):
    """Rescale every node's rank linearly onto [0, 100] (in place).

    Returns the graph. Raises ZeroDivisionError when all ranks are equal.
    """
    values = [member.rank for member in graph]
    lo, hi = min(values), max(values)
    span = hi - lo
    for member in graph:
        member.rank = int((member.rank - lo) * 100 / span)
    return graph
def border_based_distribution(graph, border):
    """Squash ranks around ``border``: below -> [0, 9.99), rest -> [90, ~100).

    Mutates each node's rank (rounded to 2 decimals) and returns the graph.
    """
    originals = [(member, member.rank) for member in graph]
    top = max(rank for _, rank in originals)
    for member, rank in originals:
        if rank < border:
            scaled = 9.99 * rank / border
        else:
            scaled = 90 + 9.99 * (rank - border) / (top - border)
        member.rank = round(scaled, 2)
    return graph
def z_score_distribution(ranks):
    """Rescale (name, rank) pairs to [0, 100], capping z >= 3 outliers at 100.

    BUG FIX: the original passed the (name, rank) tuples to
    ``linear_distribution``, which expects node objects with a ``.rank``
    attribute and therefore raised AttributeError. The linear rescaling of
    the non-outlier ranks is now done inline (same int truncation).

    Args:
        ranks: list of (name, numeric rank) pairs.
    Returns:
        list of (name, int rank in [0, 100]) pairs in the original order.
    """
    values = [rank for _, rank in ranks]
    _mean = np.mean(values)
    _std = np.std(values)
    z_scores = {name: (rank - _mean) / _std for name, rank in ranks}
    inliers = [(name, rank) for name, rank in ranks if z_scores[name] < 3]
    lo = min(rank for _, rank in inliers)
    hi = max(rank for _, rank in inliers)
    span = hi - lo
    temp = {name: int((rank - lo) * 100 / span) for name, rank in inliers}
    # Outliers (z >= 3) are not rescaled; they get the maximum score.
    return [(name, temp.get(name, 100)) for name, rank in ranks]
| 37.592902 | 126 | 0.596935 | from arango import ArangoClient
from bisect import bisect
import networkx as nx
import numpy as np
import zipfile
import tarfile
import requests
import json
import csv
import os
GRAPH_TEMPLATE = GRAPH_3D_TEMPLATE = COMPARE_GRAPH_TEMPLATE = None
BACKUP_URL = 'https://storage.googleapis.com/brightid-backups/brightid.tar.gz'
class Node:
def __init__(self, name, node_type, groups=None, init_rank=0, raw_rank=0, rank=None, created_at=None, verifications=None):
self.name = name
self.node_type = node_type
self.rank = rank
self.groups = groups if groups else {}
self.init_rank = init_rank
self.raw_rank = raw_rank
self.created_at = created_at
self.verifications = verifications if verifications else []
def __repr__(self):
return 'Node: {}'.format(self.name)
def write_output_file(outputs, file_name):
if len(outputs) == 0:
return
if not os.path.exists(os.path.dirname(file_name)):
os.makedirs(os.path.dirname(file_name))
rows = [['Results'] + [output['name'] for output in outputs]]
for title in outputs[0]:
if title != 'name':
rows.append([title] + [output.get(title, '')
for output in outputs])
with open(file_name, 'w') as f:
writer = csv.writer(f)
for row in rows:
writer.writerow(row)
def find_border(graph):
best_border = best_score = 0
for i in range(100):
honest_score = len([node for node in graph if node.node_type in (
'Honest', 'Seed') and node.rank > i])
sybil_score = len([node for node in graph if node.node_type in (
'Sybil', 'Non Bridge Sybil', 'Bridge Sybil') and node.rank < i])
score = (honest_score * sybil_score)**.5
if score >= best_score:
best_border = i
best_score = score
return best_border
def calculate_successful_sybils(ranks_dic):
honests = []
sybils = []
attackers = []
result = {}
for category in ranks_dic:
if category in ['Sybil', 'Non Bridge Sybil', 'Bridge Sybil']:
sybils.extend(ranks_dic[category])
elif category in ['Seed', 'Honest']:
honests.extend(ranks_dic[category])
elif category == 'Attacker':
attackers.extend(ranks_dic[category])
if sybils:
honests = [h for h in honests if h]
honests.sort()
result['better_than_pct'] = bisect(
honests, max(sybils) if sybils else 0) / len(honests)
return result
def calculate_successful_honest(ranks_dic):
honests = []
sybils = []
result = {}
for category in ranks_dic:
if category in ['Sybil', 'Non Bridge Sybil', 'Bridge Sybil']:
sybils.extend(ranks_dic[category])
elif category in ['Seed', 'Honest']:
honests.extend(ranks_dic[category])
avg_sybils = sum(sybils) / len(sybils) if sybils else 0
successful_honest = len([h for h in honests if h > avg_sybils])
result['no'] = successful_honest
result['percent'] = successful_honest / len(honests) * 100
return result
def generate_output(graph, name=''):
categories = set([node.node_type for node in graph])
ranks_dic = {}
for category in categories:
ranks_dic[category] = [
node.rank if node.rank else 0 for node in graph if node.node_type == category]
output = {}
output['name'] = name
successful_sybils = calculate_successful_sybils(ranks_dic)
successful_honests = calculate_successful_honest(ranks_dic)
output['No. Successful Honests'] = successful_honests['no']
output['Successful Honests Percent'] = successful_honests['percent']
output['Sybils scored >= %'] = successful_sybils['better_than_pct']
output['Avg Honest - Avg Sybil'] = None
view_order = ('Seed', 'Honest', 'Attacker',
'Bridge Sybil', 'Non Bridge Sybil', 'Sybil')
for category in view_order:
if category not in categories:
continue
for parameter in ['Max', 'Avg', 'Min']:
if len(ranks_dic[category]) == 0:
v = '__'
elif parameter == 'Min':
v = min(ranks_dic[category])
elif parameter == 'Avg':
v = sum(ranks_dic[category]) / len(ranks_dic[category])
elif parameter == 'Max':
v = max(ranks_dic[category])
output['{0} {1}'.format(parameter, category)] = v
output['Avg Honest - Avg Sybil'] = output['Avg Honest'] - \
output.get('Avg Sybil', output.get('Avg Bridge Sybil', 0))
output['Border'] = find_border(graph)
return output
def save_graph(file_name, graph):
with open(file_name, 'w') as f:
f.write(to_json(graph))
def to_json(graph):
data = {'nodes': [], 'edges': []}
for node in graph:
data['nodes'].append({
'name': node.name,
'node_type': node.node_type,
'groups': node.groups,
'rank': node.rank,
'cluster': node.clusters.get('graph', None) if hasattr(node, 'clusters') else None
})
for edge in graph.edges:
weight = graph[edge[0]][edge[1]].get('weight', 1)
data['edges'].append((edge[0].name, edge[1].name, weight))
return json.dumps(data)
def load_graph(file_name):
with open(file_name, 'r') as f:
data = f.read()
return from_json(data)
def from_json(data):
data = json.loads(data)
graph = nx.Graph()
nodes = {}
for node in data['nodes']:
groups = node['groups'] if node['groups'] else None
nodes[node['name']] = Node(node['name'], node['node_type'],
groups, node['init_rank'], 0, node['rank'], node['created_at'], node['verifications'])
graph.add_node(nodes[node['name']])
graph.add_edges_from([(nodes[edge[0]], nodes[edge[1]])
for edge in data['edges']])
return graph
def zip2dict(f, table):
zf = zipfile.ZipFile(f)
fnames = zf.namelist()
def pattern(fname): return fname.endswith(
'.data.json') and fname.count('/{}_'.format(table)) > 0
fname = list(filter(pattern, fnames))[0]
content = zf.open(fname).read().decode('utf-8')
ol = [json.loads(line) for line in content.split('\n') if line.strip()]
d = {}
for o in ol:
if o['type'] == 2300:
d[o['data']['_key']] = o['data']
elif o['type'] == 2302 and o['data']['_key'] in d:
del d[o['data']['_key']]
return dict((d[k]['_id'].replace(table + '/', ''), d[k]) for k in d)
def from_dump(f):
user_groups = zip2dict(f, 'usersInGroups')
users = zip2dict(f, 'users')
groups = zip2dict(f, 'groups')
connections = zip2dict(f, 'connections')
verifications = zip2dict(f, 'verifications')
ret = {'nodes': [], 'edges': []}
for u in users:
users[u] = {'node_type': 'Honest', 'init_rank': 0, 'rank': 0, 'name': u,
'groups': {}, 'created_at': users[u]['createdAt'], 'verifications': []}
ret['nodes'].append(users[u])
for v in verifications.values():
users[v['user']]['verifications'].append(v['name'])
user_groups = [(
user_group['_from'].replace('users/', ''),
user_group['_to'].replace('groups/', '')
) for user_group in user_groups.values()]
seed_groups_members = {}
for u, g in user_groups:
if groups[g].get('seed', False):
if g not in seed_groups_members:
seed_groups_members[g] = set()
seed_groups_members[g].add(u)
for u, g in user_groups:
users[u]['groups'][g] = 'Seed' if g in seed_groups_members else 'NonSeed'
if g in seed_groups_members:
users[u]['node_type'] = 'Seed'
users[u]['init_rank'] += 1 / len(seed_groups_members[g])
for u in users:
users[u]['init_rank'] = min(.3, users[u]['init_rank'])
connections_dic = {}
for c in connections.values():
connections_dic[f"{c['_from']}_{c['_to']}"] = c['level']
for c in connections.values():
f = c['_from'].replace('users/', '')
t = c['_to'].replace('users/', '')
from_to = connections_dic.get(f"{c['_from']}_{c['_to']}") in [
'already known', 'recovery']
to_from = connections_dic.get(f"{c['_to']}_{c['_from']}") in [
'already known', 'recovery']
if from_to and to_from and (t, f) not in ret['edges']:
ret['edges'].append((f, t))
ret['nodes'] = sorted(ret['nodes'], key=lambda i: i['name'])
ret['nodes'] = sorted(
ret['nodes'], key=lambda i: i['created_at'], reverse=True)
return json.dumps(ret)
def from_db(arango_server, db_name):
db = ArangoClient(hosts=arango_server).db(db_name)
seed_groups = {}
for seed_group in db['groups'].find({'seed': True}):
c = db['usersInGroups'].find({'_to': seed_group['_id']})
seed_groups[seed_group['_key']] = c.count()
nodes = {}
for u in db['users']:
verifications = [v['name']
for v in db['verifications'].find({'user': u['_key']})]
nodes[u['_key']] = {'node_type': 'Honest', 'init_rank': 0, 'rank': 0,
'name': u['_key'], 'groups': {}, 'created_at': u['createdAt'], 'verifications': verifications}
for ug in db['usersInGroups']:
u = ug['_from'].replace('users/', '')
g = ug['_to'].replace('groups/', '')
if u not in nodes:
continue
nodes[u]['groups'][g] = 'Seed' if g in seed_groups else 'NonSeed'
if g in seed_groups:
nodes[u]['node_type'] = 'Seed'
nodes[u]['init_rank'] += 1 / seed_groups[g]
for n in nodes:
nodes[n]['init_rank'] = min(.3, nodes[n]['init_rank'])
ret = {'edges': []}
connections = {f"{c['_from']}_{c['_to']}": c['level']
for c in db['connections']}
for c in db['connections']:
f = c['_from'].replace('users/', '')
t = c['_to'].replace('users/', '')
from_to = connections.get(
f"{c['_from']}_{c['_to']}") in ['already known', 'recovery']
to_from = connections.get(
f"{c['_to']}_{c['_from']}") in ['already known', 'recovery']
if from_to and to_from and (t, f) not in ret['edges']:
ret['edges'].append((f, t))
ret['nodes'] = nodes.values()
ret['nodes'] = sorted(ret['nodes'], key=lambda i: i['name'])
ret['nodes'] = sorted(
ret['nodes'], key=lambda i: i['created_at'], reverse=True)
return json.dumps(ret)
def draw_graph(graph, file_name):
global GRAPH_TEMPLATE
if not GRAPH_TEMPLATE:
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
with open(os.path.join(dname, 'templates/graph.html')) as f:
GRAPH_TEMPLATE = f.read()
dname = os.path.dirname(file_name)
if dname and not os.path.exists(dname):
os.makedirs(dname)
json_dic = to_json(graph)
edited_string = GRAPH_TEMPLATE.replace('JSON_GRAPH', json_dic)
with open(file_name, 'w') as output_file:
output_file.write(edited_string)
return edited_string
def draw_compare_graph(graph1, graph2, file_name):
global COMPARE_GRAPH_TEMPLATE
if not COMPARE_GRAPH_TEMPLATE:
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
with open(os.path.join(dname, 'templates/compare_graph.html')) as f:
COMPARE_GRAPH_TEMPLATE = f.read()
dname = os.path.dirname(file_name)
if dname and not os.path.exists(dname):
os.makedirs(dname)
for node in graph1.nodes:
node2 = next(filter(lambda n: n.name == node.name, graph2.nodes))
node.rank = '{0}-{1}'.format(int(node.rank), int(node2.rank))
graph_json = to_json(graph1)
edited_string = COMPARE_GRAPH_TEMPLATE.replace('JSON_GRAPH', graph_json)
with open(file_name, 'w') as output_file:
output_file.write(edited_string)
return edited_string
def draw_3d_graph(attacks, algorithms, file_name):
global GRAPH_3D_TEMPLATE
if not GRAPH_3D_TEMPLATE:
abspath = os.path.abspath(__file__)
dname = os.path.dirname(abspath)
with open(os.path.join(dname, 'templates/graph3d.html')) as f:
GRAPH_3D_TEMPLATE = f.read()
dname = os.path.dirname(file_name)
if dname and not os.path.exists(dname):
os.makedirs(dname)
edited_string = GRAPH_3D_TEMPLATE.replace(
'JSON_GRAPH', json.dumps(attacks)).replace('ALGORITHMS', json.dumps(algorithms))
with open(file_name, 'w') as output_file:
output_file.write(edited_string)
return edited_string
def reset_ranks(graph):
for node in graph:
node.rank = 0
def tar_to_zip(fin, fout):
if os.path.exists(fout):
os.remove(fout)
tarf = tarfile.open(fin, mode='r|gz')
zipf = zipfile.ZipFile(fout, mode='a', compression=zipfile.ZIP_DEFLATED)
for m in tarf:
f = tarf.extractfile(m)
if f:
zipf.writestr(m.name, f.read())
tarf.close()
zipf.close()
def load_brightid_graph(data):
    """Download the latest BrightID backup and build a graph object from it.

    ``data['file_path']`` is created if missing; the gzipped backup is saved
    there, repacked as a zip, parsed with ``from_dump`` and turned into a
    graph via ``from_json``.
    """
    dump_dir = data['file_path']
    if not os.path.exists(dump_dir):
        os.makedirs(dump_dir)
    tar_path = os.path.join(dump_dir, 'brightid.tar.gz')
    zip_path = os.path.join(dump_dir, 'brightid.zip')
    # Fetch the backup and persist it before repacking.
    backup = requests.get(BACKUP_URL)
    with open(tar_path, 'wb') as dump_file:
        dump_file.write(backup.content)
    tar_to_zip(tar_path, zip_path)
    return from_json(from_dump(zip_path))
def stupid_sybil_border(graph):
    """Estimate a rank cut-off by simulating a naive ("stupid") sybil attack.

    The highest-ranked nodes are tried as attackers one at a time: two fake
    sybil nodes are attached to the attacker, the graph is re-ranked, and the
    larger of the two sybils' raw ranks becomes the candidate border.  The
    temporary nodes and group are removed before the next attempt.  Returns
    the first non-zero border found; implicitly returns ``None`` if every
    attempt yields 0.
    """
    from . import algorithms
    border = 0
    reset_ranks(graph)
    ranker = algorithms.GroupSybilRank(graph)
    ranker.rank()
    # Highest-ranked nodes first: they make the strongest attackers.
    attackers = sorted(graph.nodes, key=lambda n: n.rank, reverse=True)
    for attacker in attackers:
        attacker.groups['stupid_sybil'] = 'NonSeed'
        sybil1 = Node('stupid_sybil_1', 'Sybil', set(['stupid_sybil']))
        sybil2 = Node('stupid_sybil_2', 'Sybil', set(['stupid_sybil']))
        graph.add_edge(attacker, sybil1)
        graph.add_edge(attacker, sybil2)
        reset_ranks(graph)
        ranker = algorithms.GroupSybilRank(graph)
        ranker.rank()
        # The best score a fake node achieved is the cut-off candidate.
        border = max(sybil1.raw_rank, sybil2.raw_rank)
        # Undo the simulated attack before returning or retrying.
        graph.remove_nodes_from([sybil1, sybil2])
        del attacker.groups['stupid_sybil']
        reset_ranks(graph)
        print('attacker: {}\t type: {}\t border: {}'.format(
            attacker, attacker.node_type, border))
        if border:
            return border
def nonlinear_distribution(graph, ratio, df, dt):
    """Rescale node ranks in place, piecewise-linearly, onto roughly 0-100.

    The densest band of ranks -- the smallest integer window around the
    trimmed mean that covers more than ``ratio`` of the nodes -- is mapped
    onto ``[df, dt)``; values below it map into ``[0, df)`` and values above
    it into ``[dt, 100]``.  Ranks are rounded to 2 decimals.  Returns the
    (mutated) graph.
    """
    ranks = [(n, n.rank) for n in graph]
    # Normalize the order of magnitude: extract the base-10 exponent of each
    # rank from its scientific notation and shift so typical values sit
    # around tens.
    avg_floating_points = sum(
        [int(('%E' % rank[1]).split('E')[1]) for rank in ranks]) / len(ranks)
    multiplier = 10 ** (-1 * (avg_floating_points - 1))
    nums = [rank[1] * multiplier for rank in ranks]
    # Histogram of the scaled ranks, bucketed by integer part.
    counts = {}
    for num in nums:
        counts[int(num)] = counts.get(int(num), 0) + 1
    # Trimmed mean: drop the lowest and highest 10% before averaging.
    f = int(len(nums) / 10)
    t = int(-1 * len(nums) / 10)
    navg = sum(sorted(nums)[f:t]) / (.8 * len(nums))
    navg = int(navg)
    max_num = max(nums)
    # Grow a symmetric window around the trimmed mean until it covers more
    # than `ratio` of all nodes.
    distance = 0
    while True:
        distance += 1
        count = sum([counts.get(i, 0)
                     for i in range(navg - distance, navg + distance)])
        if count > len(nums) * ratio:
            break
    # Reuse f/t as the window bounds for the piecewise remap below.
    f, t = navg - distance, navg + distance
    ret = []
    for num in nums:
        if 0 <= num < f:
            num = num * df / f
        elif f <= num < t:
            num = df + (((num - f) / (t - f)) * (dt - df))
        else:
            num = dt + (((num - t) / (max_num - t)) * (100 - dt))
        ret.append(round(num, 2))
    # Write the remapped values back onto the node objects.
    for i, r in enumerate(ranks):
        r[0].rank = ret[i]
    return graph
def linear_distribution(graph):
    """Linearly rescale node ranks in place onto integers in 0..100.

    The minimum rank maps to 0 and the maximum to 100; intermediate values
    are truncated to int.  Returns the (mutated) graph.
    """
    all_ranks = [node.rank for node in graph]
    lowest = min(all_ranks)
    span = max(all_ranks) - lowest
    for node in graph:
        node.rank = int((node.rank - lowest) * 100 / span)
    return graph
def border_based_distribution(graph, border):
    """Remap node ranks in place with ``border`` pinned at the 90 mark.

    Ranks below the border spread over [0, 9.99); ranks at or above it spread
    over [90, 99.99].  Values are rounded to 2 decimals.  Returns the
    (mutated) graph.
    """
    snapshot = [(node, node.rank) for node in graph]
    top = max(value for _, value in snapshot)
    for node, value in snapshot:
        if value < border:
            scaled = 9.99 * value / border
        else:
            scaled = 90 + 9.99 * (value - border) / (top - border)
        node.rank = round(scaled, 2)
    return graph
def z_score_distribution(ranks):
    """Pin extreme outliers to 100 and redistribute the remaining ranks.

    ``ranks`` is a list of ``(node, rank)`` pairs.  Entries with a z-score
    below 3 are re-spread via ``linear_distribution``; outliers keep a fixed
    score of 100.  Returns a new list of ``(node, rank)`` pairs.

    NOTE(review): ``linear_distribution`` in this module expects an iterable
    of node objects (it reads ``n.rank``) and returns the same iterable, yet
    here it is handed ``(node, rank)`` tuples and its result is fed to
    ``dict()``.  Presumably a different ``linear_distribution`` overload is
    intended -- confirm before relying on this function.
    """
    _mean = np.mean([r[1] for r in ranks])
    _std = np.std([r[1] for r in ranks])
    z_scores = {r[0]: (r[1] - _mean) / _std for r in ranks}
    temp = dict(linear_distribution(
        [r for i, r in enumerate(ranks) if z_scores[r[0]] < 3]))
    # Nodes missing from `temp` are the clipped outliers.
    new_ranks = [(r[0], temp.get(r[0], 100)) for r in ranks]
    return new_ranks
| true | true |
1c2f81b03678af7869adfd7799bca16693d3ecce | 1,317 | py | Python | keystone/common/policies/token_revocation.py | rajivmucheli/keystone | d55099d4a17e3672d478aae8c367bcdf9af15fb9 | [
"Apache-2.0"
] | null | null | null | keystone/common/policies/token_revocation.py | rajivmucheli/keystone | d55099d4a17e3672d478aae8c367bcdf9af15fb9 | [
"Apache-2.0"
] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z | keystone/common/policies/token_revocation.py | rajivmucheli/keystone | d55099d4a17e3672d478aae8c367bcdf9af15fb9 | [
"Apache-2.0"
] | 5 | 2019-06-06T15:11:37.000Z | 2021-06-07T08:23:23.000Z | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from keystone.common.policies import base
# Policy rules for the legacy PKI token revocation-list API.
token_revocation_policies = [
    policy.DocumentedRuleDefault(
        name=base.IDENTITY % 'revocation_list',
        check_str=base.RULE_SERVICE_OR_ADMIN,
        # NOTE(lbragstad): Documenting scope_types here doesn't really make a
        # difference since this API is going to return an empty list regardless
        # of the token scope used in the API call. More-or-less just doing this
        # for consistency with other policies.
        scope_types=['system', 'project'],
        description='List revoked PKI tokens.',
        operations=[{'path': '/v3/auth/tokens/OS-PKI/revoked',
                     'method': 'GET'}])
]
def list_rules():
    """Return the policy rules defined by this module."""
    return token_revocation_policies
| 38.735294 | 79 | 0.714503 |
from oslo_policy import policy
from keystone.common.policies import base
token_revocation_policies = [
policy.DocumentedRuleDefault(
name=base.IDENTITY % 'revocation_list',
check_str=base.RULE_SERVICE_OR_ADMIN,
# difference since this API is going to return an empty list regardless
# of the token scope used in the API call. More-or-less just doing this
# for consistency with other policies.
scope_types=['system', 'project'],
description='List revoked PKI tokens.',
operations=[{'path': '/v3/auth/tokens/OS-PKI/revoked',
'method': 'GET'}])
]
def list_rules():
return token_revocation_policies
| true | true |
1c2f82336bf13e363d3cfc4c3ec671a414e336c0 | 1,570 | py | Python | sagemaker-dash/tutorials/app2.py | philippe-heitzmann/python-apps | 1cc6e5e9b9ac81c81a3d4f0e420ff488fe6b2f0a | [
"MIT"
] | 13 | 2021-05-23T15:47:24.000Z | 2022-03-24T16:22:14.000Z | sagemaker-dash/tutorials/app2.py | philippe-heitzmann/python-apps | 1cc6e5e9b9ac81c81a3d4f0e420ff488fe6b2f0a | [
"MIT"
] | 4 | 2021-11-16T20:44:55.000Z | 2022-01-13T19:13:38.000Z | sagemaker-dash/tutorials/app2.py | philippe-heitzmann/python-apps | 1cc6e5e9b9ac81c81a3d4f0e420ff488fe6b2f0a | [
"MIT"
] | 11 | 2021-01-31T06:18:10.000Z | 2021-11-21T00:02:05.000Z | # -*- coding: utf-8 -*-
# Run this app with `python app.py` and
# visit http://127.0.0.1:8050/ in your web browser.
import dash
import dash_core_components as dcc
import dash_html_components as html
import plotly.express as px
import pandas as pd
# External CSS sheet supplying the page's base typography and layout.
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
# Dark theme palette applied to both the figure and the page body.
colors = {'background': '#111111', 'text': '#7FDBFF'}
# assume you have a "long-form" data frame
# see https://plotly.com/python/px-arguments/ for more options
df = pd.DataFrame({
    "Fruit": ["Apples", "Oranges", "Bananas", "Apples", "Oranges", "Bananas"],
    "Amount": [4, 1, 2, 2, 4, 5],
    "City": ["SF", "SF", "SF", "Montreal", "Montreal", "Montreal"]
})
# Grouped bar chart: one bar group per fruit, one bar per city.
fig = px.bar(df, x="Fruit", y="Amount", color="City", barmode="group")
fig.update_layout(plot_bgcolor=colors['background'],
                  paper_bgcolor=colors['background'],
                  font_color=colors['text'])
# Page layout: centered title, subtitle, and the bar chart.
app.layout = html.Div(
    style={'backgroundColor': colors['background']},
    children=[
        html.H1(children='Hello Dash',
                style={
                    'textAlign': 'center',
                    'color': colors['text']
                }),
        html.Div(children='Dash: A web application framework for Python.',
                 style={
                     'textAlign': 'center',
                     'color': colors['text']
                 }),
        dcc.Graph(id='example-graph-2', figure=fig)
    ])
if __name__ == '__main__':
    # Development server only; use a production WSGI server for deployment.
    app.run_server(debug=True)
| 31.4 | 78 | 0.590446 |
import dash
import dash_core_components as dcc
import dash_html_components as html
import plotly.express as px
import pandas as pd
external_stylesheets = ['https://codepen.io/chriddyp/pen/bWLwgP.css']
app = dash.Dash(__name__, external_stylesheets=external_stylesheets)
colors = {'background': '#111111', 'text': '#7FDBFF'}
df = pd.DataFrame({
"Fruit": ["Apples", "Oranges", "Bananas", "Apples", "Oranges", "Bananas"],
"Amount": [4, 1, 2, 2, 4, 5],
"City": ["SF", "SF", "SF", "Montreal", "Montreal", "Montreal"]
})
fig = px.bar(df, x="Fruit", y="Amount", color="City", barmode="group")
fig.update_layout(plot_bgcolor=colors['background'],
paper_bgcolor=colors['background'],
font_color=colors['text'])
app.layout = html.Div(
style={'backgroundColor': colors['background']},
children=[
html.H1(children='Hello Dash',
style={
'textAlign': 'center',
'color': colors['text']
}),
html.Div(children='Dash: A web application framework for Python.',
style={
'textAlign': 'center',
'color': colors['text']
}),
dcc.Graph(id='example-graph-2', figure=fig)
])
if __name__ == '__main__':
app.run_server(debug=True)
| true | true |
1c2f82d07a90ad4af67c057f3f46e0d05ad12446 | 1,777 | py | Python | machine_learning_service/asgi.py | clemencegoh/machine_learning_service | 49ccb65dd8cca544bed801559b920cd7bea2d120 | [
"MIT"
] | null | null | null | machine_learning_service/asgi.py | clemencegoh/machine_learning_service | 49ccb65dd8cca544bed801559b920cd7bea2d120 | [
"MIT"
] | null | null | null | machine_learning_service/asgi.py | clemencegoh/machine_learning_service | 49ccb65dd8cca544bed801559b920cd7bea2d120 | [
"MIT"
] | null | null | null | """
ASGI config for machine_learning_service project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os

from django.core.asgi import get_asgi_application

from chatapp.middleware import websockets

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'machine_learning_service.settings')

# Build Django's ASGI handler exactly once.  The original module called
# get_asgi_application() twice, constructing two identical handlers and
# discarding the first.
django_application = get_asgi_application()

# `application` is the entry point ASGI servers import.  The websockets
# middleware handles "websocket" scopes itself and delegates everything
# else to the Django handler.
application = websockets(django_application)
| 29.616667 | 84 | 0.615644 |
import os
from django.core.asgi import get_asgi_application
from chatapp.middleware import websockets
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'machine_learning_service.settings')
django_application = get_asgi_application()
application = get_asgi_application()
application = websockets(application)
| true | true |
1c2f850bf574fe5662363a54d5b21bfabd524c82 | 9,553 | py | Python | gen.py | pengfei2017/lstm_and_ctc_ocr | 23c746ce806d44795b7f1557afad03f6dc88084e | [
"MIT"
] | 2 | 2018-11-18T02:40:25.000Z | 2020-03-17T14:41:51.000Z | gen.py | pengfei2017/lstm_and_ctc_ocr | 23c746ce806d44795b7f1557afad03f6dc88084e | [
"MIT"
] | null | null | null | gen.py | pengfei2017/lstm_and_ctc_ocr | 23c746ce806d44795b7f1557afad03f6dc88084e | [
"MIT"
] | 3 | 2018-07-06T11:38:08.000Z | 2020-03-17T14:45:38.000Z | #!/usr/bin/env python
#
# Copyright (c) 2016 Matthew Earl
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
# NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
# DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
# USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
Generate training and test images.
"""
__all__ = (
'generate_ims',
)
import math
import os
import random
import sys
import cv2
import numpy
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
import common
from common import OUTPUT_SHAPE
# fonts = ["fonts/Farrington-7B-Qiqi.ttf", "fonts/Arial.ttf", "fonts/times.ttf"]
# fonts = ["fonts/华文细黑.ttf", "fonts/OCR-B 10 BT.ttf"]
fonts = ["fonts/华文细黑.ttf"]
FONT_HEIGHT = 32 # Pixel size to which the chars are resized
CHARS = common.CHARS[:]
CHARS.append(" ")
def make_char_ims(output_height, font):
    """Yield ``(char, image)`` pairs for every character in CHARS.

    Each glyph is rendered with *font* at 4x size, then scaled down to
    *output_height* pixels tall; the image is a float array normalized
    to [0, 1].
    """
    face = ImageFont.truetype(font, output_height * 4)
    tallest = max(face.getsize(ch)[1] for ch in CHARS)
    for ch in CHARS:
        glyph_width = face.getsize(ch)[0]
        canvas = Image.new("RGBA", (glyph_width, tallest), (0, 0, 0))
        ImageDraw.Draw(canvas).text((0, 0), ch, (255, 255, 255), font=face)
        shrink = float(output_height) / tallest
        canvas = canvas.resize((int(glyph_width * shrink), output_height),
                               Image.ANTIALIAS)
        # Keep only one channel (glyphs are grayscale) and normalize.
        yield ch, numpy.array(canvas)[:, :, 0].astype(numpy.float32) / 255.
def get_all_font_char_ims(out_height):
    """Return one ``{char: image}`` dict per configured font."""
    return [dict(make_char_ims(out_height, font)) for font in fonts]
def euler_to_mat(yaw, pitch, roll):
    """Compose a 3x3 rotation matrix from yaw (Y), pitch (X) and roll (Z).

    Rotations are applied clockwise about Y first, then X, then Z, i.e. the
    result is ``Rz @ (Rx @ Ry)`` -- same composition (and association) order
    as the original implementation.
    """
    cy, sy = math.cos(yaw), math.sin(yaw)
    yaw_mat = numpy.matrix([[cy, 0., sy],
                            [0., 1., 0.],
                            [-sy, 0., cy]])
    cx, sx = math.cos(pitch), math.sin(pitch)
    pitch_mat = numpy.matrix([[1., 0., 0.],
                              [0., cx, -sx],
                              [0., sx, cx]])
    cz, sz = math.cos(roll), math.sin(roll)
    roll_mat = numpy.matrix([[cz, -sz, 0.],
                             [sz, cz, 0.],
                             [0., 0., 1.]])
    return roll_mat * (pitch_mat * yaw_mat)
def pick_colors():
    """Draw a random (text_color, plate_color) pair with contrast >= 0.3.

    Both values are uniform in [0, 1); the smaller becomes the text color.
    Pairs with less than 0.3 of contrast are redrawn.
    """
    while True:
        first_draw = random.random()
        second_draw = random.random()
        text_color, plate_color = sorted((first_draw, second_draw))
        if plate_color - text_color >= 0.3:
            return text_color, plate_color
def make_affine_transform(from_shape, to_shape,
                          min_scale, max_scale,
                          scale_variation=1.0,
                          rotation_variation=1.0,
                          translation_variation=1.0):
    """Build a random 2x3 affine transform placing a *from_shape* image
    inside a *to_shape* image.

    Scale, rotation (yaw/pitch/roll) and translation are drawn at random;
    each ``*_variation`` factor widens its sampling range.  Returns
    ``(M, out_of_bounds)`` where *M* is suitable for ``cv2.warpAffine`` and
    *out_of_bounds* flags draws that fell outside the requested scale or
    translation limits (used downstream to mark unusable samples).
    """
    out_of_bounds = False

    from_size = numpy.array([[from_shape[1], from_shape[0]]]).T
    to_size = numpy.array([[to_shape[1], to_shape[0]]]).T

    # Sample a scale centered between min and max; widening the range with
    # scale_variation can push it outside the [min, max] limits.
    scale = random.uniform((min_scale + max_scale) * 0.5 -
                           (max_scale - min_scale) * 0.5 * scale_variation,
                           (min_scale + max_scale) * 0.5 +
                           (max_scale - min_scale) * 0.5 * scale_variation)
    if scale > max_scale or scale < min_scale:
        out_of_bounds = True
    roll = random.uniform(-0.3, 0.3) * rotation_variation
    pitch = random.uniform(-0.2, 0.2) * rotation_variation
    yaw = random.uniform(-1.2, 1.2) * rotation_variation

    # Compute a bounding box on the skewed input image (`from_shape`).
    M = euler_to_mat(yaw, pitch, roll)[:2, :2]
    h, w = from_shape
    corners = numpy.matrix([[-w, +w, -w, +w],
                            [-h, -h, +h, +h]]) * 0.5
    skewed_size = numpy.array(numpy.max(M * corners, axis=1) -
                              numpy.min(M * corners, axis=1))

    # Set the scale as large as possible such that the skewed and scaled shape
    # is less than or equal to the desired ratio in either dimension.
    scale *= numpy.min(to_size / skewed_size) * 1.1

    # Set the translation such that the skewed and scaled image falls within
    # the output shape's bounds.
    trans = (numpy.random.random((2, 1)) - 0.5) * translation_variation
    # The fifth power concentrates samples near the center of the image.
    trans = ((2.0 * trans) ** 5.0) / 2.0
    if numpy.any(trans < -0.5) or numpy.any(trans > 0.5):
        out_of_bounds = True
    trans = (to_size - skewed_size * scale) * trans

    center_to = to_size / 2.
    center_from = from_size / 2.

    # Assemble the final 2x3 matrix: rotation+scale on the left, the
    # center-to-center translation as the last column.
    M = euler_to_mat(yaw, pitch, roll)[:2, :2]
    M *= scale
    M = numpy.hstack([M, trans + center_to - M * center_from])

    return M, out_of_bounds
def generate_code():
    """Return a random code string of a randomly chosen configured length.

    When ``common.ADD_BLANK`` is set and blank insertion is chosen for this
    sample, a space is inserted before every 4th character -- including
    before the very first one (index 0), mirroring the original behavior.
    """
    use_blank = random.choice([True, False])
    code_length = random.choice(common.LENGTHS)
    separator = ' ' if common.ADD_BLANK else ''
    pieces = []
    for index in range(code_length):
        if index % 4 == 0 and use_blank:
            pieces.append(separator)
        pieces.append(random.choice(common.CHARS))
    return ''.join(pieces)
def rounded_rect(shape, radius):
    """Return a float mask of *shape* that is 1 inside a rounded rectangle.

    The four square corners are zeroed, then a filled quarter-circle of
    *radius* is painted back into each of them.
    """
    mask = numpy.ones(shape)
    mask[:radius, :radius] = 0.0
    mask[-radius:, :radius] = 0.0
    mask[:radius, -radius:] = 0.0
    mask[-radius:, -radius:] = 0.0
    height, width = shape[0], shape[1]
    for center in ((radius, radius),
                   (radius, height - radius),
                   (width - radius, radius),
                   (width - radius, height - radius)):
        cv2.circle(mask, center, radius, 1.0, -1)
    return mask
def generate_plate(font_height, char_ims):
    """Render a random code onto a plate image.

    Returns ``(plate, mask, code)``: a grayscale float plate image, its
    rounded-rectangle mask of the same shape, and the generated code text
    with blanks stripped.
    """
    # Random paddings, spacing and corner radius vary each plate's layout.
    h_padding = random.uniform(0.2, 0.4) * font_height
    v_padding = random.uniform(0.1, 0.3) * font_height
    spacing = font_height * random.uniform(-0.01, 0.05)
    radius = 1 + int(font_height * 0.1 * random.random())
    code = generate_code()
    text_width = sum(char_ims[c].shape[1] for c in code)
    text_width += (len(code) - 1) * spacing
    out_shape = (int(font_height + v_padding * 2),
                 int(text_width + h_padding * 2))
    text_color, plate_color = pick_colors()
    # Blit each glyph into the text mask, advancing x by width + spacing.
    text_mask = numpy.zeros(out_shape)
    x = h_padding
    y = v_padding
    for c in code:
        char_im = char_ims[c]
        ix, iy = int(x), int(y)
        text_mask[iy:iy + char_im.shape[0], ix:ix + char_im.shape[1]] = char_im
        x += char_im.shape[1] + spacing
    # Blend plate background and text colors through the glyph mask.
    plate = (numpy.ones(out_shape) * plate_color * (1. - text_mask) +
             numpy.ones(out_shape) * text_color * text_mask)
    return plate, rounded_rect(out_shape, radius), code.replace(" ", "")
def generate_bg(num_bg_images):
    """Return a random OUTPUT_SHAPE-sized grayscale crop of a background.

    Random background files are drawn until one at least as large as
    OUTPUT_SHAPE is found; a random window of exactly that size is cropped
    from it.  Pixel values are normalized to [0, 1].
    """
    while True:
        fname = "bgs/{:08d}.jpg".format(random.randint(0, num_bg_images - 1))
        bg = cv2.imread(fname, cv2.IMREAD_GRAYSCALE) / 255.
        if bg.shape[1] >= OUTPUT_SHAPE[1] and bg.shape[0] >= OUTPUT_SHAPE[0]:
            break
    x = random.randint(0, bg.shape[1] - OUTPUT_SHAPE[1])
    y = random.randint(0, bg.shape[0] - OUTPUT_SHAPE[0])
    return bg[y:y + OUTPUT_SHAPE[0], x:x + OUTPUT_SHAPE[1]]
def generate_im(char_ims, num_bg_images):
    """Compose one training sample: a warped plate pasted onto a background.

    Returns ``(image, code, usable)`` where *usable* is False when the random
    transform pushed the plate outside the configured sampling bounds.
    """
    bg = generate_bg(num_bg_images)

    plate, plate_mask, code = generate_plate(FONT_HEIGHT, char_ims)
    # Randomly scale/rotate/translate the plate into background coordinates.
    M, out_of_bounds = make_affine_transform(
        from_shape=plate.shape,
        to_shape=bg.shape,
        min_scale=0.8,
        max_scale=0.9,
        rotation_variation=0.3,
        scale_variation=1.0,
        translation_variation=1.0)
    plate = cv2.warpAffine(plate, M, (bg.shape[1], bg.shape[0]))
    plate_mask = cv2.warpAffine(plate_mask, M, (bg.shape[1], bg.shape[0]))

    # Alpha-blend the plate over the background via its mask.
    out = plate * plate_mask + bg * (1 - plate_mask)

    out = cv2.resize(out, (OUTPUT_SHAPE[1], OUTPUT_SHAPE[0]))

    # Add Gaussian sensor noise and clamp back into the valid [0, 1] range.
    out += numpy.random.normal(scale=0.05, size=out.shape)
    out = numpy.clip(out, 0., 1.)

    return out, code, not out_of_bounds
def generate_ims(num_images):
    """
    Generate a number of number plate images.

    :param num_images:
        Number of images to generate.

    :return:
        Iterable of ``(image, code, usable)`` tuples, one per sample.
    """
    # (The original bound an unused local `variation = 1.0`; removed.)
    char_ims = get_all_font_char_ims(FONT_HEIGHT)
    num_bg_images = len(os.listdir("bgs"))
    for _ in range(num_images):
        yield generate_im(random.choice(char_ims), num_bg_images)
if __name__ == "__main__":
    # Generate the train/test image sets, one PNG per sample.  Each file
    # name encodes the sample index, the code, and a 1/0 usability flag.
    dirs = ["test", "train"]
    size = {"test": common.TEST_SIZE, "train": common.TRAIN_SIZE}
    for dir_name in dirs:
        if not os.path.exists(dir_name):
            os.mkdir(dir_name)
        im_gen = generate_ims(size.get(dir_name))
        for img_idx, (im, c, p) in enumerate(im_gen):
            fname = dir_name + "/{:08d}_{}_{}.png".format(img_idx, c, "1" if p else "0")
            print('\'' + fname + '\',')
            # Images are in [0, 1]; scale to 8-bit for PNG output.
            cv2.imwrite(fname, im * 255.)
| 32.828179 | 88 | 0.611745 |
__all__ = (
'generate_ims',
)
import math
import os
import random
import sys
import cv2
import numpy
from PIL import Image
from PIL import ImageDraw
from PIL import ImageFont
import common
from common import OUTPUT_SHAPE
fonts = ["fonts/华文细黑.ttf"]
FONT_HEIGHT = 32
CHARS = common.CHARS[:]
CHARS.append(" ")
def make_char_ims(output_height, font):
font_size = output_height * 4
font = ImageFont.truetype(font, font_size)
height = max(font.getsize(d)[1] for d in CHARS)
for c in CHARS:
width = font.getsize(c)[0]
im = Image.new("RGBA", (width, height), (0, 0, 0))
draw = ImageDraw.Draw(im)
draw.text((0, 0), c, (255, 255, 255), font=font)
scale = float(output_height) / height
im = im.resize((int(width * scale), output_height), Image.ANTIALIAS)
yield c, numpy.array(im)[:, :, 0].astype(numpy.float32) / 255.
def get_all_font_char_ims(out_height):
result = []
for font in fonts:
result.append(dict(make_char_ims(out_height, font)))
return result
def euler_to_mat(yaw, pitch, roll):
c, s = math.cos(yaw), math.sin(yaw)
M = numpy.matrix([[c, 0., s],
[0., 1., 0.],
[-s, 0., c]])
c, s = math.cos(pitch), math.sin(pitch)
M = numpy.matrix([[1., 0., 0.],
[0., c, -s],
[0., s, c]]) * M
c, s = math.cos(roll), math.sin(roll)
M = numpy.matrix([[c, -s, 0.],
[s, c, 0.],
[0., 0., 1.]]) * M
return M
def pick_colors():
first = True
while first or plate_color - text_color < 0.3:
text_color = random.random()
plate_color = random.random()
if text_color > plate_color:
text_color, plate_color = plate_color, text_color
first = False
return text_color, plate_color
def make_affine_transform(from_shape, to_shape,
min_scale, max_scale,
scale_variation=1.0,
rotation_variation=1.0,
translation_variation=1.0):
out_of_bounds = False
from_size = numpy.array([[from_shape[1], from_shape[0]]]).T
to_size = numpy.array([[to_shape[1], to_shape[0]]]).T
scale = random.uniform((min_scale + max_scale) * 0.5 -
(max_scale - min_scale) * 0.5 * scale_variation,
(min_scale + max_scale) * 0.5 +
(max_scale - min_scale) * 0.5 * scale_variation)
if scale > max_scale or scale < min_scale:
out_of_bounds = True
roll = random.uniform(-0.3, 0.3) * rotation_variation
pitch = random.uniform(-0.2, 0.2) * rotation_variation
yaw = random.uniform(-1.2, 1.2) * rotation_variation
M = euler_to_mat(yaw, pitch, roll)[:2, :2]
h, w = from_shape
corners = numpy.matrix([[-w, +w, -w, +w],
[-h, -h, +h, +h]]) * 0.5
skewed_size = numpy.array(numpy.max(M * corners, axis=1) -
numpy.min(M * corners, axis=1))
scale *= numpy.min(to_size / skewed_size) * 1.1
trans = (numpy.random.random((2, 1)) - 0.5) * translation_variation
trans = ((2.0 * trans) ** 5.0) / 2.0
if numpy.any(trans < -0.5) or numpy.any(trans > 0.5):
out_of_bounds = True
trans = (to_size - skewed_size * scale) * trans
center_to = to_size / 2.
center_from = from_size / 2.
M = euler_to_mat(yaw, pitch, roll)[:2, :2]
M *= scale
M = numpy.hstack([M, trans + center_to - M * center_from])
return M, out_of_bounds
def generate_code():
f = ""
append_blank = random.choice([True, False])
length = random.choice(common.LENGTHS)
blank = ''
if common.ADD_BLANK:
blank = ' '
for i in range(length):
if 0 == i % 4 and append_blank:
f = f + blank
f = f + random.choice(common.CHARS)
return f
def rounded_rect(shape, radius):
out = numpy.ones(shape)
out[:radius, :radius] = 0.0
out[-radius:, :radius] = 0.0
out[:radius, -radius:] = 0.0
out[-radius:, -radius:] = 0.0
cv2.circle(out, (radius, radius), radius, 1.0, -1)
cv2.circle(out, (radius, shape[0] - radius), radius, 1.0, -1)
cv2.circle(out, (shape[1] - radius, radius), radius, 1.0, -1)
cv2.circle(out, (shape[1] - radius, shape[0] - radius), radius, 1.0, -1)
return out
def generate_plate(font_height, char_ims):
h_padding = random.uniform(0.2, 0.4) * font_height
v_padding = random.uniform(0.1, 0.3) * font_height
spacing = font_height * random.uniform(-0.01, 0.05)
radius = 1 + int(font_height * 0.1 * random.random())
code = generate_code()
text_width = sum(char_ims[c].shape[1] for c in code)
text_width += (len(code) - 1) * spacing
out_shape = (int(font_height + v_padding * 2),
int(text_width + h_padding * 2))
text_color, plate_color = pick_colors()
text_mask = numpy.zeros(out_shape)
x = h_padding
y = v_padding
for c in code:
char_im = char_ims[c]
ix, iy = int(x), int(y)
text_mask[iy:iy + char_im.shape[0], ix:ix + char_im.shape[1]] = char_im
x += char_im.shape[1] + spacing
plate = (numpy.ones(out_shape) * plate_color * (1. - text_mask) +
numpy.ones(out_shape) * text_color * text_mask)
return plate, rounded_rect(out_shape, radius), code.replace(" ", "")
def generate_bg(num_bg_images):
found = False
while not found:
fname = "bgs/{:08d}.jpg".format(random.randint(0, num_bg_images - 1))
# fname = "bgs/12345678.jpg"
bg = cv2.imread(fname, cv2.IMREAD_GRAYSCALE) / 255.
if (bg.shape[1] >= OUTPUT_SHAPE[1] and
bg.shape[0] >= OUTPUT_SHAPE[0]):
found = True
x = random.randint(0, bg.shape[1] - OUTPUT_SHAPE[1])
y = random.randint(0, bg.shape[0] - OUTPUT_SHAPE[0])
bg = bg[y:y + OUTPUT_SHAPE[0], x:x + OUTPUT_SHAPE[1]]
return bg
def generate_im(char_ims, num_bg_images):
bg = generate_bg(num_bg_images)
plate, plate_mask, code = generate_plate(FONT_HEIGHT, char_ims)
M, out_of_bounds = make_affine_transform(
from_shape=plate.shape,
to_shape=bg.shape,
min_scale=0.8,
max_scale=0.9,
rotation_variation=0.3,
scale_variation=1.0,
translation_variation=1.0)
plate = cv2.warpAffine(plate, M, (bg.shape[1], bg.shape[0]))
plate_mask = cv2.warpAffine(plate_mask, M, (bg.shape[1], bg.shape[0]))
out = plate * plate_mask + bg * (1 - plate_mask)
out = cv2.resize(out, (OUTPUT_SHAPE[1], OUTPUT_SHAPE[0]))
out += numpy.random.normal(scale=0.05, size=out.shape)
out = numpy.clip(out, 0., 1.)
return out, code, not out_of_bounds
def generate_ims(num_images):
variation = 1.0
char_ims = get_all_font_char_ims(FONT_HEIGHT)
num_bg_images = len(os.listdir("bgs"))
for i in range(num_images):
yield generate_im(random.choice(char_ims), num_bg_images)
if __name__ == "__main__":
dirs = ["test", "train"]
size = {"test": common.TEST_SIZE, "train": common.TRAIN_SIZE}
for dir_name in dirs:
if not os.path.exists(dir_name):
os.mkdir(dir_name)
im_gen = generate_ims(size.get(dir_name))
for img_idx, (im, c, p) in enumerate(im_gen):
fname = dir_name + "/{:08d}_{}_{}.png".format(img_idx, c, "1" if p else "0")
print('\'' + fname + '\',')
cv2.imwrite(fname, im * 255.)
| true | true |
1c2f8511726841e7d4acc41ea149bb001473f4e0 | 3,218 | py | Python | autotrader_1/autotrader_1/settings.py | msamunetogetoge/ta-lib_with_bitflyer | 021cb88bd764d42c054e4f0a8f3ed330dc8a79ae | [
"MIT"
] | null | null | null | autotrader_1/autotrader_1/settings.py | msamunetogetoge/ta-lib_with_bitflyer | 021cb88bd764d42c054e4f0a8f3ed330dc8a79ae | [
"MIT"
] | null | null | null | autotrader_1/autotrader_1/settings.py | msamunetogetoge/ta-lib_with_bitflyer | 021cb88bd764d42c054e4f0a8f3ed330dc8a79ae | [
"MIT"
] | null | null | null | """
Django settings for autotrader_1 project.
Generated by 'django-admin startproject' using Django 3.1.1.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): the key is hard-coded in source control here; for any
# production deployment it should be loaded from the environment instead.
SECRET_KEY = '0qw(@t+8bbmv14#k2_5v!#jx&ei_ee%_&a!fxl*htynys4s9f4'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
# Empty list is fine while DEBUG is True; must list served hosts otherwise.
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'technical',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'autotrader_1.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'autotrader_1.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
| 26.377049 | 92 | 0.671224 |
from pathlib import Path
BASE_DIR = Path(__file__).resolve().parent.parent
SECRET_KEY = '0qw(@t+8bbmv14#k2_5v!#jx&ei_ee%_&a!fxl*htynys4s9f4'
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'technical',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'autotrader_1.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'autotrader_1.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
| true | true |
1c2f868be044541af4d2d26dbb6612c5b00cce43 | 679 | py | Python | Configuration/Generator/python/DoubleElectronPt10Extended_pythia8_cfi.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | Configuration/Generator/python/DoubleElectronPt10Extended_pythia8_cfi.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | Configuration/Generator/python/DoubleElectronPt10Extended_pythia8_cfi.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z | import FWCore.ParameterSet.Config as cms
# Particle-gun generator configuration: electron/positron pairs at a fixed
# pT of ~10 GeV (9.99-10.01 window) over the extended |eta| < 4 range.
generator = cms.EDFilter("Pythia8PtGun",
                         PGunParameters = cms.PSet(
        MaxPt = cms.double(10.01),
        MinPt = cms.double(9.99),
        ParticleID = cms.vint32(11),
        AddAntiParticle = cms.bool(True),
        MaxEta = cms.double(4.0),
        MaxPhi = cms.double(3.14159265359),
        MinEta = cms.double(-4.0),
        MinPhi = cms.double(-3.14159265359) ## in radians
        ),
                         Verbosity = cms.untracked.int32(0), ## set to 1 (or greater) for printouts
                         psethack = cms.string('single electron pt 10'),
                         firstRun = cms.untracked.uint32(1),
                         PythiaParameters = cms.PSet(parameterSets = cms.vstring())
                         )
| 32.333333 | 79 | 0.631811 | import FWCore.ParameterSet.Config as cms
generator = cms.EDFilter("Pythia8PtGun",
PGunParameters = cms.PSet(
MaxPt = cms.double(10.01),
MinPt = cms.double(9.99),
ParticleID = cms.vint32(11),
AddAntiParticle = cms.bool(True),
MaxEta = cms.double(4.0),
MaxPhi = cms.double(3.14159265359),
MinEta = cms.double(-4.0),
MinPhi = cms.double(-3.14159265359) Verbosity = cms.untracked.int32(0), ectron pt 10'),
firstRun = cms.untracked.uint32(1),
PythiaParameters = cms.PSet(parameterSets = cms.vstring())
)
| true | true |
1c2f86efffda03a5811f1554bd9b4781fba665a5 | 3,729 | py | Python | tests/unit/core/providers/aws/test_response.py | avosper-intellaegis/runway | 757d4e7db269ec16479b044ac82a69f25fa2a450 | [
"Apache-2.0"
] | 134 | 2018-02-26T21:35:23.000Z | 2022-03-03T00:30:27.000Z | tests/unit/core/providers/aws/test_response.py | asksmruti/runway | 8aca76df9372e3d13eb35e12f81758f618e89e74 | [
"Apache-2.0"
] | 937 | 2018-03-08T22:04:35.000Z | 2022-03-30T12:21:47.000Z | tests/unit/core/providers/aws/test_response.py | asksmruti/runway | 8aca76df9372e3d13eb35e12f81758f618e89e74 | [
"Apache-2.0"
] | 70 | 2018-02-26T23:48:11.000Z | 2022-03-02T18:44:30.000Z | """Test runway.core.providers.aws._response."""
# pylint: disable=no-self-use
# pyright: basic
from __future__ import annotations
from typing import TYPE_CHECKING
from runway.core.providers.aws import BaseResponse, ResponseError, ResponseMetadata
if TYPE_CHECKING:
from pytest_mock import MockerFixture
MODULE = "runway.core.providers.aws._response"
class TestBaseResponse:
    """Test runway.core.providers.aws._response.BaseResponse."""
    def test_init(self, mocker: MockerFixture) -> None:
        """Test init and the attributes it sets."""
        # Patch the component classes so we can assert on how BaseResponse
        # forwards the raw boto-style payload into them.
        mock_error = mocker.patch(f"{MODULE}.ResponseError")
        mock_metadata = mocker.patch(f"{MODULE}.ResponseMetadata")
        data = {"Error": {"Code": "something"}, "ResponseMetadata": {"HostId": "id"}}
        response = BaseResponse(**data.copy())
        # Attributes are instances of the (patched) component classes...
        assert response.error == mock_error.return_value
        assert response.metadata == mock_metadata.return_value
        # ...constructed from the matching sub-dicts of the payload.
        mock_error.assert_called_once_with(**data["Error"])
        mock_metadata.assert_called_once_with(**data["ResponseMetadata"])
    def test_init_defaults(self, mocker: MockerFixture) -> None:
        """Test init default values and the attributes it sets."""
        mock_error = mocker.patch(f"{MODULE}.ResponseError")
        mock_metadata = mocker.patch(f"{MODULE}.ResponseMetadata")
        response = BaseResponse()
        assert response.error == mock_error.return_value
        assert response.metadata == mock_metadata.return_value
        # With no payload, both components are constructed with no kwargs.
        mock_error.assert_called_once_with()
        mock_metadata.assert_called_once_with()
class TestResponseError:
    """Tests for runway.core.providers.aws._response.ResponseError."""
    def test_init(self) -> None:
        """Explicit Code/Message kwargs populate the matching attributes."""
        err = ResponseError(Code="error_code", Message="error_message")
        assert err
        assert err.code == "error_code"
        assert err.message == "error_message"
    def test_init_defaults(self) -> None:
        """With no kwargs the error is falsy and both fields are empty."""
        err = ResponseError()
        assert not err
        assert err.code == ""
        assert err.message == ""
class TestResponseMetadata:
    """Tests for runway.core.providers.aws._response.ResponseMetadata."""
    def test_forbidden(self) -> None:
        """``forbidden`` is True only for an HTTP 403 status."""
        assert ResponseMetadata(HTTPStatusCode=403).forbidden
        assert not ResponseMetadata(HTTPStatusCode=404).forbidden
    def test_init(self) -> None:
        """Keyword arguments map onto the snake_case attributes."""
        meta = ResponseMetadata(
            HostId="host_id",
            HTTPHeaders={"header00": "header00_val"},
            HTTPStatusCode=100,
            RequestId="request_id",
            RetryAttempts=5,
        )
        assert meta.host_id == "host_id"
        assert meta.https_headers == {"header00": "header00_val"}
        assert meta.http_status_code == 100
        assert meta.request_id == "request_id"
        assert meta.retry_attempts == 5
    def test_init_defaults(self) -> None:
        """Defaults: no ids, empty headers, status 200, zero retries."""
        meta = ResponseMetadata()
        assert not meta.host_id
        assert meta.https_headers == {}
        assert meta.http_status_code == 200
        assert not meta.request_id
        assert meta.retry_attempts == 0
    def test_not_found(self) -> None:
        """``not_found`` is True only for an HTTP 404 status."""
        assert not ResponseMetadata(HTTPStatusCode=403).not_found
        assert ResponseMetadata(HTTPStatusCode=404).not_found
| 36.920792 | 85 | 0.665326 |
from __future__ import annotations
from typing import TYPE_CHECKING
from runway.core.providers.aws import BaseResponse, ResponseError, ResponseMetadata
if TYPE_CHECKING:
from pytest_mock import MockerFixture
MODULE = "runway.core.providers.aws._response"
class TestBaseResponse:
def test_init(self, mocker: MockerFixture) -> None:
mock_error = mocker.patch(f"{MODULE}.ResponseError")
mock_metadata = mocker.patch(f"{MODULE}.ResponseMetadata")
data = {"Error": {"Code": "something"}, "ResponseMetadata": {"HostId": "id"}}
response = BaseResponse(**data.copy())
assert response.error == mock_error.return_value
assert response.metadata == mock_metadata.return_value
mock_error.assert_called_once_with(**data["Error"])
mock_metadata.assert_called_once_with(**data["ResponseMetadata"])
def test_init_default(self, mocker: MockerFixture) -> None:
mock_error = mocker.patch(f"{MODULE}.ResponseError")
mock_metadata = mocker.patch(f"{MODULE}.ResponseMetadata")
response = BaseResponse()
assert response.error == mock_error.return_value
assert response.metadata == mock_metadata.return_value
mock_error.assert_called_once_with()
mock_metadata.assert_called_once_with()
class TestResponseError:
def test_init(self) -> None:
data = {"Code": "error_code", "Message": "error_message"}
error = ResponseError(**data.copy())
assert error
assert error.code == data["Code"]
assert error.message == data["Message"]
def test_init_defaults(self) -> None:
error = ResponseError()
assert not error
assert error.code == ""
assert error.message == ""
class TestResponseMetadata:
def test_forbidden(self) -> None:
assert ResponseMetadata(HTTPStatusCode=403).forbidden
assert not ResponseMetadata(HTTPStatusCode=404).forbidden
def test_init(self) -> None:
data = {
"HostId": "host_id",
"HTTPHeaders": {"header00": "header00_val"},
"HTTPStatusCode": 100,
"RequestId": "request_id",
"RetryAttempts": 5,
}
metadata = ResponseMetadata(**data.copy())
assert metadata.host_id == data["HostId"]
assert metadata.https_headers == data["HTTPHeaders"]
assert metadata.http_status_code == data["HTTPStatusCode"]
assert metadata.request_id == data["RequestId"]
assert metadata.retry_attempts == data["RetryAttempts"]
def test_init_defaults(self) -> None:
metadata = ResponseMetadata()
assert not metadata.host_id
assert metadata.https_headers == {}
assert metadata.http_status_code == 200
assert not metadata.request_id
assert metadata.retry_attempts == 0
def test_not_found(self) -> None:
assert not ResponseMetadata(HTTPStatusCode=403).not_found
assert ResponseMetadata(HTTPStatusCode=404).not_found
| true | true |
1c2f87ddb47c9dc701136acf203c663f95516839 | 3,580 | py | Python | students/linden/firstgame.py | sleepinghungry/wwtag | 8ffa886f28281e3acef2465953d26db85a81a045 | [
"MIT"
] | null | null | null | students/linden/firstgame.py | sleepinghungry/wwtag | 8ffa886f28281e3acef2465953d26db85a81a045 | [
"MIT"
] | null | null | null | students/linden/firstgame.py | sleepinghungry/wwtag | 8ffa886f28281e3acef2465953d26db85a81a045 | [
"MIT"
] | null | null | null | from basic_context import BasicGameWorld
from basic_context import BasicGameEngine
from basic_context import NORTH, SOUTH, EAST, WEST, UP, DOWN, RIGHT, LEFT, IN, OUT, FORWARD, BACK, NORTH_WEST, NORTH_EAST, SOUTH_WEST, SOUTH_EAST, NOT_DIRECTION
from basic_context import Die
import random
import time
world = BasicGameWorld()
front_of_office = world.create_location("door of dungeon","You are standing in front of the entrance to a dark, deep, musty dungeon.")
world.player.health = 50
vestibule = world.create_location(
"entry room",
"""A small area at the top of a long flight of stairs. Writing on one wall says BEWARE ALL WHO ENTER THIS ACCURSED PLACE. THOSE WHO DO NOT HEED THIS WARNING SHALL DIE.""")
world.create_connection("Glass_Door", front_of_office, vestibule,NORTH ,SOUTH)
top_of_stairs = world.create_location(
"bottom of stairs",
"""A messy room full of trash and blood.there is a locked door north of where you are standing.""")
knife = top_of_stairs.create_object("knife", "bloody knife on a table")
key = top_of_stairs.create_object("key", "heavy-looking key shaped like a stylized sword")
world.create_connection ("stairs",vestibule, top_of_stairs, EAST,WEST)
locked_room = world.create_location(
"locked room",
"""a room behind a locked door with a creature of some kind lying on the floor and a way out to the south""")
door = world.create_connection("door",top_of_stairs, locked_room, [IN,WEST], [OUT,EAST])
bug = world.create_actor("bugbear", locked_room)
bug.health=10
def fight_bugbear(game, thing):
    """Handle one round of combat against the module-level ``bug`` actor.

    Each round deals a fixed 5 damage to the bugbear and a random 5-15
    damage to the player.  ``thing`` is the phrase-target supplied by the
    engine and is unused here.

    Bug fixes: both death checks were ``>= 0`` and therefore fired while
    the combatants were still alive; they are now ``<= 0``.  The kill also
    used a bare ``bug.terminate`` attribute access (a no-op).
    """
    bug.health -= 5
    game.player.health -= random.randint(5, 15)
    if bug.health <= 0:  # was >= 0: announced the kill on every single hit
        bug.terminate()  # was a bare attribute access; assumes terminate is a method -- TODO confirm engine API
        print("you killed the bugbear, but took damage in the process. you have", game.player.health, "health left")
    if game.player.health <= 0:  # was >= 0: killed the player while still healthy
        Die("the bugbear slashed you open with its claws. as you pass from this world, the last thing you see is the bugbear lying dead on the ground before you. you smile at death, knowing you have defeated your last opponent")
bug.add_phrase("fight bugbear",fight_bugbear)
door.make_requirement(key)
skeleton_room = world.create_location (
"skeleton room",
""" a room full of bones with 3 complete skeletons among them. these say skeleton1, skeleton2, and skeleton3. as you enter, skeleton1 rises off the ground saying 'die, tresspasser!'""")
skel1 = world.create_actor("skeleton1", skeleton_room)
skel1.health=25
def fight_skeleton1(game, thing):
    """Handle one round of combat against the module-level ``skel1`` actor.

    Each round deals a random 10-25 damage to the skeleton and a random
    5-15 damage to the player.  ``thing`` is the phrase-target supplied by
    the engine and is unused here.

    Bug fixes: both death checks were ``>= 0`` and therefore fired while
    the combatants were still alive; they are now ``<= 0``.  The kill also
    used a bare ``skel1.terminate`` attribute access (a no-op).
    """
    skel1.health -= random.randint(10, 25)
    game.player.health -= random.randint(5, 15)
    if skel1.health <= 0:  # was >= 0: announced the kill on every single hit
        skel1.terminate()  # was a bare attribute access; assumes terminate is a method -- TODO confirm engine API
        print("you killed skeleton1, but took damage in the process. you have", game.player.health, "health left")
    if game.player.health <= 0:  # was >= 0: killed the player while still healthy
        Die("skeleton1 slashed you open with its claws. as you pass from this world, the last thing you see is skeleton1 lying dead on the ground before you. you smile at death, knowing you have defeated your last opponent")
skel1.add_phrase("fight skeleton1",fight_skeleton1)
world.create_connection("passageway", locked_room, skeleton_room, SOUTH,NORTH)
def describe_bobroom(game):
    # Room-description callback: announce the gag, pause, then flood the
    # output with "bob".
    # NOTE(review): the message claims 7,000,000,000 prints but the loop
    # below emits only 99,999 (range(1, 100000)) -- presumably intentional
    # hyperbole; confirm before "fixing" either side.
    game.writer.output("this world will now print 'bob' 7000000000 times. \n gameplay will resume when this objective has been completed")
    time.sleep(10)  # dramatic pause before the flood
    for i in range (1,100000):
        game.writer.output("bob")
bobroom = world.create_location(
"bob room",
describe_bobroom)
world.create_connection("down", bobroom, skeleton_room, EAST, WEST )
game = BasicGameEngine(world)
game.run()
| 42.117647 | 230 | 0.738268 | from basic_context import BasicGameWorld
from basic_context import BasicGameEngine
from basic_context import NORTH, SOUTH, EAST, WEST, UP, DOWN, RIGHT, LEFT, IN, OUT, FORWARD, BACK, NORTH_WEST, NORTH_EAST, SOUTH_WEST, SOUTH_EAST, NOT_DIRECTION
from basic_context import Die
import random
import time
world = BasicGameWorld()
front_of_office = world.create_location("door of dungeon","You are standing in front of the entrance to a dark, deep, musty dungeon.")
world.player.health = 50
vestibule = world.create_location(
"entry room",
"""A small area at the top of a long flight of stairs. Writing on one wall says BEWARE ALL WHO ENTER THIS ACCURSED PLACE. THOSE WHO DO NOT HEED THIS WARNING SHALL DIE.""")
world.create_connection("Glass_Door", front_of_office, vestibule,NORTH ,SOUTH)
top_of_stairs = world.create_location(
"bottom of stairs",
"""A messy room full of trash and blood.there is a locked door north of where you are standing.""")
knife = top_of_stairs.create_object("knife", "bloody knife on a table")
key = top_of_stairs.create_object("key", "heavy-looking key shaped like a stylized sword")
world.create_connection ("stairs",vestibule, top_of_stairs, EAST,WEST)
locked_room = world.create_location(
"locked room",
"""a room behind a locked door with a creature of some kind lying on the floor and a way out to the south""")
door = world.create_connection("door",top_of_stairs, locked_room, [IN,WEST], [OUT,EAST])
bug = world.create_actor("bugbear", locked_room)
bug.health=10
def fight_bugbear(game, thing):
bug.health -= 5
game.player.health -= random.randint(5,15)
if bug.health >=0:
bug.terminate
print ("you killed the bugbear, but took damage in the process. you have",game.player.health,"health left")
if game.player.health >=0:
Die ("the bugbear slashed you open with its claws. as you pass from this world, the last thing you see is the bugbear lying dead on the ground before you. you smile at death, knowing you have defeated your last opponent")
bug.add_phrase("fight bugbear",fight_bugbear)
door.make_requirement(key)
skeleton_room = world.create_location (
"skeleton room",
""" a room full of bones with 3 complete skeletons among them. these say skeleton1, skeleton2, and skeleton3. as you enter, skeleton1 rises off the ground saying 'die, tresspasser!'""")
skel1 = world.create_actor("skeleton1", skeleton_room)
skel1.health=25
def fight_skeleton1(game, thing):
skel1.health -= random.randint(10,25)
game.player.health -= random.randint(5,15)
if skel1.health >=0:
skel1.terminate
print ("you killed skeleton1, but took damage in the process. you have",game.player.health,"health left")
if game.player.health >=0:
Die ("skeleton1 slashed you open with its claws. as you pass from this world, the last thing you see is skeleton1 lying dead on the ground before you. you smile at death, knowing you have defeated your last opponent")
skel1.add_phrase("fight skeleton1",fight_skeleton1)
world.create_connection("passageway", locked_room, skeleton_room, SOUTH,NORTH)
def describe_bobroom(game):
game.writer.output("this world will now print 'bob' 7000000000 times. \n gameplay will resume when this objective has been completed")
time.sleep(10)
for i in range (1,100000):
game.writer.output("bob")
bobroom = world.create_location(
"bob room",
describe_bobroom)
world.create_connection("down", bobroom, skeleton_room, EAST, WEST )
game = BasicGameEngine(world)
game.run()
| true | true |
1c2f8852d45aed7abebdf9ca4460bae1586b80b7 | 3,832 | py | Python | deepchem/models/tests/test_megnet.py | Chahalprincy/deepchem | 9d1a6a879cc74b065694b3ddb763d52151d57b7a | [
"MIT"
] | null | null | null | deepchem/models/tests/test_megnet.py | Chahalprincy/deepchem | 9d1a6a879cc74b065694b3ddb763d52151d57b7a | [
"MIT"
] | null | null | null | deepchem/models/tests/test_megnet.py | Chahalprincy/deepchem | 9d1a6a879cc74b065694b3ddb763d52151d57b7a | [
"MIT"
] | null | null | null | """
Test for MEGNetModel
"""
import pytest
import tempfile
import numpy as np
import deepchem as dc
from deepchem.utils.fake_data_generator import FakeGraphGenerator as FGG
try:
from deepchem.models.torch_models import MEGNetModel
# When pytest runs without pytorch in the environment (ex: as in tensorflow workflow),
# the above import raises a ModuleNotFoundError. It is safe to ignore it
# since the below tests only run in an environment with pytorch installed.
except ModuleNotFoundError:
pass
@pytest.mark.torch
def test_megnet_overfit():
    """MEGNet should reach 100% accuracy when overfitting a tiny graph set."""
    generator = FGG(
        avg_n_nodes=10,
        n_node_features=5,
        avg_degree=4,
        n_edge_features=3,
        global_features=4,
        num_classes=5,
        task='graph',
    )
    dataset = generator.sample(n_graphs=100)
    model = MEGNetModel(
        n_node_features=5,
        n_edge_features=3,
        n_global_features=4,
        n_blocks=3,
        is_undirected=True,
        residual_connection=True,
        mode='classification',
        n_classes=5,
        batch_size=16,
    )
    model.fit(dataset, nb_epoch=100)
    accuracy = dc.metrics.Metric(dc.metrics.accuracy_score, mode="classification")
    scores = model.evaluate(dataset, [accuracy], n_classes=5)
    assert scores['accuracy_score'] == 1.0
@pytest.mark.torch
def test_megnet_classification():
    """MEGNet classification should fit fake graph data to >90% accuracy."""
    generator = FGG(
        avg_n_nodes=10,
        n_node_features=5,
        avg_degree=4,
        n_edge_features=3,
        global_features=4,
        num_classes=10,
    )
    dataset = generator.sample(n_graphs=200)
    model = MEGNetModel(
        n_node_features=5,
        n_edge_features=3,
        n_global_features=4,
        n_blocks=3,
        is_undirected=True,
        residual_connection=True,
        mode='classification',
        n_classes=10,
        batch_size=16,
    )
    model.fit(dataset, nb_epoch=50)
    accuracy = dc.metrics.Metric(dc.metrics.accuracy_score, mode="classification")
    scores = model.evaluate(dataset, [accuracy], n_classes=10)
    assert scores['accuracy_score'] > 0.9
@pytest.mark.torch
def test_megnet_regression():
    """Regression-mode MEGNet test (currently unimplementable).

    FakeGraphGenerator cannot yet produce regression labels, so there is
    nothing to exercise.  Skip explicitly instead of returning early so
    the test shows up as "skipped" in reports rather than silently
    "passed".
    """
    pytest.skip("FakeGraphGenerator does not yet generate regression labels")
@pytest.mark.torch
def test_megnet_reload():
    """A restored model must reproduce the trained model's predictions."""
    generator = FGG(
        avg_n_nodes=10,
        n_node_features=5,
        avg_degree=4,
        n_edge_features=3,
        global_features=4,
        num_classes=3,
    )
    train_graphs = generator.sample(n_graphs=10)
    eval_graphs = generator.sample(n_graphs=10)
    model_dir = tempfile.mkdtemp()
    # Both models share identical settings; only the weights differ until
    # restore() loads the trained checkpoint from model_dir.
    model_kwargs = dict(
        n_node_features=5,
        n_edge_features=3,
        n_global_features=4,
        n_blocks=3,
        is_undirected=True,
        residual_connection=True,
        mode='classification',
        n_classes=3,
        batch_size=16,
        model_dir=model_dir,
    )
    model = MEGNetModel(**model_kwargs)
    model.fit(train_graphs, nb_epoch=10)
    reloaded_model = MEGNetModel(**model_kwargs)
    reloaded_model.restore()
    assert np.all(model.predict(eval_graphs) == reloaded_model.predict(eval_graphs))
| 32.474576 | 88 | 0.575678 | import pytest
import tempfile
import numpy as np
import deepchem as dc
from deepchem.utils.fake_data_generator import FakeGraphGenerator as FGG
try:
from deepchem.models.torch_models import MEGNetModel
except ModuleNotFoundError:
pass
@pytest.mark.torch
def test_megnet_overfit():
fgg = FGG(avg_n_nodes=10,
n_node_features=5,
avg_degree=4,
n_edge_features=3,
global_features=4,
num_classes=5,
task='graph')
graphs = fgg.sample(n_graphs=100)
model = MEGNetModel(n_node_features=5,
n_edge_features=3,
n_global_features=4,
n_blocks=3,
is_undirected=True,
residual_connection=True,
mode='classification',
n_classes=5,
batch_size=16)
metric = dc.metrics.Metric(dc.metrics.accuracy_score, mode="classification")
model.fit(graphs, nb_epoch=100)
scores = model.evaluate(graphs, [metric], n_classes=5)
assert scores['accuracy_score'] == 1.0
@pytest.mark.torch
def test_megnet_classification():
fgg = FGG(avg_n_nodes=10,
n_node_features=5,
avg_degree=4,
n_edge_features=3,
global_features=4,
num_classes=10)
graphs = fgg.sample(n_graphs=200)
model = MEGNetModel(n_node_features=5,
n_edge_features=3,
n_global_features=4,
n_blocks=3,
is_undirected=True,
residual_connection=True,
mode='classification',
n_classes=10,
batch_size=16)
metric = dc.metrics.Metric(dc.metrics.accuracy_score, mode="classification")
model.fit(graphs, nb_epoch=50)
scores = model.evaluate(graphs, [metric], n_classes=10)
assert scores['accuracy_score'] > 0.9
@pytest.mark.torch
def test_megnet_regression():
return
@pytest.mark.torch
def test_megnet_reload():
fgg = FGG(avg_n_nodes=10,
n_node_features=5,
avg_degree=4,
n_edge_features=3,
global_features=4,
num_classes=3)
graphs = fgg.sample(n_graphs=10)
test_graphs = fgg.sample(n_graphs=10)
model_dir = tempfile.mkdtemp()
model = MEGNetModel(n_node_features=5,
n_edge_features=3,
n_global_features=4,
n_blocks=3,
is_undirected=True,
residual_connection=True,
mode='classification',
n_classes=3,
batch_size=16,
model_dir=model_dir)
model.fit(graphs, nb_epoch=10)
reloaded_model = MEGNetModel(n_node_features=5,
n_edge_features=3,
n_global_features=4,
n_blocks=3,
is_undirected=True,
residual_connection=True,
mode='classification',
n_classes=3,
batch_size=16,
model_dir=model_dir)
reloaded_model.restore()
orig_predict = model.predict(test_graphs)
reloaded_predict = reloaded_model.predict(test_graphs)
assert np.all(orig_predict == reloaded_predict)
| true | true |
1c2f888b68a0b40bc943e04a36c78682cd388704 | 183 | py | Python | python-curso/scripts python/Desafio 02.py | JohnnyPessoli/Cursos | ea80ed21469d0807c54c9091adea9ec0116b8ba7 | [
"MIT"
] | null | null | null | python-curso/scripts python/Desafio 02.py | JohnnyPessoli/Cursos | ea80ed21469d0807c54c9091adea9ec0116b8ba7 | [
"MIT"
] | null | null | null | python-curso/scripts python/Desafio 02.py | JohnnyPessoli/Cursos | ea80ed21469d0807c54c9091adea9ec0116b8ba7 | [
"MIT"
] | null | null | null | Dia=input('Digite o dia de nascimento:')
Mês=input('Digite o mês de nascimento:')
Ano=input('Digite o ano de nascimento:')
print('Você nasceu no dia',Dia,'/',Mês,'/', Ano,'Correto?')
| 36.6 | 59 | 0.688525 | Dia=input('Digite o dia de nascimento:')
Mês=input('Digite o mês de nascimento:')
Ano=input('Digite o ano de nascimento:')
print('Você nasceu no dia',Dia,'/',Mês,'/', Ano,'Correto?')
| true | true |
1c2f891aed25a0ef2145170175c635d35a56e4ef | 1,968 | py | Python | myuw/dao/hx_toolkit_dao.py | timtim17/myuw | d59702a8095daf049d7e57cbb1f7f2a5bebc69af | [
"Apache-2.0"
] | null | null | null | myuw/dao/hx_toolkit_dao.py | timtim17/myuw | d59702a8095daf049d7e57cbb1f7f2a5bebc69af | [
"Apache-2.0"
] | null | null | null | myuw/dao/hx_toolkit_dao.py | timtim17/myuw | d59702a8095daf049d7e57cbb1f7f2a5bebc69af | [
"Apache-2.0"
] | null | null | null | # Copyright 2022 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
import math
import datetime
from myuw.dao.term import get_current_quarter, get_comparison_date
from hx_toolkit.file_dao import get_rendered_article_by_id, \
get_article_by_phase_quarter_week, get_article_links_by_category
def get_article_of_week_by_request(request):
    """Return the hx_toolkit article for the request's phase, quarter and week."""
    quarter_term = get_current_quarter(request)
    return get_article_by_phase_quarter_week(
        _get_phase_by_term(quarter_term),
        quarter_term.quarter,
        get_week_by_request(request),
    )
def get_week_by_request(request):
    """Return the week-of-quarter number for the request's comparison date."""
    return _get_week_between(get_current_quarter(request),
                             get_comparison_date(request))
def _get_week_between(term, now):
start = term.first_day_quarter
start = _make_start_sunday(start)
diff = now - start
if diff.days > 0:
diff = diff + datetime.timedelta(1)
week = int(math.ceil(diff.days/7.0))
elif diff.days == 0:
# first week of qtr will be week 1
week = 1
else:
# round down when negative
week = int(math.floor(diff.days/7.0))
return week
def _make_start_sunday(date):
day = date.weekday()
days_to_subtract = 0
if day != 6:
days_to_subtract = day + 1
date = date - datetime.timedelta(days=days_to_subtract)
return date
def _get_phase_by_term(term):
"""
For date override purposes all phases prior to start of will
default to phase A; a Phase will run from AU of one year through SU of the
next, ie one 'academic' year
"""
year = term.year
quarter = term.quarter
if quarter == 'autumn':
year += 1
phases = ['A', 'B', 'C']
start_year = 2009
years_since_start = year - start_year
phase_offset = 0
if years_since_start > 0:
phase_offset = years_since_start % 3
return phases[phase_offset]
def get_article_links():
    # Thin pass-through to hx_toolkit: returns article links grouped by category.
    return get_article_links_by_category()
| 25.230769 | 78 | 0.694106 |
import math
import datetime
from myuw.dao.term import get_current_quarter, get_comparison_date
from hx_toolkit.file_dao import get_rendered_article_by_id, \
get_article_by_phase_quarter_week, get_article_links_by_category
def get_article_of_week_by_request(request):
term = get_current_quarter(request)
phase = _get_phase_by_term(term)
week = get_week_by_request(request)
return get_article_by_phase_quarter_week(phase, term.quarter, week)
def get_week_by_request(request):
term = get_current_quarter(request)
now = get_comparison_date(request)
return _get_week_between(term, now)
def _get_week_between(term, now):
start = term.first_day_quarter
start = _make_start_sunday(start)
diff = now - start
if diff.days > 0:
diff = diff + datetime.timedelta(1)
week = int(math.ceil(diff.days/7.0))
elif diff.days == 0:
week = 1
else:
week = int(math.floor(diff.days/7.0))
return week
def _make_start_sunday(date):
day = date.weekday()
days_to_subtract = 0
if day != 6:
days_to_subtract = day + 1
date = date - datetime.timedelta(days=days_to_subtract)
return date
def _get_phase_by_term(term):
year = term.year
quarter = term.quarter
if quarter == 'autumn':
year += 1
phases = ['A', 'B', 'C']
start_year = 2009
years_since_start = year - start_year
phase_offset = 0
if years_since_start > 0:
phase_offset = years_since_start % 3
return phases[phase_offset]
def get_article_links():
return get_article_links_by_category()
| true | true |
1c2f89404ac573ff16e678a54fdb6435926de604 | 1,312 | py | Python | sensor/light_sensor.py | stefanmoosbrugger/GardenPi | f42f18ae60e17fdfc747df9d89dbb9ae9d673e04 | [
"MIT"
] | 1 | 2019-10-10T14:08:19.000Z | 2019-10-10T14:08:19.000Z | sensor/light_sensor.py | stefanmoosbrugger/GardenPi | f42f18ae60e17fdfc747df9d89dbb9ae9d673e04 | [
"MIT"
] | null | null | null | sensor/light_sensor.py | stefanmoosbrugger/GardenPi | f42f18ae60e17fdfc747df9d89dbb9ae9d673e04 | [
"MIT"
] | null | null | null | # This file implements the light sensor class.
import sensor_base
import random
import __builtin__
if not __builtin__.testmode:
import grovepi
import collections
import logging, sys
import time
class LightSensor(sensor_base.SensorBase):
    """Grove analog light sensor wrapper.

    Readings are kept in a bounded deque (the last 50 samples).  When
    ``__builtin__.testmode`` is set, hardware access is skipped and
    readings are simulated with random values.
    """
    def __init__(self, port_number):
        self.port_num = port_number
        if not __builtin__.testmode:
            grovepi.pinMode(self.port_num, "INPUT")
        self.name = "LightSensor " + str(self.port_num)
        self.simple_name = "light"
        # Rolling history of the most recent readings.
        self.data = collections.deque(maxlen=50)
    def get_state(self):
        """Read one light value (analog counts), record it and return it.

        Returns None if the underlying hardware read raises IOError.
        """
        try:
            time.sleep(0.5)  # let the ADC settle between reads
            if not __builtin__.testmode:
                val = grovepi.analogRead(self.port_num)
            else:
                val = random.uniform(0, 750)  # plausible simulated reading
            self.data.append(val)
            logging.debug('Read sensor (%s, port %s) value: %s', self.name, int(self.port_num), val)
            return val
        except IOError:
            logging.error('Error while reading sensor (%s) value', self.name)
    def get_states(self):
        """Return the recorded readings as a list (oldest first).

        Bug fix: was ``collections.list(self.data)`` which raised
        AttributeError -- the ``collections`` module has no ``list``.
        """
        return list(self.data)
    def set_state(self, val):
        """Light sensors are read-only; always raises NotImplementedError.

        Bug fix: previously raised the undefined name ``Error`` (a
        NameError), masking the intended message.
        """
        logging.error('Called set_state to unidirectional sensor (%s)', self.name)
        raise NotImplementedError("Called set_state to unidirectional sensor")
| 32.8 | 100 | 0.627287 |
import sensor_base
import random
import __builtin__
if not __builtin__.testmode:
import grovepi
import collections
import logging, sys
import time
class LightSensor(sensor_base.SensorBase):
def __init__(self,port_number):
self.port_num = port_number
if not __builtin__.testmode:
grovepi.pinMode(self.port_num, "INPUT")
self.name = "LightSensor " + str(self.port_num)
self.simple_name = "light"
self.data = collections.deque(maxlen=50)
def get_state(self):
try:
time.sleep(0.5)
val = -1.0
if not __builtin__.testmode:
val = grovepi.analogRead(self.port_num)
else:
val = random.uniform(0, 750)
self.data.append(val)
logging.debug('Read sensor (%s, port %s) value: %s', self.name, int(self.port_num), val)
return val
except IOError:
logging.error('Error while reading sensor (%s) value', self.name)
def get_states(self):
return collections.list(self.data)
def set_state(self, val):
logging.error('Called set_state to unidirectional sensor (%s)', self.name)
raise Error( "Called set_state to unidirectional sensor" )
| true | true |
1c2f894d8743dd717dedfc20b4581dac0260ce4b | 8,602 | py | Python | thunder_board/clients.py | ZW7436/ThunderBoard | b122d7fb75208da43df86213fb32901caa6c2e41 | [
"MIT"
] | 7 | 2020-04-10T09:05:45.000Z | 2021-08-30T07:26:49.000Z | thunder_board/clients.py | ZW7436/ThunderBoard | b122d7fb75208da43df86213fb32901caa6c2e41 | [
"MIT"
] | 2 | 2021-09-08T01:52:52.000Z | 2022-03-12T00:23:47.000Z | thunder_board/clients.py | ZW7436/ThunderBoard | b122d7fb75208da43df86213fb32901caa6c2e41 | [
"MIT"
] | 4 | 2021-03-31T14:27:21.000Z | 2021-08-30T07:26:52.000Z | import io
import socket
import struct
import time
import threading
import json
import logging
class BaseClient:
    """Base for all ThunderBoard clients.

    Owns a TCP connection to the board server and implements the wire
    protocol: each frame is a 2-byte native-endian length (``struct``
    format "h"), a block of "Key=Value\\n" metadata lines, then (for DATA
    frames) ``Length`` bytes of payload.  A daemon thread keeps the
    connection alive with periodic pings.
    """
    def __init__(self, name, board="", id="", server_host="localhost", server_port=2333):
        # Subclasses overwrite ``type`` with their panel kind ("text", ...).
        self.type = 'base'
        self.name = name
        self.board = board if board else "Default"
        # Auto-generate a time-based unique id when none is supplied.
        self.id = "%s%f" % (name, time.time()) if not id else id
        self.recv_server_host = server_host
        self.recv_server_port = server_port
        self.socket = None  # lazily opened by _establish()
        self.socket_send_lock = threading.Lock()  # serializes writers on the socket
        self.metadata = {}  # per-client metadata merged into every DATA frame
        threading.Thread(target=self._ping, daemon=True).start()
    def send(self, data):
        # Subclass responsibility: encode ``data`` and push it to the server.
        raise NotImplementedError
    def _establish(self):
        # (Re)open the TCP connection to the board server.
        self.socket = socket.socket()
        self.socket.connect((self.recv_server_host, self.recv_server_port))
    def _send(self, data):
        # Push ``data`` fully onto the socket, reconnecting (and restarting
        # the whole frame) if the connection drops mid-send.
        sent_len = 0
        with self.socket_send_lock:
            while sent_len < len(data):
                if not self.socket:
                    self._establish()
                chunk_len = self.socket.send(memoryview(data)[sent_len:])
                if chunk_len == 0:
                    self._establish()
                    sent_len = 0  # resend from the start
                else:
                    sent_len += chunk_len
    def _send_control_msg(self, control_msg):
        # Send a payload-less control frame (PING / INACTIVE / DISCARD).
        metadata = {
            "Id": self.id,
            "CTL": control_msg
        }
        metadata_str = ""
        for key, value in metadata.items():
            metadata_str += f"{key}={value}\n"
        metadata = bytes(metadata_str, 'utf-8')
        metadata_len = struct.pack("h", len(metadata))
        to_be_sent = metadata_len + metadata
        self._send(to_be_sent)
    def _send_with_metadata(self, metadata, data):
        # Send a DATA frame: the caller-supplied metadata dict is augmented
        # in place with the client's identity and the payload length, then
        # serialized and prepended to ``data``.
        metadata['Type'] = self.type
        metadata['Id'] = self.id
        metadata['Name'] = self.name
        metadata['Board'] = self.board
        metadata['Length'] = len(data)
        metadata['CTL'] = "DATA"
        metadata_str = ""
        for key, value in metadata.items():
            metadata_str += f"{key}={value}\n"
        metadata = bytes(metadata_str, 'utf-8')
        metadata_len = struct.pack("h", len(metadata))
        to_be_sent = metadata_len + metadata + data
        self._send(to_be_sent)
    def _ping(self):
        # Keep-alive loop (daemon thread): a zero-length frame every 3s once
        # connected; an explicit PING (which also connects) before that.
        while True:
            if self.socket:
                time.sleep(3)
                self._send(struct.pack("h", 0))
            else:
                self._send_control_msg("PING")
    def _recv_chunk(self, _socket, length):
        # Read exactly ``length`` bytes from ``_socket``, raising if the
        # peer closes early.
        # Bug fix: this previously called ``_socket.recv(length)`` on every
        # iteration; after a short read that could pull in bytes belonging
        # to the *next* frame and return more than ``length`` bytes.  Only
        # the remaining byte count is requested now.
        received = 0
        chunks = io.BytesIO()
        while received < length:
            chunk = _socket.recv(length - received)
            if chunk == b'':
                raise ConnectionError("Socket connection broken")
            chunks.write(chunk)
            received += len(chunk)
        return chunks.getbuffer()
    def recv_loop(self):
        # Blocking receive loop: parse length-prefixed "key=value" messages
        # from the server and dispatch each to message_handler().
        time.sleep(1)
        while True:
            if not self.socket:
                self._establish()
            data_length, = struct.unpack("h", self._recv_chunk(self.socket, 2))
            data_str = self._recv_chunk(self.socket, data_length).tobytes().decode('utf-8')
            logging.debug(f"Received message {data_str}")
            data = {}
            for line in data_str.split("\n"):
                if line:
                    key, value = line.split("=", 1)
                    data[key] = value
            self.message_handler(data)
    def start_recv_thread(self):
        # Run recv_loop() on a background daemon thread.
        threading.Thread(name="Loop", target=self.recv_loop, daemon=True).start()
    def message_handler(self, data_dict):
        # Subclass responsibility: react to a parsed server message.
        raise NotImplementedError
    def close(self):
        # Tell the server this client is inactive, then drop the connection.
        self._send_control_msg("INACTIVE")
        self.socket.close()
        self.socket = None
    def close_and_discard(self):
        # Tell the server to discard this client's panel, then disconnect.
        self._send_control_msg("DISCARD")
        self.socket.close()
        self.socket = None
    def __del__(self):
        self.close()
class TextClient(BaseClient):
    """Client that pushes plain-text updates to a board panel."""
    def __init__(self, name, board="", rotate=True, id="", server_host="localhost", server_port=2333):
        super().__init__(name, board, id, server_host, server_port)
        self.type = "text"
        # Collapse the original if/else: the server only needs the boolean.
        self.metadata['rotate'] = bool(rotate)
    def send(self, text):
        """Encode ``text`` as UTF-8 and ship it with this client's metadata."""
        self._send_with_metadata(self.metadata, text.encode('utf-8'))
class ImageClient(BaseClient):
    """Client that sends pre-encoded image bytes to a board panel."""
    def __init__(self, name, board="", id="", server_host="localhost", server_port=2333, format="jpeg"):
        super().__init__(name, board, id, server_host, server_port)
        self.type = "image"
        self.format = format  # encoding of the payload, e.g. "jpeg" or "png"
        self.metadata['format'] = format  # tells the board how to decode it
    def send(self, image):
        # ``image`` is an io.BytesIO holding already-encoded image data;
        # getbuffer() avoids copying the bytes before transmission.
        self._send_with_metadata(self.metadata, image.getbuffer())
class PlotClient(ImageClient):
    """Client that renders a matplotlib figure to an image and sends it."""
    def __init__(self, name, board="", id="", server_host="localhost", server_port=2333, format="png"):
        super().__init__(name, board, id, server_host, server_port, format)
    def send(self, fig): # fig: 'matplotlib.figure.Figure'
        """Serialize ``fig`` in ``self.format`` and send the resulting bytes."""
        image_buffer = io.BytesIO()
        fig.savefig(image_buffer, format=self.format)
        img_data = image_buffer.getbuffer()
        if self.format == "svg":
            img_data = self.sanitize_mpl_svg(img_data)
        self._send_with_metadata(self.metadata, img_data)
    def sanitize_mpl_svg(self, buf: memoryview):
        # Blank out (overwrite with spaces, in place) lines 0-3 and 5-9 of
        # matplotlib's SVG output while leaving line 4 intact -- presumably
        # the XML declaration / DOCTYPE header lines that break inline
        # embedding; TODO confirm which header lines matplotlib emits.
        # Lines are blanked rather than removed, so the buffer length and
        # the position of everything after line 9 are unchanged.
        line_to_remove = [0, 1, 2, 3, 5, 6, 7, 8, 9]
        current_line = 0
        for i in range(len(buf)):
            if buf[i] == b"\n"[0]:
                current_line += 1
                # Nothing past the last targeted line needs touching.
                if current_line > max(line_to_remove):
                    break
            if current_line in line_to_remove:
                buf[i] = b" "[0]
        return buf
class DialogClient(BaseClient):
    """Client that renders an interactive form (buttons, inputs, labels)
    on the board and routes UI events back to Python callbacks.

    Fields are tracked per group; display() serializes them to JSON in
    group order, and message_handler() dispatches "<field>@<event>"
    messages to the registered handlers.
    """
    def __init__(self, name, board="", id="", server_host="localhost", server_port=2333):
        super().__init__(name, board, id, server_host, server_port)
        self.type = "dialog"
        self.groups = {}  # group name -> ordered list of field names
        self.groups['Default'] = []
        self.groups_order = [ 'Default' ]  # preserves group display order
        self.fields = {}  # field name -> field descriptor dict
        self.handlers = {}  # "<field>@<event>" -> callback(args)
    def add_group(self, name=""):
        # Register a new (empty) field group; no-op if it already exists.
        if name not in self.groups:
            self.groups[name] = []
            self.groups_order.append(name)
    def add_button(self, name="", text="", handler=None, group="Default", enabled=True):
        """Add (or redefine) a button; ``handler`` is invoked on click."""
        if not name:
            raise ValueError("Name can not be empty.")
        # Only append to the group on first definition; redefining an
        # existing field keeps its position.
        if not name in self.fields:
            self.groups[group].append(name)
        self.fields[name] = { 'type': 'button',
                              'text': text,
                              'enabled': enabled }
        if handler:
            self.fields[name]['handle'] = 'on_click'
            self.handlers[name + '@on_click'] = handler
    def add_input_box(self, name="", label_text="", handler=None, default_value="", group="Default", enabled=True):
        """Add (or redefine) a text input; ``handler`` is invoked on change."""
        if not name:
            raise ValueError("Name can not be empty.")
        if not name in self.fields:
            self.groups[group].append(name)
        self.fields[name] = { 'type': 'input',
                              'text': label_text,
                              'value': default_value,
                              'enabled': enabled }
        if handler:
            self.fields[name]['handle'] = 'on_change'
            self.handlers[name + '@on_change'] = handler
    def add_text_label(self, name="", text="", group="Default"):
        """Add (or redefine) a static text label (no events)."""
        if not name:
            raise ValueError("Name can not be empty.")
        if not name in self.fields:
            self.groups[group].append(name)
        self.fields[name] = { 'type': 'label',
                              'text': text }
    def add_slider(self, name="", label_text="", value_range=None, default_value="", group="Default", enabled=True):
        # Not implemented yet; placeholder kept for API symmetry.
        pass
    def display(self):
        """Serialize all fields (in group, then insertion, order) and send."""
        fields_to_send = []
        for group in self.groups_order:
            fields = self.groups[group]
            for field in fields:
                fields_to_send.append(
                    {
                        'group': group,
                        'name': field,
                        **self.fields[field]
                    }
                )
        self._send_with_metadata(self.metadata, bytes(json.dumps(fields_to_send), 'utf-8'))
    def message_handler(self, data_dict):
        # Route a UI event ("<field>@<event>") to its registered handler;
        # unknown events are logged and ignored.
        event = data_dict['event']
        args = data_dict['args']
        logging.info(f"Event {event} emitted with args {args}")
        if event in self.handlers:
            self.handlers[event](args)
| 31.859259 | 116 | 0.556382 | import io
import socket
import struct
import time
import threading
import json
import logging
class BaseClient:
def __init__(self, name, board="", id="", server_host="localhost", server_port=2333):
self.type = 'base'
self.name = name
self.board = board if board else "Default"
self.id = "%s%f" % (name, time.time()) if not id else id
self.recv_server_host = server_host
self.recv_server_port = server_port
self.socket = None
self.socket_send_lock = threading.Lock()
self.metadata = {}
threading.Thread(target=self._ping, daemon=True).start()
def send(self, data):
raise NotImplementedError
def _establish(self):
self.socket = socket.socket()
self.socket.connect((self.recv_server_host, self.recv_server_port))
def _send(self, data):
sent_len = 0
with self.socket_send_lock:
while sent_len < len(data):
if not self.socket:
self._establish()
chunk_len = self.socket.send(memoryview(data)[sent_len:])
if chunk_len == 0:
self._establish()
sent_len = 0
else:
sent_len += chunk_len
def _send_control_msg(self, control_msg):
metadata = {
"Id": self.id,
"CTL": control_msg
}
metadata_str = ""
for key, value in metadata.items():
metadata_str += f"{key}={value}\n"
metadata = bytes(metadata_str, 'utf-8')
metadata_len = struct.pack("h", len(metadata))
to_be_sent = metadata_len + metadata
self._send(to_be_sent)
def _send_with_metadata(self, metadata, data):
metadata['Type'] = self.type
metadata['Id'] = self.id
metadata['Name'] = self.name
metadata['Board'] = self.board
metadata['Length'] = len(data)
metadata['CTL'] = "DATA"
metadata_str = ""
for key, value in metadata.items():
metadata_str += f"{key}={value}\n"
metadata = bytes(metadata_str, 'utf-8')
metadata_len = struct.pack("h", len(metadata))
to_be_sent = metadata_len + metadata + data
self._send(to_be_sent)
def _ping(self):
while True:
if self.socket:
time.sleep(3)
self._send(struct.pack("h", 0))
else:
self._send_control_msg("PING")
def _recv_chunk(self, _socket, length):
received = 0
chunks = io.BytesIO()
while received < length:
chunk = _socket.recv(length)
if chunk == b'':
raise ConnectionError("Socket connection broken")
chunks.write(chunk)
received += len(chunk)
return chunks.getbuffer()
def recv_loop(self):
time.sleep(1)
while True:
if not self.socket:
self._establish()
data_length, = struct.unpack("h", self._recv_chunk(self.socket, 2))
data_str = self._recv_chunk(self.socket, data_length).tobytes().decode('utf-8')
logging.debug(f"Received message {data_str}")
data = {}
for line in data_str.split("\n"):
if line:
key, value = line.split("=", 1)
data[key] = value
self.message_handler(data)
def start_recv_thread(self):
threading.Thread(name="Loop", target=self.recv_loop, daemon=True).start()
def message_handler(self, data_dict):
raise NotImplementedError
def close(self):
self._send_control_msg("INACTIVE")
self.socket.close()
self.socket = None
def close_and_discard(self):
self._send_control_msg("DISCARD")
self.socket.close()
self.socket = None
def __del__(self):
self.close()
class TextClient(BaseClient):
def __init__(self, name, board="", rotate=True, id="", server_host="localhost", server_port=2333):
super().__init__(name, board, id, server_host, server_port)
self.type = "text"
if rotate:
self.metadata['rotate'] = True
else:
self.metadata['rotate'] = False
def send(self, text):
self._send_with_metadata(self.metadata, bytes(text, 'utf-8'))
class ImageClient(BaseClient):
def __init__(self, name, board="", id="", server_host="localhost", server_port=2333, format="jpeg"):
super().__init__(name, board, id, server_host, server_port)
self.type = "image"
self.format = format
self.metadata['format'] = format
def send(self, image):
self._send_with_metadata(self.metadata, image.getbuffer())
class PlotClient(ImageClient):
def __init__(self, name, board="", id="", server_host="localhost", server_port=2333, format="png"):
super().__init__(name, board, id, server_host, server_port, format)
def send(self, fig):
image_buffer = io.BytesIO()
fig.savefig(image_buffer, format=self.format)
img_data = image_buffer.getbuffer()
if self.format == "svg":
img_data = self.sanitize_mpl_svg(img_data)
self._send_with_metadata(self.metadata, img_data)
def sanitize_mpl_svg(self, buf: memoryview):
line_to_remove = [0, 1, 2, 3, 5, 6, 7, 8, 9]
current_line = 0
for i in range(len(buf)):
if buf[i] == b"\n"[0]:
current_line += 1
if current_line > max(line_to_remove):
break
if current_line in line_to_remove:
buf[i] = b" "[0]
return buf
class DialogClient(BaseClient):
def __init__(self, name, board="", id="", server_host="localhost", server_port=2333):
super().__init__(name, board, id, server_host, server_port)
self.type = "dialog"
self.groups = {}
self.groups['Default'] = []
self.groups_order = [ 'Default' ]
self.fields = {}
self.handlers = {}
def add_group(self, name=""):
if name not in self.groups:
self.groups[name] = []
self.groups_order.append(name)
def add_button(self, name="", text="", handler=None, group="Default", enabled=True):
if not name:
raise ValueError("Name can not be empty.")
if not name in self.fields:
self.groups[group].append(name)
self.fields[name] = { 'type': 'button',
'text': text,
'enabled': enabled }
if handler:
self.fields[name]['handle'] = 'on_click'
self.handlers[name + '@on_click'] = handler
def add_input_box(self, name="", label_text="", handler=None, default_value="", group="Default", enabled=True):
if not name:
raise ValueError("Name can not be empty.")
if not name in self.fields:
self.groups[group].append(name)
self.fields[name] = { 'type': 'input',
'text': label_text,
'value': default_value,
'enabled': enabled }
if handler:
self.fields[name]['handle'] = 'on_change'
self.handlers[name + '@on_change'] = handler
def add_text_label(self, name="", text="", group="Default"):
if not name:
raise ValueError("Name can not be empty.")
if not name in self.fields:
self.groups[group].append(name)
self.fields[name] = { 'type': 'label',
'text': text }
def add_slider(self, name="", label_text="", value_range=None, default_value="", group="Default", enabled=True):
pass
def display(self):
fields_to_send = []
for group in self.groups_order:
fields = self.groups[group]
for field in fields:
fields_to_send.append(
{
'group': group,
'name': field,
**self.fields[field]
}
)
self._send_with_metadata(self.metadata, bytes(json.dumps(fields_to_send), 'utf-8'))
def message_handler(self, data_dict):
event = data_dict['event']
args = data_dict['args']
logging.info(f"Event {event} emitted with args {args}")
if event in self.handlers:
self.handlers[event](args)
| true | true |
1c2f8d526733ecd583c146f33f07be78e77d81fa | 78,089 | py | Python | tccli/services/iotcloud/iotcloud_client.py | zqfan/tencentcloud-cli | b6ad9fced2a2b340087e4e5522121d405f68b615 | [
"Apache-2.0"
] | null | null | null | tccli/services/iotcloud/iotcloud_client.py | zqfan/tencentcloud-cli | b6ad9fced2a2b340087e4e5522121d405f68b615 | [
"Apache-2.0"
] | null | null | null | tccli/services/iotcloud/iotcloud_client.py | zqfan/tencentcloud-cli | b6ad9fced2a2b340087e4e5522121d405f68b615 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import os
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.iotcloud.v20180614 import iotcloud_client as iotcloud_client_v20180614
from tencentcloud.iotcloud.v20180614 import models as models_v20180614
def doCreateTask(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateTaskRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateTask(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doReplaceTopicRule(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.ReplaceTopicRuleRequest()
model.from_json_string(json.dumps(args))
rsp = client.ReplaceTopicRule(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPublishRRPCMessage(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.PublishRRPCMessageRequest()
model.from_json_string(json.dumps(args))
rsp = client.PublishRRPCMessage(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteLoraDevice(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteLoraDeviceRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteLoraDevice(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDeviceResources(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDeviceResourcesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeDeviceResources(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeFirmware(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeFirmwareRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeFirmware(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDeviceShadow(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDeviceShadowRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeDeviceShadow(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDevice(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDeviceRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeDevice(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetCOSURL(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.GetCOSURLRequest()
model.from_json_string(json.dumps(args))
rsp = client.GetCOSURL(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteProduct(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DeleteProductRequest()
model.from_json_string(json.dumps(args))
rsp = client.DeleteProduct(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAllDevices(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeAllDevicesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeAllDevices(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTopicRule(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateTopicRuleRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateTopicRule(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRetryDeviceFirmwareTask(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.RetryDeviceFirmwareTaskRequest()
model.from_json_string(json.dumps(args))
rsp = client.RetryDeviceFirmwareTask(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTopicPolicy(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateTopicPolicyRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateTopicPolicy(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeFirmwareTaskDevices(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeFirmwareTaskDevicesRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeFirmwareTaskDevices(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBatchUpdateFirmware(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.BatchUpdateFirmwareRequest()
model.from_json_string(json.dumps(args))
rsp = client.BatchUpdateFirmware(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateProduct(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateProductRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateProduct(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateDevice(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateDeviceRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateDevice(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUploadFirmware(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.UploadFirmwareRequest()
model.from_json_string(json.dumps(args))
rsp = client.UploadFirmware(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDeviceClientKey(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeDeviceClientKeyRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeDeviceClientKey(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProducts(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.DescribeProductsRequest()
model.from_json_string(json.dumps(args))
rsp = client.DescribeProducts(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateMultiDevicesTask(args, parsed_globals):
g_param = parse_global_arg(parsed_globals)
cred = credential.Credential(
g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
)
http_profile = HttpProfile(
reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
reqMethod="POST",
endpoint=g_param[OptionsDefine.Endpoint]
)
profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
client._sdkVersion += ("_CLI_" + __version__)
models = MODELS_MAP[g_param[OptionsDefine.Version]]
model = models.CreateMultiDevicesTaskRequest()
model.from_json_string(json.dumps(args))
rsp = client.CreateMultiDevicesTask(model)
result = rsp.to_json_string()
try:
json_obj = json.loads(result)
except TypeError as e:
json_obj = json.loads(result.decode('utf-8')) # python3.3
FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeResourceTasks(args, parsed_globals):
    """Invoke the IoT Cloud ``DescribeResourceTasks`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeResourceTasksRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeResourceTasks(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeFirmwareTaskStatistics(args, parsed_globals):
    """Invoke the IoT Cloud ``DescribeFirmwareTaskStatistics`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeFirmwareTaskStatisticsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeFirmwareTaskStatistics(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeLoraDevice(args, parsed_globals):
    """Invoke the IoT Cloud ``DescribeLoraDevice`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeLoraDeviceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeLoraDevice(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTask(args, parsed_globals):
    """Invoke the IoT Cloud ``DescribeTask`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTaskRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeTask(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeFirmwareTask(args, parsed_globals):
    """Invoke the IoT Cloud ``DescribeFirmwareTask`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeFirmwareTaskRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeFirmwareTask(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCancelTask(args, parsed_globals):
    """Invoke the IoT Cloud ``CancelTask`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CancelTaskRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CancelTask(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeFirmwareTasks(args, parsed_globals):
    """Invoke the IoT Cloud ``DescribeFirmwareTasks`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeFirmwareTasksRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeFirmwareTasks(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doEditFirmware(args, parsed_globals):
    """Invoke the IoT Cloud ``EditFirmware`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.EditFirmwareRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.EditFirmware(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetUserResourceInfo(args, parsed_globals):
    """Invoke the IoT Cloud ``GetUserResourceInfo`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.GetUserResourceInfoRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.GetUserResourceInfo(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDeviceResource(args, parsed_globals):
    """Invoke the IoT Cloud ``DescribeDeviceResource`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDeviceResourceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDeviceResource(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPublishMessage(args, parsed_globals):
    """Invoke the IoT Cloud ``PublishMessage`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.PublishMessageRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.PublishMessage(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeFirmwareTaskDistribution(args, parsed_globals):
    """Invoke the IoT Cloud ``DescribeFirmwareTaskDistribution`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeFirmwareTaskDistributionRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeFirmwareTaskDistribution(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteDevice(args, parsed_globals):
    """Invoke the IoT Cloud ``DeleteDevice`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteDeviceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteDevice(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTaskFileUrl(args, parsed_globals):
    """Invoke the IoT Cloud ``CreateTaskFileUrl`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateTaskFileUrlRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateTaskFileUrl(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doResetDeviceState(args, parsed_globals):
    """Invoke the IoT Cloud ``ResetDeviceState`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ResetDeviceStateRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ResetDeviceState(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProductResource(args, parsed_globals):
    """Invoke the IoT Cloud ``DescribeProductResource`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProductResourceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProductResource(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTasks(args, parsed_globals):
    """Invoke the IoT Cloud ``DescribeTasks`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTasksRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeTasks(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeMultiDevices(args, parsed_globals):
    """Invoke the IoT Cloud ``DescribeMultiDevices`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeMultiDevicesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeMultiDevices(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUpdateDeviceAvailableState(args, parsed_globals):
    """Invoke the IoT Cloud ``UpdateDeviceAvailableState`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.UpdateDeviceAvailableStateRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.UpdateDeviceAvailableState(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCancelDeviceFirmwareTask(args, parsed_globals):
    """Invoke the IoT Cloud ``CancelDeviceFirmwareTask`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CancelDeviceFirmwareTaskRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CancelDeviceFirmwareTask(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPublishAsDevice(args, parsed_globals):
    """Invoke the IoT Cloud ``PublishAsDevice`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.PublishAsDeviceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.PublishAsDevice(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateLoraDevice(args, parsed_globals):
    """Invoke the IoT Cloud ``CreateLoraDevice`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateLoraDeviceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateLoraDevice(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doEnableTopicRule(args, parsed_globals):
    """Invoke the IoT Cloud ``EnableTopicRule`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.EnableTopicRuleRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.EnableTopicRule(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBindDevices(args, parsed_globals):
    """Invoke the IoT Cloud ``BindDevices`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.BindDevicesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.BindDevices(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUpdateTopicPolicy(args, parsed_globals):
    """Invoke the IoT Cloud ``UpdateTopicPolicy`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.UpdateTopicPolicyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.UpdateTopicPolicy(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUnbindDevices(args, parsed_globals):
    """Invoke the IoT Cloud ``UnbindDevices`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.UnbindDevicesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.UnbindDevices(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProductTask(args, parsed_globals):
    """Invoke the IoT Cloud ``DescribeProductTask`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProductTaskRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProductTask(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProductTasks(args, parsed_globals):
    """Invoke the IoT Cloud ``DescribeProductTasks`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProductTasksRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProductTasks(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDevices(args, parsed_globals):
    """Invoke the IoT Cloud ``DescribeDevices`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDevicesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDevices(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDisableTopicRule(args, parsed_globals):
    """Invoke the IoT Cloud ``DisableTopicRule`` API and print the response.

    ``args`` holds the request parameters; ``parsed_globals`` carries the
    global CLI options (credentials, region, endpoint, output filter).
    """
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DisableTopicRuleRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DisableTopicRule(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # to_json_string may return bytes on some Pythons (e.g. 3.3)
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeMultiDevTask(args, parsed_globals):
    """CLI entry point: call the IoT Cloud DescribeMultiDevTask API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeMultiDevTaskRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeMultiDevTask(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUpdateDeviceShadow(args, parsed_globals):
    """CLI entry point: call the IoT Cloud UpdateDeviceShadow API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.UpdateDeviceShadowRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.UpdateDeviceShadow(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribePushResourceTaskStatistics(args, parsed_globals):
    """CLI entry point: call the IoT Cloud DescribePushResourceTaskStatistics API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribePushResourceTaskStatisticsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribePushResourceTaskStatistics(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPublishToDevice(args, parsed_globals):
    """CLI entry point: call the IoT Cloud PublishToDevice API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.PublishToDeviceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.PublishToDevice(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProductResources(args, parsed_globals):
    """CLI entry point: call the IoT Cloud DescribeProductResources API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProductResourcesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProductResources(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateMultiDevice(args, parsed_globals):
    """CLI entry point: call the IoT Cloud CreateMultiDevice API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateMultiDeviceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateMultiDevice(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPublishBroadcastMessage(args, parsed_globals):
    """CLI entry point: call the IoT Cloud PublishBroadcastMessage API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.PublishBroadcastMessageRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.PublishBroadcastMessage(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteTopicRule(args, parsed_globals):
    """CLI entry point: call the IoT Cloud DeleteTopicRule API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteTopicRuleRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteTopicRule(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
# Maps a supported API version string to the generated SDK client module.
CLIENT_MAP = {
    "v20180614": iotcloud_client_v20180614,
}
# Maps a supported API version string to the generated request/response models.
MODELS_MAP = {
    "v20180614": models_v20180614,
}
# Dispatch table: CLI action name -> handler function defined in this module.
ACTION_MAP = {
    "CreateTask": doCreateTask,
    "ReplaceTopicRule": doReplaceTopicRule,
    "PublishRRPCMessage": doPublishRRPCMessage,
    "DeleteLoraDevice": doDeleteLoraDevice,
    "DescribeDeviceResources": doDescribeDeviceResources,
    "DescribeFirmware": doDescribeFirmware,
    "DescribeDeviceShadow": doDescribeDeviceShadow,
    "DescribeDevice": doDescribeDevice,
    "GetCOSURL": doGetCOSURL,
    "DeleteProduct": doDeleteProduct,
    "DescribeAllDevices": doDescribeAllDevices,
    "CreateTopicRule": doCreateTopicRule,
    "RetryDeviceFirmwareTask": doRetryDeviceFirmwareTask,
    "CreateTopicPolicy": doCreateTopicPolicy,
    "DescribeFirmwareTaskDevices": doDescribeFirmwareTaskDevices,
    "BatchUpdateFirmware": doBatchUpdateFirmware,
    "CreateProduct": doCreateProduct,
    "CreateDevice": doCreateDevice,
    "UploadFirmware": doUploadFirmware,
    "DescribeDeviceClientKey": doDescribeDeviceClientKey,
    "DescribeProducts": doDescribeProducts,
    "CreateMultiDevicesTask": doCreateMultiDevicesTask,
    "DescribeResourceTasks": doDescribeResourceTasks,
    "DescribeFirmwareTaskStatistics": doDescribeFirmwareTaskStatistics,
    "DescribeLoraDevice": doDescribeLoraDevice,
    "DescribeTask": doDescribeTask,
    "DescribeFirmwareTask": doDescribeFirmwareTask,
    "CancelTask": doCancelTask,
    "DescribeFirmwareTasks": doDescribeFirmwareTasks,
    "EditFirmware": doEditFirmware,
    "GetUserResourceInfo": doGetUserResourceInfo,
    "DescribeDeviceResource": doDescribeDeviceResource,
    "PublishMessage": doPublishMessage,
    "DescribeFirmwareTaskDistribution": doDescribeFirmwareTaskDistribution,
    "DeleteDevice": doDeleteDevice,
    "CreateTaskFileUrl": doCreateTaskFileUrl,
    "ResetDeviceState": doResetDeviceState,
    "DescribeProductResource": doDescribeProductResource,
    "DescribeTasks": doDescribeTasks,
    "DescribeMultiDevices": doDescribeMultiDevices,
    "UpdateDeviceAvailableState": doUpdateDeviceAvailableState,
    "CancelDeviceFirmwareTask": doCancelDeviceFirmwareTask,
    "PublishAsDevice": doPublishAsDevice,
    "CreateLoraDevice": doCreateLoraDevice,
    "EnableTopicRule": doEnableTopicRule,
    "BindDevices": doBindDevices,
    "UpdateTopicPolicy": doUpdateTopicPolicy,
    "UnbindDevices": doUnbindDevices,
    "DescribeProductTask": doDescribeProductTask,
    "DescribeProductTasks": doDescribeProductTasks,
    "DescribeDevices": doDescribeDevices,
    "DisableTopicRule": doDisableTopicRule,
    "DescribeMultiDevTask": doDescribeMultiDevTask,
    "UpdateDeviceShadow": doUpdateDeviceShadow,
    "DescribePushResourceTaskStatistics": doDescribePushResourceTaskStatistics,
    "PublishToDevice": doPublishToDevice,
    "DescribeProductResources": doDescribeProductResources,
    "CreateMultiDevice": doCreateMultiDevice,
    "PublishBroadcastMessage": doPublishBroadcastMessage,
    "DeleteTopicRule": doDeleteTopicRule,
}
# API versions this CLI module can speak; validated in parse_global_arg().
AVAILABLE_VERSION_LIST = [
    "v20180614",
]
def action_caller():
    """Return the action-name -> handler dispatch table for this service."""
    return ACTION_MAP
def parse_global_arg(parsed_globals):
    """Resolve global CLI parameters from the command line, env vars and profile files.

    Fills in any missing SecretId/SecretKey/Token, Region, Output, Version and
    Endpoint values (in that precedence order: explicit arguments, environment
    variables, then the ``~/.tccli`` profile/credential files) and returns the
    completed parameter dict.

    Raises ConfigurationError when a required value cannot be resolved or the
    profile files are malformed.
    """
    g_param = parsed_globals

    is_exist_profile = True
    if not parsed_globals["profile"]:
        is_exist_profile = False
        g_param["profile"] = "default"

    configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
    is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
    is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")

    conf = {}
    cred = {}
    if is_conf_exist:
        conf = Utils.load_json_msg(conf_path)
    if is_cred_exist:
        cred = Utils.load_json_msg(cred_path)

    if not (isinstance(conf, dict) and isinstance(cred, dict)):
        raise ConfigurationError(
            "file: %s or %s is not json format"
            % (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))

    if OptionsDefine.Token not in cred:
        cred[OptionsDefine.Token] = None

    # Environment variables are only consulted when no explicit profile was given.
    if not is_exist_profile:
        if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
            cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
            cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
            cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)

        if os.environ.get(OptionsDefine.ENV_REGION):
            conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)

    # Fill any option still unset on the command line from the profile files.
    for param in g_param:
        if g_param[param] is None:
            if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
                if param in cred:
                    g_param[param] = cred[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
            elif param in [OptionsDefine.Region, OptionsDefine.Output]:
                if param in conf:
                    g_param[param] = conf[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)

    try:
        # "2018-06-14" style versions are normalised to the "v20180614" map keys.
        if g_param[OptionsDefine.ServiceVersion]:
            g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
        else:
            version = conf["iotcloud"][OptionsDefine.Version]
            g_param[OptionsDefine.Version] = "v" + version.replace('-', '')

        if g_param[OptionsDefine.Endpoint] is None:
            g_param[OptionsDefine.Endpoint] = conf["iotcloud"][OptionsDefine.Endpoint]
    except Exception as err:
        raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))

    if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
        raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))

    return g_param
| 43.698377 | 105 | 0.729258 |
import os
import json
import tccli.options_define as OptionsDefine
import tccli.format_output as FormatOutput
from tccli import __version__
from tccli.utils import Utils
from tccli.exceptions import ConfigurationError
from tencentcloud.common import credential
from tencentcloud.common.profile.http_profile import HttpProfile
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.iotcloud.v20180614 import iotcloud_client as iotcloud_client_v20180614
from tencentcloud.iotcloud.v20180614 import models as models_v20180614
def doCreateTask(args, parsed_globals):
    """CLI entry point: call the IoT Cloud CreateTask API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateTaskRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateTask(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doReplaceTopicRule(args, parsed_globals):
    """CLI entry point: call the IoT Cloud ReplaceTopicRule API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ReplaceTopicRuleRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ReplaceTopicRule(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPublishRRPCMessage(args, parsed_globals):
    """CLI entry point: call the IoT Cloud PublishRRPCMessage API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.PublishRRPCMessageRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.PublishRRPCMessage(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteLoraDevice(args, parsed_globals):
    """CLI entry point: call the IoT Cloud DeleteLoraDevice API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteLoraDeviceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteLoraDevice(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDeviceResources(args, parsed_globals):
    """CLI entry point: call the IoT Cloud DescribeDeviceResources API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDeviceResourcesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDeviceResources(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeFirmware(args, parsed_globals):
    """CLI entry point: call the IoT Cloud DescribeFirmware API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeFirmwareRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeFirmware(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDeviceShadow(args, parsed_globals):
    """CLI entry point: call the IoT Cloud DescribeDeviceShadow API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDeviceShadowRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDeviceShadow(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDevice(args, parsed_globals):
    """CLI entry point: call the IoT Cloud DescribeDevice API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDeviceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDevice(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetCOSURL(args, parsed_globals):
    """CLI entry point: call the IoT Cloud GetCOSURL API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.GetCOSURLRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.GetCOSURL(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteProduct(args, parsed_globals):
    """CLI entry point: call the IoT Cloud DeleteProduct API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteProductRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteProduct(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeAllDevices(args, parsed_globals):
    """CLI entry point: call the IoT Cloud DescribeAllDevices API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeAllDevicesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeAllDevices(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTopicRule(args, parsed_globals):
    """CLI entry point: call the IoT Cloud CreateTopicRule API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateTopicRuleRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateTopicRule(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doRetryDeviceFirmwareTask(args, parsed_globals):
    """CLI entry point: call the IoT Cloud RetryDeviceFirmwareTask API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.RetryDeviceFirmwareTaskRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.RetryDeviceFirmwareTask(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTopicPolicy(args, parsed_globals):
    """CLI entry point: call the IoT Cloud CreateTopicPolicy API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateTopicPolicyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateTopicPolicy(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeFirmwareTaskDevices(args, parsed_globals):
    """CLI entry point: call the IoT Cloud DescribeFirmwareTaskDevices API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeFirmwareTaskDevicesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeFirmwareTaskDevices(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBatchUpdateFirmware(args, parsed_globals):
    """CLI entry point: call the IoT Cloud BatchUpdateFirmware API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.BatchUpdateFirmwareRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.BatchUpdateFirmware(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateProduct(args, parsed_globals):
    """CLI entry point: call the IoT Cloud CreateProduct API and print the JSON reply."""
    g_param = parse_global_arg(parsed_globals)
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    # Tag the user agent so the server can distinguish CLI traffic from SDK usage.
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateProductRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateProduct(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # to_json_string() may return bytes on some interpreter versions.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateDevice(args, parsed_globals):
    """CLI action: call the IoT Cloud ``CreateDevice`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateDeviceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateDevice(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUploadFirmware(args, parsed_globals):
    """CLI action: call the IoT Cloud ``UploadFirmware`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.UploadFirmwareRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.UploadFirmware(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDeviceClientKey(args, parsed_globals):
    """CLI action: call the IoT Cloud ``DescribeDeviceClientKey`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDeviceClientKeyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDeviceClientKey(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProducts(args, parsed_globals):
    """CLI action: call the IoT Cloud ``DescribeProducts`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProductsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProducts(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateMultiDevicesTask(args, parsed_globals):
    """CLI action: call the IoT Cloud ``CreateMultiDevicesTask`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateMultiDevicesTaskRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateMultiDevicesTask(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeResourceTasks(args, parsed_globals):
    """CLI action: call the IoT Cloud ``DescribeResourceTasks`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeResourceTasksRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeResourceTasks(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeFirmwareTaskStatistics(args, parsed_globals):
    """CLI action: call the IoT Cloud ``DescribeFirmwareTaskStatistics`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeFirmwareTaskStatisticsRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeFirmwareTaskStatistics(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeLoraDevice(args, parsed_globals):
    """CLI action: call the IoT Cloud ``DescribeLoraDevice`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeLoraDeviceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeLoraDevice(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTask(args, parsed_globals):
    """CLI action: call the IoT Cloud ``DescribeTask`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTaskRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeTask(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeFirmwareTask(args, parsed_globals):
    """CLI action: call the IoT Cloud ``DescribeFirmwareTask`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeFirmwareTaskRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeFirmwareTask(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCancelTask(args, parsed_globals):
    """CLI action: call the IoT Cloud ``CancelTask`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CancelTaskRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CancelTask(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeFirmwareTasks(args, parsed_globals):
    """CLI action: call the IoT Cloud ``DescribeFirmwareTasks`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeFirmwareTasksRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeFirmwareTasks(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doEditFirmware(args, parsed_globals):
    """CLI action: call the IoT Cloud ``EditFirmware`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.EditFirmwareRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.EditFirmware(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doGetUserResourceInfo(args, parsed_globals):
    """CLI action: call the IoT Cloud ``GetUserResourceInfo`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.GetUserResourceInfoRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.GetUserResourceInfo(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDeviceResource(args, parsed_globals):
    """CLI action: call the IoT Cloud ``DescribeDeviceResource`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeDeviceResourceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeDeviceResource(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPublishMessage(args, parsed_globals):
    """CLI action: call the IoT Cloud ``PublishMessage`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.PublishMessageRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.PublishMessage(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeFirmwareTaskDistribution(args, parsed_globals):
    """CLI action: call the IoT Cloud ``DescribeFirmwareTaskDistribution`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeFirmwareTaskDistributionRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeFirmwareTaskDistribution(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteDevice(args, parsed_globals):
    """CLI action: call the IoT Cloud ``DeleteDevice`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DeleteDeviceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DeleteDevice(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateTaskFileUrl(args, parsed_globals):
    """CLI action: call the IoT Cloud ``CreateTaskFileUrl`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateTaskFileUrlRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateTaskFileUrl(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doResetDeviceState(args, parsed_globals):
    """CLI action: call the IoT Cloud ``ResetDeviceState`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.ResetDeviceStateRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.ResetDeviceState(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProductResource(args, parsed_globals):
    """CLI action: call the IoT Cloud ``DescribeProductResource`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeProductResourceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeProductResource(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeTasks(args, parsed_globals):
    """CLI action: call the IoT Cloud ``DescribeTasks`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeTasksRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeTasks(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeMultiDevices(args, parsed_globals):
    """CLI action: call the IoT Cloud ``DescribeMultiDevices`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.DescribeMultiDevicesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.DescribeMultiDevices(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUpdateDeviceAvailableState(args, parsed_globals):
    """CLI action: call the IoT Cloud ``UpdateDeviceAvailableState`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.UpdateDeviceAvailableStateRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.UpdateDeviceAvailableState(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCancelDeviceFirmwareTask(args, parsed_globals):
    """CLI action: call the IoT Cloud ``CancelDeviceFirmwareTask`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CancelDeviceFirmwareTaskRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CancelDeviceFirmwareTask(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPublishAsDevice(args, parsed_globals):
    """CLI action: call the IoT Cloud ``PublishAsDevice`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.PublishAsDeviceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.PublishAsDevice(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateLoraDevice(args, parsed_globals):
    """CLI action: call the IoT Cloud ``CreateLoraDevice`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.CreateLoraDeviceRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.CreateLoraDevice(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doEnableTopicRule(args, parsed_globals):
    """CLI action: call the IoT Cloud ``EnableTopicRule`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.EnableTopicRuleRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.EnableTopicRule(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doBindDevices(args, parsed_globals):
    """CLI action: call the IoT Cloud ``BindDevices`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.BindDevicesRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.BindDevices(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUpdateTopicPolicy(args, parsed_globals):
    """CLI action: call the IoT Cloud ``UpdateTopicPolicy`` API and print the JSON response.

    :param args: request parameters parsed from the command line (dict).
    :param parsed_globals: parsed global CLI options (credentials, region, endpoint,
        timeout, output format, filter).
    """
    g_param = parse_global_arg(parsed_globals)
    # Build credentials and an HTTP profile from the resolved global options.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId], g_param[OptionsDefine.SecretKey], g_param[OptionsDefine.Token]
    )
    http_profile = HttpProfile(
        reqTimeout=60 if g_param[OptionsDefine.Timeout] is None else int(g_param[OptionsDefine.Timeout]),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint]
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    mod = CLIENT_MAP[g_param[OptionsDefine.Version]]
    client = mod.IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    models = MODELS_MAP[g_param[OptionsDefine.Version]]
    model = models.UpdateTopicPolicyRequest()
    model.from_json_string(json.dumps(args))
    rsp = client.UpdateTopicPolicy(model)
    result = rsp.to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:  # result may be bytes on some runtimes; decode then parse
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUnbindDevices(args, parsed_globals):
    """Invoke the iotcloud UnbindDevices API and print its formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP settings come from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    # Deserialize the CLI arguments into the request model and call the API.
    model = MODELS_MAP[version].UnbindDevicesRequest()
    model.from_json_string(json.dumps(args))
    result = client.UnbindDevices(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some SDK versions return bytes instead of str; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProductTask(args, parsed_globals):
    """Invoke the iotcloud DescribeProductTask API and print its formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP settings come from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    # Deserialize the CLI arguments into the request model and call the API.
    model = MODELS_MAP[version].DescribeProductTaskRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeProductTask(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some SDK versions return bytes instead of str; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProductTasks(args, parsed_globals):
    """Invoke the iotcloud DescribeProductTasks API and print its formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP settings come from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    # Deserialize the CLI arguments into the request model and call the API.
    model = MODELS_MAP[version].DescribeProductTasksRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeProductTasks(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some SDK versions return bytes instead of str; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeDevices(args, parsed_globals):
    """Invoke the iotcloud DescribeDevices API and print its formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP settings come from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    # Deserialize the CLI arguments into the request model and call the API.
    model = MODELS_MAP[version].DescribeDevicesRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeDevices(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some SDK versions return bytes instead of str; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDisableTopicRule(args, parsed_globals):
    """Invoke the iotcloud DisableTopicRule API and print its formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP settings come from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    # Deserialize the CLI arguments into the request model and call the API.
    model = MODELS_MAP[version].DisableTopicRuleRequest()
    model.from_json_string(json.dumps(args))
    result = client.DisableTopicRule(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some SDK versions return bytes instead of str; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeMultiDevTask(args, parsed_globals):
    """Invoke the iotcloud DescribeMultiDevTask API and print its formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP settings come from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    # Deserialize the CLI arguments into the request model and call the API.
    model = MODELS_MAP[version].DescribeMultiDevTaskRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeMultiDevTask(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some SDK versions return bytes instead of str; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doUpdateDeviceShadow(args, parsed_globals):
    """Invoke the iotcloud UpdateDeviceShadow API and print its formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP settings come from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    # Deserialize the CLI arguments into the request model and call the API.
    model = MODELS_MAP[version].UpdateDeviceShadowRequest()
    model.from_json_string(json.dumps(args))
    result = client.UpdateDeviceShadow(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some SDK versions return bytes instead of str; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribePushResourceTaskStatistics(args, parsed_globals):
    """Invoke the iotcloud DescribePushResourceTaskStatistics API and print its formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP settings come from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    # Deserialize the CLI arguments into the request model and call the API.
    model = MODELS_MAP[version].DescribePushResourceTaskStatisticsRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribePushResourceTaskStatistics(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some SDK versions return bytes instead of str; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPublishToDevice(args, parsed_globals):
    """Invoke the iotcloud PublishToDevice API and print its formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP settings come from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    # Deserialize the CLI arguments into the request model and call the API.
    model = MODELS_MAP[version].PublishToDeviceRequest()
    model.from_json_string(json.dumps(args))
    result = client.PublishToDevice(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some SDK versions return bytes instead of str; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDescribeProductResources(args, parsed_globals):
    """Invoke the iotcloud DescribeProductResources API and print its formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP settings come from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    # Deserialize the CLI arguments into the request model and call the API.
    model = MODELS_MAP[version].DescribeProductResourcesRequest()
    model.from_json_string(json.dumps(args))
    result = client.DescribeProductResources(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some SDK versions return bytes instead of str; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doCreateMultiDevice(args, parsed_globals):
    """Invoke the iotcloud CreateMultiDevice API and print its formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP settings come from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    # Deserialize the CLI arguments into the request model and call the API.
    model = MODELS_MAP[version].CreateMultiDeviceRequest()
    model.from_json_string(json.dumps(args))
    result = client.CreateMultiDevice(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some SDK versions return bytes instead of str; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doPublishBroadcastMessage(args, parsed_globals):
    """Invoke the iotcloud PublishBroadcastMessage API and print its formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP settings come from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    # Deserialize the CLI arguments into the request model and call the API.
    model = MODELS_MAP[version].PublishBroadcastMessageRequest()
    model.from_json_string(json.dumps(args))
    result = client.PublishBroadcastMessage(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some SDK versions return bytes instead of str; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
def doDeleteTopicRule(args, parsed_globals):
    """Invoke the iotcloud DeleteTopicRule API and print its formatted response."""
    g_param = parse_global_arg(parsed_globals)
    # Credentials and HTTP settings come from the resolved global parameters.
    cred = credential.Credential(
        g_param[OptionsDefine.SecretId],
        g_param[OptionsDefine.SecretKey],
        g_param[OptionsDefine.Token],
    )
    timeout = g_param[OptionsDefine.Timeout]
    http_profile = HttpProfile(
        reqTimeout=60 if timeout is None else int(timeout),
        reqMethod="POST",
        endpoint=g_param[OptionsDefine.Endpoint],
    )
    profile = ClientProfile(httpProfile=http_profile, signMethod="HmacSHA256")
    version = g_param[OptionsDefine.Version]
    client = CLIENT_MAP[version].IotcloudClient(cred, g_param[OptionsDefine.Region], profile)
    client._sdkVersion += ("_CLI_" + __version__)
    # Deserialize the CLI arguments into the request model and call the API.
    model = MODELS_MAP[version].DeleteTopicRuleRequest()
    model.from_json_string(json.dumps(args))
    result = client.DeleteTopicRule(model).to_json_string()
    try:
        json_obj = json.loads(result)
    except TypeError:
        # Some SDK versions return bytes instead of str; decode first.
        json_obj = json.loads(result.decode('utf-8'))
    FormatOutput.output("action", json_obj, g_param[OptionsDefine.Output], g_param[OptionsDefine.Filter])
# Maps a supported API version tag to the generated iotcloud client module.
CLIENT_MAP = {
    "v20180614": iotcloud_client_v20180614,
}
# Maps a supported API version tag to the generated iotcloud models module.
MODELS_MAP = {
    "v20180614": models_v20180614,
}
# Dispatch table: CLI action name -> handler function (exposed via action_caller()).
ACTION_MAP = {
    "CreateTask": doCreateTask,
    "ReplaceTopicRule": doReplaceTopicRule,
    "PublishRRPCMessage": doPublishRRPCMessage,
    "DeleteLoraDevice": doDeleteLoraDevice,
    "DescribeDeviceResources": doDescribeDeviceResources,
    "DescribeFirmware": doDescribeFirmware,
    "DescribeDeviceShadow": doDescribeDeviceShadow,
    "DescribeDevice": doDescribeDevice,
    "GetCOSURL": doGetCOSURL,
    "DeleteProduct": doDeleteProduct,
    "DescribeAllDevices": doDescribeAllDevices,
    "CreateTopicRule": doCreateTopicRule,
    "RetryDeviceFirmwareTask": doRetryDeviceFirmwareTask,
    "CreateTopicPolicy": doCreateTopicPolicy,
    "DescribeFirmwareTaskDevices": doDescribeFirmwareTaskDevices,
    "BatchUpdateFirmware": doBatchUpdateFirmware,
    "CreateProduct": doCreateProduct,
    "CreateDevice": doCreateDevice,
    "UploadFirmware": doUploadFirmware,
    "DescribeDeviceClientKey": doDescribeDeviceClientKey,
    "DescribeProducts": doDescribeProducts,
    "CreateMultiDevicesTask": doCreateMultiDevicesTask,
    "DescribeResourceTasks": doDescribeResourceTasks,
    "DescribeFirmwareTaskStatistics": doDescribeFirmwareTaskStatistics,
    "DescribeLoraDevice": doDescribeLoraDevice,
    "DescribeTask": doDescribeTask,
    "DescribeFirmwareTask": doDescribeFirmwareTask,
    "CancelTask": doCancelTask,
    "DescribeFirmwareTasks": doDescribeFirmwareTasks,
    "EditFirmware": doEditFirmware,
    "GetUserResourceInfo": doGetUserResourceInfo,
    "DescribeDeviceResource": doDescribeDeviceResource,
    "PublishMessage": doPublishMessage,
    "DescribeFirmwareTaskDistribution": doDescribeFirmwareTaskDistribution,
    "DeleteDevice": doDeleteDevice,
    "CreateTaskFileUrl": doCreateTaskFileUrl,
    "ResetDeviceState": doResetDeviceState,
    "DescribeProductResource": doDescribeProductResource,
    "DescribeTasks": doDescribeTasks,
    "DescribeMultiDevices": doDescribeMultiDevices,
    "UpdateDeviceAvailableState": doUpdateDeviceAvailableState,
    "CancelDeviceFirmwareTask": doCancelDeviceFirmwareTask,
    "PublishAsDevice": doPublishAsDevice,
    "CreateLoraDevice": doCreateLoraDevice,
    "EnableTopicRule": doEnableTopicRule,
    "BindDevices": doBindDevices,
    "UpdateTopicPolicy": doUpdateTopicPolicy,
    "UnbindDevices": doUnbindDevices,
    "DescribeProductTask": doDescribeProductTask,
    "DescribeProductTasks": doDescribeProductTasks,
    "DescribeDevices": doDescribeDevices,
    "DisableTopicRule": doDisableTopicRule,
    "DescribeMultiDevTask": doDescribeMultiDevTask,
    "UpdateDeviceShadow": doUpdateDeviceShadow,
    "DescribePushResourceTaskStatistics": doDescribePushResourceTaskStatistics,
    "PublishToDevice": doPublishToDevice,
    "DescribeProductResources": doDescribeProductResources,
    "CreateMultiDevice": doCreateMultiDevice,
    "PublishBroadcastMessage": doPublishBroadcastMessage,
    "DeleteTopicRule": doDeleteTopicRule,
}
# API versions this module accepts; validated in parse_global_arg().
AVAILABLE_VERSION_LIST = [
    "v20180614",
]
def action_caller():
    """Return the mapping of CLI action names to their handler functions."""
    return ACTION_MAP
def parse_global_arg(parsed_globals):
    """Resolve global CLI parameters against profile files and environment.

    Fills in missing values (secret id/key, token, region, output) from the
    profile's .configure/.credential files, determines the API version and
    endpoint, and returns the completed parameter dict.

    :raises ConfigurationError: if profile files are malformed or a required
        parameter cannot be resolved.
    """
    g_param = parsed_globals
    # When no profile was given on the command line, fall back to "default"
    # and allow environment variables to override file values below.
    is_exist_profile = True
    if not parsed_globals["profile"]:
        is_exist_profile = False
        g_param["profile"] = "default"
    # Profile files live under ~/.tccli/<profile>.{configure,credential}.
    configure_path = os.path.join(os.path.expanduser("~"), ".tccli")
    is_conf_exist, conf_path = Utils.file_existed(configure_path, g_param["profile"] + ".configure")
    is_cred_exist, cred_path = Utils.file_existed(configure_path, g_param["profile"] + ".credential")
    conf = {}
    cred = {}
    if is_conf_exist:
        conf = Utils.load_json_msg(conf_path)
    if is_cred_exist:
        cred = Utils.load_json_msg(cred_path)
    if not (isinstance(conf, dict) and isinstance(cred, dict)):
        raise ConfigurationError(
            "file: %s or %s is not json format"
            % (g_param["profile"] + ".configure", g_param["profile"] + ".credential"))
    if OptionsDefine.Token not in cred:
        cred[OptionsDefine.Token] = None
    # Environment variables only take effect when no explicit profile was
    # requested; secret id and key must both be present to be used.
    if not is_exist_profile:
        if os.environ.get(OptionsDefine.ENV_SECRET_ID) and os.environ.get(OptionsDefine.ENV_SECRET_KEY):
            cred[OptionsDefine.SecretId] = os.environ.get(OptionsDefine.ENV_SECRET_ID)
            cred[OptionsDefine.SecretKey] = os.environ.get(OptionsDefine.ENV_SECRET_KEY)
            cred[OptionsDefine.Token] = os.environ.get(OptionsDefine.ENV_TOKEN)
        if os.environ.get(OptionsDefine.ENV_REGION):
            conf[OptionsDefine.Region] = os.environ.get(OptionsDefine.ENV_REGION)
    # Command-line values win; only None entries are filled from the files.
    for param in g_param.keys():
        if g_param[param] is None:
            if param in [OptionsDefine.SecretKey, OptionsDefine.SecretId, OptionsDefine.Token]:
                if param in cred:
                    g_param[param] = cred[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
            elif param in [OptionsDefine.Region, OptionsDefine.Output]:
                if param in conf:
                    g_param[param] = conf[param]
                else:
                    raise ConfigurationError("%s is invalid" % param)
    # Version tag format: "2018-06-14" on the CLI becomes "v20180614" internally.
    try:
        if g_param[OptionsDefine.ServiceVersion]:
            g_param[OptionsDefine.Version] = "v" + g_param[OptionsDefine.ServiceVersion].replace('-', '')
        else:
            version = conf["iotcloud"][OptionsDefine.Version]
            g_param[OptionsDefine.Version] = "v" + version.replace('-', '')
        if g_param[OptionsDefine.Endpoint] is None:
            g_param[OptionsDefine.Endpoint] = conf["iotcloud"][OptionsDefine.Endpoint]
    except Exception as err:
        raise ConfigurationError("config file:%s error, %s" % (conf_path, str(err)))
    if g_param[OptionsDefine.Version] not in AVAILABLE_VERSION_LIST:
        raise Exception("available versions: %s" % " ".join(AVAILABLE_VERSION_LIST))
    return g_param
| true | true |
1c2f8df33b72a6b914682b38fc69d3e05ea4f7c3 | 529 | py | Python | src/pandas/read_files.py | lalitmcb/python | 057b5750f7e2e46b5057d15badec756373d8ed79 | [
"MIT"
] | null | null | null | src/pandas/read_files.py | lalitmcb/python | 057b5750f7e2e46b5057d15badec756373d8ed79 | [
"MIT"
] | null | null | null | src/pandas/read_files.py | lalitmcb/python | 057b5750f7e2e46b5057d15badec756373d8ed79 | [
"MIT"
] | null | null | null | import pandas as pd
# install the xlrd module to support Excel input.
# In python 3 use: pip3 install xlrd
# Read the Excel file by giving its name and sheet index.
# Bug fix: the keyword is ``sheet_name``; the old ``sheetname`` spelling was
# deprecated in pandas 0.21 and removed in pandas 1.0.
excel_df = pd.read_excel('titanic.xls', sheet_name=0)
print(excel_df.head())
# Write the csv file out.
# By default the index also goes into the csv; index=False suppresses it.
# sep is optional and defaults to a comma.
excel_df.to_csv('titanic.csv', index=False, sep=',')
# Reading a csv file is similar.
# Read back the same csv file that we just generated.
csv_df = pd.read_csv('titanic.csv')
print(csv_df.head())
| 27.842105 | 53 | 0.754253 | import pandas as pd
excel_df = pd.read_excel('titanic.xls',sheetname=0)
print(excel_df.head())
excel_df.to_csv('titanic.csv',index=False,sep=',')
#Reading the csv file is similar
#Let's read the same csv file that we generated
csv_df = pd.read_csv('titanic.csv')
print(csv_df.head())
| true | true |
1c2f8e3cd64718a8fb3b5a388f58e845a5c21132 | 1,057 | py | Python | tests/python/pants_test/java/jar/test_manifest.py | ghthor/pants | 450de702414f87f563081ddefaefd8a554de07a3 | [
"Apache-2.0"
] | null | null | null | tests/python/pants_test/java/jar/test_manifest.py | ghthor/pants | 450de702414f87f563081ddefaefd8a554de07a3 | [
"Apache-2.0"
] | null | null | null | tests/python/pants_test/java/jar/test_manifest.py | ghthor/pants | 450de702414f87f563081ddefaefd8a554de07a3 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import absolute_import, division, print_function, unicode_literals
import unittest
from pants.java.jar.manifest import Manifest
class TestManifest(unittest.TestCase):
  """Unit tests for pants.java.jar.manifest.Manifest."""

  def test_isempty(self):
    manifest = Manifest()
    self.assertTrue(manifest.is_empty())
    manifest.addentry('Header', 'value')
    self.assertFalse(manifest.is_empty())

  def test_addentry(self):
    manifest = Manifest()
    manifest.addentry('Header', 'value')
    # assertEquals is a deprecated alias (removed in Python 3.12); use assertEqual.
    self.assertEqual(
      'Header: value\n', manifest.contents())

  def test_too_long_entry(self):
    manifest = Manifest()
    with self.assertRaises(ValueError):
      # The header name (69 chars) is long enough to exceed the manifest
      # line-length limit enforced by Manifest.addentry.
      manifest.addentry(
        '1234567890123456789012345678901234567890'
        '12345678901234567890123456789', 'value')

  def test_nonascii_char(self):
    manifest = Manifest()
    with self.assertRaises(UnicodeEncodeError):
      manifest.addentry('X-Copyright', '© 2015')
| 28.567568 | 82 | 0.729423 |
from __future__ import absolute_import, division, print_function, unicode_literals
import unittest
from pants.java.jar.manifest import Manifest
class TestManifest(unittest.TestCase):
def test_isempty(self):
manifest = Manifest()
self.assertTrue(manifest.is_empty())
manifest.addentry('Header', 'value')
self.assertFalse(manifest.is_empty())
def test_addentry(self):
manifest = Manifest()
manifest.addentry('Header', 'value')
self.assertEquals(
'Header: value\n', manifest.contents())
def test_too_long_entry(self):
manifest = Manifest()
with self.assertRaises(ValueError):
manifest.addentry(
'1234567890123456789012345678901234567890'
'12345678901234567890123456789', 'value')
def test_nonascii_char(self):
manifest = Manifest()
with self.assertRaises(UnicodeEncodeError):
manifest.addentry('X-Copyright', '© 2015')
| true | true |
1c2f90cd0923c4977c7f832f2a779490ef9a9be4 | 1,435 | py | Python | setup.py | JackStC/empiricalutilities | 9d6878594b562cb5f8bb75e8a61ccbc07c8b5cce | [
"MIT"
] | null | null | null | setup.py | JackStC/empiricalutilities | 9d6878594b562cb5f8bb75e8a61ccbc07c8b5cce | [
"MIT"
] | null | null | null | setup.py | JackStC/empiricalutilities | 9d6878594b562cb5f8bb75e8a61ccbc07c8b5cce | [
"MIT"
] | null | null | null | from setuptools import setup, find_packages
from codecs import open
from os import path

here = path.abspath(path.dirname(__file__))

# Pull the long description from the README so the package index renders it.
with open(path.join(here, 'README.rst')) as f:
    long_description = f.read()

# Trove classifiers describing the project's maturity and audience.
_classifiers = [
    'Development Status :: 3 - Alpha',
    'Intended Audience :: Developers',
    'Topic :: Software Development :: Build Tools',
    'License :: OSI Approved :: MIT License',
    'Programming Language :: Python :: 3.7',
]

# Runtime dependencies installed alongside the package.
_install_requires = [
    'cycler',
    'kiwisolver',
    'matplotlib',
    'mpmath',
    'numpy',
    'pandas',
    'patsy',
    'pprint',
    'pyparsing',
    'pytz',
    'scipy',
    'seaborn',
    'six',
    'statsmodels',
    'sympy',
    'tabulate',
]

setup(
    name='empiricalutilities',
    version='0.1.8',
    description='A Python project for empirical data manipulation.',
    long_description=long_description,
    author='Jason R Becker',
    author_email='jasonrichardbecker@gmail.com',
    python_requires='>=3.6.0',
    license='MIT',
    classifiers=_classifiers,
    url='https://github.com/jason-r-becker/empiricalutilities',
    download_url='https://github.com/jason-r-becker/empiricalutilities/archive/0.1.tar.gz',
    keywords='empirical LaTeX OLS'.split(),
    packages=find_packages(exclude=['contrib', 'docs', 'tests', 'build']),
    install_requires=_install_requires,
)
| 28.7 | 91 | 0.607666 | from setuptools import setup, find_packages
from codecs import open
from os import path
here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'README.rst')) as f:
long_description = f.read()
setup(
name='empiricalutilities',
version='0.1.8',
description='A Python project for empirical data manipulation.',
long_description=long_description,
author='Jason R Becker',
author_email='jasonrichardbecker@gmail.com',
python_requires='>=3.6.0',
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 3.7',
],
url='https://github.com/jason-r-becker/empiricalutilities',
download_url='https://github.com/jason-r-becker/empiricalutilities/archive/0.1.tar.gz',
keywords='empirical LaTeX OLS'.split(),
packages=find_packages(exclude=['contrib', 'docs', 'tests', 'build']),
install_requires=[
'cycler',
'kiwisolver',
'matplotlib',
'mpmath',
'numpy',
'pandas',
'patsy',
'pprint',
'pyparsing',
'pytz',
'scipy',
'seaborn',
'six',
'statsmodels',
'sympy',
'tabulate',
]
)
| true | true |
1c2f91023ac824e07d1e9f4a6d023ff56fb8e3c7 | 540 | py | Python | modules/shared/domain/value_objects/string/value_string.py | eduardolujan/hexagonal_architecture_django | 8055927cb460bc40f3a2651c01a9d1da696177e8 | [
"BSD-3-Clause"
] | 6 | 2020-08-09T23:41:08.000Z | 2021-03-16T22:05:40.000Z | modules/shared/domain/value_objects/string/value_string.py | eduardolujan/hexagonal_architecture_django | 8055927cb460bc40f3a2651c01a9d1da696177e8 | [
"BSD-3-Clause"
] | 1 | 2020-10-02T02:59:38.000Z | 2020-10-02T02:59:38.000Z | modules/shared/domain/value_objects/string/value_string.py | eduardolujan/hexagonal_architecture_django | 8055927cb460bc40f3a2651c01a9d1da696177e8 | [
"BSD-3-Clause"
] | 2 | 2021-03-16T22:05:43.000Z | 2021-04-30T06:35:25.000Z | # -*- utf-8 -*-
class String:
    """Immutable string value object.

    Rejects non-``str`` values at construction time and forbids reassigning
    ``value`` afterwards.
    """

    def __init__(self, value):
        # type() (not isinstance) deliberately rejects str subclasses as well,
        # preserving the original strictness.
        if type(value) is not str:
            raise ValueError(f"Is not string value:{value} type:{type(value)}")
        self.__value = value

    def __deepcopy__(self, memodict={}):
        # NOTE(review): returns the raw str rather than a new String instance;
        # kept as-is for backward compatibility with existing callers.
        return self.__value

    def __repr__(self):
        return self.__value

    def __str__(self):
        return self.__value

    @property
    def value(self):
        return self.__value

    @value.setter
    def value(self, value):
        # Bug fix: a property setter is called with the assigned value as a
        # second argument. Without that parameter, assignment raised a
        # TypeError instead of the intended ValueError.
        raise ValueError("You can't assign value")
| 19.285714 | 79 | 0.592593 |
class String:
def __init__(self, value):
if type(value) is not str:
raise ValueError(f"Is not string value:{value} type:{type(value)}")
self.__value = value
def __deepcopy__(self, memodict={}):
return self.__value
def __repr__(self):
return self.__value
def __str__(self):
return self.__value
@property
def value(self):
return self.__value
@value.setter
def value(self):
raise ValueError("You can't assign value")
| true | true |
1c2f9311d987db97536687b13ca66399f98e171a | 6,794 | py | Python | python/topology/go.py | rayhaanj/scion | 1242ea4f6ce0d7f0a9ae13c09ba62e5b7e6a3ce3 | [
"Apache-2.0"
] | null | null | null | python/topology/go.py | rayhaanj/scion | 1242ea4f6ce0d7f0a9ae13c09ba62e5b7e6a3ce3 | [
"Apache-2.0"
] | null | null | null | python/topology/go.py | rayhaanj/scion | 1242ea4f6ce0d7f0a9ae13c09ba62e5b7e6a3ce3 | [
"Apache-2.0"
] | null | null | null | # Copyright 2014 ETH Zurich
# Copyright 2018 ETH Zurich, Anapaya Systems
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:mod:`go` --- SCION topology go generator
=============================================
"""
# Stdlib
import os
import toml
# SCION
from lib.app.sciond import get_default_sciond_path
from lib.defines import SCIOND_API_SOCKDIR
from lib.util import write_file
from topology.common import (
ArgsTopoDicts,
BR_CONFIG_NAME,
COMMON_DIR,
CS_CONFIG_NAME,
sciond_name,
SD_CONFIG_NAME,
prom_addr_br,
PS_CONFIG_NAME,
)
from topology.prometheus import DEFAULT_BR_PROM_PORT
class GoGenArgs(ArgsTopoDicts):
    """Arguments for the Go config generator: CLI args plus per-AS topo dicts."""
    pass
class GoGenerator(object):
def __init__(self, args):
"""
:param GoGenArgs args: Contains the passed command line arguments and topo dicts.
"""
self.args = args
self.log_dir = '/share/logs' if args.docker else 'logs'
self.db_dir = '/share/cache' if args.docker else 'gen-cache'
self.log_level = 'trace' if args.trace else 'debug'
def generate_br(self):
for topo_id, topo in self.args.topo_dicts.items():
for k, v in topo.get("BorderRouters", {}).items():
base = topo_id.base_dir(self.args.output_dir)
br_conf = self._build_br_conf(topo_id, topo["ISD_AS"], base, k, v)
write_file(os.path.join(base, k, BR_CONFIG_NAME), toml.dumps(br_conf))
def _build_br_conf(self, topo_id, ia, base, name, v):
config_dir = '/share/conf' if self.args.docker else os.path.join(base, name)
raw_entry = {
'general': {
'ID': name,
'ConfigDir': config_dir,
},
'logging': self._log_entry(name),
'metrics': {
'Prometheus': prom_addr_br(name, v, DEFAULT_BR_PROM_PORT),
},
'br': {
'Profile': False,
},
}
return raw_entry
def generate_ps(self):
for topo_id, topo in self.args.topo_dicts.items():
for k, v in topo.get("PathService", {}).items():
# only a single Go-PS per AS is currently supported
if k.endswith("-1"):
base = topo_id.base_dir(self.args.output_dir)
ps_conf = self._build_ps_conf(topo_id, topo["ISD_AS"], base, k)
write_file(os.path.join(base, k, PS_CONFIG_NAME), toml.dumps(ps_conf))
def _build_ps_conf(self, topo_id, ia, base, name):
config_dir = '/share/conf' if self.args.docker else os.path.join(base, name)
raw_entry = {
'general': {
'ID': name,
'ConfigDir': config_dir,
'ReconnectToDispatcher': True,
},
'logging': self._log_entry(name),
'TrustDB': {
'Backend': 'sqlite',
'Connection': os.path.join(self.db_dir, '%s.trust.db' % name),
},
'infra': {
'Type': "PS"
},
'ps': {
'PathDB': {
'Backend': 'sqlite',
'Connection': os.path.join(self.db_dir, '%s.path.db' % name),
},
'SegSync': True,
},
}
return raw_entry
def generate_sciond(self):
for topo_id, topo in self.args.topo_dicts.items():
base = topo_id.base_dir(self.args.output_dir)
sciond_conf = self._build_sciond_conf(topo_id, topo["ISD_AS"], base)
write_file(os.path.join(base, COMMON_DIR, SD_CONFIG_NAME), toml.dumps(sciond_conf))
def _build_sciond_conf(self, topo_id, ia, base):
name = sciond_name(topo_id)
config_dir = '/share/conf' if self.args.docker else os.path.join(base, COMMON_DIR)
raw_entry = {
'general': {
'ID': name,
'ConfigDir': config_dir,
'ReconnectToDispatcher': True,
},
'logging': self._log_entry(name),
'TrustDB': {
'Backend': 'sqlite',
'Connection': os.path.join(self.db_dir, '%s.trust.db' % name),
},
'sd': {
'Reliable': os.path.join(SCIOND_API_SOCKDIR, "%s.sock" % name),
'Unix': os.path.join(SCIOND_API_SOCKDIR, "%s.unix" % name),
'Public': '%s,[127.0.0.1]:0' % ia,
'PathDB': {
'Connection': os.path.join(self.db_dir, '%s.path.db' % name),
},
},
}
return raw_entry
def generate_cs(self):
for topo_id, topo in self.args.topo_dicts.items():
for k, v in topo.get("CertificateService", {}).items():
# only a single Go-CS per AS is currently supported
if k.endswith("-1"):
base = topo_id.base_dir(self.args.output_dir)
cs_conf = self._build_cs_conf(topo_id, topo["ISD_AS"], base, k)
write_file(os.path.join(base, k, CS_CONFIG_NAME), toml.dumps(cs_conf))
def _build_cs_conf(self, topo_id, ia, base, name):
    """Return the TOML-serializable config dict for one certificate server."""
    if self.args.docker:
        conf_dir = '/share/conf'
    else:
        conf_dir = os.path.join(base, name)
    return {
        'general': {
            'ID': name,
            'ConfigDir': conf_dir,
        },
        'sd_client': {
            'Path': get_default_sciond_path(topo_id),
        },
        'logging': self._log_entry(name),
        'TrustDB': {
            'Backend': 'sqlite',
            'Connection': os.path.join(self.db_dir, '%s.trust.db' % name),
        },
        'infra': {
            'Type': "CS"
        },
        'cs': {
            # Certificate reissuance schedule and retry behaviour.
            'LeafReissueTime': "6h",
            'IssuerReissueTime': "3d",
            'ReissueRate': "10s",
            'ReissueTimeout': "5s",
        },
    }
def _log_entry(self, name):
entry = {
'file': {
'Path': os.path.join(self.log_dir, "%s.log" % name),
'Level': self.log_level,
},
'console': {
'Level': 'crit',
},
}
return entry
| 35.570681 | 95 | 0.530321 |
import os
import toml
from lib.app.sciond import get_default_sciond_path
from lib.defines import SCIOND_API_SOCKDIR
from lib.util import write_file
from topology.common import (
ArgsTopoDicts,
BR_CONFIG_NAME,
COMMON_DIR,
CS_CONFIG_NAME,
sciond_name,
SD_CONFIG_NAME,
prom_addr_br,
PS_CONFIG_NAME,
)
from topology.prometheus import DEFAULT_BR_PROM_PORT
class GoGenArgs(ArgsTopoDicts):
    """Argument container for the Go config generator (no extra fields)."""
class GoGenerator(object):
def __init__(self, args):
self.args = args
self.log_dir = '/share/logs' if args.docker else 'logs'
self.db_dir = '/share/cache' if args.docker else 'gen-cache'
self.log_level = 'trace' if args.trace else 'debug'
def generate_br(self):
    """Write a config file for every border router in the topology."""
    for topo_id, topo in self.args.topo_dicts.items():
        for elem_id, elem in topo.get("BorderRouters", {}).items():
            base = topo_id.base_dir(self.args.output_dir)
            conf = self._build_br_conf(topo_id, topo["ISD_AS"], base,
                                       elem_id, elem)
            write_file(os.path.join(base, elem_id, BR_CONFIG_NAME),
                       toml.dumps(conf))
def _build_br_conf(self, topo_id, ia, base, name, v):
    """Return the TOML-serializable config dict for one border router.

    `v` is the router's topology entry; it is only needed to compute the
    Prometheus metrics address.
    """
    if self.args.docker:
        conf_dir = '/share/conf'
    else:
        conf_dir = os.path.join(base, name)
    return {
        'general': {
            'ID': name,
            'ConfigDir': conf_dir,
        },
        'logging': self._log_entry(name),
        'metrics': {
            'Prometheus': prom_addr_br(name, v, DEFAULT_BR_PROM_PORT),
        },
        'br': {
            'Profile': False,
        },
    }
def generate_ps(self):
    """Write a config file for the first path server of every AS."""
    for topo_id, topo in self.args.topo_dicts.items():
        for elem_id in topo.get("PathService", {}):
            # Only one Go PS per AS is supported at the moment.
            if not elem_id.endswith("-1"):
                continue
            base = topo_id.base_dir(self.args.output_dir)
            conf = self._build_ps_conf(topo_id, topo["ISD_AS"], base, elem_id)
            write_file(os.path.join(base, elem_id, PS_CONFIG_NAME),
                       toml.dumps(conf))
def _build_ps_conf(self, topo_id, ia, base, name):
config_dir = '/share/conf' if self.args.docker else os.path.join(base, name)
raw_entry = {
'general': {
'ID': name,
'ConfigDir': config_dir,
'ReconnectToDispatcher': True,
},
'logging': self._log_entry(name),
'TrustDB': {
'Backend': 'sqlite',
'Connection': os.path.join(self.db_dir, '%s.trust.db' % name),
},
'infra': {
'Type': "PS"
},
'ps': {
'PathDB': {
'Backend': 'sqlite',
'Connection': os.path.join(self.db_dir, '%s.path.db' % name),
},
'SegSync': True,
},
}
return raw_entry
def generate_sciond(self):
    """Write one sciond config file per AS into its common/ directory."""
    for topo_id, topo in self.args.topo_dicts.items():
        base = topo_id.base_dir(self.args.output_dir)
        conf = self._build_sciond_conf(topo_id, topo["ISD_AS"], base)
        write_file(os.path.join(base, COMMON_DIR, SD_CONFIG_NAME),
                   toml.dumps(conf))
def _build_sciond_conf(self, topo_id, ia, base):
    """Return the TOML-serializable config dict for sciond in one AS."""
    name = sciond_name(topo_id)
    if self.args.docker:
        conf_dir = '/share/conf'
    else:
        conf_dir = os.path.join(base, COMMON_DIR)
    return {
        'general': {
            'ID': name,
            'ConfigDir': conf_dir,
            'ReconnectToDispatcher': True,
        },
        'logging': self._log_entry(name),
        'TrustDB': {
            'Backend': 'sqlite',
            'Connection': os.path.join(self.db_dir, '%s.trust.db' % name),
        },
        'sd': {
            # Dispatcher sockets for reliable and unix-datagram transport.
            'Reliable': os.path.join(SCIOND_API_SOCKDIR, "%s.sock" % name),
            'Unix': os.path.join(SCIOND_API_SOCKDIR, "%s.unix" % name),
            # Port 0: let the OS pick a free local port.
            'Public': '%s,[127.0.0.1]:0' % ia,
            'PathDB': {
                'Connection': os.path.join(self.db_dir, '%s.path.db' % name),
            },
        },
    }
def generate_cs(self):
    """Write a config file for the first certificate server of every AS."""
    for topo_id, topo in self.args.topo_dicts.items():
        for elem_id in topo.get("CertificateService", {}):
            # Only a single Go-CS per AS is currently supported.
            if not elem_id.endswith("-1"):
                continue
            base = topo_id.base_dir(self.args.output_dir)
            conf = self._build_cs_conf(topo_id, topo["ISD_AS"], base, elem_id)
            write_file(os.path.join(base, elem_id, CS_CONFIG_NAME),
                       toml.dumps(conf))
def _build_cs_conf(self, topo_id, ia, base, name):
    """Return the TOML-serializable config dict for one certificate server."""
    if self.args.docker:
        conf_dir = '/share/conf'
    else:
        conf_dir = os.path.join(base, name)
    return {
        'general': {
            'ID': name,
            'ConfigDir': conf_dir,
        },
        'sd_client': {
            'Path': get_default_sciond_path(topo_id),
        },
        'logging': self._log_entry(name),
        'TrustDB': {
            'Backend': 'sqlite',
            'Connection': os.path.join(self.db_dir, '%s.trust.db' % name),
        },
        'infra': {
            'Type': "CS"
        },
        'cs': {
            # Certificate reissuance schedule and retry behaviour.
            'LeafReissueTime': "6h",
            'IssuerReissueTime': "3d",
            'ReissueRate': "10s",
            'ReissueTimeout': "5s",
        },
    }
def _log_entry(self, name):
entry = {
'file': {
'Path': os.path.join(self.log_dir, "%s.log" % name),
'Level': self.log_level,
},
'console': {
'Level': 'crit',
},
}
return entry
| true | true |
1c2f94c8d12445c85d2c0543340c870acc09cc29 | 6,731 | py | Python | tests/automated/helpers.py | iambluefred/panda | db4a43f8ceb422baa25264a7c053e2aa15665aa9 | [
"MIT"
] | 1 | 2021-08-23T16:27:39.000Z | 2021-08-23T16:27:39.000Z | tests/automated/helpers.py | iambluefred/panda | db4a43f8ceb422baa25264a7c053e2aa15665aa9 | [
"MIT"
] | 1 | 2021-05-06T18:39:54.000Z | 2021-05-26T13:38:12.000Z | tests/automated/helpers.py | iambluefred/panda | db4a43f8ceb422baa25264a7c053e2aa15665aa9 | [
"MIT"
] | 10 | 2020-05-13T19:37:38.000Z | 2021-05-19T20:03:58.000Z | import os
import time
import random
import _thread
import faulthandler
from functools import wraps
from panda import Panda
from panda_jungle import PandaJungle # pylint: disable=import-error
from nose.tools import assert_equal
from parameterized import parameterized, param
from .timeout import run_with_timeout
SPEED_NORMAL = 500
SPEED_GMLAN = 33.3
BUS_SPEEDS = [(0, SPEED_NORMAL), (1, SPEED_NORMAL), (2, SPEED_NORMAL), (3, SPEED_GMLAN)]
TIMEOUT = 45
H7_HW_TYPES = [Panda.HW_TYPE_RED_PANDA]
GEN2_HW_TYPES = [Panda.HW_TYPE_BLACK_PANDA, Panda.HW_TYPE_UNO] + H7_HW_TYPES
GPS_HW_TYPES = [Panda.HW_TYPE_GREY_PANDA, Panda.HW_TYPE_BLACK_PANDA, Panda.HW_TYPE_UNO]
PEDAL_SERIAL = 'none'
JUNGLE_SERIAL = os.getenv("PANDAS_JUNGLE")
# Enable fault debug
faulthandler.enable(all_threads=False)
# Connect to Panda Jungle
panda_jungle = PandaJungle(JUNGLE_SERIAL)
# Find all panda's connected
_panda_serials = None
def init_panda_serials():
    """Power up all pandas via the jungle and record (serial, hw_type) pairs.

    Populates the module-level _panda_serials list used by the test
    parameterization helpers below.
    """
    global panda_jungle, _panda_serials
    _panda_serials = []
    panda_jungle.set_panda_power(True)
    time.sleep(5)  # give the boards time to boot and enumerate on USB
    for serial in Panda.list():
        panda = Panda(serial=serial)
        _panda_serials.append((serial, panda.get_type()))
        panda.close()
    print('Found', str(len(_panda_serials)), 'pandas')
init_panda_serials()
# Panda providers
test_all_types = parameterized([
param(panda_type=Panda.HW_TYPE_WHITE_PANDA),
param(panda_type=Panda.HW_TYPE_GREY_PANDA),
param(panda_type=Panda.HW_TYPE_BLACK_PANDA),
param(panda_type=Panda.HW_TYPE_UNO),
param(panda_type=Panda.HW_TYPE_RED_PANDA)
])
test_all_pandas = parameterized(
list(map(lambda x: x[0], filter(lambda x: x[0] != PEDAL_SERIAL, _panda_serials))) # type: ignore
)
test_all_gen2_pandas = parameterized(
list(map(lambda x: x[0], filter(lambda x: x[1] in GEN2_HW_TYPES, _panda_serials))) # type: ignore
)
test_all_gps_pandas = parameterized(
list(map(lambda x: x[0], filter(lambda x: x[1] in GPS_HW_TYPES, _panda_serials))) # type: ignore
)
test_white_and_grey = parameterized([
param(panda_type=Panda.HW_TYPE_WHITE_PANDA),
param(panda_type=Panda.HW_TYPE_GREY_PANDA)
])
test_white = parameterized([
param(panda_type=Panda.HW_TYPE_WHITE_PANDA)
])
test_grey = parameterized([
param(panda_type=Panda.HW_TYPE_GREY_PANDA)
])
test_black = parameterized([
param(panda_type=Panda.HW_TYPE_BLACK_PANDA)
])
test_uno = parameterized([
param(panda_type=Panda.HW_TYPE_UNO)
])
def time_many_sends(p, bus, p_recv=None, msg_count=100, msg_id=None, two_pandas=False):
    """Send msg_count CAN messages on `bus` from panda `p` and measure throughput.

    If `two_pandas` is set, `p_recv` receives the messages while `p` sees only
    the TX echoes; otherwise `p` (in loopback) sees both. Asserts that all
    messages and echoes arrived, then returns the effective bus utilisation
    in kbps (computed from the worst-case CAN 2.0 frame bit count).
    """
    # Fix: compare against None with `is`, not `==` (PEP 8; `==` can be
    # hijacked by a custom __eq__).
    if p_recv is None:
        p_recv = p
    if msg_id is None:
        msg_id = random.randint(0x100, 0x200)
    if p == p_recv and two_pandas:
        raise ValueError("Cannot have two pandas that are the same panda")

    start_time = time.time()
    p.can_send_many([(msg_id, 0, b"\xaa" * 8, bus)] * msg_count)
    r = []
    r_echo = []
    # Loopback mode returns each message twice (echo + reception).
    r_len_expected = msg_count if two_pandas else msg_count * 2
    r_echo_len_expected = msg_count if two_pandas else 0

    while len(r) < r_len_expected and (time.time() - start_time) < 5:
        r.extend(p_recv.can_recv())
    end_time = time.time()
    if two_pandas:
        while len(r_echo) < r_echo_len_expected and (time.time() - start_time) < 10:
            r_echo.extend(p.can_recv())

    # Echoes are reported on bus | 0x80.
    sent_echo = [x for x in r if x[3] == 0x80 | bus and x[0] == msg_id]
    sent_echo.extend([x for x in r_echo if x[3] == 0x80 | bus and x[0] == msg_id])
    resp = [x for x in r if x[3] == bus and x[0] == msg_id]
    leftovers = [x for x in r if (x[3] != 0x80 | bus and x[3] != bus) or x[0] != msg_id]
    assert_equal(len(leftovers), 0)
    assert_equal(len(resp), msg_count)
    assert_equal(len(sent_echo), msg_count)

    end_time = (end_time - start_time) * 1000.0
    # Bits per worst-case standard frame: SOF + ID + RTR + IDE + r0 + DLC
    # + 64 data bits + CRC(15) + delimiters/ACK + EOF(7).
    comp_kbps = (1 + 11 + 1 + 1 + 1 + 4 + 8 * 8 + 15 + 1 + 1 + 1 + 7) * msg_count / end_time
    return comp_kbps
def reset_pandas():
    """Hard power-cycle every panda attached to the jungle board."""
    for powered, delay in ((False, 3), (True, 5)):
        panda_jungle.set_panda_power(powered)
        time.sleep(delay)
def panda_type_to_serial(fn):
    """Decorator: translate a panda_type kwarg into a list of panda serials.

    The wrapped function is called with a list of serials (one per requested
    hardware type, never reusing the same panda) as its first argument.
    Raises IOError if no unused panda of a requested type is attached.
    """
    @wraps(fn)
    def wrapper(panda_type=None, **kwargs):
        # Normalize panda_type to a list.
        if panda_type is not None and not isinstance(panda_type, list):
            panda_type = [panda_type]
        # If not done already, get panda serials and their type.
        # Fix: `is None`, not `== None`.
        global _panda_serials
        if _panda_serials is None:
            init_panda_serials()
        # Find a panda of each requested type and collect its serial.
        serials = []
        for p_type in panda_type:
            for serial, hw_type in _panda_serials:
                # Never take the same panda twice.
                if hw_type == p_type and serial not in serials:
                    serials.append(serial)
                    break
            else:
                raise IOError("No unused panda found for type: {}".format(p_type))
        return fn(serials, **kwargs)
    return wrapper
def start_heartbeat_thread(p):
    """Spawn a background thread that sends a heartbeat to `p` every second.

    The thread exits on the first communication failure (e.g. the panda was
    disconnected or its handle closed).
    """
    def heartbeat_thread(p):
        while True:
            try:
                p.send_heartbeat()
                time.sleep(1)
            except Exception:
                # Fix: was a bare `except:`, which also swallowed
                # SystemExit/KeyboardInterrupt. Any comms error ends the thread.
                break
    _thread.start_new_thread(heartbeat_thread, (p,))
def panda_connect_and_init(fn):
    """Decorator: connect to the given serials, bring the jungle and every
    panda to a known state, run `fn(*pandas, **kwargs)` under a timeout,
    verify no panda faulted, and always close the connections.

    Fixes vs. the previous version: the initialization code is now inside
    the try/finally as well, so pandas are closed even when setup raises,
    and the pointless `except Exception as e: raise e` re-raise is gone.
    """
    @wraps(fn)
    def wrapper(panda_serials=None, **kwargs):
        # Normalize panda_serials to a list.
        if panda_serials is not None and not isinstance(panda_serials, list):
            panda_serials = [panda_serials]
        # Connect to pandas.
        pandas = [Panda(serial=serial) for serial in panda_serials]
        try:
            # Initialize jungle.
            clear_can_buffers(panda_jungle)
            panda_jungle.set_can_loopback(False)
            panda_jungle.set_obd(False)
            panda_jungle.set_harness_orientation(PandaJungle.HARNESS_ORIENTATION_1)
            for bus, speed in BUS_SPEEDS:
                panda_jungle.set_can_speed_kbps(bus, speed)
            # Initialize pandas.
            for panda in pandas:
                panda.set_can_loopback(False)
                panda.set_gmlan(None)
                panda.set_esp_power(False)
                panda.set_power_save(False)
                for bus, speed in BUS_SPEEDS:
                    panda.set_can_speed_kbps(bus, speed)
                clear_can_buffers(panda)
                panda.set_power_save(False)
            run_with_timeout(TIMEOUT, fn, *pandas, **kwargs)
            # Check that the pandas did not throw any faults while running.
            for panda in pandas:
                panda.reconnect()
                assert panda.health()['fault_status'] == 0
        finally:
            # Close all connections.
            for panda in pandas:
                panda.close()
    return wrapper
def clear_can_buffers(panda):
    """Drain all TX and RX CAN buffers on the given panda."""
    # Clear the four TX buffers.
    for bus in range(4):
        panda.can_clear(bus)
    # Clear the RX buffer, then poll until a read comes back empty.
    panda.can_clear(0xFFFF)
    start = time.time()
    pending = [1]  # sentinel so the loop body runs at least once
    while pending:
        pending = panda.can_recv()
        time.sleep(0.05)
        if (time.time() - start) > 10:
            print("Unable to clear can buffers for panda ", panda.get_serial())
            assert False
| 30.595455 | 102 | 0.696033 | import os
import time
import random
import _thread
import faulthandler
from functools import wraps
from panda import Panda
from panda_jungle import PandaJungle
from nose.tools import assert_equal
from parameterized import parameterized, param
from .timeout import run_with_timeout
SPEED_NORMAL = 500
SPEED_GMLAN = 33.3
BUS_SPEEDS = [(0, SPEED_NORMAL), (1, SPEED_NORMAL), (2, SPEED_NORMAL), (3, SPEED_GMLAN)]
TIMEOUT = 45
H7_HW_TYPES = [Panda.HW_TYPE_RED_PANDA]
GEN2_HW_TYPES = [Panda.HW_TYPE_BLACK_PANDA, Panda.HW_TYPE_UNO] + H7_HW_TYPES
GPS_HW_TYPES = [Panda.HW_TYPE_GREY_PANDA, Panda.HW_TYPE_BLACK_PANDA, Panda.HW_TYPE_UNO]
PEDAL_SERIAL = 'none'
JUNGLE_SERIAL = os.getenv("PANDAS_JUNGLE")
faulthandler.enable(all_threads=False)
panda_jungle = PandaJungle(JUNGLE_SERIAL)
_panda_serials = None
def init_panda_serials():
    """Power up all pandas via the jungle and record (serial, hw_type) pairs.

    Populates the module-level _panda_serials list used by the test
    parameterization helpers below.
    """
    global panda_jungle, _panda_serials
    _panda_serials = []
    panda_jungle.set_panda_power(True)
    time.sleep(5)  # give the boards time to boot and enumerate on USB
    for serial in Panda.list():
        panda = Panda(serial=serial)
        _panda_serials.append((serial, panda.get_type()))
        panda.close()
    print('Found', str(len(_panda_serials)), 'pandas')
init_panda_serials()
# Panda providers
test_all_types = parameterized([
param(panda_type=Panda.HW_TYPE_WHITE_PANDA),
param(panda_type=Panda.HW_TYPE_GREY_PANDA),
param(panda_type=Panda.HW_TYPE_BLACK_PANDA),
param(panda_type=Panda.HW_TYPE_UNO),
param(panda_type=Panda.HW_TYPE_RED_PANDA)
])
test_all_pandas = parameterized(
list(map(lambda x: x[0], filter(lambda x: x[0] != PEDAL_SERIAL, _panda_serials))) # type: ignore
)
test_all_gen2_pandas = parameterized(
list(map(lambda x: x[0], filter(lambda x: x[1] in GEN2_HW_TYPES, _panda_serials))) # type: ignore
)
test_all_gps_pandas = parameterized(
list(map(lambda x: x[0], filter(lambda x: x[1] in GPS_HW_TYPES, _panda_serials))) # type: ignore
)
test_white_and_grey = parameterized([
param(panda_type=Panda.HW_TYPE_WHITE_PANDA),
param(panda_type=Panda.HW_TYPE_GREY_PANDA)
])
test_white = parameterized([
param(panda_type=Panda.HW_TYPE_WHITE_PANDA)
])
test_grey = parameterized([
param(panda_type=Panda.HW_TYPE_GREY_PANDA)
])
test_black = parameterized([
param(panda_type=Panda.HW_TYPE_BLACK_PANDA)
])
test_uno = parameterized([
param(panda_type=Panda.HW_TYPE_UNO)
])
def time_many_sends(p, bus, p_recv=None, msg_count=100, msg_id=None, two_pandas=False):
    """Send msg_count CAN messages on `bus` from panda `p` and measure throughput.

    If `two_pandas` is set, `p_recv` receives the messages while `p` sees only
    the TX echoes; otherwise `p` (in loopback) sees both. Asserts that all
    messages and echoes arrived, then returns the effective bus utilisation
    in kbps (computed from the worst-case CAN 2.0 frame bit count).
    """
    # Fix: compare against None with `is`, not `==`.
    if p_recv is None:
        p_recv = p
    if msg_id is None:
        msg_id = random.randint(0x100, 0x200)
    if p == p_recv and two_pandas:
        raise ValueError("Cannot have two pandas that are the same panda")

    start_time = time.time()
    p.can_send_many([(msg_id, 0, b"\xaa" * 8, bus)] * msg_count)
    r = []
    r_echo = []
    # Loopback mode returns each message twice (echo + reception).
    r_len_expected = msg_count if two_pandas else msg_count * 2
    r_echo_len_expected = msg_count if two_pandas else 0

    while len(r) < r_len_expected and (time.time() - start_time) < 5:
        r.extend(p_recv.can_recv())
    end_time = time.time()
    if two_pandas:
        while len(r_echo) < r_echo_len_expected and (time.time() - start_time) < 10:
            r_echo.extend(p.can_recv())

    # Echoes are reported on bus | 0x80.
    sent_echo = [x for x in r if x[3] == 0x80 | bus and x[0] == msg_id]
    sent_echo.extend([x for x in r_echo if x[3] == 0x80 | bus and x[0] == msg_id])
    resp = [x for x in r if x[3] == bus and x[0] == msg_id]
    leftovers = [x for x in r if (x[3] != 0x80 | bus and x[3] != bus) or x[0] != msg_id]
    assert_equal(len(leftovers), 0)
    assert_equal(len(resp), msg_count)
    assert_equal(len(sent_echo), msg_count)

    end_time = (end_time - start_time) * 1000.0
    # Bits per worst-case standard frame: SOF + ID + RTR + IDE + r0 + DLC
    # + 64 data bits + CRC(15) + delimiters/ACK + EOF(7).
    comp_kbps = (1 + 11 + 1 + 1 + 1 + 4 + 8 * 8 + 15 + 1 + 1 + 1 + 7) * msg_count / end_time
    return comp_kbps
def reset_pandas():
    """Hard power-cycle every panda attached to the jungle board."""
    for powered, delay in ((False, 3), (True, 5)):
        panda_jungle.set_panda_power(powered)
        time.sleep(delay)
def panda_type_to_serial(fn):
    """Decorator: translate a panda_type kwarg into a list of panda serials.

    The wrapped function is called with a list of serials (one per requested
    hardware type, never reusing the same panda) as its first argument.
    Raises IOError if no unused panda of a requested type is attached.
    """
    @wraps(fn)
    def wrapper(panda_type=None, **kwargs):
        # Normalize panda_type to a list.
        if panda_type is not None and not isinstance(panda_type, list):
            panda_type = [panda_type]
        # If not done already, get panda serials and their type.
        # Fix: `is None`, not `== None`.
        global _panda_serials
        if _panda_serials is None:
            init_panda_serials()
        # Find a panda of each requested type and collect its serial.
        serials = []
        for p_type in panda_type:
            for serial, hw_type in _panda_serials:
                # Never take the same panda twice.
                if hw_type == p_type and serial not in serials:
                    serials.append(serial)
                    break
            else:
                raise IOError("No unused panda found for type: {}".format(p_type))
        return fn(serials, **kwargs)
    return wrapper
def start_heartbeat_thread(p):
    """Spawn a background thread that sends a heartbeat to `p` every second.

    The thread exits on the first communication failure (e.g. the panda was
    disconnected or its handle closed).
    """
    def heartbeat_thread(p):
        while True:
            try:
                p.send_heartbeat()
                time.sleep(1)
            except Exception:
                # Fix: was a bare `except:`, which also swallowed
                # SystemExit/KeyboardInterrupt. Any comms error ends the thread.
                break
    _thread.start_new_thread(heartbeat_thread, (p,))
def panda_connect_and_init(fn):
    """Decorator: connect to the given serials, bring the jungle and every
    panda to a known state, run `fn(*pandas, **kwargs)` under a timeout,
    verify no panda faulted, and always close the connections.

    Fixes vs. the previous version: the initialization code is now inside
    the try/finally as well, so pandas are closed even when setup raises,
    and the pointless `except Exception as e: raise e` re-raise is gone.
    """
    @wraps(fn)
    def wrapper(panda_serials=None, **kwargs):
        # Normalize panda_serials to a list.
        if panda_serials is not None and not isinstance(panda_serials, list):
            panda_serials = [panda_serials]
        # Connect to pandas.
        pandas = [Panda(serial=serial) for serial in panda_serials]
        try:
            # Initialize jungle.
            clear_can_buffers(panda_jungle)
            panda_jungle.set_can_loopback(False)
            panda_jungle.set_obd(False)
            panda_jungle.set_harness_orientation(PandaJungle.HARNESS_ORIENTATION_1)
            for bus, speed in BUS_SPEEDS:
                panda_jungle.set_can_speed_kbps(bus, speed)
            # Initialize pandas.
            for panda in pandas:
                panda.set_can_loopback(False)
                panda.set_gmlan(None)
                panda.set_esp_power(False)
                panda.set_power_save(False)
                for bus, speed in BUS_SPEEDS:
                    panda.set_can_speed_kbps(bus, speed)
                clear_can_buffers(panda)
                panda.set_power_save(False)
            run_with_timeout(TIMEOUT, fn, *pandas, **kwargs)
            # Check that the pandas did not throw any faults while running.
            for panda in pandas:
                panda.reconnect()
                assert panda.health()['fault_status'] == 0
        finally:
            # Close all connections.
            for panda in pandas:
                panda.close()
    return wrapper
def clear_can_buffers(panda):
    """Drain all TX and RX CAN buffers on the given panda."""
    # Clear the four TX buffers.
    for bus in range(4):
        panda.can_clear(bus)
    # Clear the RX buffer, then poll until a read comes back empty.
    panda.can_clear(0xFFFF)
    start = time.time()
    pending = [1]  # sentinel so the loop body runs at least once
    while pending:
        pending = panda.can_recv()
        time.sleep(0.05)
        if (time.time() - start) > 10:
            print("Unable to clear can buffers for panda ", panda.get_serial())
            assert False
| true | true |
1c2f95489d4b1945231c9e67928cbaf1ef6c9453 | 1,269 | py | Python | programmers/KAKAO Tests/Constructing Columns and Beams.py | will-data/Self-Study-WIL | 69d627c65130fcfa23f27f97948a20107bb33394 | [
"MIT"
] | 1 | 2020-04-11T09:51:54.000Z | 2020-04-11T09:51:54.000Z | programmers/KAKAO Tests/Constructing Columns and Beams.py | will-data/Self-Study-WIL | 69d627c65130fcfa23f27f97948a20107bb33394 | [
"MIT"
] | null | null | null | programmers/KAKAO Tests/Constructing Columns and Beams.py | will-data/Self-Study-WIL | 69d627c65130fcfa23f27f97948a20107bb33394 | [
"MIT"
] | null | null | null | from operator import itemgetter
def solution(n, buildFrames):
    """Simulate installing/deleting columns and beams on an n x n grid wall.

    Each element of buildFrames is [x, y, a, b]: a=0 column / a=1 beam,
    b=1 install / b=0 delete. An operation is ignored if it would leave any
    frame unsupported. Returns the surviving frames as [x, y, a] lists,
    sorted by (x, y, a).

    Improvement: the support rules were duplicated between the install
    branch and the deletion check; they now live in one helper.
    """
    struc = set()

    def supported(x, y, a):
        """True if frame (x, y, a) is validly supported given struc."""
        if a == 0:
            # Column: on the floor, on top of a column, or on a beam end.
            return (y == 0
                    or not {(x, y - 1, 0), (x, y, 1), (x - 1, y, 1)}.isdisjoint(struc))
        # Beam: at least one end rests on a column top, or both
        # neighbouring beams exist.
        return (not {(x, y - 1, 0), (x + 1, y - 1, 0)}.isdisjoint(struc)
                or {(x - 1, y, 1), (x + 1, y, 1)}.issubset(struc))

    def all_supported():
        return all(supported(x, y, a) for x, y, a in struc)

    for x, y, a, b in buildFrames:
        if b == 1:
            # Install only if the new frame itself would be supported.
            if supported(x, y, a):
                struc.add((x, y, a))
        else:
            # Tentatively delete; revert if anything becomes unsupported.
            struc.discard((x, y, a))
            if not all_supported():
                struc.add((x, y, a))
    return sorted(map(list, struc), key=itemgetter(0, 1, 2))
def solution(n, buildFrames):
struc = set()
def check(struc):
for x, y, a in struc:
if a == 0:
if not (y == 0 or not {(x, y - 1, 0), (x, y, 1), (x - 1, y, 1)}.isdisjoint(struc)):
return False
else:
if not (not {(x, y - 1, 0), (x + 1, y - 1, 0)}.isdisjoint(struc) or {(x - 1, y, 1),
(x + 1, y, 1)}.issubset(struc)):
return False
return True
for x, y, a, b in buildFrames:
if b == 1:
if a == 0:
if (y == 0 or not {(x, y - 1, 0), (x, y, 1), (x - 1, y, 1)}.isdisjoint(struc)):
struc.add((x, y, 0))
else:
if (not {(x, y - 1, 0), (x + 1, y - 1, 0)}.isdisjoint(struc) or {(x - 1, y, 1), (x + 1, y, 1)}.issubset(
struc)):
struc.add((x, y, 1))
elif b == 0:
struc.discard((x, y, a))
if not check(struc):
struc.add((x, y, a))
return sorted(list(map(list, struc)), key=itemgetter(0, 1, 2)) | true | true |
1c2f968603b43bbc0a6b5a27db3d2ef2d84cade7 | 28 | py | Python | tests/unit/providers/utils/__init__.py | YelloFam/python-dependency-injector | 541131e33858ee1b8b5a7590d2bb9f929740ea1e | [
"BSD-3-Clause"
] | null | null | null | tests/unit/providers/utils/__init__.py | YelloFam/python-dependency-injector | 541131e33858ee1b8b5a7590d2bb9f929740ea1e | [
"BSD-3-Clause"
] | null | null | null | tests/unit/providers/utils/__init__.py | YelloFam/python-dependency-injector | 541131e33858ee1b8b5a7590d2bb9f929740ea1e | [
"BSD-3-Clause"
] | null | null | null | """Provider utils tests."""
| 14 | 27 | 0.642857 | true | true | |
1c2f96c90fc6ca21256b6ac8e0cbd8441af6eca9 | 55,885 | py | Python | nematus/config.py | lovodkin93/nematus | 06d53fdb20e72f694daa4ca1d526b99b6dbdde12 | [
"BSD-3-Clause"
] | null | null | null | nematus/config.py | lovodkin93/nematus | 06d53fdb20e72f694daa4ca1d526b99b6dbdde12 | [
"BSD-3-Clause"
] | null | null | null | nematus/config.py | lovodkin93/nematus | 06d53fdb20e72f694daa4ca1d526b99b6dbdde12 | [
"BSD-3-Clause"
] | 2 | 2020-12-14T14:27:16.000Z | 2020-12-16T06:03:45.000Z | import argparse
import collections
import json
import logging
import pickle
import sys
import util
class ParameterSpecification:
    """Describes a Nematus configuration parameter.

    For most parameters this is a thin wrapper around the keyword arguments
    of an argparse.add_argument() call (used when reading parameters from
    the command line rather than from a pre-existing config file). On top
    of that it supports:

      - legacy_names: older parameter names that load_config_from_json_file()
        recognises and upgrades when reading an old JSON config.
      - visible_arg_names / hidden_arg_names: command-line synonyms that
        read_config_from_cmdline() adds to the parser, shown in or hidden
        from the --help output respectively.
      - derivation_func: a function of (config, meta_config) used after the
        initial parsing pass to derive a value from other parameters (e.g.
        dim_per_factor defaulting to [embedding_size] when factors == 1).

    Unlike argparse.add_argument(), a default value is always required: it
    is needed both for argparse and to fill in missing values when reading
    an older JSON config file.

    Parameters without command-line arguments (e.g. model_version) leave
    both visible_arg_names and hidden_arg_names empty.
    """

    def __init__(self, name, default, legacy_names=None, visible_arg_names=None,
                 hidden_arg_names=None, derivation_func=None, **argparse_args):
        """
        Args:
            name: string (must be a valid Python variable name).
            default: the default parameter value (always required).
            legacy_names: list of strings (default: empty list).
            visible_arg_names: list of strings (all must start '-' or '--').
            hidden_arg_names: list of strings (all must start '-' or '--').
            derivation_func: function taking config and meta_config arguments.
            argparse_args: any keyword arguments accepted by argparse.
        """
        self.name = name
        self.default = default
        # Fix: use None sentinels instead of mutable [] defaults so that
        # instances never share (and can never mutate) one default list.
        self.legacy_names = [] if legacy_names is None else legacy_names
        self.visible_arg_names = (
            [] if visible_arg_names is None else visible_arg_names)
        self.hidden_arg_names = (
            [] if hidden_arg_names is None else hidden_arg_names)
        self.derivation_func = derivation_func
        self.argparse_args = argparse_args
        if len(argparse_args) == 0:
            # A parameter without argparse settings cannot define CLI names.
            assert self.visible_arg_names == [] and self.hidden_arg_names == []
        else:
            # argparse needs the default value too; inject it into the kwargs.
            self.argparse_args['default'] = self.default
class ConfigSpecification:
"""A collection of ParameterSpecifications representing a complete config.
The ParameterSpecifications are organised into groups. These are used with
argparse's add_argument_group() mechanism when constructing a command-line
argument parser (in read_config_from_cmdline()). They don't serve any
other role.
The nameless '' group is used for top-level command-line arguments (or it
would be if we had any) and for parameters that don't have corresponding
command-line arguments.
"""
def __init__(self):
    """Build and index the full collection of ParameterSpecifications."""
    # Parameter groups (argparse argument groups) and their descriptions;
    # the nameless '' group holds parameters with no command-line argument.
    self._group_descriptions = collections.OrderedDict([
        ('', None),
        ('data', 'data sets; model loading and saving'),
        ('network', 'network parameters (all model types)'),
        ('network_rnn', 'network parameters (rnn-specific)'),
        ('network_transformer', 'network parameters (transformer-specific)'),
        ('training', 'training parameters'),
        ('validation', 'validation parameters'),
        ('display', 'display parameters'),
        ('translate', 'translate parameters'),
        ('sampling', 'sampling parameters'),
    ])
    # All ParameterSpecification objects, keyed by group name.
    self._param_specs = self._define_param_specs()
    # Fail fast if any parameter name is duplicated.
    self._check_self()
    # Name -> ParameterSpecification lookup table.
    self._name_to_spec = self._build_name_to_spec()
@property
def group_names(self):
    """The parameter group names (a dict key view, in definition order)."""
    descriptions = self._group_descriptions
    return descriptions.keys()
def group_description(self, name):
    """Return the description string for the group called `name`."""
    descriptions = self._group_descriptions
    return descriptions[name]
def params_by_group(self, group_name):
    """Return the list of ParameterSpecifications in `group_name`."""
    specs = self._param_specs
    return specs[group_name]
def lookup(self, name):
    """Look up a ParameterSpecification by name; None if not found."""
    return self._name_to_spec.get(name)
def _define_param_specs(self):
"""Adds all ParameterSpecification objects."""
param_specs = {}
# Add an empty list for each parameter group.
for group in self.group_names:
param_specs[group] = []
# Add non-command-line parameters.
group = param_specs['']
group.append(ParameterSpecification(
name='model_version', default=None,
derivation_func=_derive_model_version))
group.append(ParameterSpecification(
name='theano_compat', default=None,
derivation_func=lambda _, meta_config: meta_config.from_theano))
group.append(ParameterSpecification(
name='source_dicts', default=None,
derivation_func=lambda config, _: config.dictionaries[:-1]))
group.append(ParameterSpecification(
name='target_dict', default=None,
derivation_func=lambda config, _: config.dictionaries[-1]))
group.append(ParameterSpecification(
name='target_embedding_size', default=None,
derivation_func=_derive_target_embedding_size))
# All remaining parameters are command-line parameters.
# Add command-line parameters for the 'data' group.
group = param_specs['data']
group.append(ParameterSpecification(
name='source_dataset', default=None,
visible_arg_names=['--source_dataset'],
derivation_func=_derive_source_dataset,
type=str, metavar='PATH',
help='parallel training corpus (source)'))
group.append(ParameterSpecification(
name='target_dataset', default=None,
visible_arg_names=['--target_dataset'],
derivation_func=_derive_target_dataset,
type=str, metavar='PATH',
help='parallel training corpus (target)'))
# Hidden option for backward compatibility.
group.append(ParameterSpecification(
name='datasets', default=None,
visible_arg_names=[], hidden_arg_names=['--datasets'],
type=str, metavar='PATH', nargs=2))
group.append(ParameterSpecification(
name='dictionaries', default=None,
visible_arg_names=['--dictionaries'], hidden_arg_names=[],
type=str, required=True, metavar='PATH', nargs='+',
help='network vocabularies (one per source factor, plus target '
'vocabulary)'))
group.append(ParameterSpecification(
name='save_freq', default=30000,
legacy_names=['saveFreq'],
visible_arg_names=['--save_freq'], hidden_arg_names=['--saveFreq'],
type=int, metavar='INT',
help='save frequency (default: %(default)s)'))
group.append(ParameterSpecification(
name='saveto', default='model',
visible_arg_names=['--model'], hidden_arg_names=['--saveto'],
type=str, metavar='PATH',
help='model file name (default: %(default)s)'))
group.append(ParameterSpecification(
name='reload', default=None,
visible_arg_names=['--reload'],
type=str, metavar='PATH',
help='load existing model from this path. Set to '
'"latest_checkpoint" to reload the latest checkpoint in the '
'same directory of --model'))
group.append(ParameterSpecification(
name='reload_training_progress', default=True,
visible_arg_names=['--no_reload_training_progress'],
action='store_false',
help='don\'t reload training progress (only used if --reload '
'is enabled)'))
group.append(ParameterSpecification(
name='summary_dir', default=None,
visible_arg_names=['--summary_dir'],
type=str, metavar='PATH',
help='directory for saving summaries (default: same directory '
'as the --model file)'))
group.append(ParameterSpecification(
name='summary_freq', default=0,
legacy_names=['summaryFreq'],
visible_arg_names=['--summary_freq'],
hidden_arg_names=['--summaryFreq'],
type=int, metavar='INT',
help='Save summaries after INT updates, if 0 do not save '
'summaries (default: %(default)s)'))
# Add command-line parameters for 'network' group.
group = param_specs['network']
group.append(ParameterSpecification(
name='model_type', default='rnn',
visible_arg_names=['--model_type'],
type=str, choices=['rnn', 'transformer'],
help='model type (default: %(default)s)'))
group.append(ParameterSpecification(
name='embedding_size', default=512,
legacy_names=['dim_word'],
visible_arg_names=['--embedding_size'],
hidden_arg_names=['--dim_word'],
type=int, metavar='INT',
help='embedding layer size (default: %(default)s)'))
group.append(ParameterSpecification(
name='state_size', default=1000,
legacy_names=['dim'],
visible_arg_names=['--state_size'], hidden_arg_names=['--dim'],
type=int, metavar='INT',
help='hidden state size (default: %(default)s)'))
group.append(ParameterSpecification(
name='source_vocab_sizes', default=None,
visible_arg_names=['--source_vocab_sizes'],
hidden_arg_names=['--n_words_src'],
derivation_func=_derive_source_vocab_sizes,
type=int, metavar='INT', nargs='+',
help='source vocabulary sizes (one per input factor) (default: '
'%(default)s)'))
group.append(ParameterSpecification(
name='target_vocab_size', default=-1,
legacy_names=['n_words'],
visible_arg_names=['--target_vocab_size'],
hidden_arg_names=['--n_words'],
derivation_func=_derive_target_vocab_size,
type=int, metavar='INT',
help='target vocabulary size (default: %(default)s)'))
group.append(ParameterSpecification(
name='factors', default=1,
visible_arg_names=['--factors'],
type=int, metavar='INT',
help='number of input factors (default: %(default)s) - CURRENTLY '
'ONLY WORKS FOR \'rnn\' MODEL'))
group.append(ParameterSpecification(
name='dim_per_factor', default=None,
visible_arg_names=['--dim_per_factor'],
derivation_func=_derive_dim_per_factor,
type=int, metavar='INT', nargs='+',
help='list of word vector dimensionalities (one per factor): '
'\'--dim_per_factor 250 200 50\' for total dimensionality '
'of 500 (default: %(default)s)'))
group.append(ParameterSpecification(
name='tie_encoder_decoder_embeddings', default=False,
visible_arg_names=['--tie_encoder_decoder_embeddings'],
action='store_true',
help='tie the input embeddings of the encoder and the decoder '
'(first factor only). Source and target vocabulary size '
'must be the same'))
group.append(ParameterSpecification(
name='tie_decoder_embeddings', default=False,
visible_arg_names=['--tie_decoder_embeddings'],
action='store_true',
help='tie the input embeddings of the decoder with the softmax '
'output embeddings'))
group.append(ParameterSpecification(
name='output_hidden_activation', default='tanh',
visible_arg_names=['--output_hidden_activation'],
type=str, choices=['tanh', 'relu', 'prelu', 'linear'],
help='activation function in hidden layer of the output '
'network (default: %(default)s) - CURRENTLY ONLY WORKS '
'FOR \'rnn\' MODEL'))
group.append(ParameterSpecification(
name='softmax_mixture_size', default=1,
visible_arg_names=['--softmax_mixture_size'],
type=int, metavar='INT',
help='number of softmax components to use (default: '
'%(default)s) - CURRENTLY ONLY WORKS FOR \'rnn\' MODEL'))
# Add command-line parameters for 'network_rnn' group.
group = param_specs['network_rnn']
# NOTE: parameter names in this group must use the rnn_ prefix.
# read_config_from_cmdline() uses this to check that only
# model type specific options are only used with the appropriate
# model type.
group.append(ParameterSpecification(
name='rnn_enc_depth', default=1,
legacy_names=['enc_depth'],
visible_arg_names=['--rnn_enc_depth'],
hidden_arg_names=['--enc_depth'],
type=int, metavar='INT',
help='number of encoder layers (default: %(default)s)'))
group.append(ParameterSpecification(
name='rnn_enc_transition_depth', default=1,
legacy_names=['enc_recurrence_transition_depth'],
visible_arg_names=['--rnn_enc_transition_depth'],
hidden_arg_names=['--enc_recurrence_transition_depth'],
type=int, metavar='INT',
help='number of GRU transition operations applied in the '
'encoder. Minimum is 1. (Only applies to gru). (default: '
'%(default)s)'))
group.append(ParameterSpecification(
name='rnn_dec_depth', default=1,
legacy_names=['dec_depth'],
visible_arg_names=['--rnn_dec_depth'],
hidden_arg_names=['--dec_depth'],
type=int, metavar='INT',
help='number of decoder layers (default: %(default)s)'))
group.append(ParameterSpecification(
name='rnn_dec_base_transition_depth', default=2,
legacy_names=['dec_base_recurrence_transition_depth'],
visible_arg_names=['--rnn_dec_base_transition_depth'],
hidden_arg_names=['--dec_base_recurrence_transition_depth'],
type=int, metavar='INT',
help='number of GRU transition operations applied in the first '
'layer of the decoder. Minimum is 2. (Only applies to '
'gru_cond). (default: %(default)s)'))
group.append(ParameterSpecification(
name='rnn_dec_high_transition_depth', default=1,
legacy_names=['dec_high_recurrence_transition_depth'],
visible_arg_names=['--rnn_dec_high_transition_depth'],
hidden_arg_names=['--dec_high_recurrence_transition_depth'],
type=int, metavar='INT',
help='number of GRU transition operations applied in the higher '
'layers of the decoder. Minimum is 1. (Only applies to '
'gru). (default: %(default)s)'))
group.append(ParameterSpecification(
name='rnn_dec_deep_context', default=False,
legacy_names=['dec_deep_context'],
visible_arg_names=['--rnn_dec_deep_context'],
hidden_arg_names=['--dec_deep_context'],
action='store_true',
help='pass context vector (from first layer) to deep decoder '
'layers'))
group.append(ParameterSpecification(
name='rnn_use_dropout', default=False,
legacy_names=['use_dropout'],
visible_arg_names=['--rnn_use_dropout'],
hidden_arg_names=['--use_dropout'],
action='store_true',
help='use dropout layer (default: %(default)s)'))
group.append(ParameterSpecification(
name='rnn_dropout_embedding', default=None,
legacy_names=['dropout_embedding'],
visible_arg_names=['--rnn_dropout_embedding'],
hidden_arg_names=['--dropout_embedding'],
derivation_func=_derive_rnn_dropout_embedding,
type=float, metavar='FLOAT',
# FIXME rnn_dropout_embedding effectively has two defaults,
# depending on whether we're reading from the command-
# line or from a JSON config - does this make sense?
# We hardcode the former here.
help='dropout for input embeddings (0: no dropout) (default: '
'0.2)'))
group.append(ParameterSpecification(
name='rnn_dropout_hidden', default=None,
legacy_names=['dropout_hidden'],
visible_arg_names=['--rnn_dropout_hidden'],
hidden_arg_names=['--dropout_hidden'],
derivation_func=_derive_rnn_dropout_hidden,
type=float, metavar='FLOAT',
# FIXME rnn_dropout_hidden effectively has two defaults,
# depending on whether we're reading from the command-
# line or from a JSON config - does this make sense?
# We hardcode the former here.
help='dropout for hidden layer (0: no dropout) (default: 0.2)'))
group.append(ParameterSpecification(
name='rnn_dropout_source', default=0.0,
legacy_names=['dropout_source'],
visible_arg_names=['--rnn_dropout_source'],
hidden_arg_names=['--dropout_source'],
type=float, metavar='FLOAT',
help='dropout source words (0: no dropout) (default: '
'%(default)s)'))
group.append(ParameterSpecification(
name='rnn_dropout_target', default=0.0,
legacy_names=['dropout_target'],
visible_arg_names=['--rnn_dropout_target'],
hidden_arg_names=['--dropout_target'],
type=float, metavar='FLOAT',
help='dropout target words (0: no dropout) (default: '
'%(default)s)'))
group.append(ParameterSpecification(
name='rnn_layer_normalization', default=False,
legacy_names=['use_layer_norm', 'layer_normalisation'],
visible_arg_names=['--rnn_layer_normalisation'],
hidden_arg_names=['--use_layer_norm', '--layer_normalisation'],
action='store_true',
help='Set to use layer normalization in encoder and decoder'))
group.append(ParameterSpecification(
name='rnn_lexical_model', default=False,
legacy_names=['lexical_model'],
visible_arg_names=['--rnn_lexical_model'],
hidden_arg_names=['--lexical_model'],
action='store_true',
help='Enable feedforward lexical model (Nguyen and Chiang, 2018)'))
# Add command-line parameters for 'network_transformer' group.
group = param_specs['network_transformer']
# NOTE: parameter names in this group must use the transformer_ prefix.
# read_config_from_cmdline() uses this to check that only
# model type specific options are only used with the appropriate
# model type.
group.append(ParameterSpecification(
name='transformer_enc_depth', default=6,
visible_arg_names=['--transformer_enc_depth'],
type=int, metavar='INT',
help='number of encoder layers (default: %(default)s)'))
group.append(ParameterSpecification(
name='transformer_dec_depth', default=6,
visible_arg_names=['--transformer_dec_depth'],
type=int, metavar='INT',
help='number of decoder layers (default: %(default)s)'))
group.append(ParameterSpecification(
name='transformer_ffn_hidden_size', default=2048,
visible_arg_names=['--transformer_ffn_hidden_size'],
type=int, metavar='INT',
help='inner dimensionality of feed-forward sub-layers (default: '
'%(default)s)'))
group.append(ParameterSpecification(
name='transformer_num_heads', default=8,
visible_arg_names=['--transformer_num_heads'],
type=int, metavar='INT',
help='number of attention heads used in multi-head attention '
'(default: %(default)s)'))
group.append(ParameterSpecification(
name='transformer_dropout_embeddings', default=0.1,
visible_arg_names=['--transformer_dropout_embeddings'],
type=float, metavar='FLOAT',
help='dropout applied to sums of word embeddings and positional '
'encodings (default: %(default)s)'))
group.append(ParameterSpecification(
name='transformer_dropout_residual', default=0.1,
visible_arg_names=['--transformer_dropout_residual'],
type=float, metavar='FLOAT',
help='dropout applied to residual connections (default: '
'%(default)s)'))
group.append(ParameterSpecification(
name='transformer_dropout_relu', default=0.1,
visible_arg_names=['--transformer_dropout_relu'],
type=float, metavar='FLOAT',
help='dropout applied to the internal activation of the '
'feed-forward sub-layers (default: %(default)s)'))
group.append(ParameterSpecification(
name='transformer_dropout_attn', default=0.1,
visible_arg_names=['--transformer_dropout_attn'],
type=float, metavar='FLOAT',
help='dropout applied to attention weights (default: '
'%(default)s)'))
# Add command-line parameters for 'training' group.
group = param_specs['training']
group.append(ParameterSpecification(
name='loss_function', default='cross-entropy',
visible_arg_names=['--loss_function'],
type=str, choices=['cross-entropy', 'per-token-cross-entropy'],
help='loss function (default: %(default)s)'))
group.append(ParameterSpecification(
name='decay_c', default=0.0,
visible_arg_names=['--decay_c'],
type=float, metavar='FLOAT',
help='L2 regularization penalty (default: %(default)s)'))
group.append(ParameterSpecification(
name='map_decay_c', default=0.0,
visible_arg_names=['--map_decay_c'],
type=float, metavar='FLOAT',
help='MAP-L2 regularization penalty towards original weights '
'(default: %(default)s)'))
group.append(ParameterSpecification(
name='prior_model', default=None,
visible_arg_names=['--prior_model'],
type=str, metavar='PATH',
help='Prior model for MAP-L2 regularization. Unless using '
'\"--reload\", this will also be used for initialization.'))
group.append(ParameterSpecification(
name='clip_c', default=1.0,
visible_arg_names=['--clip_c'],
type=float, metavar='FLOAT',
help='gradient clipping threshold (default: %(default)s)'))
group.append(ParameterSpecification(
name='label_smoothing', default=0.0,
visible_arg_names=['--label_smoothing'],
type=float, metavar='FLOAT',
help='label smoothing (default: %(default)s)'))
group.append(ParameterSpecification(
name='optimizer', default='adam',
visible_arg_names=['--optimizer'],
type=str, choices=['adam'],
help='optimizer (default: %(default)s)'))
group.append(ParameterSpecification(
name='adam_beta1', default=0.9,
visible_arg_names=['--adam_beta1'],
type=float, metavar='FLOAT',
help='exponential decay rate for the first moment estimates '
'(default: %(default)s)'))
group.append(ParameterSpecification(
name='adam_beta2', default=0.999,
visible_arg_names=['--adam_beta2'],
type=float, metavar='FLOAT',
help='exponential decay rate for the second moment estimates '
'(default: %(default)s)'))
group.append(ParameterSpecification(
name='adam_epsilon', default=1e-08,
visible_arg_names=['--adam_epsilon'],
type=float, metavar='FLOAT',
help='constant for numerical stability (default: %(default)s)'))
group.append(ParameterSpecification(
name='learning_schedule', default='constant',
visible_arg_names=['--learning_schedule'],
type=str, choices=['constant', 'transformer'],
help='learning schedule (default: %(default)s)'))
group.append(ParameterSpecification(
name='learning_rate', default=0.0001,
visible_arg_names=['--learning_rate'],
hidden_arg_names=['--lrate'],
legacy_names=['lrate'],
type=float, metavar='FLOAT',
help='learning rate (default: %(default)s)'))
group.append(ParameterSpecification(
name='warmup_steps', default=8000,
visible_arg_names=['--warmup_steps'],
type=int, metavar='INT',
help='number of initial updates during which the learning rate is '
'increased linearly during learning rate scheduling '
'(default: %(default)s)'))
group.append(ParameterSpecification(
name='maxlen', default=100,
visible_arg_names=['--maxlen'],
type=int, metavar='INT',
help='maximum sequence length for training and validation '
'(default: %(default)s)'))
group.append(ParameterSpecification(
name='batch_size', default=80,
visible_arg_names=['--batch_size'],
type=int, metavar='INT',
help='minibatch size (default: %(default)s)'))
group.append(ParameterSpecification(
name='token_batch_size', default=0,
visible_arg_names=['--token_batch_size'],
type=int, metavar='INT',
help='minibatch size (expressed in number of source or target '
'tokens). Sentence-level minibatch size will be dynamic. If '
'this is enabled, batch_size only affects sorting by '
'length. (default: %(default)s)'))
group.append(ParameterSpecification(
name='max_sentences_per_device', default=0,
visible_arg_names=['--max_sentences_per_device'],
type=int, metavar='INT',
help='maximum size of minibatch subset to run on a single device, '
'in number of sentences (default: %(default)s)'))
group.append(ParameterSpecification(
name='max_tokens_per_device', default=0,
visible_arg_names=['--max_tokens_per_device'],
type=int, metavar='INT',
help='maximum size of minibatch subset to run on a single device, '
'in number of tokens (either source or target - whichever is '
'highest) (default: %(default)s)'))
group.append(ParameterSpecification(
name='gradient_aggregation_steps', default=1,
visible_arg_names=['--gradient_aggregation_steps'],
type=int, metavar='INT',
help='number of times to accumulate gradients before aggregating '
'and applying; the minibatch is split between steps, so '
'adding more steps allows larger minibatches to be used '
'(default: %(default)s)'))
group.append(ParameterSpecification(
name='maxibatch_size', default=20,
visible_arg_names=['--maxibatch_size'],
type=int, metavar='INT',
help='size of maxibatch (number of minibatches that are sorted '
'by length) (default: %(default)s)'))
group.append(ParameterSpecification(
name='sort_by_length', default=True,
visible_arg_names=['--no_sort_by_length'],
action='store_false',
help='do not sort sentences in maxibatch by length'))
group.append(ParameterSpecification(
name='shuffle_each_epoch', default=True,
visible_arg_names=['--no_shuffle'],
action='store_false',
help='disable shuffling of training data (for each epoch)'))
group.append(ParameterSpecification(
name='keep_train_set_in_memory', default=False,
visible_arg_names=['--keep_train_set_in_memory'],
action='store_true',
help='Keep training dataset lines stores in RAM during training'))
group.append(ParameterSpecification(
name='max_epochs', default=5000,
visible_arg_names=['--max_epochs'],
type=int, metavar='INT',
help='maximum number of epochs (default: %(default)s)'))
group.append(ParameterSpecification(
name='finish_after', default=10000000,
visible_arg_names=['--finish_after'],
type=int, metavar='INT',
help='maximum number of updates (minibatches) (default: '
'%(default)s)'))
# Add command-line parameters for 'validation' group.
group = param_specs['validation']
group.append(ParameterSpecification(
name='valid_source_dataset', default=None,
visible_arg_names=['--valid_source_dataset'],
derivation_func=_derive_valid_source_dataset,
type=str, metavar='PATH',
help='source validation corpus (default: %(default)s)'))
group.append(ParameterSpecification(
name='valid_target_dataset', default=None,
visible_arg_names=['--valid_target_dataset'],
derivation_func=_derive_valid_target_dataset,
type=str, metavar='PATH',
help='target validation corpus (default: %(default)s)'))
# Hidden option for backward compatibility.
group.append(ParameterSpecification(
name='valid_datasets', default=None,
hidden_arg_names=['--valid_datasets'],
type=str, metavar='PATH', nargs=2))
group.append(ParameterSpecification(
name='valid_batch_size', default=80,
visible_arg_names=['--valid_batch_size'],
type=int, metavar='INT',
help='validation minibatch size (default: %(default)s)'))
group.append(ParameterSpecification(
name='valid_token_batch_size', default=0,
visible_arg_names=['--valid_token_batch_size'],
type=int, metavar='INT',
help='validation minibatch size (expressed in number of source '
'or target tokens). Sentence-level minibatch size will be '
'dynamic. If this is enabled, valid_batch_size only affects '
'sorting by length. (default: %(default)s)'))
group.append(ParameterSpecification(
name='valid_freq', default=10000,
legacy_names=['validFreq'],
visible_arg_names=['--valid_freq'],
hidden_arg_names=['--validFreq'],
type=int, metavar='INT',
help='validation frequency (default: %(default)s)'))
group.append(ParameterSpecification(
name='valid_script', default=None,
visible_arg_names=['--valid_script'],
type=str, metavar='PATH',
help='path to script for external validation (default: '
'%(default)s). The script will be passed an argument '
'specifying the path of a file that contains translations '
'of the source validation corpus. It must write a single '
'score to standard output.'))
group.append(ParameterSpecification(
name='patience', default=10,
visible_arg_names=['--patience'],
type=int, metavar='INT',
help='early stopping patience (default: %(default)s)'))
# Add command-line parameters for 'display' group.
group = param_specs['display']
group.append(ParameterSpecification(
name='disp_freq', default=1000,
legacy_names=['dispFreq'],
visible_arg_names=['--disp_freq'], hidden_arg_names=['--dispFreq'],
type=int, metavar='INT',
help='display loss after INT updates (default: %(default)s)'))
group.append(ParameterSpecification(
name='sample_freq', default=10000,
legacy_names=['sampleFreq'],
visible_arg_names=['--sample_freq'],
hidden_arg_names=['--sampleFreq'],
type=int, metavar='INT',
help='display some samples after INT updates (default: '
'%(default)s)'))
group.append(ParameterSpecification(
name='beam_freq', default=10000,
legacy_names=['beamFreq'],
visible_arg_names=['--beam_freq'], hidden_arg_names=['--beamFreq'],
type=int, metavar='INT',
help='display some beam_search samples after INT updates '
'(default: %(default)s)'))
group.append(ParameterSpecification(
name='beam_size', default=12,
visible_arg_names=['--beam_size'],
type=int, metavar='INT',
help='size of the beam (default: %(default)s)'))
# Add command-line parameters for 'translate' group.
group = param_specs['translate']
group.append(ParameterSpecification(
name='normalization_alpha', type=float, default=0.0, nargs="?",
const=1.0, metavar="ALPHA",
visible_arg_names=['--normalization_alpha'],
help='normalize scores by sentence length (with argument, " \
"exponentiate lengths by ALPHA)'))
group.append(ParameterSpecification(
name='n_best', default=False,
visible_arg_names=['--n_best'],
action='store_true', dest='n_best',
help='Print full beam'))
group.append(ParameterSpecification(
name='translation_maxlen', default=200,
visible_arg_names=['--translation_maxlen'],
type=int, metavar='INT',
help='Maximum length of translation output sentence (default: '
'%(default)s)'))
group.append(ParameterSpecification(
name='translation_strategy', default='beam_search',
visible_arg_names=['--translation_strategy'],
type=str, choices=['beam_search', 'sampling'],
help='translation_strategy, either beam_search or sampling (default: %(default)s)'))
# Add command-line parameters for 'sampling' group.
group = param_specs['sampling']
group.append(ParameterSpecification(
name='sampling_temperature', type=float, default=1.0,
metavar="FLOAT",
visible_arg_names=['--sampling_temperature'],
help='softmax temperature used for sampling (default %(default)s)'))
return param_specs
def _build_name_to_spec(self):
name_to_spec = {}
for group in self.group_names:
for param in self.params_by_group(group):
for name in [param.name] + param.legacy_names:
assert name not in name_to_spec
name_to_spec[name] = param
return name_to_spec
def _check_self(self):
# Check that there are no duplicated parameter names.
param_names = set()
for group in self.group_names:
for param in self.params_by_group(group):
assert param.name not in param_names
param_names.add(param.name)
for name in param.legacy_names:
assert name not in param_names
param_names.add(name)
# Check that there are no duplicated command-line argument names.
arg_names = set()
for group in self.group_names:
for param in self.params_by_group(group):
for arg_list in (param.visible_arg_names,
param.hidden_arg_names):
for name in arg_list:
assert name not in arg_names
arg_names.add(param.name)
def _construct_argument_parser(spec, suppress_missing=False):
    """Constructs an argparse.ArgumentParser given a ConfigSpecification.
    Setting suppress_missing to True causes the parser to suppress arguments
    that are not supplied by the user (as opposed to adding them with
    their default values).
    Args:
        spec: a ConfigSpecification object.
        suppress_missing: Boolean
    Returns:
        An argparse.ArgumentParser.
    """
    parser = argparse.ArgumentParser()
    for group_name in spec.group_names:
        # The nameless group's arguments go directly on the parser itself;
        # every other group becomes an argparse argument group.
        if group_name == "":
            arg_target = parser
        else:
            arg_target = parser.add_argument_group(
                spec.group_description(group_name))
        for param in spec.params_by_group(group_name):
            has_visible = len(param.visible_arg_names) > 0
            has_hidden = len(param.hidden_arg_names) > 0
            if not has_visible and not has_hidden:
                # Internal parameter - no command-line argument.
                continue
            kwargs = dict(param.argparse_args)
            kwargs['dest'] = param.name
            if suppress_missing:
                # Leave unspecified arguments out of the parsed Namespace so
                # callers can tell which ones the user actually set.
                kwargs['default'] = argparse.SUPPRESS
            if not has_visible:
                # Hidden-only (legacy) argument: register it undocumented.
                kwargs['help'] = argparse.SUPPRESS
                arg_target.add_argument(*param.hidden_arg_names, **kwargs)
                continue
            # Visible argument, possibly with hidden legacy aliases. The
            # aliases share a mutually exclusive group with the visible form
            # so only one spelling can be given on any single command line.
            required = bool(kwargs.pop('required', False))
            mutex_group = arg_target.add_mutually_exclusive_group(
                required=required)
            mutex_group.add_argument(*param.visible_arg_names, **kwargs)
            if has_hidden:
                kwargs['help'] = argparse.SUPPRESS
                mutex_group.add_argument(*param.hidden_arg_names, **kwargs)
    return parser
def read_config_from_cmdline():
    """Reads a config from the command-line.
    Logs an error and exits if the parameter values are not mutually
    consistent.
    Returns:
        An argparse.Namespace object representing the config.
    """
    spec = ConfigSpecification()
    # Parse the command line into a config Namespace.
    parser = _construct_argument_parser(spec)
    config = parser.parse_args()
    # Parse a second time with default=argparse.SUPPRESS on every argument.
    # The resulting Namespace only contains the parameters the user actually
    # supplied. Solution is from https://stackoverflow.com/a/45803037
    aux_parser = _construct_argument_parser(spec, suppress_missing=True)
    set_by_user = set(vars(aux_parser.parse_args()).keys())
    # Perform consistency checks; abort if any fail.
    error_messages = _check_config_consistency(spec, config, set_by_user)
    if error_messages:
        for message in error_messages:
            logging.error(message)
        sys.exit(1)
    # Set meta parameters.
    meta_config = argparse.Namespace(from_cmdline=True, from_theano=False)
    # Fill in derived parameters.
    for group_name in spec.group_names:
        for param in spec.params_by_group(group_name):
            if param.derivation_func is None:
                continue
            setattr(config, param.name,
                    param.derivation_func(config, meta_config))
    return config
def write_config_to_json_file(config, path):
    """
    Writes a config object to a JSON file.
    Args:
        config: a config Namespace object
        path: full path to the JSON file except ".json" suffix
    """
    # Sort keys so the file is deterministic and easy to diff.
    config_as_dict = collections.OrderedDict(sorted(vars(config).items()))
    # Use a context manager so the file handle is always closed and the data
    # flushed (the original passed open() directly to json.dump and leaked
    # the handle). utf-8 matches load_config_from_json_file.
    with open('%s.json' % path, 'w', encoding='utf-8') as f:
        json.dump(config_as_dict, f, indent=2)
def load_config_from_json_file(basename):
    """Loads and, if necessary, updates a config from a JSON (or Pickle) file.
    Logs an error and exits if the file can't be loaded.
    Args:
        basename: a string containing the path to the corresponding model file.
    Returns:
        An argparse.Namespace object representing the config.
    """
    spec = ConfigSpecification()
    # Load a config from a JSON (or legacy Pickle) config file. Catch only
    # I/O and parse errors (the original used bare 'except:', which also
    # swallowed KeyboardInterrupt/SystemExit and masked real bugs).
    try:
        with open('%s.json' % basename, 'r', encoding='utf-8') as f:
            config_as_dict = json.load(f)
    except (OSError, ValueError):
        # json.JSONDecodeError is a subclass of ValueError.
        try:
            # Pickle files are binary: open in 'rb'. The original opened in
            # text mode with an encoding, which pickle.load cannot read.
            with open('%s.pkl' % basename, 'rb') as f:
                config_as_dict = pickle.load(f)
        except (OSError, pickle.UnpicklingError):
            logging.error('config file {}.json is missing'.format(basename))
            sys.exit(1)
    config = argparse.Namespace(**config_as_dict)
    # Set meta parameters.
    meta_config = argparse.Namespace()
    meta_config.from_cmdline = False
    # Theano-era configs predate the 'embedding_size' parameter.
    meta_config.from_theano = (not hasattr(config, 'embedding_size'))
    # Update config to use current parameter names.
    for group_name in spec.group_names:
        for param in spec.params_by_group(group_name):
            for legacy_name in param.legacy_names:
                # TODO It shouldn't happen, but check for multiple names
                #      (legacy and/or current) for same parameter appearing
                #      in config.
                if hasattr(config, legacy_name):
                    val = getattr(config, legacy_name)
                    assert not hasattr(config, param.name)
                    setattr(config, param.name, val)
                    delattr(config, legacy_name)
    # Add missing parameters with their defaults.
    for group_name in spec.group_names:
        for param in spec.params_by_group(group_name):
            if not hasattr(config, param.name):
                setattr(config, param.name, param.default)
    # Run derivation functions to fill in derived parameters.
    for group in spec.group_names:
        for param in spec.params_by_group(group):
            if param.derivation_func is not None:
                setattr(config, param.name,
                        param.derivation_func(config, meta_config))
    return config
def _check_config_consistency(spec, config, set_by_user):
"""Performs consistency checks on a config read from the command-line.
Args:
spec: a ConfigSpecification object.
config: an argparse.Namespace object.
set_by_user: a set of strings representing parameter names.
Returns:
A list of error messages, one for each check that failed. An empty
list indicates that all checks passed.
"""
def arg_names_string(param):
arg_names = param.visible_arg_names + param.hidden_arg_names
return ' / '.join(arg_names)
error_messages = []
# Check parameters are appropriate for the model type.
assert config.model_type is not None
for group in spec.group_names:
for param in spec.params_by_group(group):
if param.name not in set_by_user:
continue
if ((param.name.startswith('rnn_') and
config.model_type == 'transformer') or
(param.name.startswith('transformer_') and
config.model_type == 'rnn')):
msg = '{} cannot be used with \'{}\' model type'.format(
arg_names_string(param), config.model_type)
error_messages.append(msg)
# Check user-supplied learning schedule options are consistent.
if config.learning_schedule == 'constant':
param = spec.lookup('warmup_steps')
assert param is not None
if param.name in set_by_user:
msg = '{} cannot be used with \'constant\' learning ' \
'schedule'.format(arg_names_string(param),
config.model_type)
error_messages.append(msg)
elif config.learning_schedule == 'transformer':
param = spec.lookup('learning_rate')
assert param is not None
if param.name in set_by_user:
msg = '{} cannot be used with \'transformer\' learning ' \
'schedule'.format(arg_names_string(param), config.model_type)
error_messages.append(msg)
# TODO Other similar checks? e.g. check user hasn't set adam parameters
# if optimizer != 'adam' (not currently possible but probably will
# be in in the future)...
# Check if user is trying to use the Transformer with features that
# aren't supported yet.
if config.model_type == 'transformer':
if config.factors > 1:
msg = 'factors are not yet supported for the \'transformer\' ' \
'model type'
error_messages.append(msg)
if config.softmax_mixture_size > 1:
msg = 'softmax mixtures are not yet supported for the ' \
'\'transformer\' model type'
error_messages.append(msg)
if config.datasets:
if config.source_dataset or config.target_dataset:
msg = 'argument clash: --datasets is mutually exclusive ' \
'with --source_dataset and --target_dataset'
error_messages.append(msg)
elif not config.source_dataset:
msg = '--source_dataset is required'
error_messages.append(msg)
elif not config.target_dataset:
msg = '--target_dataset is required'
error_messages.append(msg)
if config.valid_datasets:
if config.valid_source_dataset or config.valid_target_dataset:
msg = 'argument clash: --valid_datasets is mutually ' \
'exclusive with --valid_source_dataset and ' \
'--valid_target_dataset'
error_messages.append(msg)
if (config.source_vocab_sizes is not None and
len(config.source_vocab_sizes) > config.factors):
msg = 'too many values supplied to \'--source_vocab_sizes\' option ' \
'(expected one per factor = {})'.format(config.factors)
error_messages.append(msg)
if config.dim_per_factor is None and config.factors != 1:
msg = 'if using factored input, you must specify \'dim_per_factor\''
error_messages.append(msg)
if config.dim_per_factor is not None:
if len(config.dim_per_factor) != config.factors:
msg = 'mismatch between \'--factors\' ({0}) and ' \
'\'--dim_per_factor\' ({1} entries)'.format(
config.factors, len(config.dim_per_factor))
error_messages.append(msg)
elif sum(config.dim_per_factor) != config.embedding_size:
msg = 'mismatch between \'--embedding_size\' ({0}) and ' \
'\'--dim_per_factor\' (sums to {1})\''.format(
config.embedding_size, sum(config.dim_per_factor))
error_messages.append(msg)
if len(config.dictionaries) != config.factors + 1:
msg = '\'--dictionaries\' must specify one dictionary per source ' \
'factor and one target dictionary'
error_messages.append(msg)
max_sents_param = spec.lookup('max_sentences_per_device')
max_tokens_param = spec.lookup('max_tokens_per_device')
# TODO Extend ParameterSpecification to support mutually exclusive
# command-line args.
if (max_sents_param.name in set_by_user
and max_tokens_param.name in set_by_user):
msg = '{} is mutually exclusive with {}'.format(
arg_names_string(max_sents_param),
arg_names_string(max_tokens_param))
error_messages.append(msg)
aggregation_param = spec.lookup('gradient_aggregation_steps')
if (aggregation_param.name in set_by_user
and (max_sents_param.name in set_by_user
or max_tokens_param.name in set_by_user)):
msg = '{} is mutually exclusive with {} / {}'.format(
arg_names_string(aggregation_param),
arg_names_string(max_sents_param),
arg_names_string(max_tokens_param))
error_messages.append(msg)
# softmax_mixture_size and lexical_model are currently mutually exclusive:
if config.softmax_mixture_size > 1 and config.rnn_lexical_model:
error_messages.append('behavior of --rnn_lexical_model is undefined if softmax_mixture_size > 1')
return error_messages
def _derive_model_version(config, meta_config):
if meta_config.from_cmdline:
# We're creating a new model - set the current version number.
return 0.2
if config.model_version is not None:
return config.model_version
if meta_config.from_theano and config.rnn_use_dropout:
logging.error('version 0 dropout is not supported in '
'TensorFlow Nematus')
sys.exit(1)
return 0.1
def _derive_target_embedding_size(config, meta_config):
assert hasattr(config, 'embedding_size')
if not config.tie_encoder_decoder_embeddings:
return config.embedding_size
if config.factors > 1:
assert hasattr(config, 'dim_per_factor')
assert config.dim_per_factor is not None
return config.dim_per_factor[0]
else:
return config.embedding_size
def _derive_source_dataset(config, meta_config):
if config.source_dataset is not None:
return config.source_dataset
assert config.datasets is not None
return config.datasets[0]
def _derive_target_dataset(config, meta_config):
if config.target_dataset is not None:
return config.target_dataset
assert config.datasets is not None
return config.datasets[1]
def _derive_source_vocab_sizes(config, meta_config):
if config.source_vocab_sizes is not None:
if len(config.source_vocab_sizes) == config.factors:
# Case 1: we're loading parameters from a recent config or
# we're processing command-line arguments and
# a source_vocab_sizes was fully specified.
return config.source_vocab_sizes
else:
# Case 2: source_vocab_sizes was given on the command-line
# but was only partially specified
assert meta_config.from_cmdline
assert len(config.source_vocab_sizes) < config.factors
num_missing = config.factors - len(config.source_vocab_sizes)
vocab_sizes = config.source_vocab_sizes + [-1] * num_missing
elif hasattr(config, 'n_words_src'):
# Case 3: we're loading parameters from a Theano config.
# This will always contain a single value for the
# source vocab size regardless of how many factors
# there are.
assert not meta_config.from_cmdline
assert meta_config.from_theano
assert type(config.n_words_src) == int
return [config.n_words_src] * config.factors
elif hasattr(config, 'source_vocab_size'):
# Case 4: we're loading parameters from a pre-factors
# TensorFlow config.
assert not meta_config.from_cmdline
assert not meta_config.from_theano
assert config.factors == 1
return [config.source_vocab_size]
else:
# Case 5: we're reading command-line parameters and
# --source_vocab_size was not given.
assert meta_config.from_cmdline
vocab_sizes = [-1] * config.factors
# For any unspecified vocabulary sizes, determine sizes from the
# vocabulary dictionaries.
for i, vocab_size in enumerate(vocab_sizes):
if vocab_size >= 0:
continue
path = config.dictionaries[i]
vocab_sizes[i] = _determine_vocab_size_from_file(path,
config.model_type)
return vocab_sizes
def _derive_target_vocab_size(config, meta_config):
if config.target_vocab_size != -1:
return config.target_vocab_size
path = config.dictionaries[-1]
return _determine_vocab_size_from_file(path, config.model_type)
def _derive_dim_per_factor(config, meta_config):
if config.dim_per_factor is not None:
return config.dim_per_factor
assert config.factors == 1
return [config.embedding_size]
def _derive_rnn_dropout_embedding(config, meta_config):
if config.rnn_dropout_embedding is not None:
return config.rnn_dropout_embedding
return 0.2 if meta_config.from_cmdline else 0.0
def _derive_rnn_dropout_hidden(config, meta_config):
if config.rnn_dropout_hidden is not None:
return config.rnn_dropout_hidden
return 0.2 if meta_config.from_cmdline else 0.0
def _derive_valid_source_dataset(config, meta_config):
if config.valid_source_dataset is not None:
return config.valid_source_dataset
if config.valid_datasets is not None:
return config.valid_datasets[0]
return None
def _derive_valid_target_dataset(config, meta_config):
if config.valid_target_dataset is not None:
return config.valid_target_dataset
if config.valid_datasets is not None:
return config.valid_datasets[1]
return None
def _determine_vocab_size_from_file(path, model_type):
    """Infer a vocabulary size by loading the dictionary file at `path`.

    Logs an error and exits the program if the file cannot be read or
    parsed.

    Args:
        path: a string containing the path to the dictionary file.
        model_type: a string, either 'rnn' or 'transformer'.

    Returns:
        An int: the number of vocabulary entries (max ID + 1).
    """
    try:
        d = util.load_dict(path, model_type)
    except IOError as x:
        logging.error('failed to determine vocabulary size from file: '
                      '{}: {}'.format(path, str(x)))
        sys.exit(1)
    except Exception:
        # Any other load/parse failure. Deliberately not a bare `except:`,
        # which would also swallow KeyboardInterrupt and SystemExit.
        logging.error('failed to determine vocabulary size from file: '
                      '{}'.format(path))
        sys.exit(1)
    # IDs are assumed contiguous from 0, so the size is the max ID + 1.
    return max(d.values()) + 1
| 42.273071 | 104 | 0.620471 | import argparse
import collections
import json
import logging
import pickle
import sys
import util
class ParameterSpecification:
    """Specification of a single Nematus configuration parameter.

    Records the parameter's canonical name, its default, any legacy
    aliases, the visible/hidden command-line argument names, an optional
    derivation function, and extra keyword arguments to forward to
    argparse's add_argument().
    """

    def __init__(self, name, default, legacy_names=None,
                 visible_arg_names=None, hidden_arg_names=None,
                 derivation_func=None, **argparse_args):
        self.name = name
        self.default = default
        # Use None sentinels instead of mutable default arguments: a `[]`
        # default would be a single list shared between every instance.
        self.legacy_names = [] if legacy_names is None else legacy_names
        self.visible_arg_names = \
            [] if visible_arg_names is None else visible_arg_names
        self.hidden_arg_names = \
            [] if hidden_arg_names is None else hidden_arg_names
        self.derivation_func = derivation_func
        self.argparse_args = argparse_args
        if len(argparse_args) == 0:
            # Internal parameter: must not declare any command-line names.
            assert self.visible_arg_names == [] and \
                self.hidden_arg_names == []
        else:
            self.argparse_args['default'] = self.default
class ConfigSpecification:
def __init__(self):
description_pairs = [
('', None),
('data', 'data sets; model loading and saving'),
('network', 'network parameters (all model types)'),
('network_rnn', 'network parameters (rnn-specific)'),
('network_transformer', 'network parameters (transformer-'
'specific)'),
('training', 'training parameters'),
('validation', 'validation parameters'),
('display', 'display parameters'),
('translate', 'translate parameters'),
('sampling', 'sampling parameters'),
]
self._group_descriptions = collections.OrderedDict(description_pairs)
self._param_specs = self._define_param_specs()
self._check_self()
self._name_to_spec = self._build_name_to_spec()
@property
def group_names(self):
return self._group_descriptions.keys()
def group_description(self, name):
return self._group_descriptions[name]
def params_by_group(self, group_name):
return self._param_specs[group_name]
def lookup(self, name):
return self._name_to_spec.get(name, None)
def _define_param_specs(self):
param_specs = {}
for group in self.group_names:
param_specs[group] = []
group = param_specs['']
group.append(ParameterSpecification(
name='model_version', default=None,
derivation_func=_derive_model_version))
group.append(ParameterSpecification(
name='theano_compat', default=None,
derivation_func=lambda _, meta_config: meta_config.from_theano))
group.append(ParameterSpecification(
name='source_dicts', default=None,
derivation_func=lambda config, _: config.dictionaries[:-1]))
group.append(ParameterSpecification(
name='target_dict', default=None,
derivation_func=lambda config, _: config.dictionaries[-1]))
group.append(ParameterSpecification(
name='target_embedding_size', default=None,
derivation_func=_derive_target_embedding_size))
group = param_specs['data']
group.append(ParameterSpecification(
name='source_dataset', default=None,
visible_arg_names=['--source_dataset'],
derivation_func=_derive_source_dataset,
type=str, metavar='PATH',
help='parallel training corpus (source)'))
group.append(ParameterSpecification(
name='target_dataset', default=None,
visible_arg_names=['--target_dataset'],
derivation_func=_derive_target_dataset,
type=str, metavar='PATH',
help='parallel training corpus (target)'))
group.append(ParameterSpecification(
name='datasets', default=None,
visible_arg_names=[], hidden_arg_names=['--datasets'],
type=str, metavar='PATH', nargs=2))
group.append(ParameterSpecification(
name='dictionaries', default=None,
visible_arg_names=['--dictionaries'], hidden_arg_names=[],
type=str, required=True, metavar='PATH', nargs='+',
help='network vocabularies (one per source factor, plus target '
'vocabulary)'))
group.append(ParameterSpecification(
name='save_freq', default=30000,
legacy_names=['saveFreq'],
visible_arg_names=['--save_freq'], hidden_arg_names=['--saveFreq'],
type=int, metavar='INT',
help='save frequency (default: %(default)s)'))
group.append(ParameterSpecification(
name='saveto', default='model',
visible_arg_names=['--model'], hidden_arg_names=['--saveto'],
type=str, metavar='PATH',
help='model file name (default: %(default)s)'))
group.append(ParameterSpecification(
name='reload', default=None,
visible_arg_names=['--reload'],
type=str, metavar='PATH',
help='load existing model from this path. Set to '
'"latest_checkpoint" to reload the latest checkpoint in the '
'same directory of --model'))
group.append(ParameterSpecification(
name='reload_training_progress', default=True,
visible_arg_names=['--no_reload_training_progress'],
action='store_false',
help='don\'t reload training progress (only used if --reload '
'is enabled)'))
group.append(ParameterSpecification(
name='summary_dir', default=None,
visible_arg_names=['--summary_dir'],
type=str, metavar='PATH',
help='directory for saving summaries (default: same directory '
'as the --model file)'))
group.append(ParameterSpecification(
name='summary_freq', default=0,
legacy_names=['summaryFreq'],
visible_arg_names=['--summary_freq'],
hidden_arg_names=['--summaryFreq'],
type=int, metavar='INT',
help='Save summaries after INT updates, if 0 do not save '
'summaries (default: %(default)s)'))
# Add command-line parameters for 'network' group.
group = param_specs['network']
group.append(ParameterSpecification(
name='model_type', default='rnn',
visible_arg_names=['--model_type'],
type=str, choices=['rnn', 'transformer'],
help='model type (default: %(default)s)'))
group.append(ParameterSpecification(
name='embedding_size', default=512,
legacy_names=['dim_word'],
visible_arg_names=['--embedding_size'],
hidden_arg_names=['--dim_word'],
type=int, metavar='INT',
help='embedding layer size (default: %(default)s)'))
group.append(ParameterSpecification(
name='state_size', default=1000,
legacy_names=['dim'],
visible_arg_names=['--state_size'], hidden_arg_names=['--dim'],
type=int, metavar='INT',
help='hidden state size (default: %(default)s)'))
group.append(ParameterSpecification(
name='source_vocab_sizes', default=None,
visible_arg_names=['--source_vocab_sizes'],
hidden_arg_names=['--n_words_src'],
derivation_func=_derive_source_vocab_sizes,
type=int, metavar='INT', nargs='+',
help='source vocabulary sizes (one per input factor) (default: '
'%(default)s)'))
group.append(ParameterSpecification(
name='target_vocab_size', default=-1,
legacy_names=['n_words'],
visible_arg_names=['--target_vocab_size'],
hidden_arg_names=['--n_words'],
derivation_func=_derive_target_vocab_size,
type=int, metavar='INT',
help='target vocabulary size (default: %(default)s)'))
group.append(ParameterSpecification(
name='factors', default=1,
visible_arg_names=['--factors'],
type=int, metavar='INT',
help='number of input factors (default: %(default)s) - CURRENTLY '
'ONLY WORKS FOR \'rnn\' MODEL'))
group.append(ParameterSpecification(
name='dim_per_factor', default=None,
visible_arg_names=['--dim_per_factor'],
derivation_func=_derive_dim_per_factor,
type=int, metavar='INT', nargs='+',
help='list of word vector dimensionalities (one per factor): '
'\'--dim_per_factor 250 200 50\' for total dimensionality '
'of 500 (default: %(default)s)'))
group.append(ParameterSpecification(
name='tie_encoder_decoder_embeddings', default=False,
visible_arg_names=['--tie_encoder_decoder_embeddings'],
action='store_true',
help='tie the input embeddings of the encoder and the decoder '
'(first factor only). Source and target vocabulary size '
'must be the same'))
group.append(ParameterSpecification(
name='tie_decoder_embeddings', default=False,
visible_arg_names=['--tie_decoder_embeddings'],
action='store_true',
help='tie the input embeddings of the decoder with the softmax '
'output embeddings'))
group.append(ParameterSpecification(
name='output_hidden_activation', default='tanh',
visible_arg_names=['--output_hidden_activation'],
type=str, choices=['tanh', 'relu', 'prelu', 'linear'],
help='activation function in hidden layer of the output '
'network (default: %(default)s) - CURRENTLY ONLY WORKS '
'FOR \'rnn\' MODEL'))
group.append(ParameterSpecification(
name='softmax_mixture_size', default=1,
visible_arg_names=['--softmax_mixture_size'],
type=int, metavar='INT',
help='number of softmax components to use (default: '
'%(default)s) - CURRENTLY ONLY WORKS FOR \'rnn\' MODEL'))
# Add command-line parameters for 'network_rnn' group.
group = param_specs['network_rnn']
# NOTE: parameter names in this group must use the rnn_ prefix.
# read_config_from_cmdline() uses this to check that only
# model type specific options are only used with the appropriate
# model type.
group.append(ParameterSpecification(
name='rnn_enc_depth', default=1,
legacy_names=['enc_depth'],
visible_arg_names=['--rnn_enc_depth'],
hidden_arg_names=['--enc_depth'],
type=int, metavar='INT',
help='number of encoder layers (default: %(default)s)'))
group.append(ParameterSpecification(
name='rnn_enc_transition_depth', default=1,
legacy_names=['enc_recurrence_transition_depth'],
visible_arg_names=['--rnn_enc_transition_depth'],
hidden_arg_names=['--enc_recurrence_transition_depth'],
type=int, metavar='INT',
help='number of GRU transition operations applied in the '
'encoder. Minimum is 1. (Only applies to gru). (default: '
'%(default)s)'))
group.append(ParameterSpecification(
name='rnn_dec_depth', default=1,
legacy_names=['dec_depth'],
visible_arg_names=['--rnn_dec_depth'],
hidden_arg_names=['--dec_depth'],
type=int, metavar='INT',
help='number of decoder layers (default: %(default)s)'))
group.append(ParameterSpecification(
name='rnn_dec_base_transition_depth', default=2,
legacy_names=['dec_base_recurrence_transition_depth'],
visible_arg_names=['--rnn_dec_base_transition_depth'],
hidden_arg_names=['--dec_base_recurrence_transition_depth'],
type=int, metavar='INT',
help='number of GRU transition operations applied in the first '
'layer of the decoder. Minimum is 2. (Only applies to '
'gru_cond). (default: %(default)s)'))
group.append(ParameterSpecification(
name='rnn_dec_high_transition_depth', default=1,
legacy_names=['dec_high_recurrence_transition_depth'],
visible_arg_names=['--rnn_dec_high_transition_depth'],
hidden_arg_names=['--dec_high_recurrence_transition_depth'],
type=int, metavar='INT',
help='number of GRU transition operations applied in the higher '
'layers of the decoder. Minimum is 1. (Only applies to '
'gru). (default: %(default)s)'))
group.append(ParameterSpecification(
name='rnn_dec_deep_context', default=False,
legacy_names=['dec_deep_context'],
visible_arg_names=['--rnn_dec_deep_context'],
hidden_arg_names=['--dec_deep_context'],
action='store_true',
help='pass context vector (from first layer) to deep decoder '
'layers'))
group.append(ParameterSpecification(
name='rnn_use_dropout', default=False,
legacy_names=['use_dropout'],
visible_arg_names=['--rnn_use_dropout'],
hidden_arg_names=['--use_dropout'],
action='store_true',
help='use dropout layer (default: %(default)s)'))
group.append(ParameterSpecification(
name='rnn_dropout_embedding', default=None,
legacy_names=['dropout_embedding'],
visible_arg_names=['--rnn_dropout_embedding'],
hidden_arg_names=['--dropout_embedding'],
derivation_func=_derive_rnn_dropout_embedding,
type=float, metavar='FLOAT',
# FIXME rnn_dropout_embedding effectively has two defaults,
# depending on whether we're reading from the command-
help='dropout for input embeddings (0: no dropout) (default: '
'0.2)'))
group.append(ParameterSpecification(
name='rnn_dropout_hidden', default=None,
legacy_names=['dropout_hidden'],
visible_arg_names=['--rnn_dropout_hidden'],
hidden_arg_names=['--dropout_hidden'],
derivation_func=_derive_rnn_dropout_hidden,
type=float, metavar='FLOAT',
# line or from a JSON config - does this make sense?
# We hardcode the former here.
help='dropout for hidden layer (0: no dropout) (default: 0.2)'))
group.append(ParameterSpecification(
name='rnn_dropout_source', default=0.0,
legacy_names=['dropout_source'],
visible_arg_names=['--rnn_dropout_source'],
hidden_arg_names=['--dropout_source'],
type=float, metavar='FLOAT',
help='dropout source words (0: no dropout) (default: '
'%(default)s)'))
group.append(ParameterSpecification(
name='rnn_dropout_target', default=0.0,
legacy_names=['dropout_target'],
visible_arg_names=['--rnn_dropout_target'],
hidden_arg_names=['--dropout_target'],
type=float, metavar='FLOAT',
help='dropout target words (0: no dropout) (default: '
'%(default)s)'))
group.append(ParameterSpecification(
name='rnn_layer_normalization', default=False,
legacy_names=['use_layer_norm', 'layer_normalisation'],
visible_arg_names=['--rnn_layer_normalisation'],
hidden_arg_names=['--use_layer_norm', '--layer_normalisation'],
action='store_true',
help='Set to use layer normalization in encoder and decoder'))
group.append(ParameterSpecification(
name='rnn_lexical_model', default=False,
legacy_names=['lexical_model'],
visible_arg_names=['--rnn_lexical_model'],
hidden_arg_names=['--lexical_model'],
action='store_true',
help='Enable feedforward lexical model (Nguyen and Chiang, 2018)'))
# Add command-line parameters for 'network_transformer' group.
group = param_specs['network_transformer']
# NOTE: parameter names in this group must use the transformer_ prefix.
# read_config_from_cmdline() uses this to check that only
# model type specific options are only used with the appropriate
# model type.
group.append(ParameterSpecification(
name='transformer_enc_depth', default=6,
visible_arg_names=['--transformer_enc_depth'],
type=int, metavar='INT',
help='number of encoder layers (default: %(default)s)'))
group.append(ParameterSpecification(
name='transformer_dec_depth', default=6,
visible_arg_names=['--transformer_dec_depth'],
type=int, metavar='INT',
help='number of decoder layers (default: %(default)s)'))
group.append(ParameterSpecification(
name='transformer_ffn_hidden_size', default=2048,
visible_arg_names=['--transformer_ffn_hidden_size'],
type=int, metavar='INT',
help='inner dimensionality of feed-forward sub-layers (default: '
'%(default)s)'))
group.append(ParameterSpecification(
name='transformer_num_heads', default=8,
visible_arg_names=['--transformer_num_heads'],
type=int, metavar='INT',
help='number of attention heads used in multi-head attention '
'(default: %(default)s)'))
group.append(ParameterSpecification(
name='transformer_dropout_embeddings', default=0.1,
visible_arg_names=['--transformer_dropout_embeddings'],
type=float, metavar='FLOAT',
help='dropout applied to sums of word embeddings and positional '
'encodings (default: %(default)s)'))
group.append(ParameterSpecification(
name='transformer_dropout_residual', default=0.1,
visible_arg_names=['--transformer_dropout_residual'],
type=float, metavar='FLOAT',
help='dropout applied to residual connections (default: '
'%(default)s)'))
group.append(ParameterSpecification(
name='transformer_dropout_relu', default=0.1,
visible_arg_names=['--transformer_dropout_relu'],
type=float, metavar='FLOAT',
help='dropout applied to the internal activation of the '
'feed-forward sub-layers (default: %(default)s)'))
group.append(ParameterSpecification(
name='transformer_dropout_attn', default=0.1,
visible_arg_names=['--transformer_dropout_attn'],
type=float, metavar='FLOAT',
help='dropout applied to attention weights (default: '
'%(default)s)'))
# Add command-line parameters for 'training' group.
group = param_specs['training']
group.append(ParameterSpecification(
name='loss_function', default='cross-entropy',
visible_arg_names=['--loss_function'],
type=str, choices=['cross-entropy', 'per-token-cross-entropy'],
help='loss function (default: %(default)s)'))
group.append(ParameterSpecification(
name='decay_c', default=0.0,
visible_arg_names=['--decay_c'],
type=float, metavar='FLOAT',
help='L2 regularization penalty (default: %(default)s)'))
group.append(ParameterSpecification(
name='map_decay_c', default=0.0,
visible_arg_names=['--map_decay_c'],
type=float, metavar='FLOAT',
help='MAP-L2 regularization penalty towards original weights '
'(default: %(default)s)'))
group.append(ParameterSpecification(
name='prior_model', default=None,
visible_arg_names=['--prior_model'],
type=str, metavar='PATH',
help='Prior model for MAP-L2 regularization. Unless using '
'\"--reload\", this will also be used for initialization.'))
group.append(ParameterSpecification(
name='clip_c', default=1.0,
visible_arg_names=['--clip_c'],
type=float, metavar='FLOAT',
help='gradient clipping threshold (default: %(default)s)'))
group.append(ParameterSpecification(
name='label_smoothing', default=0.0,
visible_arg_names=['--label_smoothing'],
type=float, metavar='FLOAT',
help='label smoothing (default: %(default)s)'))
group.append(ParameterSpecification(
name='optimizer', default='adam',
visible_arg_names=['--optimizer'],
type=str, choices=['adam'],
help='optimizer (default: %(default)s)'))
group.append(ParameterSpecification(
name='adam_beta1', default=0.9,
visible_arg_names=['--adam_beta1'],
type=float, metavar='FLOAT',
help='exponential decay rate for the first moment estimates '
'(default: %(default)s)'))
group.append(ParameterSpecification(
name='adam_beta2', default=0.999,
visible_arg_names=['--adam_beta2'],
type=float, metavar='FLOAT',
help='exponential decay rate for the second moment estimates '
'(default: %(default)s)'))
group.append(ParameterSpecification(
name='adam_epsilon', default=1e-08,
visible_arg_names=['--adam_epsilon'],
type=float, metavar='FLOAT',
help='constant for numerical stability (default: %(default)s)'))
group.append(ParameterSpecification(
name='learning_schedule', default='constant',
visible_arg_names=['--learning_schedule'],
type=str, choices=['constant', 'transformer'],
help='learning schedule (default: %(default)s)'))
group.append(ParameterSpecification(
name='learning_rate', default=0.0001,
visible_arg_names=['--learning_rate'],
hidden_arg_names=['--lrate'],
legacy_names=['lrate'],
type=float, metavar='FLOAT',
help='learning rate (default: %(default)s)'))
group.append(ParameterSpecification(
name='warmup_steps', default=8000,
visible_arg_names=['--warmup_steps'],
type=int, metavar='INT',
help='number of initial updates during which the learning rate is '
'increased linearly during learning rate scheduling '
'(default: %(default)s)'))
group.append(ParameterSpecification(
name='maxlen', default=100,
visible_arg_names=['--maxlen'],
type=int, metavar='INT',
help='maximum sequence length for training and validation '
'(default: %(default)s)'))
group.append(ParameterSpecification(
name='batch_size', default=80,
visible_arg_names=['--batch_size'],
type=int, metavar='INT',
help='minibatch size (default: %(default)s)'))
group.append(ParameterSpecification(
name='token_batch_size', default=0,
visible_arg_names=['--token_batch_size'],
type=int, metavar='INT',
help='minibatch size (expressed in number of source or target '
'tokens). Sentence-level minibatch size will be dynamic. If '
'this is enabled, batch_size only affects sorting by '
'length. (default: %(default)s)'))
group.append(ParameterSpecification(
name='max_sentences_per_device', default=0,
visible_arg_names=['--max_sentences_per_device'],
type=int, metavar='INT',
help='maximum size of minibatch subset to run on a single device, '
'in number of sentences (default: %(default)s)'))
group.append(ParameterSpecification(
name='max_tokens_per_device', default=0,
visible_arg_names=['--max_tokens_per_device'],
type=int, metavar='INT',
help='maximum size of minibatch subset to run on a single device, '
'in number of tokens (either source or target - whichever is '
'highest) (default: %(default)s)'))
group.append(ParameterSpecification(
name='gradient_aggregation_steps', default=1,
visible_arg_names=['--gradient_aggregation_steps'],
type=int, metavar='INT',
help='number of times to accumulate gradients before aggregating '
'and applying; the minibatch is split between steps, so '
'adding more steps allows larger minibatches to be used '
'(default: %(default)s)'))
group.append(ParameterSpecification(
name='maxibatch_size', default=20,
visible_arg_names=['--maxibatch_size'],
type=int, metavar='INT',
help='size of maxibatch (number of minibatches that are sorted '
'by length) (default: %(default)s)'))
group.append(ParameterSpecification(
name='sort_by_length', default=True,
visible_arg_names=['--no_sort_by_length'],
action='store_false',
help='do not sort sentences in maxibatch by length'))
group.append(ParameterSpecification(
name='shuffle_each_epoch', default=True,
visible_arg_names=['--no_shuffle'],
action='store_false',
help='disable shuffling of training data (for each epoch)'))
group.append(ParameterSpecification(
name='keep_train_set_in_memory', default=False,
visible_arg_names=['--keep_train_set_in_memory'],
action='store_true',
help='Keep training dataset lines stores in RAM during training'))
group.append(ParameterSpecification(
name='max_epochs', default=5000,
visible_arg_names=['--max_epochs'],
type=int, metavar='INT',
help='maximum number of epochs (default: %(default)s)'))
group.append(ParameterSpecification(
name='finish_after', default=10000000,
visible_arg_names=['--finish_after'],
type=int, metavar='INT',
help='maximum number of updates (minibatches) (default: '
'%(default)s)'))
# Add command-line parameters for 'validation' group.
group = param_specs['validation']
group.append(ParameterSpecification(
name='valid_source_dataset', default=None,
visible_arg_names=['--valid_source_dataset'],
derivation_func=_derive_valid_source_dataset,
type=str, metavar='PATH',
help='source validation corpus (default: %(default)s)'))
group.append(ParameterSpecification(
name='valid_target_dataset', default=None,
visible_arg_names=['--valid_target_dataset'],
derivation_func=_derive_valid_target_dataset,
type=str, metavar='PATH',
help='target validation corpus (default: %(default)s)'))
# Hidden option for backward compatibility.
group.append(ParameterSpecification(
name='valid_datasets', default=None,
hidden_arg_names=['--valid_datasets'],
type=str, metavar='PATH', nargs=2))
group.append(ParameterSpecification(
name='valid_batch_size', default=80,
visible_arg_names=['--valid_batch_size'],
type=int, metavar='INT',
help='validation minibatch size (default: %(default)s)'))
group.append(ParameterSpecification(
name='valid_token_batch_size', default=0,
visible_arg_names=['--valid_token_batch_size'],
type=int, metavar='INT',
help='validation minibatch size (expressed in number of source '
'or target tokens). Sentence-level minibatch size will be '
'dynamic. If this is enabled, valid_batch_size only affects '
'sorting by length. (default: %(default)s)'))
group.append(ParameterSpecification(
name='valid_freq', default=10000,
legacy_names=['validFreq'],
visible_arg_names=['--valid_freq'],
hidden_arg_names=['--validFreq'],
type=int, metavar='INT',
help='validation frequency (default: %(default)s)'))
group.append(ParameterSpecification(
name='valid_script', default=None,
visible_arg_names=['--valid_script'],
type=str, metavar='PATH',
help='path to script for external validation (default: '
'%(default)s). The script will be passed an argument '
'specifying the path of a file that contains translations '
'of the source validation corpus. It must write a single '
'score to standard output.'))
group.append(ParameterSpecification(
name='patience', default=10,
visible_arg_names=['--patience'],
type=int, metavar='INT',
help='early stopping patience (default: %(default)s)'))
# Add command-line parameters for 'display' group.
group = param_specs['display']
group.append(ParameterSpecification(
name='disp_freq', default=1000,
legacy_names=['dispFreq'],
visible_arg_names=['--disp_freq'], hidden_arg_names=['--dispFreq'],
type=int, metavar='INT',
help='display loss after INT updates (default: %(default)s)'))
group.append(ParameterSpecification(
name='sample_freq', default=10000,
legacy_names=['sampleFreq'],
visible_arg_names=['--sample_freq'],
hidden_arg_names=['--sampleFreq'],
type=int, metavar='INT',
help='display some samples after INT updates (default: '
'%(default)s)'))
group.append(ParameterSpecification(
name='beam_freq', default=10000,
legacy_names=['beamFreq'],
visible_arg_names=['--beam_freq'], hidden_arg_names=['--beamFreq'],
type=int, metavar='INT',
help='display some beam_search samples after INT updates '
'(default: %(default)s)'))
group.append(ParameterSpecification(
name='beam_size', default=12,
visible_arg_names=['--beam_size'],
type=int, metavar='INT',
help='size of the beam (default: %(default)s)'))
# Add command-line parameters for 'translate' group.
group = param_specs['translate']
group.append(ParameterSpecification(
name='normalization_alpha', type=float, default=0.0, nargs="?",
const=1.0, metavar="ALPHA",
visible_arg_names=['--normalization_alpha'],
help='normalize scores by sentence length (with argument, " \
"exponentiate lengths by ALPHA)'))
group.append(ParameterSpecification(
name='n_best', default=False,
visible_arg_names=['--n_best'],
action='store_true', dest='n_best',
help='Print full beam'))
group.append(ParameterSpecification(
name='translation_maxlen', default=200,
visible_arg_names=['--translation_maxlen'],
type=int, metavar='INT',
help='Maximum length of translation output sentence (default: '
'%(default)s)'))
group.append(ParameterSpecification(
name='translation_strategy', default='beam_search',
visible_arg_names=['--translation_strategy'],
type=str, choices=['beam_search', 'sampling'],
help='translation_strategy, either beam_search or sampling (default: %(default)s)'))
# Add command-line parameters for 'sampling' group.
group = param_specs['sampling']
group.append(ParameterSpecification(
name='sampling_temperature', type=float, default=1.0,
metavar="FLOAT",
visible_arg_names=['--sampling_temperature'],
help='softmax temperature used for sampling (default %(default)s)'))
return param_specs
def _build_name_to_spec(self):
    """Build a lookup table from parameter name to its specification.

    Every parameter is indexed under its current name and under each of
    its legacy aliases, so callers can resolve either spelling.

    Returns
    -------
    dict
        Maps each name/alias string to its ParameterSpecification.
    """
    name_to_spec = {}
    for group_name in self.group_names:
        for spec in self.params_by_group(group_name):
            for alias in [spec.name] + spec.legacy_names:
                # Names must be globally unique across all groups.
                assert alias not in name_to_spec
                name_to_spec[alias] = spec
    return name_to_spec
def _check_self(self):
    """Sanity-check the specification for duplicate names.

    Verifies that (a) no parameter name or legacy alias is declared
    twice across all groups and (b) no command-line argument string
    (visible or hidden) is declared twice.
    """
    # Check that there are no duplicated parameter names.
    param_names = set()
    for group in self.group_names:
        for param in self.params_by_group(group):
            assert param.name not in param_names
            param_names.add(param.name)
            for name in param.legacy_names:
                assert name not in param_names
                param_names.add(name)
    # Check that there are no duplicated command-line argument names.
    arg_names = set()
    for group in self.group_names:
        for param in self.params_by_group(group):
            for arg_list in (param.visible_arg_names,
                             param.hidden_arg_names):
                for name in arg_list:
                    assert name not in arg_names
                    # Bug fix: record the argument string just checked.
                    # The original added param.name here, so duplicate
                    # argument strings belonging to different parameters
                    # were never detected by the assert above.
                    arg_names.add(name)
def _construct_argument_parser(spec, suppress_missing=False):
    """Build an argparse.ArgumentParser from a ConfigSpecification.

    Parameters
    ----------
    spec : ConfigSpecification
        Source of parameter groups and per-parameter argparse settings.
    suppress_missing : bool
        When True, every argument's default is argparse.SUPPRESS so the
        resulting namespace contains only options the user actually
        typed (used to detect explicitly-set options).

    Returns
    -------
    argparse.ArgumentParser
    """
    # Construct an ArgumentParser and parse command-line args.
    parser = argparse.ArgumentParser()
    for group_name in spec.group_names:
        if group_name == "":
            # Ungrouped parameters attach directly to the parser.
            target = parser
        else:
            description = spec.group_description(group_name)
            target = parser.add_argument_group(description)
        for param in spec.params_by_group(group_name):
            if param.visible_arg_names == [] and param.hidden_arg_names == []:
                # Internal parameter - no command-line argument.
                continue
            # Copy so we can tweak per-parser without mutating the spec.
            argparse_args = dict(param.argparse_args)
            argparse_args['dest'] = param.name
            if suppress_missing:
                argparse_args['default'] = argparse.SUPPRESS
            if param.visible_arg_names == []:
                # Legacy-only parameter: register it but hide it from --help.
                argparse_args['help'] = argparse.SUPPRESS
                target.add_argument(*param.hidden_arg_names, **argparse_args)
                continue
            # Visible and hidden spellings of the same option must be
            # mutually exclusive; 'required' moves onto the group itself.
            if 'required' in argparse_args and argparse_args['required']:
                mutex_group = \
                    target.add_mutually_exclusive_group(required=True)
                del argparse_args['required']
            else:
                mutex_group = target.add_mutually_exclusive_group()
            mutex_group.add_argument(*param.visible_arg_names, **argparse_args)
            # Add any hidden arguments for this param.
            if len(param.hidden_arg_names) > 0:
                argparse_args['help'] = argparse.SUPPRESS
                mutex_group.add_argument(*param.hidden_arg_names,
                                         **argparse_args)
    return parser
def read_config_from_cmdline():
    """Parse command-line arguments into a fully-derived config Namespace.

    Logs every consistency problem and exits with status 1 when the
    supplied options are inconsistent.
    """
    spec = ConfigSpecification()
    # First pass: normal parse, defaults filled in.
    parser = _construct_argument_parser(spec)
    config = parser.parse_args()
    # Second pass with default=argparse.SUPPRESS everywhere, so the
    # namespace contains only options the user explicitly typed.
    # Technique from https://stackoverflow.com/a/45803037
    aux_parser = _construct_argument_parser(spec, suppress_missing=True)
    set_by_user = set(vars(aux_parser.parse_args()).keys())
    # Perform consistency checks.
    problems = _check_config_consistency(spec, config, set_by_user)
    if problems:
        for problem in problems:
            logging.error(problem)
        sys.exit(1)
    # Meta parameters drive the per-parameter derivation functions.
    meta_config = argparse.Namespace(from_cmdline=True, from_theano=False)
    for group_name in spec.group_names:
        for param in spec.params_by_group(group_name):
            if param.derivation_func is not None:
                setattr(config, param.name,
                        param.derivation_func(config, meta_config))
    return config
def write_config_to_json_file(config, path):
    """Serialize a config namespace to ``<path>.json``.

    Parameters
    ----------
    config : argparse.Namespace (or any object supporting vars())
        Configuration to serialize; attributes are written in sorted
        key order for stable, diff-friendly output.
    path : str
        Output basename; '.json' is appended.
    """
    config_as_dict = collections.OrderedDict(sorted(vars(config).items()))
    # Use a context manager so the file handle is deterministically
    # flushed and closed (the original left the handle open until GC).
    with open('%s.json' % path, 'w') as f:
        json.dump(config_as_dict, f, indent=2)
def load_config_from_json_file(basename):
    """Load a model config from ``<basename>.json`` (or legacy ``.pkl``).

    Legacy parameter names are rewritten to their current names, missing
    parameters are filled with their defaults, and all derivation
    functions are run. Exits with status 1 if neither file can be read.
    """
    spec = ConfigSpecification()
    # Load a config from a JSON (or legacy Pickle) config file.
    try:
        with open('%s.json' % basename, 'r', encoding='utf-8') as f:
            config_as_dict = json.load(f)
    except Exception:
        try:
            # Bug fix: pickle files must be opened in *binary* mode; the
            # original opened with mode 'r' and encoding='utf-8', which
            # makes pickle.load() fail unconditionally.  Also narrowed
            # the bare 'except:' clauses so SystemExit/KeyboardInterrupt
            # are no longer swallowed, and added context managers so the
            # handles are always closed.
            with open('%s.pkl' % basename, 'rb') as f:
                config_as_dict = pickle.load(f)
        except Exception:
            logging.error('config file {}.json is missing'.format(basename))
            sys.exit(1)
    config = argparse.Namespace(**config_as_dict)
    # Set meta parameters.  Theano-era configs predate 'embedding_size'.
    meta_config = argparse.Namespace()
    meta_config.from_cmdline = False
    meta_config.from_theano = (not hasattr(config, 'embedding_size'))
    # Update config to use current parameter names.
    for group_name in spec.group_names:
        for param in spec.params_by_group(group_name):
            for legacy_name in param.legacy_names:
                # TODO It shouldn't happen, but check for multiple names
                if hasattr(config, legacy_name):
                    val = getattr(config, legacy_name)
                    assert not hasattr(config, param.name)
                    setattr(config, param.name, val)
                    delattr(config, legacy_name)
    # Fill in defaults for parameters absent from the stored file.
    for group_name in spec.group_names:
        for param in spec.params_by_group(group_name):
            if not hasattr(config, param.name):
                setattr(config, param.name, param.default)
    # Run derivation functions.
    for group in spec.group_names:
        for param in spec.params_by_group(group):
            if param.derivation_func is not None:
                setattr(config, param.name,
                        param.derivation_func(config, meta_config))
    return config
def _check_config_consistency(spec, config, set_by_user):
    """Cross-validate parsed command-line options.

    Parameters
    ----------
    spec : ConfigSpecification
        Parameter specification (used for lookups and argument names).
    config : argparse.Namespace
        Fully parsed configuration.
    set_by_user : set of str
        Names of the parameters the user explicitly set.

    Returns
    -------
    list of str
        Human-readable error messages; empty when consistent.
    """
    def arg_names_string(param):
        # All command-line spellings of a parameter, for error messages.
        arg_names = param.visible_arg_names + param.hidden_arg_names
        return ' / '.join(arg_names)
    error_messages = []
    assert config.model_type is not None
    # Reject model-specific options used with the wrong model type.
    for group in spec.group_names:
        for param in spec.params_by_group(group):
            if param.name not in set_by_user:
                continue
            if ((param.name.startswith('rnn_') and
                 config.model_type == 'transformer') or
                (param.name.startswith('transformer_') and
                 config.model_type == 'rnn')):
                msg = '{} cannot be used with \'{}\' model type'.format(
                    arg_names_string(param), config.model_type)
                error_messages.append(msg)
    # Schedule-specific options.
    # NOTE(review): in both branches below the extra config.model_type
    # argument passed to format() is unused by the format string.
    if config.learning_schedule == 'constant':
        param = spec.lookup('warmup_steps')
        assert param is not None
        if param.name in set_by_user:
            msg = '{} cannot be used with \'constant\' learning ' \
                  'schedule'.format(arg_names_string(param),
                                    config.model_type)
            error_messages.append(msg)
    elif config.learning_schedule == 'transformer':
        param = spec.lookup('learning_rate')
        assert param is not None
        if param.name in set_by_user:
            msg = '{} cannot be used with \'transformer\' learning ' \
                  'schedule'.format(arg_names_string(param), config.model_type)
            error_messages.append(msg)
    # if optimizer != 'adam' (not currently possible but probably will
    # be in in the future)...
    # Check if user is trying to use the Transformer with features that
    # aren't supported yet.
    if config.model_type == 'transformer':
        if config.factors > 1:
            msg = 'factors are not yet supported for the \'transformer\' ' \
                  'model type'
            error_messages.append(msg)
        if config.softmax_mixture_size > 1:
            msg = 'softmax mixtures are not yet supported for the ' \
                  '\'transformer\' model type'
            error_messages.append(msg)
    # Training datasets: --datasets is the combined alternative to the
    # individual --source_dataset / --target_dataset options.
    if config.datasets:
        if config.source_dataset or config.target_dataset:
            msg = 'argument clash: --datasets is mutually exclusive ' \
                  'with --source_dataset and --target_dataset'
            error_messages.append(msg)
    elif not config.source_dataset:
        msg = '--source_dataset is required'
        error_messages.append(msg)
    elif not config.target_dataset:
        msg = '--target_dataset is required'
        error_messages.append(msg)
    # Validation datasets follow the same combined-vs-individual rule.
    if config.valid_datasets:
        if config.valid_source_dataset or config.valid_target_dataset:
            msg = 'argument clash: --valid_datasets is mutually ' \
                  'exclusive with --valid_source_dataset and ' \
                  '--valid_target_dataset'
            error_messages.append(msg)
    # Factored-input bookkeeping: vocab sizes, per-factor dims,
    # dictionary count must all agree with --factors.
    if (config.source_vocab_sizes is not None and
            len(config.source_vocab_sizes) > config.factors):
        msg = 'too many values supplied to \'--source_vocab_sizes\' option ' \
              '(expected one per factor = {})'.format(config.factors)
        error_messages.append(msg)
    if config.dim_per_factor is None and config.factors != 1:
        msg = 'if using factored input, you must specify \'dim_per_factor\''
        error_messages.append(msg)
    if config.dim_per_factor is not None:
        if len(config.dim_per_factor) != config.factors:
            msg = 'mismatch between \'--factors\' ({0}) and ' \
                  '\'--dim_per_factor\' ({1} entries)'.format(
                      config.factors, len(config.dim_per_factor))
            error_messages.append(msg)
        elif sum(config.dim_per_factor) != config.embedding_size:
            msg = 'mismatch between \'--embedding_size\' ({0}) and ' \
                  '\'--dim_per_factor\' (sums to {1})\''.format(
                      config.embedding_size, sum(config.dim_per_factor))
            error_messages.append(msg)
    if len(config.dictionaries) != config.factors + 1:
        msg = '\'--dictionaries\' must specify one dictionary per source ' \
              'factor and one target dictionary'
        error_messages.append(msg)
    # Batching options that cannot be combined.
    max_sents_param = spec.lookup('max_sentences_per_device')
    max_tokens_param = spec.lookup('max_tokens_per_device')
    # TODO Extend ParameterSpecification to support mutually exclusive
    # command-line args.
    if (max_sents_param.name in set_by_user
            and max_tokens_param.name in set_by_user):
        msg = '{} is mutually exclusive with {}'.format(
            arg_names_string(max_sents_param),
            arg_names_string(max_tokens_param))
        error_messages.append(msg)
    aggregation_param = spec.lookup('gradient_aggregation_steps')
    if (aggregation_param.name in set_by_user
            and (max_sents_param.name in set_by_user
                 or max_tokens_param.name in set_by_user)):
        msg = '{} is mutually exclusive with {} / {}'.format(
            arg_names_string(aggregation_param),
            arg_names_string(max_sents_param),
            arg_names_string(max_tokens_param))
        error_messages.append(msg)
    # softmax_mixture_size and lexical_model are currently mutually exclusive:
    if config.softmax_mixture_size > 1 and config.rnn_lexical_model:
        error_messages.append('behavior of --rnn_lexical_model is undefined if softmax_mixture_size > 1')
    return error_messages
def _derive_model_version(config, meta_config):
    """Resolve the model-format version number.

    New models get the current version (0.2); loaded models keep their
    recorded version; legacy unversioned models become 0.1 (Theano
    models using dropout are rejected).
    """
    if meta_config.from_cmdline:
        # Brand-new model: stamp it with the current version number.
        return 0.2
    version = config.model_version
    if version is None:
        # Legacy model without a recorded version.
        if meta_config.from_theano and config.rnn_use_dropout:
            logging.error('version 0 dropout is not supported in '
                          'TensorFlow Nematus')
            sys.exit(1)
        version = 0.1
    return version
def _derive_target_embedding_size(config, meta_config):
    """Target-side embedding width.

    With tied encoder/decoder embeddings and factored input, the first
    factor's width is used; otherwise the plain embedding size.
    """
    assert hasattr(config, 'embedding_size')
    if config.tie_encoder_decoder_embeddings and config.factors > 1:
        assert hasattr(config, 'dim_per_factor')
        assert config.dim_per_factor is not None
        return config.dim_per_factor[0]
    return config.embedding_size
def _derive_source_dataset(config, meta_config):
    """Source training file: --source_dataset wins, else datasets[0]."""
    if config.source_dataset is None:
        assert config.datasets is not None
        return config.datasets[0]
    return config.source_dataset
def _derive_target_dataset(config, meta_config):
    """Target training file: --target_dataset wins, else datasets[1]."""
    if config.target_dataset is None:
        assert config.datasets is not None
        return config.datasets[1]
    return config.target_dataset
def _derive_source_vocab_sizes(config, meta_config):
    """Determine one vocabulary size per source factor.

    Handles four cases: sizes fully given on the command line; sizes
    partially given (remainder inferred); legacy Theano configs
    ('n_words_src', one shared size); legacy pre-factor configs
    ('source_vocab_size'); or no sizes given at all (all inferred).
    Missing sizes (marked -1) are read from the dictionary files.
    """
    if config.source_vocab_sizes is not None:
        if len(config.source_vocab_sizes) == config.factors:
            # One size per factor was supplied explicitly - use as-is.
            return config.source_vocab_sizes
        else:
            # Fewer sizes than factors: only possible on the command
            # line; pad with -1 placeholders to be inferred below.
            assert meta_config.from_cmdline
            assert len(config.source_vocab_sizes) < config.factors
            num_missing = config.factors - len(config.source_vocab_sizes)
            vocab_sizes = config.source_vocab_sizes + [-1] * num_missing
    elif hasattr(config, 'n_words_src'):
        # Legacy Theano config: this will always contain a single value
        # for the source vocab size regardless of how many factors
        # there are.
        assert not meta_config.from_cmdline
        assert meta_config.from_theano
        assert type(config.n_words_src) == int
        return [config.n_words_src] * config.factors
    elif hasattr(config, 'source_vocab_size'):
        # Legacy config predating factor support: single factor only.
        assert not meta_config.from_cmdline
        assert not meta_config.from_theano
        assert config.factors == 1
        return [config.source_vocab_size]
    else:
        # --source_vocab_size was not given.
        assert meta_config.from_cmdline
        vocab_sizes = [-1] * config.factors
    # For any unspecified vocabulary sizes, determine sizes from the
    # vocabulary dictionaries.
    for i, vocab_size in enumerate(vocab_sizes):
        if vocab_size >= 0:
            continue
        path = config.dictionaries[i]
        vocab_sizes[i] = _determine_vocab_size_from_file(path,
                                                        config.model_type)
    return vocab_sizes
def _derive_target_vocab_size(config, meta_config):
    """Target vocab size; -1 means infer it from the last dictionary."""
    size = config.target_vocab_size
    if size == -1:
        size = _determine_vocab_size_from_file(config.dictionaries[-1],
                                               config.model_type)
    return size
def _derive_dim_per_factor(config, meta_config):
    """Per-factor embedding widths; defaults to one full-width factor."""
    if config.dim_per_factor is None:
        assert config.factors == 1
        return [config.embedding_size]
    return config.dim_per_factor
def _derive_rnn_dropout_embedding(config, meta_config):
    """Embedding dropout: 0.2 for new models, 0.0 for loaded configs."""
    if config.rnn_dropout_embedding is None:
        return 0.2 if meta_config.from_cmdline else 0.0
    return config.rnn_dropout_embedding
def _derive_rnn_dropout_hidden(config, meta_config):
    """Hidden-layer dropout: 0.2 for new models, 0.0 for loaded configs."""
    if config.rnn_dropout_hidden is None:
        return 0.2 if meta_config.from_cmdline else 0.0
    return config.rnn_dropout_hidden
def _derive_valid_source_dataset(config, meta_config):
    """Validation source file, from either CLI spelling (or None)."""
    if config.valid_source_dataset is not None:
        return config.valid_source_dataset
    datasets = config.valid_datasets
    return None if datasets is None else datasets[0]
def _derive_valid_target_dataset(config, meta_config):
    """Validation target file, from either CLI spelling (or None)."""
    if config.valid_target_dataset is not None:
        return config.valid_target_dataset
    datasets = config.valid_datasets
    return None if datasets is None else datasets[1]
def _determine_vocab_size_from_file(path, model_type):
    """Return the vocabulary size implied by a dictionary file.

    Loads the dictionary and returns max(index) + 1.  Logs an error and
    exits with status 1 if the file cannot be read or parsed.
    """
    try:
        d = util.load_dict(path, model_type)
    except IOError as x:
        logging.error('failed to determine vocabulary size from file: '
                      '{}: {}'.format(path, str(x)))
        sys.exit(1)
    except Exception:
        # Bug fix: the original bare 'except:' also swallowed
        # SystemExit and KeyboardInterrupt; catch only real errors.
        logging.error('failed to determine vocabulary size from file: '
                      '{}'.format(path))
        sys.exit(1)
    return max(d.values()) + 1
| true | true |
1c2f97a5470279e0328929549407453c7ff957e3 | 1,115 | py | Python | python/py-collections-deque/main.py | shollingsworth/HackerRank | 2f0e048044e643d6aa9d07c1898f3b00adf489b0 | [
"Apache-2.0"
] | null | null | null | python/py-collections-deque/main.py | shollingsworth/HackerRank | 2f0e048044e643d6aa9d07c1898f3b00adf489b0 | [
"Apache-2.0"
] | null | null | null | python/py-collections-deque/main.py | shollingsworth/HackerRank | 2f0e048044e643d6aa9d07c1898f3b00adf489b0 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import __future__
import sys
import json
def banner():
    # Print the sample input and expected output files between separator
    # bars, then redirect stdin to the sample input file so the
    # raw_input() calls later in this (Python 2) script read from it.
    ban = '====' * 30
    print("{}\nSAMPLE INP:\n{}\n{}".format(ban,ban,open(ip, 'r').read()))
    print("{}\nSAMPLE OUT:\n{}\n{}".format(ban,ban,open(op, 'r').read()))
    print("{}\nSTART:\n{}".format(ban,ban))
    sys.stdin = open(ip, 'r')
cnt = -1  # NOTE(review): not referenced anywhere else in this script; looks like leftover template state
def comp(inp, ln):
    """Raise if str(inp) differs from expected-output line *ln*."""
    expected = output_arr[ln]
    actual = str(inp)
    if actual != expected:
        raise Exception(
            "Error input output: line {}, file: {}\ngot: {} expected: {}"
            .format(ln, op, inp, expected))
ip = "./challenge_sample_input"
op = "./challenge_sample_output"
output_arr = map(str,open(op,'r').read().split('\n'))
banner()
# https://www.hackerrank.com/challenges/py-collections-deque/problem
from collections import deque
d = deque()
cmds = [raw_input() for _ in range(int(raw_input()))]
cmap = {
'append': d.append,
'pop': d.pop,
'popleft': d.popleft,
'appendleft': d.appendleft,
}
for cmd in [i.split() for i in cmds]:
ecmd = cmap[cmd.pop(0)]
if cmd:
v = cmd.pop(0)
ecmd(v)
else:
ecmd()
print(" ".join(map(str,d)))
| 24.23913 | 109 | 0.589238 |
import __future__
import sys
import json
def banner():
ban = '====' * 30
print("{}\nSAMPLE INP:\n{}\n{}".format(ban,ban,open(ip, 'r').read()))
print("{}\nSAMPLE OUT:\n{}\n{}".format(ban,ban,open(op, 'r').read()))
print("{}\nSTART:\n{}".format(ban,ban))
sys.stdin = open(ip, 'r')
cnt = -1
def comp(inp,ln):
outl = output_arr[ln]
if str(inp) != outl:
raise Exception("Error input output: line {}, file: {}\ngot: {} expected: {}".format(ln,op,inp,outl))
ip = "./challenge_sample_input"
op = "./challenge_sample_output"
output_arr = map(str,open(op,'r').read().split('\n'))
banner()
from collections import deque
d = deque()
cmds = [raw_input() for _ in range(int(raw_input()))]
cmap = {
'append': d.append,
'pop': d.pop,
'popleft': d.popleft,
'appendleft': d.appendleft,
}
for cmd in [i.split() for i in cmds]:
ecmd = cmap[cmd.pop(0)]
if cmd:
v = cmd.pop(0)
ecmd(v)
else:
ecmd()
print(" ".join(map(str,d)))
| true | true |
1c2f982c7eb9f52ecf729babd04a09ac77352ab2 | 1,413 | py | Python | images/tests.py | JeremyKimotho/gallery | ea90263a8b54f13b932b6cddf8bdac19f3b308a2 | [
"MIT"
] | null | null | null | images/tests.py | JeremyKimotho/gallery | ea90263a8b54f13b932b6cddf8bdac19f3b308a2 | [
"MIT"
] | null | null | null | images/tests.py | JeremyKimotho/gallery | ea90263a8b54f13b932b6cddf8bdac19f3b308a2 | [
"MIT"
] | null | null | null | from django.test import TestCase
from .models import Category, Location, Image
import datetime as dt
class ImageTestClass(TestCase):
    """Exercise the save/delete/retrieval helpers on the Image model."""

    def setUp(self):
        # Single unsaved Image fixture shared by the tests below.
        self.test_image = Image(image_name='Jeremy',
                                image_description='Jeremy is a Beast')

    def tearDown(self):
        # Remove anything a test persisted so cases stay independent.
        Image.objects.all().delete()

    def test_instance(self):
        self.assertTrue(isinstance(self.test_image, Image))

    def test_save_instance(self):
        self.test_image.save_image()
        self.assertTrue(len(Image.objects.all()) > 0)

    def test_delete_instance(self):
        self.test_image.save_image()
        self.test_image.del_image()
        self.assertTrue(len(Image.objects.all()) == 0)

    def test_get_images(self):
        self.test_image.save_image()
        self.assertTrue(len(Image.get_image(1)) > 0)

    def test_get_all_images(self):
        self.test_image.save_image()
        self.assertTrue(len(Image.all_images()) > 0)
class CategoryTestClasss(TestCase):
    """Exercise the save/delete helpers on the Category model.

    NOTE: the trailing 'ss' in the class name is an existing typo kept
    unchanged because the class name is how the test runner selects it.
    """

    def setUp(self):
        self.test_category = Category(category='Jeremy')

    def test_instance(self):
        self.assertTrue(isinstance(self.test_category, Category))

    def test_save_instance(self):
        self.test_category.save_category()
        self.assertTrue(len(Category.objects.all()) > 0)

    def test_delete_instance(self):
        self.test_category.save_category()
        self.test_category.del_category()
        self.assertTrue(len(Category.objects.all()) == 0)
| 25.690909 | 83 | 0.709837 | from django.test import TestCase
from .models import Category, Location, Image
import datetime as dt
class ImageTestClass(TestCase):
def setUp(self):
self.jeremy = Image(image_name='Jeremy', image_description='Jeremy is a Beast')
def test_instance(self):
self.assertTrue(isinstance(self.jeremy, Image))
def test_save_instance(self):
self.jeremy.save_image()
images = Image.objects.all()
self.assertTrue(len(images)>0)
def test_delete_instance(self):
self.jeremy.save_image()
self.jeremy.del_image()
images = Image.objects.all()
self.assertTrue(len(images) == 0)
def test_get_images(self):
self.jeremy.save_image()
images = Image.get_image(1)
self.assertTrue(len(images)>0)
def test_get_all_images(self):
self.jeremy.save_image()
images = Image.all_images()
self.assertTrue(len(images)>0)
def tearDown(self):
Image.objects.all().delete()
class CategoryTestClasss(TestCase):
def setUp(self):
self.jeremy = Category(category='Jeremy')
def test_instance(self):
self.assertTrue(isinstance(self.jeremy, Category))
def test_save_instance(self):
self.jeremy.save_category()
cats = Category.objects.all()
self.assertTrue(len(cats)>0)
def test_delete_instance(self):
self.jeremy.save_category()
self.jeremy.del_category()
cats = Category.objects.all()
self.assertTrue(len(cats) == 0)
| true | true |
1c2f9886c30209c8f8c18348757a2729fc8d5b30 | 1,832 | py | Python | sdk/applicationinsights/azure-mgmt-applicationinsights/azure/mgmt/applicationinsights/v2015_05_01/models/_application_insights_management_client_enums.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
] | 8 | 2021-01-13T23:44:08.000Z | 2021-03-17T10:13:36.000Z | sdk/applicationinsights/azure-mgmt-applicationinsights/azure/mgmt/applicationinsights/v2015_05_01/models/_application_insights_management_client_enums.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
] | 226 | 2019-07-24T07:57:21.000Z | 2019-10-15T01:07:24.000Z | sdk/applicationinsights/azure-mgmt-applicationinsights/azure/mgmt/applicationinsights/v2015_05_01/models/_application_insights_management_client_enums.py | iscai-msft/azure-sdk-for-python | 83715b95c41e519d5be7f1180195e2fba136fc0f | [
"MIT"
] | 2 | 2020-05-21T22:51:22.000Z | 2020-05-26T20:53:01.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from enum import Enum
# String enums mirroring the values accepted/returned by the Application
# Insights management REST API (file header notes this module is
# generated by AutoRest; member names and values must not be changed).
class ApplicationType(str, Enum):
    """Type of application being monitored."""

    web = "web"
    other = "other"


class FlowType(str, Enum):
    """Flow type of the component resource."""

    bluefield = "Bluefield"


class RequestSource(str, Enum):
    """Source of the component-creation request."""

    rest = "rest"


class PurgeState(str, Enum):
    """Status of a data-purge operation."""

    pending = "pending"
    completed = "completed"


class FavoriteType(str, Enum):
    """Visibility of a favorite: shared with others or private."""

    shared = "shared"
    user = "user"


class WebTestKind(str, Enum):
    """Kind of availability web test."""

    ping = "ping"
    multistep = "multistep"


class ItemScope(str, Enum):
    """Visibility scope of an analytics item."""

    shared = "shared"
    user = "user"


class ItemType(str, Enum):
    """Type of an analytics item."""

    query = "query"
    function = "function"
    folder = "folder"
    recent = "recent"


class SharedTypeKind(str, Enum):
    """Scope kind used when sharing items."""

    user = "user"
    shared = "shared"


class FavoriteSourceType(str, Enum):
    """Tool/view a favorite originates from."""

    retention = "retention"
    notebook = "notebook"
    sessions = "sessions"
    events = "events"
    userflows = "userflows"
    funnel = "funnel"
    impact = "impact"
    segmentation = "segmentation"


class ItemScopePath(str, Enum):
    """Item-collection selector: shared vs. personal analytics items."""

    analytics_items = "analyticsItems"
    myanalytics_items = "myanalyticsItems"


class ItemTypeParameter(str, Enum):
    """Filter value when listing analytics items by type."""

    none = "none"
    query = "query"
    function = "function"
    folder = "folder"
    recent = "recent"


class CategoryType(str, Enum):
    """Category of a workbook resource."""

    workbook = "workbook"
    tsg = "TSG"
    performance = "performance"
    retention = "retention"
| 17.960784 | 76 | 0.600437 |
from enum import Enum
class ApplicationType(str, Enum):
web = "web"
other = "other"
class FlowType(str, Enum):
bluefield = "Bluefield"
class RequestSource(str, Enum):
rest = "rest"
class PurgeState(str, Enum):
pending = "pending"
completed = "completed"
class FavoriteType(str, Enum):
shared = "shared"
user = "user"
class WebTestKind(str, Enum):
ping = "ping"
multistep = "multistep"
class ItemScope(str, Enum):
shared = "shared"
user = "user"
class ItemType(str, Enum):
query = "query"
function = "function"
folder = "folder"
recent = "recent"
class SharedTypeKind(str, Enum):
user = "user"
shared = "shared"
class FavoriteSourceType(str, Enum):
retention = "retention"
notebook = "notebook"
sessions = "sessions"
events = "events"
userflows = "userflows"
funnel = "funnel"
impact = "impact"
segmentation = "segmentation"
class ItemScopePath(str, Enum):
analytics_items = "analyticsItems"
myanalytics_items = "myanalyticsItems"
class ItemTypeParameter(str, Enum):
none = "none"
query = "query"
function = "function"
folder = "folder"
recent = "recent"
class CategoryType(str, Enum):
workbook = "workbook"
tsg = "TSG"
performance = "performance"
retention = "retention"
| true | true |
1c2f98d2bb8b898807a6dbecf9de37ee18febfab | 350 | py | Python | app/users/apps.py | guilloulouis/stage_medecine | 7ec9067402e510d812a375bbfe46f2ab545587f9 | [
"MIT"
] | null | null | null | app/users/apps.py | guilloulouis/stage_medecine | 7ec9067402e510d812a375bbfe46f2ab545587f9 | [
"MIT"
] | null | null | null | app/users/apps.py | guilloulouis/stage_medecine | 7ec9067402e510d812a375bbfe46f2ab545587f9 | [
"MIT"
] | 1 | 2021-04-30T16:38:19.000Z | 2021-04-30T16:38:19.000Z | from django.apps import AppConfig
class UsersConfig(AppConfig):
name = 'users'
def ready(self):
from django.db.models.signals import m2m_changed
from users.models import Student
from users.signals import stage_added_to_student
m2m_changed.connect(stage_added_to_student, sender=Student.stages_done.through)
| 29.166667 | 87 | 0.742857 | from django.apps import AppConfig
class UsersConfig(AppConfig):
name = 'users'
def ready(self):
from django.db.models.signals import m2m_changed
from users.models import Student
from users.signals import stage_added_to_student
m2m_changed.connect(stage_added_to_student, sender=Student.stages_done.through)
| true | true |
1c2f98f9a5e0784aa2e43010aa4f0ee77764a093 | 41,857 | py | Python | ansys/mapdl/core/_commands/apdl/parameter_definition.py | JourneyG/pymapdl | 23fdc008c151c0546504e4ef8257a64f5f169100 | [
"MIT"
] | 1 | 2021-07-28T00:42:53.000Z | 2021-07-28T00:42:53.000Z | ansys/mapdl/core/_commands/apdl/parameter_definition.py | JourneyG/pymapdl | 23fdc008c151c0546504e4ef8257a64f5f169100 | [
"MIT"
] | null | null | null | ansys/mapdl/core/_commands/apdl/parameter_definition.py | JourneyG/pymapdl | 23fdc008c151c0546504e4ef8257a64f5f169100 | [
"MIT"
] | null | null | null | import os
class ParameterDefinition:
def afun(self, lab="", **kwargs):
"""Specifies units for angular functions in parameter expressions.
APDL Command: *AFUN
Parameters
----------
lab
Specifies the units to be used:
Use radians for input and output of parameter angular functions (default). - Use degrees for input and output of parameter angular functions.
Notes
-----
Only the SIN, COS, TAN, ASIN, ACOS, ATAN, ATAN2, ANGLEK, and ANGLEN
functions [*SET, *VFUN] are affected by this command.
"""
command = f"*AFUN,{lab}"
return self.run(command, **kwargs)
def dim(self, par="", type_="", imax="", jmax="", kmax="", var1="", var2="",
var3="", csysid="", **kwargs):
"""Defines an array parameter and its dimensions.
APDL Command: *DIM
Parameters
----------
par
Name of parameter to be dimensioned. See *SET for name
restrictions.
type\_
Array type:
Arrays are similar to standard FORTRAN arrays (indices are integers) (default). Index numbers for the rows, columns, and planes are sequential values beginning with one. Used for 1-, 2-, or 3-D arrays. - Same as ARRAY, but used to specify 4-D arrays.
Same as ARRAY, but used to specify 5-D arrays. - Array entries are character strings (up to 8 characters each). Index numbers
for rows, columns, and planes are sequential
values beginning with one.
Array indices are real (non-integer) numbers which must be defined when filling the table. Index numbers for the rows and columns are stored in the zero column and row "array elements" and are initially assigned a near-zero value. Index numbers must be in ascending order and are used only for retrieving an array element. When retrieving an array element with a real index that does not match a specified index, linear interpolation is done among the nearest indices and the corresponding array element values [*SET]. Used for 1-, 2-, or 3-D tables. - Same as TABLE, but used to specify 4-D tables.
Same as TABLE, but used to specify 5-D tables. - Array entries are character strings (up to IMAX each). Index numbers for
columns and planes are sequential values
beginning with 1. Row index is character position
in string.
imax
Extent of first dimension (row). (For Type = STRING, IMAX is
rounded up to the next multiple of eight and has a limit of 248).
Defaults to 1.
jmax
Extent of second dimension (column). Defaults to 1.
kmax
Extent of third dimension (plane). Defaults to 1.
var1
Variable name corresponding to the first dimension (row) for Type =
TABLE. Defaults to Row.
var2
Variable name corresponding to the second dimension (column) for
Type = TABLE. Defaults to Column.
var3
Variable name corresponding to the third dimension (plane) for Type
= TABLE. Defaults to Plane.
csysid
An integer corresponding to the coordinate system ID Number.
Notes
-----
Up to three dimensions (row, column, and plane) may be defined using
ARRAY and TABLE. Use ARR4, ARR5, TAB4, and TAB5 to define up to five
dimensions (row, column, plane, book, and shelf). An index number is
associated with each row, column, and plane. For array and table type
parameters, element values are initialized to zero. For character and
string parameters, element values are initialized to (blank). A
defined parameter must be deleted [*SET] before its dimensions can be
changed. Scalar (single valued) parameters should not be dimensioned.
*DIM,A,,3 defines a vector array with elements A(1), A(2), and A(3).
*DIM,B,,2,3 defines a 2x3 array with elements B(1,1), B(2,1), B(1,2),
B(2,2), B(1,3), and B(2,3). Use *STATUS,Par to display elements of
array Par. You can write formatted data files (tabular formatting) from
data held in arrays through the *VWRITE command.
If you use table parameters to define boundary conditions, then Var1,
Var2, and/or Var3 can either specify a primary variable (listed in
Table: 130:: *DIM - Primary Variables) or can be an independent
parameter. If specifying an independent parameter, then you must
define an additional table for the independent parameter. The
additional table must have the same name as the independent parameter
and may be a function of one or more primary variables or another
independent parameter. All independent parameters must relate to a
primary variable.
Tabular load arrays can be defined in both global Cartesian (default)
or local (see below) coordinate systems by specifying CSYSID, as
defined in LOCAL. For batch operations, you must specify your
coordinate system first.
The following constraints apply when you specify a local coordinate
system for your tabular loads:
If you are specifying a 4- or 5-D array or table, four additional
fields (LMAX, MMAX, Var4, and Var5) are available. Thus, for a 4-D
table, the command syntax would be:
For a 5-D table, the command syntax would be:
You cannot create or edit 4- or 5-D arrays or tables using the GUI.
See Array Parameters for a detailed discussion on and examples for
using array parameters.
Table: 130:: : *DIM - Primary Variables
Specify PRESSURE as the independent variable (not PRES).
The X, Y, and Z coordinate locations listed above are valid in global
Cartesian, or local (Cartesian, cylindrical and spherical) coordinate
systems. The VELOCITY label is applicable only to the calculated fluid
velocity in element FLUID116.
When using PRESSURE as a primary variable, the underlying element must
have the pressure DOF associated with it, or it must be a supported
contact element.
The gap/penetration label (GAP) is only used for defining certain
contact element real constants.
The frequency label (FREQ) is valid for harmonic analyses only.
The OMEGS, ECCENT, and THETA primary variables only apply to the
COMBI214 element. The amplitude of the rotational velocity (OMEGS) is
an absolute value, so only positive values of OMEGS are valid. The
eccentricity (ECCENT) and phase shift (THETA) labels are only valid for
nonlinear analyses.
If you use table parameters to define boundary conditions, the table
names (Par) must not exceed 32 characters.
In thermal analyses, if you apply tabular loads as a function of
temperature but the rest of the model is linear (e.g., includes no
temperature-dependent material properties or radiation ), you should
turn on Newton-Raphson iterations (NROPT,FULL) to evaluate the
temperature-dependent tabular boundary conditions correctly.
This command is valid in any processor.
"""
command = f"*DIM,{par},{type_},{imax},{jmax},{kmax},{var1},{var2},{var3},{csysid}"
return self.run(command, **kwargs)
def get(self, par="", entity="", entnum="", item1="", it1num="", item2="",
it2num="", **kwargs):
"""Retrieves a value and stores it as a scalar parameter or part of an
APDL Command: *GET
array parameter.
Parameters
----------
par
The name of the resulting parameter. See *SET for name
restrictions.
entity
Entity keyword. Valid keywords are NODE, ELEM, KP, LINE, AREA,
VOLU, PDS, etc., as shown for Entity = in the tables below.
entnum
The number or label for the entity (as shown for ENTNUM = in the
tables below). In some cases, a zero (or blank) ENTNUM represents
all entities of the set.
item1
The name of a particular item for the given entity. Valid items are
as shown in the Item1 columns of the tables below.
it1num
The number (or label) for the specified Item1 (if any). Valid
IT1NUM values are as shown in the IT1NUM columns of the tables
below. Some Item1 labels do not require an IT1NUM value.
item2, it2num
A second set of item labels and numbers to further qualify the item
for which data are to be retrieved. Most items do not require this
level of information.
Notes
-----
*GET retrieves a value for a specified item and stores the value as a
scalar parameter, or as a value in a user-named array parameter. An
item is identified by various keyword, label, and number combinations.
Usage is similar to the *SET command except that the parameter values
are retrieved from previously input or calculated results. For example,
*GET,A,ELEM,5,CENT,X returns the centroid x-location of element 5 and
stores the result as parameter A. *GET command operations, along with
the associated Get functions return values in the active coordinate
system unless stated otherwise. A Get function is an alternative in-
line function that can be used to retrieve a value instead of the *GET
command (see Using In-line Get Functions for more information).
Both *GET and *VGET retrieve information from the active data stored in
memory. The database is often the source, and sometimes the information
is retrieved from common memory blocks that the program uses to
manipulate information. Although POST1 and POST26 operations use a
*.rst file, *GET data is accessed from the database or from the common
blocks. Get operations do not access the *.rst file directly. For
repeated gets of sequential items, such as from a series of elements,
see the *VGET command.
Most items are stored in the database after they are calculated and are
available anytime thereafter. Items are grouped according to where they
are usually first defined or calculated. Preprocessing data will often
not reflect the calculated values generated from section data. Do not
use *GET to obtain data from elements that use calculated section data,
such as beams or shells. Most of the general items listed below are
available from all modules. Each of the sections for accessing *GET
parameters are shown in the following order:
*GET General Entity Items
*GET Preprocessing Entity Items
*GET Solution Entity Items
*GET Postprocessing Entity Items
*GET Probabilistic Design Entity Items
The *GET command is valid in any processor.
"""
command = f"*GET,{par},{entity},{entnum},{item1},{it1num},{item2},{it2num}"
return self.run(command, **kwargs)
def inquire(self, func):
"""Returns system information.
Parameters
----------
func : str
Specifies the type of system information returned. See the
notes section for more information.
Returns
-------
value : str
Value of the inquired item.
Notes
-----
Allowable func entries
LOGIN - Returns the pathname of the login directory on Linux
systems or the pathname of the default directory (including
drive letter) on Windows systems.
- ``DOCU`` - Pathname of the ANSYS docu directory.
- ``APDL`` - Pathname of the ANSYS APDL directory.
- ``PROG`` - Pathname of the ANSYS executable directory.
- ``AUTH`` - Pathname of the directory in which the license file resides.
- ``USER`` - Name of the user currently logged-in.
- ``DIRECTORY`` - Pathname of the current directory.
- ``JOBNAME`` - Current Jobname.
- ``RSTDIR`` - Result file directory
- ``RSTFILE`` - Result file name
- ``RSTEXT`` - Result file extension
- ``OUTPUT`` - Current output file name
Examples
--------
Return the job name
>>> mapdl.inquire('JOBNAME')
file
Return the result file name
>>> mapdl.inquire('RSTFILE')
'file.rst'
"""
response = self.run(f'/INQUIRE,,{func}', mute=False)
if '=' in response:
return response.split('=')[1].strip()
return ''
def parres(self, lab="", fname="", ext="", **kwargs):
"""Reads parameters from a file.
APDL Command: PARRES
Parameters
----------
lab
Read operation.
NEW - Replace current parameter set with these parameters (default).
CHANGE - Extend current parameter set with these
parameters, replacing any that already exist.
fname
File name and directory path (248 characters maximum,
including the characters needed for the directory path).
An unspecified directory path defaults to the working
directory; in this case, you can use all 248 characters
for the file name.
The file name defaults to Jobname.
ext
Filename extension (eight-character maximum). The
extension defaults to PARM if Fname is blank.
Examples
--------
Read a local parameter file.
>>> mapdl.parres('parm.PARM')
Notes
-----
Reads parameters from a coded file. The parameter file may
have been written with the PARSAV command. The parameters
read may replace or change the current parameter set.
This command is valid in any processor.
"""
if ext:
fname = fname + '.' + ext
elif not fname:
fname = '.' + 'PARM'
if 'Grpc' in self.__class__.__name__: # grpc mode
if self._local:
if not os.path.isfile(fname):
raise FileNotFoundError(
'Unable to locate filename "%s"' % fname)
if not os.path.dirname(fname):
filename = os.path.join(os.getcwd(), fname)
else:
filename = fname
else:
if not os.path.dirname(fname):
# might be trying to run a local file. Check if the
# file exists remotely.
if fname not in self.list_files():
self.upload(fname, progress_bar=False)
else:
self.upload(fname, progress_bar=False)
filename = os.path.basename(fname)
else:
filename = fname
return self.input(filename)
def parsav(self, lab="", fname="", ext="", **kwargs):
"""Writes parameters to a file.
APDL Command: PARSAV
Parameters
----------
lab
Write operation:
- ``'SCALAR'`` : Write only scalar parameters (default).
- ``'ALL'`` : Write scalar and array parameters.
Parameters may be numeric or alphanumeric.
fname
File name and directory path (248 characters maximum,
including the characters needed for the directory path).
An unspecified directory path defaults to the working
directory; in this case, you can use all 248 characters
for the file name.
ext
Filename extension (eight-character maximum).
Notes
-----
Writes the current parameters to a coded file. Previous
parameters on this file, if any, will be overwritten. The
parameter file may be read with the PARRES command.
PARSAV/PARRES operations truncate some long decimal strings,
and can cause differing values in your solution data when
other operations are performed. A good practice is to limit
the number of decimal places you will use before and after
these operations.
This command is valid in any processor.
"""
command = f"PARSAV,{lab},{fname},{ext}"
return self.run(command, **kwargs)
def starset(self, par="", value="", val2="", val3="", val4="", val5="",
val6="", val7="", val8="", val9="", val10="", **kwargs):
"""Assigns values to user-named parameters.
APDL Command: *SET
Parameters
----------
par
An alphanumeric name used to identify this parameter. Par may be
up to 32 characters, beginning with a letter and containing only
letters, numbers, and underscores. Examples: ABC A3X TOP_END.
ANSYS command names, function names, label names, component and
assembly names, etc., should not be used. Parameter names
beginning with an underscore (e.g., _LOOP) are reserved for use by
ANSYS and should be avoided. Parameter names ending in an
underscore are not listed by the *STATUS command. Array parameter
names must be followed by a subscript, and the entire expression
must be 32 characters or less. Examples: A(1,1) NEW_VAL(3,2,5)
RESULT(1000). There is no character parameter substitution for the
Par field. Table parameters that are used in command fields (where
constant values are normally given) are limited to 32 characters.
value
Numerical value or alphanumeric character string (up to 32
characters enclosed in single quotes) to be assigned to this
parameter. Examples: A(1,3)=7.4 B='ABC3'. May also be a
parameter or a parametric expression. Examples: C=A(1,3)
A(2,2)=(C+4)/2. If blank, delete this parameter. Example: A=
deletes parameter A.
val2, val3, val4, val5, val6, val7, val8, val9, val10
If Par is an array parameter, values VAL2 through VAL10 (up to the
last nonblank value) are sequentially assigned to the succeeding
array elements of the column. Example: *SET,A(1,4),10,11 assigns
A(1,4)=10, A(2,4)=11. *SET,B(2,3),'file10','file11' assigns
B(2,3)='file10', B(3,3)='file11'.
Notes
-----
Assigns values to user-named parameters that may be substituted later
in the run. The equivalent (and recommended) format is
Par = VALUE,VAL2,VAL3, . . . , VAL10
which may be used in place of *SET,Par, : ... for convenience.
This command is valid in any processor.
Parameters (numeric or character) may be scalars (single valued) or
arrays (multiple valued in one, two, or three dimensions). An unlimited
number of parameter names may be defined in any ANSYS run. For very
large numbers of parameters, it is most efficient to define them in
alphabetical order.
Parameter values may be redefined at any time. Array parameters may
also be assigned values within a do-loop [*DO] for convenience.
Internally programmed do-loop commands are also available with the *VXX
commands (*VFILL). Parameter values (except for parameters ending in
an underscore) may be listed with the *STATUS command, displayed with
the *VPLOT command (numeric parameters only), and modified with the
*VEDIT command (numeric parameters only).
Older ANSYS-supplied macro files may use parameter names that do not
begin with an underscore. Using these macros embedded in your own
macros may cause conflicts if the same parameter names are used.
Parameters can also be resolved in comments created by the /COM command
(see /COM for complete documentation). A parameter can be deleted by
redefining it with a blank VALUE. If the parameter is an array, the
entire array is deleted. Parameters may also be defined by a response
to a query with the *ASK command or from an "ANSYS-supplied" value
with the *GET command.
Array parameters must be dimensioned [*DIM] before being assigned
values unless they are the result of an array operation or defined
using the implied loop convention. Scalar parameters that are not
defined are initialized to a "near" zero value. Numeric array
parameters are initialized to zero when dimensioned, and character
array parameters are initialized to blank. An existing array parameter
must be deleted before it can be redimensioned. Array parameter names
must be followed by a subscript list (enclosed in parentheses)
identifying the element of the array. The subscript list may have one,
two, or three values (separated by commas). Typical array parameter
elements are A(1,1), NEW_VAL(3,2,5), RESULT(1000). Subscripts for
defining an array element must be integers (or parameter expressions
that evaluate to integers). Non-integer values are rounded to the
nearest integer value. All array parameters are stored as 3-D arrays
with the unspecified dimensions set to 1. For example, the 4th array
element of a 1-dimensional array, A(4), is stored as array element
A(4,1,1). Arrays are patterned after standard FORTRAN conventions.
If the parameter name Par is input in a numeric argument of a command,
the numeric value of the parameter (as assigned with *SET, *GET, =,
etc.) is substituted into the command at that point. Substitution
occurs only if the parameter name is used between blanks, commas,
parentheses, or arithmetic operators (or any combination) in a numeric
argument. Substitution can be prevented by enclosing the parameter
name Par within single quotes ( ' ), if the parameter is alone in the
argument; if the parameter is part of an arithmetic expression, the
entire expression must be enclosed within single quotes to prevent
substitution. In either case the character string will be used instead
of the numeric value (and the string will be taken as 0.0 if it is in a
numeric argument).
A forced substitution is available in the text fields of the /TITLE,
/STITLE, /TLABEL, /AN3D, /SYP (ARG1--ARG8), and *ABBR commands by
enclosing the parameter within percent (%) signs. Also, parameter
substitution may be forced within the file name or extension fields of
commands having these fields by enclosing the parameter within percent
(%) signs. Array parameters [*DIM] must include a subscript (within
parentheses) to identify the array element whose value is to be
substituted, such as A(1,3). Out-of-range subscripts result in an
error message. Non-integer subscripts are allowed when identifying a
TABLE array element for substitution. A proportional linear
interpolation of values among the nearest array elements is performed
before substitution. Interpolation is done in all three dimensions.
Note:: : Interpolation is based upon the assigned index numbers which
must be defined when the table is filled [*DIM].
Most alphanumeric arguments permit the use of character parameter
substitution. When the parameter name Par input, the alphanumeric
value of the parameter is substituted into the command at that point.
Substitution can be suppressed by enclosing the parameter name within
single quotes ( ' ). Forced substitution is available in some fields
by enclosing the parameter name within percent (%) signs. Valid forced
substitution fields include command name fields, Fname (filename) or
Ext (extension) arguments, *ABBR command (Abbr arguments), /TITLE and
/STITLE commands (Title argument) and /TLABEL command (Text argument).
Character parameter substitution is also available in the *ASK, /AN3D,
*CFWRITE, *IF, *ELSEIF, *MSG, *SET, *USE, *VREAD, and *VWRITE
commands. Character array parameters must include a subscript (within
parentheses) to identify the array element whose value is to be
substituted.
If a parameter operation expression is input in a numeric argument, the
numeric value of the expression is substituted into the command at that
point. Allowable operation expressions are of the form
E1oE2oE3: ...oE10
where E1, E2, etc. are expressions connected by operators (o). The
allowable operations (o) are
``+ - * / ** < >``
For example, ``A+B**C/D*E`` is a valid operation expression. The ``*``
represents multiplication and the ``**`` represents exponentiation.
"""
command = f"*SET,{par},{value},{val2},{val3},{val4},{val5},{val6},{val7},{val8},{val9},{val10}"
return self.run(command, **kwargs)
def starvget(self, parr="", entity="", entnum="", item1="", it1num="",
item2="", it2num="", kloop="", **kwargs):
"""Retrieves values and stores them into an array parameter.
APDL Command: *VGET
Parameters
----------
parr
The name of the resulting vector array parameter. See *SET for
name restrictions.
entity
Entity keyword. Valid keywords are NODE, ELEM, KP, LINE, AREA,
VOLU, etc. as shown for Entity = in the tables below.
entnum
The number of the entity (as shown for ENTNUM = in the tables
below).
item1
The name of a particular item for the given entity. Valid items
are as shown in the Item1 columns of the tables below.
it1num
The number (or label) for the specified Item1 (if any). Valid
IT1NUM values are as shown in the IT1NUM columns of the tables
below. Some Item1 labels do not require an IT1NUM value.
item2, it2num
A second set of item labels and numbers to further qualify the item
for which data is to be retrieved. Most items do not require this
level of information.
kloop
Field to be looped on:
``0`` or ``2`` : Loop on the ENTNUM field (default).
``3`` : Loop on the Item1 field.
``4`` : Loop on the IT1NUM field. Successive items are as shown with IT1NUM.
``5`` : Loop on the Item2 field.
``6`` : Loop on the IT2NUM field. Successive items are as shown with IT2NUM.
Notes
-----
Retrieves values for specified items and stores the values in an output
vector of a user-named array parameter according to:
``ParR = f(Entity, ENTNUM, Item1, IT1NUM, Item2, IT2NUM)``
where (f) is the *GET function; Entity, Item1, and Item2 are keywords;
and ENTNUM, IT1NUM, and IT2NUM are numbers or labels corresponding to
the keywords. Looping continues over successive entity numbers (ENTNUM)
for the KLOOP default. For example, *VGET,A(1),ELEM,5,CENT,X returns
the centroid x-location of element 5 and stores the result in the first
location of A. Retrieving continues with element 6, 7, 8, etc.,
regardless of whether the element exists or is selected, until
successive array locations are filled. Use *VLEN or *VMASK to skip
locations. Absolute values and scale factors may be applied to the
result parameter [*VABS, *VFACT]. Results may be cumulative [*VCUM].
See the *VOPER command for general details. Results can be put back
into an analysis by writing a file of the desired input commands with
the *VWRITE command. See also the *VPUT command.
Both *GET and *VGET retrieve information from the active data stored in
memory. The database is often the source, and sometimes the information
is retrieved from common memory blocks that ANSYS uses to manipulate
information. Although POST1 and POST26 operations use a *.rst file, GET
data is accessed from the database or from the common blocks. Get
operations do not access the *.rst file directly.
The *VGET command retrieves both the unprocessed real and the imaginary
parts (original and duplicate sector nodes and elements) of a cyclic
symmetry solution.
For each of the sections for accessing *VGET parameters see:
https://www.mm.bme.hu/~gyebro/files/ans_help_v182/ans_cmd/Hlp_C_VGET_st.html
This command is valid in any processor.
"""
command = f"*VGET,{parr},{entity},{entnum},{item1},{it1num},{item2},{it2num},{kloop}"
return self.run(command, **kwargs)
def taxis(self, parmloc="", naxis="", val1="", val2="", val3="", val4="",
val5="", val6="", val7="", val8="", val9="", val10="", **kwargs):
"""Defines table index numbers.
APDL Command: *TAXIS
Parameters
----------
parmloc
Name and starting location in the table array parameter for
indexing. Indexing occurs along the axis defined with nAxis.
naxis
Axis along which indexing occurs. Valid labels are:
Corresponds to Row. Default. - Corresponds to Column.
Corresponds to Plane. - Corresponds to Book.
Corresponds to Shelf. - Lists all index numbers. Valid only if Val1 = LIST.
val1, val2, val3, . . . , val10
Values of the index numbers for the axis nAxis, starting from the
table array parameter location ParmLoc. You can define up to ten
values.
Notes
-----
*TAXIS is a convenient method to define table index values. These
values reside in the zero column, row, etc. Instead of filling values
in these zero location spots, use the *TAXIS command. For example,
would fill index values 1.0, 2.2, 3.5, 4.7, and 5.9 in nAxis 2 (column
location), starting at location 4.
To list index numbers, issue *TAXIS,ParmLoc, nAxis, LIST, where nAxis =
1 through 5 or ALL.
"""
command = f"*TAXIS,{parmloc},{naxis},{val1},{val2},{val3},{val4},{val5},{val6},{val7},{val8},{val9},{val10}"
return self.run(command, **kwargs)
def tread(self, par="", fname="", ext="", nskip="", **kwargs):
"""Reads data from an external file into a table array parameter.
APDL Command: *TREAD
Parameters
----------
par
Table array parameter name as defined by the *DIM command.
fname
File name and directory path (248 characters maximum, including the
characters needed for the directory path). An unspecified
directory path defaults to the working directory; in this case, you
can use all 248 characters for the file name.
ext
Filename extension (eight-character maximum).
nskip
Number of comment lines at the beginning of the file being read
that will be skipped during the reading. Default = 0.
Notes
-----
Use this command to read in a table of data from an external file into
an ANSYS table array parameter. The external file may be created using
a text editor or by an external application or program. The external
file must be in tab-delimited, blank-delimited, or comma-delimited
format to be used by *TREAD. The ANSYS TABLE type array parameter must
be defined before you can read in an external file. See *DIM for more
information.
This command is not applicable to 4- or 5-D tables.
"""
command = f"*TREAD,{par},{fname},{ext},,{nskip}"
return self.run(command, **kwargs)
def vfill(self, parr="", func="", con1="", con2="", con3="", con4="",
con5="", con6="", con7="", con8="", con9="", con10="", **kwargs):
"""Fills an array parameter.
APDL Command: *VFILL
Parameters
----------
parr
The name of the resulting numeric array parameter vector. See *SET
for name restrictions.
func
Fill function:
DATA - Assign specified values CON1, CON2, etc. to successive
array elements. Up to 10 assignments may be made at a
time. Any CON values after a blank CON value are
ignored. - Assign ramp function values: CON1+((n-1)*CON2)
, where n is the loop number [*VLEN]. To specify a
constant function (no ramp), set CON2 to zero.
RAMP - Assign random number values based on a uniform
distribution RAND(CON1,CON2), where: - Assign random
sample of Gaussian distributions GDIS(CON1,CON2).
RAND - Assign random number values based on a uniform
distribution RAND(CON1,CON2), where CON1 is the lower
bound (defaults to 0.0) and CON2 is the upper bound
(defaults to 1.0)
GDIS - Assign random sample of Gaussian distributions
GDIS(CON1,CON2) where CON1 is the mean (defaults to 0.0),
and CON2 is the standard deviation (defaults to 1.0)
TRIA - Assigns random number values based on a triangular
distribution TRIA(CON1,CON2,CON3) where CON1 is the lower
bound (defaults to 0.0), CON2 is the location of the peak
value (CON1 ≤ CON2 ≤CON3; CON2 defaults to 0 if CON1 ≤ 0 ≤
CON3, CON1 if 0 ≤ CON1, or CON3 if CON3 ≤ 0), and CON3 is
the upper bound (defaults to 1.0 + CON1 if CON1 ≥ 0 or 0.0
if CON1 ≤ 0)
BETA - Assigns random number values based on a beta
distribution BETA(CON1,CON2,CON3,CON4) where: CON1 is the
lower bound (defaults to 0.0), CON2 is the upper bound
(defaults to 1.0 + CON1 if CON1 ≥ 0 or 0.0 if CON1 ≤ 0), and CON3
and CON4 are the alpha and beta parameters, respectively,
of the beta function. Alpha and beta must both be
positive; they default to 1.0.
GAMM - Assigns random number values based on a gamma
distribution: GAMM(CON1,CON2,CON3) where: CON1 is the
lower bound (defaults to 0.0), CON2 and CON3 are the alpha
and beta parameters, respectively, of the gamma
function. Alpha and beta must both be positive; they
default to 1.0.
RIGID - Generates the rigid body modes with respect to the
reference point coordinates (CON1, CON2, CON3). The
dimensions of the array parameter ParR are (dim1,dim2)
where dim1 is the maximum node number (including internal
nodes but excluding orientation nodes) multiplied by the
number of degrees of freedom, and dim2 is the number of
rigid body modes (which corresponds to the number of
structural degrees of freedom).
CLUSTER - Generates excitation frequencies with clustering
option CLUSTER(CON1,CON2,CON3,CON4,%CON5%) where:
- CON1 is the lower end of the frequency range in Hz (0 < CON1)
- CON2 is the upper end of the frequency range in Hz (CON1 < CON2)
- CON3 is the number of points on each side of the natural
frequency (4 ≤ CON3 ≤ 20, defaults to 4)
- CON4 is the constant damping ratio value or an array
parameter (size NFR) specifying the damping ratios (if
zero or blank, defaults to constant damping ratio of
0.005)
- CON5 is an array parameter (size NFR) specifying the
natural frequencies in Hz
The dimension of the resulting array parameter ParR is
less than 2+NFR*(2*CON3+1) where NFR is the number of
natural frequencies defined in CON5.
con1, con2, con3, . . . , con10
Constants used with above functions.
Notes
-----
Operates on input data and produces one output array parameter vector
according to:
ParR = f(CON1, CON2, : ...)
where the functions (f) are described above. Operations use successive
array elements [*VLEN, *VMASK] with the default being all successive
elements. For example, *VFILL,A,RAMP,1,10 assigns A(1) = 1.0, A(2) =
11.0, A(3) = 21.0, etc. *VFILL,B(5,1),DATA,1.5,3.0 assigns B(5,1) =
1.5 and B(6,1) = 3.0. Absolute values and scale factors may be applied
to the result parameter [*VABS, *VFACT]. Results may be cumulative
[*VCUM]. See the *VOPER command for details.
This command is valid in any processor.
"""
command = f"*VFILL,{parr},{func},{con1},{con2},{con3},{con4},{con5},{con6},{con7},{con8},{con9},{con10}"
return self.run(command, **kwargs)
def vread(self, parr="", fname="", ext="", label="", n1="", n2="", n3="",
nskip="", **kwargs):
"""Reads data and produces an array parameter vector or matrix.
APDL Command: *VREAD
Parameters
----------
parr
The name of the resulting array parameter vector. See *SET for
name restrictions. The parameter must exist as a dimensioned array
[*DIM]. String arrays are limited to a maximum of 8 characters.
fname
File name and directory path (248 characters maximum, including the
characters needed for the directory path). An unspecified
directory path defaults to the working directory; in this case, you
can use all 248 characters for the file name.
ext
Filename extension (eight-character maximum).
label
Can take a value of IJK, IKJ, JIK, JKI, KIJ, KJI, or blank (IJK).
n1, n2, n3
Read as (((ParR (i,j,k), k = 1,n1), i = 1, n2), j = 1, n3) for
Label = KIJ. n2 and n3 default to 1.
nskip
Number of lines at the beginning of the file being read that will
be skipped during the reading. Default = 0.
Notes
-----
Reads data from a file and fills in an array parameter vector or
matrix. Data are read from a formatted file or, if the menu is off
[/MENU,OFF] and Fname is blank, from the next input lines. The format
of the data to be read must be input immediately following the *VREAD
command. The format specifies the number of fields to be read per
record, the field width, and the placement of the decimal point (if
none specified in the value). The read operation follows the available
FORTRAN FORMAT conventions of the system (see your system FORTRAN
manual). Any standard FORTRAN real format (such as (4F6.0),
(E10.3,2X,D8.2), etc.) or alphanumeric format (A) may be used.
Alphanumeric strings are limited to a maximum of 8 characters for any
field (A8). For storage of string arrays greater than 8 characters, the
*SREAD command can be used. Integer (I) and list-directed (*)
descriptors may not be used. The parentheses must be included in the
format and the format must not exceed 80 characters (including
parentheses). The input line length is limited to 128 characters.
A starting array element number must be defined for the result array
parameter vector (numeric or character). For example, entering these
two lines:
will read two values from each line of file ARRAYVAL and assign the
values to A(1), A(2), A(3), etc. Reading continues until successive
row elements [*VLEN, *VMASK, *DIM] are filled.
For an array parameter matrix, a starting array element row and column
number must be defined. For example, entering these two lines:
will read two values from each line of file ARRAYVAL and assign the
values to A(1,1), A(2,1), A(3,1), etc. Reading continues until n1 (10)
successive row elements are filled. Once the maximum row number is
reached, subsequent data will be read into the next column (e.g.,
A(1,2), A(2,2), A(3,2), etc.)
For numerical parameters, absolute values and scale factors may be
applied to the result parameter [*VABS, *VFACT]. Results may be
cumulative [*VCUM]. See the *VOPER command for details. If you are in
the GUI the *VREAD command must be contained in an externally prepared
file read into the ANSYS program (i.e., *USE, /INPUT, etc.).
This command is not applicable to 4- or 5-D arrays.
This command is valid in any processor.
"""
command = f"*VREAD,{parr},{fname},{ext},,{label},{n1},{n2},{n3},{nskip}"
return self.run(command, **kwargs)
| 45.695415 | 614 | 0.630241 | import os
class ParameterDefinition:
def afun(self, lab="", **kwargs):
command = f"*AFUN,{lab}"
return self.run(command, **kwargs)
def dim(self, par="", type_="", imax="", jmax="", kmax="", var1="", var2="",
var3="", csysid="", **kwargs):
command = f"*DIM,{par},{type_},{imax},{jmax},{kmax},{var1},{var2},{var3},{csysid}"
return self.run(command, **kwargs)
def get(self, par="", entity="", entnum="", item1="", it1num="", item2="",
it2num="", **kwargs):
command = f"*GET,{par},{entity},{entnum},{item1},{it1num},{item2},{it2num}"
return self.run(command, **kwargs)
def inquire(self, func):
response = self.run(f'/INQUIRE,,{func}', mute=False)
if '=' in response:
return response.split('=')[1].strip()
return ''
def parres(self, lab="", fname="", ext="", **kwargs):
if ext:
fname = fname + '.' + ext
elif not fname:
fname = '.' + 'PARM'
if 'Grpc' in self.__class__.__name__:
if self._local:
if not os.path.isfile(fname):
raise FileNotFoundError(
'Unable to locate filename "%s"' % fname)
if not os.path.dirname(fname):
filename = os.path.join(os.getcwd(), fname)
else:
filename = fname
else:
if not os.path.dirname(fname):
if fname not in self.list_files():
self.upload(fname, progress_bar=False)
else:
self.upload(fname, progress_bar=False)
filename = os.path.basename(fname)
else:
filename = fname
return self.input(filename)
def parsav(self, lab="", fname="", ext="", **kwargs):
command = f"PARSAV,{lab},{fname},{ext}"
return self.run(command, **kwargs)
def starset(self, par="", value="", val2="", val3="", val4="", val5="",
val6="", val7="", val8="", val9="", val10="", **kwargs):
command = f"*SET,{par},{value},{val2},{val3},{val4},{val5},{val6},{val7},{val8},{val9},{val10}"
return self.run(command, **kwargs)
def starvget(self, parr="", entity="", entnum="", item1="", it1num="",
item2="", it2num="", kloop="", **kwargs):
command = f"*VGET,{parr},{entity},{entnum},{item1},{it1num},{item2},{it2num},{kloop}"
return self.run(command, **kwargs)
def taxis(self, parmloc="", naxis="", val1="", val2="", val3="", val4="",
val5="", val6="", val7="", val8="", val9="", val10="", **kwargs):
command = f"*TAXIS,{parmloc},{naxis},{val1},{val2},{val3},{val4},{val5},{val6},{val7},{val8},{val9},{val10}"
return self.run(command, **kwargs)
def tread(self, par="", fname="", ext="", nskip="", **kwargs):
command = f"*TREAD,{par},{fname},{ext},,{nskip}"
return self.run(command, **kwargs)
def vfill(self, parr="", func="", con1="", con2="", con3="", con4="",
con5="", con6="", con7="", con8="", con9="", con10="", **kwargs):
command = f"*VFILL,{parr},{func},{con1},{con2},{con3},{con4},{con5},{con6},{con7},{con8},{con9},{con10}"
return self.run(command, **kwargs)
def vread(self, parr="", fname="", ext="", label="", n1="", n2="", n3="",
nskip="", **kwargs):
command = f"*VREAD,{parr},{fname},{ext},,{label},{n1},{n2},{n3},{nskip}"
return self.run(command, **kwargs)
| true | true |
1c2f9a7a20d5abc765e5ecede822365f427f6daf | 2,273 | py | Python | sdbuild/packages/xrfdc/package/setup.py | michalkouril/PYNQ | c72febc2decc83816f40b91a7f60e11fe707c248 | [
"BSD-3-Clause"
] | 1 | 2021-12-18T09:54:09.000Z | 2021-12-18T09:54:09.000Z | sdbuild/packages/xrfdc/package/setup.py | michalkouril/PYNQ | c72febc2decc83816f40b91a7f60e11fe707c248 | [
"BSD-3-Clause"
] | null | null | null | sdbuild/packages/xrfdc/package/setup.py | michalkouril/PYNQ | c72febc2decc83816f40b91a7f60e11fe707c248 | [
"BSD-3-Clause"
] | null | null | null | # Copyright (c) 2019, Xilinx, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION). HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from setuptools import setup
with open("README.md", encoding='utf-8') as fh:
readme_lines = fh.readlines()[:]
long_description = (''.join(readme_lines))
setup(
name="xrfdc",
version='1.0',
description="Driver for the RFSoC RF Data Converter IP",
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/Xilinx/PYNQ/tree/master/sdbuild/packages/xrfdc',
license='BSD 3-Clause',
author="Craig Ramsay",
author_email="cramsay01@gmail.com",
packages=['xrfdc'],
package_data={
'': ['*.py', '*.so', '*.c'],
},
install_requires=[
'wurlitzer',
]
)
| 39.877193 | 79 | 0.720634 |
from setuptools import setup
with open("README.md", encoding='utf-8') as fh:
readme_lines = fh.readlines()[:]
long_description = (''.join(readme_lines))
setup(
name="xrfdc",
version='1.0',
description="Driver for the RFSoC RF Data Converter IP",
long_description=long_description,
long_description_content_type='text/markdown',
url='https://github.com/Xilinx/PYNQ/tree/master/sdbuild/packages/xrfdc',
license='BSD 3-Clause',
author="Craig Ramsay",
author_email="cramsay01@gmail.com",
packages=['xrfdc'],
package_data={
'': ['*.py', '*.so', '*.c'],
},
install_requires=[
'wurlitzer',
]
)
| true | true |
1c2f9ba902803ac26a5da1ca5b8b26ae271ffd27 | 25,420 | py | Python | HLACERServer/Pipeline/FeatureExtractor_GATE.py | CDCgov/DCPC | c3fadef1bd6345e01a58afef051491d8ef6a7f93 | [
"Apache-2.0"
] | 6 | 2018-11-03T22:43:35.000Z | 2022-02-15T17:51:33.000Z | HLACERServer/Pipeline/FeatureExtractor_GATE.py | CDCgov/DCPC | c3fadef1bd6345e01a58afef051491d8ef6a7f93 | [
"Apache-2.0"
] | 2 | 2019-04-08T03:42:59.000Z | 2019-10-28T13:42:59.000Z | HLACERServer/Pipeline/FeatureExtractor_GATE.py | CDCgov/DCPC | c3fadef1bd6345e01a58afef051491d8ef6a7f93 | [
"Apache-2.0"
] | 10 | 2017-04-10T21:40:22.000Z | 2022-02-21T16:50:10.000Z | from Pipeline.LifFileParser import LifFileParser
import sys
from collections import defaultdict,OrderedDict
from Pipeline.Authentication import Authentication
import requests
import json
import copy
from Pipeline.MetaMap import find_snomed
'''
This file implements a feature-extraction tool for GATE annotation output that
lets users choose which feature types to include.
The result is written in BIO format, suitable as input to CRF training
(e.g. crf_learn).
'''
class FeatureExtractor:
    '''
    Extracts per-token features from a GATE/LIF annotation file.

    Features are stored in ``self.token_features`` keyed by the token's start
    offset.  The value list is POSITIONAL — each extract_* method appends one
    slot, so the call order matters (see ``run()``):
    token, length, semantic tag, pos, orth, tokenType, chunker flag,
    snomedct code, then neighbour features (token-1 ... token-n,
    token+1 ... token+n).
    ``filter_tokens()`` must run before the other extract_* methods because it
    sets ``self.start_position``, which they all read.
    These features are written to the output BIO file by ``write_bio()``.
    '''
    def __init__(self, filename="", lif_string=""):
        """Load a LIF document from a file path or from a raw string."""
        self.input_filename = filename
        if filename != "":
            self.lif_loader = LifFileParser(filename)
        if lif_string != "":
            self.lif_loader = LifFileParser(string=lif_string)
        # start-offset -> positional feature list (see class docstring)
        self.token_features = defaultdict(list)
        self.number_of_tokens = 0
        # "start end" span -> [text, code, name] lookup tables built by the
        # extract_code_* methods
        self.chunk_code = defaultdict(list)
        self.section_code = defaultdict(list)
        self.verb_code = defaultdict(list)
    def extract_tokens(self):
        """Populate slots 0-2 (word, length, semantic tag) for every token."""
        annotations = self.lif_loader.loadAnnotation("Token")
        if annotations == []:
            print("Cannot find token result!")
        else:
            for ann in annotations:
                if ann['label'] == 'Token' or ann['label'] == 'SpaceToken':
                    word = ann['features']['word']
                    if word == '\n':
                        # keep newlines as an explicit pseudo-token
                        word = 'NEWLINE'
                    elif word == ' ':
                        continue
                    self.number_of_tokens = self.number_of_tokens + 1
                    start = ann['start']
                    end = ann['end']
                    length = int(end) - int(start)
                    key = start
                    self.token_features[key].append(word)
                    self.token_features[key].append(length)
                    if 'semanticTag' in ann['features'].keys():
                        semantic_tag = ann['features']['semanticTag']
                        self.token_features[key].append(semantic_tag)
                    else:
                        self.token_features[key].append("UNKNOWN")
    def extract_pos(self):
        """Populate slot 3 with the part-of-speech tag ("UN" when missing)."""
        annotations = self.lif_loader.loadAnnotation("Tagger")
        if annotations == []:
            print("Cannot find POS tag result!")
        else:
            for ann in annotations:
                if ann['label'] == 'Token' or ann['label'] == 'SpaceToken':
                    start = ann['start']
                    end = ann['end']
                    if start < self.start_position:
                        continue
                    word = ann['features']['word']
                    if word == ' ':
                        continue
                    key = start
                    if 'pos' in ann['features'].keys():
                        pos = ann['features']['pos']
                        # word = ann['features']['word']
                        if key in self.token_features.keys():
                            # and self.token_features[key][0] == word:
                            self.token_features[key].append(pos)
                    else:
                        self.token_features[key].append("UN")
        # Assign the POS as UN for those untagged tokens
        for (key, value) in self.token_features.items():
            if len(value) < 4:
                self.token_features[key].append("UN")
    def extract_orth(self):
        """Populate slot 4 with the orthography class ("UN" when missing)."""
        # print("Start extracting orthography information")
        annotations = self.lif_loader.loadAnnotation("Token")
        if annotations == []:
            print("Cannot find token result!")
        else:
            for ann in annotations:
                if ann['label'] == 'Token' or ann['label'] == 'SpaceToken':
                    start = ann['start']
                    end = ann['end']
                    if start < self.start_position:
                        continue
                    word = ann['features']['word']
                    if word == ' ':
                        continue
                    key = start
                    if 'orth' in ann['features'].keys():
                        orth = ann['features']['orth']
                        self.token_features[key].append(orth)
                    else:
                        self.token_features[key].append("UN")
        # Assign the orth as UN for those untagged tokens
        for (key, value) in self.token_features.items():
            if len(value) < 5:
                self.token_features[key].append("UN")
    def extract_type(self):
        """Populate slot 5 with the GATE tokenType ("UN" when missing)."""
        annotations = self.lif_loader.loadAnnotation("Token")
        if annotations == []:
            print("Cannot find token result!")
        else:
            for ann in annotations:
                if ann['label'] == 'Token' or ann['label'] == 'SpaceToken':
                    start = ann['start']
                    end = ann['end']
                    if start < self.start_position:
                        continue
                    word = ann['features']['word']
                    if word == ' ':
                        continue
                    key = start
                    if 'tokenType' in ann['features'].keys():
                        type = ann['features']['tokenType']
                        self.token_features[key].append(type)
                    else:
                        self.token_features[key].append("UN")
        # Assign the type as UN for those untagged tokens
        for (key, value) in self.token_features.items():
            if len(value) < 6:
                self.token_features[key].append("UN")
    def extract_neighbor(self, left_number, right_number, select_number):
        """Append context features copied from the *left_number* preceding and
        *right_number* following tokens.  *select_number* is the count of
        optional feature slots filled so far; the semantic tag (index 2) is
        skipped when copying."""
        # print("Start extract tokens nearby information")
        token_annotations = self.lif_loader.loadAnnotation("Token")
        self.token_features = OrderedDict(sorted(self.token_features.items()))
        self.number_of_tokens = len(list(self.token_features.keys()))
        for i in range(self.number_of_tokens):
            key = list(self.token_features.keys())[i]
            # Extract features for tokens on the left
            for j in range(1, left_number+1):
                if i - j < 0:
                    # before the first token: pad with "start" markers
                    for k in range(select_number+2):
                        self.token_features[key].append("start")
                else:
                    prev_key = list(self.token_features.keys())[i - j]
                    for k in range(select_number+3):
                        if k != 2:
                            prev_feature = self.token_features[prev_key][k]
                            self.token_features[key].append(prev_feature)
            # Extract features for tokens on the right
            for j in range(1, right_number + 1):
                if i + j >= self.number_of_tokens:
                    # past the last token: pad with "end" markers
                    for k in range(select_number+2):
                        self.token_features[key].append("end")
                else:
                    next_key = list(self.token_features.keys())[i+j]
                    for k in range(select_number+3):
                        if k != 2:
                            next_feature = self.token_features[next_key][k]
                            self.token_features[key].append(next_feature)
        # print("Finish extract tokens nearby information")
    def extract_chunk(self):
        """Populate slot 6 with True/False: is the token inside a NounChunk."""
        # print("Start extracting chunker information")
        annotations = self.lif_loader.loadAnnotation("Token")
        text = self.lif_loader.data['payload']['text']['@value']
        # print_chunk(text, annotations)
        if annotations == []:
            print("Cannot find token result!")
        else:
            for ann in annotations:
                if ann['label'] == 'Token' or ann['label'] == 'SpaceToken':
                    start = int(ann['start'])
                    end = int(ann['end'])
                    if start < self.start_position:
                        continue
                    word = ann['features']['word']
                    if word == ' ':
                        continue
                    chunk = find_chunk(annotations, start, end)
                    # key = str(ann['start']) + ' ' + str(ann['end'])
                    key = start
                    self.token_features[key].append(chunk)
        # Assign the type as False for those untagged tokens
        for (key, value) in self.token_features.items():
            if len(value) < 7:
                self.token_features[key].append(False)
        # print("Finish extracting chunker information")
    # Extract SNOMED CODE information
    def extract_snomedct(self):
        """Populate slot 7 with a SNOMED CT code, first from the span tables
        built by extract_code_*, then by querying the UMLS API directly for
        plain alphabetic words.  Requires extract_code_* to run first."""
        print("Start extracting SNOMEDCT code information")
        annotations = self.lif_loader.loadAnnotation("Token")
        if annotations == []:
            print("Cannot find token result!")
        else:
            start_extract = False  # NOTE(review): assigned but never used
            for ann in annotations:
                if ann['label'] == 'Token':
                    start = int(ann['start'])
                    end = int(ann['end'])
                    key = ann['start']
                    if key < self.start_position:
                        continue
                    word = self.token_features[key][0]
                    code, name = find_code(self.section_code, self.chunk_code, self.verb_code, start, end, word)
                    if code == "" or code == 'NONE':
                        # fall back to a live UMLS lookup for simple words
                        if '.' not in word and str(word).isalpha():
                            code, name = find_snomedct(word)
                    #print(code, end=' ')
                    #print(name)
                    line = word + ' ' + code + ' ' + name + '\n'  # NOTE(review): built but never written
                    if code == "" or code == 'NONE':
                        self.token_features[key].append("NOTFOUND")
                    else:
                        self.token_features[key].append(code)
        # Assign the type as UN for those untagged tokens
        for (key, value) in self.token_features.items():
            if len(value) < 8:
                self.token_features[key].append("UNKNOWN")
        print("Finish extracting SNOMEDCT code information")
    # Extract SNOMED code by section heading
    def extract_code_section(self):
        """Look up SNOMED codes for capitalised (heading-like) sentences and
        store them in self.section_code keyed by "start end"."""
        annotations = self.lif_loader.loadAnnotation("Sentence")
        text = self.lif_loader.data['payload']['text']['@value']
        for ann in annotations:
            if ann['label'] == "Sentence":
                start = int(ann['start'])
                end = int(ann['end'])
                sent = text[start:end+1]
                camal = camalcase(sent)
                if camal == True:
                    code, name = find_snomedct(sent)
                    key = str(start) + ' ' + str(end)
                    self.section_code[key].append(sent)
                    self.section_code[key].append(code)
                    self.section_code[key].append(name)
                    line = sent + ' ' + code + ' ' + name + '\n'  # NOTE(review): unused
    # Extract SNOMED code by chunks
    def extract_code_chunk(self):
        """Look up SNOMED codes for single-word noun chunks and store them in
        self.chunk_code.  isalpha() is False for strings with spaces or
        digits, so multi-word chunks are skipped."""
        annotations = self.lif_loader.loadAnnotation("Chunk")
        text = self.lif_loader.data['payload']['text']['@value']
        chunks = get_chunk(text, annotations)
        for (key, value) in chunks.items():
            if '.' not in str(value[0]) and str(value[0]).isalpha():
                code, name = find_snomedct(str(value[0]))
                line = str(value[0]) + ' ' + code + ' ' + name + '\n'  # NOTE(review): unused
                self.chunk_code[key].append(str(value[0]))
                self.chunk_code[key].append(code)
                self.chunk_code[key].append(name)
    # Extract the SNOMED code by verb chunks
    def extract_code_verb(self):
        """Look up SNOMED codes for verb-group (VG) chunks and store them in
        self.verb_code keyed by "start end"."""
        annotations = self.lif_loader.loadAnnotation("Chunk")
        text = self.lif_loader.data['payload']['text']['@value']
        for ann in annotations:
            if ann['label'] == 'VG':
                start = int(ann['start'])
                end = int(ann['end'])
                phrase = text[start:end]
                code, name = find_snomedct(phrase)
                key = str(start) + ' ' + str(end)
                self.verb_code[key].append(phrase)
                self.verb_code[key].append(code)
                self.verb_code[key].append(name)
                line = phrase + ' ' + code + ' ' + name + '\n'  # NOTE(review): unused
    def filter_tokens(self):
        """Drop everything before the first report-body marker token ("Gross",
        "Final", "Path", "PATH" or "DIAGNOSIS") and record its offset as
        self.start_position, which the other extract_* methods rely on.
        If no marker is found, all tokens are kept."""
        start_valid = False
        self.print_tokens = defaultdict(list)
        self.token_features = OrderedDict(sorted(self.token_features.items()))
        self.start_position = 0
        for (key, value) in self.token_features.items():
            token = value[0]
            tag = value[2]
            if (token == "Gross" or token == "Final" or token == "Path" or token == "PATH" or token.upper() == "DIAGNOSIS") and start_valid == False:
                start_valid = True
                if self.start_position == 0:
                    self.start_position = key
            if start_valid == True and key >= self.start_position:
                self.print_tokens[key] = value
        self.token_features = OrderedDict(sorted(self.token_features.items()))
        if len(self.print_tokens.keys()) > 1:
            self.token_features = self.print_tokens
        else:
            self.print_tokens = self.token_features
    def load_sentence_end(self):
        """Cache the end offsets of all sentences (from the Splitter view)."""
        sentence_annotations = self.lif_loader.loadAnnotation("Splitter")
        self.sentence_end = []
        for ann in sentence_annotations:
            if ann['label'] == 'Sentence':
                end = ann['end']
                self.sentence_end.append(end)
    def find_sentence_end(self, token_end):
        """Return True when *token_end* is one past a cached sentence end."""
        sent_end = False
        for end in self.sentence_end:
            if int(end) + 1 == token_end:
                sent_end = True
        return sent_end
    def save_position(self, position_filename):
        """Write "token start_offset" lines for all tokens to a file."""
        position = open(position_filename, "w+")
        self.token_features = OrderedDict(sorted(self.token_features.items()))
        for (key, value) in self.token_features.items():
            token = value[0]
            line = str(token) + " " + str(key) + "\n"
            position.write(line)
        position.close()
    def write_bio(self, filename, position_filename):
        """Write the BIO feature file and a token-position file.

        Each output line holds every feature slot EXCEPT the semantic tag
        (index 2), followed by the token's start offset; a blank line is
        emitted at sentence boundaries.
        NOTE(review): the *position* file handle is never closed.
        """
        output_file = open(filename, "w+")
        position = open(position_filename, "w+")
        self.load_sentence_end()
        for (key, feature) in self.token_features.items():
            line = ""
            for i in range(len(feature)):
                if i != 2:
                    line = line + str(feature[i]) + " "
            line = line + str(key)
            output_file.write(line)
            output_file.write("\n")
            token = feature[0]
            if token != " ":
                line = str(token) + " " + str(key) + "\n"
                position.write(line)
            end = int(key) + feature[1]
            sent_end = self.find_sentence_end(end)
            if sent_end == True:
                output_file.write("\n")
        output_file.close()
def find_chunk(annotations, start, end):
    """Return True when the span [start, end] lies entirely inside any
    NounChunk annotation."""
    return any(
        ann['label'] == 'NounChunk'
        and int(ann['start']) <= start
        and end <= int(ann['end'])
        for ann in annotations
    )
# Load the chunker result
def get_chunk(text, annotations):
    """Collect NounChunk spans from *annotations* into a dict keyed by
    "start end", with the chunk text as the single list element.

    Post-processing on each chunk:
    - a trailing non-alphanumeric character is stripped;
    - a possessive ("'s" / "s'") chunk is extended with the following word;
    - a chunk containing '/' is split into two separate entries.
    Duplicate spans (per find_chunk_exist) are reported, not re-added.
    NOTE(review): slicing uses end+1, i.e. one character past the annotation
    end — presumably to include the last character; verify against the
    annotation offset convention.
    """
    chunks = defaultdict(list)
    for chunk in annotations:
        if chunk['label'] == 'NounChunk':
            chunk_start = int(chunk['start'])
            chunk_end = int(chunk['end'])
            chunk = text[chunk_start:chunk_end+1]
            length = len(chunk)
            if str(chunk)[-1].isalpha() == False \
            and str(chunk)[-1].isnumeric() == False:
                # drop trailing punctuation picked up by the end+1 slice
                chunk = chunk[:-1]
            elif chunk[length-2:] == "'s" \
            or chunk[length-2:] == "s'":
                # possessive: pull in the next word after the chunk
                next_word = str(text[chunk_end+1:]).split(' ')[1]
                chunk_end = chunk_end + len(next_word) + 1
                chunk = chunk + ' ' + next_word
            if '/' in chunk:
                # split "a/b" chunks into two adjacent entries
                index = str(chunk).index('/')
                first = chunk[0:index]
                second = chunk[index+1:]
                first_end = chunk_start + len(first)
                second_start = first_end
                if find_chunk_exist(chunks, first, chunk_start, first_end) == False:
                    key = str(chunk_start) + ' ' + str(first_end)
                    chunks[key].append(first)
                if find_chunk_exist(chunks, second, second_start, chunk_end) == False:
                    key = str(second_start) + ' ' + str(chunk_end)
                    chunks[key].append(second)
            else:
                if find_chunk_exist(chunks, chunk, chunk_start, chunk_end) == False:
                    key = str(chunk_start) + ' ' + str(chunk_end)
                    chunks[key].append(chunk)
                else:
                    print(chunk)
                    print('already added')
    # temp_file.close()
    return chunks
# Find if the chunk already exists in the dictionary
def find_chunk_exist(chunks, text, start, end):
    """Return True when *chunks* already holds an entry whose "start end" span
    covers [start, end-2] and whose value list contains *text*."""
    for span, texts in chunks.items():
        left, right = str(span).split(' ')[0], str(span).split(' ')[1]
        if int(left) <= start and int(right) >= end - 2 and text in texts:
            return True
    return False
def find_snomedct(term):
    """Query the UMLS UTS REST API for the best SNOMEDCT_US match of *term*.

    Returns (cui, name); both empty strings when nothing matches or the
    response cannot be parsed.  A second request fetches the semantic type,
    but that value is only stored in a local and never returned.
    """
    # SECURITY NOTE(review): hard-coded API key committed to source control —
    # load it from configuration or an environment variable instead.
    apikey = "ca310f05-53e6-4984-82fd-8691dc30174e"
    AuthClient = Authentication(apikey)
    version = "2017AB"  # NOTE(review): assigned but never used
    tgt = AuthClient.gettgt()
    # one-time service ticket per request, restricted to SNOMEDCT_US
    query = {'ticket': AuthClient.getst(tgt), 'targetSource': 'SNOMEDCT_US'}
    base_uri = "https://uts-ws.nlm.nih.gov/rest"
    search_uri = "/search/current?string="
    content_uri = "/content/current/CUI/"
    source = "&sabs=SNOMEDCT_US"
    search_type = '&searchType=words'
    path = base_uri + search_uri + term + search_type + source
    r = requests.get(path, params=query)
    code, name, semantic = "", "", ""
    try:
        items = json.loads(r.text)
        code, name = select_code(items['result']['results'], term)
        if code != "":
            # second call: fetch the concept's semantic type
            path2 = base_uri + content_uri + code
            tgt2 = AuthClient.gettgt()
            query2 = {'ticket': AuthClient.getst(tgt2), 'targetSource': 'SNOMEDCT_US'}
            r2 = requests.get(path2, params=query2)
            try:
                items2 = json.loads(r2.text)
                semantic = items2['result']['semanticTypes'][0]['name']
            except json.decoder.JSONDecodeError:
                semantic = "UNKNOWN"
    except json.decoder.JSONDecodeError:
        code, name = "", ""
    return code, name
# # Select the maximum code and name that matches the searched term
def select_code(results, term):
    """Pick the best-matching concept from UMLS search *results*.

    A candidate is accepted only when both of its similarity scores against
    *term* strictly beat the running thresholds (initially 0.6 and 0.4);
    each accepted candidate raises the bar for the next.  Returns ("", "")
    when no candidate qualifies.
    """
    best_code, best_name = "", ""
    threshold, def_threshold = 0.6, 0.4
    for candidate in results:
        candidate_name = candidate['name']
        term_score, name_score = calculate_score(candidate_name, term)
        if term_score > threshold and name_score > def_threshold:
            threshold, def_threshold = term_score, name_score
            best_code = candidate['ui']
            best_name = candidate_name
    return best_code, best_name
# # Calculate the similarity score between SNOMED CT name and the term to be searched
def calculate_score(name, term):
    """Score the word overlap between a SNOMED CT concept *name* and *term*.

    Returns (term_coverage, name_coverage): the fraction of term words found
    in the name and the fraction of name words found in the term.  The
    parenthesised qualifier in names like "Lung (organ)" is ignored.
    Special cases: plural/singular 's', numeric-vs-numeric, the cm/centimeter
    synonym pair, and substring fallback.

    Fixes over the previous version:
    - the 'cm'/'centimeter' branches indexed [0] on a possibly-empty list
      (IndexError once all words were consumed);
    - the numeric branch called list.remove(word) for a DIFFERENT remaining
      number (ValueError); it now removes the remaining numeric word;
    - empty words (from double spaces) no longer crash on word[-1].
    """
    score, score_name = 0, 0
    separate = str(term).lower().split(' ')
    separate_copy = copy.deepcopy(separate)
    number = len(separate)
    # Concept names look like "Lung (organ)"; drop the parenthesised part.
    definitions = str(name).lower().split(' (')[0].split(' ')
    definitions_copy = copy.deepcopy(definitions)
    number_of_definitions = len(definitions)
    # Pass 1: how many words of the concept name are covered by the term.
    for word in definitions:
        if separate_copy != None:
            if word.lower() in separate_copy:
                score_name = score_name + 1
                separate_copy.remove(word.lower())
            elif word and word[-1] == 's' and word[:-1].lower() in separate_copy:
                # plural in the name, singular in the term
                score_name = score_name + 1
                separate_copy.remove(word[:-1].lower())
            elif word.lower() == 'centimeter' and separate_copy and separate_copy[0] == 'cm':
                # unit synonym; emptiness guard prevents IndexError
                score_name = score_name + 1
                separate_copy.remove(separate_copy[0])
            else:
                # substring match as a last resort
                for sep in separate_copy:
                    if word.lower() in sep:
                        score_name = score_name + 1
                        separate_copy.remove(sep)
                        break
    # Pass 2: how many words of the term are covered by the concept name.
    for word in separate:
        if definitions_copy != None:
            if word.lower() != 'x' and word.replace('.', '', 1).isdigit() == False \
                    and word.lower() in definitions_copy:
                score = score + 1
                definitions_copy.remove(word.lower())
            elif len(word) >= 1 and word[-1] == 's' and word[:-1].lower() in definitions_copy:
                score = score + 1
                definitions_copy.remove(word[:-1].lower())
            elif word.replace('.', '', 1).isdigit() and len(definitions_copy) == 1 \
                    and definitions_copy[0].replace('.', '', 1).isdigit():
                # any number matches a purely numeric remaining name word;
                # remove the remaining word, not the (possibly different) term word
                score = score + 1
                definitions_copy.remove(definitions_copy[0])
            elif word.lower() == 'cm' and definitions_copy and definitions_copy[0] == 'centimeter':
                # emptiness guard prevents IndexError
                score = score + 1
                definitions_copy.remove(definitions_copy[0])
            elif word.lower() != 'x' and word.replace('.', '', 1).isdigit() == False:
                for defi in definitions_copy:
                    if word.lower() in defi:
                        score = score + 1
                        definitions_copy.remove(defi)
                        break
    return score/number, score_name/number_of_definitions
# Judge if a sentence is camalcase, used for judging section headings.
def camalcase(sent):
    """Return True when every word of *sent* starts with an uppercase
    character (the word 'of' is exempt).  Empty words are ignored, so the
    empty string is considered heading-like."""
    for word in str(sent).split(" "):
        if word and word != 'of' and word[0] != word[0].upper():
            return False
    return True
# Helper for find_code: scan ONE span->[text, code, name] map.
def _last_code_match(code_map, start, end, word):
    """Return (code, name) of the LAST entry whose "start end" key covers
    [start, end-1] and whose text contains *word*, or None when absent."""
    found = None
    for key, value in code_map.items():
        parts = str(key).split(' ')
        map_start = int(parts[0])
        map_end = int(parts[1])
        if map_start <= start and map_end >= end - 1 and word in value[0]:
            found = (value[1], value[2])
    return found


#Look for extracted codes in sections and chunks
def find_code(section_code, chunk_code, verb_code, start, end, word):
    """Resolve a token's SNOMED code from the previously extracted span maps.

    Precedence: section headings, then verb groups, then noun chunks; a later
    map is only consulted while the code found so far is empty (the name may
    still carry over, matching the original behaviour).  Returns ("", "")
    when nothing matches.  The triplicated scan loop is factored into
    _last_code_match.
    """
    code, name = "", ""
    match = _last_code_match(section_code, start, end, word)
    if match is not None:
        code, name = match
    if code == "":
        match = _last_code_match(verb_code, start, end, word)
        if match is not None:
            code, name = match
    if code == "":
        match = _last_code_match(chunk_code, start, end, word)
        if match is not None:
            code, name = match
    return code, name
def match_snomed_code(cui, filename):
    """Look up the SNOMED code for a UMLS *cui* in a pipe-delimited mapping file.

    Each line is expected as ``snomed|...|...|cui|...``; the LAST matching
    line wins (matching the original behaviour).  Returns "" when the CUI is
    not present.
    """
    snomed_code = ""
    # 'with' closes the file even on error; the old version leaked the handle.
    with open(filename) as mapping:
        for line in mapping:
            sections = line.split('|')
            # guard against malformed short lines instead of IndexError
            if len(sections) > 3 and sections[3] == cui:
                snomed_code = sections[0]
    return snomed_code
def run(arguments):
    """Command-line entry point.

    arguments (argv-style): [script, input_lif, output_bio, pos, orth, type,
    chunk, code, left_count, right_count] — the yes/no flags select feature
    columns; left/right counts control neighbour-context features.
    """
    input_filename = arguments[1]
    output_filename = arguments[2]
    pos_info = arguments[3]
    orth_info = arguments[4]
    type_info = arguments[5]
    chunk_info = arguments[6]
    code_info = arguments[7]
    left_info = arguments[8]
    right_info = arguments[9]
    # Extract Features
    extractor = FeatureExtractor(input_filename)
    extractor.extract_tokens()
    extractor.filter_tokens()
    # select_number counts the optional feature slots actually filled; it is
    # needed by extract_neighbor to know how many columns to copy.
    select_number = 0
    if pos_info == "yes":
        extractor.extract_pos()
        select_number += 1
    if orth_info == "yes":
        extractor.extract_orth()
        select_number += 1
    if type_info == "yes":
        extractor.extract_type()
        select_number += 1
    if chunk_info == "yes":
        extractor.extract_chunk()
        select_number += 1
    if code_info == "yes":
        # code lookup tables must be built before extract_snomedct
        extractor.extract_code_chunk()
        extractor.extract_code_section()
        extractor.extract_code_verb()
        extractor.extract_snomedct()
        select_number += 1
    if int(left_info) != 0 or int(right_info) != 0:
        extractor.extract_neighbor(int(left_info), int(right_info), select_number)
    # BUG FIX: write_bio(filename, position_filename) takes two arguments;
    # the previous single-argument call raised TypeError on every run.
    extractor.write_bio(output_filename, output_filename + ".pos")
if __name__ == "__main__":
    # Pass raw argv straight through; run() indexes positions 1..9.
    # (Restored: the original last line was fused with extraction residue.)
    arguments = sys.argv
    run(arguments)
import sys
from collections import defaultdict,OrderedDict
from Pipeline.Authentication import Authentication
import requests
import json
import copy
from Pipeline.MetaMap import find_snomed
class FeatureExtractor:
def __init__(self, filename="", lif_string=""):
self.input_filename = filename
if filename != "":
self.lif_loader = LifFileParser(filename)
if lif_string != "":
self.lif_loader = LifFileParser(string=lif_string)
self.token_features = defaultdict(list)
self.number_of_tokens = 0
self.chunk_code = defaultdict(list)
self.section_code = defaultdict(list)
self.verb_code = defaultdict(list)
def extract_tokens(self):
annotations = self.lif_loader.loadAnnotation("Token")
if annotations == []:
print("Cannot find token result!")
else:
for ann in annotations:
if ann['label'] == 'Token' or ann['label'] == 'SpaceToken':
word = ann['features']['word']
if word == '\n':
word = 'NEWLINE'
elif word == ' ':
continue
self.number_of_tokens = self.number_of_tokens + 1
start = ann['start']
end = ann['end']
length = int(end) - int(start)
key = start
self.token_features[key].append(word)
self.token_features[key].append(length)
if 'semanticTag' in ann['features'].keys():
semantic_tag = ann['features']['semanticTag']
self.token_features[key].append(semantic_tag)
else:
self.token_features[key].append("UNKNOWN")
def extract_pos(self):
annotations = self.lif_loader.loadAnnotation("Tagger")
if annotations == []:
print("Cannot find POS tag result!")
else:
for ann in annotations:
if ann['label'] == 'Token' or ann['label'] == 'SpaceToken':
start = ann['start']
end = ann['end']
if start < self.start_position:
continue
word = ann['features']['word']
if word == ' ':
continue
key = start
if 'pos' in ann['features'].keys():
pos = ann['features']['pos']
if key in self.token_features.keys():
self.token_features[key].append(pos)
else:
self.token_features[key].append("UN")
for (key, value) in self.token_features.items():
if len(value) < 4:
self.token_features[key].append("UN")
def extract_orth(self):
annotations = self.lif_loader.loadAnnotation("Token")
if annotations == []:
print("Cannot find token result!")
else:
for ann in annotations:
if ann['label'] == 'Token' or ann['label'] == 'SpaceToken':
start = ann['start']
end = ann['end']
if start < self.start_position:
continue
word = ann['features']['word']
if word == ' ':
continue
key = start
if 'orth' in ann['features'].keys():
orth = ann['features']['orth']
self.token_features[key].append(orth)
else:
self.token_features[key].append("UN")
for (key, value) in self.token_features.items():
if len(value) < 5:
self.token_features[key].append("UN")
def extract_type(self):
annotations = self.lif_loader.loadAnnotation("Token")
if annotations == []:
print("Cannot find token result!")
else:
for ann in annotations:
if ann['label'] == 'Token' or ann['label'] == 'SpaceToken':
start = ann['start']
end = ann['end']
if start < self.start_position:
continue
word = ann['features']['word']
if word == ' ':
continue
key = start
if 'tokenType' in ann['features'].keys():
type = ann['features']['tokenType']
self.token_features[key].append(type)
else:
self.token_features[key].append("UN")
for (key, value) in self.token_features.items():
if len(value) < 6:
self.token_features[key].append("UN")
def extract_neighbor(self, left_number, right_number, select_number):
token_annotations = self.lif_loader.loadAnnotation("Token")
self.token_features = OrderedDict(sorted(self.token_features.items()))
self.number_of_tokens = len(list(self.token_features.keys()))
for i in range(self.number_of_tokens):
key = list(self.token_features.keys())[i]
for j in range(1, left_number+1):
if i - j < 0:
for k in range(select_number+2):
self.token_features[key].append("start")
else:
prev_key = list(self.token_features.keys())[i - j]
for k in range(select_number+3):
if k != 2:
prev_feature = self.token_features[prev_key][k]
self.token_features[key].append(prev_feature)
for j in range(1, right_number + 1):
if i + j >= self.number_of_tokens:
for k in range(select_number+2):
self.token_features[key].append("end")
else:
next_key = list(self.token_features.keys())[i+j]
for k in range(select_number+3):
if k != 2:
next_feature = self.token_features[next_key][k]
self.token_features[key].append(next_feature)
def extract_chunk(self):
annotations = self.lif_loader.loadAnnotation("Token")
text = self.lif_loader.data['payload']['text']['@value']
if annotations == []:
print("Cannot find token result!")
else:
for ann in annotations:
if ann['label'] == 'Token' or ann['label'] == 'SpaceToken':
start = int(ann['start'])
end = int(ann['end'])
if start < self.start_position:
continue
word = ann['features']['word']
if word == ' ':
continue
chunk = find_chunk(annotations, start, end)
key = start
self.token_features[key].append(chunk)
for (key, value) in self.token_features.items():
if len(value) < 7:
self.token_features[key].append(False)
def extract_snomedct(self):
print("Start extracting SNOMEDCT code information")
annotations = self.lif_loader.loadAnnotation("Token")
if annotations == []:
print("Cannot find token result!")
else:
start_extract = False
for ann in annotations:
if ann['label'] == 'Token':
start = int(ann['start'])
end = int(ann['end'])
key = ann['start']
if key < self.start_position:
continue
word = self.token_features[key][0]
code, name = find_code(self.section_code, self.chunk_code, self.verb_code, start, end, word)
if code == "" or code == 'NONE':
if '.' not in word and str(word).isalpha():
code, name = find_snomedct(word)
line = word + ' ' + code + ' ' + name + '\n'
if code == "" or code == 'NONE':
self.token_features[key].append("NOTFOUND")
else:
self.token_features[key].append(code)
for (key, value) in self.token_features.items():
if len(value) < 8:
self.token_features[key].append("UNKNOWN")
print("Finish extracting SNOMEDCT code information")
def extract_code_section(self):
annotations = self.lif_loader.loadAnnotation("Sentence")
text = self.lif_loader.data['payload']['text']['@value']
for ann in annotations:
if ann['label'] == "Sentence":
start = int(ann['start'])
end = int(ann['end'])
sent = text[start:end+1]
camal = camalcase(sent)
if camal == True:
code, name = find_snomedct(sent)
key = str(start) + ' ' + str(end)
self.section_code[key].append(sent)
self.section_code[key].append(code)
self.section_code[key].append(name)
line = sent + ' ' + code + ' ' + name + '\n'
def extract_code_chunk(self):
annotations = self.lif_loader.loadAnnotation("Chunk")
text = self.lif_loader.data['payload']['text']['@value']
chunks = get_chunk(text, annotations)
for (key, value) in chunks.items():
if '.' not in str(value[0]) and str(value[0]).isalpha():
code, name = find_snomedct(str(value[0]))
line = str(value[0]) + ' ' + code + ' ' + name + '\n'
self.chunk_code[key].append(str(value[0]))
self.chunk_code[key].append(code)
self.chunk_code[key].append(name)
def extract_code_verb(self):
annotations = self.lif_loader.loadAnnotation("Chunk")
text = self.lif_loader.data['payload']['text']['@value']
for ann in annotations:
if ann['label'] == 'VG':
start = int(ann['start'])
end = int(ann['end'])
phrase = text[start:end]
code, name = find_snomedct(phrase)
key = str(start) + ' ' + str(end)
self.verb_code[key].append(phrase)
self.verb_code[key].append(code)
self.verb_code[key].append(name)
line = phrase + ' ' + code + ' ' + name + '\n'
def filter_tokens(self):
start_valid = False
self.print_tokens = defaultdict(list)
self.token_features = OrderedDict(sorted(self.token_features.items()))
self.start_position = 0
for (key, value) in self.token_features.items():
token = value[0]
tag = value[2]
if (token == "Gross" or token == "Final" or token == "Path" or token == "PATH" or token.upper() == "DIAGNOSIS") and start_valid == False:
start_valid = True
if self.start_position == 0:
self.start_position = key
if start_valid == True and key >= self.start_position:
self.print_tokens[key] = value
self.token_features = OrderedDict(sorted(self.token_features.items()))
if len(self.print_tokens.keys()) > 1:
self.token_features = self.print_tokens
else:
self.print_tokens = self.token_features
def load_sentence_end(self):
sentence_annotations = self.lif_loader.loadAnnotation("Splitter")
self.sentence_end = []
for ann in sentence_annotations:
if ann['label'] == 'Sentence':
end = ann['end']
self.sentence_end.append(end)
def find_sentence_end(self, token_end):
sent_end = False
for end in self.sentence_end:
if int(end) + 1 == token_end:
sent_end = True
return sent_end
def save_position(self, position_filename):
position = open(position_filename, "w+")
self.token_features = OrderedDict(sorted(self.token_features.items()))
for (key, value) in self.token_features.items():
token = value[0]
line = str(token) + " " + str(key) + "\n"
position.write(line)
position.close()
def write_bio(self, filename, position_filename):
output_file = open(filename, "w+")
position = open(position_filename, "w+")
self.load_sentence_end()
for (key, feature) in self.token_features.items():
line = ""
for i in range(len(feature)):
if i != 2:
line = line + str(feature[i]) + " "
line = line + str(key)
output_file.write(line)
output_file.write("\n")
token = feature[0]
if token != " ":
line = str(token) + " " + str(key) + "\n"
position.write(line)
end = int(key) + feature[1]
sent_end = self.find_sentence_end(end)
if sent_end == True:
output_file.write("\n")
output_file.close()
def find_chunk(annotations, start, end):
for chunk in annotations:
if chunk['label'] == 'NounChunk':
chunk_start = int(chunk['start'])
chunk_end = int(chunk['end'])
if start >= chunk_start \
and end <= chunk_end:
return True
return False
def get_chunk(text, annotations):
chunks = defaultdict(list)
for chunk in annotations:
if chunk['label'] == 'NounChunk':
chunk_start = int(chunk['start'])
chunk_end = int(chunk['end'])
chunk = text[chunk_start:chunk_end+1]
length = len(chunk)
if str(chunk)[-1].isalpha() == False \
and str(chunk)[-1].isnumeric() == False:
chunk = chunk[:-1]
elif chunk[length-2:] == "'s" \
or chunk[length-2:] == "s'":
next_word = str(text[chunk_end+1:]).split(' ')[1]
chunk_end = chunk_end + len(next_word) + 1
chunk = chunk + ' ' + next_word
if '/' in chunk:
index = str(chunk).index('/')
first = chunk[0:index]
second = chunk[index+1:]
first_end = chunk_start + len(first)
second_start = first_end
if find_chunk_exist(chunks, first, chunk_start, first_end) == False:
key = str(chunk_start) + ' ' + str(first_end)
chunks[key].append(first)
if find_chunk_exist(chunks, second, second_start, chunk_end) == False:
key = str(second_start) + ' ' + str(chunk_end)
chunks[key].append(second)
else:
if find_chunk_exist(chunks, chunk, chunk_start, chunk_end) == False:
key = str(chunk_start) + ' ' + str(chunk_end)
chunks[key].append(chunk)
else:
print(chunk)
print('already added')
return chunks
def find_chunk_exist(chunks, text, start, end):
exist = False
for (key, value) in chunks.items():
cur_start = int(str(key).split(' ')[0])
cur_end = int(str(key).split(' ')[1])
if cur_start <= start \
and cur_end >= end-2 \
and text in value:
exist = True
return exist
def find_snomedct(term):
apikey = "ca310f05-53e6-4984-82fd-8691dc30174e"
AuthClient = Authentication(apikey)
version = "2017AB"
tgt = AuthClient.gettgt()
query = {'ticket': AuthClient.getst(tgt), 'targetSource': 'SNOMEDCT_US'}
base_uri = "https://uts-ws.nlm.nih.gov/rest"
search_uri = "/search/current?string="
content_uri = "/content/current/CUI/"
source = "&sabs=SNOMEDCT_US"
search_type = '&searchType=words'
path = base_uri + search_uri + term + search_type + source
r = requests.get(path, params=query)
code, name, semantic = "", "", ""
try:
items = json.loads(r.text)
code, name = select_code(items['result']['results'], term)
if code != "":
path2 = base_uri + content_uri + code
tgt2 = AuthClient.gettgt()
query2 = {'ticket': AuthClient.getst(tgt2), 'targetSource': 'SNOMEDCT_US'}
r2 = requests.get(path2, params=query2)
try:
items2 = json.loads(r2.text)
semantic = items2['result']['semanticTypes'][0]['name']
except json.decoder.JSONDecodeError:
semantic = "UNKNOWN"
except json.decoder.JSONDecodeError:
code, name = "", ""
return code, name
re = 0.4
code, name = "",""
for result in results:
title = result['name']
temp_score, temp_def_score = calculate_score(title, term)
if temp_score > score and temp_def_score > def_score:
score = temp_score
def_score = temp_def_score
code = result['ui']
name = title
return code, name
erm).lower().split(' ')
separate_copy = copy.deepcopy(separate)
number = len(separate)
definitions = str(name).lower().split(' (')[0].split(' ')
definitions_copy = copy.deepcopy(definitions)
number_of_definitions = len(definitions)
for word in definitions:
if separate_copy != None:
if word.lower() in separate_copy:
score_name = score_name + 1
separate_copy.remove(word.lower())
elif word[-1] == 's' and word[:-1].lower() in separate_copy:
score_name = score_name + 1
separate_copy.remove(word[:-1].lower())
elif word.lower() == 'centimeter' and separate_copy[0] == 'cm':
score_name = score_name + 1
separate_copy.remove(separate_copy[0])
else:
for sep in separate_copy:
if word.lower() in sep:
score_name = score_name + 1
separate_copy.remove(sep)
break
for word in separate:
if definitions_copy != None:
if word.lower() != 'x' and word.replace('.', '', 1).isdigit() == False \
and word.lower() in definitions_copy:
score = score + 1
definitions_copy.remove(word.lower())
elif len(word) >= 1 and word[-1] == 's' and word[:-1].lower() in definitions_copy:
score = score + 1
definitions_copy.remove(word[:-1].lower())
elif word.replace('.', '', 1).isdigit() and len(definitions_copy) == 1 \
and definitions_copy[0].replace('.','',1).isdigit():
score = score + 1
definitions_copy.remove(word.lower())
elif word.lower() == 'cm' and definitions_copy[0] == 'centimeter':
score = score + 1
definitions_copy.remove(definitions_copy[0])
elif word.lower() != 'x' and word.replace('.', '', 1).isdigit() == False:
for defi in definitions_copy:
if word.lower() in defi:
score = score + 1
definitions_copy.remove(defi)
break
return score/number, score_name/number_of_definitions
def camalcase(sent):
    """Return True when every word of *sent* starts with an uppercase letter.

    Empty tokens are ignored and the lowercase word 'of' is exempt, so
    titles such as "Book of Things" still count as camel/title case.
    """
    return all(
        token == 'of' or token[0] == token[0].upper()
        for token in str(sent).split(" ")
        if len(token) > 0
    )
def find_code(section_code, chunk_code, verb_code, start, end, word):
    """Resolve the (code, name) pair covering the token span [start, end).

    Each mapping is keyed by "start end" strings and valued as
    (words, code, name) triples.  Sections are consulted first, then
    verbs, then chunks.  Within one mapping the *last* covering entry
    wins, matching the original scan order.
    """
    def _scan(mapping):
        # Last entry whose span covers [start, end-1] and whose word list
        # contains `word`; empty strings if none matches.
        found_code, found_name = "", ""
        for key, value in mapping.items():
            parts = str(key).split(' ')
            span_start, span_end = int(parts[0]), int(parts[1])
            if span_start <= start and span_end >= end - 1 and word in value[0]:
                found_code, found_name = value[1], value[2]
        return found_code, found_name

    code, name = _scan(section_code)
    if code == "":
        code, name = _scan(verb_code)
    if code == "":
        code, name = _scan(chunk_code)
    return code, name
def match_snomed_code(cui, filename):
    """Return the SNOMED code mapped to a UMLS CUI in a pipe-delimited file.

    Each line of *filename* must hold at least four '|'-separated fields,
    with the SNOMED code in column 0 and the CUI in column 3.  As in the
    original scan, the last matching line wins; an empty string is
    returned when nothing matches.
    """
    snomed_code = ""
    # Context manager fixes the original's leaked file handle; iterating
    # the handle directly also avoids loading the whole file in memory.
    with open(filename) as source:
        for line in source:
            sections = line.split('|')
            if cui == sections[3]:
                snomed_code = sections[0]
    return snomed_code
def run(arguments):
    """Drive feature extraction from command-line style *arguments*.

    Positional layout: [_, input_file, output_file, pos, orth, type,
    chunk, code, left, right], where the middle flags are "yes"/"no"
    strings and left/right are neighbour window sizes.
    """
    input_filename = arguments[1]
    output_filename = arguments[2]
    pos_info = arguments[3]
    orth_info = arguments[4]
    type_info = arguments[5]
    chunk_info = arguments[6]
    code_info = arguments[7]
    left_info = arguments[8]
    right_info = arguments[9]
    extractor = FeatureExtractor(input_filename)
    extractor.extract_tokens()
    extractor.filter_tokens()
    # Each enabled flag contributes exactly one feature family;
    # select_number tells extract_neighbor how many families each token
    # row carries.
    feature_switches = [
        (pos_info, (extractor.extract_pos,)),
        (orth_info, (extractor.extract_orth,)),
        (type_info, (extractor.extract_type,)),
        (chunk_info, (extractor.extract_chunk,)),
        (code_info, (extractor.extract_code_chunk,
                     extractor.extract_code_section,
                     extractor.extract_code_verb,
                     extractor.extract_snomedct)),
    ]
    select_number = 0
    for flag, extract_fns in feature_switches:
        if flag == "yes":
            for extract in extract_fns:
                extract()
            select_number += 1
    left, right = int(left_info), int(right_info)
    if left != 0 or right != 0:
        extractor.extract_neighbor(left, right, select_number)
    extractor.write_bio(output_filename)
# Script entry point: forward the raw CLI argument vector to run().
if __name__ == "__main__":
    arguments = sys.argv
    run(arguments)
1c2f9be739ba817cd997258c805e95fee7d7ef05 | 5,365 | py | Python | exopy_hqc_legacy/tasks/tasks/instr/apply_mag_field_task.py | Exopy/ecpy_hqc_legacy | 3e31a8865d130907a82005e6cd78d99c6da7a951 | [
"BSD-3-Clause"
] | null | null | null | exopy_hqc_legacy/tasks/tasks/instr/apply_mag_field_task.py | Exopy/ecpy_hqc_legacy | 3e31a8865d130907a82005e6cd78d99c6da7a951 | [
"BSD-3-Clause"
] | 34 | 2015-12-14T22:06:57.000Z | 2018-02-07T08:40:47.000Z | exopy_hqc_legacy/tasks/tasks/instr/apply_mag_field_task.py | Exopy/ecpy_hqc_legacy | 3e31a8865d130907a82005e6cd78d99c6da7a951 | [
"BSD-3-Clause"
] | 6 | 2018-04-20T14:48:54.000Z | 2021-06-23T22:25:17.000Z | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright 2015-2018 by ExopyHqcLegacy Authors, see AUTHORS for more details.
#
# Distributed under the terms of the BSD license.
#
# The full license is in the file LICENCE, distributed with this software.
# -----------------------------------------------------------------------------
"""Task to apply a magnetic field.
"""
from time import sleep
import numbers
from inspect import cleandoc
from atom.api import (Str, Float, Bool, set_default)
from exopy.tasks.api import InstrumentTask, validators
from exopy_hqc_legacy.instruments.drivers.driver_tools import InstrTimeoutError
class ApplyMagFieldTask(InstrumentTask):
    """Use a supraconducting magnet to apply a magnetic field. Parallel task.

    The task turns the switch heater on if needed, sweeps the magnet to
    the requested field and, optionally, closes the heater again and
    ramps the leads back to zero.
    """
    # Target magnetic field (dynamically evaluated; must resolve to a
    # real number).
    field = Str().tag(pref=True,
                      feval=validators.SkipLoop(types=numbers.Real))
    # Rate at which to sweep the field.
    rate = Float(0.01).tag(pref=True)
    # Whether to stop the switch heater after setting the field.
    auto_stop_heater = Bool(True).tag(pref=True)
    # Time to wait before bringing the field to zero after closing the switch
    # heater.
    post_switch_wait = Float(30.0).tag(pref=True)
    # Runs in the 'instr' pool so other instruments can operate concurrently.
    parallel = set_default({'activated': True, 'pool': 'instr'})
    database_entries = set_default({'field': 0.01})
    def check_for_interruption(self):
        """Check if the user required an interruption.

        Returns True when the measurement's stop event has been set.
        """
        return self.root.should_stop.is_set()
    def perform(self, target_value=None):
        """Apply the specified magnetic field.

        When *target_value* is None, the `field` member is evaluated to
        obtain the target.  Returns False if the initial sweep to the
        persistent field is interrupted.
        """
        # make ready: claim the driver if this task does not own it yet.
        if (self.driver.owner != self.name or
                not self.driver.check_connection()):
            self.driver.owner = self.name
        if target_value is None:
            target_value = self.format_and_eval_string(self.field)
        driver = self.driver
        normal_end = True
        # Only if driver.heater_state == 'Off' otherwise we wait for
        # post_switch_wait no matter what
        if driver.heater_state == 'Off':
            job = driver.sweep_to_persistent_field()
            if job.wait_for_completion(self.check_for_interruption,
                                       timeout=60, refresh_time=1):
                driver.heater_state = 'On'
                sleep(self.post_switch_wait)
            else:
                return False
        # Sweep only when the requested field differs measurably from the
        # current persistent field.
        if (abs(driver.read_persistent_field() - target_value) >
                driver.output_fluctuations):
            # set the magnetic field
            job = driver.sweep_to_field(target_value, self.rate)
            try:
                normal_end = job.wait_for_completion(self.check_for_interruption,
                                                     timeout=60,
                                                     refresh_time=5)
            except InstrTimeoutError:
                # job.timeout() has been called, which stops the sweep and turn off
                # the switch heater
                self.write_in_database('field', driver.read_persistent_field())
                # fail the measurement
                self.root.should_stop.set()
                raise ValueError(cleandoc('''Field source did not set the field to
                    {}'''.format(target_value)))
        if not normal_end:
            # this happens when a stop signal has been send to the measurement
            job.cancel()
            return
        # turn off heater if required
        if self.auto_stop_heater:
            driver.heater_state = 'Off'
            sleep(self.post_switch_wait)
            # sweep down to zero at the fast sweep rate
            job = driver.sweep_to_field(0, driver.fast_sweep_rate)
            job.wait_for_completion(self.check_for_interruption,
                                    timeout=60, refresh_time=1)
        self.write_in_database('field', target_value)
class ApplyMagFieldAndDropTask(InstrumentTask):
    """Use a supraconducting magnet to apply a magnetic field. Parallel task.

    Unlike ApplyMagFieldTask, this variant only *starts* the sweep and
    returns immediately without waiting for completion ("drop"), and it
    requires the switch heater to already be on.
    """
    # Target magnetic field (dynamically evaluated; must resolve to a
    # real number).
    field = Str().tag(pref=True,
                      feval=validators.SkipLoop(types=numbers.Real))
    # Rate at which to sweep the field.
    rate = Float(0.01).tag(pref=True)
    parallel = set_default({'activated': True, 'pool': 'instr'})
    database_entries = set_default({'field': 0.01})
    def check_for_interruption(self):
        """Check if the user required an interruption.

        Returns True when the measurement's stop event has been set.
        """
        return self.root.should_stop.is_set()
    def perform(self, target_value=None):
        """Apply the specified magnetic field.

        Starts the sweep towards *target_value* (or the evaluated `field`
        member) and returns without waiting for it to finish.
        """
        # make ready: claim the driver if this task does not own it yet.
        if (self.driver.owner != self.name or
                not self.driver.check_connection()):
            self.driver.owner = self.name
        driver = self.driver
        # The switch heater must already be on for the magnet to sweep.
        if driver.heater_state == 'Off':
            raise ValueError(cleandoc(''' Switch heater must be on'''))
        if target_value is None:
            target_value = self.format_and_eval_string(self.field)
        driver.field_sweep_rate = self.rate
        driver.target_field = target_value
        # Trigger the sweep; completion is intentionally not awaited.
        driver.activity = 'To set point'
        self.write_in_database('field', target_value)
| 35.766667 | 84 | 0.592917 |
from time import sleep
import numbers
from inspect import cleandoc
from atom.api import (Str, Float, Bool, set_default)
from exopy.tasks.api import InstrumentTask, validators
from exopy_hqc_legacy.instruments.drivers.driver_tools import InstrTimeoutError
class ApplyMagFieldTask(InstrumentTask):
field = Str().tag(pref=True,
feval=validators.SkipLoop(types=numbers.Real))
rate = Float(0.01).tag(pref=True)
auto_stop_heater = Bool(True).tag(pref=True)
post_switch_wait = Float(30.0).tag(pref=True)
parallel = set_default({'activated': True, 'pool': 'instr'})
database_entries = set_default({'field': 0.01})
def check_for_interruption(self):
return self.root.should_stop.is_set()
def perform(self, target_value=None):
if (self.driver.owner != self.name or
not self.driver.check_connection()):
self.driver.owner = self.name
if target_value is None:
target_value = self.format_and_eval_string(self.field)
driver = self.driver
normal_end = True
if driver.heater_state == 'Off':
job = driver.sweep_to_persistent_field()
if job.wait_for_completion(self.check_for_interruption,
timeout=60, refresh_time=1):
driver.heater_state = 'On'
sleep(self.post_switch_wait)
else:
return False
if (abs(driver.read_persistent_field() - target_value) >
driver.output_fluctuations):
job = driver.sweep_to_field(target_value, self.rate)
try:
normal_end = job.wait_for_completion(self.check_for_interruption,
timeout=60,
refresh_time=5)
except InstrTimeoutError:
self.write_in_database('field', driver.read_persistent_field())
self.root.should_stop.set()
raise ValueError(cleandoc('''Field source did not set the field to
{}'''.format(target_value)))
if not normal_end:
job.cancel()
return
if self.auto_stop_heater:
driver.heater_state = 'Off'
sleep(self.post_switch_wait)
job = driver.sweep_to_field(0, driver.fast_sweep_rate)
job.wait_for_completion(self.check_for_interruption,
timeout=60, refresh_time=1)
self.write_in_database('field', target_value)
class ApplyMagFieldAndDropTask(InstrumentTask):
field = Str().tag(pref=True,
feval=validators.SkipLoop(types=numbers.Real))
rate = Float(0.01).tag(pref=True)
parallel = set_default({'activated': True, 'pool': 'instr'})
database_entries = set_default({'field': 0.01})
def check_for_interruption(self):
return self.root.should_stop.is_set()
def perform(self, target_value=None):
if (self.driver.owner != self.name or
not self.driver.check_connection()):
self.driver.owner = self.name
driver = self.driver
if driver.heater_state == 'Off':
raise ValueError(cleandoc(''' Switch heater must be on'''))
if target_value is None:
target_value = self.format_and_eval_string(self.field)
driver.field_sweep_rate = self.rate
driver.target_field = target_value
driver.activity = 'To set point'
self.write_in_database('field', target_value)
| true | true |
1c2f9cb0f0f6ad3b0ba9caddb0ca01340165cd8f | 6,290 | py | Python | neutron/agent/metadata/namespace_proxy.py | armando-migliaccio/neutron-1 | e31861c15bc73e65a7c22212df2a56f9e45aa0e4 | [
"Apache-2.0"
] | null | null | null | neutron/agent/metadata/namespace_proxy.py | armando-migliaccio/neutron-1 | e31861c15bc73e65a7c22212df2a56f9e45aa0e4 | [
"Apache-2.0"
] | null | null | null | neutron/agent/metadata/namespace_proxy.py | armando-migliaccio/neutron-1 | e31861c15bc73e65a7c22212df2a56f9e45aa0e4 | [
"Apache-2.0"
] | null | null | null | # Copyright 2012 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import httplib
import socket
import eventlet
eventlet.monkey_patch()
import httplib2
from oslo.config import cfg
import six.moves.urllib.parse as urlparse
import webob
from neutron.agent.linux import daemon
from neutron.common import config
from neutron.common import utils
from neutron.openstack.common import log as logging
from neutron import wsgi
LOG = logging.getLogger(__name__)
class UnixDomainHTTPConnection(httplib.HTTPConnection):
    """Connection class for HTTP over UNIX domain socket."""
    def __init__(self, host, port=None, strict=None, timeout=None,
                 proxy_info=None):
        # proxy_info is accepted for httplib2 compatibility but unused.
        httplib.HTTPConnection.__init__(self, host, port, strict)
        self.timeout = timeout
    def connect(self):
        # Connect to the configured metadata proxy socket instead of a
        # TCP endpoint; the `host` passed to __init__ is ignored here.
        self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        if self.timeout:
            self.sock.settimeout(self.timeout)
        self.sock.connect(cfg.CONF.metadata_proxy_socket)
class NetworkMetadataProxyHandler(object):
    """Proxy AF_INET metadata request through Unix Domain socket.
    The Unix domain socket allows the proxy access resource that are not
    accessible within the isolated tenant context.
    """
    def __init__(self, network_id=None, router_id=None):
        # Exactly one of network_id / router_id is expected; both None is
        # an error.
        self.network_id = network_id
        self.router_id = router_id
        if network_id is None and router_id is None:
            msg = _('network_id and router_id are None. One must be provided.')
            raise ValueError(msg)
    @webob.dec.wsgify(RequestClass=webob.Request)
    def __call__(self, req):
        """WSGI entry point: forward the request, mapping any failure to 500."""
        LOG.debug(_("Request: %s"), req)
        try:
            return self._proxy_request(req.remote_addr,
                                       req.method,
                                       req.path_info,
                                       req.query_string,
                                       req.body)
        except Exception:
            LOG.exception(_("Unexpected error."))
            msg = _('An unknown error has occurred. '
                    'Please try your request again.')
            return webob.exc.HTTPInternalServerError(explanation=unicode(msg))
    def _proxy_request(self, remote_address, method, path_info,
                      query_string, body):
        """Relay one request over the UNIX socket and translate the reply."""
        # Identify the requester and its network/router so the metadata
        # agent on the other side of the socket can route the request.
        headers = {
            'X-Forwarded-For': remote_address,
        }
        if self.router_id:
            headers['X-Neutron-Router-ID'] = self.router_id
        else:
            headers['X-Neutron-Network-ID'] = self.network_id
        url = urlparse.urlunsplit((
            'http',
            '169.254.169.254',  # a dummy value to make the request proper
            path_info,
            query_string,
            ''))
        h = httplib2.Http()
        # UnixDomainHTTPConnection ignores the host above and connects to
        # cfg.CONF.metadata_proxy_socket instead.
        resp, content = h.request(
            url,
            method=method,
            headers=headers,
            body=body,
            connection_type=UnixDomainHTTPConnection)
        if resp.status == 200:
            LOG.debug(resp)
            LOG.debug(content)
            response = webob.Response()
            response.status = resp.status
            response.headers['Content-Type'] = resp['content-type']
            response.body = content
            return response
        elif resp.status == 404:
            return webob.exc.HTTPNotFound()
        elif resp.status == 409:
            return webob.exc.HTTPConflict()
        elif resp.status == 500:
            msg = _(
                'Remote metadata server experienced an internal server error.'
            )
            LOG.debug(msg)
            return webob.exc.HTTPInternalServerError(explanation=unicode(msg))
        else:
            # Any other status is unexpected and aborts the request.
            raise Exception(_('Unexpected response code: %s') % resp.status)
class ProxyDaemon(daemon.Daemon):
    """Daemon hosting the per-network/router metadata proxy WSGI server."""
    def __init__(self, pidfile, port, network_id=None, router_id=None):
        # Either id serves as the daemon's unique identifier.
        uuid = network_id or router_id
        super(ProxyDaemon, self).__init__(pidfile, uuid=uuid)
        self.network_id = network_id
        self.router_id = router_id
        self.port = port
    def run(self):
        """Start the WSGI proxy server and block until it terminates."""
        handler = NetworkMetadataProxyHandler(
            self.network_id,
            self.router_id)
        proxy = wsgi.Server('neutron-network-metadata-proxy')
        proxy.start(handler, self.port)
        proxy.wait()
def main():
    """Parse CLI options and run the metadata proxy, daemonized or not."""
    opts = [
        cfg.StrOpt('network_id',
                   help=_('Network that will have instance metadata '
                          'proxied.')),
        cfg.StrOpt('router_id',
                   help=_('Router that will have connected instances\' '
                          'metadata proxied.')),
        cfg.StrOpt('pid_file',
                   help=_('Location of pid file of this process.')),
        cfg.BoolOpt('daemonize',
                    default=True,
                    help=_('Run as daemon.')),
        cfg.IntOpt('metadata_port',
                   default=9697,
                   help=_("TCP Port to listen for metadata server "
                          "requests.")),
        cfg.StrOpt('metadata_proxy_socket',
                   default='$state_path/metadata_proxy',
                   help=_('Location of Metadata Proxy UNIX domain '
                          'socket'))
    ]
    cfg.CONF.register_cli_opts(opts)
    # Don't get the default configuration file
    cfg.CONF(project='neutron', default_config_files=[])
    config.setup_logging()
    utils.log_opt_values(LOG)
    proxy = ProxyDaemon(cfg.CONF.pid_file,
                        cfg.CONF.metadata_port,
                        network_id=cfg.CONF.network_id,
                        router_id=cfg.CONF.router_id)
    # Daemonize (fork into background) unless explicitly disabled.
    if cfg.CONF.daemonize:
        proxy.start()
    else:
        proxy.run()
| 34.751381 | 79 | 0.595548 |
import httplib
import socket
import eventlet
eventlet.monkey_patch()
import httplib2
from oslo.config import cfg
import six.moves.urllib.parse as urlparse
import webob
from neutron.agent.linux import daemon
from neutron.common import config
from neutron.common import utils
from neutron.openstack.common import log as logging
from neutron import wsgi
LOG = logging.getLogger(__name__)
class UnixDomainHTTPConnection(httplib.HTTPConnection):
def __init__(self, host, port=None, strict=None, timeout=None,
proxy_info=None):
httplib.HTTPConnection.__init__(self, host, port, strict)
self.timeout = timeout
def connect(self):
self.sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
if self.timeout:
self.sock.settimeout(self.timeout)
self.sock.connect(cfg.CONF.metadata_proxy_socket)
class NetworkMetadataProxyHandler(object):
def __init__(self, network_id=None, router_id=None):
self.network_id = network_id
self.router_id = router_id
if network_id is None and router_id is None:
msg = _('network_id and router_id are None. One must be provided.')
raise ValueError(msg)
@webob.dec.wsgify(RequestClass=webob.Request)
def __call__(self, req):
LOG.debug(_("Request: %s"), req)
try:
return self._proxy_request(req.remote_addr,
req.method,
req.path_info,
req.query_string,
req.body)
except Exception:
LOG.exception(_("Unexpected error."))
msg = _('An unknown error has occurred. '
'Please try your request again.')
return webob.exc.HTTPInternalServerError(explanation=unicode(msg))
def _proxy_request(self, remote_address, method, path_info,
query_string, body):
headers = {
'X-Forwarded-For': remote_address,
}
if self.router_id:
headers['X-Neutron-Router-ID'] = self.router_id
else:
headers['X-Neutron-Network-ID'] = self.network_id
url = urlparse.urlunsplit((
'http',
'169.254.169.254',
path_info,
query_string,
''))
h = httplib2.Http()
resp, content = h.request(
url,
method=method,
headers=headers,
body=body,
connection_type=UnixDomainHTTPConnection)
if resp.status == 200:
LOG.debug(resp)
LOG.debug(content)
response = webob.Response()
response.status = resp.status
response.headers['Content-Type'] = resp['content-type']
response.body = content
return response
elif resp.status == 404:
return webob.exc.HTTPNotFound()
elif resp.status == 409:
return webob.exc.HTTPConflict()
elif resp.status == 500:
msg = _(
'Remote metadata server experienced an internal server error.'
)
LOG.debug(msg)
return webob.exc.HTTPInternalServerError(explanation=unicode(msg))
else:
raise Exception(_('Unexpected response code: %s') % resp.status)
class ProxyDaemon(daemon.Daemon):
def __init__(self, pidfile, port, network_id=None, router_id=None):
uuid = network_id or router_id
super(ProxyDaemon, self).__init__(pidfile, uuid=uuid)
self.network_id = network_id
self.router_id = router_id
self.port = port
def run(self):
handler = NetworkMetadataProxyHandler(
self.network_id,
self.router_id)
proxy = wsgi.Server('neutron-network-metadata-proxy')
proxy.start(handler, self.port)
proxy.wait()
def main():
opts = [
cfg.StrOpt('network_id',
help=_('Network that will have instance metadata '
'proxied.')),
cfg.StrOpt('router_id',
help=_('Router that will have connected instances\' '
'metadata proxied.')),
cfg.StrOpt('pid_file',
help=_('Location of pid file of this process.')),
cfg.BoolOpt('daemonize',
default=True,
help=_('Run as daemon.')),
cfg.IntOpt('metadata_port',
default=9697,
help=_("TCP Port to listen for metadata server "
"requests.")),
cfg.StrOpt('metadata_proxy_socket',
default='$state_path/metadata_proxy',
help=_('Location of Metadata Proxy UNIX domain '
'socket'))
]
cfg.CONF.register_cli_opts(opts)
# Don't get the default configuration file
cfg.CONF(project='neutron', default_config_files=[])
config.setup_logging()
utils.log_opt_values(LOG)
proxy = ProxyDaemon(cfg.CONF.pid_file,
cfg.CONF.metadata_port,
network_id=cfg.CONF.network_id,
router_id=cfg.CONF.router_id)
if cfg.CONF.daemonize:
proxy.start()
else:
proxy.run()
| true | true |
1c2f9d0764cf97e8baaa01bd411ffddb6120b77e | 994 | py | Python | plugins/wigle/komand_wigle/actions/__init__.py | lukaszlaszuk/insightconnect-plugins | 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892 | [
"MIT"
] | 46 | 2019-06-05T20:47:58.000Z | 2022-03-29T10:18:01.000Z | plugins/wigle/komand_wigle/actions/__init__.py | lukaszlaszuk/insightconnect-plugins | 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892 | [
"MIT"
] | 386 | 2019-06-07T20:20:39.000Z | 2022-03-30T17:35:01.000Z | plugins/wigle/komand_wigle/actions/__init__.py | lukaszlaszuk/insightconnect-plugins | 8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892 | [
"MIT"
] | 43 | 2019-07-09T14:13:58.000Z | 2022-03-28T12:04:46.000Z | # GENERATED BY KOMAND SDK - DO NOT EDIT
from .add_comment.action import AddComment
from .get_country_statistics.action import GetCountryStatistics
from .get_files_status.action import GetFilesStatus
from .get_general_statistics.action import GetGeneralStatistics
from .get_group_statistics.action import GetGroupStatistics
from .get_kml.action import GetKml
from .get_metadata.action import GetMetadata
from .get_network_details.action import GetNetworkDetails
from .get_network_geocode.action import GetNetworkGeocode
from .get_region_statistics.action import GetRegionStatistics
from .get_site_statistics.action import GetSiteStatistics
from .get_user_profile.action import GetUserProfile
from .get_user_standings.action import GetUserStandings
from .get_user_statistics.action import GetUserStatistics
from .get_user_tokens.action import GetUserTokens
from .search_cells.action import SearchCells
from .search_networks.action import SearchNetworks
from .upload_file.action import UploadFile
| 49.7 | 63 | 0.880282 |
from .add_comment.action import AddComment
from .get_country_statistics.action import GetCountryStatistics
from .get_files_status.action import GetFilesStatus
from .get_general_statistics.action import GetGeneralStatistics
from .get_group_statistics.action import GetGroupStatistics
from .get_kml.action import GetKml
from .get_metadata.action import GetMetadata
from .get_network_details.action import GetNetworkDetails
from .get_network_geocode.action import GetNetworkGeocode
from .get_region_statistics.action import GetRegionStatistics
from .get_site_statistics.action import GetSiteStatistics
from .get_user_profile.action import GetUserProfile
from .get_user_standings.action import GetUserStandings
from .get_user_statistics.action import GetUserStatistics
from .get_user_tokens.action import GetUserTokens
from .search_cells.action import SearchCells
from .search_networks.action import SearchNetworks
from .upload_file.action import UploadFile
| true | true |
1c2f9d0fc489ea0d1d13f99334354b6620938490 | 15,993 | py | Python | models/lstm_model.py | HLTCHKUST/emotion-dialogue | 0d58b339134dd9a2f386948ae474b270a77370f9 | [
"MIT"
] | 40 | 2019-04-29T09:17:48.000Z | 2021-11-19T06:32:02.000Z | models/lstm_model.py | HLTCHKUST/emotion-dialogue | 0d58b339134dd9a2f386948ae474b270a77370f9 | [
"MIT"
] | null | null | null | models/lstm_model.py | HLTCHKUST/emotion-dialogue | 0d58b339134dd9a2f386948ae474b270a77370f9 | [
"MIT"
] | 4 | 2019-09-01T10:00:56.000Z | 2020-03-08T16:00:22.000Z | import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.nn.init as I
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
from torch.autograd import Variable
import numpy as np
import math
from models.common_layer import Attention
from utils import constant
from models.common_layer import MultiHeadAttention, BertPooler
from utils.features import share_embedding
class LstmModel(nn.Module):
    """
    An LSTM sentence classifier over 4 emotion labels.
    Inputs:
        X: (batch_size, seq_len) padded token ids
        X_lengths: (batch_size) true lengths, sorted descending
    Outputs: (logits, softmax(logits)), each (batch_size, 4), or the
    pre-classifier features when extract_feature is True.
    """
    def __init__(self, vocab, embedding_size, hidden_size, num_layers, max_length=700, input_dropout=0.0, layer_dropout=0.0, is_bidirectional=False, attentive=False):
        super(LstmModel, self).__init__()
        self.embedding_dim = embedding_size
        self.hidden_size = hidden_size
        self.input_dropout = nn.Dropout(input_dropout)
        self.layer_dropout = nn.Dropout(layer_dropout)
        self.vocab = vocab
        # Embedding table shared with (possibly frozen) pretrained vectors
        # per the global config; removed the dead nn.Embedding alternative.
        self.emb = share_embedding(self.vocab, constant.pretrained, fix_pretrain=constant.fix_pretrain)
        self.lstm = nn.LSTM(embedding_size, hidden_size=hidden_size, num_layers=num_layers, bidirectional=is_bidirectional)
        self.W = nn.Linear(hidden_size*2 if is_bidirectional else hidden_size, 4)  # 4 emotions
        self.softmax = nn.Softmax(dim=1)
        self.num_layers = num_layers
        self.is_bidirectional = is_bidirectional

    def forward(self, X, X_lengths, extract_feature=False):
        """
        Forward pass.
        if extract_feature is True: returns the final features
        (batch_size, dim) before the output layer
        else: returns the logits and softmax of the logits
        """
        X = self.emb(X)
        X = self.input_dropout(X)
        X = X.transpose(0, 1)  # (len, batch_size, dim)
        # Packing makes the LSTM skip padded timesteps, so the returned
        # states correspond to each sequence's last real token.
        packed_input = torch.nn.utils.rnn.pack_padded_sequence(X, X_lengths, batch_first=False)
        _, hidden = self.lstm(packed_input)
        # NOTE(review): `hidden` is the (h_n, c_n) tuple, so hidden[-1]
        # selects the *cell* state c_n rather than h_n — confirm intended.
        last_hidden = hidden[-1]  # (num_layers*num_directions, batch_size, dim)
        if len(last_hidden.size()) == 3:
            if self.is_bidirectional:
                # NOTE(review): indices 0 and 1 are the two directions of
                # the first layer when num_layers > 1 — confirm intended.
                last_hidden = torch.cat((last_hidden[0].squeeze(0), last_hidden[1].squeeze(0)), dim=1)
            else:
                last_hidden = last_hidden.squeeze(0)
        if extract_feature:
            return last_hidden
        last_hidden = self.layer_dropout(last_hidden)
        a_hat = self.W(last_hidden)  # (batch_size, 4)
        return a_hat, self.softmax(a_hat)
class HLstmModel(nn.Module):
"""
A Hierarchical LSTM model with self-attention.
Inputs:
X_1: (batch_size, seq_len),
X_2: (batch_size, seq_len),
X_3: (batch_size, seq_len),
X_lengths: (batch_size)
Outputs: (batch_size, labels)
"""
def __init__(self, vocab, embedding_size, hidden_size, num_layers, max_length=700, input_dropout=0.0, layer_dropout=0.0, is_bidirectional=False,
attentive=False, multiattentive=False, num_heads=5, total_key_depth=500, total_value_depth=1000, use_mask = True, sum_tensor=False, super_ratio=0.0, double_supervision=False,
context=False,pool="",pool_stride=2, pool_kernel=3):
super(HLstmModel, self).__init__()
self.embedding_dim = embedding_size
self.hidden_size = hidden_size
self.sum_tensor = sum_tensor
self.input_dropout = nn.Dropout(input_dropout)
self.layer_dropout = nn.Dropout(layer_dropout)
self.vocab = vocab
self.pooler = BertPooler(hidden_size*2 if is_bidirectional else hidden_size)
# self.emb = nn.Embedding(num_vocab, embedding_size, padding_idx=0)
self.emb = share_embedding(self.vocab,constant.pretrained, constant.fix_pretrain)
self.context = context
if self.context:
self.context_lstm = nn.LSTM(embedding_size, hidden_size=hidden_size, num_layers=num_layers, bidirectional=is_bidirectional)
self.lstm = nn.LSTM(embedding_size, hidden_size=hidden_size, num_layers=num_layers, bidirectional=is_bidirectional)
self.pool = pool
if self.pool == "avg":
kernel_size = pool_kernel
stride = pool_stride
padding = 0
l_in = hidden_size
if is_bidirectional:
l_in *= 2
self.pooling_layer = nn.AvgPool1d(kernel_size, stride=stride, padding=padding)
l_out = int(((l_in + 2 * padding - kernel_size) / stride) + 1)
self.lstm_layer = nn.LSTM(l_out, hidden_size=hidden_size, num_layers=num_layers, bidirectional=is_bidirectional)
elif self.pool == "max":
kernel_size = pool_kernel
stride = pool_stride
padding = 0
dilation = 1
l_in = hidden_size
if is_bidirectional:
l_in *= 2
self.pooling_layer = nn.MaxPool1d(kernel_size, stride=stride, padding=padding, dilation=dilation)
l_out = int(((l_in + 2 * padding - dilation * (kernel_size - 1) - 1) / stride) + 1)
self.lstm_layer = nn.LSTM(l_out, hidden_size=hidden_size, num_layers=num_layers, bidirectional=is_bidirectional)
elif self.pool == "globalmax" or self.pool == "globalavg":
self.lstm_layer = nn.LSTM(hidden_size*2 if is_bidirectional else hidden_size, hidden_size=hidden_size, num_layers=num_layers, bidirectional=is_bidirectional)
else:
self.lstm_layer = nn.LSTM(hidden_size*2 if is_bidirectional else hidden_size, hidden_size=hidden_size, num_layers=num_layers, bidirectional=is_bidirectional)
self.W = nn.Linear(hidden_size*2 if is_bidirectional else hidden_size,4) ## 4 emotion
self.softmax = nn.Softmax(dim=1)
self.num_layers = num_layers
self.is_bidirectional = is_bidirectional
self.attentive =attentive
self.multiattentive = multiattentive
self.use_mask = use_mask
if self.multiattentive:
self.attentive = True
if self.attentive:
if self.multiattentive:
self.word_attention = MultiHeadAttention(hidden_size*2 if is_bidirectional else hidden_size, total_key_depth, total_value_depth,
hidden_size*2 if is_bidirectional else hidden_size, num_heads)
self.sentences_attention = MultiHeadAttention(hidden_size*2 if is_bidirectional else hidden_size, total_key_depth, total_value_depth,
hidden_size*2 if is_bidirectional else hidden_size, num_heads)
else:
self.word_attention = Attention(hidden_size*2 if is_bidirectional else hidden_size)
self.sentences_attention = Attention(hidden_size*2 if is_bidirectional else hidden_size)
self.double_supervision = double_supervision
if double_supervision:
self.output_super = nn.Linear(hidden_size*3, 4) # lower layer supervision
def forward(self, X_1, X_2, X_3, X_1_lengths, X_2_lengths, X_3_lengths, extract_feature=False, cuda=True):
"""
Forward algorithm
if extract_feature is True: returns output of LSTM before output layer
else: returns the logits and softmax of logits
"""
if self.use_mask:
mask1 = X_1.data.eq(constant.PAD_idx).unsqueeze(1)
mask2 = X_2.data.eq(constant.PAD_idx).unsqueeze(1)
mask3 = X_3.data.eq(constant.PAD_idx).unsqueeze(1)
else:
mask1=mask2=mask3=None
X_1 = self.emb(X_1)
X_2 = self.emb(X_2)
X_3 = self.emb(X_3)
# sort X_2 and X_3
X_1_lengths = torch.LongTensor(X_1_lengths)
sorted_X_2_lengths, perm_index_x_2 = torch.sort(torch.LongTensor(X_2_lengths), descending=True)
sorted_X_3_lengths, perm_index_x_3 = torch.sort(torch.LongTensor(X_3_lengths), descending=True)
if cuda:
X_1_lengths = X_1_lengths.cuda()
sorted_X_2_lengths = sorted_X_2_lengths.cuda()
sorted_X_3_lengths = sorted_X_3_lengths.cuda()
perm_index_x_2 = perm_index_x_2.cuda()
perm_index_x_3 = perm_index_x_3.cuda()
sorted_X_2 = X_2[perm_index_x_2]
sorted_X_3 = X_3[perm_index_x_3]
X_1 = self.input_dropout(X_1).transpose(0, 1) # (len, batch_size, dim)
sorted_X_2 = self.input_dropout(sorted_X_2).transpose(0, 1) # (len, batch_size, dim)
sorted_X_3 = self.input_dropout(sorted_X_3).transpose(0, 1) # (len, batch_size, dim)
# returns hidden state of all timesteps as well as hidden state at last timestep
# should take last non zero hidden state, not last timestamp (may have zeros), don't take output
packed_input_1 = torch.nn.utils.rnn.pack_padded_sequence(X_1, X_1_lengths, batch_first=False)
packed_input_2 = torch.nn.utils.rnn.pack_padded_sequence(sorted_X_2, sorted_X_2_lengths, batch_first=False)
packed_input_3 = torch.nn.utils.rnn.pack_padded_sequence(sorted_X_3, sorted_X_3_lengths, batch_first=False)
if self.context:
lstm_out1, hidden_1 = self.context_lstm(packed_input_1) # hidden: (len, batch_size, dim)
lstm_out2, hidden_2 = self.context_lstm(packed_input_2) # hidden: (len, batch_size, dim)
else:
lstm_out1, hidden_1 = self.lstm(packed_input_1) # hidden: (len, batch_size, dim)
lstm_out2, hidden_2 = self.lstm(packed_input_2) # hidden: (len, batch_size, dim)
lstm_out3, hidden_3 = self.lstm(packed_input_3) # hidden: (len, batch_size, dim)
if self.attentive:
padded_lstm_out1, _ = nn.utils.rnn.pad_packed_sequence(lstm_out1)
padded_lstm_out2, _ = nn.utils.rnn.pad_packed_sequence(lstm_out2)
padded_lstm_out3, _ = nn.utils.rnn.pad_packed_sequence(lstm_out3)
padded_lstm_out1 = padded_lstm_out1.permute(1,0,2) #(batch, seq_len, num_directions * hidden_size)
padded_lstm_out2 = padded_lstm_out2.permute(1,0,2)
padded_lstm_out3 = padded_lstm_out3.permute(1,0,2)
if self.multiattentive:
last_hidden_1 = self.word_attention(padded_lstm_out1,padded_lstm_out1,padded_lstm_out1,src_mask=mask1)
last_hidden_2 = self.word_attention(padded_lstm_out2,padded_lstm_out2,padded_lstm_out2,src_mask=mask2)
last_hidden_3 = self.word_attention(padded_lstm_out3,padded_lstm_out3,padded_lstm_out3,src_mask=mask3)
if self.sum_tensor:
last_hidden_1 = torch.sum(last_hidden_1, dim=1)
last_hidden_2 = torch.sum(last_hidden_2, dim=1)
last_hidden_3 = torch.sum(last_hidden_3, dim=1)
else:
last_hidden_1 = self.pooler(last_hidden_1)
last_hidden_2 = self.pooler(last_hidden_2)
last_hidden_3 = self.pooler(last_hidden_3)
else:
last_hidden_1 = self.word_attention(padded_lstm_out1, X_1_lengths) #(batch, num_directions * hidden_size)
last_hidden_2 = self.word_attention(padded_lstm_out2, sorted_X_2_lengths)
last_hidden_3 = self.word_attention(padded_lstm_out3, sorted_X_2_lengths)
else:
last_hidden_1 = hidden_1[-1] # (num_direction * num_layer, batch_size, dim)
last_hidden_2 = hidden_2[-1] # (num_direction * num_layer, batch_size, dim)
last_hidden_3 = hidden_3[-1] # (num_direction * num_layer, batch_size, dim)
batch_size = last_hidden_1.size(1)
dim = last_hidden_1.size(2)
last_hidden_1 = last_hidden_1.view(self.num_layers, 2 if self.is_bidirectional else 1, batch_size, dim)[-1]
last_hidden_2 = last_hidden_2.view(self.num_layers, 2 if self.is_bidirectional else 1, batch_size, dim)[-1]
last_hidden_3 = last_hidden_3.view(self.num_layers, 2 if self.is_bidirectional else 1, batch_size, dim)[-1]
if self.is_bidirectional:
last_hidden_1 = torch.cat((last_hidden_1[0].squeeze(0), last_hidden_1[1].squeeze(0)), dim=1)
last_hidden_2 = torch.cat((last_hidden_2[0].squeeze(0), last_hidden_2[1].squeeze(0)), dim=1)
last_hidden_3 = torch.cat((last_hidden_3[0].squeeze(0), last_hidden_3[1].squeeze(0)), dim=1)
else:
last_hidden_1 = last_hidden_1.squeeze(0) # (batch_size, dim)
last_hidden_2 = last_hidden_2.squeeze(0) # (batch_size, dim)
last_hidden_3 = last_hidden_3.squeeze(0) # (batch_size, dim)
# restore the order
unsorted_last_hidden_2 = last_hidden_2.new(*last_hidden_2.size())
unsorted_last_hidden_2.scatter_(0, perm_index_x_2.unsqueeze(1).expand(last_hidden_2.size(0), last_hidden_2.size(1)), last_hidden_2)
unsorted_last_hidden_3 = last_hidden_3.new(*last_hidden_3.size())
unsorted_last_hidden_3.scatter_(0, perm_index_x_3.unsqueeze(1).expand(last_hidden_3.size(0), last_hidden_3.size(1)), last_hidden_3)
last_hidden = torch.cat((last_hidden_1.unsqueeze(0), unsorted_last_hidden_2.unsqueeze(0), unsorted_last_hidden_3.unsqueeze(0)), dim=0) # (3, batch_size, dim)
concatenated_hidden = last_hidden.transpose(0, 1) # (batch_size, 3, dim)
if self.pool == "avg" or self.pool == "max":
last_hidden = self.pooling_layer(last_hidden)
elif self.pool == "globalavg" or self.pool == "globalmax":
context_hidden = last_hidden[:2] # (2, batch_size, dim)
if self.pool == "globalavg":
context_hidden = context_hidden.mean(dim=0).unsqueeze(0) # (1, batch_size, dim)
else:
context_hidden = context_hidden.max(dim=0)[0].unsqueeze(0) # (1, batch_size, dim)
turn3_hidden = last_hidden[2].unsqueeze(0) # (1, batch_size, dim)
last_hidden = torch.cat((context_hidden, turn3_hidden), dim=0)
concatenated_hidden = concatenated_hidden.contiguous().view(concatenated_hidden.size(0), -1) # (batch_size, 3 * dim)
lstm_layer_out, hidden = self.lstm_layer(last_hidden)
if self.attentive:
lstm_layer_out = lstm_layer_out.permute(1,0,2) #(batch, seq_len, num_directions * hidden_size)
if self.multiattentive:
last_hidden = torch.sum(self.sentences_attention(lstm_layer_out, lstm_layer_out, lstm_layer_out),dim=1)
else:
last_hidden = self.sentences_attention(lstm_layer_out) #(batch, num_directions * hidden_size)
else:
last_hidden = hidden[-1] # (num_direction * num_layers, batch_size, dim)
batch_size = last_hidden.size(1)
dim = last_hidden.size(2)
last_hidden = last_hidden.view(self.num_layers, 2 if self.is_bidirectional else 1, batch_size, dim)[-1]
if self.is_bidirectional:
last_hidden = torch.cat((last_hidden[0].squeeze(0), last_hidden[1].squeeze(0)), dim=1)
else:
last_hidden = last_hidden.squeeze(0)
if extract_feature: # for SVM
if self.double_supervision:
return torch.cat((concatenated_hidden, last_hidden), dim=1)
else:
return last_hidden
last_hidden = self.layer_dropout(last_hidden)
a_hat = self.W(last_hidden) # (batch_size, 4)
if self.double_supervision:
additional_logits = self.output_super(concatenated_hidden) # (batch_size, 4)
return a_hat, self.softmax(a_hat), additional_logits, self.softmax(additional_logits)
return a_hat, self.softmax(a_hat)
| 52.264706 | 179 | 0.667792 | import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.nn.init as I
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
from torch.autograd import Variable
import numpy as np
import math
from models.common_layer import Attention
from utils import constant
from models.common_layer import MultiHeadAttention, BertPooler
from utils.features import share_embedding
class LstmModel(nn.Module):
def __init__(self, vocab, embedding_size, hidden_size, num_layers, max_length=700, input_dropout=0.0, layer_dropout=0.0, is_bidirectional=False, attentive=False):
super(LstmModel, self).__init__()
self.embedding_dim = embedding_size
self.hidden_size = hidden_size
self.input_dropout = nn.Dropout(input_dropout)
self.layer_dropout = nn.Dropout(layer_dropout)
self.vocab = vocab
self.emb = share_embedding(self.vocab,constant.pretrained, fix_pretrain=constant.fix_pretrain)
self.lstm = nn.LSTM(embedding_size, hidden_size=hidden_size, num_layers=num_layers, bidirectional=is_bidirectional)
self.W = nn.Linear(hidden_size*2 if is_bidirectional else hidden_size,4) lf.softmax = nn.Softmax(dim=1)
self.num_layers = num_layers
self.is_bidirectional = is_bidirectional
def forward(self, X, X_lengths, extract_feature=False):
X = self.emb(X)
X = self.input_dropout(X)
X = X.transpose(0, 1)
packed_input = torch.nn.utils.rnn.pack_padded_sequence(X, X_lengths, batch_first=False)
_, hidden = self.lstm(packed_input)
# output, _ = torch.nn.utils.rnn.pad_packed_sequence(packed_output, batch_first=False)
last_hidden = hidden[-1] # (num_direction, batch_size, dim)
if len(last_hidden.size()) == 3:
last_hidden_1 = last_hidden[-1]
if self.is_bidirectional:
last_hidden = torch.cat((last_hidden[0].squeeze(0), last_hidden[1].squeeze(0)), dim=1)
else:
last_hidden = last_hidden.squeeze(0)
if extract_feature:
return last_hidden
last_hidden = self.layer_dropout(last_hidden)
a_hat = self.W(last_hidden) # (batch_size, 4)
return a_hat, self.softmax(a_hat)
class HLstmModel(nn.Module):
def __init__(self, vocab, embedding_size, hidden_size, num_layers, max_length=700, input_dropout=0.0, layer_dropout=0.0, is_bidirectional=False,
attentive=False, multiattentive=False, num_heads=5, total_key_depth=500, total_value_depth=1000, use_mask = True, sum_tensor=False, super_ratio=0.0, double_supervision=False,
context=False,pool="",pool_stride=2, pool_kernel=3):
super(HLstmModel, self).__init__()
self.embedding_dim = embedding_size
self.hidden_size = hidden_size
self.sum_tensor = sum_tensor
self.input_dropout = nn.Dropout(input_dropout)
self.layer_dropout = nn.Dropout(layer_dropout)
self.vocab = vocab
self.pooler = BertPooler(hidden_size*2 if is_bidirectional else hidden_size)
# self.emb = nn.Embedding(num_vocab, embedding_size, padding_idx=0)
self.emb = share_embedding(self.vocab,constant.pretrained, constant.fix_pretrain)
self.context = context
if self.context:
self.context_lstm = nn.LSTM(embedding_size, hidden_size=hidden_size, num_layers=num_layers, bidirectional=is_bidirectional)
self.lstm = nn.LSTM(embedding_size, hidden_size=hidden_size, num_layers=num_layers, bidirectional=is_bidirectional)
self.pool = pool
if self.pool == "avg":
kernel_size = pool_kernel
stride = pool_stride
padding = 0
l_in = hidden_size
if is_bidirectional:
l_in *= 2
self.pooling_layer = nn.AvgPool1d(kernel_size, stride=stride, padding=padding)
l_out = int(((l_in + 2 * padding - kernel_size) / stride) + 1)
self.lstm_layer = nn.LSTM(l_out, hidden_size=hidden_size, num_layers=num_layers, bidirectional=is_bidirectional)
elif self.pool == "max":
kernel_size = pool_kernel
stride = pool_stride
padding = 0
dilation = 1
l_in = hidden_size
if is_bidirectional:
l_in *= 2
self.pooling_layer = nn.MaxPool1d(kernel_size, stride=stride, padding=padding, dilation=dilation)
l_out = int(((l_in + 2 * padding - dilation * (kernel_size - 1) - 1) / stride) + 1)
self.lstm_layer = nn.LSTM(l_out, hidden_size=hidden_size, num_layers=num_layers, bidirectional=is_bidirectional)
elif self.pool == "globalmax" or self.pool == "globalavg":
self.lstm_layer = nn.LSTM(hidden_size*2 if is_bidirectional else hidden_size, hidden_size=hidden_size, num_layers=num_layers, bidirectional=is_bidirectional)
else:
self.lstm_layer = nn.LSTM(hidden_size*2 if is_bidirectional else hidden_size, hidden_size=hidden_size, num_layers=num_layers, bidirectional=is_bidirectional)
self.W = nn.Linear(hidden_size*2 if is_bidirectional else hidden_size,4) ## 4 emotion
self.softmax = nn.Softmax(dim=1)
self.num_layers = num_layers
self.is_bidirectional = is_bidirectional
self.attentive =attentive
self.multiattentive = multiattentive
self.use_mask = use_mask
if self.multiattentive:
self.attentive = True
if self.attentive:
if self.multiattentive:
self.word_attention = MultiHeadAttention(hidden_size*2 if is_bidirectional else hidden_size, total_key_depth, total_value_depth,
hidden_size*2 if is_bidirectional else hidden_size, num_heads)
self.sentences_attention = MultiHeadAttention(hidden_size*2 if is_bidirectional else hidden_size, total_key_depth, total_value_depth,
hidden_size*2 if is_bidirectional else hidden_size, num_heads)
else:
self.word_attention = Attention(hidden_size*2 if is_bidirectional else hidden_size)
self.sentences_attention = Attention(hidden_size*2 if is_bidirectional else hidden_size)
self.double_supervision = double_supervision
if double_supervision:
self.output_super = nn.Linear(hidden_size*3, 4) # lower layer supervision
def forward(self, X_1, X_2, X_3, X_1_lengths, X_2_lengths, X_3_lengths, extract_feature=False, cuda=True):
if self.use_mask:
mask1 = X_1.data.eq(constant.PAD_idx).unsqueeze(1)
mask2 = X_2.data.eq(constant.PAD_idx).unsqueeze(1)
mask3 = X_3.data.eq(constant.PAD_idx).unsqueeze(1)
else:
mask1=mask2=mask3=None
X_1 = self.emb(X_1)
X_2 = self.emb(X_2)
X_3 = self.emb(X_3)
# sort X_2 and X_3
X_1_lengths = torch.LongTensor(X_1_lengths)
sorted_X_2_lengths, perm_index_x_2 = torch.sort(torch.LongTensor(X_2_lengths), descending=True)
sorted_X_3_lengths, perm_index_x_3 = torch.sort(torch.LongTensor(X_3_lengths), descending=True)
if cuda:
X_1_lengths = X_1_lengths.cuda()
sorted_X_2_lengths = sorted_X_2_lengths.cuda()
sorted_X_3_lengths = sorted_X_3_lengths.cuda()
perm_index_x_2 = perm_index_x_2.cuda()
perm_index_x_3 = perm_index_x_3.cuda()
sorted_X_2 = X_2[perm_index_x_2]
sorted_X_3 = X_3[perm_index_x_3]
X_1 = self.input_dropout(X_1).transpose(0, 1) # (len, batch_size, dim)
sorted_X_2 = self.input_dropout(sorted_X_2).transpose(0, 1) # (len, batch_size, dim)
sorted_X_3 = self.input_dropout(sorted_X_3).transpose(0, 1) # (len, batch_size, dim)
# returns hidden state of all timesteps as well as hidden state at last timestep
# should take last non zero hidden state, not last timestamp (may have zeros), don't take output
packed_input_1 = torch.nn.utils.rnn.pack_padded_sequence(X_1, X_1_lengths, batch_first=False)
packed_input_2 = torch.nn.utils.rnn.pack_padded_sequence(sorted_X_2, sorted_X_2_lengths, batch_first=False)
packed_input_3 = torch.nn.utils.rnn.pack_padded_sequence(sorted_X_3, sorted_X_3_lengths, batch_first=False)
if self.context:
lstm_out1, hidden_1 = self.context_lstm(packed_input_1)
lstm_out2, hidden_2 = self.context_lstm(packed_input_2)
else:
lstm_out1, hidden_1 = self.lstm(packed_input_1)
lstm_out2, hidden_2 = self.lstm(packed_input_2)
lstm_out3, hidden_3 = self.lstm(packed_input_3)
if self.attentive:
padded_lstm_out1, _ = nn.utils.rnn.pad_packed_sequence(lstm_out1)
padded_lstm_out2, _ = nn.utils.rnn.pad_packed_sequence(lstm_out2)
padded_lstm_out3, _ = nn.utils.rnn.pad_packed_sequence(lstm_out3)
padded_lstm_out1 = padded_lstm_out1.permute(1,0,2)
padded_lstm_out2 = padded_lstm_out2.permute(1,0,2)
padded_lstm_out3 = padded_lstm_out3.permute(1,0,2)
if self.multiattentive:
last_hidden_1 = self.word_attention(padded_lstm_out1,padded_lstm_out1,padded_lstm_out1,src_mask=mask1)
last_hidden_2 = self.word_attention(padded_lstm_out2,padded_lstm_out2,padded_lstm_out2,src_mask=mask2)
last_hidden_3 = self.word_attention(padded_lstm_out3,padded_lstm_out3,padded_lstm_out3,src_mask=mask3)
if self.sum_tensor:
last_hidden_1 = torch.sum(last_hidden_1, dim=1)
last_hidden_2 = torch.sum(last_hidden_2, dim=1)
last_hidden_3 = torch.sum(last_hidden_3, dim=1)
else:
last_hidden_1 = self.pooler(last_hidden_1)
last_hidden_2 = self.pooler(last_hidden_2)
last_hidden_3 = self.pooler(last_hidden_3)
else:
last_hidden_1 = self.word_attention(padded_lstm_out1, X_1_lengths)
last_hidden_2 = self.word_attention(padded_lstm_out2, sorted_X_2_lengths)
last_hidden_3 = self.word_attention(padded_lstm_out3, sorted_X_2_lengths)
else:
last_hidden_1 = hidden_1[-1]
last_hidden_2 = hidden_2[-1]
last_hidden_3 = hidden_3[-1]
batch_size = last_hidden_1.size(1)
dim = last_hidden_1.size(2)
last_hidden_1 = last_hidden_1.view(self.num_layers, 2 if self.is_bidirectional else 1, batch_size, dim)[-1]
last_hidden_2 = last_hidden_2.view(self.num_layers, 2 if self.is_bidirectional else 1, batch_size, dim)[-1]
last_hidden_3 = last_hidden_3.view(self.num_layers, 2 if self.is_bidirectional else 1, batch_size, dim)[-1]
if self.is_bidirectional:
last_hidden_1 = torch.cat((last_hidden_1[0].squeeze(0), last_hidden_1[1].squeeze(0)), dim=1)
last_hidden_2 = torch.cat((last_hidden_2[0].squeeze(0), last_hidden_2[1].squeeze(0)), dim=1)
last_hidden_3 = torch.cat((last_hidden_3[0].squeeze(0), last_hidden_3[1].squeeze(0)), dim=1)
else:
last_hidden_1 = last_hidden_1.squeeze(0)
last_hidden_2 = last_hidden_2.squeeze(0)
last_hidden_3 = last_hidden_3.squeeze(0)
unsorted_last_hidden_2 = last_hidden_2.new(*last_hidden_2.size())
unsorted_last_hidden_2.scatter_(0, perm_index_x_2.unsqueeze(1).expand(last_hidden_2.size(0), last_hidden_2.size(1)), last_hidden_2)
unsorted_last_hidden_3 = last_hidden_3.new(*last_hidden_3.size())
unsorted_last_hidden_3.scatter_(0, perm_index_x_3.unsqueeze(1).expand(last_hidden_3.size(0), last_hidden_3.size(1)), last_hidden_3)
last_hidden = torch.cat((last_hidden_1.unsqueeze(0), unsorted_last_hidden_2.unsqueeze(0), unsorted_last_hidden_3.unsqueeze(0)), dim=0)
concatenated_hidden = last_hidden.transpose(0, 1)
if self.pool == "avg" or self.pool == "max":
last_hidden = self.pooling_layer(last_hidden)
elif self.pool == "globalavg" or self.pool == "globalmax":
context_hidden = last_hidden[:2]
if self.pool == "globalavg":
context_hidden = context_hidden.mean(dim=0).unsqueeze(0)
else:
context_hidden = context_hidden.max(dim=0)[0].unsqueeze(0)
turn3_hidden = last_hidden[2].unsqueeze(0)
last_hidden = torch.cat((context_hidden, turn3_hidden), dim=0)
concatenated_hidden = concatenated_hidden.contiguous().view(concatenated_hidden.size(0), -1)
lstm_layer_out, hidden = self.lstm_layer(last_hidden)
if self.attentive:
lstm_layer_out = lstm_layer_out.permute(1,0,2)
if self.multiattentive:
last_hidden = torch.sum(self.sentences_attention(lstm_layer_out, lstm_layer_out, lstm_layer_out),dim=1)
else:
last_hidden = self.sentences_attention(lstm_layer_out)
else:
last_hidden = hidden[-1]
batch_size = last_hidden.size(1)
dim = last_hidden.size(2)
last_hidden = last_hidden.view(self.num_layers, 2 if self.is_bidirectional else 1, batch_size, dim)[-1]
if self.is_bidirectional:
last_hidden = torch.cat((last_hidden[0].squeeze(0), last_hidden[1].squeeze(0)), dim=1)
else:
last_hidden = last_hidden.squeeze(0)
if extract_feature:
if self.double_supervision:
return torch.cat((concatenated_hidden, last_hidden), dim=1)
else:
return last_hidden
last_hidden = self.layer_dropout(last_hidden)
a_hat = self.W(last_hidden)
if self.double_supervision:
additional_logits = self.output_super(concatenated_hidden)
return a_hat, self.softmax(a_hat), additional_logits, self.softmax(additional_logits)
return a_hat, self.softmax(a_hat)
| true | true |
1c2f9d11fb3cb06f9f3a510b0c9d7f611974d2a1 | 925 | py | Python | extended_choices/__init__.py | art1415926535/django-extended-choices | bb310c5da4d53685c69173541172e4b813a6afb2 | [
"BSD-3-Clause"
] | 60 | 2015-05-24T10:38:13.000Z | 2022-03-13T12:16:16.000Z | extended_choices/__init__.py | art1415926535/django-extended-choices | bb310c5da4d53685c69173541172e4b813a6afb2 | [
"BSD-3-Clause"
] | 28 | 2015-04-02T22:19:44.000Z | 2021-03-03T08:58:33.000Z | extended_choices/__init__.py | art1415926535/django-extended-choices | bb310c5da4d53685c69173541172e4b813a6afb2 | [
"BSD-3-Clause"
] | 12 | 2015-08-25T06:21:44.000Z | 2019-12-10T01:39:48.000Z | """Little helper application to improve django choices (for fields)"""
from __future__ import unicode_literals
import pkg_resources
import six
from os import path
from setuptools.config import read_configuration
from .choices import Choices, OrderedChoices, AutoDisplayChoices, AutoChoices # noqa: F401
def _extract_version(package_name):
    """Return the version string for *package_name*.

    Uses the installed distribution metadata when the package is installed;
    otherwise (running from a source checkout) falls back to the ``version``
    entry of the ``setup.cfg`` sitting next to the package directory.
    """
    try:
        dist = pkg_resources.get_distribution(package_name)
    except pkg_resources.DistributionNotFound:
        # Not installed: we must be running from source, read setup.cfg.
        cfg_path = path.join(path.dirname(__file__), '..', 'setup.cfg')
        return read_configuration(cfg_path)['metadata']['version']
    return dist.version
# Full version string, e.g. "1.3.3" (unicode on both py2 and py3 via six).
EXACT_VERSION = six.text_type(_extract_version('django_extended_choices'))
# Tuple of the numeric dotted parts, e.g. (1, 3, 3); non-numeric segments
# such as "dev" or "rc1" are dropped by the isnumeric() filter.
VERSION = tuple(int(part) for part in EXACT_VERSION.split('.') if part.isnumeric())
| 34.259259 | 91 | 0.731892 | from __future__ import unicode_literals
import pkg_resources
import six
from os import path
from setuptools.config import read_configuration
from .choices import Choices, OrderedChoices, AutoDisplayChoices, AutoChoices
def _extract_version(package_name):
try:
version = pkg_resources.get_distribution(package_name).version
except pkg_resources.DistributionNotFound:
_conf = read_configuration(path.join(
path.dirname(__file__), '..', 'setup.cfg')
)
version = _conf['metadata']['version']
return version
EXACT_VERSION = six.text_type(_extract_version('django_extended_choices'))
VERSION = tuple(int(part) for part in EXACT_VERSION.split('.') if part.isnumeric())
| true | true |
1c2fa0ff5b9381182b654ca980c8c114ee61e659 | 5,888 | py | Python | frappe-bench/env/lib/python2.7/site-packages/num2words/lang_DE.py | ibrahmm22/library-management | b88a2129a5a2e96ce1f945ec8ba99a0b63b8c506 | [
"MIT"
] | null | null | null | frappe-bench/env/lib/python2.7/site-packages/num2words/lang_DE.py | ibrahmm22/library-management | b88a2129a5a2e96ce1f945ec8ba99a0b63b8c506 | [
"MIT"
] | null | null | null | frappe-bench/env/lib/python2.7/site-packages/num2words/lang_DE.py | ibrahmm22/library-management | b88a2129a5a2e96ce1f945ec8ba99a0b63b8c506 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2003, Taro Ogawa. All Rights Reserved.
# Copyright (c) 2013, Savoir-faire Linux inc. All Rights Reserved.
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA
from __future__ import unicode_literals, print_function
from .lang_EU import Num2Word_EU
class Num2Word_DE(Num2Word_EU):
    """German number-to-words converter (long scale: Million/Milliarde)."""

    def set_high_numwords(self, high):
        # Each stem in `high` yields two long-scale card words six orders of
        # magnitude apart: 10**n -> "<stem>illiarde" (e.g. Milliarde) and
        # 10**(n-3) -> "<stem>illion" (e.g. Million).
        max = 3 + 6*len(high)
        for word, n in zip(high, range(max, 3, -6)):
            self.cards[10**n] = word + "illiarde"
            self.cards[10**(n-3)] = word + "illion"

    def setup(self):
        """Install the German word tables, error messages and ordinal rules."""
        self.negword = "minus "
        self.pointword = "Komma"
        self.errmsg_floatord = "Die Gleitkommazahl %s kann nicht in eine Ordnungszahl konvertiert werden."  # "Cannot treat float %s as ordinal."
        self.errmsg_nonnum = "Nur Zahlen (type(%s)) können in Wörter konvertiert werden."  # "type(((type(%s)) ) not in [long, int, float]"
        self.errmsg_negord = "Die negative Zahl %s kann nicht in eine Ordnungszahl konvertiert werden."  # "Cannot treat negative num %s as ordinal."
        self.errmsg_toobig = "Die Zahl %s muss kleiner als %s sein."  # "abs(%s) must be less than %s."
        self.exclude_title = []
        # Latin stems used to generate the high number words (billion range
        # and above) via the shared EU helper.
        lows = ["non", "okt", "sept", "sext", "quint", "quadr", "tr", "b", "m"]
        units = ["", "un", "duo", "tre", "quattuor", "quin", "sex", "sept",
                 "okto", "novem"]
        tens = ["dez", "vigint", "trigint", "quadragint", "quinquagint",
                "sexagint", "septuagint", "oktogint", "nonagint"]
        self.high_numwords = ["zent"]+self.gen_high_numwords(units, tens, lows)
        # Escapes: \xFC = ü, \xDF = ß, \xF6 = ö.
        self.mid_numwords = [(1000, "tausend"), (100, "hundert"),
                             (90, "neunzig"), (80, "achtzig"), (70, "siebzig"),
                             (60, "sechzig"), (50, "f\xFCnfzig"), (40, "vierzig"),
                             (30, "drei\xDFig")]
        self.low_numwords = ["zwanzig", "neunzehn", "achtzehn", "siebzehn",
                             "sechzehn", "f\xFCnfzehn", "vierzehn", "dreizehn",
                             "zw\xF6lf", "elf", "zehn", "neun", "acht", "sieben",
                             "sechs", "f\xFCnf", "vier", "drei", "zwei", "eins",
                             "null"]
        # Cardinal-suffix -> ordinal-stem substitutions applied in
        # to_ordinal(); "te" is appended afterwards (eins -> ers + te).
        self.ords = {"eins": "ers",
                     "drei": "drit",
                     "acht": "ach",
                     "sieben": "sieb",
                     "ig": "igs",
                     "ert": "erts",
                     "end": "ends",
                     "ion": "ions",
                     "nen": "nens",
                     "rde": "rdes",
                     "rdens": "rdens"} if False else {"eins": "ers",
                     "drei": "drit",
                     "acht": "ach",
                     "sieben": "sieb",
                     "ig": "igs",
                     "ert": "erts",
                     "end": "ends",
                     "ion": "ions",
                     "nen": "nens",
                     "rde": "rdes",
                     "rden": "rdens"}

    def merge(self, curr, next):
        """Combine two (word, value) pairs into one German compound.

        Handles dropping/renaming "eins" ("eine Million", "einundzwanzig"),
        the units-before-tens inversion joined by "und", plural "-n"/"-en"
        on Million/Milliarde, and whether the two values multiply (scaling)
        or add (concatenation).
        """
        ctext, cnum, ntext, nnum = curr + next
        if cnum == 1:
            if nnum < 10**6:
                return next
            ctext = "eine"
        if nnum > cnum:
            if nnum >= 10**6:
                if cnum > 1:
                    if ntext.endswith("e"):
                        ntext += "n"
                    else:
                        ntext += "en"
                ctext += " "
            val = cnum * nnum
        else:
            if nnum < 10 < cnum < 100:
                if nnum == 1:
                    ntext = "ein"
                ntext, ctext = ctext, ntext + "und"
            elif cnum >= 10**6:
                ctext += " "
            val = cnum + nnum
        word = ctext + ntext
        return (word, val)

    def to_ordinal(self, value):
        """Return the German ordinal word, e.g. 1 -> "erste"."""
        self.verify_ordinal(value)
        outword = self.to_cardinal(value)
        # Rewrite the cardinal's suffix per self.ords, then append "te".
        for key in self.ords:
            if outword.endswith(key):
                outword = outword[:len(outword) - len(key)] + self.ords[key]
                break
        return outword + "te"

    def to_ordinal_num(self, value):
        """Return the numeric ordinal form, e.g. 1 -> "1."."""
        self.verify_ordinal(value)
        return str(value) + "."

    def to_currency(self, val, longval=True, old=False):
        """Spell out a currency amount; old=True uses Mark/Pfennig."""
        if old:
            return self.to_splitnum(val, hightxt="mark/s", lowtxt="pfennig/e",
                                    jointxt="und",longval=longval)
        return super(Num2Word_DE, self).to_currency(val, jointxt="und",
                                                    longval=longval)

    def to_year(self, val, longval=True):
        """Spell a year, e.g. 1820 -> "achtzehnhundertzwanzig"."""
        # Years with a zero hundreds digit (e.g. 2001) read as plain cardinals.
        if not (val//100)%10:
            return self.to_cardinal(val)
        return self.to_splitnum(val, hightxt="hundert", longval=longval)
# Module-level singleton and convenience aliases used by the package's
# language dispatcher.
n2w = Num2Word_DE()
to_card = n2w.to_cardinal
to_ord = n2w.to_ordinal
to_ordnum = n2w.to_ordinal_num
def main():
    """Smoke-test the German converter by printing sample conversions."""
    sample_values = (
        1, 7, 8, 12, 17, 81, 91, 99, 100, 101, 102, 155,
        180, 300, 308, 832, 1000, 1001, 1061, 1100, 1500, 1701, 3000,
        8280, 8291, 150000, 500000, 3000000, 1000000, 2000001,
        1000000000, 2000000000,
        -21212121211221211111, -2.121212, -1.0000100,
        # extra spot checks, same order as the original ad-hoc calls
        3000000, 3000000000001, 3000000324566,
    )
    for value in sample_values:
        n2w.test(value)
    print(n2w.to_currency(112121))
    print(n2w.to_year(2000))
    print(n2w.to_year(1820))
    print(n2w.to_year(2001))


if __name__ == "__main__":
    main()
| 40.888889 | 148 | 0.544158 |
from __future__ import unicode_literals, print_function
from .lang_EU import Num2Word_EU
class Num2Word_DE(Num2Word_EU):
def set_high_numwords(self, high):
max = 3 + 6*len(high)
for word, n in zip(high, range(max, 3, -6)):
self.cards[10**n] = word + "illiarde"
self.cards[10**(n-3)] = word + "illion"
def setup(self):
self.negword = "minus "
self.pointword = "Komma"
self.errmsg_floatord = "Die Gleitkommazahl %s kann nicht in eine Ordnungszahl konvertiert werden."
self.errmsg_nonnum = "Nur Zahlen (type(%s)) können in Wörter konvertiert werden."
self.errmsg_negord = "Die negative Zahl %s kann nicht in eine Ordnungszahl konvertiert werden."
self.errmsg_toobig = "Die Zahl %s muss kleiner als %s sein."
self.exclude_title = []
lows = ["non", "okt", "sept", "sext", "quint", "quadr", "tr", "b", "m"]
units = ["", "un", "duo", "tre", "quattuor", "quin", "sex", "sept",
"okto", "novem"]
tens = ["dez", "vigint", "trigint", "quadragint", "quinquagint",
"sexagint", "septuagint", "oktogint", "nonagint"]
self.high_numwords = ["zent"]+self.gen_high_numwords(units, tens, lows)
self.mid_numwords = [(1000, "tausend"), (100, "hundert"),
(90, "neunzig"), (80, "achtzig"), (70, "siebzig"),
(60, "sechzig"), (50, "f\xFCnfzig"), (40, "vierzig"),
(30, "drei\xDFig")]
self.low_numwords = ["zwanzig", "neunzehn", "achtzehn", "siebzehn",
"sechzehn", "f\xFCnfzehn", "vierzehn", "dreizehn",
"zw\xF6lf", "elf", "zehn", "neun", "acht", "sieben",
"sechs", "f\xFCnf", "vier", "drei", "zwei", "eins",
"null"]
self.ords = {"eins": "ers",
"drei": "drit",
"acht": "ach",
"sieben": "sieb",
"ig": "igs",
"ert": "erts",
"end": "ends",
"ion": "ions",
"nen": "nens",
"rde": "rdes",
"rden": "rdens"}
def merge(self, curr, next):
ctext, cnum, ntext, nnum = curr + next
if cnum == 1:
if nnum < 10**6:
return next
ctext = "eine"
if nnum > cnum:
if nnum >= 10**6:
if cnum > 1:
if ntext.endswith("e"):
ntext += "n"
else:
ntext += "en"
ctext += " "
val = cnum * nnum
else:
if nnum < 10 < cnum < 100:
if nnum == 1:
ntext = "ein"
ntext, ctext = ctext, ntext + "und"
elif cnum >= 10**6:
ctext += " "
val = cnum + nnum
word = ctext + ntext
return (word, val)
def to_ordinal(self, value):
self.verify_ordinal(value)
outword = self.to_cardinal(value)
for key in self.ords:
if outword.endswith(key):
outword = outword[:len(outword) - len(key)] + self.ords[key]
break
return outword + "te"
def to_ordinal_num(self, value):
self.verify_ordinal(value)
return str(value) + "."
def to_currency(self, val, longval=True, old=False):
if old:
return self.to_splitnum(val, hightxt="mark/s", lowtxt="pfennig/e",
jointxt="und",longval=longval)
return super(Num2Word_DE, self).to_currency(val, jointxt="und",
longval=longval)
def to_year(self, val, longval=True):
if not (val//100)%10:
return self.to_cardinal(val)
return self.to_splitnum(val, hightxt="hundert", longval=longval)
n2w = Num2Word_DE()
to_card = n2w.to_cardinal
to_ord = n2w.to_ordinal
to_ordnum = n2w.to_ordinal_num
def main():
for val in [1, 7, 8, 12, 17, 81, 91, 99, 100, 101, 102, 155,
180, 300, 308, 832, 1000, 1001, 1061, 1100, 1500, 1701, 3000,
8280, 8291, 150000, 500000, 3000000, 1000000, 2000001, 1000000000, 2000000000,
-21212121211221211111, -2.121212, -1.0000100]:
n2w.test(val)
n2w.test(3000000)
n2w.test(3000000000001)
n2w.test(3000000324566)
print(n2w.to_currency(112121))
print(n2w.to_year(2000))
print(n2w.to_year(1820))
print(n2w.to_year(2001))
if __name__ == "__main__":
main()
| true | true |
1c2fa18af3497416319f5f949b16e202eafbf06f | 4,971 | py | Python | operators/azure-service-operator/python/pulumi_pulumi_kubernetes_crds_operators_azure_service_operator/azure/v1alpha1/PostgreSQLDatabase.py | pulumi/pulumi-kubernetes-crds | 372c4c0182f6b899af82d6edaad521aa14f22150 | [
"Apache-2.0"
] | null | null | null | operators/azure-service-operator/python/pulumi_pulumi_kubernetes_crds_operators_azure_service_operator/azure/v1alpha1/PostgreSQLDatabase.py | pulumi/pulumi-kubernetes-crds | 372c4c0182f6b899af82d6edaad521aa14f22150 | [
"Apache-2.0"
] | 2 | 2020-09-18T17:12:23.000Z | 2020-12-30T19:40:56.000Z | operators/azure-service-operator/python/pulumi_pulumi_kubernetes_crds_operators_azure_service_operator/azure/v1alpha1/PostgreSQLDatabase.py | pulumi/pulumi-kubernetes-crds | 372c4c0182f6b899af82d6edaad521aa14f22150 | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by crd2pulumi. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ... import meta_v1 as _meta_v1
from ._inputs import *
__all__ = ['PostgreSQLDatabase']
class PostgreSQLDatabase(pulumi.CustomResource):
    # NOTE: this class is generated by crd2pulumi (see file header); keep
    # hand edits to comments/docstrings only so they are easy to re-apply
    # after regeneration.
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 api_version: Optional[pulumi.Input[str]] = None,
                 kind: Optional[pulumi.Input[str]] = None,
                 metadata: Optional[pulumi.Input[pulumi.InputType['_meta_v1.ObjectMetaArgs']]] = None,
                 spec: Optional[pulumi.Input[pulumi.InputType['PostgreSQLDatabaseSpecArgs']]] = None,
                 status: Optional[pulumi.Input[pulumi.InputType['PostgreSQLDatabaseStatusArgs']]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """
        PostgreSQLDatabase is the Schema for the postgresqldatabases API
        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[pulumi.InputType['PostgreSQLDatabaseSpecArgs']] spec: PostgreSQLDatabaseSpec defines the desired state of PostgreSQLDatabase
        :param pulumi.Input[pulumi.InputType['PostgreSQLDatabaseStatusArgs']] status: ASOStatus (AzureServiceOperatorsStatus) defines the observed state of resource actions
        """
        # Remap the deprecated __name__/__opts__ keyword arguments onto the
        # current resource_name/opts parameters, with a warning.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the property bag from scratch.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            # The group/version/kind are fixed by the CRD this class was
            # generated from; the api_version/kind constructor arguments are
            # not consulted here.
            __props__['api_version'] = 'azure.microsoft.com/v1alpha1'
            __props__['kind'] = 'PostgreSQLDatabase'
            __props__['metadata'] = metadata
            __props__['spec'] = spec
            __props__['status'] = status
        super(PostgreSQLDatabase, __self__).__init__(
            'kubernetes:azure.microsoft.com/v1alpha1:PostgreSQLDatabase',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'PostgreSQLDatabase':
        """
        Get an existing PostgreSQLDatabase resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.
        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # No input properties are set: the engine hydrates the resource's
        # state from the live object identified by `id`.
        __props__ = dict()

        return PostgreSQLDatabase(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="apiVersion")
    def api_version(self) -> pulumi.Output[Optional[str]]:
        # Kubernetes apiVersion of the wrapped custom resource.
        return pulumi.get(self, "api_version")

    @property
    @pulumi.getter
    def kind(self) -> pulumi.Output[Optional[str]]:
        # Kubernetes kind ("PostgreSQLDatabase").
        return pulumi.get(self, "kind")

    @property
    @pulumi.getter
    def metadata(self) -> pulumi.Output[Optional['_meta_v1.outputs.ObjectMeta']]:
        # Standard Kubernetes object metadata (name, namespace, labels, ...).
        return pulumi.get(self, "metadata")

    @property
    @pulumi.getter
    def spec(self) -> pulumi.Output[Optional['outputs.PostgreSQLDatabaseSpec']]:
        """
        PostgreSQLDatabaseSpec defines the desired state of PostgreSQLDatabase
        """
        return pulumi.get(self, "spec")

    @property
    @pulumi.getter
    def status(self) -> pulumi.Output[Optional['outputs.PostgreSQLDatabaseStatus']]:
        """
        ASOStatus (AzureServiceOperatorsStatus) defines the observed state of resource actions
        """
        return pulumi.get(self, "status")

    # Map provider camelCase property names to Python snake_case and back;
    # names missing from the tables pass through unchanged.
    def translate_output_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| 41.425 | 172 | 0.661436 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ... import meta_v1 as _meta_v1
from ._inputs import *
__all__ = ['PostgreSQLDatabase']
class PostgreSQLDatabase(pulumi.CustomResource):
    """Pulumi wrapper for the azure.microsoft.com/v1alpha1 PostgreSQLDatabase CRD."""
    # NOTE: generated code (crd2pulumi); keep hand edits to comments only.
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 api_version: Optional[pulumi.Input[str]] = None,
                 kind: Optional[pulumi.Input[str]] = None,
                 metadata: Optional[pulumi.Input[pulumi.InputType['_meta_v1.ObjectMetaArgs']]] = None,
                 spec: Optional[pulumi.Input[pulumi.InputType['PostgreSQLDatabaseSpecArgs']]] = None,
                 status: Optional[pulumi.Input[pulumi.InputType['PostgreSQLDatabaseStatusArgs']]] = None,
                 __props__=None,
                 __name__=None,
                 __opts__=None):
        """Register a PostgreSQLDatabase resource (or look one up via opts.id)."""
        # Remap deprecated __name__/__opts__ onto resource_name/opts.
        if __name__ is not None:
            warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
            resource_name = __name__
        if __opts__ is not None:
            warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
            opts = __opts__
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: build the property bag from scratch.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = dict()

            # Group/version/kind are fixed by the source CRD.
            __props__['api_version'] = 'azure.microsoft.com/v1alpha1'
            __props__['kind'] = 'PostgreSQLDatabase'
            __props__['metadata'] = metadata
            __props__['spec'] = spec
            __props__['status'] = status
        super(PostgreSQLDatabase, __self__).__init__(
            'kubernetes:azure.microsoft.com/v1alpha1:PostgreSQLDatabase',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'PostgreSQLDatabase':
        """Look up an existing PostgreSQLDatabase by provider id."""
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # Empty property bag: the engine hydrates state from the live object.
        __props__ = dict()

        return PostgreSQLDatabase(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="apiVersion")
    def api_version(self) -> pulumi.Output[Optional[str]]:
        # Kubernetes apiVersion of the wrapped custom resource.
        return pulumi.get(self, "api_version")

    @property
    @pulumi.getter
    def kind(self) -> pulumi.Output[Optional[str]]:
        # Kubernetes kind ("PostgreSQLDatabase").
        return pulumi.get(self, "kind")

    @property
    @pulumi.getter
    def metadata(self) -> pulumi.Output[Optional['_meta_v1.outputs.ObjectMeta']]:
        # Standard Kubernetes object metadata.
        return pulumi.get(self, "metadata")

    @property
    @pulumi.getter
    def spec(self) -> pulumi.Output[Optional['outputs.PostgreSQLDatabaseSpec']]:
        # Desired state of the database.
        return pulumi.get(self, "spec")

    @property
    @pulumi.getter
    def status(self) -> pulumi.Output[Optional['outputs.PostgreSQLDatabaseStatus']]:
        # Observed state reported by the Azure Service Operator.
        return pulumi.get(self, "status")

    # camelCase <-> snake_case property-name mapping; unknown names pass through.
    def translate_output_property(self, prop):
        return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop

    def translate_input_property(self, prop):
        return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| true | true |
1c2fa1b28d4236ecd86c3cc30dc15d65ac8ab769 | 10,886 | py | Python | nemo/collections/asr/parts/features.py | borisdayma/NeMo | 88f6c5b93574adb219185d5ded14b6393c485ea0 | [
"Apache-2.0"
] | 10 | 2020-03-17T08:32:06.000Z | 2021-04-19T19:03:50.000Z | nemo/collections/asr/parts/features.py | dcmartin/NeMo | d2120a40bf23d3e38ff5677c2685c712f297e6b1 | [
"Apache-2.0"
] | 1 | 2020-06-11T00:54:42.000Z | 2020-06-11T00:54:42.000Z | nemo/collections/asr/parts/features.py | dcmartin/NeMo | d2120a40bf23d3e38ff5677c2685c712f297e6b1 | [
"Apache-2.0"
] | 3 | 2020-03-10T05:10:07.000Z | 2020-12-08T01:33:35.000Z | # Taken straight from Patter https://github.com/ryanleary/patter
# TODO: review, and copyright and fix/add comments
import math
import librosa
import torch
import torch.nn as nn
from torch_stft import STFT
from nemo import logging
from nemo.collections.asr.parts.perturb import AudioAugmentor
from nemo.collections.asr.parts.segment import AudioSegment
CONSTANT = 1e-5
def normalize_batch(x, seq_len, normalize_type):
    """Normalize a batch of spectrogram features.

    Args:
        x: features indexed as (batch, feature, time).
        seq_len: per-example count of valid time frames.
        normalize_type: "per_feature", "all_features", or a mapping with
            "fixed_mean"/"fixed_std" entries; anything else is a no-op.
    """
    if normalize_type == "per_feature":
        # One mean/std per (example, feature), computed over valid frames only.
        batch_size, num_feat = x.shape[0], x.shape[1]
        mean = torch.zeros((batch_size, num_feat), dtype=x.dtype, device=x.device)
        std = torch.zeros((batch_size, num_feat), dtype=x.dtype, device=x.device)
        for idx in range(batch_size):
            valid = x[idx, :, : seq_len[idx]]
            mean[idx, :] = valid.mean(dim=1)
            std[idx, :] = valid.std(dim=1)
        # Guard against division by zero for constant features.
        std += CONSTANT
        return (x - mean.unsqueeze(2)) / std.unsqueeze(2)

    if normalize_type == "all_features":
        # A single scalar mean/std per example over all valid entries.
        mean = torch.zeros(seq_len.shape, dtype=x.dtype, device=x.device)
        std = torch.zeros(seq_len.shape, dtype=x.dtype, device=x.device)
        for idx in range(x.shape[0]):
            valid = x[idx, :, : seq_len[idx].item()]
            mean[idx] = valid.mean()
            std[idx] = valid.std()
        std += CONSTANT
        return (x - mean.view(-1, 1, 1)) / std.view(-1, 1, 1)

    if "fixed_mean" in normalize_type and "fixed_std" in normalize_type:
        # Pre-computed statistics supplied by the caller.
        mean = torch.tensor(normalize_type["fixed_mean"], device=x.device)
        std = torch.tensor(normalize_type["fixed_std"], device=x.device)
        return (x - mean.view(x.shape[0], x.shape[1]).unsqueeze(2)) / std.view(
            x.shape[0], x.shape[1]
        ).unsqueeze(2)

    return x
def splice_frames(x, frame_splicing):
    """Stacks `frame_splicing` consecutive frames together across the feature dim.

    input is batch_size, feature_dim, num_frames
    output is batch_size, feature_dim*frame_splicing, num_frames
    """
    seq = [x]
    for n in range(1, frame_splicing):
        # BUGFIX: the previous `torch.cat([x[:, :, :n], x[:, :, n:]], dim=2)`
        # concatenated complementary slices, which reassembles `x` unchanged —
        # every "spliced" copy was an identical duplicate. Instead, shift the
        # sequence left by n frames and pad by repeating the last frame, so
        # output frame t stacks the features of frames t .. t+n.
        shift = min(n, x.shape[2])  # robust when n exceeds the frame count
        tail = x[:, :, -1:].expand(-1, -1, shift)
        seq.append(torch.cat([x[:, :, shift:], tail], dim=2))
    return torch.cat(seq, dim=1)
class WaveformFeaturizer(object):
    """Loads audio files and returns (optionally augmented) float tensors."""

    def __init__(self, sample_rate=16000, int_values=False, augmentor=None):
        # Fall back to a no-op augmentor when none is supplied.
        if augmentor is None:
            augmentor = AudioAugmentor()
        self.augmentor = augmentor
        self.sample_rate = sample_rate
        self.int_values = int_values

    def max_augmentation_length(self, length):
        return self.augmentor.max_augmentation_length(length)

    def process(self, file_path, offset=0, duration=0, trim=False):
        segment = AudioSegment.from_file(
            file_path,
            target_sr=self.sample_rate,
            int_values=self.int_values,
            offset=offset,
            duration=duration,
            trim=trim,
        )
        return self.process_segment(segment)

    def process_segment(self, audio_segment):
        # Augmentation mutates the segment in place before conversion.
        self.augmentor.perturb(audio_segment)
        return torch.tensor(audio_segment.samples, dtype=torch.float)

    @classmethod
    def from_config(cls, input_config, perturbation_configs=None):
        augmentor = (
            AudioAugmentor.from_config(perturbation_configs)
            if perturbation_configs is not None
            else None
        )
        return cls(
            sample_rate=input_config.get("sample_rate", 16000),
            int_values=input_config.get("int_values", False),
            augmentor=augmentor,
        )
class FeaturizerFactory(object):
    """Builds a featurizer from an input-config dict."""

    def __init__(self):
        pass

    @classmethod
    def from_config(cls, input_cfg, perturbation_configs=None):
        # Only waveform featurization is supported at the moment.
        return WaveformFeaturizer.from_config(
            input_cfg, perturbation_configs=perturbation_configs
        )
class FilterbankFeatures(nn.Module):
    """Featurizer that converts wavs to Mel Spectrograms.
    See AudioToMelSpectrogramPreprocessor for args.
    """

    def __init__(
        self,
        sample_rate=16000,
        n_window_size=320,
        n_window_stride=160,
        window="hann",
        normalize="per_feature",
        n_fft=None,
        preemph=0.97,
        nfilt=64,
        lowfreq=0,
        highfreq=None,
        log=True,
        log_zero_guard_type="add",
        log_zero_guard_value=2 ** -24,
        dither=CONSTANT,
        pad_to=16,
        max_duration=16.7,
        frame_splicing=1,
        stft_conv=False,
        pad_value=0,
        mag_power=2.0,
    ):
        super(FilterbankFeatures, self).__init__()
        # Window size/stride must be positive integers (sample counts).
        if (
            n_window_size is None
            or n_window_stride is None
            or not isinstance(n_window_size, int)
            or not isinstance(n_window_stride, int)
            or n_window_size <= 0
            or n_window_stride <= 0
        ):
            raise ValueError(
                f"{self} got an invalid value for either n_window_size or "
                f"n_window_stride. Both must be positive ints."
            )
        logging.info(f"PADDING: {pad_to}")

        self.win_length = n_window_size
        self.hop_length = n_window_stride
        # Default FFT size: next power of two >= window length.
        self.n_fft = n_fft or 2 ** math.ceil(math.log2(self.win_length))
        self.stft_conv = stft_conv

        if stft_conv:
            logging.info("STFT using conv")

            # Create helper class to patch forward func for use with AMP
            class STFTPatch(STFT):
                def __init__(self, *params, **kw_params):
                    super(STFTPatch, self).__init__(*params, **kw_params)

                def forward(self, input_data):
                    # Delegate to transform() and keep only its first output.
                    return super(STFTPatch, self).transform(input_data)[0]

            self.stft = STFTPatch(self.n_fft, self.hop_length, self.win_length, window)

        else:
            logging.info("STFT using torch")
            torch_windows = {
                'hann': torch.hann_window,
                'hamming': torch.hamming_window,
                'blackman': torch.blackman_window,
                'bartlett': torch.bartlett_window,
                'none': None,
            }
            window_fn = torch_windows.get(window, None)
            window_tensor = window_fn(self.win_length, periodic=False) if window_fn else None
            # register_buffer so the window moves with .to(device) / state_dict.
            self.register_buffer("window", window_tensor)
            self.stft = lambda x: torch.stft(
                x,
                n_fft=self.n_fft,
                hop_length=self.hop_length,
                win_length=self.win_length,
                center=True,
                window=self.window.to(dtype=torch.float),
            )

        self.normalize = normalize
        self.log = log
        self.dither = dither
        self.frame_splicing = frame_splicing
        self.nfilt = nfilt
        self.preemph = preemph
        self.pad_to = pad_to
        highfreq = highfreq or sample_rate / 2

        filterbanks = torch.tensor(
            librosa.filters.mel(sample_rate, self.n_fft, n_mels=nfilt, fmin=lowfreq, fmax=highfreq,),
            dtype=torch.float,
        ).unsqueeze(0)
        # self.fb = filterbanks
        # self.window = window_tensor
        self.register_buffer("fb", filterbanks)

        # Calculate maximum sequence length
        max_length = self.get_seq_len(torch.tensor(max_duration * sample_rate, dtype=torch.float))
        max_pad = pad_to - (max_length % pad_to) if pad_to > 0 else 0
        self.max_length = max_length + max_pad
        self.pad_value = pad_value
        self.mag_power = mag_power

        # We want to avoid taking the log of zero
        # There are two options: either adding or clamping to a small value
        if log_zero_guard_type not in ["add", "clamp"]:
            raise ValueError(
                f"{self} received {log_zero_guard_type} for the "
                f"log_zero_guard_type parameter. It must be either 'add' or "
                f"'clamp'."
            )
        # log_zero_guard_value is the small value we want to use; we support
        # an actual number, or "tiny", or "eps" (resolved per input dtype).
        self.log_zero_guard_value = lambda _: log_zero_guard_value
        if isinstance(log_zero_guard_value, str):
            if log_zero_guard_value == "tiny":
                self.log_zero_guard_value = lambda x: torch.finfo(x.dtype).tiny
            elif log_zero_guard_value == "eps":
                self.log_zero_guard_value = lambda x: torch.finfo(x.dtype).eps
            else:
                raise ValueError(
                    f"{self} received {log_zero_guard_value} for the "
                    f"log_zero_guard_type parameter. It must be either a "
                    f"number, 'tiny', or 'eps'"
                )
        self.log_zero_guard_type = log_zero_guard_type

    def get_seq_len(self, seq_len):
        # Number of STFT frames produced for a waveform of `seq_len` samples.
        return torch.ceil(seq_len / self.hop_length).to(dtype=torch.long)

    @property
    def filter_banks(self):
        return self.fb

    @torch.no_grad()
    def forward(self, x, seq_len):
        # x: raw waveform batch, indexed (batch, samples) per the 2-D slicing
        # below; seq_len: per-example sample counts.
        seq_len = self.get_seq_len(seq_len.float())

        # dither
        if self.dither > 0:
            x += self.dither * torch.randn_like(x)

        # do preemphasis
        if self.preemph is not None:
            x = torch.cat((x[:, 0].unsqueeze(1), x[:, 1:] - self.preemph * x[:, :-1]), dim=1,)

        x = self.stft(x)

        # get power spectrum
        if self.mag_power != 1.0:
            x = x.pow(self.mag_power)
        if not self.stft_conv:
            # torch.stft returns separate real/imag components; sum them to
            # get the (powered) magnitude.
            x = x.sum(-1)

        # dot with filterbank energies
        x = torch.matmul(self.fb.to(x.dtype), x)

        # log features if required
        if self.log:
            if self.log_zero_guard_type == "add":
                x = torch.log(x + self.log_zero_guard_value(x))
            elif self.log_zero_guard_type == "clamp":
                x = torch.log(torch.clamp(x, min=self.log_zero_guard_value(x)))
            else:
                raise ValueError("log_zero_guard_type was not understood")

        # frame splicing if required
        if self.frame_splicing > 1:
            x = splice_frames(x, self.frame_splicing)

        # normalize if required
        if self.normalize:
            x = normalize_batch(x, seq_len, normalize_type=self.normalize)

        # mask to zero any values beyond seq_len in batch, pad to multiple of
        # `pad_to` (for efficiency)
        max_len = x.size(-1)
        mask = torch.arange(max_len).to(x.device)
        mask = mask.expand(x.size(0), max_len) >= seq_len.unsqueeze(1)
        x = x.masked_fill(mask.unsqueeze(1).type(torch.bool).to(device=x.device), self.pad_value,)
        del mask
        pad_to = self.pad_to
        # NOTE(review): pad_to is forced to 16 in eval mode regardless of the
        # configured value -- confirm this override is intended.
        if not self.training:
            pad_to = 16
        if pad_to == "max":
            x = nn.functional.pad(x, (0, self.max_length - x.size(-1)), value=self.pad_value)
        elif pad_to > 0:
            pad_amt = x.size(-1) % pad_to
            if pad_amt != 0:
                x = nn.functional.pad(x, (0, pad_to - pad_amt), value=self.pad_value)

        return x
| 36.653199 | 119 | 0.60215 |
import math
import librosa
import torch
import torch.nn as nn
from torch_stft import STFT
from nemo import logging
from nemo.collections.asr.parts.perturb import AudioAugmentor
from nemo.collections.asr.parts.segment import AudioSegment
CONSTANT = 1e-5
def normalize_batch(x, seq_len, normalize_type):
    """Normalize a batch of spectrogram features.

    Args:
        x: features indexed as (batch, feature, time).
        seq_len: per-example count of valid time frames.
        normalize_type: "per_feature", "all_features", or a mapping with
            "fixed_mean"/"fixed_std" entries; anything else is a no-op.
    """
    if normalize_type == "per_feature":
        # One mean/std per (example, feature), computed over valid frames only.
        batch_size, num_feat = x.shape[0], x.shape[1]
        mean = torch.zeros((batch_size, num_feat), dtype=x.dtype, device=x.device)
        std = torch.zeros((batch_size, num_feat), dtype=x.dtype, device=x.device)
        for idx in range(batch_size):
            valid = x[idx, :, : seq_len[idx]]
            mean[idx, :] = valid.mean(dim=1)
            std[idx, :] = valid.std(dim=1)
        # Guard against division by zero for constant features.
        std += CONSTANT
        return (x - mean.unsqueeze(2)) / std.unsqueeze(2)

    if normalize_type == "all_features":
        # A single scalar mean/std per example over all valid entries.
        mean = torch.zeros(seq_len.shape, dtype=x.dtype, device=x.device)
        std = torch.zeros(seq_len.shape, dtype=x.dtype, device=x.device)
        for idx in range(x.shape[0]):
            valid = x[idx, :, : seq_len[idx].item()]
            mean[idx] = valid.mean()
            std[idx] = valid.std()
        std += CONSTANT
        return (x - mean.view(-1, 1, 1)) / std.view(-1, 1, 1)

    if "fixed_mean" in normalize_type and "fixed_std" in normalize_type:
        # Pre-computed statistics supplied by the caller.
        mean = torch.tensor(normalize_type["fixed_mean"], device=x.device)
        std = torch.tensor(normalize_type["fixed_std"], device=x.device)
        return (x - mean.view(x.shape[0], x.shape[1]).unsqueeze(2)) / std.view(
            x.shape[0], x.shape[1]
        ).unsqueeze(2)

    return x
def splice_frames(x, frame_splicing):
    """Stacks `frame_splicing` consecutive frames together across the feature dim.

    input is batch_size, feature_dim, num_frames
    output is batch_size, feature_dim*frame_splicing, num_frames
    """
    seq = [x]
    for n in range(1, frame_splicing):
        # BUGFIX: the previous `torch.cat([x[:, :, :n], x[:, :, n:]], dim=2)`
        # concatenated complementary slices, which reassembles `x` unchanged —
        # every "spliced" copy was an identical duplicate. Instead, shift the
        # sequence left by n frames and pad by repeating the last frame, so
        # output frame t stacks the features of frames t .. t+n.
        shift = min(n, x.shape[2])  # robust when n exceeds the frame count
        tail = x[:, :, -1:].expand(-1, -1, shift)
        seq.append(torch.cat([x[:, :, shift:], tail], dim=2))
    return torch.cat(seq, dim=1)
class WaveformFeaturizer(object):
    """Loads audio files and returns (optionally augmented) float tensors."""

    def __init__(self, sample_rate=16000, int_values=False, augmentor=None):
        # Fall back to a no-op augmentor when none is supplied.
        if augmentor is None:
            augmentor = AudioAugmentor()
        self.augmentor = augmentor
        self.sample_rate = sample_rate
        self.int_values = int_values

    def max_augmentation_length(self, length):
        return self.augmentor.max_augmentation_length(length)

    def process(self, file_path, offset=0, duration=0, trim=False):
        segment = AudioSegment.from_file(
            file_path,
            target_sr=self.sample_rate,
            int_values=self.int_values,
            offset=offset,
            duration=duration,
            trim=trim,
        )
        return self.process_segment(segment)

    def process_segment(self, audio_segment):
        # Augmentation mutates the segment in place before conversion.
        self.augmentor.perturb(audio_segment)
        return torch.tensor(audio_segment.samples, dtype=torch.float)

    @classmethod
    def from_config(cls, input_config, perturbation_configs=None):
        augmentor = (
            AudioAugmentor.from_config(perturbation_configs)
            if perturbation_configs is not None
            else None
        )
        return cls(
            sample_rate=input_config.get("sample_rate", 16000),
            int_values=input_config.get("int_values", False),
            augmentor=augmentor,
        )
class FeaturizerFactory(object):
    """Builds a featurizer from an input-config dict."""

    def __init__(self):
        pass

    @classmethod
    def from_config(cls, input_cfg, perturbation_configs=None):
        # Only waveform featurization is supported at the moment.
        return WaveformFeaturizer.from_config(
            input_cfg, perturbation_configs=perturbation_configs
        )
class FilterbankFeatures(nn.Module):
    """Converts raw waveform batches to (optionally log) Mel spectrogram features."""

    def __init__(
        self,
        sample_rate=16000,
        n_window_size=320,
        n_window_stride=160,
        window="hann",
        normalize="per_feature",
        n_fft=None,
        preemph=0.97,
        nfilt=64,
        lowfreq=0,
        highfreq=None,
        log=True,
        log_zero_guard_type="add",
        log_zero_guard_value=2 ** -24,
        dither=CONSTANT,
        pad_to=16,
        max_duration=16.7,
        frame_splicing=1,
        stft_conv=False,
        pad_value=0,
        mag_power=2.0,
    ):
        super(FilterbankFeatures, self).__init__()
        # Window size/stride must be positive integers (sample counts).
        if (
            n_window_size is None
            or n_window_stride is None
            or not isinstance(n_window_size, int)
            or not isinstance(n_window_stride, int)
            or n_window_size <= 0
            or n_window_stride <= 0
        ):
            raise ValueError(
                f"{self} got an invalid value for either n_window_size or "
                f"n_window_stride. Both must be positive ints."
            )
        logging.info(f"PADDING: {pad_to}")
        self.win_length = n_window_size
        self.hop_length = n_window_stride
        # Default FFT size: next power of two >= window length.
        self.n_fft = n_fft or 2 ** math.ceil(math.log2(self.win_length))
        self.stft_conv = stft_conv
        if stft_conv:
            logging.info("STFT using conv")

            # Wrapper exposing a plain forward() for the conv-based STFT;
            # delegates to transform() and keeps only its first output.
            class STFTPatch(STFT):
                def __init__(self, *params, **kw_params):
                    super(STFTPatch, self).__init__(*params, **kw_params)
                def forward(self, input_data):
                    return super(STFTPatch, self).transform(input_data)[0]
            self.stft = STFTPatch(self.n_fft, self.hop_length, self.win_length, window)
        else:
            logging.info("STFT using torch")
            torch_windows = {
                'hann': torch.hann_window,
                'hamming': torch.hamming_window,
                'blackman': torch.blackman_window,
                'bartlett': torch.bartlett_window,
                'none': None,
            }
            window_fn = torch_windows.get(window, None)
            window_tensor = window_fn(self.win_length, periodic=False) if window_fn else None
            # register_buffer so the window follows .to(device)/state_dict.
            self.register_buffer("window", window_tensor)
            self.stft = lambda x: torch.stft(
                x,
                n_fft=self.n_fft,
                hop_length=self.hop_length,
                win_length=self.win_length,
                center=True,
                window=self.window.to(dtype=torch.float),
            )
        self.normalize = normalize
        self.log = log
        self.dither = dither
        self.frame_splicing = frame_splicing
        self.nfilt = nfilt
        self.preemph = preemph
        self.pad_to = pad_to
        highfreq = highfreq or sample_rate / 2
        filterbanks = torch.tensor(
            librosa.filters.mel(sample_rate, self.n_fft, n_mels=nfilt, fmin=lowfreq, fmax=highfreq,),
            dtype=torch.float,
        ).unsqueeze(0)
        self.register_buffer("fb", filterbanks)
        # Maximum padded sequence length for pad_to == "max".
        max_length = self.get_seq_len(torch.tensor(max_duration * sample_rate, dtype=torch.float))
        max_pad = pad_to - (max_length % pad_to) if pad_to > 0 else 0
        self.max_length = max_length + max_pad
        self.pad_value = pad_value
        self.mag_power = mag_power
        # Avoid log(0): either add or clamp to a small guard value.
        if log_zero_guard_type not in ["add", "clamp"]:
            raise ValueError(
                f"{self} received {log_zero_guard_type} for the "
                f"log_zero_guard_type parameter. It must be either 'add' or "
                f"'clamp'."
            )
        # Guard value may be a number, or "tiny"/"eps" resolved per dtype.
        self.log_zero_guard_value = lambda _: log_zero_guard_value
        if isinstance(log_zero_guard_value, str):
            if log_zero_guard_value == "tiny":
                self.log_zero_guard_value = lambda x: torch.finfo(x.dtype).tiny
            elif log_zero_guard_value == "eps":
                self.log_zero_guard_value = lambda x: torch.finfo(x.dtype).eps
            else:
                raise ValueError(
                    f"{self} received {log_zero_guard_value} for the "
                    f"log_zero_guard_type parameter. It must be either a "
                    f"number, 'tiny', or 'eps'"
                )
        self.log_zero_guard_type = log_zero_guard_type
    def get_seq_len(self, seq_len):
        # Number of STFT frames produced for `seq_len` samples.
        return torch.ceil(seq_len / self.hop_length).to(dtype=torch.long)
    @property
    def filter_banks(self):
        return self.fb
    @torch.no_grad()
    def forward(self, x, seq_len):
        # x: raw waveform batch, indexed (batch, samples) per the 2-D slicing
        # below; seq_len: per-example sample counts.
        seq_len = self.get_seq_len(seq_len.float())
        if self.dither > 0:
            x += self.dither * torch.randn_like(x)
        # Pre-emphasis filter.
        if self.preemph is not None:
            x = torch.cat((x[:, 0].unsqueeze(1), x[:, 1:] - self.preemph * x[:, :-1]), dim=1,)
        x = self.stft(x)
        # Power spectrum, then collapse real/imag for the torch STFT path.
        if self.mag_power != 1.0:
            x = x.pow(self.mag_power)
        if not self.stft_conv:
            x = x.sum(-1)
        # Apply Mel filterbank.
        x = torch.matmul(self.fb.to(x.dtype), x)
        if self.log:
            if self.log_zero_guard_type == "add":
                x = torch.log(x + self.log_zero_guard_value(x))
            elif self.log_zero_guard_type == "clamp":
                x = torch.log(torch.clamp(x, min=self.log_zero_guard_value(x)))
            else:
                raise ValueError("log_zero_guard_type was not understood")
        if self.frame_splicing > 1:
            x = splice_frames(x, self.frame_splicing)
        if self.normalize:
            x = normalize_batch(x, seq_len, normalize_type=self.normalize)
        # Zero out frames beyond each example's length, then pad the time
        # axis to a multiple of pad_to.
        max_len = x.size(-1)
        mask = torch.arange(max_len).to(x.device)
        mask = mask.expand(x.size(0), max_len) >= seq_len.unsqueeze(1)
        x = x.masked_fill(mask.unsqueeze(1).type(torch.bool).to(device=x.device), self.pad_value,)
        del mask
        pad_to = self.pad_to
        # NOTE(review): pad_to is forced to 16 in eval mode regardless of the
        # configured value -- confirm this override is intended.
        if not self.training:
            pad_to = 16
        if pad_to == "max":
            x = nn.functional.pad(x, (0, self.max_length - x.size(-1)), value=self.pad_value)
        elif pad_to > 0:
            pad_amt = x.size(-1) % pad_to
            if pad_amt != 0:
                x = nn.functional.pad(x, (0, pad_to - pad_amt), value=self.pad_value)
        return x
| true | true |
1c2fa4607fad66cd68a24ce8e5744d69da3257bb | 650 | py | Python | mlxtend/docs/make_userguide.py | WhiteWolf21/fp-growth | 01e1d853b09f244f14e66d7d0c87f139a0f67c81 | [
"MIT"
] | null | null | null | mlxtend/docs/make_userguide.py | WhiteWolf21/fp-growth | 01e1d853b09f244f14e66d7d0c87f139a0f67c81 | [
"MIT"
] | null | null | null | mlxtend/docs/make_userguide.py | WhiteWolf21/fp-growth | 01e1d853b09f244f14e66d7d0c87f139a0f67c81 | [
"MIT"
] | null | null | null | # API generator script
#
# Sebastian Raschka 2014-2020
# mlxtend Machine Learning Library Extensions
#
# Author: Sebastian Raschka <sebastianraschka.com>
#
# License: BSD 3 clause
import yaml
import os.path
# Build the User Guide index markdown from the mkdocs navigation config.
s = "# User Guide Index"

# safe_load avoids executing arbitrary YAML tags and the PyYAML
# "load without Loader" deprecation warning; the context manager
# closes the config file handle (previously leaked).
with open('mkdocs.yml', 'r') as yml_cont:
    usr_gd = yaml.safe_load(yml_cont)['pages'][1]['User Guide']

# Skip the first entry and emit one section per subpackage.
for dct in usr_gd[1:]:
    subpk = list(dct.keys())[0]
    s += '\n\n## `%s`' % subpk
    for obj in dct[subpk]:
        # Strip the .md extension to get the page's display name.
        bsname = os.path.basename(obj).split('.md')[0]
        s += '\n- [%s](%s)' % (bsname, obj)

usr_gd_file = os.path.join('sources', 'USER_GUIDE_INDEX.md')
with open(usr_gd_file, 'w') as f:
    f.write(s)
| 22.413793 | 60 | 0.633846 |
import yaml
import os.path
# Build the User Guide index markdown from the mkdocs navigation config.
s = "# User Guide Index"

# safe_load avoids executing arbitrary YAML tags and the PyYAML
# "load without Loader" deprecation warning; the context manager
# closes the config file handle (previously leaked).
with open('mkdocs.yml', 'r') as yml_cont:
    usr_gd = yaml.safe_load(yml_cont)['pages'][1]['User Guide']

# Skip the first entry and emit one section per subpackage.
for dct in usr_gd[1:]:
    subpk = list(dct.keys())[0]
    s += '\n\n## `%s`' % subpk
    for obj in dct[subpk]:
        # Strip the .md extension to get the page's display name.
        bsname = os.path.basename(obj).split('.md')[0]
        s += '\n- [%s](%s)' % (bsname, obj)

usr_gd_file = os.path.join('sources', 'USER_GUIDE_INDEX.md')
with open(usr_gd_file, 'w') as f:
    f.write(s)
| true | true |
1c2fa478453f7b0416d6189af5154d347cc6e565 | 6,317 | py | Python | rasa/core/channels/botframework.py | anoop2503/rasa | 990bab95954a2f6605f039ce9fb1317b39858651 | [
"Apache-2.0"
] | 1 | 2021-07-06T20:49:07.000Z | 2021-07-06T20:49:07.000Z | rasa/core/channels/botframework.py | anoop2503/rasa | 990bab95954a2f6605f039ce9fb1317b39858651 | [
"Apache-2.0"
] | null | null | null | rasa/core/channels/botframework.py | anoop2503/rasa | 990bab95954a2f6605f039ce9fb1317b39858651 | [
"Apache-2.0"
] | 1 | 2020-01-13T14:54:22.000Z | 2020-01-13T14:54:22.000Z | # -*- coding: utf-8 -*-
import datetime
import json
import logging
import requests
from sanic import Blueprint, response
from sanic.request import Request
from typing import Text, Dict, Any
from rasa.core.channels.channel import UserMessage, OutputChannel, InputChannel
logger = logging.getLogger(__name__)
MICROSOFT_OAUTH2_URL = "https://login.microsoftonline.com"
MICROSOFT_OAUTH2_PATH = "botframework.com/oauth2/v2.0/token"
class BotFramework(OutputChannel):
    """A Microsoft Bot Framework communication channel.

    Posts bot responses as activities to the current conversation via the
    Bot Framework REST API.
    """

    # OAuth token state is class-level so every instance shares one cached
    # bearer token until it expires.
    token_expiration_date = datetime.datetime.now()
    headers = None

    @classmethod
    def name(cls):
        return "botframework"

    def __init__(
        self,
        app_id: Text,
        app_password: Text,
        conversation: Dict[Text, Any],
        bot: Text,
        service_url: Text,
    ) -> None:
        self.app_id = app_id
        self.app_password = app_password
        self.conversation = conversation
        self.global_uri = "{}v3/".format(service_url)
        self.bot = bot

    async def _get_headers(self):
        """Return auth headers for the Bot Framework API.

        Refreshes the shared OAuth token when it has expired. Returns
        ``None`` when the token request fails (the subsequent send then
        goes out unauthenticated and is rejected by the service).
        """
        # NOTE(review): `requests` is a blocking client; this call stalls the
        # event loop while fetching the token -- consider an async client.
        if BotFramework.token_expiration_date < datetime.datetime.now():
            uri = "{}/{}".format(MICROSOFT_OAUTH2_URL, MICROSOFT_OAUTH2_PATH)
            grant_type = "client_credentials"
            scope = "https://api.botframework.com/.default"
            payload = {
                "client_id": self.app_id,
                "client_secret": self.app_password,
                "grant_type": grant_type,
                "scope": scope,
            }

            token_response = requests.post(uri, data=payload)

            if token_response.ok:
                token_data = token_response.json()
                access_token = token_data["access_token"]
                token_expiration = token_data["expires_in"]

                # Refresh the shared expiry so other instances reuse the token.
                BotFramework.token_expiration_date = datetime.datetime.now() + datetime.timedelta(
                    seconds=int(token_expiration)
                )
                BotFramework.headers = {
                    "content-type": "application/json",
                    "Authorization": "Bearer %s" % access_token,
                }
                return BotFramework.headers
            else:
                logger.error("Could not get BotFramework token")
                # Explicit None (previously an implicit fall-through return).
                return None
        else:
            return BotFramework.headers

    async def send(self, recipient_id: Text, message_data: Dict[Text, Any]) -> None:
        """POST a single "message" activity to the current conversation."""
        post_message_uri = "{}conversations/{}/activities".format(
            self.global_uri, self.conversation["id"]
        )
        data = {
            "type": "message",
            "recipient": {"id": recipient_id},
            "from": self.bot,
            "channelData": {"notification": {"alert": "true"}},
            "text": "",
        }
        data.update(message_data)

        headers = await self._get_headers()
        send_response = requests.post(
            post_message_uri, headers=headers, data=json.dumps(data)
        )

        if not send_response.ok:
            # Typo fixed: "messge" -> "message".
            logger.error(
                "Error trying to send botframework message. Response: %s",
                send_response.text,
            )

    async def send_text_message(self, recipient_id, message):
        """Send text, splitting paragraphs into separate activities."""
        for message_part in message.split("\n\n"):
            text_message = {"text": message_part}
            await self.send(recipient_id, text_message)

    async def send_image_url(self, recipient_id, image_url):
        """Send an image as a hero-card attachment."""
        hero_content = {
            "contentType": "application/vnd.microsoft.card.hero",
            "content": {"images": [{"url": image_url}]},
        }

        image_message = {"attachments": [hero_content]}
        await self.send(recipient_id, image_message)

    async def send_text_with_buttons(self, recipient_id, message, buttons, **kwargs):
        """Send text plus buttons as a hero-card attachment."""
        hero_content = {
            "contentType": "application/vnd.microsoft.card.hero",
            "content": {"subtitle": message, "buttons": buttons},
        }

        buttons_message = {"attachments": [hero_content]}
        await self.send(recipient_id, buttons_message)

    async def send_custom_message(self, recipient_id, elements):
        """Send the first custom element as a raw activity payload."""
        await self.send(recipient_id, elements[0])
class BotFrameworkInput(InputChannel):
    """Bot Framework input channel implementation."""

    @classmethod
    def name(cls):
        return "botframework"

    @classmethod
    def from_credentials(cls, credentials):
        # Abort loudly when the credentials file lacks a botframework section.
        if not credentials:
            cls.raise_missing_credentials_exception()

        return cls(credentials.get("app_id"), credentials.get("app_password"))

    def __init__(self, app_id: Text, app_password: Text) -> None:
        """Create a Bot Framework input channel.

        Args:
            app_id: Bot Framework's API id
            app_password: Bot Framework application secret
        """
        self.app_id = app_id
        self.app_password = app_password

    def blueprint(self, on_new_message):
        # Sanic blueprint exposing a health endpoint and the message webhook.
        botframework_webhook = Blueprint("botframework_webhook", __name__)

        @botframework_webhook.route("/", methods=["GET"])
        async def health(request):
            return response.json({"status": "ok"})

        @botframework_webhook.route("/webhook", methods=["POST"])
        async def webhook(request: Request):
            postdata = request.json
            try:
                # Only "message" activities are forwarded to the bot; other
                # activity types (typing, conversationUpdate, ...) are logged
                # and ignored.
                if postdata["type"] == "message":
                    out_channel = BotFramework(
                        self.app_id,
                        self.app_password,
                        postdata["conversation"],
                        postdata["recipient"],
                        postdata["serviceUrl"],
                    )

                    user_msg = UserMessage(
                        postdata["text"],
                        out_channel,
                        postdata["from"]["id"],
                        input_channel=self.name(),
                    )
                    await on_new_message(user_msg)
                else:
                    logger.info("Not received message type")
            except Exception as e:
                # Broad catch: always acknowledge the webhook with 200 even on
                # failure -- presumably so Bot Framework does not retry.
                # TODO confirm that behavior is intended.
                logger.error("Exception when trying to handle message.{0}".format(e))
                logger.debug(e, exc_info=True)
                pass

            return response.text("success")

        return botframework_webhook
| 32.394872 | 98 | 0.576698 |
import datetime
import json
import logging
import requests
from sanic import Blueprint, response
from sanic.request import Request
from typing import Text, Dict, Any
from rasa.core.channels.channel import UserMessage, OutputChannel, InputChannel
logger = logging.getLogger(__name__)
MICROSOFT_OAUTH2_URL = "https://login.microsoftonline.com"
MICROSOFT_OAUTH2_PATH = "botframework.com/oauth2/v2.0/token"
class BotFramework(OutputChannel):
    """A Microsoft Bot Framework communication channel.

    Posts bot responses as activities to the current conversation via the
    Bot Framework REST API.
    """

    # OAuth token state is class-level so every instance shares one cached
    # bearer token until it expires.
    token_expiration_date = datetime.datetime.now()
    headers = None

    @classmethod
    def name(cls):
        return "botframework"

    def __init__(
        self,
        app_id: Text,
        app_password: Text,
        conversation: Dict[Text, Any],
        bot: Text,
        service_url: Text,
    ) -> None:
        self.app_id = app_id
        self.app_password = app_password
        self.conversation = conversation
        self.global_uri = "{}v3/".format(service_url)
        self.bot = bot

    async def _get_headers(self):
        """Return auth headers for the Bot Framework API.

        Refreshes the shared OAuth token when it has expired. Returns
        ``None`` when the token request fails (the subsequent send then
        goes out unauthenticated and is rejected by the service).
        """
        # NOTE(review): `requests` is a blocking client; this call stalls the
        # event loop while fetching the token -- consider an async client.
        if BotFramework.token_expiration_date < datetime.datetime.now():
            uri = "{}/{}".format(MICROSOFT_OAUTH2_URL, MICROSOFT_OAUTH2_PATH)
            grant_type = "client_credentials"
            scope = "https://api.botframework.com/.default"
            payload = {
                "client_id": self.app_id,
                "client_secret": self.app_password,
                "grant_type": grant_type,
                "scope": scope,
            }

            token_response = requests.post(uri, data=payload)

            if token_response.ok:
                token_data = token_response.json()
                access_token = token_data["access_token"]
                token_expiration = token_data["expires_in"]

                # Refresh the shared expiry so other instances reuse the token.
                BotFramework.token_expiration_date = datetime.datetime.now() + datetime.timedelta(
                    seconds=int(token_expiration)
                )
                BotFramework.headers = {
                    "content-type": "application/json",
                    "Authorization": "Bearer %s" % access_token,
                }
                return BotFramework.headers
            else:
                logger.error("Could not get BotFramework token")
                # Explicit None (previously an implicit fall-through return).
                return None
        else:
            return BotFramework.headers

    async def send(self, recipient_id: Text, message_data: Dict[Text, Any]) -> None:
        """POST a single "message" activity to the current conversation."""
        post_message_uri = "{}conversations/{}/activities".format(
            self.global_uri, self.conversation["id"]
        )
        data = {
            "type": "message",
            "recipient": {"id": recipient_id},
            "from": self.bot,
            "channelData": {"notification": {"alert": "true"}},
            "text": "",
        }
        data.update(message_data)

        headers = await self._get_headers()
        send_response = requests.post(
            post_message_uri, headers=headers, data=json.dumps(data)
        )

        if not send_response.ok:
            # Typo fixed: "messge" -> "message".
            logger.error(
                "Error trying to send botframework message. Response: %s",
                send_response.text,
            )

    async def send_text_message(self, recipient_id, message):
        """Send text, splitting paragraphs into separate activities."""
        for message_part in message.split("\n\n"):
            text_message = {"text": message_part}
            await self.send(recipient_id, text_message)

    async def send_image_url(self, recipient_id, image_url):
        """Send an image as a hero-card attachment."""
        hero_content = {
            "contentType": "application/vnd.microsoft.card.hero",
            "content": {"images": [{"url": image_url}]},
        }

        image_message = {"attachments": [hero_content]}
        await self.send(recipient_id, image_message)

    async def send_text_with_buttons(self, recipient_id, message, buttons, **kwargs):
        """Send text plus buttons as a hero-card attachment."""
        hero_content = {
            "contentType": "application/vnd.microsoft.card.hero",
            "content": {"subtitle": message, "buttons": buttons},
        }

        buttons_message = {"attachments": [hero_content]}
        await self.send(recipient_id, buttons_message)

    async def send_custom_message(self, recipient_id, elements):
        """Send the first custom element as a raw activity payload."""
        await self.send(recipient_id, elements[0])
class BotFrameworkInput(InputChannel):
@classmethod
def name(cls):
return "botframework"
@classmethod
def from_credentials(cls, credentials):
if not credentials:
cls.raise_missing_credentials_exception()
return cls(credentials.get("app_id"), credentials.get("app_password"))
def __init__(self, app_id: Text, app_password: Text) -> None:
self.app_id = app_id
self.app_password = app_password
def blueprint(self, on_new_message):
botframework_webhook = Blueprint("botframework_webhook", __name__)
@botframework_webhook.route("/", methods=["GET"])
async def health(request):
return response.json({"status": "ok"})
@botframework_webhook.route("/webhook", methods=["POST"])
async def webhook(request: Request):
postdata = request.json
try:
if postdata["type"] == "message":
out_channel = BotFramework(
self.app_id,
self.app_password,
postdata["conversation"],
postdata["recipient"],
postdata["serviceUrl"],
)
user_msg = UserMessage(
postdata["text"],
out_channel,
postdata["from"]["id"],
input_channel=self.name(),
)
await on_new_message(user_msg)
else:
logger.info("Not received message type")
except Exception as e:
logger.error("Exception when trying to handle message.{0}".format(e))
logger.debug(e, exc_info=True)
pass
return response.text("success")
return botframework_webhook
| true | true |
1c2fa48e6922a25e39c7a2a1af51ab9a9bf4482e | 14,086 | py | Python | pygsti/modelmembers/operations/lpdenseop.py | pyGSTi-Developers/pyGSTi | bfedc1de4d604f14b0f958615776fb80ddb59e33 | [
"Apache-2.0"
] | 73 | 2016-01-28T05:02:05.000Z | 2022-03-30T07:46:33.000Z | pygsti/modelmembers/operations/lpdenseop.py | pyGSTi-Developers/pyGSTi | bfedc1de4d604f14b0f958615776fb80ddb59e33 | [
"Apache-2.0"
] | 113 | 2016-02-25T15:32:18.000Z | 2022-03-31T13:18:13.000Z | pygsti/modelmembers/operations/lpdenseop.py | pyGSTi-Developers/pyGSTi | bfedc1de4d604f14b0f958615776fb80ddb59e33 | [
"Apache-2.0"
] | 41 | 2016-03-15T19:32:07.000Z | 2022-02-16T10:22:05.000Z | """
The LinearlyParamArbitraryOp class and supporting functionality.
"""
#***************************************************************************************************
# Copyright 2015, 2019 National Technology & Engineering Solutions of Sandia, LLC (NTESS).
# Under the terms of Contract DE-NA0003525 with NTESS, the U.S. Government retains certain rights
# in this software.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory.
#***************************************************************************************************
import numpy as _np
from pygsti.modelmembers.operations.denseop import DenseOperator as _DenseOperator
from pygsti.modelmembers.operations.linearop import LinearOperator as _LinearOperator
from pygsti.baseobjs.statespace import StateSpace as _StateSpace
from pygsti.tools import matrixtools as _mt
IMAG_TOL = 1e-7 # tolerance for imaginary part being considered zero
class LinearlyParameterizedElementTerm(object):
"""
Encapsulates a single term within a LinearlyParamArbitraryOp.
Parameters
----------
coeff : float, optional
The term's coefficient
param_indices : list
A list of integers, specifying which parameters are muliplied
together (and finally, with `coeff`) to form this term.
"""
def __init__(self, coeff=1.0, param_indices=[]):
"""
Create a new LinearlyParameterizedElementTerm
Parameters
----------
coeff : float, optional
The term's coefficient
param_indices : list
A list of integers, specifying which parameters are muliplied
together (and finally, with `coeff`) to form this term.
"""
self.coeff = coeff
self.paramIndices = param_indices
class LinearlyParamArbitraryOp(_DenseOperator):
"""
An operation matrix parameterized such that each element depends only linearly on any parameter.
Parameters
----------
basematrix : numpy array
a square 2D numpy array that acts as the starting point when
constructin the operation's matrix. The shape of this array sets
the dimension of the operation.
parameter_array : numpy array
a 1D numpy array that holds the all the parameters for this
operation. The shape of this array sets is what is returned by
value_dimension(...).
parameter_to_base_indices_map : dict
A dictionary with keys == index of a parameter
(i.e. in parameter_array) and values == list of 2-tuples
indexing potentially multiple operation matrix coordinates
which should be set equal to this parameter.
left_transform : numpy array or None, optional
A 2D array of the same shape as basematrix which left-multiplies
the base matrix after parameters have been evaluated. Defaults to
no transform_inplace.
right_transform : numpy array or None, optional
A 2D array of the same shape as basematrix which right-multiplies
the base matrix after parameters have been evaluated. Defaults to
no transform_inplace.
real : bool, optional
Whether or not the resulting operation matrix, after all
parameter evaluation and left & right transforms have
been performed, should be real. If True, ValueError will
be raised if the matrix contains any complex or imaginary
elements.
evotype : Evotype or str, optional
The evolution type. The special value `"default"` is equivalent
to specifying the value of `pygsti.evotypes.Evotype.default_evotype`.
state_space : StateSpace, optional
The state space for this operation. If `None` a default state space
with the appropriate number of qubits is used.
"""
def __init__(self, base_matrix, parameter_array, parameter_to_base_indices_map,
left_transform=None, right_transform=None, real=False, evotype="default", state_space=None):
base_matrix = _np.array(_LinearOperator.convert_to_matrix(base_matrix), 'complex')
#complex, even if passed all real base matrix
elementExpressions = {}
for p, ij_tuples in parameter_to_base_indices_map.items():
for i, j in ij_tuples:
assert((i, j) not in elementExpressions) # only one parameter allowed per base index pair
elementExpressions[(i, j)] = [LinearlyParameterizedElementTerm(1.0, [p])]
typ = "d" if real else "complex"
mx = _np.empty(base_matrix.shape, typ)
self.baseMatrix = base_matrix
self.parameterArray = parameter_array
self.numParams = len(parameter_array)
self.elementExpressions = elementExpressions
assert(_np.isrealobj(self.parameterArray)), "Parameter array must be real-valued!"
I = _np.identity(self.baseMatrix.shape[0], 'd') # LinearlyParameterizedGates are currently assumed to be real
self.leftTrans = left_transform if (left_transform is not None) else I
self.rightTrans = right_transform if (right_transform is not None) else I
self.enforceReal = real
#Note: dense op reps *always* own their own data so setting writeable flag is OK
_DenseOperator.__init__(self, mx, evotype, state_space)
self._ptr.flags.writeable = False # only _construct_matrix can change array
self._construct_matrix() # construct base from the parameters
def _construct_matrix(self):
"""
Build the internal operation matrix using the current parameters.
"""
matrix = self.baseMatrix.copy()
for (i, j), terms in self.elementExpressions.items():
for term in terms:
param_prod = _np.prod([self.parameterArray[p] for p in term.paramIndices])
matrix[i, j] += term.coeff * param_prod
matrix = _np.dot(self.leftTrans, _np.dot(matrix, self.rightTrans))
if self.enforceReal:
if _np.linalg.norm(_np.imag(matrix)) > IMAG_TOL:
raise ValueError("Linearly parameterized matrix has non-zero"
"imaginary part (%g)!" % _np.linalg.norm(_np.imag(matrix)))
matrix = _np.real(matrix)
#Note: dense op reps *always* own their own data so setting writeable flag is OK
assert(matrix.shape == (self.dim, self.dim))
self._ptr.flags.writeable = True
self._ptr[:, :] = matrix
self._ptr.flags.writeable = False
def _construct_param_to_base_indices_map(self):
# build mapping for constructor, which has integer keys so ok for serialization
param_to_base_indices_map = {}
for (i, j), term in self.elementExpressions:
assert(len(term.paramIndices) == 1)
p = term.paramIndices[0]
if p not in param_to_base_indices_map:
param_to_base_indices_map[p] = []
param_to_base_indices_map[p].append((i, j))
return param_to_base_indices_map
def to_memoized_dict(self, mmg_memo):
"""Create a serializable dict with references to other objects in the memo.
Parameters
----------
mmg_memo: dict
Memo dict from a ModelMemberGraph, i.e. keys are object ids and values
are ModelMemberGraphNodes (which contain the serialize_id). This is NOT
the same as other memos in ModelMember (e.g. copy, allocate_gpindices, etc.).
Returns
-------
mm_dict: dict
A dict representation of this ModelMember ready for serialization
This must have at least the following fields:
module, class, submembers, params, state_space, evotype
Additional fields may be added by derived classes.
"""
param_to_base_indices_map = self._construct_param_to_base_indices_map()
mm_dict = super().to_memoized_dict(mmg_memo) # includes 'dense_matrix' from DenseOperator
mm_dict['base_matrix'] = self._encodemx(self.baseMatrix)
mm_dict['parameter_array'] = self._encodemx(self.parameterArray)
mm_dict['parameter_to_base_indices_map'] = param_to_base_indices_map
mm_dict['left_transform'] = self._encodemx(self.leftTrans)
mm_dict['right_transform'] = self._encodemx(self.rightTrans)
mm_dict['enforce_real'] = self.enforceReal
return mm_dict
@classmethod
def _from_memoized_dict(cls, mm_dict, serial_memo):
base_matrix = cls._decodemx(mm_dict['base_matrix'])
parameter_array = cls._decodemx(mm_dict['parameter_array'])
left_transform = cls._decodemx(mm_dict['left_transform'])
right_transform = cls._decodemx(mm_dict['right_transform'])
state_space = _StateSpace.from_nice_serialization(mm_dict['state_space'])
return cls(base_matrix, parameter_array, mm_dict['parameter_to_base_indices_map'],
left_transform, right_transform, mm_dict['enforce_real'], mm_dict['evotype'], state_space)
def _is_similar(self, other, rtol, atol):
""" Returns True if `other` model member (which it guaranteed to be the same type as self) has
the same local structure, i.e., not considering parameter values or submembers """
return ((self.baseMatrix.shape == other.baseMatrix.shape)
and _np.allclose(self.baseMatrix, other.baseMatrix, rtol=rtol, atol=atol)
and _np.allclose(self.leftTrans, other.leftTrans, rtol=rtol, atol=atol)
and _np.allclose(self.rightTrans, other.rightTrans, rtol=rtol, atol=atol)
and (self._construct_param_to_base_indices_map() == other._construct_param_to_base_indices_map())
and (self.enforceReal == other.enforceReal))
@property
def num_params(self):
"""
Get the number of independent parameters which specify this operation.
Returns
-------
int
the number of independent parameters.
"""
return self.numParams
def to_vector(self):
"""
Extract a vector of the underlying operation parameters from this operation.
Returns
-------
numpy array
a 1D numpy array with length == num_params().
"""
return self.parameterArray
def from_vector(self, v, close=False, dirty_value=True):
"""
Initialize the operation using a vector of parameters.
Parameters
----------
v : numpy array
The 1D vector of operation parameters. Length
must == num_params()
close : bool, optional
Whether `v` is close to this operation's current
set of parameters. Under some circumstances, when this
is true this call can be completed more quickly.
dirty_value : bool, optional
The value to set this object's "dirty flag" to before exiting this
call. This is passed as an argument so it can be updated *recursively*.
Leave this set to `True` unless you know what you're doing.
Returns
-------
None
"""
self.parameterArray[:] = v
self._construct_matrix()
self.dirty = dirty_value
def deriv_wrt_params(self, wrt_filter=None):
"""
The element-wise derivative this operation.
Construct a matrix whose columns are the vectorized
derivatives of the flattened operation matrix with respect to a
single operation parameter. Thus, each column is of length
op_dim^2 and there is one column per operation parameter.
Parameters
----------
wrt_filter : list or numpy.ndarray
List of parameter indices to take derivative with respect to.
(None means to use all the this operation's parameters.)
Returns
-------
numpy array
Array of derivatives, shape == (dimension^2, num_params)
"""
derivMx = _np.zeros((self.numParams, self.dim, self.dim), 'complex')
for (i, j), terms in self.elementExpressions.items():
for term in terms:
params_to_mult = [self.parameterArray[p] for p in term.paramIndices]
for k, p in enumerate(term.paramIndices):
param_partial_prod = _np.prod(params_to_mult[0:k] + params_to_mult[k + 1:]) # exclude k-th factor
derivMx[p, i, j] += term.coeff * param_partial_prod
derivMx = _np.dot(self.leftTrans, _np.dot(derivMx, self.rightTrans)) # (d,d) * (P,d,d) * (d,d) => (d,P,d)
derivMx = _np.rollaxis(derivMx, 1, 3) # now (d,d,P)
derivMx = derivMx.reshape([self.dim**2, self.numParams]) # (d^2,P) == final shape
if self.enforceReal:
assert(_np.linalg.norm(_np.imag(derivMx)) < IMAG_TOL)
derivMx = _np.real(derivMx)
if wrt_filter is None:
return derivMx
else:
return _np.take(derivMx, wrt_filter, axis=1)
def has_nonzero_hessian(self):
"""
Whether this operation has a non-zero Hessian with respect to its parameters.
(i.e. whether it only depends linearly on its parameters or not)
Returns
-------
bool
"""
return False
def __str__(self):
s = "Linearly Parameterized operation with shape %s, num params = %d\n" % \
(str(self._ptr.shape), self.numParams)
s += _mt.mx_to_string(self._ptr, width=5, prec=1)
s += "\nParameterization:"
for (i, j), terms in self.elementExpressions.items():
tStr = ' + '.join(['*'.join(["p%d" % p for p in term.paramIndices])
for term in terms])
s += "LinearOperator[%d,%d] = %s\n" % (i, j, tStr)
return s
| 42.555891 | 118 | 0.639216 |
import numpy as _np
from pygsti.modelmembers.operations.denseop import DenseOperator as _DenseOperator
from pygsti.modelmembers.operations.linearop import LinearOperator as _LinearOperator
from pygsti.baseobjs.statespace import StateSpace as _StateSpace
from pygsti.tools import matrixtools as _mt
IMAG_TOL = 1e-7
class LinearlyParameterizedElementTerm(object):
def __init__(self, coeff=1.0, param_indices=[]):
self.coeff = coeff
self.paramIndices = param_indices
class LinearlyParamArbitraryOp(_DenseOperator):
def __init__(self, base_matrix, parameter_array, parameter_to_base_indices_map,
left_transform=None, right_transform=None, real=False, evotype="default", state_space=None):
base_matrix = _np.array(_LinearOperator.convert_to_matrix(base_matrix), 'complex')
elementExpressions = {}
for p, ij_tuples in parameter_to_base_indices_map.items():
for i, j in ij_tuples:
assert((i, j) not in elementExpressions)
elementExpressions[(i, j)] = [LinearlyParameterizedElementTerm(1.0, [p])]
typ = "d" if real else "complex"
mx = _np.empty(base_matrix.shape, typ)
self.baseMatrix = base_matrix
self.parameterArray = parameter_array
self.numParams = len(parameter_array)
self.elementExpressions = elementExpressions
assert(_np.isrealobj(self.parameterArray)), "Parameter array must be real-valued!"
I = _np.identity(self.baseMatrix.shape[0], 'd')
self.leftTrans = left_transform if (left_transform is not None) else I
self.rightTrans = right_transform if (right_transform is not None) else I
self.enforceReal = real
_DenseOperator.__init__(self, mx, evotype, state_space)
self._ptr.flags.writeable = False
self._construct_matrix()
def _construct_matrix(self):
matrix = self.baseMatrix.copy()
for (i, j), terms in self.elementExpressions.items():
for term in terms:
param_prod = _np.prod([self.parameterArray[p] for p in term.paramIndices])
matrix[i, j] += term.coeff * param_prod
matrix = _np.dot(self.leftTrans, _np.dot(matrix, self.rightTrans))
if self.enforceReal:
if _np.linalg.norm(_np.imag(matrix)) > IMAG_TOL:
raise ValueError("Linearly parameterized matrix has non-zero"
"imaginary part (%g)!" % _np.linalg.norm(_np.imag(matrix)))
matrix = _np.real(matrix)
assert(matrix.shape == (self.dim, self.dim))
self._ptr.flags.writeable = True
self._ptr[:, :] = matrix
self._ptr.flags.writeable = False
def _construct_param_to_base_indices_map(self):
param_to_base_indices_map = {}
for (i, j), term in self.elementExpressions:
assert(len(term.paramIndices) == 1)
p = term.paramIndices[0]
if p not in param_to_base_indices_map:
param_to_base_indices_map[p] = []
param_to_base_indices_map[p].append((i, j))
return param_to_base_indices_map
def to_memoized_dict(self, mmg_memo):
param_to_base_indices_map = self._construct_param_to_base_indices_map()
mm_dict = super().to_memoized_dict(mmg_memo)
mm_dict['base_matrix'] = self._encodemx(self.baseMatrix)
mm_dict['parameter_array'] = self._encodemx(self.parameterArray)
mm_dict['parameter_to_base_indices_map'] = param_to_base_indices_map
mm_dict['left_transform'] = self._encodemx(self.leftTrans)
mm_dict['right_transform'] = self._encodemx(self.rightTrans)
mm_dict['enforce_real'] = self.enforceReal
return mm_dict
@classmethod
def _from_memoized_dict(cls, mm_dict, serial_memo):
base_matrix = cls._decodemx(mm_dict['base_matrix'])
parameter_array = cls._decodemx(mm_dict['parameter_array'])
left_transform = cls._decodemx(mm_dict['left_transform'])
right_transform = cls._decodemx(mm_dict['right_transform'])
state_space = _StateSpace.from_nice_serialization(mm_dict['state_space'])
return cls(base_matrix, parameter_array, mm_dict['parameter_to_base_indices_map'],
left_transform, right_transform, mm_dict['enforce_real'], mm_dict['evotype'], state_space)
def _is_similar(self, other, rtol, atol):
return ((self.baseMatrix.shape == other.baseMatrix.shape)
and _np.allclose(self.baseMatrix, other.baseMatrix, rtol=rtol, atol=atol)
and _np.allclose(self.leftTrans, other.leftTrans, rtol=rtol, atol=atol)
and _np.allclose(self.rightTrans, other.rightTrans, rtol=rtol, atol=atol)
and (self._construct_param_to_base_indices_map() == other._construct_param_to_base_indices_map())
and (self.enforceReal == other.enforceReal))
@property
def num_params(self):
return self.numParams
def to_vector(self):
return self.parameterArray
def from_vector(self, v, close=False, dirty_value=True):
self.parameterArray[:] = v
self._construct_matrix()
self.dirty = dirty_value
def deriv_wrt_params(self, wrt_filter=None):
derivMx = _np.zeros((self.numParams, self.dim, self.dim), 'complex')
for (i, j), terms in self.elementExpressions.items():
for term in terms:
params_to_mult = [self.parameterArray[p] for p in term.paramIndices]
for k, p in enumerate(term.paramIndices):
param_partial_prod = _np.prod(params_to_mult[0:k] + params_to_mult[k + 1:])
derivMx[p, i, j] += term.coeff * param_partial_prod
derivMx = _np.dot(self.leftTrans, _np.dot(derivMx, self.rightTrans))
derivMx = _np.rollaxis(derivMx, 1, 3)
derivMx = derivMx.reshape([self.dim**2, self.numParams])
if self.enforceReal:
assert(_np.linalg.norm(_np.imag(derivMx)) < IMAG_TOL)
derivMx = _np.real(derivMx)
if wrt_filter is None:
return derivMx
else:
return _np.take(derivMx, wrt_filter, axis=1)
def has_nonzero_hessian(self):
return False
def __str__(self):
s = "Linearly Parameterized operation with shape %s, num params = %d\n" % \
(str(self._ptr.shape), self.numParams)
s += _mt.mx_to_string(self._ptr, width=5, prec=1)
s += "\nParameterization:"
for (i, j), terms in self.elementExpressions.items():
tStr = ' + '.join(['*'.join(["p%d" % p for p in term.paramIndices])
for term in terms])
s += "LinearOperator[%d,%d] = %s\n" % (i, j, tStr)
return s
| true | true |
1c2fa5b2a808191a18d71c60db9fe0df1548d91f | 1,987 | py | Python | catacomb/utils/formatter.py | mitchjeitani/safe | 9b710319a769d532902cb1f7bfecd94155c4e2bf | [
"MIT"
] | 30 | 2017-11-09T09:38:04.000Z | 2021-04-15T16:04:18.000Z | catacomb/utils/formatter.py | mitchjeitani/safe | 9b710319a769d532902cb1f7bfecd94155c4e2bf | [
"MIT"
] | 14 | 2017-11-22T12:47:17.000Z | 2018-06-18T04:47:48.000Z | catacomb/utils/formatter.py | mitchjeitani/safe | 9b710319a769d532902cb1f7bfecd94155c4e2bf | [
"MIT"
] | 3 | 2018-04-04T10:40:20.000Z | 2019-05-25T07:17:04.000Z | import click
import sys
import textwrap
from catacomb.common import constants, errors
from terminaltables import AsciiTable
def color_text(text, color):
"""Adds color to the provided text.
Arguments:
text (str): The text to color.
color (str): The color as a string, e.g. 'red', 'green' and 'blue'
(more: http://click.pocoo.org/6/api/#click.style).
Returns:
A `string` with ANSI color codes appended to it, or just the regular
non-colored text if the color is not available.
"""
try:
text = click.style(text, fg=color)
except TypeError:
print_warning(errors.INVALID_COLOR.format(color))
return text
def to_table(headers, rows):
"""Produces a nicely formatted ASCII table from the provided data.
Arguments:
headers (list): The headers of the table.
rows (list): List of rows to append to the table.
Returns:
A `string` representation of the table.
"""
table = [headers]
table.extend(rows)
return AsciiTable(table).table
def create_row(*args):
"""Creates a new table row.
Arguments:
*args (list): Elements of the new row.
Returns:
A `list` representing the new row for a command.
"""
return [textwrap.fill(e, constants.TABLE_COL_MAX_WIDTH) for e in args]
def print_error(message):
"""Correctly styles and prints an error message to standard output.
Arguments:
message (str): An error message.
"""
click.echo(color_text(message, "red"), err=True)
def print_warning(message):
"""Correctly styles and prints a warning message to standard output.
Arguments:
message (str): A warning message.
"""
click.echo(color_text(message, "red"))
def print_success(message):
"""Correctly styles and prints a success message to standard output.
Arguments:
message (str): A success message.
"""
click.echo(color_text(message, "green"))
| 24.530864 | 76 | 0.653749 | import click
import sys
import textwrap
from catacomb.common import constants, errors
from terminaltables import AsciiTable
def color_text(text, color):
try:
text = click.style(text, fg=color)
except TypeError:
print_warning(errors.INVALID_COLOR.format(color))
return text
def to_table(headers, rows):
table = [headers]
table.extend(rows)
return AsciiTable(table).table
def create_row(*args):
return [textwrap.fill(e, constants.TABLE_COL_MAX_WIDTH) for e in args]
def print_error(message):
click.echo(color_text(message, "red"), err=True)
def print_warning(message):
click.echo(color_text(message, "red"))
def print_success(message):
click.echo(color_text(message, "green"))
| true | true |
1c2fa7114a080c54a55dcd81f2300d640e401a1f | 294 | py | Python | doc/sphinx/conf.py | ReverentEngineer/ber | 7942c0b10a42873d3507cac095a14b6aa9a73cdc | [
"MIT"
] | null | null | null | doc/sphinx/conf.py | ReverentEngineer/ber | 7942c0b10a42873d3507cac095a14b6aa9a73cdc | [
"MIT"
] | null | null | null | doc/sphinx/conf.py | ReverentEngineer/ber | 7942c0b10a42873d3507cac095a14b6aa9a73cdc | [
"MIT"
] | null | null | null | project = 'BER'
copyright = '2020, Jeff Caffrey-Hill'
author = 'Jeff Caffrey-Hill'
extensions = []
templates_path = ['_templates']
exclude_patterns = []
html_theme = 'alabaster'
html_static_path = ['_static']
html_css_files = ['css/style.css']
html_theme_options = {
'nosidebar': True,
}
| 21 | 37 | 0.704082 | project = 'BER'
copyright = '2020, Jeff Caffrey-Hill'
author = 'Jeff Caffrey-Hill'
extensions = []
templates_path = ['_templates']
exclude_patterns = []
html_theme = 'alabaster'
html_static_path = ['_static']
html_css_files = ['css/style.css']
html_theme_options = {
'nosidebar': True,
}
| true | true |
1c2fa7b4e689efac207e1e801a3d543f5d85fce7 | 772 | py | Python | preprocess/anonymize_wordpiece.py | mounicam/neural_splitter | 143346775d03667225bccb9455369a9b08352a67 | [
"BSD-3-Clause"
] | null | null | null | preprocess/anonymize_wordpiece.py | mounicam/neural_splitter | 143346775d03667225bccb9455369a9b08352a67 | [
"BSD-3-Clause"
] | null | null | null | preprocess/anonymize_wordpiece.py | mounicam/neural_splitter | 143346775d03667225bccb9455369a9b08352a67 | [
"BSD-3-Clause"
] | null | null | null | import argparse
from wordpiece import FullTokenizer
def main(args):
tokenizer = FullTokenizer(args.vocab, do_lower_case=False)
fp = open(args.output, "w")
for line in open(args.input):
line = line.strip()
tokens = tokenizer.tokenize(line)
tokens.append("[SEP]")
tokens = ["[CLS]"] + tokens
tokenized_line = " ".join(tokens)
tokenized_line = tokenized_line.replace("< SE ##P >", "[SEP]")
assert "\[UNK\]" not in tokenized_line
fp.write(tokenized_line + "\n")
fp.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--input')
parser.add_argument('--vocab')
parser.add_argument('--output')
args = parser.parse_args()
main(args) | 27.571429 | 70 | 0.623057 | import argparse
from wordpiece import FullTokenizer
def main(args):
tokenizer = FullTokenizer(args.vocab, do_lower_case=False)
fp = open(args.output, "w")
for line in open(args.input):
line = line.strip()
tokens = tokenizer.tokenize(line)
tokens.append("[SEP]")
tokens = ["[CLS]"] + tokens
tokenized_line = " ".join(tokens)
tokenized_line = tokenized_line.replace("< SE ##P >", "[SEP]")
assert "\[UNK\]" not in tokenized_line
fp.write(tokenized_line + "\n")
fp.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--input')
parser.add_argument('--vocab')
parser.add_argument('--output')
args = parser.parse_args()
main(args) | true | true |
1c2fa82082af0f0a89575f7ee8afb28ae0c8e46b | 965 | py | Python | sim_spawner/setup.py | 09ubberboy90/lvl4-ros2-sim-comp | c197c76b29a9d864a800b81332bc3a549ecaa7c3 | [
"BSD-3-Clause"
] | 1 | 2021-05-03T09:16:11.000Z | 2021-05-03T09:16:11.000Z | sim_spawner/setup.py | 09ubberboy90/lvl4-ros2-sim-comp | c197c76b29a9d864a800b81332bc3a549ecaa7c3 | [
"BSD-3-Clause"
] | 1 | 2021-05-03T20:39:31.000Z | 2021-11-24T14:57:55.000Z | sim_spawner/setup.py | 09ubberboy90/lvl4-ros2-sim-comp | c197c76b29a9d864a800b81332bc3a549ecaa7c3 | [
"BSD-3-Clause"
] | 1 | 2021-05-03T09:16:21.000Z | 2021-05-03T09:16:21.000Z | from setuptools import setup
package_name = 'sim_spawner'
setup(
name=package_name,
version='1.0.0',
packages=[package_name],
data_files=[
('share/ament_index/resource_index/packages',
['resource/' + package_name]),
('share/' + package_name, ['package.xml']),
],
install_requires=['setuptools'],
zip_safe=True,
maintainer='Florent Audonnet',
maintainer_email='2330834a@student.gla.ac.uk',
description='spawn object in both simulation and run the services to get their positions',
license='BSD 3-Clause License',
tests_require=['pytest'],
entry_points={
'console_scripts': [
"webots_spawner = sim_spawner.webots_spawner:main",
"webots_throw_spawner = sim_spawner.webots_throw_spawner:main",
"gazebo_spawner = sim_spawner.gazebo_spawner:main",
"gazebo_throw_spawner = sim_spawner.gazebo_throw_spawner:main",
],
},
)
| 31.129032 | 94 | 0.658031 | from setuptools import setup
package_name = 'sim_spawner'
setup(
name=package_name,
version='1.0.0',
packages=[package_name],
data_files=[
('share/ament_index/resource_index/packages',
['resource/' + package_name]),
('share/' + package_name, ['package.xml']),
],
install_requires=['setuptools'],
zip_safe=True,
maintainer='Florent Audonnet',
maintainer_email='2330834a@student.gla.ac.uk',
description='spawn object in both simulation and run the services to get their positions',
license='BSD 3-Clause License',
tests_require=['pytest'],
entry_points={
'console_scripts': [
"webots_spawner = sim_spawner.webots_spawner:main",
"webots_throw_spawner = sim_spawner.webots_throw_spawner:main",
"gazebo_spawner = sim_spawner.gazebo_spawner:main",
"gazebo_throw_spawner = sim_spawner.gazebo_throw_spawner:main",
],
},
)
| true | true |
1c2fa9eca07cb2778eed074247670b3c0203e92f | 6,944 | py | Python | components/aws/sagemaker/tests/unit_tests/tests/test_batch_transform.py | kamalmemon/pipelines | 7e68991a2a7bfa767f893facfe58190690ca89ed | [
"Apache-2.0"
] | null | null | null | components/aws/sagemaker/tests/unit_tests/tests/test_batch_transform.py | kamalmemon/pipelines | 7e68991a2a7bfa767f893facfe58190690ca89ed | [
"Apache-2.0"
] | 4 | 2022-02-14T21:39:59.000Z | 2022-03-08T23:38:00.000Z | components/aws/sagemaker/tests/unit_tests/tests/test_batch_transform.py | kamalmemon/pipelines | 7e68991a2a7bfa767f893facfe58190690ca89ed | [
"Apache-2.0"
] | 2 | 2019-10-15T03:06:15.000Z | 2019-10-15T03:10:39.000Z | import unittest
import os
import signal
from unittest.mock import patch, call, Mock, MagicMock, mock_open, ANY
from botocore.exceptions import ClientError
from batch_transform.src import batch_transform
from common import _utils
# TODO : Errors out if model_name doesn't contain '-'
# fix model_name '-' bug
required_args = [
'--region', 'us-west-2',
'--model_name', 'model-test',
'--input_location', 's3://fake-bucket/data',
'--output_location', 's3://fake-bucket/output',
'--instance_type', 'ml.c5.18xlarge',
'--output_location_output_path', '/tmp/output'
]
class BatchTransformTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
parser = batch_transform.create_parser()
cls.parser = parser
def test_create_parser(self):
self.assertIsNotNone(self.parser)
def test_main(self):
# Mock out all of utils except parser
batch_transform._utils = MagicMock()
batch_transform._utils.add_default_client_arguments = _utils.add_default_client_arguments
# Set some static returns
batch_transform._utils.create_transform_job.return_value = 'test-batch-job'
batch_transform.main(required_args)
# Check if correct requests were created and triggered
batch_transform._utils.create_transform_job.assert_called()
batch_transform._utils.wait_for_transform_job.assert_called()
batch_transform._utils.print_logs_for_job.assert_called()
# Check the file outputs
batch_transform._utils.write_output.assert_has_calls([
call('/tmp/output', 's3://fake-bucket/output')
])
def test_main_assumes_role(self):
# Mock out all of utils except parser
batch_transform._utils = MagicMock()
batch_transform._utils.add_default_client_arguments = _utils.add_default_client_arguments
# Set some static returns
batch_transform._utils.create_transform_job.return_value = 'test-batch-job'
assume_role_args = required_args + ['--assume_role', 'my-role']
batch_transform.main(assume_role_args)
batch_transform._utils.get_sagemaker_client.assert_called_once_with('us-west-2', None, assume_role_arn='my-role')
def test_batch_transform(self):
mock_client = MagicMock()
mock_args = self.parser.parse_args(required_args + ['--job_name', 'test-batch-job'])
response = _utils.create_transform_job(mock_client, vars(mock_args))
mock_client.create_transform_job.assert_called_once_with(
DataProcessing={'InputFilter': '', 'OutputFilter': '', 'JoinSource': 'None'},
Environment={},
MaxConcurrentTransforms=0,
MaxPayloadInMB=6,
ModelName='model-test',
Tags=[],
TransformInput={'DataSource': {'S3DataSource': {'S3DataType': 'S3Prefix', 'S3Uri': 's3://fake-bucket/data'}},
'ContentType': '', 'CompressionType': 'None', 'SplitType': 'None'},
TransformJobName='test-batch-job',
TransformOutput={'S3OutputPath': 's3://fake-bucket/output', 'Accept': None, 'KmsKeyId': ''},
TransformResources={'InstanceType': 'ml.c5.18xlarge', 'InstanceCount': None, 'VolumeKmsKeyId': ''}
)
self.assertEqual(response, 'test-batch-job')
def test_pass_all_arguments(self):
mock_client = MagicMock()
mock_args = self.parser.parse_args(required_args + [
'--job_name', 'test-batch-job',
'--max_concurrent', '5',
'--max_payload', '100',
'--batch_strategy', 'MultiRecord',
'--data_type', 'S3Prefix',
'--compression_type', 'Gzip',
'--split_type', 'RecordIO',
'--assemble_with', 'Line',
'--join_source', 'Input',
'--tags', '{"fake_key": "fake_value"}'
])
response = _utils.create_transform_job(mock_client, vars(mock_args))
mock_client.create_transform_job.assert_called_once_with(
BatchStrategy='MultiRecord',
DataProcessing={'InputFilter': '', 'OutputFilter': '',
'JoinSource': 'Input'},
Environment={},
MaxConcurrentTransforms=5,
MaxPayloadInMB=100,
ModelName='model-test',
Tags=[{'Key': 'fake_key', 'Value': 'fake_value'}],
TransformInput={
'DataSource': {'S3DataSource': {'S3DataType': 'S3Prefix',
'S3Uri': 's3://fake-bucket/data'}},
'ContentType': '',
'CompressionType': 'Gzip',
'SplitType': 'RecordIO',
},
TransformJobName='test-batch-job',
TransformOutput={
'S3OutputPath': 's3://fake-bucket/output',
'Accept': None,
'AssembleWith': 'Line',
'KmsKeyId': '',
},
TransformResources={'InstanceType': 'ml.c5.18xlarge',
'InstanceCount': None, 'VolumeKmsKeyId': ''}
)
def test_main_stop_tranform_job(self):
batch_transform._utils = MagicMock()
batch_transform._utils.create_transform_job.return_value = 'test-batch-job'
try:
os.kill(os.getpid(), signal.SIGTERM)
finally:
batch_transform._utils.stop_transform_job.assert_called_once_with(ANY, 'test-batch-job')
batch_transform._utils.print_logs_for_job.assert_not_called()
def test_utils_stop_transform_job(self):
mock_sm_client = MagicMock()
mock_sm_client.stop_transform_job.return_value = None
response = _utils.stop_transform_job(mock_sm_client, 'FakeJobName')
mock_sm_client.stop_transform_job.assert_called_once_with(
TransformJobName='FakeJobName'
)
self.assertEqual(response, None)
def test_sagemaker_exception_in_batch_transform(self):
mock_client = MagicMock()
mock_exception = ClientError({"Error": {"Message": "SageMaker broke"}}, "batch_transform")
mock_client.create_transform_job.side_effect = mock_exception
mock_args = self.parser.parse_args(required_args)
with self.assertRaises(Exception):
_utils.create_transform_job(mock_client, vars(mock_args))
def test_wait_for_transform_job_creation(self):
mock_client = MagicMock()
mock_client.describe_transform_job.side_effect = [
{"TransformJobStatus": "InProgress"},
{"TransformJobStatus": "Completed"},
{"TransformJobStatus": "Should not be called"}
]
_utils.wait_for_transform_job(mock_client, 'test-batch', 0)
self.assertEqual(mock_client.describe_transform_job.call_count, 2)
def test_wait_for_failed_job(self):
mock_client = MagicMock()
mock_client.describe_transform_job.side_effect = [
{"TransformJobStatus": "InProgress"},
{"TransformJobStatus": "Failed", "FailureReason": "SYSTEM FAILURE"},
{"TransformJobStatus": "Should not be called"}
]
with self.assertRaises(Exception):
_utils.wait_for_transform_job(mock_client, 'test-batch', 0)
self.assertEqual(mock_client.describe_transform_job.call_count, 2)
| 35.979275 | 117 | 0.671947 | import unittest
import os
import signal
from unittest.mock import patch, call, Mock, MagicMock, mock_open, ANY
from botocore.exceptions import ClientError
from batch_transform.src import batch_transform
from common import _utils
# fix model_name '-' bug
required_args = [
'--region', 'us-west-2',
'--model_name', 'model-test',
'--input_location', 's3://fake-bucket/data',
'--output_location', 's3://fake-bucket/output',
'--instance_type', 'ml.c5.18xlarge',
'--output_location_output_path', '/tmp/output'
]
class BatchTransformTestCase(unittest.TestCase):
@classmethod
def setUpClass(cls):
parser = batch_transform.create_parser()
cls.parser = parser
def test_create_parser(self):
self.assertIsNotNone(self.parser)
def test_main(self):
# Mock out all of utils except parser
batch_transform._utils = MagicMock()
batch_transform._utils.add_default_client_arguments = _utils.add_default_client_arguments
# Set some static returns
batch_transform._utils.create_transform_job.return_value = 'test-batch-job'
batch_transform.main(required_args)
# Check if correct requests were created and triggered
batch_transform._utils.create_transform_job.assert_called()
batch_transform._utils.wait_for_transform_job.assert_called()
batch_transform._utils.print_logs_for_job.assert_called()
# Check the file outputs
batch_transform._utils.write_output.assert_has_calls([
call('/tmp/output', 's3://fake-bucket/output')
])
def test_main_assumes_role(self):
# Mock out all of utils except parser
batch_transform._utils = MagicMock()
batch_transform._utils.add_default_client_arguments = _utils.add_default_client_arguments
# Set some static returns
batch_transform._utils.create_transform_job.return_value = 'test-batch-job'
assume_role_args = required_args + ['--assume_role', 'my-role']
batch_transform.main(assume_role_args)
batch_transform._utils.get_sagemaker_client.assert_called_once_with('us-west-2', None, assume_role_arn='my-role')
def test_batch_transform(self):
mock_client = MagicMock()
mock_args = self.parser.parse_args(required_args + ['--job_name', 'test-batch-job'])
response = _utils.create_transform_job(mock_client, vars(mock_args))
mock_client.create_transform_job.assert_called_once_with(
DataProcessing={'InputFilter': '', 'OutputFilter': '', 'JoinSource': 'None'},
Environment={},
MaxConcurrentTransforms=0,
MaxPayloadInMB=6,
ModelName='model-test',
Tags=[],
TransformInput={'DataSource': {'S3DataSource': {'S3DataType': 'S3Prefix', 'S3Uri': 's3://fake-bucket/data'}},
'ContentType': '', 'CompressionType': 'None', 'SplitType': 'None'},
TransformJobName='test-batch-job',
TransformOutput={'S3OutputPath': 's3://fake-bucket/output', 'Accept': None, 'KmsKeyId': ''},
TransformResources={'InstanceType': 'ml.c5.18xlarge', 'InstanceCount': None, 'VolumeKmsKeyId': ''}
)
self.assertEqual(response, 'test-batch-job')
def test_pass_all_arguments(self):
mock_client = MagicMock()
mock_args = self.parser.parse_args(required_args + [
'--job_name', 'test-batch-job',
'--max_concurrent', '5',
'--max_payload', '100',
'--batch_strategy', 'MultiRecord',
'--data_type', 'S3Prefix',
'--compression_type', 'Gzip',
'--split_type', 'RecordIO',
'--assemble_with', 'Line',
'--join_source', 'Input',
'--tags', '{"fake_key": "fake_value"}'
])
response = _utils.create_transform_job(mock_client, vars(mock_args))
mock_client.create_transform_job.assert_called_once_with(
BatchStrategy='MultiRecord',
DataProcessing={'InputFilter': '', 'OutputFilter': '',
'JoinSource': 'Input'},
Environment={},
MaxConcurrentTransforms=5,
MaxPayloadInMB=100,
ModelName='model-test',
Tags=[{'Key': 'fake_key', 'Value': 'fake_value'}],
TransformInput={
'DataSource': {'S3DataSource': {'S3DataType': 'S3Prefix',
'S3Uri': 's3://fake-bucket/data'}},
'ContentType': '',
'CompressionType': 'Gzip',
'SplitType': 'RecordIO',
},
TransformJobName='test-batch-job',
TransformOutput={
'S3OutputPath': 's3://fake-bucket/output',
'Accept': None,
'AssembleWith': 'Line',
'KmsKeyId': '',
},
TransformResources={'InstanceType': 'ml.c5.18xlarge',
'InstanceCount': None, 'VolumeKmsKeyId': ''}
)
def test_main_stop_tranform_job(self):
batch_transform._utils = MagicMock()
batch_transform._utils.create_transform_job.return_value = 'test-batch-job'
try:
os.kill(os.getpid(), signal.SIGTERM)
finally:
batch_transform._utils.stop_transform_job.assert_called_once_with(ANY, 'test-batch-job')
batch_transform._utils.print_logs_for_job.assert_not_called()
def test_utils_stop_transform_job(self):
mock_sm_client = MagicMock()
mock_sm_client.stop_transform_job.return_value = None
response = _utils.stop_transform_job(mock_sm_client, 'FakeJobName')
mock_sm_client.stop_transform_job.assert_called_once_with(
TransformJobName='FakeJobName'
)
self.assertEqual(response, None)
def test_sagemaker_exception_in_batch_transform(self):
mock_client = MagicMock()
mock_exception = ClientError({"Error": {"Message": "SageMaker broke"}}, "batch_transform")
mock_client.create_transform_job.side_effect = mock_exception
mock_args = self.parser.parse_args(required_args)
with self.assertRaises(Exception):
_utils.create_transform_job(mock_client, vars(mock_args))
def test_wait_for_transform_job_creation(self):
mock_client = MagicMock()
mock_client.describe_transform_job.side_effect = [
{"TransformJobStatus": "InProgress"},
{"TransformJobStatus": "Completed"},
{"TransformJobStatus": "Should not be called"}
]
_utils.wait_for_transform_job(mock_client, 'test-batch', 0)
self.assertEqual(mock_client.describe_transform_job.call_count, 2)
def test_wait_for_failed_job(self):
mock_client = MagicMock()
mock_client.describe_transform_job.side_effect = [
{"TransformJobStatus": "InProgress"},
{"TransformJobStatus": "Failed", "FailureReason": "SYSTEM FAILURE"},
{"TransformJobStatus": "Should not be called"}
]
with self.assertRaises(Exception):
_utils.wait_for_transform_job(mock_client, 'test-batch', 0)
self.assertEqual(mock_client.describe_transform_job.call_count, 2)
| true | true |
1c2faa4b61cd56eb36cb503b7db5d29f59826ba6 | 35,062 | py | Python | zerver/lib/push_notifications.py | apexlabs-ai/zulip | 316a87c03736f2ef3503b3417ee3d16cae1b913d | [
"Apache-2.0"
] | null | null | null | zerver/lib/push_notifications.py | apexlabs-ai/zulip | 316a87c03736f2ef3503b3417ee3d16cae1b913d | [
"Apache-2.0"
] | null | null | null | zerver/lib/push_notifications.py | apexlabs-ai/zulip | 316a87c03736f2ef3503b3417ee3d16cae1b913d | [
"Apache-2.0"
] | null | null | null | import base64
import binascii
import logging
import re
import time
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Union
import gcm
import lxml.html
import orjson
from django.conf import settings
from django.db import IntegrityError, transaction
from django.db.models import F
from django.utils.timezone import now as timezone_now
from django.utils.translation import ugettext as _
from zerver.decorator import statsd_increment
from zerver.lib.avatar import absolute_avatar_url
from zerver.lib.exceptions import JsonableError
from zerver.lib.message import access_message, bulk_access_messages_expect_usermessage, huddle_users
from zerver.lib.remote_server import send_json_to_push_bouncer, send_to_push_bouncer
from zerver.lib.timestamp import datetime_to_timestamp
from zerver.models import (
ArchivedMessage,
Message,
PushDeviceToken,
Recipient,
UserMessage,
UserProfile,
get_display_recipient,
get_user_profile_by_id,
receives_offline_push_notifications,
receives_online_notifications,
)
if TYPE_CHECKING:
from apns2.client import APNsClient
logger = logging.getLogger(__name__)
if settings.ZILENCER_ENABLED:
from zilencer.models import RemotePushDeviceToken
DeviceToken = Union[PushDeviceToken, "RemotePushDeviceToken"]
# We store the token as b64, but apns-client wants hex strings
def b64_to_hex(data: str) -> str:
return binascii.hexlify(base64.b64decode(data)).decode('utf-8')
def hex_to_b64(data: str) -> str:
return base64.b64encode(binascii.unhexlify(data)).decode()
#
# Sending to APNs, for iOS
#
_apns_client: Optional["APNsClient"] = None
_apns_client_initialized = False
def get_apns_client() -> 'Optional[APNsClient]':
# We lazily do this import as part of optimizing Zulip's base
# import time.
from apns2.client import APNsClient
global _apns_client, _apns_client_initialized
if not _apns_client_initialized:
# NB if called concurrently, this will make excess connections.
# That's a little sloppy, but harmless unless a server gets
# hammered with a ton of these all at once after startup.
if settings.APNS_CERT_FILE is not None:
_apns_client = APNsClient(credentials=settings.APNS_CERT_FILE,
use_sandbox=settings.APNS_SANDBOX)
_apns_client_initialized = True
return _apns_client
def apns_enabled() -> bool:
client = get_apns_client()
return client is not None
def modernize_apns_payload(data: Dict[str, Any]) -> Dict[str, Any]:
'''Take a payload in an unknown Zulip version's format, and return in current format.'''
# TODO this isn't super robust as is -- if a buggy remote server
# sends a malformed payload, we are likely to raise an exception.
if 'message_ids' in data:
# The format sent by 1.6.0, from the earliest pre-1.6.0
# version with bouncer support up until 613d093d7 pre-1.7.0:
# 'alert': str, # just sender, and text about PM/group-PM/mention
# 'message_ids': List[int], # always just one
return {
'alert': data['alert'],
'badge': 0,
'custom': {
'zulip': {
'message_ids': data['message_ids'],
},
},
}
else:
# Something already compatible with the current format.
# `alert` may be a string, or a dict with `title` and `body`.
# In 1.7.0 and 1.7.1, before 0912b5ba8 pre-1.8.0, the only
# item in `custom.zulip` is `message_ids`.
return data
APNS_MAX_RETRIES = 3
@statsd_increment("apple_push_notification")
def send_apple_push_notification(user_id: int, devices: List[DeviceToken],
payload_data: Dict[str, Any], remote: bool=False) -> None:
if not devices:
return
# We lazily do the APNS imports as part of optimizing Zulip's base
# import time; since these are only needed in the push
# notification queue worker, it's best to only import them in the
# code that needs them.
from apns2.payload import Payload as APNsPayload
from hyper.http20.exceptions import HTTP20Error
client = get_apns_client()
if client is None:
logger.debug("APNs: Dropping a notification because nothing configured. "
"Set PUSH_NOTIFICATION_BOUNCER_URL (or APNS_CERT_FILE).")
return
if remote:
assert settings.ZILENCER_ENABLED
DeviceTokenClass = RemotePushDeviceToken
else:
DeviceTokenClass = PushDeviceToken
logger.info("APNs: Sending notification for user %d to %d devices",
user_id, len(devices))
payload = APNsPayload(**modernize_apns_payload(payload_data))
expiration = int(time.time() + 24 * 3600)
retries_left = APNS_MAX_RETRIES
for device in devices:
# TODO obviously this should be made to actually use the async
def attempt_send() -> Optional[str]:
assert client is not None
try:
stream_id = client.send_notification_async(
device.token, payload, topic=settings.APNS_TOPIC,
expiration=expiration)
return client.get_notification_result(stream_id)
except HTTP20Error as e:
logger.warning("APNs: HTTP error sending for user %d to device %s: %s",
user_id, device.token, e.__class__.__name__)
return None
except BrokenPipeError as e:
logger.warning("APNs: BrokenPipeError sending for user %d to device %s: %s",
user_id, device.token, e.__class__.__name__)
return None
except ConnectionError as e: # nocoverage
logger.warning("APNs: ConnectionError sending for user %d to device %s: %s",
user_id, device.token, e.__class__.__name__)
return None
result = attempt_send()
while result is None and retries_left > 0:
retries_left -= 1
result = attempt_send()
if result is None:
result = "HTTP error, retries exhausted"
if result[0] == "Unregistered":
# For some reason, "Unregistered" result values have a
# different format, as a tuple of the pair ("Unregistered", 12345132131).
result = result[0]
if result == 'Success':
logger.info("APNs: Success sending for user %d to device %s",
user_id, device.token)
elif result in ["Unregistered", "BadDeviceToken", "DeviceTokenNotForTopic"]:
logger.info("APNs: Removing invalid/expired token %s (%s)", device.token, result)
# We remove all entries for this token (There
# could be multiple for different Zulip servers).
DeviceTokenClass.objects.filter(token=device.token, kind=DeviceTokenClass.APNS).delete()
else:
logger.warning("APNs: Failed to send for user %d to device %s: %s",
user_id, device.token, result)
#
# Sending to GCM, for Android
#
def make_gcm_client() -> gcm.GCM: # nocoverage
# From GCM upstream's doc for migrating to FCM:
#
# FCM supports HTTP and XMPP protocols that are virtually
# identical to the GCM server protocols, so you don't need to
# update your sending logic for the migration.
#
# https://developers.google.com/cloud-messaging/android/android-migrate-fcm
#
# The one thing we're required to change on the server is the URL of
# the endpoint. So we get to keep using the GCM client library we've
# been using (as long as we're happy with it) -- just monkey-patch in
# that one change, because the library's API doesn't anticipate that
# as a customization point.
gcm.gcm.GCM_URL = 'https://fcm.googleapis.com/fcm/send'
return gcm.GCM(settings.ANDROID_GCM_API_KEY)
if settings.ANDROID_GCM_API_KEY: # nocoverage
gcm_client = make_gcm_client()
else:
gcm_client = None
def gcm_enabled() -> bool: # nocoverage
return gcm_client is not None
def send_android_push_notification_to_user(user_profile: UserProfile, data: Dict[str, Any],
options: Dict[str, Any]) -> None:
devices = list(PushDeviceToken.objects.filter(user=user_profile,
kind=PushDeviceToken.GCM))
send_android_push_notification(devices, data, options)
def parse_gcm_options(options: Dict[str, Any], data: Dict[str, Any]) -> str:
"""
Parse GCM options, supplying defaults, and raising an error if invalid.
The options permitted here form part of the Zulip notification
bouncer's API. They are:
`priority`: Passed through to GCM; see upstream doc linked below.
Zulip servers should always set this; when unset, we guess a value
based on the behavior of old server versions.
Including unrecognized options is an error.
For details on options' semantics, see this GCM upstream doc:
https://firebase.google.com/docs/cloud-messaging/http-server-ref
Returns `priority`.
"""
priority = options.pop('priority', None)
if priority is None:
# An older server. Identify if this seems to be an actual notification.
if data.get('event') == 'message':
priority = 'high'
else: # `'event': 'remove'`, presumably
priority = 'normal'
if priority not in ('normal', 'high'):
raise JsonableError(_(
"Invalid GCM option to bouncer: priority {!r}",
).format(priority))
if options:
# We're strict about the API; there is no use case for a newer Zulip
# server talking to an older bouncer, so we only need to provide
# one-way compatibility.
raise JsonableError(_(
"Invalid GCM options to bouncer: {}",
).format(orjson.dumps(options).decode()))
return priority # when this grows a second option, can make it a tuple
@statsd_increment("android_push_notification")
def send_android_push_notification(devices: List[DeviceToken], data: Dict[str, Any],
options: Dict[str, Any], remote: bool=False) -> None:
"""
Send a GCM message to the given devices.
See https://firebase.google.com/docs/cloud-messaging/http-server-ref
for the GCM upstream API which this talks to.
data: The JSON object (decoded) to send as the 'data' parameter of
the GCM message.
options: Additional options to control the GCM message sent.
For details, see `parse_gcm_options`.
"""
if not devices:
return
if not gcm_client:
logger.debug("Skipping sending a GCM push notification since "
"PUSH_NOTIFICATION_BOUNCER_URL and ANDROID_GCM_API_KEY are both unset")
return
reg_ids = [device.token for device in devices]
priority = parse_gcm_options(options, data)
try:
# See https://firebase.google.com/docs/cloud-messaging/http-server-ref .
# Two kwargs `retries` and `session` get eaten by `json_request`;
# the rest pass through to the GCM server.
res = gcm_client.json_request(registration_ids=reg_ids,
priority=priority,
data=data,
retries=10)
except OSError:
logger.warning("Error while pushing to GCM", exc_info=True)
return
if res and 'success' in res:
for reg_id, msg_id in res['success'].items():
logger.info("GCM: Sent %s as %s", reg_id, msg_id)
if remote:
assert settings.ZILENCER_ENABLED
DeviceTokenClass = RemotePushDeviceToken
else:
DeviceTokenClass = PushDeviceToken
# res.canonical will contain results when there are duplicate registrations for the same
# device. The "canonical" registration is the latest registration made by the device.
# Ref: https://developer.android.com/google/gcm/adv.html#canonical
if 'canonical' in res:
for reg_id, new_reg_id in res['canonical'].items():
if reg_id == new_reg_id:
# I'm not sure if this should happen. In any case, not really actionable.
logger.warning("GCM: Got canonical ref but it already matches our ID %s!", reg_id)
elif not DeviceTokenClass.objects.filter(token=new_reg_id,
kind=DeviceTokenClass.GCM).count():
# This case shouldn't happen; any time we get a canonical ref it should have been
# previously registered in our system.
#
# That said, recovery is easy: just update the current PDT object to use the new ID.
logger.warning(
"GCM: Got canonical ref %s replacing %s but new ID not registered! Updating.",
new_reg_id, reg_id)
DeviceTokenClass.objects.filter(
token=reg_id, kind=DeviceTokenClass.GCM).update(token=new_reg_id)
else:
# Since we know the new ID is registered in our system we can just drop the old one.
logger.info("GCM: Got canonical ref %s, dropping %s", new_reg_id, reg_id)
DeviceTokenClass.objects.filter(token=reg_id, kind=DeviceTokenClass.GCM).delete()
if 'errors' in res:
for error, reg_ids in res['errors'].items():
if error in ['NotRegistered', 'InvalidRegistration']:
for reg_id in reg_ids:
logger.info("GCM: Removing %s", reg_id)
# We remove all entries for this token (There
# could be multiple for different Zulip servers).
DeviceTokenClass.objects.filter(token=reg_id, kind=DeviceTokenClass.GCM).delete()
else:
for reg_id in reg_ids:
logger.warning("GCM: Delivery to %s failed: %s", reg_id, error)
# python-gcm handles retrying of the unsent messages.
# Ref: https://github.com/geeknam/python-gcm/blob/master/gcm/gcm.py#L497
#
# Sending to a bouncer
#
def uses_notification_bouncer() -> bool:
return settings.PUSH_NOTIFICATION_BOUNCER_URL is not None
def send_notifications_to_bouncer(user_profile_id: int,
apns_payload: Dict[str, Any],
gcm_payload: Dict[str, Any],
gcm_options: Dict[str, Any]) -> None:
post_data = {
'user_id': user_profile_id,
'apns_payload': apns_payload,
'gcm_payload': gcm_payload,
'gcm_options': gcm_options,
}
# Calls zilencer.views.remote_server_notify_push
send_json_to_push_bouncer('POST', 'push/notify', post_data)
#
# Managing device tokens
#
def num_push_devices_for_user(user_profile: UserProfile, kind: Optional[int]=None) -> PushDeviceToken:
if kind is None:
return PushDeviceToken.objects.filter(user=user_profile).count()
else:
return PushDeviceToken.objects.filter(user=user_profile, kind=kind).count()
def add_push_device_token(user_profile: UserProfile,
token_str: str,
kind: int,
ios_app_id: Optional[str]=None) -> PushDeviceToken:
logger.info("Registering push device: %d %r %d %r",
user_profile.id, token_str, kind, ios_app_id)
# Regardless of whether we're using the push notifications
# bouncer, we want to store a PushDeviceToken record locally.
# These can be used to discern whether the user has any mobile
# devices configured, and is also where we will store encryption
# keys for mobile push notifications.
try:
with transaction.atomic():
token = PushDeviceToken.objects.create(
user_id=user_profile.id,
kind=kind,
token=token_str,
ios_app_id=ios_app_id,
# last_updated is to be renamed to date_created.
last_updated=timezone_now())
except IntegrityError:
token = PushDeviceToken.objects.get(
user_id=user_profile.id,
kind=kind,
token=token_str,
)
# If we're sending things to the push notification bouncer
# register this user with them here
if uses_notification_bouncer():
post_data = {
'server_uuid': settings.ZULIP_ORG_ID,
'user_id': user_profile.id,
'token': token_str,
'token_kind': kind,
}
if kind == PushDeviceToken.APNS:
post_data['ios_app_id'] = ios_app_id
logger.info("Sending new push device to bouncer: %r", post_data)
# Calls zilencer.views.register_remote_push_device
send_to_push_bouncer('POST', 'push/register', post_data)
return token
def remove_push_device_token(user_profile: UserProfile, token_str: str, kind: int) -> None:
try:
token = PushDeviceToken.objects.get(token=token_str, kind=kind, user=user_profile)
token.delete()
except PushDeviceToken.DoesNotExist:
# If we are using bouncer, don't raise the exception. It will
# be raised by the code below eventually. This is important
# during the transition period after upgrading to a version
# that stores local PushDeviceToken objects even when using
# the push notifications bouncer.
if not uses_notification_bouncer():
raise JsonableError(_("Token does not exist"))
# If we're sending things to the push notification bouncer
# unregister this user with them here
if uses_notification_bouncer():
# TODO: Make this a remove item
post_data = {
'server_uuid': settings.ZULIP_ORG_ID,
'user_id': user_profile.id,
'token': token_str,
'token_kind': kind,
}
# Calls zilencer.views.unregister_remote_push_device
send_to_push_bouncer("POST", "push/unregister", post_data)
def clear_push_device_tokens(user_profile_id: int) -> None:
# Deletes all of a user's PushDeviceTokens.
if uses_notification_bouncer():
post_data = {
'server_uuid': settings.ZULIP_ORG_ID,
'user_id': user_profile_id,
}
send_to_push_bouncer("POST", "push/unregister/all", post_data)
return
PushDeviceToken.objects.filter(user_id=user_profile_id).delete()
#
# Push notifications in general
#
def push_notifications_enabled() -> bool:
'''True just if this server has configured a way to send push notifications.'''
if (uses_notification_bouncer()
and settings.ZULIP_ORG_KEY is not None
and settings.ZULIP_ORG_ID is not None): # nocoverage
# We have the needed configuration to send push notifications through
# the bouncer. Better yet would be to confirm that this config actually
# works -- e.g., that we have ever successfully sent to the bouncer --
# but this is a good start.
return True
if settings.DEVELOPMENT and (apns_enabled() or gcm_enabled()): # nocoverage
# Since much of the notifications logic is platform-specific, the mobile
# developers often work on just one platform at a time, so we should
# only require one to be configured.
return True
elif apns_enabled() and gcm_enabled(): # nocoverage
# We have the needed configuration to send through APNs and GCM directly
# (i.e., we are the bouncer, presumably.) Again, assume it actually works.
return True
return False
def initialize_push_notifications() -> None:
if not push_notifications_enabled():
if settings.DEVELOPMENT and not settings.TEST_SUITE: # nocoverage
# Avoid unnecessary spam on development environment startup
return
logger.warning("Mobile push notifications are not configured.\n "
"See https://zulip.readthedocs.io/en/latest/"
"production/mobile-push-notifications.html")
def get_gcm_alert(message: Message) -> str:
"""
Determine what alert string to display based on the missed messages.
"""
sender_str = message.sender.full_name
if message.recipient.type == Recipient.HUDDLE and message.trigger == 'private_message':
return f"New private group message from {sender_str}"
elif message.recipient.type == Recipient.PERSONAL and message.trigger == 'private_message':
return f"New private message from {sender_str}"
elif message.is_stream_message() and (message.trigger == 'mentioned' or
message.trigger == 'wildcard_mentioned'):
return f"New mention from {sender_str}"
else: # message.is_stream_message() and message.trigger == 'stream_push_notify'
return f"New stream message from {sender_str} in {get_display_recipient(message.recipient)}"
def get_mobile_push_content(rendered_content: str) -> str:
def get_text(elem: lxml.html.HtmlElement) -> str:
# Convert default emojis to their Unicode equivalent.
classes = elem.get("class", "")
if "emoji" in classes:
match = re.search(r"emoji-(?P<emoji_code>\S+)", classes)
if match:
emoji_code = match.group('emoji_code')
char_repr = ""
for codepoint in emoji_code.split('-'):
char_repr += chr(int(codepoint, 16))
return char_repr
# Handles realm emojis, avatars etc.
if elem.tag == "img":
return elem.get("alt", "")
if elem.tag == 'blockquote':
return '' # To avoid empty line before quote text
return elem.text or ''
def format_as_quote(quote_text: str) -> str:
return "".join(
f"> {line}\n" for line in quote_text.splitlines()
if line # Remove empty lines
)
def render_olist(ol: lxml.html.HtmlElement) -> str:
items = []
counter = int(ol.get('start')) if ol.get('start') else 1
nested_levels = len(list(ol.iterancestors('ol')))
indent = ('\n' + ' ' * nested_levels) if nested_levels else ''
for li in ol:
items.append(indent + str(counter) + '. ' + process(li).strip())
counter += 1
return '\n'.join(items)
def render_spoiler(elem: lxml.html.HtmlElement) -> str:
header = elem.find_class('spoiler-header')[0]
text = process(header).strip()
if len(text) == 0:
return "(…)\n"
return f"{text} (…)\n"
def process(elem: lxml.html.HtmlElement) -> str:
plain_text = ''
if elem.tag == 'ol':
plain_text = render_olist(elem)
elif 'spoiler-block' in elem.get("class", ""):
plain_text += render_spoiler(elem)
else:
plain_text = get_text(elem)
sub_text = ''
for child in elem:
sub_text += process(child)
if elem.tag == 'blockquote':
sub_text = format_as_quote(sub_text)
plain_text += sub_text
plain_text += elem.tail or ""
return plain_text
if settings.PUSH_NOTIFICATION_REDACT_CONTENT:
return "***REDACTED***"
elem = lxml.html.fromstring(rendered_content)
plain_text = process(elem)
return plain_text
def truncate_content(content: str) -> Tuple[str, bool]:
# We use Unicode character 'HORIZONTAL ELLIPSIS' (U+2026) instead
# of three dots as this saves two extra characters for textual
# content. This function will need to be updated to handle Unicode
# combining characters and tags when we start supporting themself.
if len(content) <= 200:
return content, False
return content[:200] + "…", True
def get_base_payload(user_profile: UserProfile) -> Dict[str, Any]:
'''Common fields for all notification payloads.'''
data: Dict[str, Any] = {}
# These will let the app support logging into multiple realms and servers.
data['server'] = settings.EXTERNAL_HOST
data['realm_id'] = user_profile.realm.id
data['realm_uri'] = user_profile.realm.uri
data['user_id'] = user_profile.id
return data
def get_message_payload(user_profile: UserProfile, message: Message) -> Dict[str, Any]:
'''Common fields for `message` payloads, for all platforms.'''
data = get_base_payload(user_profile)
# `sender_id` is preferred, but some existing versions use `sender_email`.
data['sender_id'] = message.sender.id
data['sender_email'] = message.sender.email
if message.recipient.type == Recipient.STREAM:
data['recipient_type'] = "stream"
data['stream'] = get_display_recipient(message.recipient)
data['topic'] = message.topic_name()
elif message.recipient.type == Recipient.HUDDLE:
data['recipient_type'] = "private"
data['pm_users'] = huddle_users(message.recipient.id)
else: # Recipient.PERSONAL
data['recipient_type'] = "private"
return data
def get_apns_alert_title(message: Message) -> str:
"""
On an iOS notification, this is the first bolded line.
"""
if message.recipient.type == Recipient.HUDDLE:
recipients = get_display_recipient(message.recipient)
assert isinstance(recipients, list)
return ', '.join(sorted(r['full_name'] for r in recipients))
elif message.is_stream_message():
return f"#{get_display_recipient(message.recipient)} > {message.topic_name()}"
# For personal PMs, we just show the sender name.
return message.sender.full_name
def get_apns_alert_subtitle(message: Message) -> str:
    """
    On an iOS notification, this is the second bolded line.
    """
    trigger = message.trigger
    if trigger == "mentioned":
        return _("{full_name} mentioned you:").format(full_name=message.sender.full_name)
    if trigger == "wildcard_mentioned":
        return _("{full_name} mentioned everyone:").format(full_name=message.sender.full_name)
    if message.recipient.type == Recipient.PERSONAL:
        # 1:1 PMs carry no subtitle; the title already names the sender.
        return ""
    # For group PMs, or regular messages to a stream, just use a colon
    # to indicate this is the sender.
    return message.sender.full_name + ":"
def get_apns_badge_count(user_profile: UserProfile,
                         read_messages_ids: Optional[Sequence[int]]=None) -> int:
    """Return the iOS badge count to send for this user.

    NOTE: Temporarily hard-wired to return 0 until we can debug a
    likely mobile-app-side issue with handling notifications while the
    app is open; see get_apns_badge_count_future for the intended
    implementation.

    The default for `read_messages_ids` used to be a shared mutable
    `[]` (a classic Python pitfall); `None` is the safe equivalent and
    is backward-compatible since the argument is currently unused.
    """
    return 0
def get_apns_badge_count_future(user_profile: UserProfile,
                                read_messages_ids: Optional[Sequence[int]]=None) -> int:
    """Intended implementation of get_apns_badge_count; unused for now.

    Counts the user's messages carrying an active push notification,
    excluding `read_messages_ids`: messages we've just marked as read
    are still flagged as having active notifications (the flag is
    cleared only after that update has been sent to the devices), so
    they must be excluded from the count explicitly.
    """
    # Avoid the mutable-default-argument pitfall; None means "nothing
    # was just read" and behaves exactly like the old [] default.
    if read_messages_ids is None:
        read_messages_ids = []
    return UserMessage.objects.filter(
        user_profile=user_profile
    ).extra(
        where=[UserMessage.where_active_push_notification()]
    ).exclude(
        message_id__in=read_messages_ids
    ).count()
def get_message_payload_apns(user_profile: UserProfile, message: Message) -> Dict[str, Any]:
    '''A `message` payload for iOS, via APNs.'''
    zulip_data = get_message_payload(user_profile, message)
    zulip_data.update(message_ids=[message.id])
    assert message.rendered_content is not None
    body, _ = truncate_content(get_mobile_push_content(message.rendered_content))
    return {
        'alert': {
            'title': get_apns_alert_title(message),
            'subtitle': get_apns_alert_subtitle(message),
            'body': body,
        },
        'sound': 'default',
        'badge': get_apns_badge_count(user_profile),
        'custom': {'zulip': zulip_data},
    }
def get_message_payload_gcm(
    user_profile: UserProfile, message: Message,
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    '''A `message` payload + options, for Android via GCM/FCM.'''
    data = get_message_payload(user_profile, message)
    assert message.rendered_content is not None
    content, truncated = truncate_content(get_mobile_push_content(message.rendered_content))
    data.update(
        event='message',
        alert=get_gcm_alert(message),
        # The bare name `message_id` is reserved for CCS, hence the prefix.
        zulip_message_id=message.id,
        time=datetime_to_timestamp(message.date_sent),
        content=content,
        content_truncated=truncated,
        sender_full_name=message.sender.full_name,
        sender_avatar_url=absolute_avatar_url(message.sender),
    )
    # New-message notifications are user-visible, so send at high priority.
    return data, {'priority': 'high'}
def get_remove_payload_gcm(
    user_profile: UserProfile, message_ids: List[int],
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    '''A `remove` payload + options, for Android via GCM/FCM.

    NOTE(review): assumes `message_ids` is non-empty; an empty list
    would raise IndexError on `message_ids[0]` below -- confirm callers
    never pass one.
    '''
    gcm_payload = get_base_payload(user_profile)
    gcm_payload.update(
        event='remove',
        zulip_message_ids=','.join(str(id) for id in message_ids),
        # Older clients (all clients older than 2019-02-13) look only at
        # `zulip_message_id` and ignore `zulip_message_ids`. Do our best.
        zulip_message_id=message_ids[0],
    )
    # Removals are not user-visible, so normal priority is enough.
    gcm_options = {'priority': 'normal'}
    return gcm_payload, gcm_options
def get_remove_payload_apns(user_profile: UserProfile, message_ids: List[int]) -> Dict[str, Any]:
    """An APNs payload that revokes previously-sent notifications."""
    remove_data = get_base_payload(user_profile)
    remove_data.update(
        event='remove',
        zulip_message_ids=','.join(str(id) for id in message_ids),
    )
    return {
        'badge': get_apns_badge_count(user_profile, message_ids),
        'custom': {'zulip': remove_data},
    }
def handle_remove_push_notification(user_profile_id: int, message_ids: List[int]) -> None:
    """This should be called when a message that previously had a
    mobile push notification executed is read.  This triggers a push to the
    mobile app, when the message is read on the server, to remove the
    message from the notification.
    """
    user_profile = get_user_profile_by_id(user_profile_id)
    # Filter down to ids the user actually has a UserMessage row for.
    message_ids = bulk_access_messages_expect_usermessage(user_profile_id, message_ids)
    gcm_payload, gcm_options = get_remove_payload_gcm(user_profile, message_ids)
    apns_payload = get_remove_payload_apns(user_profile, message_ids)
    if uses_notification_bouncer():
        send_notifications_to_bouncer(user_profile_id,
                                      apns_payload,
                                      gcm_payload,
                                      gcm_options)
    else:
        android_devices = list(PushDeviceToken.objects.filter(
            user=user_profile, kind=PushDeviceToken.GCM))
        apple_devices = list(PushDeviceToken.objects.filter(
            user=user_profile, kind=PushDeviceToken.APNS))
        if android_devices:
            send_android_push_notification(android_devices, gcm_payload, gcm_options)
        if apple_devices:
            send_apple_push_notification(user_profile_id, apple_devices, apns_payload)
    # Only after the removal pushes have been issued do we clear the
    # active-notification flag on the affected messages.
    UserMessage.objects.filter(
        user_profile_id=user_profile_id,
        message_id__in=message_ids,
    ).update(
        flags=F('flags').bitand(
            ~UserMessage.flags.active_mobile_push_notification))
@statsd_increment("push_notifications")
def handle_push_notification(user_profile_id: int, missed_message: Dict[str, Any]) -> None:
    """Send mobile push notifications for a single message, if appropriate.

    missed_message is the event received by the
    zerver.worker.queue_processors.PushNotificationWorker.consume function.
    """
    if not push_notifications_enabled():
        return
    user_profile = get_user_profile_by_id(user_profile_id)
    if not (receives_offline_push_notifications(user_profile) or
            receives_online_notifications(user_profile)):
        return
    try:
        (message, user_message) = access_message(user_profile, missed_message['message_id'])
    except JsonableError:
        if ArchivedMessage.objects.filter(id=missed_message['message_id']).exists():
            # If the cause is a race with the message being deleted,
            # that's normal and we have no need to log an error.
            return
        # NOTE(review): this uses the root `logging` module rather than
        # the module-level `logger` used everywhere else -- confirm
        # whether that is intentional.
        logging.info(
            "Unexpected message access failure handling push notifications: %s %s",
            user_profile.id, missed_message['message_id'],
        )
        return
    if user_message is not None:
        # If the user has read the message already, don't push-notify.
        #
        # TODO: It feels like this is already handled when things are
        # put in the queue; maybe we should centralize this logic with
        # the `zerver/tornado/event_queue.py` logic?
        if user_message.flags.read or user_message.flags.active_mobile_push_notification:
            return
        # Otherwise, we mark the message as having an active mobile
        # push notification, so that we can send revocation messages
        # later.
        user_message.flags.active_mobile_push_notification = True
        user_message.save(update_fields=["flags"])
    else:
        # Users should only be getting push notifications into this
        # queue for messages they haven't received if they're
        # long-term idle; anything else is likely a bug.
        if not user_profile.long_term_idle:
            logger.error(
                "Could not find UserMessage with message_id %s and user_id %s",
                missed_message['message_id'], user_profile_id,
            )
            return
    # The trigger (e.g. "mentioned") drives the wording of the payloads.
    message.trigger = missed_message['trigger']
    apns_payload = get_message_payload_apns(user_profile, message)
    gcm_payload, gcm_options = get_message_payload_gcm(user_profile, message)
    logger.info("Sending push notifications to mobile clients for user %s", user_profile_id)
    if uses_notification_bouncer():
        send_notifications_to_bouncer(user_profile_id,
                                      apns_payload,
                                      gcm_payload,
                                      gcm_options)
        return
    android_devices = list(PushDeviceToken.objects.filter(user=user_profile,
                                                          kind=PushDeviceToken.GCM))
    apple_devices = list(PushDeviceToken.objects.filter(user=user_profile,
                                                        kind=PushDeviceToken.APNS))
    send_apple_push_notification(user_profile.id, apple_devices, apns_payload)
    send_android_push_notification(android_devices, gcm_payload, gcm_options)
| 41.990419 | 113 | 0.650048 | import base64
import binascii
import logging
import re
import time
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Sequence, Tuple, Union
import gcm
import lxml.html
import orjson
from django.conf import settings
from django.db import IntegrityError, transaction
from django.db.models import F
from django.utils.timezone import now as timezone_now
from django.utils.translation import ugettext as _
from zerver.decorator import statsd_increment
from zerver.lib.avatar import absolute_avatar_url
from zerver.lib.exceptions import JsonableError
from zerver.lib.message import access_message, bulk_access_messages_expect_usermessage, huddle_users
from zerver.lib.remote_server import send_json_to_push_bouncer, send_to_push_bouncer
from zerver.lib.timestamp import datetime_to_timestamp
from zerver.models import (
ArchivedMessage,
Message,
PushDeviceToken,
Recipient,
UserMessage,
UserProfile,
get_display_recipient,
get_user_profile_by_id,
receives_offline_push_notifications,
receives_online_notifications,
)
if TYPE_CHECKING:
from apns2.client import APNsClient
logger = logging.getLogger(__name__)
if settings.ZILENCER_ENABLED:
from zilencer.models import RemotePushDeviceToken
DeviceToken = Union[PushDeviceToken, "RemotePushDeviceToken"]
def b64_to_hex(data: str) -> str:
    """Decode a base64 string and return its lowercase hex encoding."""
    raw = base64.b64decode(data)
    return raw.hex()
def hex_to_b64(data: str) -> str:
    """Inverse of b64_to_hex: hex string in, base64 string out."""
    raw = binascii.unhexlify(data)
    return base64.b64encode(raw).decode()
_apns_client: Optional["APNsClient"] = None
_apns_client_initialized = False
def get_apns_client() -> 'Optional[APNsClient]':
    """Lazily construct (at most once) and return the shared APNs client.

    Returns None when APNS_CERT_FILE is not configured.
    """
    # apns2 is imported lazily to keep it off the module's startup
    # import time.
    from apns2.client import APNsClient
    global _apns_client, _apns_client_initialized
    if not _apns_client_initialized:
        # NB if called concurrently, this will make excess connections.
        # That's a little sloppy, but harmless unless a server gets
        # many concurrent first calls (original comment truncated here).
        if settings.APNS_CERT_FILE is not None:
            _apns_client = APNsClient(credentials=settings.APNS_CERT_FILE,
                                      use_sandbox=settings.APNS_SANDBOX)
        _apns_client_initialized = True
    return _apns_client
def apns_enabled() -> bool:
    """True when an APNs client could be constructed (cert configured)."""
    return get_apns_client() is not None
def modernize_apns_payload(data: Dict[str, Any]) -> Dict[str, Any]:
    """Rewrite a legacy (1.6-era) APNs payload into the current format.

    The format sent by 1.6.0 (from the earliest pre-1.6.0 version with
    bouncer support up until 613d093d7 pre-1.7.0) had top-level keys:
        'alert': str          # just sender, and text about PM/group-PM/mention
        'message_ids': List[int]   # always just one
    Anything without a top-level 'message_ids' is assumed to already be
    compatible with the current format and is passed through untouched
    (`alert` may be a string, or a dict with `title` and `body`).
    """
    if 'message_ids' not in data:
        return data
    return {
        'alert': data['alert'],
        'badge': 0,
        'custom': {
            'zulip': {
                'message_ids': data['message_ids'],
            },
        },
    }
APNS_MAX_RETRIES = 3
@statsd_increment("apple_push_notification")
def send_apple_push_notification(user_id: int, devices: List[DeviceToken],
                                 payload_data: Dict[str, Any], remote: bool=False) -> None:
    """Send `payload_data` to each of the user's APNs device tokens.

    With remote=True, tokens are RemotePushDeviceToken rows (bouncer
    mode); otherwise local PushDeviceToken rows.  Invalid/expired
    tokens are deleted as APNs reports them.

    NOTE(review): `retries_left` is initialized once *outside* the
    device loop, so the retry budget is shared across all devices --
    confirm whether a per-device budget was intended.
    """
    if not devices:
        return
    # We lazily do the APNS imports as part of optimizing Zulip's base
    # code that needs them.
    from apns2.payload import Payload as APNsPayload
    from hyper.http20.exceptions import HTTP20Error
    client = get_apns_client()
    if client is None:
        logger.debug("APNs: Dropping a notification because nothing configured.  "
                     "Set PUSH_NOTIFICATION_BOUNCER_URL (or APNS_CERT_FILE).")
        return
    if remote:
        assert settings.ZILENCER_ENABLED
        DeviceTokenClass = RemotePushDeviceToken
    else:
        DeviceTokenClass = PushDeviceToken
    logger.info("APNs: Sending notification for user %d to %d devices",
                user_id, len(devices))
    payload = APNsPayload(**modernize_apns_payload(payload_data))
    # Notifications expire 24 hours from now if undeliverable.
    expiration = int(time.time() + 24 * 3600)
    retries_left = APNS_MAX_RETRIES
    for device in devices:
        # TODO obviously this should be made to actually use the async

        def attempt_send() -> Optional[str]:
            # Returns the APNs result string, or None on a transport
            # error worth retrying.
            assert client is not None
            try:
                stream_id = client.send_notification_async(
                    device.token, payload, topic=settings.APNS_TOPIC,
                    expiration=expiration)
                return client.get_notification_result(stream_id)
            except HTTP20Error as e:
                logger.warning("APNs: HTTP error sending for user %d to device %s: %s",
                               user_id, device.token, e.__class__.__name__)
                return None
            except BrokenPipeError as e:
                logger.warning("APNs: BrokenPipeError sending for user %d to device %s: %s",
                               user_id, device.token, e.__class__.__name__)
                return None
            except ConnectionError as e:  # nocoverage
                logger.warning("APNs: ConnectionError sending for user %d to device %s: %s",
                               user_id, device.token, e.__class__.__name__)
                return None
        result = attempt_send()
        while result is None and retries_left > 0:
            retries_left -= 1
            result = attempt_send()
        if result is None:
            result = "HTTP error, retries exhausted"
        if result[0] == "Unregistered":
            # For some reason, "Unregistered" result values have a
            # different format, as a tuple of the pair ("Unregistered", 12345132131).
            result = result[0]
        if result == 'Success':
            logger.info("APNs: Success sending for user %d to device %s",
                        user_id, device.token)
        elif result in ["Unregistered", "BadDeviceToken", "DeviceTokenNotForTopic"]:
            logger.info("APNs: Removing invalid/expired token %s (%s)", device.token, result)
            # We remove all entries for this token (There
            # could be multiple for different Zulip servers).
            DeviceTokenClass.objects.filter(token=device.token, kind=DeviceTokenClass.APNS).delete()
        else:
            logger.warning("APNs: Failed to send for user %d to device %s: %s",
                           user_id, device.token, result)
#
# Sending to GCM, for Android
#
def make_gcm_client() -> gcm.GCM: # nocoverage
    """Build a python-gcm client pointed at the FCM endpoint."""
    # From GCM upstream's doc for migrating to FCM, the only change we
    # are required to make on the server side is the URL we send to --
    # see:
    #
    # https://developers.google.com/cloud-messaging/android/android-migrate-fcm
    #
    # python-gcm hard-codes the legacy GCM URL, so we keep the library
    # we've been using (as long as we're happy with it) -- just
    # monkey-patch in the FCM endpoint before constructing the client.
    gcm.gcm.GCM_URL = 'https://fcm.googleapis.com/fcm/send'
    return gcm.GCM(settings.ANDROID_GCM_API_KEY)
if settings.ANDROID_GCM_API_KEY:
gcm_client = make_gcm_client()
else:
gcm_client = None
def gcm_enabled() -> bool:
    """Report whether Android push (GCM/FCM) is configured."""
    has_client = gcm_client is not None
    return has_client
def send_android_push_notification_to_user(user_profile: UserProfile, data: Dict[str, Any],
                                           options: Dict[str, Any]) -> None:
    """Look up all of the user's GCM device tokens and push to them."""
    gcm_tokens = PushDeviceToken.objects.filter(
        user=user_profile, kind=PushDeviceToken.GCM)
    send_android_push_notification(list(gcm_tokens), data, options)
def parse_gcm_options(options: Dict[str, Any], data: Dict[str, Any]) -> str:
    """Validate and consume `options`, returning the GCM priority.

    Raises JsonableError on an unknown priority or on any leftover
    option keys, so a newer server talking to an older bouncer fails
    loudly rather than silently dropping options.
    """
    priority = options.pop('priority', None)
    if priority is None:
        # New-message events default to high priority (user-visible).
        priority = 'high' if data.get('event') == 'message' else 'normal'
    if priority not in ('normal', 'high'):
        raise JsonableError(_(
            "Invalid GCM option to bouncer: priority {!r}",
        ).format(priority))
    if options:
        # Anything we did not recognize above is an error.
        raise JsonableError(_(
            "Invalid GCM options to bouncer: {}",
        ).format(orjson.dumps(options).decode()))
    # When this grows a second option, the return can become a tuple.
    return priority
@statsd_increment("android_push_notification")
def send_android_push_notification(devices: List[DeviceToken], data: Dict[str, Any],
                                   options: Dict[str, Any], remote: bool=False) -> None:
    """Send `data` to each device token via GCM/FCM, then process the
    response: log successes, migrate canonical (replacement) IDs, and
    delete tokens the service reports as no longer registered.

    With remote=True, tokens are RemotePushDeviceToken rows (bouncer
    mode); otherwise local PushDeviceToken rows.
    """
    if not devices:
        return
    if not gcm_client:
        logger.debug("Skipping sending a GCM push notification since "
                     "PUSH_NOTIFICATION_BOUNCER_URL and ANDROID_GCM_API_KEY are both unset")
        return
    reg_ids = [device.token for device in devices]
    priority = parse_gcm_options(options, data)
    try:
        # See https://firebase.google.com/docs/cloud-messaging/http-server-ref .
        # Two kwargs `retries` and `session` get eaten by `json_request`;
        # the rest pass through to the GCM server.
        res = gcm_client.json_request(registration_ids=reg_ids,
                                      priority=priority,
                                      data=data,
                                      retries=10)
    except OSError:
        logger.warning("Error while pushing to GCM", exc_info=True)
        return
    if res and 'success' in res:
        for reg_id, msg_id in res['success'].items():
            logger.info("GCM: Sent %s as %s", reg_id, msg_id)
    if remote:
        assert settings.ZILENCER_ENABLED
        DeviceTokenClass = RemotePushDeviceToken
    else:
        DeviceTokenClass = PushDeviceToken
    # res.canonical will contain results when there are duplicate registrations for the same
    # device. The "canonical" registration is the latest registration made by the device.
    # Ref: https://developer.android.com/google/gcm/adv.html#canonical
    if 'canonical' in res:
        for reg_id, new_reg_id in res['canonical'].items():
            if reg_id == new_reg_id:
                # I'm not sure if this should happen. In any case, not really actionable.
                logger.warning("GCM: Got canonical ref but it already matches our ID %s!", reg_id)
            elif not DeviceTokenClass.objects.filter(token=new_reg_id,
                                                     kind=DeviceTokenClass.GCM).count():
                # Unexpected: the canonical replacement ID was never
                # previously registered in our system.
                #
                # That said, recovery is easy: just update the current PDT object to use the new ID.
                logger.warning(
                    "GCM: Got canonical ref %s replacing %s but new ID not registered! Updating.",
                    new_reg_id, reg_id)
                DeviceTokenClass.objects.filter(
                    token=reg_id, kind=DeviceTokenClass.GCM).update(token=new_reg_id)
            else:
                # Since we know the new ID is registered in our system we can just drop the old one.
                logger.info("GCM: Got canonical ref %s, dropping %s", new_reg_id, reg_id)
                DeviceTokenClass.objects.filter(token=reg_id, kind=DeviceTokenClass.GCM).delete()
    if 'errors' in res:
        for error, reg_ids in res['errors'].items():
            if error in ['NotRegistered', 'InvalidRegistration']:
                for reg_id in reg_ids:
                    logger.info("GCM: Removing %s", reg_id)
                    # We remove all entries for this token (There
                    # could be multiple for different Zulip servers).
                    DeviceTokenClass.objects.filter(token=reg_id, kind=DeviceTokenClass.GCM).delete()
            else:
                for reg_id in reg_ids:
                    logger.warning("GCM: Delivery to %s failed: %s", reg_id, error)
    # python-gcm handles retrying of the unsent messages.
    # Ref: https://github.com/geeknam/python-gcm/blob/master/gcm/gcm.py#L497
#
# Sending to a bouncer
#
def uses_notification_bouncer() -> bool:
    """True when this server relays notifications through a push bouncer."""
    configured_url = settings.PUSH_NOTIFICATION_BOUNCER_URL
    return configured_url is not None
def send_notifications_to_bouncer(user_profile_id: int,
                                  apns_payload: Dict[str, Any],
                                  gcm_payload: Dict[str, Any],
                                  gcm_options: Dict[str, Any]) -> None:
    """Forward both platform payloads to the push bouncer in one request."""
    # Handled on the bouncer side by zilencer.views.remote_server_notify_push.
    send_json_to_push_bouncer('POST', 'push/notify', {
        'user_id': user_profile_id,
        'apns_payload': apns_payload,
        'gcm_payload': gcm_payload,
        'gcm_options': gcm_options,
    })
#
# Managing device tokens
#
def num_push_devices_for_user(user_profile: UserProfile, kind: Optional[int]=None) -> int:
    """Count the user's registered push devices, optionally of one kind.

    (Return annotation corrected: `.count()` yields an int, not a
    PushDeviceToken.)
    """
    if kind is None:
        return PushDeviceToken.objects.filter(user=user_profile).count()
    else:
        return PushDeviceToken.objects.filter(user=user_profile, kind=kind).count()
def add_push_device_token(user_profile: UserProfile,
                          token_str: str,
                          kind: int,
                          ios_app_id: Optional[str]=None) -> PushDeviceToken:
    """Register a device token for the user, idempotently, and (when a
    push bouncer is configured) also register it with the bouncer.

    Returns the (new or pre-existing) PushDeviceToken row.
    """
    logger.info("Registering push device: %d %r %d %r",
                user_profile.id, token_str, kind, ios_app_id)
    # Regardless of whether we're using the push notifications
    # bouncer, we record the token locally; an IntegrityError means it
    # was already registered, in which case we fetch the existing row.
    try:
        with transaction.atomic():
            token = PushDeviceToken.objects.create(
                user_id=user_profile.id,
                kind=kind,
                token=token_str,
                ios_app_id=ios_app_id,
                last_updated=timezone_now())
    except IntegrityError:
        token = PushDeviceToken.objects.get(
            user_id=user_profile.id,
            kind=kind,
            token=token_str,
        )
    # If we're sending to the push notification bouncer, we also
    # register this user with them here.
    if uses_notification_bouncer():
        post_data = {
            'server_uuid': settings.ZULIP_ORG_ID,
            'user_id': user_profile.id,
            'token': token_str,
            'token_kind': kind,
        }
        if kind == PushDeviceToken.APNS:
            post_data['ios_app_id'] = ios_app_id
        logger.info("Sending new push device to bouncer: %r", post_data)
        # Calls zilencer.views.register_remote_push_device
        send_to_push_bouncer('POST', 'push/register', post_data)
    return token
def remove_push_device_token(user_profile: UserProfile, token_str: str, kind: int) -> None:
    """Delete a device token locally and, if a push bouncer is in use,
    unregister it there as well.

    Raises JsonableError("Token does not exist") only when no bouncer
    is configured and the token is unknown locally.
    """
    try:
        token = PushDeviceToken.objects.get(token=token_str, kind=kind, user=user_profile)
        token.delete()
    except PushDeviceToken.DoesNotExist:
        # If we are using bouncer, don't raise the exception. It will
        # be handled by the bouncer-unregister call below instead
        # (original comment truncated here).
        if not uses_notification_bouncer():
            raise JsonableError(_("Token does not exist"))
    # If we're using the bouncer, we also need to
    # unregister this user with them here.
    if uses_notification_bouncer():
        # TODO: Make this a remove item
        post_data = {
            'server_uuid': settings.ZULIP_ORG_ID,
            'user_id': user_profile.id,
            'token': token_str,
            'token_kind': kind,
        }
        # Calls zilencer.views.unregister_remote_push_device
        send_to_push_bouncer("POST", "push/unregister", post_data)
def clear_push_device_tokens(user_profile_id: int) -> None:
    """Delete all of a user's push device registrations.

    In bouncer mode the bouncer owns the registrations, so we ask it to
    unregister everything and return without touching local state.
    """
    # Deletes all of a user's PushDeviceTokens.
    if uses_notification_bouncer():
        post_data = {
            'server_uuid': settings.ZULIP_ORG_ID,
            'user_id': user_profile_id,
        }
        send_to_push_bouncer("POST", "push/unregister/all", post_data)
        return
    PushDeviceToken.objects.filter(user_id=user_profile_id).delete()
def push_notifications_enabled() -> bool:
    """True if this server can deliver mobile push notifications.

    Either the push bouncer is fully configured (URL plus org
    credentials), or APNs/GCM are configured directly: production
    requires both platforms, while a development server needs only one.
    """
    bouncer_ready = (uses_notification_bouncer()
                     and settings.ZULIP_ORG_KEY is not None
                     and settings.ZULIP_ORG_ID is not None)
    if bouncer_ready:
        return True
    if settings.DEVELOPMENT:
        return apns_enabled() or gcm_enabled()
    return apns_enabled() and gcm_enabled()
def initialize_push_notifications() -> None:
    """At startup, warn if mobile push notifications are unconfigured."""
    if not push_notifications_enabled():
        if settings.DEVELOPMENT and not settings.TEST_SUITE:
            # Development servers (outside the test suite) skip the warning.
            return
        logger.warning("Mobile push notifications are not configured.\n  "
                       "See https://zulip.readthedocs.io/en/latest/"
                       "production/mobile-push-notifications.html")
def get_gcm_alert(message: Message) -> str:
    """One-line alert text for an Android notification, chosen from the
    message's trigger and recipient type."""
    sender_str = message.sender.full_name
    recipient_type = message.recipient.type
    if message.trigger == 'private_message':
        if recipient_type == Recipient.HUDDLE:
            return f"New private group message from {sender_str}"
        if recipient_type == Recipient.PERSONAL:
            return f"New private message from {sender_str}"
    if message.is_stream_message() and message.trigger in ('mentioned', 'wildcard_mentioned'):
        return f"New mention from {sender_str}"
    return f"New stream message from {sender_str} in {get_display_recipient(message.recipient)}"
def get_mobile_push_content(rendered_content: str) -> str:
    """Convert a message's rendered HTML into plain text suitable for a
    mobile push notification (emoji expanded, lists numbered, quotes
    and spoilers summarized)."""
    def get_text(elem: lxml.html.HtmlElement) -> str:
        """Text for a single element, ignoring its children."""
        # Emoji spans carry their codepoints in a class like
        # "emoji-1f44d"; render them as the actual character(s).
        classes = elem.get("class", "")
        if "emoji" in classes:
            match = re.search(r"emoji-(?P<emoji_code>\S+)", classes)
            if match:
                emoji_code = match.group('emoji_code')
                char_repr = ""
                for codepoint in emoji_code.split('-'):
                    char_repr += chr(int(codepoint, 16))
                return char_repr
        if elem.tag == "img":
            return elem.get("alt", "")
        if elem.tag == 'blockquote':
            # Quoted text is handled via format_as_quote on the children.
            return ''
        return elem.text or ''
    def format_as_quote(quote_text: str) -> str:
        """Prefix each non-empty line with '> ', email-quote style."""
        return "".join(
            f"> {line}\n" for line in quote_text.splitlines()
            if line
        )
    def render_olist(ol: lxml.html.HtmlElement) -> str:
        """Render an ordered list, honoring `start` and nesting depth."""
        items = []
        counter = int(ol.get('start')) if ol.get('start') else 1
        nested_levels = len(list(ol.iterancestors('ol')))
        indent = ('\n' + '  ' * nested_levels) if nested_levels else ''
        for li in ol:
            items.append(indent + str(counter) + '. ' + process(li).strip())
            counter += 1
        return '\n'.join(items)
    def render_spoiler(elem: lxml.html.HtmlElement) -> str:
        """Render a spoiler as its header text plus a (…) placeholder."""
        header = elem.find_class('spoiler-header')[0]
        text = process(header).strip()
        if len(text) == 0:
            return "(…)\n"
        return f"{text} (…)\n"
    def process(elem: lxml.html.HtmlElement) -> str:
        """Recursively flatten an element (and its tail text) to plain text."""
        plain_text = ''
        if elem.tag == 'ol':
            plain_text = render_olist(elem)
        elif 'spoiler-block' in elem.get("class", ""):
            plain_text += render_spoiler(elem)
        else:
            plain_text = get_text(elem)
            sub_text = ''
            for child in elem:
                sub_text += process(child)
            if elem.tag == 'blockquote':
                sub_text = format_as_quote(sub_text)
            plain_text += sub_text
            plain_text += elem.tail or ""
        return plain_text
    if settings.PUSH_NOTIFICATION_REDACT_CONTENT:
        # Privacy setting: never put message content in push payloads.
        return "***REDACTED***"
    elem = lxml.html.fromstring(rendered_content)
    plain_text = process(elem)
    return plain_text
def truncate_content(content: str) -> Tuple[str, bool]:
    """Limit content to 200 characters; append U+2026 when truncated.

    Returns a (text, was_truncated) pair.
    """
    if len(content) > 200:
        return content[:200] + "…", True
    return content, False
def get_base_payload(user_profile: UserProfile) -> Dict[str, Any]:
    """Common fields for all notification payloads."""
    data: Dict[str, Any] = {}
    # Server/realm identifiers let the app support multiple accounts.
    data['server'] = settings.EXTERNAL_HOST
    data['realm_id'] = user_profile.realm.id
    data['realm_uri'] = user_profile.realm.uri
    data['user_id'] = user_profile.id
    return data
def get_message_payload(user_profile: UserProfile, message: Message) -> Dict[str, Any]:
    """Common fields for `message` payloads, for all platforms."""
    data = get_base_payload(user_profile)
    # `sender_id` is preferred; `sender_email` is kept for older clients.
    data['sender_id'] = message.sender.id
    data['sender_email'] = message.sender.email
    if message.recipient.type == Recipient.STREAM:
        data['recipient_type'] = "stream"
        data['stream'] = get_display_recipient(message.recipient)
        data['topic'] = message.topic_name()
    elif message.recipient.type == Recipient.HUDDLE:
        data['recipient_type'] = "private"
        data['pm_users'] = huddle_users(message.recipient.id)
    else:
        # Recipient.PERSONAL (1:1 PM)
        data['recipient_type'] = "private"
    return data
def get_apns_alert_title(message: Message) -> str:
    """On an iOS notification, this is the first bolded line."""
    if message.recipient.type == Recipient.HUDDLE:
        # Group PM: sorted, comma-separated participant names.
        recipients = get_display_recipient(message.recipient)
        assert isinstance(recipients, list)
        return ', '.join(sorted(r['full_name'] for r in recipients))
    elif message.is_stream_message():
        return f"#{get_display_recipient(message.recipient)} > {message.topic_name()}"
    # For personal PMs, just the sender name.
    return message.sender.full_name
def get_apns_alert_subtitle(message: Message) -> str:
    """On an iOS notification, this is the second bolded line."""
    if message.trigger == "mentioned":
        return _("{full_name} mentioned you:").format(full_name=message.sender.full_name)
    elif message.trigger == "wildcard_mentioned":
        return _("{full_name} mentioned everyone:").format(full_name=message.sender.full_name)
    elif message.recipient.type == Recipient.PERSONAL:
        # 1:1 PMs carry no subtitle; the title already names the sender.
        return ""
    # Group PMs / stream messages: colon marks this as the sender.
    return message.sender.full_name + ":"
def get_apns_badge_count(user_profile: UserProfile,
                         read_messages_ids: Optional[Sequence[int]]=None) -> int:
    """Return the iOS badge count for this user (temporarily always 0;
    see get_apns_badge_count_future for the intended implementation).

    The mutable `[]` default was replaced by `None`, which is
    backward-compatible since the argument is currently unused.
    """
    return 0
def get_apns_badge_count_future(user_profile: UserProfile,
                                read_messages_ids: Optional[Sequence[int]]=None) -> int:
    """Future implementation of get_apns_badge_count: count messages
    with an active push notification, excluding any we've just marked
    as read (their flag is cleared only after the devices are updated).
    """
    # Avoid the mutable-default-argument pitfall; None behaves like [].
    if read_messages_ids is None:
        read_messages_ids = []
    return UserMessage.objects.filter(
        user_profile=user_profile
    ).extra(
        where=[UserMessage.where_active_push_notification()]
    ).exclude(
        message_id__in=read_messages_ids
    ).count()
def get_message_payload_apns(user_profile: UserProfile, message: Message) -> Dict[str, Any]:
    """A `message` payload for iOS, via APNs."""
    zulip_data = get_message_payload(user_profile, message)
    zulip_data.update(
        message_ids=[message.id],
    )
    assert message.rendered_content is not None
    # Note: `_` here shadows the gettext alias within this function.
    content, _ = truncate_content(get_mobile_push_content(message.rendered_content))
    apns_data = {
        'alert': {
            'title': get_apns_alert_title(message),
            'subtitle': get_apns_alert_subtitle(message),
            'body': content,
        },
        'sound': 'default',
        'badge': get_apns_badge_count(user_profile),
        'custom': {'zulip': zulip_data},
    }
    return apns_data
def get_message_payload_gcm(
    user_profile: UserProfile, message: Message,
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    """A `message` payload + options, for Android via GCM/FCM."""
    data = get_message_payload(user_profile, message)
    assert message.rendered_content is not None
    content, truncated = truncate_content(get_mobile_push_content(message.rendered_content))
    data.update(
        event='message',
        alert=get_gcm_alert(message),
        # The bare name `message_id` is reserved for CCS.
        zulip_message_id=message.id,
        time=datetime_to_timestamp(message.date_sent),
        content=content,
        content_truncated=truncated,
        sender_full_name=message.sender.full_name,
        sender_avatar_url=absolute_avatar_url(message.sender),
    )
    # New-message notifications are user-visible: high priority.
    gcm_options = {'priority': 'high'}
    return data, gcm_options
def get_remove_payload_gcm(
    user_profile: UserProfile, message_ids: List[int],
) -> Tuple[Dict[str, Any], Dict[str, Any]]:
    """A `remove` payload + options, for Android via GCM/FCM.

    NOTE(review): assumes `message_ids` is non-empty; an empty list
    would raise IndexError on `message_ids[0]` below.
    """
    gcm_payload = get_base_payload(user_profile)
    gcm_payload.update(
        event='remove',
        zulip_message_ids=','.join(str(id) for id in message_ids),
        # Older clients look only at the singular `zulip_message_id`.
        zulip_message_id=message_ids[0],
    )
    gcm_options = {'priority': 'normal'}
    return gcm_payload, gcm_options
def get_remove_payload_apns(user_profile: UserProfile, message_ids: List[int]) -> Dict[str, Any]:
    """An APNs payload that revokes previously-sent notifications."""
    zulip_data = get_base_payload(user_profile)
    zulip_data.update(
        event='remove',
        zulip_message_ids=','.join(str(id) for id in message_ids),
    )
    apns_data = {
        'badge': get_apns_badge_count(user_profile, message_ids),
        'custom': {'zulip': zulip_data},
    }
    return apns_data
def handle_remove_push_notification(user_profile_id: int, message_ids: List[int]) -> None:
    """Push a `remove` event so devices drop notifications for messages
    the user has now read on the server."""
    user_profile = get_user_profile_by_id(user_profile_id)
    # Filter down to ids the user actually has a UserMessage row for.
    message_ids = bulk_access_messages_expect_usermessage(user_profile_id, message_ids)
    gcm_payload, gcm_options = get_remove_payload_gcm(user_profile, message_ids)
    apns_payload = get_remove_payload_apns(user_profile, message_ids)
    if uses_notification_bouncer():
        send_notifications_to_bouncer(user_profile_id,
                                      apns_payload,
                                      gcm_payload,
                                      gcm_options)
    else:
        android_devices = list(PushDeviceToken.objects.filter(
            user=user_profile, kind=PushDeviceToken.GCM))
        apple_devices = list(PushDeviceToken.objects.filter(
            user=user_profile, kind=PushDeviceToken.APNS))
        if android_devices:
            send_android_push_notification(android_devices, gcm_payload, gcm_options)
        if apple_devices:
            send_apple_push_notification(user_profile_id, apple_devices, apns_payload)
    # Clear the active-notification flag only after the sends above.
    UserMessage.objects.filter(
        user_profile_id=user_profile_id,
        message_id__in=message_ids,
    ).update(
        flags=F('flags').bitand(
            ~UserMessage.flags.active_mobile_push_notification))
@statsd_increment("push_notifications")
def handle_push_notification(user_profile_id: int, missed_message: Dict[str, Any]) -> None:
    """Send mobile push notifications for a single message.

    `missed_message` is the event consumed from the push-notification
    queue; it must carry 'message_id' and 'trigger' keys.
    """
    if not push_notifications_enabled():
        return
    user_profile = get_user_profile_by_id(user_profile_id)
    if not (receives_offline_push_notifications(user_profile) or
            receives_online_notifications(user_profile)):
        return
    try:
        (message, user_message) = access_message(user_profile, missed_message['message_id'])
    except JsonableError:
        if ArchivedMessage.objects.filter(id=missed_message['message_id']).exists():
            # Race with message deletion; nothing to do.
            return
        # NOTE(review): uses the root `logging` module rather than the
        # module-level `logger` used elsewhere -- confirm intent.
        logging.info(
            "Unexpected message access failure handling push notifications: %s %s",
            user_profile.id, missed_message['message_id'],
        )
        return
    if user_message is not None:
        # If the user has read the message already, don't push-notify.
        if user_message.flags.read or user_message.flags.active_mobile_push_notification:
            return
        # Mark as having an active notification so it can be revoked later.
        user_message.flags.active_mobile_push_notification = True
        user_message.save(update_fields=["flags"])
    else:
        # Only long-term-idle users legitimately lack a UserMessage row.
        if not user_profile.long_term_idle:
            logger.error(
                "Could not find UserMessage with message_id %s and user_id %s",
                missed_message['message_id'], user_profile_id,
            )
            return
    # The trigger (e.g. "mentioned") drives the payload wording.
    message.trigger = missed_message['trigger']
    apns_payload = get_message_payload_apns(user_profile, message)
    gcm_payload, gcm_options = get_message_payload_gcm(user_profile, message)
    logger.info("Sending push notifications to mobile clients for user %s", user_profile_id)
    if uses_notification_bouncer():
        send_notifications_to_bouncer(user_profile_id,
                                      apns_payload,
                                      gcm_payload,
                                      gcm_options)
        return
    android_devices = list(PushDeviceToken.objects.filter(user=user_profile,
                                                          kind=PushDeviceToken.GCM))
    apple_devices = list(PushDeviceToken.objects.filter(user=user_profile,
                                                        kind=PushDeviceToken.APNS))
    send_apple_push_notification(user_profile.id, apple_devices, apns_payload)
    send_android_push_notification(android_devices, gcm_payload, gcm_options)
| true | true |
1c2fab0514f311db6469ee743995314e21162298 | 2,010 | py | Python | custom_components/netdaemon/switch.py | skotl/netdaemon-integration | 71fb6036330cddbd4629d942652b93b31c204eec | [
"MIT"
] | 1 | 2021-08-24T18:47:10.000Z | 2021-08-24T18:47:10.000Z | custom_components/netdaemon/switch.py | skotl/netdaemon-integration | 71fb6036330cddbd4629d942652b93b31c204eec | [
"MIT"
] | 32 | 2021-01-01T17:41:41.000Z | 2022-02-05T18:42:32.000Z | custom_components/netdaemon/switch.py | skotl/netdaemon-integration | 71fb6036330cddbd4629d942652b93b31c204eec | [
"MIT"
] | 8 | 2021-04-23T09:58:02.000Z | 2022-01-03T10:30:48.000Z | """Switch platform for NetDaemon."""
from typing import TYPE_CHECKING
from homeassistant.components.switch import SwitchEntity
from .const import (
ATTR_CLIENT,
ATTR_COORDINATOR,
ATTR_ENTITY_ID,
ATTR_STATE,
DOMAIN,
LOGGER,
PLATFORM_SWITCH,
STATE_ON_VALUES,
)
from .entity import NetDaemonEntity
if TYPE_CHECKING:
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .client import NetDaemonClient
async def async_setup_entry(
    hass: "HomeAssistant", _config_entry: "ConfigEntry", async_add_devices
) -> None:
    """Setup switch platform."""
    client: "NetDaemonClient" = hass.data[DOMAIN][ATTR_CLIENT]
    coordinator: "DataUpdateCoordinator" = hass.data[DOMAIN][ATTR_COORDINATOR]
    switches = []
    for entity in client.entities:
        parts = entity.split(".")
        # Only entities in the switch domain belong to this platform.
        if parts[0] != PLATFORM_SWITCH:
            continue
        LOGGER.debug("Adding %s", entity)
        switches.append(NetDaemonSwitch(coordinator, parts[1]))
    if switches:
        async_add_devices(switches)
class NetDaemonSwitch(NetDaemonEntity, SwitchEntity):
    """Switch entity mirroring a NetDaemon-managed switch."""

    @property
    def is_on(self):
        """Return True when the coordinator reports an "on" state."""
        raw_state = self._coordinator.data[self.entity_id][ATTR_STATE]
        return str(raw_state).lower() in STATE_ON_VALUES

    async def async_turn_on(self, **kwargs):
        """Turn the device on."""
        await self._async_toggle()

    async def async_turn_off(self, **kwargs):
        """Turn the device off."""
        await self._async_toggle()

    async def _async_toggle(self) -> None:
        """Flip the backing entity's state via the NetDaemon client."""
        state_now = self._coordinator.data[self.entity_id][ATTR_STATE]
        payload = {ATTR_ENTITY_ID: self.entity_id, ATTR_STATE: not state_now}
        await self.hass.data[DOMAIN][ATTR_CLIENT].entity_update(payload)
        self.async_write_ha_state()
| 30 | 79 | 0.69005 | from typing import TYPE_CHECKING
from homeassistant.components.switch import SwitchEntity
from .const import (
ATTR_CLIENT,
ATTR_COORDINATOR,
ATTR_ENTITY_ID,
ATTR_STATE,
DOMAIN,
LOGGER,
PLATFORM_SWITCH,
STATE_ON_VALUES,
)
from .entity import NetDaemonEntity
if TYPE_CHECKING:
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator
from .client import NetDaemonClient
async def async_setup_entry(
    hass: "HomeAssistant", _config_entry: "ConfigEntry", async_add_devices
) -> None:
    """Set up the NetDaemon switch platform from a config entry."""
    client: "NetDaemonClient" = hass.data[DOMAIN][ATTR_CLIENT]
    coordinator: "DataUpdateCoordinator" = hass.data[DOMAIN][ATTR_COORDINATOR]
    switches = []
    # Entities look like "<platform>.<object_id>"; keep only switches.
    for entity in client.entities:
        if entity.split(".")[0] == PLATFORM_SWITCH:
            LOGGER.debug("Adding %s", entity)
            switches.append(NetDaemonSwitch(coordinator, entity.split(".")[1]))
    # Register only when at least one switch entity was discovered.
    if switches:
        async_add_devices(switches)
class NetDaemonSwitch(NetDaemonEntity, SwitchEntity):
    """Switch entity backed by NetDaemon coordinator state."""
    @property
    def is_on(self):
        """Return True when the coordinator state is one of STATE_ON_VALUES."""
        state = str(self._coordinator.data[self.entity_id][ATTR_STATE]).lower()
        return state in STATE_ON_VALUES
    async def async_turn_on(self, **kwargs):
        """Turn the device on."""
        await self._async_toggle()
    async def async_turn_off(self, **kwargs):
        """Turn the device off."""
        await self._async_toggle()
    async def _async_toggle(self) -> None:
        """Push the negated current state to the NetDaemon client."""
        current = self._coordinator.data[self.entity_id][ATTR_STATE]
        await self.hass.data[DOMAIN][ATTR_CLIENT].entity_update(
            {ATTR_ENTITY_ID: self.entity_id, ATTR_STATE: not current}
        )
        self.async_write_ha_state()
| true | true |
1c2fab7d0bbcc372a971bf0468a371a8d711930d | 1,298 | py | Python | data_structures/stack/stack.py | papudova/programming-for-linguists | 2349b5d2787eb0f9e57f281647fcc7f20e900688 | [
"Apache-2.0"
] | null | null | null | data_structures/stack/stack.py | papudova/programming-for-linguists | 2349b5d2787eb0f9e57f281647fcc7f20e900688 | [
"Apache-2.0"
] | null | null | null | data_structures/stack/stack.py | papudova/programming-for-linguists | 2349b5d2787eb0f9e57f281647fcc7f20e900688 | [
"Apache-2.0"
] | null | null | null | """
Programming for linguists
Implementation of the data structure "Stack"
"""
from typing import Iterable
class Stack:
    """
    Stack Data Structure

    A bounded LIFO stack: at most ``maximum`` elements are kept and
    pushes onto a full stack are silently ignored.
    """
    # pylint: disable=missing-module-docstring
    def __init__(self, maximum, data: Iterable = None):
        # Copy the seed iterable so the caller's object is not aliased.
        self.data = list(data) if data else []
        self.maximum = maximum

    def push(self, element):
        """
        Add the element ‘element’ at the top of stack
        :param element: element to add to stack
        """
        # BUG FIX: the original guard was inverted
        # (``self.maximum <= len(self.data)``), so elements were only
        # appended once the stack was already at capacity and a fresh
        # stack could never accept a push. Append only while room is left.
        if len(self.data) < self.maximum:
            self.data.append(element)

    def pop(self):
        """
        Delete the element on the top of stack
        """
        self.data.pop(-1)

    def top(self):
        """
        Return the element on the top of stack
        :return: the element that is on the top of stack
        """
        return self.data[-1]

    def size(self) -> int:
        """
        Return the number of elements in stack
        :return: Number of elements in stack
        """
        return len(self.data)

    def empty(self) -> bool:
        """
        Return whether stack is empty or not
        :return: True if stack does not contain any elements
                 False if stack contains elements
        """
        return not bool(self.size())
| 24.037037 | 60 | 0.564715 |
from typing import Iterable
class Stack:
    """A bounded LIFO stack; pushes beyond ``maximum`` are ignored."""

    def __init__(self, maximum, data: Iterable = None):
        # Copy the seed iterable so the caller's object is not aliased.
        self.data = list(data) if data else []
        self.maximum = maximum

    def push(self, element):
        """Add ``element`` at the top of the stack if there is room.

        BUG FIX: the original guard (``self.maximum <= len(self.data)``)
        was inverted and only appended once the stack was already full.
        """
        if len(self.data) < self.maximum:
            self.data.append(element)

    def pop(self):
        """Remove the element on the top of the stack."""
        self.data.pop(-1)

    def top(self):
        """Return the element on the top of the stack."""
        return self.data[-1]

    def size(self) -> int:
        """Return the number of elements in the stack."""
        return len(self.data)

    def empty(self) -> bool:
        """Return True if the stack contains no elements."""
        return not bool(self.size())
| true | true |
1c2fab9eb0e822b4f94ee06c94a0274429e18786 | 4,279 | py | Python | main.py | SzymonZos/Fuzzy-Data-Analysis | 20b27ac183e8d65c41b7f3e3e491c8fb08b9696f | [
"MIT"
] | 1 | 2020-06-11T19:47:42.000Z | 2020-06-11T19:47:42.000Z | main.py | SzymonZos/Fuzzy-Data-Analysis | 20b27ac183e8d65c41b7f3e3e491c8fb08b9696f | [
"MIT"
] | 1 | 2020-07-30T08:41:13.000Z | 2020-08-14T19:00:27.000Z | main.py | SzymonZos/Fuzzy-Data-Analysis | 20b27ac183e8d65c41b7f3e3e491c8fb08b9696f | [
"MIT"
] | 1 | 2020-06-25T22:08:34.000Z | 2020-06-25T22:08:34.000Z | import re
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from functools import partial
from contextlib import ExitStack
from sklearn.cluster import KMeans
from sklearn.decomposition import PCA
from skfuzzy.cluster import cmeans, cmeans_predict
raw_datasets = ["models/" + name for name in ["pima.tr", "pima.te"]]
datasets = ["models/" + name for name in ["training.csv", "test.csv"]]
def preprocess_datasets() -> None:
    """Convert the space-separated raw Pima files into CSV files.

    Leading spaces are stripped from every row; the remaining runs of
    spaces become comma separators.
    """
    with ExitStack() as stack:
        raw_files = [stack.enter_context(open(path, 'r'))
                     for path in raw_datasets]
        csv_files = [stack.enter_context(open(path, 'w'))
                     for path in datasets]
        for raw_file, csv_file in zip(raw_files, csv_files):
            for row in raw_file.readlines():
                row = re.sub(r"^ +", "", row)
                csv_file.write(re.sub(r" +", ",", row))
def import_datasets() -> tuple:
    """Load both CSV datasets, dropping the trailing diagnosis column."""
    feature_cols = pd.read_csv(datasets[0], nrows=1).columns[:-1]
    return tuple(pd.read_csv(path, usecols=feature_cols) for path in datasets)
def read_diagnoses() -> tuple:
    """Read the diagnosis column of each dataset as 0/1 label arrays."""
    cols = pd.read_csv(datasets[0], nrows=1).columns
    labels = tuple()
    for path in datasets:
        frame = pd.read_csv(path, usecols=cols[-1:])
        # "Yes" rows become 1, everything else 0.
        encoded = [1 if row == "Yes" else 0 for row in frame.values]
        labels += (np.array(encoded),)
    return labels
def perform_crisp_clustering(training: np.array, test: np.array,
                             clusters: int) -> tuple:
    """Fit k-means on the training set and label both sets."""
    model = KMeans(clusters)
    model.fit(training)
    return model.labels_, model.predict(test)
def perform_fuzzy_clustering(training: np.array, test: np.array,
                             clusters: int, m: int) -> tuple:
    """Fuzzy c-means: fit on the training set, then label both sets.

    Returns a (train_labels, test_labels) tuple of 0/1 int arrays.
    A sample is labelled 1 when its membership in cluster index 1
    exceeds 0.2 — hard-coded threshold, presumably tuned for the
    two-cluster diabetes case; confirm before reusing elsewhere.
    """
    center, train_labels = cmeans(training.T, clusters, m, 0.005, 1000)[0:2]
    test_labels = cmeans_predict(test.T, center, m, 0.005, 1000)[0]
    return *((label[1] > 0.2).astype(int) for label in [train_labels, test_labels]),
    # Alternative hard assignment kept for reference:
    #return *(np.argmax(label, 0) for label in [train_labels, test_labels]),
def perform_pca(training: np.array, test: np.array) -> list:
    """Project both datasets onto their first two principal components."""
    pca = PCA(2)

    def _project(dataset):
        # Each dataset is fitted independently, exactly as before.
        pca.fit(dataset)
        return pca.transform(dataset)

    return [_project(dataset) for dataset in (training, test)]
# np.apply_along_axis(np.bincount, axis=1, arr= test_array,
# minlength = np.max(test_array) +1)
# np.bincount(np.argsort(center, 0)).argmax()
def plot_datasets(pca_datasets: list, diagnoses: tuple,
                  clusters: int, title: str) -> None:
    """Scatter-plot each PCA-projected dataset, one series per cluster."""
    for index, (points, labels) in enumerate(zip(pca_datasets, diagnoses)):
        for cluster in range(clusters):
            member = labels == cluster
            plt.plot(points[member, 0], points[member, 1], 'o',
                     markersize=3, label='series ' + str(cluster))
        suffix = " training set" if not index else " test set"
        plt.title(title + suffix)
        plt.legend()
        plt.show()
def test_algorithms(training: np.array, test: np.array, pca_datasets: list,
                    clusters: int, diagnoses: tuple) -> None:
    """Run fuzzy (m = 2..4) and crisp clustering, report and plot results.

    Prints, per algorithm, how many train/test labels agree with the
    ground-truth diagnoses, then plots the PCA projections.
    """
    algorithms = [partial(perform_fuzzy_clustering, training,
                          test, clusters, m) for m in range(2, 5)]
    algorithms += [partial(perform_crisp_clustering, training, test, clusters)]
    for algorithm in algorithms:
        result = algorithm()
        # Count per-set matches between predicted labels and diagnoses.
        print([sum(res) for res in [x == y for x, y in
                                    zip(result, diagnoses)]])
        title = "Clusters: {}, Function: {}".format(clusters,
                                                    algorithm.func.__name__)
        # The fuzzifier m is the last positional argument of the partial.
        if "fuzzy" in algorithm.func.__name__:
            title += ", m: {}".format(algorithm.args[-1])
        plot_datasets(pca_datasets, result, clusters, title)
def main():
    """Preprocess the Pima data, then cluster with 2 and 3 clusters."""
    preprocess_datasets()
    training_set, test_set = import_datasets()
    training, test = training_set.values, test_set.values
    diagnoses = read_diagnoses()
    pca_datasets = perform_pca(training, test)
    # Baseline plot: the ground-truth diagnoses.
    plot_datasets(pca_datasets, diagnoses, 2, "Default diagnoses")
    for clusters in range(2, 4):
        test_algorithms(training, test, pca_datasets, clusters, diagnoses)
if __name__ == "__main__":
    main()
| 38.205357 | 84 | 0.618836 | import re
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from functools import partial
from contextlib import ExitStack
from sklearn.cluster import KMeans
from sklearn.decomposition import PCA
from skfuzzy.cluster import cmeans, cmeans_predict
raw_datasets = ["models/" + name for name in ["pima.tr", "pima.te"]]
datasets = ["models/" + name for name in ["training.csv", "test.csv"]]
def preprocess_datasets() -> None:
    """Rewrite the space-separated raw files as comma-separated CSVs."""
    with ExitStack() as stack:
        raws = [stack.enter_context(open(file, 'r')) for file in raw_datasets]
        processed = [stack.enter_context(open(file, 'w')) for file in datasets]
        for raw, proc in zip(raws, processed):
            dataset = raw.readlines()
            # Strip leading spaces, then turn space runs into commas.
            dataset = [re.sub(r"^ +", "", row) for row in dataset]
            dataset = [re.sub(r" +", ",", row) for row in dataset]
            proc.writelines(dataset)
def import_datasets() -> tuple:
    """Load both CSVs as DataFrames, excluding the last (label) column."""
    cols = pd.read_csv(datasets[0], nrows=1).columns
    return tuple(pd.read_csv(file, usecols=cols[:-1]) for file in datasets)
def read_diagnoses() -> tuple:
    """Read each dataset's diagnosis column as a 0/1 numpy array."""
    cols = pd.read_csv(datasets[0], nrows=1).columns
    diagnoses = tuple()
    for dataset in datasets:
        read = pd.read_csv(dataset, usecols=cols[-1:])
        # "Yes" rows map to 1, everything else to 0.
        diagnoses += (np.array([*map(lambda x: 1 if x == "Yes" else 0,
                                     read.values)]),)
    return diagnoses
def perform_crisp_clustering(training: np.array, test: np.array,
                             clusters: int) -> tuple:
    """Fit k-means on the training set and label both sets."""
    kmeans = KMeans(clusters)
    kmeans.fit(training)
    return kmeans.labels_, kmeans.predict(test)
def perform_fuzzy_clustering(training: np.array, test: np.array,
                             clusters: int, m: int) -> tuple:
    """Fuzzy c-means labels for both sets: 1 where membership in
    cluster 1 exceeds the hard-coded 0.2 threshold — confirm threshold
    before reuse."""
    center, train_labels = cmeans(training.T, clusters, m, 0.005, 1000)[0:2]
    test_labels = cmeans_predict(test.T, center, m, 0.005, 1000)[0]
    return *((label[1] > 0.2).astype(int) for label in [train_labels, test_labels]),
def perform_pca(training: np.array, test: np.array) -> list:
    """Project each dataset onto its own first two principal components."""
    pca = PCA(2)
    pca_datasets = [training, test]
    for pos, dataset in enumerate(pca_datasets):
        # Note: each dataset is fitted independently.
        pca.fit(dataset)
        pca_datasets[pos] = pca.transform(dataset)
    return pca_datasets
def plot_datasets(pca_datasets: list, diagnoses: tuple,
                  clusters: int, title: str) -> None:
    """Scatter-plot the PCA projections, one series per cluster label."""
    for idx, (dataset, diagnose) in enumerate(zip(pca_datasets, diagnoses)):
        for j in range(clusters):
            plt.plot(dataset[diagnose == j, 0],
                     dataset[diagnose == j, 1], 'o', markersize=3,
                     label='series ' + str(j))
        # Index 0 is the training set, index 1 the test set.
        plt.title(title + (" training set" if not idx else " test set"))
        plt.legend()
        plt.show()
def test_algorithms(training: np.array, test: np.array, pca_datasets: list,
                    clusters: int, diagnoses: tuple) -> None:
    """Run fuzzy (m = 2..4) and crisp clustering; print match counts and plot."""
    algorithms = [partial(perform_fuzzy_clustering, training,
                          test, clusters, m) for m in range(2, 5)]
    algorithms += [partial(perform_crisp_clustering, training, test, clusters)]
    for algorithm in algorithms:
        result = algorithm()
        # How many train/test labels agree with the ground truth.
        print([sum(res) for res in [x == y for x, y in
                                    zip(result, diagnoses)]])
        title = "Clusters: {}, Function: {}".format(clusters,
                                                    algorithm.func.__name__)
        if "fuzzy" in algorithm.func.__name__:
            title += ", m: {}".format(algorithm.args[-1])
        plot_datasets(pca_datasets, result, clusters, title)
def main():
    """Preprocess the data, then evaluate clustering for 2 and 3 clusters."""
    preprocess_datasets()
    training_set, test_set = import_datasets()
    training, test = training_set.values, test_set.values
    diagnoses = read_diagnoses()
    pca_datasets = perform_pca(training, test)
    # Baseline plot: the ground-truth diagnoses.
    plot_datasets(pca_datasets, diagnoses, 2, "Default diagnoses")
    for clusters in range(2, 4):
        test_algorithms(training, test, pca_datasets, clusters, diagnoses)
if __name__ == "__main__":
    main()
| true | true |
1c2fac94405577726546c0175556f60c71cf410e | 49,500 | py | Python | beets/library.py | stragu/beets | da46a62772ab7a88c5799c84841f744dfc0f0a20 | [
"MIT"
] | null | null | null | beets/library.py | stragu/beets | da46a62772ab7a88c5799c84841f744dfc0f0a20 | [
"MIT"
] | null | null | null | beets/library.py | stragu/beets | da46a62772ab7a88c5799c84841f744dfc0f0a20 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2016, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""The core data store and collection logic for beets.
"""
from __future__ import (division, absolute_import, print_function,
unicode_literals)
import os
import sys
import unicodedata
import time
import re
from unidecode import unidecode
from beets import logging
from beets.mediafile import MediaFile, MutagenError, UnreadableFileError
from beets import plugins
from beets import util
from beets.util import bytestring_path, syspath, normpath, samefile
from beets.util.functemplate import Template
from beets import dbcore
from beets.dbcore import types
import beets
log = logging.getLogger('beets')
# Library-specific query types.
class PathQuery(dbcore.FieldQuery):
    """A query that matches all items under a given path.
    Matching can either be case-insensitive or case-sensitive. By
    default, the behavior depends on the OS: case-insensitive on Windows
    and case-sensitive otherwise.
    """
    # Characters that are special in SQL LIKE patterns and must be escaped.
    escape_re = re.compile(r'[\\_%]')
    escape_char = b'\\'
    def __init__(self, field, pattern, fast=True, case_sensitive=None):
        """Create a path query. `pattern` must be a path, either to a
        file or a directory.
        `case_sensitive` can be a bool or `None`, indicating that the
        behavior should depend on the filesystem.
        """
        super(PathQuery, self).__init__(field, pattern, fast)
        # By default, the case sensitivity depends on the filesystem
        # that the query path is located on.
        if case_sensitive is None:
            path = util.bytestring_path(util.normpath(pattern))
            case_sensitive = beets.util.case_sensitive(path)
        self.case_sensitive = case_sensitive
        # Use a normalized-case pattern for case-insensitive matches.
        if not case_sensitive:
            pattern = pattern.lower()
        # Match the path as a single file.
        self.file_path = util.bytestring_path(util.normpath(pattern))
        # As a directory (prefix).
        self.dir_path = util.bytestring_path(os.path.join(self.file_path, b''))
    @classmethod
    def is_path_query(cls, query_part):
        """Try to guess whether a unicode query part is a path query.
        Condition: separator precedes colon and the file exists.
        """
        # Only consider the text before any "field:" colon.
        colon = query_part.find(':')
        if colon != -1:
            query_part = query_part[:colon]
        return (os.sep in query_part and
                os.path.exists(syspath(normpath(query_part))))
    def match(self, item):
        """Slow (Python-side) match: exact file or directory-prefix match."""
        path = item.path if self.case_sensitive else item.path.lower()
        return (path == self.file_path) or path.startswith(self.dir_path)
    def col_clause(self):
        """Fast (SQL-side) match clause plus its bound parameters."""
        if self.case_sensitive:
            # Byte-exact comparison: equality or prefix via substr().
            file_blob = buffer(self.file_path)
            dir_blob = buffer(self.dir_path)
            return '({0} = ?) || (substr({0}, 1, ?) = ?)'.format(self.field), \
                   (file_blob, len(dir_blob), dir_blob)
        # Case-insensitive: use LIKE, escaping its metacharacters
        # (\, _ and %) in the stored, lower-cased patterns.
        escape = lambda m: self.escape_char + m.group(0)
        dir_pattern = self.escape_re.sub(escape, self.dir_path)
        dir_blob = buffer(dir_pattern + b'%')
        file_pattern = self.escape_re.sub(escape, self.file_path)
        file_blob = buffer(file_pattern)
        return '({0} LIKE ? ESCAPE ?) || ({0} LIKE ? ESCAPE ?)'.format(
            self.field), (file_blob, self.escape_char, dir_blob,
                          self.escape_char)
# Library-specific field types.
class DateType(types.Float):
    """A UNIX timestamp rendered and parsed with the configured
    ``time_format`` (falling back to a raw epoch number on parse).
    """
    # TODO representation should be `datetime` object
    # TODO distinguish between date and time types
    query = dbcore.query.DateQuery

    def format(self, value):
        fmt = beets.config['time_format'].get(unicode)
        return time.strftime(fmt, time.localtime(value or 0))

    def parse(self, string):
        fmt = beets.config['time_format'].get(unicode)
        try:
            # Prefer the human-readable configured format.
            return time.mktime(time.strptime(string, fmt))
        except ValueError:
            pass
        try:
            # Otherwise accept a plain UNIX timestamp number.
            return float(string)
        except ValueError:
            return self.null
class PathType(types.Type):
    """Filesystem paths stored as raw bytes in a BLOB column."""
    sql = u'BLOB'
    query = PathQuery
    model_type = bytes

    def format(self, value):
        return util.displayable_path(value)

    def parse(self, string):
        return normpath(bytestring_path(string))

    def normalize(self, value):
        # Unicode paths are encoded to bytes; SQLite gives us buffers,
        # which are unwrapped; everything else passes through untouched.
        if isinstance(value, unicode):
            return bytestring_path(value)
        if isinstance(value, buffer):
            return bytes(value)
        return value

    def from_sql(self, sql_value):
        return self.normalize(sql_value)

    def to_sql(self, value):
        # SQLite needs a buffer to store bytes without text decoding.
        return buffer(value) if isinstance(value, bytes) else value
class MusicalKey(types.String):
    """String representing the musical key of a song.

    The standard format is C, Cm, C#, C#m, etc.
    """
    ENHARMONIC = {
        r'db': 'c#',
        r'eb': 'd#',
        r'gb': 'f#',
        r'ab': 'g#',
        r'bb': 'a#',
    }

    def parse(self, key):
        normalized = key.lower()
        # Rewrite flats as their sharp equivalents.
        for flat_name, sharp_name in self.ENHARMONIC.items():
            normalized = re.sub(flat_name, sharp_name, normalized)
        # Collapse "... minor"/"... major" suffixes to the short form.
        normalized = re.sub(r'[\W\s]+minor', 'm', normalized)
        normalized = re.sub(r'[\W\s]+major', '', normalized)
        return normalized.capitalize()

    def normalize(self, key):
        return None if key is None else self.parse(key)
class DurationType(types.Float):
    """Human-friendly (M:SS) representation of a time interval."""
    query = dbcore.query.DurationQuery

    def format(self, value):
        # Raw seconds are returned verbatim when so configured.
        if beets.config['format_raw_length'].get(bool):
            return value
        return beets.ui.human_seconds_short(value or 0.0)

    def parse(self, string):
        try:
            # An M:SS string takes precedence...
            return util.raw_seconds_short(string)
        except ValueError:
            pass
        try:
            # ...but a bare number of seconds also works.
            return float(string)
        except ValueError:
            return self.null
# Library-specific sort types.
class SmartArtistSort(dbcore.query.Sort):
    """Sort by artist (either album artist or track artist),
    prioritizing the sort field over the raw field.
    """
    def __init__(self, model_cls, ascending=True, case_insensitive=True):
        self.album = model_cls is Album
        self.ascending = ascending
        self.case_insensitive = case_insensitive

    def order_clause(self):
        field = 'albumartist' if self.album else 'artist'
        direction = "ASC" if self.ascending else "DESC"
        collation = 'COLLATE NOCASE' if self.case_insensitive else ''
        # Fall back to the raw field whenever the _sort field is unset.
        return ('(CASE {0}_sort WHEN NULL THEN {0} '
                'WHEN "" THEN {0} '
                'ELSE {0}_sort END) {1} {2}').format(field, collation,
                                                     direction)

    def sort(self, objs):
        def artist_of(obj):
            # Prefer the explicit sort name; fall back to the raw name.
            if self.album:
                return obj.albumartist_sort or obj.albumartist
            return obj.artist_sort or obj.artist

        if self.case_insensitive:
            sort_key = lambda obj: artist_of(obj).lower()
        else:
            sort_key = artist_of
        return sorted(objs, key=sort_key, reverse=not self.ascending)
# Special path format key.
PF_KEY_DEFAULT = 'default'
# Exceptions.
class FileOperationError(Exception):
    """Indicates an error when interacting with a file on disk.

    Possibilities include an unsupported media type, a permissions
    error, and an unhandled Mutagen exception.
    """
    def __init__(self, path, reason):
        """Create an exception describing an operation on the file at
        `path` with the underlying (chained) exception `reason`.
        """
        super(FileOperationError, self).__init__(path, reason)
        self.path = path
        self.reason = reason

    def __unicode__(self):
        """Describe both the underlying reason and the file in question."""
        return u'{0}: {1}'.format(
            util.displayable_path(self.path), unicode(self.reason))

    def __str__(self):
        return unicode(self).encode('utf8')
class ReadError(FileOperationError):
    """An error while reading a file (i.e. in `Item.read`)."""
    def __unicode__(self):
        base = super(ReadError, self).__unicode__()
        return u'error reading ' + base
class WriteError(FileOperationError):
    """An error while writing a file (i.e. in `Item.write`)."""
    def __unicode__(self):
        base = super(WriteError, self).__unicode__()
        return u'error writing ' + base
# Item and Album model classes.
class LibModel(dbcore.Model):
    """Shared concrete functionality for Items and Albums.
    """
    _format_config_key = None
    """Config key that specifies how an instance should be formatted.
    """
    def _template_funcs(self):
        # Built-in template functions, extended by whatever plugins add.
        funcs = DefaultTemplateFunctions(self, self._db).functions()
        funcs.update(plugins.template_funcs())
        return funcs
    def store(self):
        super(LibModel, self).store()
        # Notify plugins after every database mutation.
        plugins.send('database_change', lib=self._db, model=self)
    def remove(self):
        super(LibModel, self).remove()
        plugins.send('database_change', lib=self._db, model=self)
    def add(self, lib=None):
        super(LibModel, self).add(lib)
        plugins.send('database_change', lib=self._db, model=self)
    def __format__(self, spec):
        # An empty spec falls back to the configured default format.
        if not spec:
            spec = beets.config[self._format_config_key].get(unicode)
        result = self.evaluate_template(spec)
        if isinstance(spec, bytes):
            # if spec is a byte string then we must return one as well
            return result.encode('utf8')
        else:
            return result
    def __str__(self):
        return format(self).encode('utf8')
    def __unicode__(self):
        return format(self)
class FormattedItemMapping(dbcore.db.FormattedMapping):
    """Add lookup for album-level fields.

    Album-level fields take precedence if `for_path` is true.
    """
    def __init__(self, item, for_path=False):
        super(FormattedItemMapping, self).__init__(item, for_path)
        self.album = item.get_album()
        album_keys = []
        if self.album:
            item_fields = item._fields.keys()
            for key in self.album.keys(True):
                # Expose album keys that either belong to the album's
                # schema or do not clash with a fixed item field.
                if key in Album.item_keys or key not in item_fields:
                    album_keys.append(key)
        self.album_keys = album_keys
        self.all_keys = set(self.model_keys).union(self.album_keys)

    def _get(self, key):
        """Get the value for a key, either from the album or the item.
        Raise a KeyError for invalid keys.
        """
        # Album values win when formatting a path, or when the item has
        # no field of that name at all.
        if key in self.album_keys and (self.for_path or
                                       key not in self.model_keys):
            return self._get_formatted(self.album, key)
        if key in self.model_keys:
            return self._get_formatted(self.model, key)
        raise KeyError(key)

    def __getitem__(self, key):
        """Get the value for a key. Certain unset values are remapped.
        """
        value = self._get(key)
        # The artist fields fall back to one another so path formats
        # have something sensible on as-is imports.
        if not value and key == 'artist':
            return self._get('albumartist')
        if not value and key == 'albumartist':
            return self._get('artist')
        return value

    def __iter__(self):
        return iter(self.all_keys)

    def __len__(self):
        return len(self.all_keys)
class Item(LibModel):
_table = 'items'
_flex_table = 'item_attributes'
_fields = {
'id': types.PRIMARY_ID,
'path': PathType(),
'album_id': types.FOREIGN_ID,
'title': types.STRING,
'artist': types.STRING,
'artist_sort': types.STRING,
'artist_credit': types.STRING,
'album': types.STRING,
'albumartist': types.STRING,
'albumartist_sort': types.STRING,
'albumartist_credit': types.STRING,
'genre': types.STRING,
'composer': types.STRING,
'grouping': types.STRING,
'year': types.PaddedInt(4),
'month': types.PaddedInt(2),
'day': types.PaddedInt(2),
'track': types.PaddedInt(2),
'tracktotal': types.PaddedInt(2),
'disc': types.PaddedInt(2),
'disctotal': types.PaddedInt(2),
'lyrics': types.STRING,
'comments': types.STRING,
'bpm': types.INTEGER,
'comp': types.BOOLEAN,
'mb_trackid': types.STRING,
'mb_albumid': types.STRING,
'mb_artistid': types.STRING,
'mb_albumartistid': types.STRING,
'albumtype': types.STRING,
'label': types.STRING,
'acoustid_fingerprint': types.STRING,
'acoustid_id': types.STRING,
'mb_releasegroupid': types.STRING,
'asin': types.STRING,
'catalognum': types.STRING,
'script': types.STRING,
'language': types.STRING,
'country': types.STRING,
'albumstatus': types.STRING,
'media': types.STRING,
'albumdisambig': types.STRING,
'disctitle': types.STRING,
'encoder': types.STRING,
'rg_track_gain': types.NULL_FLOAT,
'rg_track_peak': types.NULL_FLOAT,
'rg_album_gain': types.NULL_FLOAT,
'rg_album_peak': types.NULL_FLOAT,
'original_year': types.PaddedInt(4),
'original_month': types.PaddedInt(2),
'original_day': types.PaddedInt(2),
'initial_key': MusicalKey(),
'length': DurationType(),
'bitrate': types.ScaledInt(1000, u'kbps'),
'format': types.STRING,
'samplerate': types.ScaledInt(1000, u'kHz'),
'bitdepth': types.INTEGER,
'channels': types.INTEGER,
'mtime': DateType(),
'added': DateType(),
}
_search_fields = ('artist', 'title', 'comments',
'album', 'albumartist', 'genre')
_types = {
'data_source': types.STRING,
}
_media_fields = set(MediaFile.readable_fields()) \
.intersection(_fields.keys())
"""Set of item fields that are backed by `MediaFile` fields.
Any kind of field (fixed, flexible, and computed) may be a media
field. Only these fields are read from disk in `read` and written in
`write`.
"""
_media_tag_fields = set(MediaFile.fields()).intersection(_fields.keys())
"""Set of item fields that are backed by *writable* `MediaFile` tag
fields.
This excludes fields that represent audio data, such as `bitrate` or
`length`.
"""
_formatter = FormattedItemMapping
_sorts = {'artist': SmartArtistSort}
_format_config_key = 'format_item'
@classmethod
def _getters(cls):
getters = plugins.item_field_getters()
getters['singleton'] = lambda i: i.album_id is None
getters['filesize'] = Item.try_filesize # In bytes.
return getters
@classmethod
def from_path(cls, path):
"""Creates a new item from the media file at the specified path.
"""
# Initiate with values that aren't read from files.
i = cls(album_id=None)
i.read(path)
i.mtime = i.current_mtime() # Initial mtime.
return i
    def __setitem__(self, key, value):
        """Set the item's value for a standard field or a flexattr.
        """
        # Encode unicode paths and read buffers.
        # Paths are always held internally as encoded bytes.
        if key == 'path':
            if isinstance(value, unicode):
                value = bytestring_path(value)
            elif isinstance(value, buffer):
                value = bytes(value)
        # Changing any tag field marks the on-disk file as stale.
        if key in MediaFile.fields():
            self.mtime = 0  # Reset mtime on dirty.
        super(Item, self).__setitem__(key, value)
def update(self, values):
"""Set all key/value pairs in the mapping. If mtime is
specified, it is not reset (as it might otherwise be).
"""
super(Item, self).update(values)
if self.mtime == 0 and 'mtime' in values:
self.mtime = values['mtime']
def get_album(self):
"""Get the Album object that this item belongs to, if any, or
None if the item is a singleton or is not associated with a
library.
"""
if not self._db:
return None
return self._db.get_album(self)
# Interaction with file metadata.
    def read(self, read_path=None):
        """Read the metadata from the associated file.
        If `read_path` is specified, read metadata from that file
        instead. Updates all the properties in `_media_fields`
        from the media file.
        Raises a `ReadError` if the file could not be read.
        """
        if read_path is None:
            read_path = self.path
        else:
            read_path = normpath(read_path)
        try:
            mediafile = MediaFile(syspath(read_path))
        except (OSError, IOError, UnreadableFileError) as exc:
            raise ReadError(read_path, exc)
        for key in self._media_fields:
            value = getattr(mediafile, key)
            if isinstance(value, (int, long)):
                # Clamp oversized integers to zero — presumably to keep
                # values within a 64-bit signed column; confirm against
                # the database layer.
                if value.bit_length() > 63:
                    value = 0
            self[key] = value
        # Database's mtime should now reflect the on-disk value.
        if read_path == self.path:
            self.mtime = self.current_mtime()
        self.path = read_path
    def write(self, path=None, tags=None):
        """Write the item's metadata to a media file.
        All fields in `_media_fields` are written to disk according to
        the values on this object.
        `path` is the path of the mediafile to write the data to. It
        defaults to the item's path.
        `tags` is a dictionary of additional metadata that should be
        written to the file. (These tags need not be in `_media_fields`.)
        Can raise either a `ReadError` or a `WriteError`.
        """
        if path is None:
            path = self.path
        else:
            path = normpath(path)
        # Get the data to write to the file.
        item_tags = dict(self)
        item_tags = {k: v for k, v in item_tags.items()
                     if k in self._media_fields}  # Only write media fields.
        if tags is not None:
            # Caller-supplied tags override the item's own values.
            item_tags.update(tags)
        plugins.send('write', item=self, path=path, tags=item_tags)
        # Open the file.
        try:
            mediafile = MediaFile(syspath(path),
                                  id3v23=beets.config['id3v23'].get(bool))
        except (OSError, IOError, UnreadableFileError) as exc:
            raise ReadError(self.path, exc)
        # Write the tags to the file.
        mediafile.update(item_tags)
        try:
            mediafile.save()
        except (OSError, IOError, MutagenError) as exc:
            raise WriteError(self.path, exc)
        # The file has a new mtime.
        if path == self.path:
            self.mtime = self.current_mtime()
        plugins.send('after_write', item=self, path=path)
def try_write(self, path=None, tags=None):
"""Calls `write()` but catches and logs `FileOperationError`
exceptions.
Returns `False` an exception was caught and `True` otherwise.
"""
try:
self.write(path, tags)
return True
except FileOperationError as exc:
log.error("{0}", exc)
return False
    def try_sync(self, write, move, with_album=True):
        """Synchronize the item with the database and, possibly, updates its
        tags on disk and its path (by moving the file).
        `write` indicates whether to write new tags into the file. Similarly,
        `move` controls whether the path should be updated. In the
        latter case, files are *only* moved when they are inside their
        library's directory (if any).
        Similar to calling :meth:`write`, :meth:`move`, and :meth:`store`
        (conditionally).
        """
        if write:
            self.try_write()
        if move:
            # Check whether this file is inside the library directory.
            if self._db and self._db.directory in util.ancestry(self.path):
                log.debug('moving {0} to synchronize path',
                          util.displayable_path(self.path))
                self.move(with_album=with_album)
        # The database record is stored unconditionally.
        self.store()
# Files themselves.
    def move_file(self, dest, copy=False, link=False):
        """Moves or copies the item's file, updating the path value if
        the move succeeds. If a file exists at ``dest``, then it is
        slightly modified to be unique.
        """
        # Avoid name collisions, except when source and dest are the
        # same file already.
        if not util.samefile(self.path, dest):
            dest = util.unique_path(dest)
        if copy:
            util.copy(self.path, dest)
            plugins.send("item_copied", item=self, source=self.path,
                         destination=dest)
        elif link:
            util.link(self.path, dest)
            plugins.send("item_linked", item=self, source=self.path,
                         destination=dest)
        else:
            # Moves get both a before- and after-event.
            plugins.send("before_item_moved", item=self, source=self.path,
                         destination=dest)
            util.move(self.path, dest)
            plugins.send("item_moved", item=self, source=self.path,
                         destination=dest)
        # Either copying or moving succeeded, so update the stored path.
        self.path = dest
def current_mtime(self):
"""Returns the current mtime of the file, rounded to the nearest
integer.
"""
return int(os.path.getmtime(syspath(self.path)))
def try_filesize(self):
"""Get the size of the underlying file in bytes.
If the file is missing, return 0 (and log a warning).
"""
try:
return os.path.getsize(syspath(self.path))
except (OSError, Exception) as exc:
log.warning(u'could not get filesize: {0}', exc)
return 0
# Model methods.
    def remove(self, delete=False, with_album=True):
        """Removes the item. If `delete`, then the associated file is
        removed from disk. If `with_album`, then the item's album (if
        any) is removed if the item was the last in the album.
        """
        super(Item, self).remove()
        # Remove the album if it is empty.
        if with_album:
            album = self.get_album()
            if album and not album.items():
                album.remove(delete, False)
        # Send a 'item_removed' signal to plugins
        plugins.send('item_removed', item=self)
        # Delete the associated file.
        if delete:
            util.remove(self.path)
            util.prune_dirs(os.path.dirname(self.path), self._db.directory)
        # Invalidate the library's memo cache — presumably cached query
        # results; confirm against dbcore.
        self._db._memotable = {}
def move(self, copy=False, link=False, basedir=None, with_album=True):
    """Move the item to its designated location within the library
    directory (provided by destination()). Subdirectories are
    created as needed. If the operation succeeds, the item's path
    field is updated to reflect the new location.

    If `copy` is true, the file is copied rather than moved.
    Similarly, `link` creates a symlink instead.

    basedir overrides the library base directory for the
    destination.

    If the item is in an album, the album is given an opportunity to
    move its art. (This can be disabled by passing
    with_album=False.)

    The item is stored to the database if it is in the database, so
    any dirty fields prior to the move() call will be written as a
    side effect. You probably want to call save() to commit the DB
    transaction.
    """
    self._check_db()
    dest = self.destination(basedir=basedir)

    # Create necessary ancestry for the move.
    util.mkdirall(dest)

    # Perform the move and store the change.
    old_path = self.path
    self.move_file(dest, copy, link)
    self.store()

    # If this item is in an album, move its art.
    if with_album:
        album = self.get_album()
        if album:
            # Forward `link` as well so that the art is symlinked
            # rather than moved in link mode, matching Album.move().
            # (Previously only `copy` was passed here.)
            album.move_art(copy, link)
            album.store()

    # Prune vacated directory.
    if not copy:
        util.prune_dirs(os.path.dirname(old_path), self._db.directory)
# Templating.
def destination(self, fragment=False, basedir=None, platform=None,
                path_formats=None):
    """Returns the path in the library directory designated for the
    item (i.e., where the file ought to be). fragment makes this
    method return just the path fragment underneath the root library
    directory; the path is also returned as Unicode instead of
    encoded as a bytestring. basedir can override the library's base
    directory for the destination.
    """
    self._check_db()
    platform = platform or sys.platform
    basedir = basedir or self._db.directory
    path_formats = path_formats or self._db.path_formats

    # Use a path format based on a query, falling back on the
    # default. (The for/else falls through to the default-lookup loop
    # only when no non-default query matched.)
    for query, path_format in path_formats:
        if query == PF_KEY_DEFAULT:
            continue
        query, _ = parse_query_string(query, type(self))
        if query.match(self):
            # The query matches the item! Use the corresponding path
            # format.
            break
    else:
        # No query matched; fall back to default.
        for query, path_format in path_formats:
            if query == PF_KEY_DEFAULT:
                break
        else:
            assert False, "no default path format"
    if isinstance(path_format, Template):
        subpath_tmpl = path_format
    else:
        subpath_tmpl = Template(path_format)

    # Evaluate the selected template (True requests path-friendly
    # formatting).
    subpath = self.evaluate_template(subpath_tmpl, True)

    # Prepare path for output: normalize Unicode characters. macOS
    # filesystems use decomposed form (NFD); everything else NFC.
    if platform == 'darwin':
        subpath = unicodedata.normalize('NFD', subpath)
    else:
        subpath = unicodedata.normalize('NFC', subpath)

    if beets.config['asciify_paths']:
        subpath = unidecode(subpath)

    maxlen = beets.config['max_filename_length'].get(int)
    if not maxlen:
        # When zero, try to determine from filesystem.
        maxlen = util.max_filename_length(self._db.directory)

    subpath, fellback = util.legalize_path(
        subpath, self._db.replacements, maxlen,
        os.path.splitext(self.path)[1], fragment
    )
    if fellback:
        # Print an error message if legalization fell back to
        # default replacements because of the maximum length.
        log.warning('Fell back to default replacements when naming '
                    'file {}. Configure replacements to avoid lengthening '
                    'the filename.', subpath)

    if fragment:
        return subpath
    else:
        return normpath(os.path.join(basedir, subpath))
class Album(LibModel):
"""Provides access to information about albums stored in a
library. Reflects the library's "albums" table, including album
art.
"""
_table = 'albums'
_flex_table = 'album_attributes'
_always_dirty = True
_fields = {
'id': types.PRIMARY_ID,
'artpath': PathType(),
'added': DateType(),
'albumartist': types.STRING,
'albumartist_sort': types.STRING,
'albumartist_credit': types.STRING,
'album': types.STRING,
'genre': types.STRING,
'year': types.PaddedInt(4),
'month': types.PaddedInt(2),
'day': types.PaddedInt(2),
'disctotal': types.PaddedInt(2),
'comp': types.BOOLEAN,
'mb_albumid': types.STRING,
'mb_albumartistid': types.STRING,
'albumtype': types.STRING,
'label': types.STRING,
'mb_releasegroupid': types.STRING,
'asin': types.STRING,
'catalognum': types.STRING,
'script': types.STRING,
'language': types.STRING,
'country': types.STRING,
'albumstatus': types.STRING,
'albumdisambig': types.STRING,
'rg_album_gain': types.NULL_FLOAT,
'rg_album_peak': types.NULL_FLOAT,
'original_year': types.PaddedInt(4),
'original_month': types.PaddedInt(2),
'original_day': types.PaddedInt(2),
}
_search_fields = ('album', 'albumartist', 'genre')
_types = {
'path': PathType(),
'data_source': types.STRING,
}
_sorts = {
'albumartist': SmartArtistSort,
'artist': SmartArtistSort,
}
item_keys = [
'added',
'albumartist',
'albumartist_sort',
'albumartist_credit',
'album',
'genre',
'year',
'month',
'day',
'disctotal',
'comp',
'mb_albumid',
'mb_albumartistid',
'albumtype',
'label',
'mb_releasegroupid',
'asin',
'catalognum',
'script',
'language',
'country',
'albumstatus',
'albumdisambig',
'rg_album_gain',
'rg_album_peak',
'original_year',
'original_month',
'original_day',
]
"""List of keys that are set on an album's items.
"""
_format_config_key = 'format_album'
@classmethod
def _getters(cls):
    """Return the getter functions for the album's computed fields:
    the plugin-provided fields plus `path` (the album's directory)
    and `albumtotal`.
    """
    computed = plugins.album_field_getters()
    computed.update({
        'path': Album.item_dir,
        'albumtotal': Album._albumtotal,
    })
    return computed
def items(self):
    """Return an iterable over the :class:`Item` objects whose
    ``album_id`` matches this album's id.
    """
    membership = dbcore.MatchQuery('album_id', self.id)
    return self._db.items(membership)
def remove(self, delete=False, with_items=True):
    """Removes this album and all its associated items from the
    library. If delete, then the items' files are also deleted
    from disk, along with any album art. The directories
    containing the album are also removed (recursively) if empty.
    Set with_items to False to avoid removing the album's items.
    """
    super(Album, self).remove()

    # Delete art file.
    if delete:
        artpath = self.artpath
        if artpath:
            util.remove(artpath)

    # Remove (and possibly delete) the constituent items. False
    # prevents each item from re-examining this (already removed)
    # album.
    if with_items:
        for item in self.items():
            item.remove(delete, False)
def move_art(self, copy=False, link=False):
    """Move or copy any existing album art so that it remains in the
    same directory as the items.

    Does nothing when the album has no art or the art is already at
    its destination.
    """
    old_art = self.artpath
    if not old_art:
        return

    new_art = self.art_destination(old_art)
    if new_art == old_art:
        return

    # Avoid clobbering an existing file at the destination.
    new_art = util.unique_path(new_art)
    log.debug(u'moving album art {0} to {1}',
              util.displayable_path(old_art),
              util.displayable_path(new_art))
    if copy:
        util.copy(old_art, new_art)
    elif link:
        util.link(old_art, new_art)
    else:
        util.move(old_art, new_art)
    self.artpath = new_art

    # Prune old path when moving.
    if not copy:
        util.prune_dirs(os.path.dirname(old_art),
                        self._db.directory)
def move(self, copy=False, link=False, basedir=None):
    """Moves (or copies) all items to their destination. Any album
    art moves along with them. basedir overrides the library base
    directory for the destination. The album is stored to the
    database, persisting any modifications to its metadata.
    """
    basedir = basedir or self._db.directory

    # Ensure new metadata is available to items for destination
    # computation.
    self.store()

    # Move items. with_album=False: art is moved once, below, rather
    # than once per item.
    items = list(self.items())
    for item in items:
        item.move(copy, link, basedir=basedir, with_album=False)

    # Move art.
    self.move_art(copy, link)
    self.store()
def item_dir(self):
    """Return the directory containing the album's first item.

    Raises ValueError when the album has no items.
    """
    first = self.items().get()
    if not first:
        raise ValueError('empty album')
    return os.path.dirname(first.path)
def _albumtotal(self):
    """Return the total number of tracks on all discs on the album.

    With per-disc numbering disabled (or a single disc), this is just
    the first item's tracktotal; otherwise each disc's tracktotal is
    summed once.
    """
    if self.disctotal == 1 or not beets.config['per_disc_numbering']:
        return self.items()[0].tracktotal

    seen_discs = []
    total = 0
    for item in self.items():
        if item.disc in seen_discs:
            continue
        total += item.tracktotal
        seen_discs.append(item.disc)
        # Every disc accounted for: stop scanning items.
        if len(seen_discs) == self.disctotal:
            break

    return total
def art_destination(self, image, item_dir=None):
    """Returns a path to the destination for the album art image
    for the album. `image` is the path of the image that will be
    moved there (used for its extension).

    The path construction uses the existing path of the album's
    items, so the album must contain at least one item or
    item_dir must be provided.
    """
    image = bytestring_path(image)
    item_dir = item_dir or self.item_dir()

    # Render the configured art filename template for this album.
    filename_tmpl = Template(beets.config['art_filename'].get(unicode))
    subpath = self.evaluate_template(filename_tmpl, True)
    if beets.config['asciify_paths']:
        subpath = unidecode(subpath)
    subpath = util.sanitize_path(subpath,
                                 replacements=self._db.replacements)
    subpath = bytestring_path(subpath)

    # Keep the source image's file extension.
    _, ext = os.path.splitext(image)
    dest = os.path.join(item_dir, subpath + ext)

    return bytestring_path(dest)
def set_art(self, path, copy=True):
    """Sets the album's cover art to the image at the given path.
    The image is copied (or moved) into place, replacing any
    existing art.

    Sends an 'art_set' event with `self` as the sole argument.
    """
    path = bytestring_path(path)
    oldart = self.artpath
    artdest = self.art_destination(path)

    if oldart and samefile(path, oldart):
        # Art already set.
        return
    elif samefile(path, artdest):
        # Art already in place.
        self.artpath = path
        return

    # Normal operation: remove art being replaced in place, then
    # copy/move the new image to a unique destination.
    if oldart == artdest:
        util.remove(oldart)
    artdest = util.unique_path(artdest)
    if copy:
        util.copy(path, artdest)
    else:
        util.move(path, artdest)
    self.artpath = artdest

    plugins.send('art_set', album=self)
def store(self):
    """Update the database with the album information. The album's
    tracks are also updated.
    """
    # Get modified track fields.
    track_updates = {}
    for key in self.item_keys:
        if key in self._dirty:
            track_updates[key] = self[key]

    # One transaction covers the album row and all item rows.
    with self._db.transaction():
        super(Album, self).store()
        if track_updates:
            # Propagate changed album-level fields to every item.
            for item in self.items():
                for key, value in track_updates.items():
                    item[key] = value
                item.store()
def try_sync(self, write, move):
    """Synchronize the album and its items with the database.
    Optionally, also write any new tags into the files and update
    their paths.

    `write` indicates whether to write tags to the item files, and
    `move` controls whether files (both audio and album art) are
    moved.
    """
    # Persist the album first so items see current album metadata.
    self.store()
    for item in self.items():
        item.try_sync(write, move)
# Query construction helpers.
def parse_query_parts(parts, model_cls):
    """Given a beets query string as a list of components, return the
    `Query` and `Sort` they represent.

    Like `dbcore.parse_sorted_query`, with beets query prefixes and
    special path query detection.
    """
    # Get query types and their prefix characters.
    prefixes = {':': dbcore.query.RegexpQuery}
    prefixes.update(plugins.queries())

    # Special-case path-like queries, which are non-field queries
    # containing path separators (/).
    path_parts = []
    non_path_parts = []
    for s in parts:
        if PathQuery.is_path_query(s):
            path_parts.append(s)
        else:
            non_path_parts.append(s)

    query, sort = dbcore.parse_sorted_query(
        model_cls, non_path_parts, prefixes
    )

    # Add path queries to aggregate query.
    # Use the fast (database-side) path match only when the model has
    # a fixed `path` field; otherwise fall back to flexattr matching.
    fast_path_query = 'path' in model_cls._fields
    query.subqueries += [PathQuery('path', s, fast_path_query)
                         for s in path_parts]

    return query, sort
def parse_query_string(s, model_cls):
    """Given a beets query string, return the `Query` and `Sort` they
    represent.

    The string is split into components using shell-like syntax.
    Raises `dbcore.InvalidQueryError` when the string cannot be
    tokenized.
    """
    # Queries must be passed in as Unicode strings.
    assert isinstance(s, unicode), "Query is not unicode: {0!r}".format(s)
    try:
        parts = util.shlex_split(s)
    except ValueError as exc:
        raise dbcore.InvalidQueryError(s, exc)
    return parse_query_parts(parts, model_cls)
# The Library: interface to the database.
class Library(dbcore.Database):
    """A database of music containing songs and albums.
    """
    _models = (Item, Album)

    def __init__(self, path='library.blb',
                 directory='~/Music',
                 path_formats=((PF_KEY_DEFAULT,
                                '$artist/$album/$track $title'),),
                 replacements=None):
        # Normalize the database path unless using an in-memory DB.
        if path != ':memory:':
            self.path = bytestring_path(normpath(path))
        super(Library, self).__init__(path)

        self.directory = bytestring_path(normpath(directory))
        self.path_formats = path_formats
        self.replacements = replacements

        self._memotable = {}  # Used for template substitution performance.
# Adding objects to the database.
def add(self, obj):
    """Add the :class:`Item` or :class:`Album` object to the library
    database. Return the object's new id.
    """
    obj.add(self)
    # Library contents changed: invalidate the template memo table.
    self._memotable = {}
    return obj.id
def add_album(self, items):
    """Create a new album consisting of a list of items.

    The items are added to the database if they don't yet have an
    ID. Return a new :class:`Album` object. The list items must not
    be empty.
    """
    if not items:
        raise ValueError(u'need at least one item')

    # Create the album structure using metadata from the first item.
    values = dict((key, items[0][key]) for key in Album.item_keys)
    album = Album(self, **values)

    # Add the album structure and set the items' album_id fields.
    # Store or add the items.
    with self.transaction():
        album.add(self)
        for item in items:
            item.album_id = album.id
            if item.id is None:
                item.add(self)
            else:
                item.store()

    return album
# Querying.
def _fetch(self, model_cls, query, sort=None):
    """Parse a query and fetch. If an order specification is present
    in the query string the `sort` argument is ignored.
    """
    # Parse the query, if necessary.
    try:
        parsed_sort = None
        if isinstance(query, basestring):
            query, parsed_sort = parse_query_string(query, model_cls)
        elif isinstance(query, (list, tuple)):
            query, parsed_sort = parse_query_parts(query, model_cls)
    except dbcore.query.InvalidQueryArgumentTypeError as exc:
        raise dbcore.InvalidQueryError(query, exc)

    # Any non-null sort specified by the parsed query overrides the
    # provided sort.
    if parsed_sort and not isinstance(parsed_sort, dbcore.query.NullSort):
        sort = parsed_sort

    return super(Library, self)._fetch(
        model_cls, query, sort
    )
@staticmethod
def get_default_album_sort():
    """Get a :class:`Sort` object for albums from the ``sort_album``
    config option.
    """
    return dbcore.sort_from_strings(
        Album, beets.config['sort_album'].as_str_seq())

@staticmethod
def get_default_item_sort():
    """Get a :class:`Sort` object for items from the ``sort_item``
    config option.
    """
    return dbcore.sort_from_strings(
        Item, beets.config['sort_item'].as_str_seq())
def albums(self, query=None, sort=None):
    """Get :class:`Album` objects matching the query.

    `sort` falls back to the configured default album sort.
    """
    return self._fetch(Album, query, sort or self.get_default_album_sort())

def items(self, query=None, sort=None):
    """Get :class:`Item` objects matching the query.

    `sort` falls back to the configured default item sort.
    """
    return self._fetch(Item, query, sort or self.get_default_item_sort())
# Convenience accessors.
def get_item(self, id):
    """Fetch an :class:`Item` by its ID; `None` if no match is
    found.
    """
    return self._get(Item, id)

def get_album(self, item_or_id):
    """Resolve an album from an album ID or from an item belonging
    to it.

    Returns the :class:`Album`, or `None` when the item is a
    singleton or no such album exists.
    """
    if isinstance(item_or_id, int):
        album_id = item_or_id
    else:
        album_id = item_or_id.album_id

    return None if album_id is None else self._get(Album, album_id)
# Default path template resources.
def _int_arg(s):
"""Convert a string argument to an integer for use in a template
function. May raise a ValueError.
"""
return int(s.strip())
class DefaultTemplateFunctions(object):
    """A container class for the default functions provided to path
    templates. These functions are contained in an object to provide
    additional context to the functions -- specifically, the Item being
    evaluated.
    """
    # Method-name prefix identifying template functions. (A bytes
    # literal: under Python 2 ``bytes`` is ``str``, so slicing the
    # names returned by dir() with its length works.)
    _prefix = b'tmpl_'

    def __init__(self, item=None, lib=None):
        """Parametrize the functions. If `item` or `lib` is None, then
        some functions (namely, ``aunique``) will always evaluate to the
        empty string.
        """
        self.item = item
        self.lib = lib

    def functions(self):
        """Returns a dictionary containing the functions defined in this
        object. The keys are function names (as exposed in templates)
        and the values are Python functions.
        """
        out = {}
        # `_func_names` is attached to the class after its definition,
        # below.
        for key in self._func_names:
            out[key[len(self._prefix):]] = getattr(self, key)
        return out
@staticmethod
def tmpl_lower(s):
    """Convert a string to lower case."""
    return s.lower()

@staticmethod
def tmpl_upper(s):
    """Convert a string to upper case."""
    return s.upper()

@staticmethod
def tmpl_title(s):
    """Convert a string to title case."""
    return s.title()
@staticmethod
def tmpl_left(s, chars):
    """Get the leftmost characters of a string.

    `chars` is parsed as an integer and may raise ValueError.
    """
    return s[0:_int_arg(chars)]

@staticmethod
def tmpl_right(s, chars):
    """Get the rightmost characters of a string.

    `chars` is parsed as an integer and may raise ValueError.
    """
    return s[-_int_arg(chars):]
@staticmethod
def tmpl_if(condition, trueval, falseval=u''):
    """If ``condition`` is nonempty and nonzero, emit ``trueval``;
    otherwise, emit ``falseval`` (if provided).
    """
    try:
        int_condition = _int_arg(condition)
    except ValueError:
        # Not numeric. The literal string "false" selects falseval;
        # any other string deliberately falls through to the check
        # below, where its ordinary truthiness (non-emptiness)
        # decides.
        if condition.lower() == "false":
            return falseval
    else:
        condition = int_condition

    if condition:
        return trueval
    else:
        return falseval
@staticmethod
def tmpl_asciify(s):
    """Translate non-ASCII characters to their ASCII equivalents.
    """
    return unidecode(s)

@staticmethod
def tmpl_time(s, fmt):
    """Format a time value using `strftime`.

    `s` is parsed with the configured ``time_format`` and then
    re-rendered with `fmt`.
    """
    cur_fmt = beets.config['time_format'].get(unicode)
    return time.strftime(fmt, time.strptime(s, cur_fmt))
def tmpl_aunique(self, keys=None, disam=None):
    """Generate a string that is guaranteed to be unique among all
    albums in the library who share the same set of keys. A field
    from "disam" is used in the string if one is sufficient to
    disambiguate the albums. Otherwise, a fallback opaque value is
    used. Both "keys" and "disam" should be given as
    whitespace-separated lists of field names.
    """
    # Fast paths: no album, no item or library, or memoized value.
    if not self.item or not self.lib:
        return u''
    if self.item.album_id is None:
        return u''
    memokey = ('aunique', keys, disam, self.item.album_id)
    memoval = self.lib._memotable.get(memokey)
    if memoval is not None:
        return memoval

    keys = keys or 'albumartist album'
    disam = disam or 'albumtype year label catalognum albumdisambig'
    keys = keys.split()
    disam = disam.split()

    album = self.lib.get_album(self.item)
    if not album:
        # Do nothing for singletons.
        self.lib._memotable[memokey] = u''
        return u''

    # Find matching albums to disambiguate with.
    subqueries = []
    for key in keys:
        value = album.get(key, '')
        subqueries.append(dbcore.MatchQuery(key, value))
    albums = self.lib.albums(dbcore.AndQuery(subqueries))

    # If there's only one album matching these details, then do
    # nothing.
    if len(albums) == 1:
        self.lib._memotable[memokey] = u''
        return u''

    # Find the first disambiguator that distinguishes the albums.
    for disambiguator in disam:
        # Get the value for each album for the current field.
        disam_values = set([a.get(disambiguator, '') for a in albums])

        # If the set of unique values is equal to the number of
        # albums in the disambiguation set, we're done -- this is
        # sufficient disambiguation.
        if len(disam_values) == len(albums):
            break
    else:
        # No disambiguator distinguished all fields; fall back to the
        # opaque album id.
        res = u' {0}'.format(album.id)
        self.lib._memotable[memokey] = res
        return res

    # Flatten disambiguation value into a string.
    disam_value = album.formatted(True).get(disambiguator)
    res = u' [{0}]'.format(disam_value)
    self.lib._memotable[memokey] = res
    return res
# Get the name of tmpl_* functions in the above class.
DefaultTemplateFunctions._func_names = \
[s for s in dir(DefaultTemplateFunctions)
if s.startswith(DefaultTemplateFunctions._prefix)]
| 33.627717 | 79 | 0.589475 |
from __future__ import (division, absolute_import, print_function,
unicode_literals)
import os
import sys
import unicodedata
import time
import re
from unidecode import unidecode
from beets import logging
from beets.mediafile import MediaFile, MutagenError, UnreadableFileError
from beets import plugins
from beets import util
from beets.util import bytestring_path, syspath, normpath, samefile
from beets.util.functemplate import Template
from beets import dbcore
from beets.dbcore import types
import beets
log = logging.getLogger('beets')
class PathQuery(dbcore.FieldQuery):
    """A query that matches items under a given filesystem path: the
    path itself (a file) or any path beneath it (a directory prefix).
    """
    # Characters that must be escaped in a SQL LIKE pattern.
    escape_re = re.compile(r'[\\_%]')
    escape_char = b'\\'

    def __init__(self, field, pattern, fast=True, case_sensitive=None):
        """`case_sensitive` defaults to the detected case sensitivity
        of the filesystem containing `pattern`.
        """
        super(PathQuery, self).__init__(field, pattern, fast)

        if case_sensitive is None:
            path = util.bytestring_path(util.normpath(pattern))
            case_sensitive = beets.util.case_sensitive(path)
        self.case_sensitive = case_sensitive

        # On case-insensitive filesystems, compare lower-cased paths.
        if not case_sensitive:
            pattern = pattern.lower()

        # file_path matches the file itself; dir_path (ending in a
        # separator, via the empty join) matches anything inside it.
        self.file_path = util.bytestring_path(util.normpath(pattern))
        self.dir_path = util.bytestring_path(os.path.join(self.file_path, b''))

    @classmethod
    def is_path_query(cls, query_part):
        """Heuristically decide whether `query_part` should be treated
        as a path query: it contains a path separator and names an
        existing filesystem path.
        """
        # Consider only the text before any field-separating colon.
        colon = query_part.find(':')
        if colon != -1:
            query_part = query_part[:colon]
        return (os.sep in query_part and
                os.path.exists(syspath(normpath(query_part))))

    def match(self, item):
        # Equality (exact file) or prefix (directory containment).
        path = item.path if self.case_sensitive else item.path.lower()
        return (path == self.file_path) or path.startswith(self.dir_path)

    def col_clause(self):
        """Return the SQL clause and substitution values for a fast
        database-side match.
        """
        if self.case_sensitive:
            # Exact byte comparison: equality or substr() prefix.
            # NOTE(review): `||` is string concatenation in SQLite, not
            # logical OR; this appears to rely on the 0/1 operands
            # concatenating to a nonzero value -- confirm, or use OR.
            file_blob = buffer(self.file_path)
            dir_blob = buffer(self.dir_path)
            return '({0} = ?) || (substr({0}, 1, ?) = ?)'.format(self.field), \
                   (file_blob, len(dir_blob), dir_blob)

        # Case-insensitive: use LIKE, escaping its wildcards in the
        # stored path patterns.
        escape = lambda m: self.escape_char + m.group(0)
        dir_pattern = self.escape_re.sub(escape, self.dir_path)
        dir_blob = buffer(dir_pattern + b'%')
        file_pattern = self.escape_re.sub(escape, self.file_path)
        file_blob = buffer(file_pattern)
        return '({0} LIKE ? ESCAPE ?) || ({0} LIKE ? ESCAPE ?)'.format(
            self.field), (file_blob, self.escape_char, dir_blob,
                          self.escape_char)
class DateType(types.Float):
    """A model field type that stores timestamps as floats and renders
    them with the configured ``time_format``.
    """
    query = dbcore.query.DateQuery

    def format(self, value):
        # A missing value is rendered as the epoch (0).
        return time.strftime(beets.config['time_format'].get(unicode),
                             time.localtime(value or 0))

    def parse(self, string):
        try:
            # First try the configured human-readable format...
            return time.mktime(
                time.strptime(string, beets.config['time_format'].get(unicode))
            )
        except ValueError:
            try:
                # ...then fall back to a raw numeric timestamp.
                return float(string)
            except ValueError:
                return self.null
class PathType(types.Type):
    """A model field type for filesystem paths: stored as SQLite BLOBs
    and handled as bytestrings in Python.
    """
    sql = u'BLOB'
    query = PathQuery
    model_type = bytes

    def format(self, value):
        return util.displayable_path(value)

    def parse(self, string):
        return normpath(bytestring_path(string))

    def normalize(self, value):
        if isinstance(value, unicode):
            # Paths are stored internally as encoded bytestrings.
            return bytestring_path(value)
        elif isinstance(value, buffer):
            # SQLite BLOBs arrive as `buffer` objects under Python 2.
            return bytes(value)
        else:
            return value

    def from_sql(self, sql_value):
        return self.normalize(sql_value)

    def to_sql(self, value):
        # Wrap bytes in a buffer so SQLite stores them as a BLOB.
        if isinstance(value, bytes):
            value = buffer(value)
        return value
class MusicalKey(types.String):
    """String type for ``initial_key``: normalizes enharmonic flat
    spellings to sharps and abbreviates "minor"/"major" suffixes.
    """
    ENHARMONIC = {
        r'db': 'c#',
        r'eb': 'd#',
        r'gb': 'f#',
        r'ab': 'g#',
        r'bb': 'a#',
    }

    def parse(self, key):
        key = key.lower()
        # NOTE(review): the flat names are applied as regexes, so they
        # match anywhere in the string, not only as the root note --
        # confirm this is intended.
        for flat, sharp in self.ENHARMONIC.items():
            key = re.sub(flat, sharp, key)
        key = re.sub(r'[\W\s]+minor', 'm', key)
        key = re.sub(r'[\W\s]+major', '', key)
        return key.capitalize()

    def normalize(self, key):
        if key is None:
            return None
        else:
            return self.parse(key)
class DurationType(types.Float):
    """Float type for track lengths, formatted as a human-readable
    string unless ``format_raw_length`` is enabled.
    """
    query = dbcore.query.DurationQuery

    def format(self, value):
        if not beets.config['format_raw_length'].get(bool):
            return beets.ui.human_seconds_short(value or 0.0)
        else:
            return value

    def parse(self, string):
        try:
            # Accept an M:SS-style string first...
            return util.raw_seconds_short(string)
        except ValueError:
            try:
                # ...then a plain number of seconds.
                return float(string)
            except ValueError:
                return self.null
class SmartArtistSort(dbcore.query.Sort):
    """Sort by artist (or albumartist for albums), preferring the
    ``*_sort`` field when it is non-empty.
    """
    def __init__(self, model_cls, ascending=True, case_insensitive=True):
        self.album = model_cls is Album
        self.ascending = ascending
        self.case_insensitive = case_insensitive

    def order_clause(self):
        order = "ASC" if self.ascending else "DESC"
        field = 'albumartist' if self.album else 'artist'
        collate = 'COLLATE NOCASE' if self.case_insensitive else ''
        # Fall back to the plain field when the sort field is NULL or
        # empty.
        return ('(CASE {0}_sort WHEN NULL THEN {0} '
                'WHEN "" THEN {0} '
                'ELSE {0}_sort END) {1} {2}').format(field, collate, order)

    def sort(self, objs):
        # Slow path: the same fallback logic, in Python.
        if self.album:
            field = lambda a: a.albumartist_sort or a.albumartist
        else:
            field = lambda i: i.artist_sort or i.artist

        if self.case_insensitive:
            key = lambda x: field(x).lower()
        else:
            key = field
        return sorted(objs, key=key, reverse=not self.ascending)
PF_KEY_DEFAULT = 'default'
class FileOperationError(Exception):
    """An error encountered while operating on an item's file.

    `path` is the file involved and `reason` the underlying exception.
    """
    def __init__(self, path, reason):
        super(FileOperationError, self).__init__(path, reason)
        self.path = path
        self.reason = reason

    def __unicode__(self):
        # "<displayable path>: <reason>"
        return u'{0}: {1}'.format(
            util.displayable_path(self.path),
            unicode(self.reason)
        )

    def __str__(self):
        return unicode(self).encode('utf8')
class ReadError(FileOperationError):
    """An error while reading a file (i.e. in `Item.read`)."""
    def __unicode__(self):
        return u'error reading ' + super(ReadError, self).__unicode__()


class WriteError(FileOperationError):
    """An error while writing a file (i.e. in `Item.write`)."""
    def __unicode__(self):
        return u'error writing ' + super(WriteError, self).__unicode__()
class LibModel(dbcore.Model):
    """Shared functionality for library models (items and albums):
    plugin change notifications and string formatting via templates.
    """
    # Config key naming the model's default format; set by subclasses.
    _format_config_key = None

    def _template_funcs(self):
        funcs = DefaultTemplateFunctions(self, self._db).functions()
        funcs.update(plugins.template_funcs())
        return funcs

    def store(self):
        super(LibModel, self).store()
        plugins.send('database_change', lib=self._db, model=self)

    def remove(self):
        super(LibModel, self).remove()
        plugins.send('database_change', lib=self._db, model=self)

    def add(self, lib=None):
        super(LibModel, self).add(lib)
        plugins.send('database_change', lib=self._db, model=self)

    def __format__(self, spec):
        # An empty spec means: use the configured default template.
        if not spec:
            spec = beets.config[self._format_config_key].get(unicode)
        result = self.evaluate_template(spec)
        if isinstance(spec, bytes):
            return result.encode('utf8')
        else:
            return result

    def __str__(self):
        return format(self).encode('utf8')

    def __unicode__(self):
        return format(self)
class FormattedItemMapping(dbcore.db.FormattedMapping):
    """Template mapping for an item that also exposes the fields of
    the item's album, when it has one.
    """
    def __init__(self, item, for_path=False):
        super(FormattedItemMapping, self).__init__(item, for_path)
        self.album = item.get_album()
        self.album_keys = []
        if self.album:
            for key in self.album.keys(True):
                # Expose an album field when it is one of the shared
                # item_keys or has no item-level counterpart.
                if key in Album.item_keys or key not in item._fields.keys():
                    self.album_keys.append(key)
        self.all_keys = set(self.model_keys).union(self.album_keys)

    def _get(self, key):
        """Fetch `key` from the album or the item; raise KeyError for
        keys neither provides.
        """
        # When formatting for a path, prefer album-level values so all
        # of an album's items share the same path components.
        if self.for_path and key in self.album_keys:
            return self._get_formatted(self.album, key)
        elif key in self.model_keys:
            return self._get_formatted(self.model, key)
        elif key in self.album_keys:
            return self._get_formatted(self.album, key)
        else:
            raise KeyError(key)

    def __getitem__(self, key):
        # `artist` and `albumartist` fall back to one another when one
        # of them is empty.
        value = self._get(key)
        if key == 'artist' and not value:
            return self._get('albumartist')
        elif key == 'albumartist' and not value:
            return self._get('artist')
        else:
            return value

    def __iter__(self):
        return iter(self.all_keys)

    def __len__(self):
        return len(self.all_keys)
class Item(LibModel):
_table = 'items'
_flex_table = 'item_attributes'
_fields = {
'id': types.PRIMARY_ID,
'path': PathType(),
'album_id': types.FOREIGN_ID,
'title': types.STRING,
'artist': types.STRING,
'artist_sort': types.STRING,
'artist_credit': types.STRING,
'album': types.STRING,
'albumartist': types.STRING,
'albumartist_sort': types.STRING,
'albumartist_credit': types.STRING,
'genre': types.STRING,
'composer': types.STRING,
'grouping': types.STRING,
'year': types.PaddedInt(4),
'month': types.PaddedInt(2),
'day': types.PaddedInt(2),
'track': types.PaddedInt(2),
'tracktotal': types.PaddedInt(2),
'disc': types.PaddedInt(2),
'disctotal': types.PaddedInt(2),
'lyrics': types.STRING,
'comments': types.STRING,
'bpm': types.INTEGER,
'comp': types.BOOLEAN,
'mb_trackid': types.STRING,
'mb_albumid': types.STRING,
'mb_artistid': types.STRING,
'mb_albumartistid': types.STRING,
'albumtype': types.STRING,
'label': types.STRING,
'acoustid_fingerprint': types.STRING,
'acoustid_id': types.STRING,
'mb_releasegroupid': types.STRING,
'asin': types.STRING,
'catalognum': types.STRING,
'script': types.STRING,
'language': types.STRING,
'country': types.STRING,
'albumstatus': types.STRING,
'media': types.STRING,
'albumdisambig': types.STRING,
'disctitle': types.STRING,
'encoder': types.STRING,
'rg_track_gain': types.NULL_FLOAT,
'rg_track_peak': types.NULL_FLOAT,
'rg_album_gain': types.NULL_FLOAT,
'rg_album_peak': types.NULL_FLOAT,
'original_year': types.PaddedInt(4),
'original_month': types.PaddedInt(2),
'original_day': types.PaddedInt(2),
'initial_key': MusicalKey(),
'length': DurationType(),
'bitrate': types.ScaledInt(1000, u'kbps'),
'format': types.STRING,
'samplerate': types.ScaledInt(1000, u'kHz'),
'bitdepth': types.INTEGER,
'channels': types.INTEGER,
'mtime': DateType(),
'added': DateType(),
}
_search_fields = ('artist', 'title', 'comments',
'album', 'albumartist', 'genre')
_types = {
'data_source': types.STRING,
}
_media_fields = set(MediaFile.readable_fields()) \
.intersection(_fields.keys())
_media_tag_fields = set(MediaFile.fields()).intersection(_fields.keys())
_formatter = FormattedItemMapping
_sorts = {'artist': SmartArtistSort}
_format_config_key = 'format_item'
@classmethod
def _getters(cls):
    """Return the computed-field getters: plugin-provided fields plus
    `singleton` and `filesize`.
    """
    getters = plugins.item_field_getters()
    getters['singleton'] = lambda i: i.album_id is None
    getters['filesize'] = Item.try_filesize
    return getters

@classmethod
def from_path(cls, path):
    """Create a new item, reading its metadata from the file at
    `path`.
    """
    i = cls(album_id=None)
    i.read(path)
    i.mtime = i.current_mtime()  # Initial mtime.
    return i

def __setitem__(self, key, value):
    """Set a field or flexible attribute on the item."""
    # Encode unicode paths and read buffers.
    if key == 'path':
        if isinstance(value, unicode):
            value = bytestring_path(value)
        elif isinstance(value, buffer):
            value = bytes(value)

    # Dirtying a tag field invalidates the on-disk sync marker.
    if key in MediaFile.fields():
        self.mtime = 0

    super(Item, self).__setitem__(key, value)

def update(self, values):
    """Set all key/value pairs from `values`. An explicit `mtime` in
    `values` survives the reset performed by `__setitem__`.
    """
    super(Item, self).update(values)
    if self.mtime == 0 and 'mtime' in values:
        self.mtime = values['mtime']

def get_album(self):
    """Return the item's :class:`Album`, or `None` when the item is
    not associated with a library or album.
    """
    if not self._db:
        return None
    return self._db.get_album(self)
# Interaction with file metadata.
def read(self, read_path=None):
    """Read the item's metadata from its media file, defaulting to
    `self.path`. Raises `ReadError` when the file cannot be read.
    """
    if read_path is None:
        read_path = self.path
    else:
        read_path = normpath(read_path)
    try:
        mediafile = MediaFile(syspath(read_path))
    except (OSError, IOError, UnreadableFileError) as exc:
        raise ReadError(read_path, exc)

    for key in self._media_fields:
        value = getattr(mediafile, key)
        if isinstance(value, (int, long)):
            # Clamp values too wide for a (signed 64-bit) SQLite
            # INTEGER column.
            if value.bit_length() > 63:
                value = 0
        self[key] = value

    # Database's mtime should now reflect the on-disk value.
    if read_path == self.path:
        self.mtime = self.current_mtime()

    self.path = read_path

def write(self, path=None, tags=None):
    """Write the item's tags to its media file (or to `path`), with
    `tags` overriding individual field values. Raises `ReadError` or
    `WriteError` on failure.
    """
    if path is None:
        path = self.path
    else:
        path = normpath(path)

    # Restrict the written tags to the media fields; apply overrides.
    item_tags = dict(self)
    item_tags = {k: v for k, v in item_tags.items()
                 if k in self._media_fields}
    if tags is not None:
        item_tags.update(tags)
    plugins.send('write', item=self, path=path, tags=item_tags)

    try:
        mediafile = MediaFile(syspath(path),
                              id3v23=beets.config['id3v23'].get(bool))
    except (OSError, IOError, UnreadableFileError) as exc:
        raise ReadError(self.path, exc)
    mediafile.update(item_tags)
    try:
        mediafile.save()
    except (OSError, IOError, MutagenError) as exc:
        raise WriteError(self.path, exc)

    # The file is now in sync with the database.
    if path == self.path:
        self.mtime = self.current_mtime()
    plugins.send('after_write', item=self, path=path)

def try_write(self, path=None, tags=None):
    """Like `write`, but log errors and return a success flag rather
    than raising.
    """
    try:
        self.write(path, tags)
        return True
    except FileOperationError as exc:
        log.error("{0}", exc)
        return False

def try_sync(self, write, move, with_album=True):
    """Synchronize the item: optionally write tags (`write`), move
    the file when it lives under the library directory (`move`), and
    always store to the database.
    """
    if write:
        self.try_write()
    if move:
        # Only move the file when it is inside the library directory.
        if self._db and self._db.directory in util.ancestry(self.path):
            log.debug('moving {0} to synchronize path',
                      util.displayable_path(self.path))
            self.move(with_album=with_album)
    self.store()
def move_file(self, dest, copy=False, link=False):
    """Move, copy, or symlink the item's file to `dest`, sending the
    corresponding plugin events and updating `self.path`. When a
    different file already exists at `dest`, a unique variant of the
    destination name is used.
    """
    if not util.samefile(self.path, dest):
        dest = util.unique_path(dest)
    if copy:
        util.copy(self.path, dest)
        plugins.send("item_copied", item=self, source=self.path,
                     destination=dest)
    elif link:
        util.link(self.path, dest)
        plugins.send("item_linked", item=self, source=self.path,
                     destination=dest)
    else:
        # The "before" event fires prior to the filesystem move.
        plugins.send("before_item_moved", item=self, source=self.path,
                     destination=dest)
        util.move(self.path, dest)
        plugins.send("item_moved", item=self, source=self.path,
                     destination=dest)

    # The operation succeeded, so record the new location.
    self.path = dest
def current_mtime(self):
    """Return the file's current modification time, truncated to an
    integer number of seconds.
    """
    return int(os.path.getmtime(syspath(self.path)))

def try_filesize(self):
    """Return the size of the underlying file in bytes, or 0 (with a
    logged warning) when it cannot be determined.
    """
    try:
        return os.path.getsize(syspath(self.path))
    except Exception as exc:
        # ``Exception`` already subsumes ``OSError``; the original
        # ``(OSError, Exception)`` tuple was redundant. Behavior is
        # unchanged: every Exception is still swallowed here.
        log.warning(u'could not get filesize: {0}', exc)
        return 0
    def remove(self, delete=False, with_album=True):
        """Remove the item from the database.

        If `delete` is true, also remove the file from disk and prune
        directories left empty.  If `with_album` is true and this was
        the album's last item, remove the (now empty) album record too.
        """
        super(Item, self).remove()
        # Remove the album if it is now empty.
        if with_album:
            album = self.get_album()
            if album and not album.items():
                album.remove(delete, False)
        # Notify plugins before the file is (possibly) deleted below.
        plugins.send('item_removed', item=self)
        if delete:
            util.remove(self.path)
            util.prune_dirs(os.path.dirname(self.path), self._db.directory)
        # Cached template results may now be stale.
        self._db._memotable = {}
    def move(self, copy=False, link=False, basedir=None, with_album=True):
        """Move the item's file to its computed library destination and
        store the new path in the database.

        `copy` and `link` select copying or symlinking instead of a
        plain move.  `basedir` overrides the library directory as the
        destination root.  If `with_album` is true, the item's album
        (art and record) is moved/stored along with it.  After a plain
        move, directories left empty at the source are pruned.
        """
        self._check_db()
        dest = self.destination(basedir=basedir)
        # Create the necessary ancestry for the destination.
        util.mkdirall(dest)
        old_path = self.path
        self.move_file(dest, copy, link)
        self.store()
        # If this item is in an album, move its art along with it.
        if with_album:
            album = self.get_album()
            if album:
                album.move_art(copy)
                album.store()
        # Prune vacated directories (only meaningful after a real move).
        if not copy:
            util.prune_dirs(os.path.dirname(old_path), self._db.directory)
    def destination(self, fragment=False, basedir=None, platform=None,
                    path_formats=None):
        """Return the path in the library directory where this item
        should reside, based on the configured path formats.

        When `fragment` is true, return only the relative path fragment;
        otherwise return a full normalized path rooted at `basedir` (the
        library directory by default).  `platform` and `path_formats`
        default to the current platform and the library's configured
        formats; they exist mainly for testing.
        """
        self._check_db()
        platform = platform or sys.platform
        basedir = basedir or self._db.directory
        path_formats = path_formats or self._db.path_formats
        # Pick the first path format whose query matches this item...
        for query, path_format in path_formats:
            if query == PF_KEY_DEFAULT:
                continue
            query, _ = parse_query_string(query, type(self))
            if query.match(self):
                # The query matches; use the corresponding path format.
                break
        else:
            # ...falling back on the default format.
            for query, path_format in path_formats:
                if query == PF_KEY_DEFAULT:
                    break
            else:
                assert False, "no default path format"
        if isinstance(path_format, Template):
            subpath_tmpl = path_format
        else:
            subpath_tmpl = Template(path_format)
        # Evaluate the selected template against this item.
        subpath = self.evaluate_template(subpath_tmpl, True)
        # Normalize Unicode: NFD on macOS (HFS+ convention), NFC elsewhere.
        if platform == 'darwin':
            subpath = unicodedata.normalize('NFD', subpath)
        else:
            subpath = unicodedata.normalize('NFC', subpath)
        if beets.config['asciify_paths']:
            subpath = unidecode(subpath)
        # Truncate and sanitize components to the filesystem's limits.
        maxlen = beets.config['max_filename_length'].get(int)
        if not maxlen:
            # A zero setting means: determine the limit from the filesystem.
            maxlen = util.max_filename_length(self._db.directory)
        subpath, fellback = util.legalize_path(
            subpath, self._db.replacements, maxlen,
            os.path.splitext(self.path)[1], fragment
        )
        if fellback:
            log.warning('Fell back to default replacements when naming '
                        'file {}. Configure replacements to avoid lengthening '
                        'the filename.', subpath)
        if fragment:
            return subpath
        else:
            return normpath(os.path.join(basedir, subpath))
class Album(LibModel):
    """Provides access to information about albums stored in the
    library, reflecting the ``albums`` database table (including album
    art).
    """
    _table = 'albums'
    _flex_table = 'album_attributes'
    _always_dirty = True
    # Database fields and their types.
    _fields = {
        'id': types.PRIMARY_ID,
        'artpath': PathType(),
        'added': DateType(),
        'albumartist': types.STRING,
        'albumartist_sort': types.STRING,
        'albumartist_credit': types.STRING,
        'album': types.STRING,
        'genre': types.STRING,
        'year': types.PaddedInt(4),
        'month': types.PaddedInt(2),
        'day': types.PaddedInt(2),
        'disctotal': types.PaddedInt(2),
        'comp': types.BOOLEAN,
        'mb_albumid': types.STRING,
        'mb_albumartistid': types.STRING,
        'albumtype': types.STRING,
        'label': types.STRING,
        'mb_releasegroupid': types.STRING,
        'asin': types.STRING,
        'catalognum': types.STRING,
        'script': types.STRING,
        'language': types.STRING,
        'country': types.STRING,
        'albumstatus': types.STRING,
        'albumdisambig': types.STRING,
        'rg_album_gain': types.NULL_FLOAT,
        'rg_album_peak': types.NULL_FLOAT,
        'original_year': types.PaddedInt(4),
        'original_month': types.PaddedInt(2),
        'original_day': types.PaddedInt(2),
    }
    # Fields searched by default when no field is given in a query.
    _search_fields = ('album', 'albumartist', 'genre')
    # Types for computed/flexible fields.
    _types = {
        'path': PathType(),
        'data_source': types.STRING,
    }
    _sorts = {
        'albumartist': SmartArtistSort,
        'artist': SmartArtistSort,
    }
    # Album-level fields that are mirrored onto the album's items; when
    # one of these changes, `store()` propagates it to every item.
    item_keys = [
        'added',
        'albumartist',
        'albumartist_sort',
        'albumartist_credit',
        'album',
        'genre',
        'year',
        'month',
        'day',
        'disctotal',
        'comp',
        'mb_albumid',
        'mb_albumartistid',
        'albumtype',
        'label',
        'mb_releasegroupid',
        'asin',
        'catalognum',
        'script',
        'language',
        'country',
        'albumstatus',
        'albumdisambig',
        'rg_album_gain',
        'rg_album_peak',
        'original_year',
        'original_month',
        'original_day',
    ]
    _format_config_key = 'format_album'
    @classmethod
    def _getters(cls):
        """Return a mapping from field names to getter functions,
        including plugin-provided getters and the computed ``path`` and
        ``albumtotal`` fields.
        """
        getters = plugins.album_field_getters()
        getters['path'] = Album.item_dir
        getters['albumtotal'] = Album._albumtotal
        return getters
    def items(self):
        """Return an iterable over the items associated with this
        album.
        """
        return self._db.items(dbcore.MatchQuery('album_id', self.id))
    def remove(self, delete=False, with_items=True):
        """Remove this album from the library.

        If `delete` is true, the album's art file (and, via the items,
        their files) are removed from disk as well.  If `with_items` is
        true, the constituent items are removed from the library too.
        """
        super(Album, self).remove()
        # Delete art file.
        if delete:
            artpath = self.artpath
            if artpath:
                util.remove(artpath)
        # Remove (and possibly delete) the constituent items.
        if with_items:
            for item in self.items():
                item.remove(delete, False)
    def move_art(self, copy=False, link=False):
        """Move, copy, or symlink any existing album art so it remains
        in the same directory as the album's items.  A no-op when the
        album has no art or the art is already in place.
        """
        old_art = self.artpath
        if not old_art:
            return
        new_art = self.art_destination(old_art)
        if new_art == old_art:
            return
        new_art = util.unique_path(new_art)
        log.debug(u'moving album art {0} to {1}',
                  util.displayable_path(old_art),
                  util.displayable_path(new_art))
        if copy:
            util.copy(old_art, new_art)
        elif link:
            util.link(old_art, new_art)
        else:
            util.move(old_art, new_art)
        self.artpath = new_art
        # Prune old path when moving.
        if not copy:
            util.prune_dirs(os.path.dirname(old_art),
                            self._db.directory)
    def move(self, copy=False, link=False, basedir=None):
        """Move, copy, or symlink all of the album's items to their
        destinations, bringing the album art along.  `basedir` overrides
        the library directory as the destination root.
        """
        basedir = basedir or self._db.directory
        # Ensure new metadata is available to items for destination
        # computation.
        self.store()
        # Move items.
        items = list(self.items())
        for item in items:
            item.move(copy, link, basedir=basedir, with_album=False)
        # Move art.
        self.move_art(copy, link)
        self.store()
    def item_dir(self):
        """Return the directory containing the album's first item.
        Raises ValueError if the album has no items.
        """
        item = self.items().get()
        if not item:
            raise ValueError('empty album')
        return os.path.dirname(item.path)
    def _albumtotal(self):
        """Return the total number of tracks across all of the album's
        discs (the computed ``albumtotal`` field).
        """
        if self.disctotal == 1 or not beets.config['per_disc_numbering']:
            return self.items()[0].tracktotal
        counted = []
        total = 0
        # With per-disc numbering, `tracktotal` is per disc: sum it once
        # per distinct disc number.
        for item in self.items():
            if item.disc in counted:
                continue
            total += item.tracktotal
            counted.append(item.disc)
            if len(counted) == self.disctotal:
                break
        return total
    def art_destination(self, image, item_dir=None):
        """Return the destination path for the album's art image.

        `image` is only used for its file extension.  The filename stem
        comes from the ``art_filename`` template; the directory is the
        album's item directory unless `item_dir` is given.
        """
        image = bytestring_path(image)
        item_dir = item_dir or self.item_dir()
        filename_tmpl = Template(beets.config['art_filename'].get(unicode))
        subpath = self.evaluate_template(filename_tmpl, True)
        if beets.config['asciify_paths']:
            subpath = unidecode(subpath)
        subpath = util.sanitize_path(subpath,
                                     replacements=self._db.replacements)
        subpath = bytestring_path(subpath)
        _, ext = os.path.splitext(image)
        dest = os.path.join(item_dir, subpath + ext)
        return bytestring_path(dest)
    def set_art(self, path, copy=True):
        """Set the album's cover art to the image at `path`, copying
        (or moving, when `copy` is false) it into place and replacing
        any existing art.  Sends the ``art_set`` plugin event.
        """
        path = bytestring_path(path)
        oldart = self.artpath
        artdest = self.art_destination(path)
        if oldart and samefile(path, oldart):
            # Art already set.
            return
        elif samefile(path, artdest):
            # Art already in place.
            self.artpath = path
            return
        # Normal operation: remove art that would be overwritten.
        if oldart == artdest:
            util.remove(oldart)
        artdest = util.unique_path(artdest)
        if copy:
            util.copy(path, artdest)
        else:
            util.move(path, artdest)
        self.artpath = artdest
        plugins.send('art_set', album=self)
    def store(self):
        """Update the database with the album information, propagating
        any changed album-level fields (see `item_keys`) to the album's
        items in the same transaction.
        """
        # Get modified track fields.
        track_updates = {}
        for key in self.item_keys:
            if key in self._dirty:
                track_updates[key] = self[key]
        with self._db.transaction():
            super(Album, self).store()
            if track_updates:
                for item in self.items():
                    for key, value in track_updates.items():
                        item[key] = value
                    item.store()
    def try_sync(self, write, move):
        """Synchronize the album and all its items with the database
        and, depending on `write`/`move`, with the files on disk.
        """
        self.store()
        for item in self.items():
            item.try_sync(write, move)
# Query construction helpers.
def parse_query_parts(parts, model_cls):
    """Given a beets query as a list of string components, return the
    `Query` and `Sort` objects they represent.

    Like `dbcore.parse_sorted_query`, but with beets' query prefixes
    and special handling of path-like components.
    """
    # Get query types and their prefix characters.
    prefixes = {':': dbcore.query.RegexpQuery}
    prefixes.update(plugins.queries())
    # Special-case path-like queries, which are non-field queries
    # containing path separators (/).
    path_parts = []
    non_path_parts = []
    for s in parts:
        if PathQuery.is_path_query(s):
            path_parts.append(s)
        else:
            non_path_parts.append(s)
    query, sort = dbcore.parse_sorted_query(
        model_cls, non_path_parts, prefixes
    )
    # Add path queries to aggregate query.
    # Match field / flexattr depending on whether the model has the path field
    fast_path_query = 'path' in model_cls._fields
    query.subqueries += [PathQuery('path', s, fast_path_query)
                         for s in path_parts]
    return query, sort
def parse_query_string(s, model_cls):
    """Given a beets query string, return the `Query` and `Sort` it
    represents.

    The string is split into components using shell-like syntax; a
    `dbcore.InvalidQueryError` is raised when splitting fails.
    """
    assert isinstance(s, unicode), "Query is not unicode: {0!r}".format(s)
    try:
        parts = util.shlex_split(s)
    except ValueError as exc:
        raise dbcore.InvalidQueryError(s, exc)
    return parse_query_parts(parts, model_cls)
# The Library: interface to the database.
class Library(dbcore.Database):
    """A database of music containing songs (`Item`) and albums
    (`Album`).
    """
    _models = (Item, Album)
    def __init__(self, path='library.blb',
                 directory='~/Music',
                 path_formats=((PF_KEY_DEFAULT,
                                '$artist/$album/$track $title'),),
                 replacements=None):
        """Create a library sitting at the database file `path`, with
        music files stored under `directory`.  `path_formats` maps
        queries to destination path templates and `replacements` lists
        filename character substitutions.
        """
        if path != ':memory:':
            self.path = bytestring_path(normpath(path))
        super(Library, self).__init__(path)
        self.directory = bytestring_path(normpath(directory))
        self.path_formats = path_formats
        self.replacements = replacements
        self._memotable = {}  # Used for template substitution performance.
    # Adding objects to the database.
    def add(self, obj):
        """Add the Item or Album object to the library database and
        return its new id.  Invalidates the template memo table.
        """
        obj.add(self)
        self._memotable = {}
        return obj.id
    def add_album(self, items):
        """Create a new album consisting of the given items (which must
        be non-empty), add everything to the database in a single
        transaction, and return the new `Album`.
        """
        if not items:
            raise ValueError(u'need at least one item')
        # Create the album structure using metadata from the first item.
        values = dict((key, items[0][key]) for key in Album.item_keys)
        album = Album(self, **values)
        # Add the album structure and set the items' album_id fields.
        with self.transaction():
            album.add(self)
            for item in items:
                item.album_id = album.id
                if item.id is None:
                    item.add(self)
                else:
                    item.store()
        return album
    def _fetch(self, model_cls, query, sort=None):
        """Parse a query (string or component list) and fetch matching
        models.  A sort order given inside the query string overrides
        the `sort` argument.
        """
        try:
            parsed_sort = None
            if isinstance(query, basestring):
                query, parsed_sort = parse_query_string(query, model_cls)
            elif isinstance(query, (list, tuple)):
                query, parsed_sort = parse_query_parts(query, model_cls)
        except dbcore.query.InvalidQueryArgumentTypeError as exc:
            raise dbcore.InvalidQueryError(query, exc)
        if parsed_sort and not isinstance(parsed_sort, dbcore.query.NullSort):
            sort = parsed_sort
        return super(Library, self)._fetch(
            model_cls, query, sort
        )
    @staticmethod
    def get_default_album_sort():
        """Return a `Sort` for albums from the ``sort_album`` config."""
        return dbcore.sort_from_strings(
            Album, beets.config['sort_album'].as_str_seq())
    @staticmethod
    def get_default_item_sort():
        """Return a `Sort` for items from the ``sort_item`` config."""
        return dbcore.sort_from_strings(
            Item, beets.config['sort_item'].as_str_seq())
    def albums(self, query=None, sort=None):
        """Get `Album` objects matching the query."""
        return self._fetch(Album, query, sort or self.get_default_album_sort())
    def items(self, query=None, sort=None):
        """Get `Item` objects matching the query."""
        return self._fetch(Item, query, sort or self.get_default_item_sort())
    def get_item(self, id):
        """Fetch an `Item` by its ID; return None if it does not
        exist.
        """
        return self._get(Item, id)
    def get_album(self, item_or_id):
        """Given an album ID or an item associated with an album,
        return the `Album` object, or None when the album does not
        exist or the item is a singleton.
        """
        if isinstance(item_or_id, int):
            album_id = item_or_id
        else:
            album_id = item_or_id.album_id
        if album_id is None:
            return None
        return self._get(Album, album_id)
def _int_arg(s):
    """Convert a string argument to an integer, ignoring surrounding
    whitespace.  Used for parsing template function arguments.
    """
    stripped = s.strip()
    return int(stripped)
class DefaultTemplateFunctions(object):
    """A container for the default functions provided to path
    templates.  The functions live on an object (rather than a plain
    module) so that item-dependent ones can reference the item and
    library being templated.
    """
    # Methods whose names start with this prefix are exposed to
    # templates (with the prefix stripped).
    # NOTE(review): the prefix is bytes while dir() yields str names;
    # this only matches under Python 2 (the module also uses `unicode`
    # and `basestring`) -- confirm before porting to Python 3.
    _prefix = b'tmpl_'
    def __init__(self, item=None, lib=None):
        """Parametrize the functions.  When `item` or `lib` is None,
        the item-dependent functions (e.g., ``aunique``) return empty
        strings.
        """
        self.item = item
        self.lib = lib
    def functions(self):
        """Return a dictionary containing the functions defined in this
        object, keyed by their template name (the method name minus the
        ``tmpl_`` prefix).
        """
        out = {}
        for key in self._func_names:
            out[key[len(self._prefix):]] = getattr(self, key)
        return out
    @staticmethod
    def tmpl_lower(s):
        """Convert a string to lower case."""
        return s.lower()
    @staticmethod
    def tmpl_upper(s):
        """Convert a string to upper case."""
        return s.upper()
    @staticmethod
    def tmpl_title(s):
        """Convert a string to title case."""
        return s.title()
    @staticmethod
    def tmpl_left(s, chars):
        """Get the leftmost `chars` characters of a string."""
        return s[0:_int_arg(chars)]
    @staticmethod
    def tmpl_right(s, chars):
        """Get the rightmost `chars` characters of a string."""
        return s[-_int_arg(chars):]
    @staticmethod
    def tmpl_if(condition, trueval, falseval=u''):
        """If `condition` is nonempty and nonzero, return `trueval`;
        otherwise return `falseval` (default: the empty string).
        """
        try:
            int_condition = _int_arg(condition)
        except ValueError:
            # Not numeric: only the literal string "false" is falsy; any
            # other non-empty string falls through as truthy below.
            if condition.lower() == "false":
                return falseval
        else:
            condition = int_condition
        if condition:
            return trueval
        else:
            return falseval
    @staticmethod
    def tmpl_asciify(s):
        """Translate non-ASCII characters to their ASCII equivalents."""
        return unidecode(s)
    @staticmethod
    def tmpl_time(s, fmt):
        """Format a time value (parsed with the configured
        ``time_format``) using `fmt`.
        """
        cur_fmt = beets.config['time_format'].get(unicode)
        return time.strftime(fmt, time.strptime(s, cur_fmt))
    def tmpl_aunique(self, keys=None, disam=None):
        """Generate a string that is guaranteed to be unique among all
        albums in the library that share the same set of `keys`.

        A field from `disam` is used (bracketed) when one suffices to
        disambiguate the albums; otherwise the album ID is used.
        Results are memoized per album in the library's memo table.
        """
        # Fast paths: no item/library context or a singleton item.
        if not self.item or not self.lib:
            return u''
        if self.item.album_id is None:
            return u''
        memokey = ('aunique', keys, disam, self.item.album_id)
        memoval = self.lib._memotable.get(memokey)
        if memoval is not None:
            return memoval
        keys = keys or 'albumartist album'
        disam = disam or 'albumtype year label catalognum albumdisambig'
        keys = keys.split()
        disam = disam.split()
        album = self.lib.get_album(self.item)
        if not album:
            # Do nothing for singletons.
            self.lib._memotable[memokey] = u''
            return u''
        # Find matching albums to disambiguate with.
        subqueries = []
        for key in keys:
            value = album.get(key, '')
            subqueries.append(dbcore.MatchQuery(key, value))
        albums = self.lib.albums(dbcore.AndQuery(subqueries))
        # If there's only one matching album, no disambiguation needed:
        # nothing.
        if len(albums) == 1:
            self.lib._memotable[memokey] = u''
            return u''
        # Find the first disambiguator that distinguishes the albums.
        for disambiguator in disam:
            # Get the value for each album for the current field.
            disam_values = set([a.get(disambiguator, '') for a in albums])
            # If the set of unique values is equal to the number of
            # albums in the disambiguation set, we're done -- this is a
            # sufficient disambiguator.
            if len(disam_values) == len(albums):
                break
        else:
            # No single field distinguished all albums; fall back on the
            # album ID.
            res = u' {0}'.format(album.id)
            self.lib._memotable[memokey] = res
            return res
        # Flatten the disambiguation value into a bracketed string.
        disam_value = album.formatted(True).get(disambiguator)
        res = u' [{0}]'.format(disam_value)
        self.lib._memotable[memokey] = res
        return res
# Collect the names of the tmpl_* methods defined on the class above.
# NOTE(review): `_prefix` is bytes; under Python 3, str.startswith(bytes)
# raises TypeError -- this module appears to target Python 2.
DefaultTemplateFunctions._func_names = \
    [s for s in dir(DefaultTemplateFunctions)
     if s.startswith(DefaultTemplateFunctions._prefix)]
| true | true |
1c2fad5561db6a64f83057a8feee7bb3000d869a | 2,138 | py | Python | grr/server/grr_response_server/serialize.py | nkrios/grr | 399e078ed522bf0555a2666fb086aa7809d54971 | [
"Apache-2.0"
] | null | null | null | grr/server/grr_response_server/serialize.py | nkrios/grr | 399e078ed522bf0555a2666fb086aa7809d54971 | [
"Apache-2.0"
] | null | null | null | grr/server/grr_response_server/serialize.py | nkrios/grr | 399e078ed522bf0555a2666fb086aa7809d54971 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""This module serializes AFF4 objects in various ways."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from future.utils import iteritems
from grr_response_core.lib import rdfvalue
from grr_response_core.lib.util import compatibility
from grr_response_core.lib.util.compat import yaml
from grr_response_server import aff4
def YamlDumper(aff4object):
  """Dumps the given aff4object into a yaml representation.

  The object is flushed first so pending attributes are synchronized.
  The result records the object's class name, URN, attributes (as
  [type name, serialized value, age] triples) and age policy.
  """
  aff4object.Flush()
  result = {}
  for attribute, values in iteritems(aff4object.synced_attributes):
    result[attribute.predicate] = []
    for value in values:
      # This value is really a LazyDecoder() instance. We need to get at the
      # real data here.
      value = value.ToRDFValue()
      result[attribute.predicate].append(
          [value.__class__.__name__,
           value.SerializeToString(),
           str(value.age)])
  return yaml.Dump({
      "aff4_class": compatibility.GetName(aff4object),
      "_urn": aff4object.urn.SerializeToString(),
      "attributes": result,
      "age_policy": aff4object.age_policy,
  })
def YamlLoader(string):
  """Load an AFF4 object from a serialized YAML representation.

  Reconstructs each attribute from its [type name, value, age] triple
  and returns a new, dirty instance of the recorded AFF4 class so that
  saving it writes the attributes back to the data store.
  """
  representation = yaml.Parse(string)
  result_cls = aff4.FACTORY.AFF4Object(representation["aff4_class"])
  aff4_attributes = {}
  for predicate, values in iteritems(representation["attributes"]):
    attribute = aff4.Attribute.PREDICATES[predicate]
    tmp = aff4_attributes[attribute] = []
    for rdfvalue_cls_name, value, age in values:
      rdfvalue_cls = aff4.FACTORY.RDFValue(rdfvalue_cls_name)
      value = rdfvalue_cls(value, age=rdfvalue.RDFDatetime(age))
      tmp.append(value)
  # Ensure the object is dirty so when we save it, it can be written to the data
  # store.
  result = result_cls(
      urn=representation["_urn"],
      clone=aff4_attributes,
      mode="rw",
      age=representation["age_policy"])
  # Move the cloned attributes to "new" so they are treated as unsaved.
  result.new_attributes, result.synced_attributes = result.synced_attributes, {}
  result._dirty = True  # pylint: disable=protected-access
  return result
| 31.441176 | 80 | 0.724509 |
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from future.utils import iteritems
from grr_response_core.lib import rdfvalue
from grr_response_core.lib.util import compatibility
from grr_response_core.lib.util.compat import yaml
from grr_response_server import aff4
def YamlDumper(aff4object):
  """Dump the given AFF4 object into a YAML representation."""
  aff4object.Flush()
  result = {}
  for attribute, values in iteritems(aff4object.synced_attributes):
    result[attribute.predicate] = []
    for value in values:
      # `value` is a LazyDecoder(); decode it to get the real RDFValue.
      value = value.ToRDFValue()
      result[attribute.predicate].append(
          [value.__class__.__name__,
           value.SerializeToString(),
           str(value.age)])
  return yaml.Dump({
      "aff4_class": compatibility.GetName(aff4object),
      "_urn": aff4object.urn.SerializeToString(),
      "attributes": result,
      "age_policy": aff4object.age_policy,
  })
def YamlLoader(string):
  """Load an AFF4 object from a serialized YAML representation."""
  representation = yaml.Parse(string)
  result_cls = aff4.FACTORY.AFF4Object(representation["aff4_class"])
  aff4_attributes = {}
  for predicate, values in iteritems(representation["attributes"]):
    attribute = aff4.Attribute.PREDICATES[predicate]
    tmp = aff4_attributes[attribute] = []
    for rdfvalue_cls_name, value, age in values:
      rdfvalue_cls = aff4.FACTORY.RDFValue(rdfvalue_cls_name)
      value = rdfvalue_cls(value, age=rdfvalue.RDFDatetime(age))
      tmp.append(value)
  # Mark the attributes as new/dirty so a later save writes them out.
  result = result_cls(
      urn=representation["_urn"],
      clone=aff4_attributes,
      mode="rw",
      age=representation["age_policy"])
  result.new_attributes, result.synced_attributes = result.synced_attributes, {}
  result._dirty = True
  return result
| true | true |
1c2fae7920b9bc1d34e0a50572315bd565c0af92 | 613 | py | Python | aoc_helper/data.py | Starwort/aoc_helper | 7ad39770e91d0f537b90baa4e3d65fa0e152e0f7 | [
"MIT"
] | 5 | 2020-12-06T17:14:17.000Z | 2021-12-09T19:54:44.000Z | aoc_helper/data.py | Starwort/aoc_helper | 7ad39770e91d0f537b90baa4e3d65fa0e152e0f7 | [
"MIT"
] | 8 | 2020-12-06T08:39:44.000Z | 2021-12-10T04:27:10.000Z | aoc_helper/data.py | Starwort/aoc_helper | 7ad39770e91d0f537b90baa4e3d65fa0e152e0f7 | [
"MIT"
] | 8 | 2021-12-02T04:00:50.000Z | 2022-01-26T20:07:55.000Z | import datetime
import pathlib
import re
# Directory used to cache the session token; created on first import.
DATA_DIR = pathlib.Path.home() / ".config" / "aoc_helper"
if not DATA_DIR.exists():
    DATA_DIR.mkdir(parents=True)
# Defaults for building puzzle URLs.
DEFAULT_YEAR = datetime.datetime.today().year
URL = "https://adventofcode.com/{year}/day/{day}"
# Matches the "left to wait" message with optional minutes and seconds.
WAIT_TIME = re.compile(r"You have (?:(\d+)m )?(\d+)s left to wait.")
def get_cookie():
    """Return the Advent of Code session cookie as a dict.

    Reads the token from the cached token file when present; otherwise
    prompts the user interactively and caches the entered token.
    """
    token_file = DATA_DIR / "token.txt"
    if token_file.exists():
        return {"session": token_file.read_text()}
    token = input("Could not find configuration file. Please enter your token\n>>> ")
    token_file.write_text(token)
    return {"session": token}
| 29.190476 | 85 | 0.683524 | import datetime
import pathlib
import re
DATA_DIR = pathlib.Path.home() / ".config" / "aoc_helper"
if not DATA_DIR.exists():
DATA_DIR.mkdir(parents=True)
DEFAULT_YEAR = datetime.datetime.today().year
URL = "https://adventofcode.com/{year}/day/{day}"
WAIT_TIME = re.compile(r"You have (?:(\d+)m )?(\d+)s left to wait.")
def get_cookie():
    """Return the cached (or interactively entered) session cookie."""
    token_file = DATA_DIR / "token.txt"
    if token_file.exists():
        return {"session": token_file.read_text()}
    token = input("Could not find configuration file. Please enter your token\n>>> ")
    token_file.write_text(token)
    return {"session": token}
| true | true |
1c2faf958ad3017ae6f9fa4391d3e37c36bec1cd | 11,445 | py | Python | pol/pol_stack_II_load_POL03.py | mlares/CMB_polarization | 936d17d0be81564dbae96d8aae0cb9f824f8a94d | [
"MIT"
] | null | null | null | pol/pol_stack_II_load_POL03.py | mlares/CMB_polarization | 936d17d0be81564dbae96d8aae0cb9f824f8a94d | [
"MIT"
] | null | null | null | pol/pol_stack_II_load_POL03.py | mlares/CMB_polarization | 936d17d0be81564dbae96d8aae0cb9f824f8a94d | [
"MIT"
] | null | null | null | import pickle
import numpy as np
import cmfg
from Parser import Parser
from math import pi
from astropy import units as u
import itertools
from sklearn.neighbors import NearestNeighbors
from matplotlib import pyplot as plt
from random import random
from matplotlib import colors, ticker, rc
class MidpointNormalize(colors.Normalize):
    """Color normalization with a fixed midpoint.

    Piecewise-linearly maps `vmin` -> 0, `midpoint` -> 0.5 and
    `vmax` -> 1, keeping diverging colormaps centered on `midpoint`
    (zero, in this script) even for asymmetric data ranges.
    """
    def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
        self.midpoint = midpoint
        colors.Normalize.__init__(self, vmin, vmax, clip)
    def __call__(self, value, clip=None):
        # I'm ignoring masked values and all kinds of edge cases to make a
        # simple example...
        x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
        return np.ma.masked_array(np.interp(value, x, y))
# Load the per-center stacked maps (T, Q, U, Q_r, U_r) produced by the
# POL03 pipeline run.
with open('../out/POL03/data_POL03.pk', 'rb') as arch:
    results = pickle.load(arch)
# All maps share the same (N, N) pixel grid.
N = results[0][0].shape[0]
Zt = np.zeros((N,N))
Zq = np.zeros((N,N))
Zu = np.zeros((N,N))
Zqr = np.zeros((N,N))
Zur = np.zeros((N,N))
Ncen = 0
# Accumulate the maps over all centers...
for r in results:
    Ncen += 1
    Zt = Zt + r[0]
    Zq = Zq + r[1]
    Zu = Zu + r[2]
    Zqr = Zqr + r[3]
    Zur = Zur + r[4]
del results
# ...then average over centers and scale by 1e6 (the axis labels below
# report the temperature map in micro-Kelvin).
Zt = Zt / Ncen * 1.e6
Zq = Zq / Ncen * 1.e6
Zu = Zu / Ncen * 1.e6
Zqr = Zqr / Ncen* 1.e6
Zur = Zur / Ncen* 1.e6
# Polarization amplitude and angle from the Stokes maps (both frames).
P = np.sqrt(Zq**2 + Zu**2)
alpha = np.arctan2(Zu, Zq) / 2
Pr = np.sqrt(Zqr**2 + Zur**2)
alphar = np.arctan2(Zur, Zqr) / 2
# ADDITIONAL DATA »»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»
# Re-load the experiment configuration to recover the stacking radius.
config = Parser('../set/POL03.ini')
X = cmfg.profile2d(config)
X.load_centers()
X.select_subsample_centers()
rmax = config.p.r_stop  # rad
rmax_deg = rmax.to(u.deg).value
print('rmax_deg ------> ', rmax_deg)
# COMPUTE RADIAL PROFILE
# NOTE(review): N is re-bound here to 120 (grid resolution); the code
# below indexes Zt with these indices, so this assumes the stacked maps
# also have N=120 pixels per side -- confirm.
N = 120
xr = np.linspace(-rmax_deg, rmax_deg, N)
yr = np.linspace(-rmax_deg, rmax_deg, N)
# Flat list of (i, j) pixel indices and their (x, y) coordinates.
idxs = itertools.product(range(N), range(N))
idxs = np.array(list(idxs))
G = itertools.product(xr, yr)
G = np.array(list(G))
# Nearest-neighbor index over the pixel grid, for interpolation below.
neigh = NearestNeighbors(n_neighbors=6, radius=0.01)
neigh.fit(G)
# --------
# Sample the temperature map on concentric rings: ring k has radius
# rr[k] and 4 + k sample points, with a random global rotation so the
# samples do not align with the pixel grid.
# NOTE(review): random() is unseeded, so the profile is not exactly
# reproducible between runs.
rr = np.linspace(0.02, 2.8, 100)
xpolar, ypolar = [], []
for k, r in enumerate(rr):
    nn = 4 + k
    tt = np.linspace(0, 2*pi, nn, endpoint=False)
    tt = tt + random()*2*pi
    x = r*np.cos(tt)
    y = r*np.sin(tt)
    xpolar.append(x)
    ypolar.append(y)
val_avg = []
# Average Zt over each ring, interpolating each sample point from its
# 3 nearest grid pixels with exponential distance weights.
for xp, yp in zip(xpolar, ypolar):
    vals = []
    for xx, yy in zip(xp, yp):
        dist, ind = neigh.kneighbors([[xx,yy]], 3, return_distance=True)
        dd = np.exp(-dist*25)
        dsts = dd.sum()
        zz = Zt[idxs[ind][0][:,0], idxs[ind][0][:,1]]
        vals.append(np.dot(dd, zz)/dsts)
    val_avg.append(np.mean(vals))
# PLOTS »»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»
def _save_map(data, fname, cmap, cblabel, norm=None, circle=False,
              fmt_ticks=False):
    """Render one stacked map as an image and save it to `fname`.

    `norm` is an optional color normalization, `circle` draws the white
    aperture circle of radius `rmax_deg`, and `fmt_ticks` applies the
    one-decimal x-tick formatter (used only for the temperature map,
    matching the original per-figure code).
    """
    plt.close('all')
    fig = plt.figure(figsize=(7, 5))
    ax = fig.add_subplot()
    sc = ax.imshow(data, cmap=cmap,
                   extent=[-rmax_deg, rmax_deg, -rmax_deg, rmax_deg],
                   norm=norm)
    if circle:
        ax.add_patch(plt.Circle((0, 0), rmax_deg, fc='None', linewidth=6,
                                color='white'))
    cb = plt.colorbar(sc, ax=ax, shrink=0.8, aspect=60)
    cb.set_label(cblabel)
    ax.set_xlabel('x [deg]')
    ax.set_ylabel('y [deg]')
    if fmt_ticks:
        ax.xaxis.set_major_formatter(ticker.StrMethodFormatter("{x:.1f}"))
        cb.update_ticks()
    plt.tight_layout()
    fig.savefig(fname)


def _stokes_norm(ref):
    """Fresh zero-centered normalization spanning `ref`'s value range."""
    return MidpointNormalize(vmin=ref.min(), vmax=ref.max(), midpoint=0.)


# Stacked temperature map.
_save_map(Zt, 'Zt_POL03.png', 'RdBu_r', r'averaged temperature [$\mu$K]',
          norm=MidpointNormalize(vmin=-15, vmax=15, midpoint=0.),
          circle=True, fmt_ticks=True)
# Stacked Stokes-parameter maps.  NOTE(review): as in the original
# per-figure code, the U and U_r panels reuse Zq's range for their color
# scale -- presumably to keep the panels comparable; confirm intent.
_save_map(Zq, 'Zq_POL03.png', 'bwr',
          r'$\times\; 10^{-6}\quad$ Q Stokes parameter',
          norm=_stokes_norm(Zq), circle=True)
_save_map(Zu, 'Zu_POL03.png', 'bwr',
          r'$\times\; 10^{-6}\quad$ U Stokes parameter',
          norm=_stokes_norm(Zq), circle=True)
_save_map(Zqr, 'Zqr_POL03.png', 'bwr',
          r'$\times\; 10^{-6}\quad$ Q$_r$ Stokes parameter',
          norm=_stokes_norm(Zqr), circle=True)
_save_map(Zur, 'Zur_POL03.png', 'bwr',
          r'$\times\; 10^{-6}\quad$ U$_r$ Stokes parameter',
          norm=_stokes_norm(Zq), circle=True)
# Radial temperature profile.
plt.close('all')
fig = plt.figure(figsize=(7, 5))
ax = fig.add_subplot()
ax.plot(rr, val_avg)
ax.axhline(0, linestyle='--', color='silver')
ax.set_xlabel('radial distance [deg]')
ax.set_ylabel(r'averaged temperature [$\times 10^6\,\mu$K]')
plt.tight_layout()
fig.savefig('Zt_POL03_radial.png')
# Second copy of the U_r panel under an alternate filename, exactly as
# the original script produced.
_save_map(Zur, 'Zur_b_POL03.png', 'bwr',
          r'$\times\; 10^{-6}\quad$ U$_r$ Stokes parameter',
          norm=_stokes_norm(Zq), circle=True)
# Polarization amplitude and angle maps (no circle, no normalization).
_save_map(P, 'P_POL03.png', 'pink_r', r'$\times\; 10^{-6}\quad$ P')
_save_map(alpha, 'alpha_POL03.png', 'bwr', r'$\alpha$ [rad]')
# Distribution of polarization angles.
plt.close('all')
fig = plt.figure(figsize=(7, 5))
ax = fig.add_subplot()
ax.hist(alpha)
ax.set_xlabel('alpha [rad]')
ax.set_ylabel('dN/d(alpha)')
plt.tight_layout()
fig.savefig('alpha_hist_POL03.png')
# Rotated-frame amplitude and angle maps.
_save_map(Pr, 'P_r_POL03.png', 'pink_r', r'$\times\; 10^{-6}\quad$ P')
_save_map(alphar, 'alpha_r_POL03.png', 'bwr', r'$\alpha$ [rad]')
# Build polarization "arrows": each grid point is a tail; the head is
# offset by the (heavily scaled) polarization vector at that pixel.
# tails
tx = G[:,0]
ty = G[:,1]
# heads (P, alpha flattened to match G's row order; x10000 for visibility)
dx = (P*np.cos(alpha)).reshape(N*N)*10000
dy = (P*np.sin(alpha)).reshape(N*N)*10000
hx = tx + dx
hy = ty + dy
# Print only the arrows with an appreciable x-component.
filt = dx > 1.e-4
for i in range(N*N):
    if filt[i]:
        print(tx[i], hx[i], ty[i], hy[i])
# PLOTS »»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»
# Histograms of the pixel-wise Q and U values (two stacked panels).
plt.close('all')
fig = plt.figure(figsize=(7, 5))
ax1 = fig.add_subplot(2, 1, 1)
zz = Zq.reshape(N*N)
ax1.hist(zz, bins=50, density=True)
ax1.set_xlim(-1.5, 1.5)
ax1.set_xlabel('Q')
ax1.set_ylabel(r'dN/dQ')
ax2 = fig.add_subplot(2, 1, 2)
zz = Zu.reshape(N*N)
ax2.hist(zz, bins=50, density=True)
ax2.set_xlim(-1.5, 1.5)
ax2.set_xlabel('U')
ax2.set_ylabel(r'dN/dU')
plt.tight_layout()
fig.savefig('hists_POL03_radial.png')
# PLOTS »»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»
# Collect per-pixel Q/U values inside the stacking aperture, skipping
# numerically empty (near-zero) pixels.
ql = []
ul = []
pl = []
al = []
for i, j in idxs:
    # BUG FIX: the radius of grid point (i, j) is sqrt(xr[i]^2 + yr[j]^2).
    # The original used yr[i], which computes the radius of the diagonal
    # point (i, i) and therefore selected the wrong set of pixels.
    r = np.sqrt(xr[i]**2 + yr[j]**2)
    if r < rmax_deg:
        if abs(Zq[i, j]) > 1.e-6 and abs(Zu[i, j]) > 1.e-6:
            ql.append(Zq[i, j])
            ul.append(Zu[i, j])
            # Polarization amplitude and angle for this pixel.
            P_tmp = np.sqrt(Zq[i, j]**2 + Zu[i, j]**2)
            alpha_tmp = np.arctan2(Zu[i, j], Zq[i, j]) / 2
            pl.append(P_tmp)
            al.append(alpha_tmp)
ql = np.array(ql)
ul = np.array(ul)
# Bump the default font size for the final scatter plot.
# NOTE(review): 'normal' is not a valid font *family* name; matplotlib
# will warn and fall back to the default -- probably meant 'sans-serif'.
font = {'family' : 'normal',
        'weight' : 'medium',
        'size'   : 14}
rc('font', **font)
# Scatter of U - Q against Q for the selected pixels, with heavy alpha
# blending so density shows through.
plt.close('all')
fig = plt.figure(figsize=(7, 5))
ax = fig.add_subplot()
ax.plot(ql, ul-ql, marker='o', markersize=12, color=(0, 0.7, 1, 0.01), linestyle='None')
ax.set_xlim(-0.7, 0.7)
ax.set_ylim(-0.25, 0.25)
ax.grid(color='silver')
ax.set_xlabel(r'Q [$\times 10^6 \, \mu$K]', fontsize=16)
ax.set_ylabel(r'U - Q [$\times 10^6 \, \mu$K]', fontsize=16)
plt.tight_layout()
fig.savefig('qu_POL03.png')
| 24.455128 | 88 | 0.584797 | import pickle
import numpy as np
import cmfg
from Parser import Parser
from math import pi
from astropy import units as u
import itertools
from sklearn.neighbors import NearestNeighbors
from matplotlib import pyplot as plt
from random import random
from matplotlib import colors, ticker, rc
class MidpointNormalize(colors.Normalize):
def __init__(self, vmin=None, vmax=None, midpoint=None, clip=False):
self.midpoint = midpoint
colors.Normalize.__init__(self, vmin, vmax, clip)
def __call__(self, value, clip=None):
# simple example...
x, y = [self.vmin, self.midpoint, self.vmax], [0, 0.5, 1]
return np.ma.masked_array(np.interp(value, x, y))
with open('../out/POL03/data_POL03.pk', 'rb') as arch:
results = pickle.load(arch)
N = results[0][0].shape[0]
Zt = np.zeros((N,N))
Zq = np.zeros((N,N))
Zu = np.zeros((N,N))
Zqr = np.zeros((N,N))
Zur = np.zeros((N,N))
Ncen = 0
for r in results:
Ncen += 1
Zt = Zt + r[0]
Zq = Zq + r[1]
Zu = Zu + r[2]
Zqr = Zqr + r[3]
Zur = Zur + r[4]
del results
Zt = Zt / Ncen * 1.e6
Zq = Zq / Ncen * 1.e6
Zu = Zu / Ncen * 1.e6
Zqr = Zqr / Ncen* 1.e6
Zur = Zur / Ncen* 1.e6
P = np.sqrt(Zq**2 + Zu**2)
alpha = np.arctan2(Zu, Zq) / 2
Pr = np.sqrt(Zqr**2 + Zur**2)
alphar = np.arctan2(Zur, Zqr) / 2
# ADDITIONAL DATA »»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»»
config = Parser('../set/POL03.ini')
X = cmfg.profile2d(config)
X.load_centers()
X.select_subsample_centers()
rmax = config.p.r_stop # rad
rmax_deg = rmax.to(u.deg).value
print('rmax_deg ------> ', rmax_deg)
# COMPUTE RADIAL PROFILE
N = 120
xr = np.linspace(-rmax_deg, rmax_deg, N)
yr = np.linspace(-rmax_deg, rmax_deg, N)
idxs = itertools.product(range(N), range(N))
idxs = np.array(list(idxs))
G = itertools.product(xr, yr)
G = np.array(list(G))
neigh = NearestNeighbors(n_neighbors=6, radius=0.01)
neigh.fit(G)
# --------
rr = np.linspace(0.02, 2.8, 100)
xpolar, ypolar = [], []
for k, r in enumerate(rr):
nn = 4 + k
tt = np.linspace(0, 2*pi, nn, endpoint=False)
tt = tt + random()*2*pi
x = r*np.cos(tt)
y = r*np.sin(tt)
xpolar.append(x)
ypolar.append(y)
val_avg = []
for xp, yp in zip(xpolar, ypolar):
vals = []
for xx, yy in zip(xp, yp):
dist, ind = neigh.kneighbors([[xx,yy]], 3, return_distance=True)
dd = np.exp(-dist*25)
dsts = dd.sum()
zz = Zt[idxs[ind][0][:,0], idxs[ind][0][:,1]]
vals.append(np.dot(dd, zz)/dsts)
val_avg.append(np.mean(vals))
# PLOTS ---------------------------------------------------------------
# Every stacked-map panel below shares the same layout (imshow + aperture
# circle + colorbar); one helper replaces ~20 duplicated lines per figure.

def _save_map_panel(data, fname, cmap, norm, cb_label, fmt_axes=False):
    """Save one (N, N) stacked map as an imshow figure.

    data     : 2-d array to display.
    fname    : output image path.
    cmap     : matplotlib colormap name.
    norm     : color normalization (diverging, centred at zero).
    cb_label : colorbar label text.
    fmt_axes : if True, apply the one-decimal x-tick format and refresh
               the colorbar ticks (only the temperature panel did this).
    """
    plt.close('all')
    fig = plt.figure(figsize=(7, 5))
    ax = fig.add_subplot()
    sc = ax.imshow(data, cmap=cmap,
                   extent=[-rmax_deg, rmax_deg, -rmax_deg, rmax_deg],
                   norm=norm)
    # White circle marking the stacking aperture r = rmax.
    circle = plt.Circle((0, 0), rmax_deg, fc='None', linewidth=6,
                        color='white')
    ax.add_patch(circle)
    cb = plt.colorbar(sc, ax=ax, shrink=0.8, aspect=60)
    cb.set_label(cb_label)
    ax.set_xlabel('x [deg]')
    ax.set_ylabel('y [deg]')
    if fmt_axes:
        ax.xaxis.set_major_formatter(ticker.StrMethodFormatter("{x:.1f}"))
        cb.update_ticks()
    plt.tight_layout()
    fig.savefig(fname)


# Averaged temperature on a fixed symmetric scale.
_save_map_panel(Zt, 'Zt_POL03.png', 'RdBu_r',
                MidpointNormalize(vmin=-15, vmax=15, midpoint=0.),
                r'averaged temperature [$\mu$K]', fmt_axes=True)

# Q Stokes parameter, scaled to its own range.
_save_map_panel(Zq, 'Zq_POL03.png', 'bwr',
                MidpointNormalize(vmin=Zq.min(), vmax=Zq.max(), midpoint=0.),
                r'$\times\; 10^{-6}\quad$ Q Stokes parameter')

# U Stokes parameter.
# NOTE(review): the U and U_r panels reuse the Q colour range
# (Zq.min()/Zq.max()), presumably to share a common scale with the Q
# panel -- confirm this is intentional.
_save_map_panel(Zu, 'Zu_POL03.png', 'bwr',
                MidpointNormalize(vmin=Zq.min(), vmax=Zq.max(), midpoint=0.),
                r'$\times\; 10^{-6}\quad$ U Stokes parameter')

# Radial Q Stokes parameter, scaled to its own range.
_save_map_panel(Zqr, 'Zqr_POL03.png', 'bwr',
                MidpointNormalize(vmin=Zqr.min(), vmax=Zqr.max(), midpoint=0.),
                r'$\times\; 10^{-6}\quad$ Q$_r$ Stokes parameter')

# Radial U Stokes parameter (on the Q colour range -- see note above).
_save_map_panel(Zur, 'Zur_POL03.png', 'bwr',
                MidpointNormalize(vmin=Zq.min(), vmax=Zq.max(), midpoint=0.),
                r'$\times\; 10^{-6}\quad$ U$_r$ Stokes parameter')

# Radial profile of the averaged temperature (from the ring sampling).
plt.close('all')
fig = plt.figure(figsize=(7, 5))
ax = fig.add_subplot()
ax.plot(rr, val_avg)
ax.axhline(0, linestyle='--', color='silver')
ax.set_xlabel('radial distance [deg]')
ax.set_ylabel(r'averaged temperature [$\times 10^6\,\mu$K]')
plt.tight_layout()
fig.savefig('Zt_POL03_radial.png')

# Second copy of the U_r panel, kept so the historical output file
# 'Zur_b_POL03.png' is still produced.
_save_map_panel(Zur, 'Zur_b_POL03.png', 'bwr',
                MidpointNormalize(vmin=Zq.min(), vmax=Zq.max(), midpoint=0.),
                r'$\times\; 10^{-6}\quad$ U$_r$ Stokes parameter')
# PLOTS ---------------------------------------------------------------
# P y angulo -----------------------------
# The polarised-intensity and angle panels share a simpler layout
# (no norm, no aperture circle); factor it into one helper.

def _save_flat_panel(data, fname, cmap, cb_label):
    """Save one 2-d map as a plain imshow figure with a colorbar."""
    plt.close('all')
    fig = plt.figure(figsize=(7, 5))
    ax = fig.add_subplot()
    sc = ax.imshow(data, cmap=cmap,
                   extent=[-rmax_deg, rmax_deg, -rmax_deg, rmax_deg])
    cb = plt.colorbar(sc, ax=ax, shrink=0.8, aspect=60)
    cb.set_label(cb_label)
    ax.set_xlabel('x [deg]')
    ax.set_ylabel('y [deg]')
    plt.tight_layout()
    fig.savefig(fname)


# Polarised intensity and angle in the standard (Q, U) frame.
_save_flat_panel(P, 'P_POL03.png', 'pink_r', r'$\times\; 10^{-6}\quad$ P')
_save_flat_panel(alpha, 'alpha_POL03.png', 'bwr', r'$\alpha$ [rad]')

# Histogram of the polarisation angles (alpha is 2-d, so matplotlib
# draws one histogram per column).
plt.close('all')
fig = plt.figure(figsize=(7, 5))
ax = fig.add_subplot()
ax.hist(alpha)
ax.set_xlabel('alpha [rad]')
ax.set_ylabel('dN/d(alpha)')
plt.tight_layout()
fig.savefig('alpha_hist_POL03.png')

# Same two panels in the radial (Q_r, U_r) frame.
_save_flat_panel(Pr, 'P_r_POL03.png', 'pink_r', r'$\times\; 10^{-6}\quad$ P')
_save_flat_panel(alphar, 'alpha_r_POL03.png', 'bwr', r'$\alpha$ [rad]')
# Polarisation "vectors": tails sit on the grid nodes, heads are displaced
# by the (scaled) polarised intensity along the polarisation angle.
tx, ty = G[:, 0], G[:, 1]  # tails
dx = (P * np.cos(alpha)).reshape(N*N) * 10000  # head displacements
dy = (P * np.sin(alpha)).reshape(N*N) * 10000
hx = tx + dx
hy = ty + dy

# Dump only the vectors with a non-negligible x-displacement.
filt = dx > 1.e-4
for tail_x, head_x, tail_y, head_y in zip(tx[filt], hx[filt],
                                          ty[filt], hy[filt]):
    print(tail_x, head_x, tail_y, head_y)
# PLOTS ---------------------------------------------------------------
# Distributions of the averaged Q (top) and U (bottom) map values.
plt.close('all')
fig = plt.figure(figsize=(7, 5))

ax1 = fig.add_subplot(2, 1, 1)
ax1.hist(Zq.reshape(N*N), bins=50, density=True)
ax1.set_xlim(-1.5, 1.5)
ax1.set_xlabel('Q')
ax1.set_ylabel(r'dN/dQ')

ax2 = fig.add_subplot(2, 1, 2)
ax2.hist(Zu.reshape(N*N), bins=50, density=True)
ax2.set_xlim(-1.5, 1.5)
ax2.set_xlabel('U')
ax2.set_ylabel(r'dN/dU')

plt.tight_layout()
fig.savefig('hists_POL03_radial.png')
# PLOTS ---------------------------------------------------------------
# Q vs (U - Q) scatter, restricted to pixels inside the stacking aperture.
ql = []
ul = []
pl = []
al = []
for i, j in idxs:
    # Radius of grid node (i, j).
    # FIX: the y coordinate is yr[j], not yr[i]; the original made the
    # cut independent of j (and, since xr == yr, selected a vertical
    # band instead of the r < rmax disc).
    r = np.sqrt(xr[i]**2 + yr[j]**2)
    if r < rmax_deg:
        # Skip numerically-empty pixels.
        if abs(Zq[i, j]) > 1.e-6 and abs(Zu[i, j]) > 1.e-6:
            ql.append(Zq[i, j])
            ul.append(Zu[i, j])
            P_tmp = np.sqrt(Zq[i, j]**2 + Zu[i, j]**2)
            alpha_tmp = np.arctan2(Zu[i, j], Zq[i, j]) / 2
            pl.append(P_tmp)
            al.append(alpha_tmp)
ql = np.array(ql)
ul = np.array(ul)

# Larger default font for this final figure.
font = {'family': 'normal',
        'weight': 'medium',
        'size': 14}
rc('font', **font)

plt.close('all')
fig = plt.figure(figsize=(7, 5))
ax = fig.add_subplot()
# Very low alpha so dense regions show as darker clouds.
ax.plot(ql, ul - ql, marker='o', markersize=12,
        color=(0, 0.7, 1, 0.01), linestyle='None')
ax.set_xlim(-0.7, 0.7)
ax.set_ylim(-0.25, 0.25)
ax.grid(color='silver')
ax.set_xlabel(r'Q [$\times 10^6 \, \mu$K]', fontsize=16)
ax.set_ylabel(r'U - Q [$\times 10^6 \, \mu$K]', fontsize=16)
plt.tight_layout()
fig.savefig('qu_POL03.png')
| true | true |
1c2fb00c0438f657aa01efb9065b538cc0f0773c | 5,784 | py | Python | venv/Lib/site-packages/sqlalchemy/connectors/mxodbc.py | ajayiagbebaku/NFL-Model | afcc67a85ca7138c58c3334d45988ada2da158ed | [
"MIT"
] | 5,383 | 2018-11-27T07:34:03.000Z | 2022-03-31T19:40:59.000Z | venv/Lib/site-packages/sqlalchemy/connectors/mxodbc.py | ajayiagbebaku/NFL-Model | afcc67a85ca7138c58c3334d45988ada2da158ed | [
"MIT"
] | 2,719 | 2018-11-27T07:55:01.000Z | 2022-03-31T22:09:44.000Z | venv/Lib/site-packages/sqlalchemy/connectors/mxodbc.py | ajayiagbebaku/NFL-Model | afcc67a85ca7138c58c3334d45988ada2da158ed | [
"MIT"
] | 998 | 2018-11-28T09:34:38.000Z | 2022-03-30T19:04:11.000Z | # connectors/mxodbc.py
# Copyright (C) 2005-2021 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: https://www.opensource.org/licenses/mit-license.php
"""
Provide a SQLALchemy connector for the eGenix mxODBC commercial
Python adapter for ODBC. This is not a free product, but eGenix
provides SQLAlchemy with a license for use in continuous integration
testing.
This has been tested for use with mxODBC 3.1.2 on SQL Server 2005
and 2008, using the SQL Server Native driver. However, it is
possible for this to be used on other database platforms.
For more info on mxODBC, see https://www.egenix.com/
.. deprecated:: 1.4 The mxODBC DBAPI is deprecated and will be removed
in a future version. Please use one of the supported DBAPIs to
connect to mssql.
"""
import re
import sys
import warnings
from . import Connector
from ..util import warn_deprecated
class MxODBCConnector(Connector):
    """Connector for the eGenix mxODBC commercial ODBC DBAPI (deprecated).

    Selects the platform-specific ``mx.ODBC`` module, adapts mxODBC
    warnings/errors to Python conventions, and translates SQLAlchemy
    URLs into mxODBC connection arguments.
    """

    driver = "mxodbc"

    supports_sane_multi_rowcount = False
    supports_unicode_statements = True
    supports_unicode_binds = True

    supports_native_decimal = True

    @classmethod
    def dbapi(cls):
        """Return the platform-appropriate ``mx.ODBC`` module.

        Also imports the mxODBC exception classes and emits a
        deprecation warning.
        """
        # this classmethod will normally be replaced by an instance
        # attribute of the same name, so this is normally only called once.
        cls._load_mx_exceptions()
        platform = sys.platform
        if platform == "win32":
            from mx.ODBC import Windows as Module

        # this can be the string "linux2", and possibly others
        elif "linux" in platform:
            from mx.ODBC import unixODBC as Module
        elif platform == "darwin":
            from mx.ODBC import iODBC as Module
        else:
            raise ImportError("Unrecognized platform for mxODBC import")

        # FIX: trailing spaces added inside the literals; adjacent string
        # constants concatenate with no separator, so the message used to
        # read "removedin a future version" / "toconnect to mssql".
        warn_deprecated(
            "The mxODBC DBAPI is deprecated and will be removed "
            "in a future version. Please use one of the supported DBAPIs to "
            "connect to mssql.",
            version="1.4",
        )
        return Module

    @classmethod
    def _load_mx_exceptions(cls):
        """Import mxODBC exception classes into the module namespace,
        as if they had been imported normally. This is done here
        to avoid requiring all SQLAlchemy users to install mxODBC.
        """
        global InterfaceError, ProgrammingError
        from mx.ODBC import InterfaceError
        from mx.ODBC import ProgrammingError

    def on_connect(self):
        """Return a callable applied to each newly created raw connection.

        Configures mxODBC string/datetime/decimal result formats and
        installs the custom error handler.
        """

        def connect(conn):
            conn.stringformat = self.dbapi.MIXED_STRINGFORMAT
            conn.datetimeformat = self.dbapi.PYDATETIME_DATETIMEFORMAT
            conn.decimalformat = self.dbapi.DECIMAL_DECIMALFORMAT
            conn.errorhandler = self._error_handler()

        return connect

    def _error_handler(self):
        """Return a handler that adjusts mxODBC's raised Warnings to
        emit Python standard warnings.
        """
        from mx.ODBC.Error import Warning as MxOdbcWarning

        def error_handler(connection, cursor, errorclass, errorvalue):
            if issubclass(errorclass, MxOdbcWarning):
                # Rebase the mxODBC warning class onto Warning so the
                # stdlib warnings machinery accepts it as a category.
                errorclass.__bases__ = (Warning,)
                warnings.warn(
                    message=str(errorvalue), category=errorclass, stacklevel=2
                )
            else:
                raise errorclass(errorvalue)

        return error_handler

    def create_connect_args(self, url):
        r"""Return a tuple of \*args, \**kwargs for creating a connection.

        The mxODBC 3.x connection constructor looks like this:

            connect(dsn, user='', password='',
                    clear_auto_commit=1, errorhandler=None)

        This method translates the values in the provided URI
        into args and kwargs needed to instantiate an mxODBC Connection.

        The arg 'errorhandler' is not used by SQLAlchemy and will
        not be populated.
        """
        opts = url.translate_connect_args(username="user")
        opts.update(url.query)
        args = opts.pop("host")
        # mxODBC connects via DSN only; port/database are not applicable.
        opts.pop("port", None)
        opts.pop("database", None)
        return (args,), opts

    def is_disconnect(self, e, connection, cursor):
        """Return True if exception *e* indicates a dropped connection."""
        # TODO: eGenix recommends checking connection.closed here
        # Does that detect dropped connections ?
        if isinstance(e, self.dbapi.ProgrammingError):
            return "connection already closed" in str(e)
        elif isinstance(e, self.dbapi.Error):
            # [08S01] is the ODBC "communication link failure" SQLSTATE.
            return "[08S01]" in str(e)
        else:
            return False

    def _get_server_version_info(self, connection):
        """Return the backend version as a tuple of ints and strings."""
        # eGenix suggests using conn.dbms_version instead
        # of what we're doing here
        dbapi_con = connection.connection
        version = []
        r = re.compile(r"[.\-]")
        # 18 == pyodbc.SQL_DBMS_VER
        for n in r.split(dbapi_con.getinfo(18)[1]):
            try:
                version.append(int(n))
            except ValueError:
                version.append(n)
        return tuple(version)

    def _get_direct(self, context):
        """Return the ``direct=`` flag for mxODBC execute calls."""
        if context:
            native_odbc_execute = context.execution_options.get(
                "native_odbc_execute", "auto"
            )
            # default to direct=True in all cases, is more generally
            # compatible especially with SQL Server
            return False if native_odbc_execute is True else True
        else:
            return True

    def do_executemany(self, cursor, statement, parameters, context=None):
        cursor.executemany(
            statement, parameters, direct=self._get_direct(context)
        )

    def do_execute(self, cursor, statement, parameters, context=None):
        cursor.execute(statement, parameters, direct=self._get_direct(context))
| 34.634731 | 79 | 0.648513 |
import re
import sys
import warnings
from . import Connector
from ..util import warn_deprecated
class MxODBCConnector(Connector):
    """Connector for the eGenix mxODBC commercial ODBC DBAPI.

    Selects the platform-specific ``mx.ODBC`` module, adapts mxODBC
    warnings/errors to Python conventions, and translates SQLAlchemy
    URLs into mxODBC connection arguments.
    """

    driver = "mxodbc"
    # mxODBC does not return reliable rowcounts from executemany().
    supports_sane_multi_rowcount = False
    supports_unicode_statements = True
    supports_unicode_binds = True
    supports_native_decimal = True

    @classmethod
    def dbapi(cls):
        """Return the platform-appropriate ``mx.ODBC`` module.

        Also imports the mxODBC exception classes and emits a
        deprecation warning.  Normally called only once; the result
        replaces this classmethod as an instance attribute.
        """
        cls._load_mx_exceptions()
        platform = sys.platform
        if platform == "win32":
            from mx.ODBC import Windows as Module
        # "linux" also matches "linux2" and similar platform strings.
        elif "linux" in platform:
            from mx.ODBC import unixODBC as Module
        elif platform == "darwin":
            from mx.ODBC import iODBC as Module
        else:
            raise ImportError("Unrecognized platform for mxODBC import")
        # NOTE(review): the adjacent string literals below concatenate
        # without separating spaces ("removedin", "toconnect"); message
        # text left untouched here.
        warn_deprecated(
            "The mxODBC DBAPI is deprecated and will be removed"
            "in a future version. Please use one of the supported DBAPIs to"
            "connect to mssql.",
            version="1.4",
        )
        return Module

    @classmethod
    def _load_mx_exceptions(cls):
        """Import mxODBC exception classes into the module namespace.

        Done lazily so that mxODBC is only required when this connector
        is actually used.
        """
        global InterfaceError, ProgrammingError
        from mx.ODBC import InterfaceError
        from mx.ODBC import ProgrammingError

    def on_connect(self):
        """Return a callable applied to each newly created raw connection.

        Configures mxODBC string/datetime/decimal result formats and
        installs the custom error handler.
        """
        def connect(conn):
            conn.stringformat = self.dbapi.MIXED_STRINGFORMAT
            conn.datetimeformat = self.dbapi.PYDATETIME_DATETIMEFORMAT
            conn.decimalformat = self.dbapi.DECIMAL_DECIMALFORMAT
            conn.errorhandler = self._error_handler()
        return connect

    def _error_handler(self):
        """Return a handler that re-emits mxODBC warnings as standard
        Python warnings and re-raises all other errors."""
        from mx.ODBC.Error import Warning as MxOdbcWarning

        def error_handler(connection, cursor, errorclass, errorvalue):
            if issubclass(errorclass, MxOdbcWarning):
                # Rebase the mxODBC warning class onto Warning so the
                # stdlib warnings machinery accepts it as a category.
                errorclass.__bases__ = (Warning,)
                warnings.warn(
                    message=str(errorvalue), category=errorclass, stacklevel=2
                )
            else:
                raise errorclass(errorvalue)
        return error_handler

    def create_connect_args(self, url):
        r"""Return a ``(\*args, \**kwargs)`` pair for the mxODBC
        ``connect()`` constructor, derived from the SQLAlchemy URL.

        The ``host`` component becomes the positional DSN argument;
        ``port`` and ``database`` are discarded (not used by mxODBC).
        The 'errorhandler' argument is not populated here.
        """
        opts = url.translate_connect_args(username="user")
        opts.update(url.query)
        args = opts.pop("host")
        opts.pop("port", None)
        opts.pop("database", None)
        return (args,), opts

    def is_disconnect(self, e, connection, cursor):
        """Return True if exception *e* indicates a dropped connection."""
        # NOTE(review): eGenix recommends checking connection.closed;
        # unclear whether that detects dropped connections.
        if isinstance(e, self.dbapi.ProgrammingError):
            return "connection already closed" in str(e)
        elif isinstance(e, self.dbapi.Error):
            # [08S01] is the ODBC "communication link failure" SQLSTATE.
            return "[08S01]" in str(e)
        else:
            return False

    def _get_server_version_info(self, connection):
        """Return the backend version as a tuple of ints and strings."""
        dbapi_con = connection.connection
        version = []
        r = re.compile(r"[.\-]")
        # 18 == pyodbc.SQL_DBMS_VER
        for n in r.split(dbapi_con.getinfo(18)[1]):
            try:
                version.append(int(n))
            except ValueError:
                # Non-numeric segments (e.g. build tags) kept as strings.
                version.append(n)
        return tuple(version)

    def _get_direct(self, context):
        """Return the ``direct=`` flag for mxODBC execute calls."""
        if context:
            native_odbc_execute = context.execution_options.get(
                "native_odbc_execute", "auto"
            )
            # default to direct=True in all cases, is more generally
            # compatible especially with SQL Server
            return False if native_odbc_execute is True else True
        else:
            return True

    def do_executemany(self, cursor, statement, parameters, context=None):
        # Bulk execute; honours the per-context direct-execution option.
        cursor.executemany(
            statement, parameters, direct=self._get_direct(context)
        )

    def do_execute(self, cursor, statement, parameters, context=None):
        # Single execute; honours the per-context direct-execution option.
        cursor.execute(statement, parameters, direct=self._get_direct(context))
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.