hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f72495f95031d45cde0afbf3bc7be5495a179206 | 798 | py | Python | distributed-todos/services/todos/src/tasks/worker.py | cheperuiz/unlearn-python | f5b97090f8f1d014bd9d65ecc0c6232919271bfa | [
"MIT"
] | null | null | null | distributed-todos/services/todos/src/tasks/worker.py | cheperuiz/unlearn-python | f5b97090f8f1d014bd9d65ecc0c6232919271bfa | [
"MIT"
] | 2 | 2021-05-11T00:00:01.000Z | 2022-01-22T10:13:26.000Z | distributed-todos/services/todos/src/tasks/worker.py | cheperuiz/unlearn-dev | f5b97090f8f1d014bd9d65ecc0c6232919271bfa | [
"MIT"
] | null | null | null | import yaml
from celery import Celery
from pymongo import MongoClient
from models.todo_dao import MongoDAO
from models.todo import TodoSchema
from library.utils import replace_env, make_url
with open("/config/todos/default_config.yml", "r") as f:
config = yaml.load(f, yaml.SafeLoader)
replace_env(config)
url = make_url(config["database"]["mongo"], include_db=False)
client = MongoClient(url)
collection = client.todos.todos_collection
broker_url = make_url(config["celery"]["broker"])
results_backend_url = make_url(config["celery"]["results_backend"])
celery = Celery(__name__, broker=broker_url, backend=results_backend_url)
@celery.task(name="tasks.worker.get_all_todos")
def get_all_todos(dao=MongoDAO(collection, TodoSchema)):
return TodoSchema(many=True).dump(dao.get_all())
| 30.692308 | 73 | 0.784461 | import yaml
from celery import Celery
from pymongo import MongoClient
from models.todo_dao import MongoDAO
from models.todo import TodoSchema
from library.utils import replace_env, make_url
with open("/config/todos/default_config.yml", "r") as f:
config = yaml.load(f, yaml.SafeLoader)
replace_env(config)
url = make_url(config["database"]["mongo"], include_db=False)
client = MongoClient(url)
collection = client.todos.todos_collection
broker_url = make_url(config["celery"]["broker"])
results_backend_url = make_url(config["celery"]["results_backend"])
celery = Celery(__name__, broker=broker_url, backend=results_backend_url)
@celery.task(name="tasks.worker.get_all_todos")
def get_all_todos(dao=MongoDAO(collection, TodoSchema)):
return TodoSchema(many=True).dump(dao.get_all())
| true | true |
f7249601a426b85ab408d827623e20f212418c0d | 1,180 | py | Python | xlsxwriter/test/comparison/test_button06.py | haiyangd/XlsxWriter | 81f8c9435b3e03a1458bf9ba314b5d3f7508290f | [
"BSD-2-Clause-FreeBSD"
] | 3 | 2018-02-26T12:31:41.000Z | 2020-10-10T14:14:11.000Z | xlsxwriter/test/comparison/test_button06.py | haiyangd/XlsxWriter | 81f8c9435b3e03a1458bf9ba314b5d3f7508290f | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | xlsxwriter/test/comparison/test_button06.py | haiyangd/XlsxWriter | 81f8c9435b3e03a1458bf9ba314b5d3f7508290f | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2017, John McNamara, jmcnamara@cpan.org
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'button05.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test2_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
worksheet.insert_button('C2', {'macro': 'my_macro',
'width': 128,
'height': 30
})
workbook.close()
self.assertExcelEqual()
| 26.222222 | 79 | 0.54322 | true | true | |
f72497b539e640bb7175711770e3eea6c3d373a3 | 2,122 | py | Python | service/generated_flatbuffers/tflite/SliceOptions.py | lcrh/falken | 7545431c7bfa34a9b45c2243cae40dbb58adefaa | [
"Apache-2.0"
] | 213 | 2021-06-11T01:15:16.000Z | 2022-02-25T16:18:57.000Z | service/generated_flatbuffers/tflite/SliceOptions.py | lcrh/falken | 7545431c7bfa34a9b45c2243cae40dbb58adefaa | [
"Apache-2.0"
] | 32 | 2021-06-17T17:58:54.000Z | 2022-02-02T05:58:10.000Z | service/generated_flatbuffers/tflite/SliceOptions.py | lcrh/falken | 7545431c7bfa34a9b45c2243cae40dbb58adefaa | [
"Apache-2.0"
] | 28 | 2021-06-17T17:34:21.000Z | 2022-03-24T14:05:20.000Z | # Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: tflite
import flatbuffers
from flatbuffers.compat import import_numpy
np = import_numpy()
class SliceOptions(object):
__slots__ = ['_tab']
@classmethod
def GetRootAsSliceOptions(cls, buf, offset):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = SliceOptions()
x.Init(buf, n + offset)
return x
@classmethod
def SliceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
# SliceOptions
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
def SliceOptionsStart(builder): builder.StartObject(0)
def SliceOptionsEnd(builder): return builder.EndObject()
class SliceOptionsT(object):
# SliceOptionsT
def __init__(self):
pass
@classmethod
def InitFromBuf(cls, buf, pos):
sliceOptions = SliceOptions()
sliceOptions.Init(buf, pos)
return cls.InitFromObj(sliceOptions)
@classmethod
def InitFromObj(cls, sliceOptions):
x = SliceOptionsT()
x._UnPack(sliceOptions)
return x
# SliceOptionsT
def _UnPack(self, sliceOptions):
if sliceOptions is None:
return
# SliceOptionsT
def Pack(self, builder):
SliceOptionsStart(builder)
sliceOptions = SliceOptionsEnd(builder)
return sliceOptions
| 29.068493 | 114 | 0.703582 |
import flatbuffers
from flatbuffers.compat import import_numpy
np = import_numpy()
class SliceOptions(object):
__slots__ = ['_tab']
@classmethod
def GetRootAsSliceOptions(cls, buf, offset):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = SliceOptions()
x.Init(buf, n + offset)
return x
@classmethod
def SliceOptionsBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
def SliceOptionsStart(builder): builder.StartObject(0)
def SliceOptionsEnd(builder): return builder.EndObject()
class SliceOptionsT(object):
def __init__(self):
pass
@classmethod
def InitFromBuf(cls, buf, pos):
sliceOptions = SliceOptions()
sliceOptions.Init(buf, pos)
return cls.InitFromObj(sliceOptions)
@classmethod
def InitFromObj(cls, sliceOptions):
x = SliceOptionsT()
x._UnPack(sliceOptions)
return x
def _UnPack(self, sliceOptions):
if sliceOptions is None:
return
def Pack(self, builder):
SliceOptionsStart(builder)
sliceOptions = SliceOptionsEnd(builder)
return sliceOptions
| true | true |
f72498dcf1db718730a5fc9efce9cd8c757e4531 | 15,730 | py | Python | test/test_source_gdal.py | knowledgevis/large_image | ab5c213d3a68de8a2144707fc0dc1115d1e4664f | [
"Apache-2.0"
] | null | null | null | test/test_source_gdal.py | knowledgevis/large_image | ab5c213d3a68de8a2144707fc0dc1115d1e4664f | [
"Apache-2.0"
] | null | null | null | test/test_source_gdal.py | knowledgevis/large_image | ab5c213d3a68de8a2144707fc0dc1115d1e4664f | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import glob
import json
import numpy
import os
import PIL.Image
import PIL.ImageChops
import pytest
import six
from large_image import constants
from large_image.exceptions import TileSourceException
import large_image_source_gdal
from . import utilities
def _assertImageMatches(image, testRootName, saveTestImageFailurePath='/tmp'):
"""
Check if an image matches any of a set of images.
Adapted from:
https://stackoverflow.com/questions/35176639/compare-images-python-pil
:param image: PIL image to compare or a binary string of the image.
:param testRootName: base name of the images to test. These images are
globbed in test_files/<testRootName>*.png.
:param saveTestImageFailurePath: if the image doesn't match any of the
test images, if this value is set, save the image to make it easier
to determine why it failed.
"""
if isinstance(image, six.binary_type):
image = PIL.Image.open(six.BytesIO(image))
image = image.convert('RGBA')
testDir = os.path.dirname(os.path.realpath(__file__))
testImagePaths = glob.glob(os.path.join(
testDir, 'test_files', testRootName + '*.png'))
testImages = [PIL.Image.open(testImagePath).convert('RGBA')
for testImagePath in testImagePaths]
diffs = [PIL.ImageChops.difference(image, testImage).getbbox()
for testImage in testImages]
if None not in diffs and saveTestImageFailurePath:
image.save(os.path.join(saveTestImageFailurePath, testRootName + '_test.png'))
assert None in diffs
def testTileFromGeotiffs():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
source = large_image_source_gdal.GDALFileTileSource(imagePath)
tileMetadata = source.getMetadata()
assert tileMetadata['tileWidth'] == 256
assert tileMetadata['tileHeight'] == 256
assert tileMetadata['sizeX'] == 256
assert tileMetadata['sizeY'] == 256
assert tileMetadata['levels'] == 1
assert tileMetadata['bounds']['xmax'] == 597915.0
assert tileMetadata['bounds']['xmin'] == 367185.0
assert tileMetadata['bounds']['ymax'] == 3788115.0
assert tileMetadata['bounds']['ymin'] == 3552885.0
assert (tileMetadata['bounds']['srs'].strip() ==
'+proj=utm +zone=11 +datum=WGS84 +units=m +no_defs')
assert tileMetadata['geospatial']
# Check that we read some band data, too
assert len(tileMetadata['bands']) == 3
assert tileMetadata['bands'][2]['interpretation'] == 'green'
assert tileMetadata['bands'][2]['max'] == 212.0
assert tileMetadata['bands'][2]['min'] == 0.0
# Getting the metadata with a specified projection will be different
source = large_image_source_gdal.GDALFileTileSource(
imagePath, projection='EPSG:3857')
tileMetadata = source.getMetadata()
assert tileMetadata['tileWidth'] == 256
assert tileMetadata['tileHeight'] == 256
assert tileMetadata['sizeX'] == 65536
assert tileMetadata['sizeY'] == 65536
assert tileMetadata['levels'] == 9
assert tileMetadata['bounds']['xmax'] == pytest.approx(-12906033, 1)
assert tileMetadata['bounds']['xmin'] == pytest.approx(-13184900, 1)
assert tileMetadata['bounds']['ymax'] == pytest.approx(4059661, 1)
assert tileMetadata['bounds']['ymin'] == pytest.approx(3777034, 1)
assert tileMetadata['bounds']['srs'] == '+init=epsg:3857'
assert tileMetadata['geospatial']
source = large_image_source_gdal.GDALFileTileSource(
imagePath, projection='EPSG:3857', style=json.dumps({'band': -1}), encoding='PNG')
image = source.getTile(89, 207, 9)
_assertImageMatches(image, 'geotiff_9_89_207')
def testTileLinearStyleFromGeotiffs():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
style = json.dumps({'band': 1, 'min': 0, 'max': 100,
'palette': 'matplotlib.Plasma_6',
'scheme': 'linear'})
source = large_image_source_gdal.GDALFileTileSource(
imagePath, projection='EPSG:3857', style=style, encoding='PNG')
image = source.getTile(22, 51, 7)
_assertImageMatches(image, 'geotiff_style_linear_7_22_51')
def testTileStyleBadInput():
def _assertStyleResponse(imagePath, style, message):
with pytest.raises(TileSourceException, match=message):
source = large_image_source_gdal.GDALFileTileSource(
imagePath, projection='EPSG:3857', style=json.dumps(style), encoding='PNG')
source.getTile(22, 51, 7)
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
_assertStyleResponse(imagePath, {
'band': 1.1,
}, 'Band has to be a positive integer, -1, or a band interpretation found in the source.')
_assertStyleResponse(imagePath, {
'band': 500,
}, 'Band has to be a positive integer, -1, or a band interpretation found in the source.')
_assertStyleResponse(imagePath, {
'band': 1,
'palette': 'nonexistent.palette'
}, 'Palette is not a valid palettable path.')
_assertStyleResponse(imagePath, ['style'],
'Style is not a valid json object.')
def testThumbnailFromGeotiffs():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
source = large_image_source_gdal.GDALFileTileSource(imagePath)
# We get a thumbnail without a projection
image, mimeType = source.getThumbnail(encoding='PNG')
assert image[:len(utilities.PNGHeader)] == utilities.PNGHeader
# We get a different thumbnail with a projection
source = large_image_source_gdal.GDALFileTileSource(imagePath, projection='EPSG:3857')
image2, mimeType = source.getThumbnail(encoding='PNG')
assert image2[:len(utilities.PNGHeader)] == utilities.PNGHeader
assert image != image2
def testPixel():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
# Test in pixel coordinates
source = large_image_source_gdal.GDALFileTileSource(imagePath)
pixel = source.getPixel(region={'left': 212, 'top': 198})
assert pixel == {
'r': 76, 'g': 78, 'b': 77, 'a': 255, 'bands': {1: 62.0, 2: 65.0, 3: 66.0}}
pixel = source.getPixel(region={'left': 2120, 'top': 198})
assert pixel == {}
# Test with a projection
source = large_image_source_gdal.GDALFileTileSource(imagePath, projection='EPSG:3857')
pixel = source.getPixel(region={'left': -13132910, 'top': 4010586, 'units': 'projection'})
assert pixel == {
'r': 94, 'g': 98, 'b': 99, 'a': 255, 'bands': {1: 77.0, 2: 82.0, 3: 84.0}}
# Test with styles
style = json.dumps({'band': 1, 'min': 0, 'max': 100,
'palette': 'matplotlib.Plasma_6'})
source = large_image_source_gdal.GDALFileTileSource(
imagePath, projection='EPSG:3857', style=style)
pixel = source.getPixel(region={'left': -13132910, 'top': 4010586, 'units': 'projection'})
assert pixel == {
'r': 247, 'g': 156, 'b': 60, 'a': 255, 'bands': {1: 77.0, 2: 82.0, 3: 84.0}}
# Test with palette as an array of colors
style = json.dumps({'band': 1, 'min': 0, 'max': 100,
'palette': ['#0000ff', '#00ff00', '#ff0000']})
source = large_image_source_gdal.GDALFileTileSource(
imagePath, projection='EPSG:3857', style=style)
pixel = source.getPixel(region={'left': -13132910, 'top': 4010586, 'units': 'projection'})
assert pixel == {
'r': 137, 'g': 117, 'b': 0, 'a': 255, 'bands': {1: 77.0, 2: 82.0, 3: 84.0}}
# Test with projection units
source = large_image_source_gdal.GDALFileTileSource(imagePath, projection='EPSG:3857')
pixel = source.getPixel(region={'left': -13132910, 'top': 4010586, 'units': 'EPSG:3857'})
assert pixel == {
'r': 94, 'g': 98, 'b': 99, 'a': 255, 'bands': {1: 77.0, 2: 82.0, 3: 84.0}}
pixel = source.getPixel(region={'left': -117.975, 'top': 33.865, 'units': 'WGS84'})
assert pixel == {
'r': 94, 'g': 98, 'b': 99, 'a': 255, 'bands': {1: 77.0, 2: 82.0, 3: 84.0}}
# When the tile has a different projection, the pixel is the same as
# the band values.
source = large_image_source_gdal.GDALFileTileSource(imagePath)
pixel = source.getPixel(region={'left': -13132910, 'top': 4010586, 'units': 'EPSG:3857'})
assert pixel == {
'r': 94, 'g': 98, 'b': 99, 'a': 255, 'bands': {1: 77.0, 2: 82.0, 3: 84.0}}
def testSourceErrors():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
with pytest.raises(TileSourceException, match='must not be geographic'):
large_image_source_gdal.GDALFileTileSource(imagePath, 'EPSG:4326')
imagePath = os.path.join(testDir, 'test_files', 'zero_gi.tif')
with pytest.raises(TileSourceException, match='cannot be opened via'):
large_image_source_gdal.GDALFileTileSource(imagePath)
imagePath = os.path.join(testDir, 'test_files', 'yb10kx5k.png')
with pytest.raises(TileSourceException, match='does not have a projected scale'):
large_image_source_gdal.GDALFileTileSource(imagePath)
def testStereographicProjection():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
# We will fail if we ask for a stereographic projection and don't
# specify unitsPerPixel
with pytest.raises(TileSourceException, match='unitsPerPixel must be specified'):
large_image_source_gdal.GDALFileTileSource(imagePath, 'EPSG:3411')
# But will pass if unitsPerPixel is specified
large_image_source_gdal.GDALFileTileSource(imagePath, 'EPSG:3411', unitsPerPixel=150000)
def testProj4Proj():
# Test obtaining pyproj.Proj projection values
proj4Proj = large_image_source_gdal.GDALFileTileSource._proj4Proj
proj = proj4Proj(b'epsg:4326')
assert proj4Proj(u'epsg:4326').srs == proj.srs
assert proj4Proj('proj4:EPSG:4326').srs == proj.srs
assert proj4Proj(4326) is None
def testConvertProjectionUnits():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
tsNoProj = large_image_source_gdal.GDALFileTileSource(imagePath)
result = tsNoProj._convertProjectionUnits(
-13024380, 3895303, None, None, None, None, 'EPSG:3857')
assert result[0] == pytest.approx(147, 1)
assert result[1] == pytest.approx(149, 1)
assert result[2:] == (None, None, 'base_pixels')
result = tsNoProj._convertProjectionUnits(
None, None, -13080040, 3961860, None, None, 'EPSG:3857')
assert result[2] == pytest.approx(96, 1)
assert result[3] == pytest.approx(88, 1)
assert result[:2] == (None, None)
result = tsNoProj._convertProjectionUnits(
-117.5, 33, None, None, 0.5, 0.5, 'EPSG:4326')
assert result[0] == pytest.approx(96, 1)
assert result[1] == pytest.approx(149, 1)
assert result[2] == pytest.approx(147, 1)
assert result[3] == pytest.approx(89, 1)
result = tsNoProj._convertProjectionUnits(
None, None, -117, 33.5, 0.5, 0.5, 'EPSG:4326')
assert result[0] == pytest.approx(96, 1)
assert result[1] == pytest.approx(149, 1)
assert result[2] == pytest.approx(147, 1)
assert result[3] == pytest.approx(89, 1)
result = tsNoProj._convertProjectionUnits(
-117.5, 33, None, None, 0.5, 0.5, 'EPSG:4326', unitsWH='base_pixels')
assert result[0] == pytest.approx(96, 1)
assert result[1] == pytest.approx(149, 1)
assert result[2:] == (None, None, 'base_pixels')
with pytest.raises(TileSourceException, match='Cannot convert'):
tsNoProj._convertProjectionUnits(
-117.5, None, -117, None, None, None, 'EPSG:4326')
tsProj = large_image_source_gdal.GDALFileTileSource(imagePath, projection='EPSG:3857')
result = tsProj._convertProjectionUnits(
-13024380, 3895303, None, None, None, None, 'EPSG:3857')
assert result[0] == pytest.approx(-13024380, 1)
assert result[1] == pytest.approx(3895303, 1)
assert result[2:] == (None, None, 'projection')
def testGuardAgainstBadLatLong():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'global_dem.tif')
source = large_image_source_gdal.GDALFileTileSource(imagePath)
bounds = source.getBounds(srs='EPSG:4326')
assert bounds['xmin'] == -180.00416667
assert bounds['xmax'] == 179.99583333
assert bounds['ymin'] == -89.99583333
assert bounds['ymax'] == 90
def testPalettizedGeotiff():
imagePath = utilities.externaldata('data/landcover_sample_1000.tif.sha512')
source = large_image_source_gdal.GDALFileTileSource(imagePath)
tileMetadata = source.getMetadata()
assert tileMetadata['tileWidth'] == 256
assert tileMetadata['tileHeight'] == 256
assert tileMetadata['sizeX'] == 687
assert tileMetadata['sizeY'] == 509
assert tileMetadata['levels'] == 3
assert tileMetadata['bounds']['srs'].strip().startswith(
'+proj=aea +lat_0=23 +lon_0=-96 +lat_1=29.5 +lat_2=45.5 +x_0=0 +y_0=0')
assert tileMetadata['geospatial']
assert len(tileMetadata['bands']) == 1
assert tileMetadata['bands'][1]['interpretation'] == 'palette'
# Getting the metadata with a specified projection will be different
source = large_image_source_gdal.GDALFileTileSource(
imagePath, projection='EPSG:3857', encoding='PNG')
tileMetadata = source.getMetadata()
assert tileMetadata['tileWidth'] == 256
assert tileMetadata['tileHeight'] == 256
assert tileMetadata['sizeX'] == 65536
assert tileMetadata['sizeY'] == 65536
assert tileMetadata['levels'] == 9
assert tileMetadata['bounds']['xmax'] == pytest.approx(-7837888, 1)
assert tileMetadata['bounds']['xmin'] == pytest.approx(-8909162, 1)
assert tileMetadata['bounds']['ymax'] == pytest.approx(5755717, 1)
assert tileMetadata['bounds']['ymin'] == pytest.approx(4876273, 1)
assert tileMetadata['bounds']['srs'] == '+init=epsg:3857'
assert tileMetadata['geospatial']
image = source.getTile(37, 46, 7)
image = PIL.Image.open(six.BytesIO(image))
image = numpy.asarray(image)
assert list(image[0, 0, :]) == [0, 0, 0, 0]
assert list(image[255, 0, :]) == [221, 201, 201, 255]
def testRetileProjection():
imagePath = utilities.externaldata('data/landcover_sample_1000.tif.sha512')
ts = large_image_source_gdal.GDALFileTileSource(imagePath, projection='EPSG:3857')
ti = ts.getSingleTile(tile_size=dict(width=1000, height=1000), tile_position=1000)
assert ti['tile'].size == 3000000
tile = ts.getTile(1178, 1507, 12)
assert len(tile) > 1000
def testInternalMetadata():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
source = large_image_source_gdal.GDALFileTileSource(imagePath)
metadata = source.getInternalMetadata()
assert metadata['driverShortName'] == 'GTiff'
def testGetRegionWithProjection():
imagePath = utilities.externaldata('data/landcover_sample_1000.tif.sha512')
ts = large_image_source_gdal.GDALFileTileSource(imagePath, projection='EPSG:3857')
region, _ = ts.getRegion(output=dict(maxWidth=1024, maxHeight=1024),
format=constants.TILE_FORMAT_NUMPY)
assert region.shape == (1024, 1024, 4)
| 44.6875 | 94 | 0.676351 |
import glob
import json
import numpy
import os
import PIL.Image
import PIL.ImageChops
import pytest
import six
from large_image import constants
from large_image.exceptions import TileSourceException
import large_image_source_gdal
from . import utilities
def _assertImageMatches(image, testRootName, saveTestImageFailurePath='/tmp'):
if isinstance(image, six.binary_type):
image = PIL.Image.open(six.BytesIO(image))
image = image.convert('RGBA')
testDir = os.path.dirname(os.path.realpath(__file__))
testImagePaths = glob.glob(os.path.join(
testDir, 'test_files', testRootName + '*.png'))
testImages = [PIL.Image.open(testImagePath).convert('RGBA')
for testImagePath in testImagePaths]
diffs = [PIL.ImageChops.difference(image, testImage).getbbox()
for testImage in testImages]
if None not in diffs and saveTestImageFailurePath:
image.save(os.path.join(saveTestImageFailurePath, testRootName + '_test.png'))
assert None in diffs
def testTileFromGeotiffs():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
source = large_image_source_gdal.GDALFileTileSource(imagePath)
tileMetadata = source.getMetadata()
assert tileMetadata['tileWidth'] == 256
assert tileMetadata['tileHeight'] == 256
assert tileMetadata['sizeX'] == 256
assert tileMetadata['sizeY'] == 256
assert tileMetadata['levels'] == 1
assert tileMetadata['bounds']['xmax'] == 597915.0
assert tileMetadata['bounds']['xmin'] == 367185.0
assert tileMetadata['bounds']['ymax'] == 3788115.0
assert tileMetadata['bounds']['ymin'] == 3552885.0
assert (tileMetadata['bounds']['srs'].strip() ==
'+proj=utm +zone=11 +datum=WGS84 +units=m +no_defs')
assert tileMetadata['geospatial']
assert len(tileMetadata['bands']) == 3
assert tileMetadata['bands'][2]['interpretation'] == 'green'
assert tileMetadata['bands'][2]['max'] == 212.0
assert tileMetadata['bands'][2]['min'] == 0.0
source = large_image_source_gdal.GDALFileTileSource(
imagePath, projection='EPSG:3857')
tileMetadata = source.getMetadata()
assert tileMetadata['tileWidth'] == 256
assert tileMetadata['tileHeight'] == 256
assert tileMetadata['sizeX'] == 65536
assert tileMetadata['sizeY'] == 65536
assert tileMetadata['levels'] == 9
assert tileMetadata['bounds']['xmax'] == pytest.approx(-12906033, 1)
assert tileMetadata['bounds']['xmin'] == pytest.approx(-13184900, 1)
assert tileMetadata['bounds']['ymax'] == pytest.approx(4059661, 1)
assert tileMetadata['bounds']['ymin'] == pytest.approx(3777034, 1)
assert tileMetadata['bounds']['srs'] == '+init=epsg:3857'
assert tileMetadata['geospatial']
source = large_image_source_gdal.GDALFileTileSource(
imagePath, projection='EPSG:3857', style=json.dumps({'band': -1}), encoding='PNG')
image = source.getTile(89, 207, 9)
_assertImageMatches(image, 'geotiff_9_89_207')
def testTileLinearStyleFromGeotiffs():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
style = json.dumps({'band': 1, 'min': 0, 'max': 100,
'palette': 'matplotlib.Plasma_6',
'scheme': 'linear'})
source = large_image_source_gdal.GDALFileTileSource(
imagePath, projection='EPSG:3857', style=style, encoding='PNG')
image = source.getTile(22, 51, 7)
_assertImageMatches(image, 'geotiff_style_linear_7_22_51')
def testTileStyleBadInput():
def _assertStyleResponse(imagePath, style, message):
with pytest.raises(TileSourceException, match=message):
source = large_image_source_gdal.GDALFileTileSource(
imagePath, projection='EPSG:3857', style=json.dumps(style), encoding='PNG')
source.getTile(22, 51, 7)
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
_assertStyleResponse(imagePath, {
'band': 1.1,
}, 'Band has to be a positive integer, -1, or a band interpretation found in the source.')
_assertStyleResponse(imagePath, {
'band': 500,
}, 'Band has to be a positive integer, -1, or a band interpretation found in the source.')
_assertStyleResponse(imagePath, {
'band': 1,
'palette': 'nonexistent.palette'
}, 'Palette is not a valid palettable path.')
_assertStyleResponse(imagePath, ['style'],
'Style is not a valid json object.')
def testThumbnailFromGeotiffs():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
source = large_image_source_gdal.GDALFileTileSource(imagePath)
image, mimeType = source.getThumbnail(encoding='PNG')
assert image[:len(utilities.PNGHeader)] == utilities.PNGHeader
source = large_image_source_gdal.GDALFileTileSource(imagePath, projection='EPSG:3857')
image2, mimeType = source.getThumbnail(encoding='PNG')
assert image2[:len(utilities.PNGHeader)] == utilities.PNGHeader
assert image != image2
def testPixel():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
source = large_image_source_gdal.GDALFileTileSource(imagePath)
pixel = source.getPixel(region={'left': 212, 'top': 198})
assert pixel == {
'r': 76, 'g': 78, 'b': 77, 'a': 255, 'bands': {1: 62.0, 2: 65.0, 3: 66.0}}
pixel = source.getPixel(region={'left': 2120, 'top': 198})
assert pixel == {}
source = large_image_source_gdal.GDALFileTileSource(imagePath, projection='EPSG:3857')
pixel = source.getPixel(region={'left': -13132910, 'top': 4010586, 'units': 'projection'})
assert pixel == {
'r': 94, 'g': 98, 'b': 99, 'a': 255, 'bands': {1: 77.0, 2: 82.0, 3: 84.0}}
style = json.dumps({'band': 1, 'min': 0, 'max': 100,
'palette': 'matplotlib.Plasma_6'})
source = large_image_source_gdal.GDALFileTileSource(
imagePath, projection='EPSG:3857', style=style)
pixel = source.getPixel(region={'left': -13132910, 'top': 4010586, 'units': 'projection'})
assert pixel == {
'r': 247, 'g': 156, 'b': 60, 'a': 255, 'bands': {1: 77.0, 2: 82.0, 3: 84.0}}
style = json.dumps({'band': 1, 'min': 0, 'max': 100,
'palette': ['#0000ff', '#00ff00', '#ff0000']})
source = large_image_source_gdal.GDALFileTileSource(
imagePath, projection='EPSG:3857', style=style)
pixel = source.getPixel(region={'left': -13132910, 'top': 4010586, 'units': 'projection'})
assert pixel == {
'r': 137, 'g': 117, 'b': 0, 'a': 255, 'bands': {1: 77.0, 2: 82.0, 3: 84.0}}
source = large_image_source_gdal.GDALFileTileSource(imagePath, projection='EPSG:3857')
pixel = source.getPixel(region={'left': -13132910, 'top': 4010586, 'units': 'EPSG:3857'})
assert pixel == {
'r': 94, 'g': 98, 'b': 99, 'a': 255, 'bands': {1: 77.0, 2: 82.0, 3: 84.0}}
pixel = source.getPixel(region={'left': -117.975, 'top': 33.865, 'units': 'WGS84'})
assert pixel == {
'r': 94, 'g': 98, 'b': 99, 'a': 255, 'bands': {1: 77.0, 2: 82.0, 3: 84.0}}
source = large_image_source_gdal.GDALFileTileSource(imagePath)
pixel = source.getPixel(region={'left': -13132910, 'top': 4010586, 'units': 'EPSG:3857'})
assert pixel == {
'r': 94, 'g': 98, 'b': 99, 'a': 255, 'bands': {1: 77.0, 2: 82.0, 3: 84.0}}
def testSourceErrors():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
with pytest.raises(TileSourceException, match='must not be geographic'):
large_image_source_gdal.GDALFileTileSource(imagePath, 'EPSG:4326')
imagePath = os.path.join(testDir, 'test_files', 'zero_gi.tif')
with pytest.raises(TileSourceException, match='cannot be opened via'):
large_image_source_gdal.GDALFileTileSource(imagePath)
imagePath = os.path.join(testDir, 'test_files', 'yb10kx5k.png')
with pytest.raises(TileSourceException, match='does not have a projected scale'):
large_image_source_gdal.GDALFileTileSource(imagePath)
def testStereographicProjection():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
# specify unitsPerPixel
with pytest.raises(TileSourceException, match='unitsPerPixel must be specified'):
large_image_source_gdal.GDALFileTileSource(imagePath, 'EPSG:3411')
# But will pass if unitsPerPixel is specified
large_image_source_gdal.GDALFileTileSource(imagePath, 'EPSG:3411', unitsPerPixel=150000)
def testProj4Proj():
# Test obtaining pyproj.Proj projection values
proj4Proj = large_image_source_gdal.GDALFileTileSource._proj4Proj
proj = proj4Proj(b'epsg:4326')
assert proj4Proj(u'epsg:4326').srs == proj.srs
assert proj4Proj('proj4:EPSG:4326').srs == proj.srs
assert proj4Proj(4326) is None
def testConvertProjectionUnits():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
tsNoProj = large_image_source_gdal.GDALFileTileSource(imagePath)
result = tsNoProj._convertProjectionUnits(
-13024380, 3895303, None, None, None, None, 'EPSG:3857')
assert result[0] == pytest.approx(147, 1)
assert result[1] == pytest.approx(149, 1)
assert result[2:] == (None, None, 'base_pixels')
result = tsNoProj._convertProjectionUnits(
None, None, -13080040, 3961860, None, None, 'EPSG:3857')
assert result[2] == pytest.approx(96, 1)
assert result[3] == pytest.approx(88, 1)
assert result[:2] == (None, None)
result = tsNoProj._convertProjectionUnits(
-117.5, 33, None, None, 0.5, 0.5, 'EPSG:4326')
assert result[0] == pytest.approx(96, 1)
assert result[1] == pytest.approx(149, 1)
assert result[2] == pytest.approx(147, 1)
assert result[3] == pytest.approx(89, 1)
result = tsNoProj._convertProjectionUnits(
None, None, -117, 33.5, 0.5, 0.5, 'EPSG:4326')
assert result[0] == pytest.approx(96, 1)
assert result[1] == pytest.approx(149, 1)
assert result[2] == pytest.approx(147, 1)
assert result[3] == pytest.approx(89, 1)
result = tsNoProj._convertProjectionUnits(
-117.5, 33, None, None, 0.5, 0.5, 'EPSG:4326', unitsWH='base_pixels')
assert result[0] == pytest.approx(96, 1)
assert result[1] == pytest.approx(149, 1)
assert result[2:] == (None, None, 'base_pixels')
with pytest.raises(TileSourceException, match='Cannot convert'):
tsNoProj._convertProjectionUnits(
-117.5, None, -117, None, None, None, 'EPSG:4326')
tsProj = large_image_source_gdal.GDALFileTileSource(imagePath, projection='EPSG:3857')
result = tsProj._convertProjectionUnits(
-13024380, 3895303, None, None, None, None, 'EPSG:3857')
assert result[0] == pytest.approx(-13024380, 1)
assert result[1] == pytest.approx(3895303, 1)
assert result[2:] == (None, None, 'projection')
def testGuardAgainstBadLatLong():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'global_dem.tif')
source = large_image_source_gdal.GDALFileTileSource(imagePath)
bounds = source.getBounds(srs='EPSG:4326')
assert bounds['xmin'] == -180.00416667
assert bounds['xmax'] == 179.99583333
assert bounds['ymin'] == -89.99583333
assert bounds['ymax'] == 90
def testPalettizedGeotiff():
imagePath = utilities.externaldata('data/landcover_sample_1000.tif.sha512')
source = large_image_source_gdal.GDALFileTileSource(imagePath)
tileMetadata = source.getMetadata()
assert tileMetadata['tileWidth'] == 256
assert tileMetadata['tileHeight'] == 256
assert tileMetadata['sizeX'] == 687
assert tileMetadata['sizeY'] == 509
assert tileMetadata['levels'] == 3
assert tileMetadata['bounds']['srs'].strip().startswith(
'+proj=aea +lat_0=23 +lon_0=-96 +lat_1=29.5 +lat_2=45.5 +x_0=0 +y_0=0')
assert tileMetadata['geospatial']
assert len(tileMetadata['bands']) == 1
assert tileMetadata['bands'][1]['interpretation'] == 'palette'
# Getting the metadata with a specified projection will be different
source = large_image_source_gdal.GDALFileTileSource(
imagePath, projection='EPSG:3857', encoding='PNG')
tileMetadata = source.getMetadata()
assert tileMetadata['tileWidth'] == 256
assert tileMetadata['tileHeight'] == 256
assert tileMetadata['sizeX'] == 65536
assert tileMetadata['sizeY'] == 65536
assert tileMetadata['levels'] == 9
assert tileMetadata['bounds']['xmax'] == pytest.approx(-7837888, 1)
assert tileMetadata['bounds']['xmin'] == pytest.approx(-8909162, 1)
assert tileMetadata['bounds']['ymax'] == pytest.approx(5755717, 1)
assert tileMetadata['bounds']['ymin'] == pytest.approx(4876273, 1)
assert tileMetadata['bounds']['srs'] == '+init=epsg:3857'
assert tileMetadata['geospatial']
image = source.getTile(37, 46, 7)
image = PIL.Image.open(six.BytesIO(image))
image = numpy.asarray(image)
assert list(image[0, 0, :]) == [0, 0, 0, 0]
assert list(image[255, 0, :]) == [221, 201, 201, 255]
def testRetileProjection():
imagePath = utilities.externaldata('data/landcover_sample_1000.tif.sha512')
ts = large_image_source_gdal.GDALFileTileSource(imagePath, projection='EPSG:3857')
ti = ts.getSingleTile(tile_size=dict(width=1000, height=1000), tile_position=1000)
assert ti['tile'].size == 3000000
tile = ts.getTile(1178, 1507, 12)
assert len(tile) > 1000
def testInternalMetadata():
testDir = os.path.dirname(os.path.realpath(__file__))
imagePath = os.path.join(testDir, 'test_files', 'rgb_geotiff.tiff')
source = large_image_source_gdal.GDALFileTileSource(imagePath)
metadata = source.getInternalMetadata()
assert metadata['driverShortName'] == 'GTiff'
def testGetRegionWithProjection():
imagePath = utilities.externaldata('data/landcover_sample_1000.tif.sha512')
ts = large_image_source_gdal.GDALFileTileSource(imagePath, projection='EPSG:3857')
region, _ = ts.getRegion(output=dict(maxWidth=1024, maxHeight=1024),
format=constants.TILE_FORMAT_NUMPY)
assert region.shape == (1024, 1024, 4)
| true | true |
f724993bfb65f691c47509e2ab9bc57c799df3c7 | 1,799 | py | Python | app/models/supply_code.py | uwhvz/uwhvz | 72805d0e55740c3d90251dd4b4e40bf5c9e296d1 | [
"MIT"
] | 2 | 2019-12-15T06:30:37.000Z | 2020-01-26T23:12:27.000Z | app/models/supply_code.py | uwhvz/uwhvz | 72805d0e55740c3d90251dd4b4e40bf5c9e296d1 | [
"MIT"
] | 37 | 2020-01-22T02:36:32.000Z | 2020-10-06T15:05:37.000Z | app/models/supply_code.py | uwhvz/uwhvz | 72805d0e55740c3d90251dd4b4e40bf5c9e296d1 | [
"MIT"
] | 2 | 2020-06-24T03:07:36.000Z | 2020-06-24T03:10:46.000Z | import uuid
from datetime import datetime
from django.db import models
from django.utils import timezone
from .game import Game
from .player import Player
from .util import generate_code
class SupplyCodeManager(models.Manager):
def create_supply_code(self, game: Game, value: 5, code: None) -> 'SupplyCode':
if code is None or code == '' or self.filter(code=code):
code = generate_code(6)
# For set of all supply codes, each code must be unique
while self.filter(code=code):
code = generate_code(6)
if type(value) is int:
value = int(value)
else:
value = 5
supply_code = self.model(code=code.upper(), game=game, value=value)
supply_code.save()
return supply_code
class SupplyCode(models.Model):
id: uuid = models.UUIDField(primary_key=True, default=uuid.uuid4)
game: Game = models.ForeignKey(Game, on_delete=models.CASCADE)
code: str = models.CharField(max_length=6, unique=True)
value: int = models.IntegerField()
point_modifier: int = models.IntegerField(default=0)
active: bool = models.BooleanField(default=True)
claimed_by: Player = models.ForeignKey(Player, on_delete=models.CASCADE, null=True, blank=True)
claimed_at: datetime = models.DateTimeField(null=True, blank=True)
created_at: datetime = models.DateTimeField(auto_now_add=True)
modified_at: datetime = models.DateTimeField(auto_now=True)
objects = SupplyCodeManager()
def claim(self, player: Player, point_modifier: int) -> 'SupplyCode':
self.claimed_by = player
self.claimed_at = timezone.now()
self.point_modifier = point_modifier
self.save()
return self
def __str__(self):
return self.code
| 32.709091 | 99 | 0.678155 | import uuid
from datetime import datetime
from django.db import models
from django.utils import timezone
from .game import Game
from .player import Player
from .util import generate_code
class SupplyCodeManager(models.Manager):
def create_supply_code(self, game: Game, value: 5, code: None) -> 'SupplyCode':
if code is None or code == '' or self.filter(code=code):
code = generate_code(6)
while self.filter(code=code):
code = generate_code(6)
if type(value) is int:
value = int(value)
else:
value = 5
supply_code = self.model(code=code.upper(), game=game, value=value)
supply_code.save()
return supply_code
class SupplyCode(models.Model):
id: uuid = models.UUIDField(primary_key=True, default=uuid.uuid4)
game: Game = models.ForeignKey(Game, on_delete=models.CASCADE)
code: str = models.CharField(max_length=6, unique=True)
value: int = models.IntegerField()
point_modifier: int = models.IntegerField(default=0)
active: bool = models.BooleanField(default=True)
claimed_by: Player = models.ForeignKey(Player, on_delete=models.CASCADE, null=True, blank=True)
claimed_at: datetime = models.DateTimeField(null=True, blank=True)
created_at: datetime = models.DateTimeField(auto_now_add=True)
modified_at: datetime = models.DateTimeField(auto_now=True)
objects = SupplyCodeManager()
def claim(self, player: Player, point_modifier: int) -> 'SupplyCode':
self.claimed_by = player
self.claimed_at = timezone.now()
self.point_modifier = point_modifier
self.save()
return self
def __str__(self):
return self.code
| true | true |
f724993d2ddc89e333243979253d709c8bed589f | 49 | py | Python | je_editor/ui/ui_event/text_process/__init__.py | JE-Chen/je_editor | 2f18dedb6f0eb27c38668dc53f520739c8d5c6c6 | [
"MIT"
] | 1 | 2021-12-10T14:57:15.000Z | 2021-12-10T14:57:15.000Z | je_editor/ui/ui_event/text_process/__init__.py | JE-Chen/je_editor | 2f18dedb6f0eb27c38668dc53f520739c8d5c6c6 | [
"MIT"
] | null | null | null | je_editor/ui/ui_event/text_process/__init__.py | JE-Chen/je_editor | 2f18dedb6f0eb27c38668dc53f520739c8d5c6c6 | [
"MIT"
] | null | null | null | from je_editor.ui.ui_event.text_process import *
| 24.5 | 48 | 0.836735 | from je_editor.ui.ui_event.text_process import *
| true | true |
f7249959198c554003e2dec70da578ce0dcef41f | 5,791 | py | Python | GANs/stargan/generate.py | shikisawamura/nnabla-examples | baf4e4cc620dedbf4368683325c0fb868676850d | [
"Apache-2.0"
] | 1 | 2020-08-03T12:49:25.000Z | 2020-08-03T12:49:25.000Z | GANs/stargan/generate.py | takuseno/nnabla-examples | 070d25078ad3d5458744dbfd390cdd926e20e573 | [
"Apache-2.0"
] | null | null | null | GANs/stargan/generate.py | takuseno/nnabla-examples | 070d25078ad3d5458744dbfd390cdd926e20e573 | [
"Apache-2.0"
] | 1 | 2020-04-25T06:11:28.000Z | 2020-04-25T06:11:28.000Z | # Copyright (c) 2019 Sony Corporation. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import argparse
import nnabla as nn
from nnabla.ext_utils import get_extension_context
import numpy as np
import json
import glob
import model
from nnabla.utils.image_utils import imread, imsave, imresize
import functools
def saveimage(path, img):
img = (img * 0.5) + 0.5
imsave(path, img, channel_first=True)
def save_results(i, args, used_config, img_trg, lbl_trg):
target_attr_flags = lbl_trg.d[0].reshape(lbl_trg.d[0].size)
target_domain = "_".join([attr for idx, attr in zip(
target_attr_flags, used_config["selected_attrs"]) if bool(idx) is True])
result_x = img_trg.d[0]
filename = os.path.join(args.result_save_path,
"generated_{}_{}.png".format(i, target_domain))
saveimage(filename, result_x)
print("Saved {}.".format(filename))
return
def img_preprocess(img_paths, used_config):
image_size = used_config["image_size"]
images = list()
image_names = list()
for img_path in img_paths:
# Load (and resize) image and labels.
image = imread(img_path, num_channels=3, channel_first=True)
if image.dtype == np.uint8:
# Clip image's value from [0, 255] -> [0.0, 1.0]
image = image / 255.0
image = (image - 0.5) / 0.5 # Normalize
image = imresize(image, (image_size, image_size),
interpolate='bilinear', channel_first=True)
images.append(image)
image_names.append(img_path.split("/")[-1])
return np.asarray(images), np.asarray(image_names)
def get_user_input(used_config):
label = [0 for _ in range(used_config["c_dim"])]
choice = used_config["selected_attrs"]
for i, c in enumerate(choice):
print("Use '{}'?".format(c))
while 1:
ans = input("type yes or no: ")
if ans in ["yes", "no"]:
label[i] = 1 if ans == "yes" else 0
break
else:
print("type 'yes' or 'no'.")
#label[i] = int(bool(input("if yes, type 1, if not, just press enter:")))
return np.array(label)
def generate(args):
# Load the config data used for training.
with open(args.config, "r") as f:
used_config = json.load(f)
paramfile = args.pretrained_params
img_paths = glob.glob(os.path.join(args.test_image_path, "*.png"))
assert os.path.isfile(paramfile) and paramfile.split(
"/")[-1] == used_config["pretrained_params"], "Corresponding parameter file not found."
print("Learned attributes choice: {}".format(
used_config["selected_attrs"]))
# Prepare Generator and Discriminator based on user config.
generator = functools.partial(
model.generator, conv_dim=used_config["g_conv_dim"], c_dim=used_config["c_dim"], repeat_num=used_config["g_repeat_num"])
x_real = nn.Variable(
[1, 3, used_config["image_size"], used_config["image_size"]])
label_trg = nn.Variable([1, used_config["c_dim"], 1, 1])
with nn.parameter_scope("gen"):
x_fake = generator(x_real, label_trg)
x_fake.persistent = True
nn.load_parameters(paramfile) # load learned parameters.
images, image_names = img_preprocess(img_paths, used_config)
for i, (image, image_name) in enumerate(zip(images, image_names)):
# Get real images.
print("Source image: {}".format(image_name))
x_real.d = image
# Generate target domain based on user input.
label_trg.d = np.reshape(get_user_input(used_config), label_trg.shape)
# Execute image translation.
x_fake.forward(clear_no_need_grad=True)
save_results(i, args, used_config, x_fake, label_trg)
def get_args():
parser = argparse.ArgumentParser()
# Generation
parser.add_argument('--context', '-c', type=str,
default='cudnn', help="Extension path. ex) cpu, cudnn.")
parser.add_argument("--device-id", "-d", type=str, default='0',
help='Device ID the training run on. This is only valid if you specify `-c cudnn`.')
parser.add_argument("--type-config", "-t", type=str, default='float',
help='Type of computation. e.g. "float", "half".')
parser.add_argument('--test-image-path', type=str,
help='a directory containing images used for image translation')
parser.add_argument('--result-save-path', type=str,
default="tmp.results", help='a directory to save generated images')
parser.add_argument('--pretrained-params', type=str, required=True,
help='path to the parameters used for generation.')
parser.add_argument('--config', type=str, required=True,
help='path to the config file used for generation.')
args = parser.parse_args()
if not os.path.isdir(args.result_save_path):
os.makedirs(args.result_save_path)
return args
def main():
args = get_args()
ctx = get_extension_context(
args.context, device_id=args.device_id, type_config=args.type_config)
nn.set_default_context(ctx)
generate(args)
if __name__ == '__main__':
main()
| 35.746914 | 128 | 0.64842 |
import os
import argparse
import nnabla as nn
from nnabla.ext_utils import get_extension_context
import numpy as np
import json
import glob
import model
from nnabla.utils.image_utils import imread, imsave, imresize
import functools
def saveimage(path, img):
img = (img * 0.5) + 0.5
imsave(path, img, channel_first=True)
def save_results(i, args, used_config, img_trg, lbl_trg):
target_attr_flags = lbl_trg.d[0].reshape(lbl_trg.d[0].size)
target_domain = "_".join([attr for idx, attr in zip(
target_attr_flags, used_config["selected_attrs"]) if bool(idx) is True])
result_x = img_trg.d[0]
filename = os.path.join(args.result_save_path,
"generated_{}_{}.png".format(i, target_domain))
saveimage(filename, result_x)
print("Saved {}.".format(filename))
return
def img_preprocess(img_paths, used_config):
image_size = used_config["image_size"]
images = list()
image_names = list()
for img_path in img_paths:
image = imread(img_path, num_channels=3, channel_first=True)
if image.dtype == np.uint8:
image = image / 255.0
image = (image - 0.5) / 0.5 # Normalize
image = imresize(image, (image_size, image_size),
interpolate='bilinear', channel_first=True)
images.append(image)
image_names.append(img_path.split("/")[-1])
return np.asarray(images), np.asarray(image_names)
def get_user_input(used_config):
label = [0 for _ in range(used_config["c_dim"])]
choice = used_config["selected_attrs"]
for i, c in enumerate(choice):
print("Use '{}'?".format(c))
while 1:
ans = input("type yes or no: ")
if ans in ["yes", "no"]:
label[i] = 1 if ans == "yes" else 0
break
else:
print("type 'yes' or 'no'.")
#label[i] = int(bool(input("if yes, type 1, if not, just press enter:")))
return np.array(label)
def generate(args):
# Load the config data used for training.
with open(args.config, "r") as f:
used_config = json.load(f)
paramfile = args.pretrained_params
img_paths = glob.glob(os.path.join(args.test_image_path, "*.png"))
assert os.path.isfile(paramfile) and paramfile.split(
"/")[-1] == used_config["pretrained_params"], "Corresponding parameter file not found."
print("Learned attributes choice: {}".format(
used_config["selected_attrs"]))
# Prepare Generator and Discriminator based on user config.
generator = functools.partial(
model.generator, conv_dim=used_config["g_conv_dim"], c_dim=used_config["c_dim"], repeat_num=used_config["g_repeat_num"])
x_real = nn.Variable(
[1, 3, used_config["image_size"], used_config["image_size"]])
label_trg = nn.Variable([1, used_config["c_dim"], 1, 1])
with nn.parameter_scope("gen"):
x_fake = generator(x_real, label_trg)
x_fake.persistent = True
nn.load_parameters(paramfile) # load learned parameters.
images, image_names = img_preprocess(img_paths, used_config)
for i, (image, image_name) in enumerate(zip(images, image_names)):
# Get real images.
print("Source image: {}".format(image_name))
x_real.d = image
# Generate target domain based on user input.
label_trg.d = np.reshape(get_user_input(used_config), label_trg.shape)
# Execute image translation.
x_fake.forward(clear_no_need_grad=True)
save_results(i, args, used_config, x_fake, label_trg)
def get_args():
parser = argparse.ArgumentParser()
# Generation
parser.add_argument('--context', '-c', type=str,
default='cudnn', help="Extension path. ex) cpu, cudnn.")
parser.add_argument("--device-id", "-d", type=str, default='0',
help='Device ID the training run on. This is only valid if you specify `-c cudnn`.')
parser.add_argument("--type-config", "-t", type=str, default='float',
help='Type of computation. e.g. "float", "half".')
parser.add_argument('--test-image-path', type=str,
help='a directory containing images used for image translation')
parser.add_argument('--result-save-path', type=str,
default="tmp.results", help='a directory to save generated images')
parser.add_argument('--pretrained-params', type=str, required=True,
help='path to the parameters used for generation.')
parser.add_argument('--config', type=str, required=True,
help='path to the config file used for generation.')
args = parser.parse_args()
if not os.path.isdir(args.result_save_path):
os.makedirs(args.result_save_path)
return args
def main():
args = get_args()
ctx = get_extension_context(
args.context, device_id=args.device_id, type_config=args.type_config)
nn.set_default_context(ctx)
generate(args)
if __name__ == '__main__':
main()
| true | true |
f72499cae7a442091d28cd688401d7778b935183 | 486 | py | Python | tests/test_methods/test_calls.py | jackwardell/SlackTime | c40be4854a26084e1a368a975e220d613c14d8d8 | [
"Apache-2.0"
] | 2 | 2020-09-24T00:07:13.000Z | 2020-09-27T19:27:06.000Z | tests/test_methods/test_calls.py | jackwardell/SlackTime | c40be4854a26084e1a368a975e220d613c14d8d8 | [
"Apache-2.0"
] | null | null | null | tests/test_methods/test_calls.py | jackwardell/SlackTime | c40be4854a26084e1a368a975e220d613c14d8d8 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
def test_calls_add(slack_time):
assert slack_time.calls.add
def test_calls_end(slack_time):
assert slack_time.calls.end
def test_calls_info(slack_time):
assert slack_time.calls.info
def test_calls_update(slack_time):
assert slack_time.calls.update
def test_calls_participants_add(slack_time):
assert slack_time.calls.participants.add
def test_calls_participants_remove(slack_time):
assert slack_time.calls.participants.remove
| 18.692308 | 47 | 0.781893 |
def test_calls_add(slack_time):
assert slack_time.calls.add
def test_calls_end(slack_time):
assert slack_time.calls.end
def test_calls_info(slack_time):
assert slack_time.calls.info
def test_calls_update(slack_time):
assert slack_time.calls.update
def test_calls_participants_add(slack_time):
assert slack_time.calls.participants.add
def test_calls_participants_remove(slack_time):
assert slack_time.calls.participants.remove
| true | true |
f7249a82deee9e950217299ff16ff9a37d24226b | 407 | py | Python | recruiter_portal/urls.py | yhaojin/recruitment_pipeline | 860f25185864a1b259d08e88f42aca86f8206a4e | [
"MIT"
] | null | null | null | recruiter_portal/urls.py | yhaojin/recruitment_pipeline | 860f25185864a1b259d08e88f42aca86f8206a4e | [
"MIT"
] | null | null | null | recruiter_portal/urls.py | yhaojin/recruitment_pipeline | 860f25185864a1b259d08e88f42aca86f8206a4e | [
"MIT"
] | null | null | null | from django.urls import path
from .views import RecruiterIndexView, take_on_application_view, SaveTaskChangesView
urlpatterns = [
path('', RecruiterIndexView.as_view(), name='recruiter_portal'),
path('take_on_application/<application_pk>/', take_on_application_view, name='take_on_application'),
path('save_task_changes/<task_pk>/', SaveTaskChangesView.as_view(), name='save_task_changes'),
]
| 40.7 | 104 | 0.783784 | from django.urls import path
from .views import RecruiterIndexView, take_on_application_view, SaveTaskChangesView
urlpatterns = [
path('', RecruiterIndexView.as_view(), name='recruiter_portal'),
path('take_on_application/<application_pk>/', take_on_application_view, name='take_on_application'),
path('save_task_changes/<task_pk>/', SaveTaskChangesView.as_view(), name='save_task_changes'),
]
| true | true |
f7249ae817449c506e905d847baae94b9a76abf9 | 1,594 | py | Python | numpy/distutils/command/build.py | gmabey/numpy | 9e9ec3821c1d6a055543e54336ecb2c98ec42c5f | [
"BSD-3-Clause"
] | 652 | 2015-07-26T00:00:17.000Z | 2022-02-24T18:30:04.000Z | numpy/distutils/command/build.py | gmabey/numpy | 9e9ec3821c1d6a055543e54336ecb2c98ec42c5f | [
"BSD-3-Clause"
] | 8 | 2015-09-07T03:38:19.000Z | 2021-05-23T03:18:51.000Z | numpy/distutils/command/build.py | gmabey/numpy | 9e9ec3821c1d6a055543e54336ecb2c98ec42c5f | [
"BSD-3-Clause"
] | 40 | 2015-07-24T19:45:08.000Z | 2021-11-01T14:54:56.000Z | from __future__ import division, absolute_import, print_function
import os
import sys
from distutils.command.build import build as old_build
from distutils.util import get_platform
from numpy.distutils.command.config_compiler import show_fortran_compilers
class build(old_build):
sub_commands = [('config_cc', lambda *args: True),
('config_fc', lambda *args: True),
('build_src', old_build.has_ext_modules),
] + old_build.sub_commands
user_options = old_build.user_options + [
('fcompiler=', None,
"specify the Fortran compiler type"),
('jobs=', 'j',
"number of parallel jobs"),
]
help_options = old_build.help_options + [
('help-fcompiler', None, "list available Fortran compilers",
show_fortran_compilers),
]
def initialize_options(self):
old_build.initialize_options(self)
self.fcompiler = None
self.jobs = None
def finalize_options(self):
if self.jobs:
try:
self.jobs = int(self.jobs)
except ValueError:
raise ValueError("--jobs/-j argument must be an integer")
build_scripts = self.build_scripts
old_build.finalize_options(self)
plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
if build_scripts is None:
self.build_scripts = os.path.join(self.build_base,
'scripts' + plat_specifier)
def run(self):
old_build.run(self)
| 33.208333 | 74 | 0.604141 | from __future__ import division, absolute_import, print_function
import os
import sys
from distutils.command.build import build as old_build
from distutils.util import get_platform
from numpy.distutils.command.config_compiler import show_fortran_compilers
class build(old_build):
sub_commands = [('config_cc', lambda *args: True),
('config_fc', lambda *args: True),
('build_src', old_build.has_ext_modules),
] + old_build.sub_commands
user_options = old_build.user_options + [
('fcompiler=', None,
"specify the Fortran compiler type"),
('jobs=', 'j',
"number of parallel jobs"),
]
help_options = old_build.help_options + [
('help-fcompiler', None, "list available Fortran compilers",
show_fortran_compilers),
]
def initialize_options(self):
old_build.initialize_options(self)
self.fcompiler = None
self.jobs = None
def finalize_options(self):
if self.jobs:
try:
self.jobs = int(self.jobs)
except ValueError:
raise ValueError("--jobs/-j argument must be an integer")
build_scripts = self.build_scripts
old_build.finalize_options(self)
plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
if build_scripts is None:
self.build_scripts = os.path.join(self.build_base,
'scripts' + plat_specifier)
def run(self):
old_build.run(self)
| true | true |
f7249c7b22c9dba1da94e74c57ecbd871341b782 | 1,824 | py | Python | test/functional/signmessages.py | Whiff-dev/WhiffV2.0 | e44fffbe9f448e2bd2362cc74057bc541594f58b | [
"MIT"
] | null | null | null | test/functional/signmessages.py | Whiff-dev/WhiffV2.0 | e44fffbe9f448e2bd2362cc74057bc541594f58b | [
"MIT"
] | null | null | null | test/functional/signmessages.py | Whiff-dev/WhiffV2.0 | e44fffbe9f448e2bd2362cc74057bc541594f58b | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2016 The Bitcoin Core developers
# Copyright (c) 2017 The Whiff Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC commands for signing and verifying messages."""
from test_framework.test_framework import WhiffTestFramework
from test_framework.util import assert_equal
class SignMessagesTest(WhiffTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
message = 'This is just a test message'
self.log.info('test signing with priv_key')
priv_key = 'cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N'
address = 'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB'
expected_signature = 'H5vCbG+WhOeOPJ3jf6oux/1oSjkuIGZigCw4NW+A0/fSDlgdO4fMq0SWSfx7gUMB9kuG+t/0BQxtXaTCr7v9fGM='
signature = self.nodes[0].signmessagewithprivkey(priv_key, message)
assert_equal(expected_signature, signature)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test signing with an address with wallet')
address = self.nodes[0].getnewaddress()
signature = self.nodes[0].signmessage(address, message)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test verifying with another address should not work')
other_address = self.nodes[0].getnewaddress()
other_signature = self.nodes[0].signmessage(other_address, message)
assert(not self.nodes[0].verifymessage(other_address, signature, message))
assert(not self.nodes[0].verifymessage(address, other_signature, message))
if __name__ == '__main__':
SignMessagesTest().main()
| 45.6 | 119 | 0.736294 |
from test_framework.test_framework import WhiffTestFramework
from test_framework.util import assert_equal
class SignMessagesTest(WhiffTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
message = 'This is just a test message'
self.log.info('test signing with priv_key')
priv_key = 'cUeKHd5orzT3mz8P9pxyREHfsWtVfgsfDjiZZBcjUBAaGk1BTj7N'
address = 'mpLQjfK79b7CCV4VMJWEWAj5Mpx8Up5zxB'
expected_signature = 'H5vCbG+WhOeOPJ3jf6oux/1oSjkuIGZigCw4NW+A0/fSDlgdO4fMq0SWSfx7gUMB9kuG+t/0BQxtXaTCr7v9fGM='
signature = self.nodes[0].signmessagewithprivkey(priv_key, message)
assert_equal(expected_signature, signature)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test signing with an address with wallet')
address = self.nodes[0].getnewaddress()
signature = self.nodes[0].signmessage(address, message)
assert(self.nodes[0].verifymessage(address, signature, message))
self.log.info('test verifying with another address should not work')
other_address = self.nodes[0].getnewaddress()
other_signature = self.nodes[0].signmessage(other_address, message)
assert(not self.nodes[0].verifymessage(other_address, signature, message))
assert(not self.nodes[0].verifymessage(address, other_signature, message))
if __name__ == '__main__':
SignMessagesTest().main()
| true | true |
f7249cef52c93864a1af11a36d8ddabcc8314339 | 201,806 | py | Python | cinder/tests/test_hp3par.py | AO-AO/cmss-cinder | d1212908041e431d0fa4d42b40c4459a193484e6 | [
"Apache-2.0"
] | null | null | null | cinder/tests/test_hp3par.py | AO-AO/cmss-cinder | d1212908041e431d0fa4d42b40c4459a193484e6 | [
"Apache-2.0"
] | null | null | null | cinder/tests/test_hp3par.py | AO-AO/cmss-cinder | d1212908041e431d0fa4d42b40c4459a193484e6 | [
"Apache-2.0"
] | null | null | null | #
# (c) Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for OpenStack Cinder volume drivers."""
import mock
import ast
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import units
from cinder import context
from cinder import exception
from cinder import test
from cinder.tests import fake_hp_3par_client as hp3parclient
from cinder.volume.drivers.san.hp import hp_3par_common as hpcommon
from cinder.volume.drivers.san.hp import hp_3par_fc as hpfcdriver
from cinder.volume.drivers.san.hp import hp_3par_iscsi as hpdriver
from cinder.volume import qos_specs
from cinder.volume import utils as volume_utils
from cinder.volume import volume_types
hpexceptions = hp3parclient.hpexceptions
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
HP3PAR_CPG = 'OpenStackCPG'
HP3PAR_CPG2 = 'fakepool'
HP3PAR_CPG_QOS = 'qospool'
HP3PAR_CPG_SNAP = 'OpenStackCPGSnap'
HP3PAR_USER_NAME = 'testUser'
HP3PAR_USER_PASS = 'testPassword'
HP3PAR_SAN_IP = '2.2.2.2'
HP3PAR_SAN_SSH_PORT = 999
HP3PAR_SAN_SSH_CON_TIMEOUT = 44
HP3PAR_SAN_SSH_PRIVATE = 'foobar'
GOODNESS_FUNCTION = \
"stats.capacity_utilization < 0.6? 100:25"
FILTER_FUNCTION = \
"stats.total_volumes < 400 && stats.capacity_utilization < 0.8"
CHAP_USER_KEY = "HPQ-cinder-CHAP-name"
CHAP_PASS_KEY = "HPQ-cinder-CHAP-secret"
FLASH_CACHE_ENABLED = 1
FLASH_CACHE_DISABLED = 2
class HP3PARBaseDriver(object):
    """Shared fixtures and tests for the HP 3PAR FC and iSCSI drivers."""

    class CommentMatcher(object):
        """Mock-call argument matcher for 3PAR volume comment strings.

        The driver stores volume metadata as a dict-literal string in the
        3PAR "comment" field.  This matcher parses the actual string with
        ast.literal_eval and compares it against an expected dict using
        the test case's assertEqual, so call assertions are insensitive
        to key ordering within the comment.
        """

        def __init__(self, f, expect):
            # f: the test case's assertEqual; expect: the expected dict.
            self.assertEqual = f
            self.expect = expect

        def __eq__(self, actual):
            actual_as_dict = dict(ast.literal_eval(actual))
            self.assertEqual(self.expect, actual_as_dict)
            return True
VOLUME_ID = 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'
CLONE_ID = 'd03338a9-9115-48a3-8dfc-000000000000'
VOLUME_TYPE_ID_DEDUP = 'd03338a9-9115-48a3-8dfc-11111111111'
VOLUME_TYPE_ID_FLASH_CACHE = 'd03338a9-9115-48a3-8dfc-22222222222'
VOLUME_NAME = 'volume-' + VOLUME_ID
VOLUME_NAME_3PAR = 'osv-0DM4qZEVSKON-DXN-NwVpw'
SNAPSHOT_ID = '2f823bdc-e36e-4dc8-bd15-de1c7a28ff31'
SNAPSHOT_NAME = 'snapshot-2f823bdc-e36e-4dc8-bd15-de1c7a28ff31'
VOLUME_3PAR_NAME = 'osv-0DM4qZEVSKON-DXN-NwVpw'
SNAPSHOT_3PAR_NAME = 'oss-L4I73ONuTci9Fd4ceij-MQ'
# fake host on the 3par
FAKE_HOST = 'fakehost'
FAKE_CINDER_HOST = 'fakehost@foo#' + HP3PAR_CPG
USER_ID = '2689d9a913974c008b1d859013f23607'
PROJECT_ID = 'fac88235b9d64685a3530f73e490348f'
VOLUME_ID_SNAP = '761fc5e5-5191-4ec7-aeba-33e36de44156'
FAKE_DESC = 'test description name'
FAKE_FC_PORTS = [{'portPos': {'node': 7, 'slot': 1, 'cardPort': 1},
'portWWN': '0987654321234',
'protocol': 1,
'mode': 2,
'linkState': 4},
{'portPos': {'node': 6, 'slot': 1, 'cardPort': 1},
'portWWN': '123456789000987',
'protocol': 1,
'mode': 2,
'linkState': 4}]
QOS = {'qos:maxIOPS': '1000', 'qos:maxBWS': '50',
'qos:minIOPS': '100', 'qos:minBWS': '25',
'qos:latency': '25', 'qos:priority': 'low'}
QOS_SPECS = {'maxIOPS': '1000', 'maxBWS': '50',
'minIOPS': '100', 'minBWS': '25',
'latency': '25', 'priority': 'low'}
VVS_NAME = "myvvs"
FAKE_ISCSI_PORT = {'portPos': {'node': 8, 'slot': 1, 'cardPort': 1},
'protocol': 2,
'mode': 2,
'IPAddr': '1.1.1.2',
'iSCSIName': ('iqn.2000-05.com.3pardata:'
'21810002ac00383d'),
'linkState': 4}
volume = {'name': VOLUME_NAME,
'id': VOLUME_ID,
'display_name': 'Foo Volume',
'size': 2,
'host': FAKE_CINDER_HOST,
'volume_type': None,
'volume_type_id': None}
volume_encrypted = {'name': VOLUME_NAME,
'id': VOLUME_ID,
'display_name': 'Foo Volume',
'size': 2,
'host': FAKE_CINDER_HOST,
'volume_type': None,
'volume_type_id': None,
'encryption_key_id': 'fake_key'}
volume_dedup = {'name': VOLUME_NAME,
'id': VOLUME_ID,
'display_name': 'Foo Volume',
'size': 2,
'host': FAKE_CINDER_HOST,
'volume_type': 'dedup',
'volume_type_id': VOLUME_TYPE_ID_DEDUP}
volume_pool = {'name': VOLUME_NAME,
'id': VOLUME_ID,
'display_name': 'Foo Volume',
'size': 2,
'host': volume_utils.append_host(FAKE_HOST, HP3PAR_CPG2),
'volume_type': None,
'volume_type_id': None}
volume_qos = {'name': VOLUME_NAME,
'id': VOLUME_ID,
'display_name': 'Foo Volume',
'size': 2,
'host': FAKE_CINDER_HOST,
'volume_type': None,
'volume_type_id': 'gold'}
volume_flash_cache = {'name': VOLUME_NAME,
'id': VOLUME_ID,
'display_name': 'Foo Volume',
'size': 2,
'host': FAKE_CINDER_HOST,
'volume_type': None,
'volume_type_id': VOLUME_TYPE_ID_FLASH_CACHE}
snapshot = {'name': SNAPSHOT_NAME,
'id': SNAPSHOT_ID,
'user_id': USER_ID,
'project_id': PROJECT_ID,
'volume_id': VOLUME_ID_SNAP,
'volume_name': VOLUME_NAME,
'status': 'creating',
'progress': '0%',
'volume_size': 2,
'display_name': 'fakesnap',
'display_description': FAKE_DESC}
wwn = ["123456789012345", "123456789054321"]
connector = {'ip': '10.0.0.2',
'initiator': 'iqn.1993-08.org.debian:01:222',
'wwpns': [wwn[0], wwn[1]],
'wwnns': ["223456789012345", "223456789054321"],
'host': FAKE_HOST}
volume_type = {'name': 'gold',
'deleted': False,
'updated_at': None,
'extra_specs': {'cpg': HP3PAR_CPG2,
'qos:maxIOPS': '1000',
'qos:maxBWS': '50',
'qos:minIOPS': '100',
'qos:minBWS': '25',
'qos:latency': '25',
'qos:priority': 'low'},
'deleted_at': None,
'id': 'gold'}
volume_type_dedup = {'name': 'dedup',
'deleted': False,
'updated_at': None,
'extra_specs': {'cpg': HP3PAR_CPG2,
'provisioning': 'dedup'},
'deleted_at': None,
'id': VOLUME_TYPE_ID_DEDUP}
volume_type_flash_cache = {'name': 'flash-cache-on',
'deleted': False,
'updated_at': None,
'extra_specs': {'cpg': HP3PAR_CPG2,
'hp3par:flash_cache': 'true'},
'deleted_at': None,
'id': VOLUME_TYPE_ID_FLASH_CACHE}
flash_cache_3par_keys = {'flash_cache': 'true'}
cpgs = [
{'SAGrowth': {'LDLayout': {'diskPatterns': [{'diskType': 2}]},
'incrementMiB': 8192},
'SAUsage': {'rawTotalMiB': 24576,
'rawUsedMiB': 768,
'totalMiB': 8192,
'usedMiB': 256},
'SDGrowth': {'LDLayout': {'RAIDType': 4,
'diskPatterns': [{'diskType': 2}]},
'incrementMiB': 32768},
'SDUsage': {'rawTotalMiB': 49152,
'rawUsedMiB': 1023,
'totalMiB': 36864,
'usedMiB': 1024 * 1},
'UsrUsage': {'rawTotalMiB': 57344,
'rawUsedMiB': 43349,
'totalMiB': 43008,
'usedMiB': 1024 * 20},
'additionalStates': [],
'degradedStates': [],
'failedStates': [],
'id': 5,
'name': HP3PAR_CPG,
'numFPVVs': 2,
'numTPVVs': 0,
'numTDVVs': 1,
'state': 1,
'uuid': '29c214aa-62b9-41c8-b198-543f6cf24edf'}]
TASK_DONE = 1
TASK_ACTIVE = 2
STATUS_DONE = {'status': 1}
STATUS_ACTIVE = {'status': 2}
mock_client_conf = {
'PORT_MODE_TARGET': 2,
'PORT_STATE_READY': 4,
'PORT_PROTO_ISCSI': 2,
'PORT_PROTO_FC': 1,
'TASK_DONE': TASK_DONE,
'TASK_ACTIVE': TASK_ACTIVE,
'HOST_EDIT_ADD': 1,
'CHAP_INITIATOR': 1,
'CHAP_TARGET': 2,
'getPorts.return_value': {
'members': FAKE_FC_PORTS + [FAKE_ISCSI_PORT]
}
}
RETYPE_VVS_NAME = "yourvvs"
RETYPE_HOST = {
u'host': u'mark-stack1@3parfc',
u'capabilities': {
'QoS_support': True,
u'location_info': u'HP3PARDriver:1234567:MARK_TEST_CPG',
u'timestamp': u'2014-06-04T19:03:32.485540',
u'allocated_capacity_gb': 0,
u'volume_backend_name': u'3parfc',
u'free_capacity_gb': u'infinite',
u'driver_version': u'2.0.3',
u'total_capacity_gb': u'infinite',
u'reserved_percentage': 0,
u'vendor_name': u'Hewlett-Packard',
u'storage_protocol': u'FC'
}
}
RETYPE_HOST_NOT3PAR = {
u'host': u'mark-stack1@3parfc',
u'capabilities': {
u'location_info': u'XXXDriverXXX:1610771:MARK_TEST_CPG',
}
}
RETYPE_QOS_SPECS = {'maxIOPS': '1000', 'maxBWS': '50',
'minIOPS': '100', 'minBWS': '25',
'latency': '25', 'priority': 'high'}
RETYPE_VOLUME_TYPE_ID = "FakeVolId"
RETYPE_VOLUME_TYPE_0 = {
'name': 'red',
'id': RETYPE_VOLUME_TYPE_ID,
'extra_specs': {
'cpg': HP3PAR_CPG,
'snap_cpg': HP3PAR_CPG_SNAP,
'vvs': RETYPE_VVS_NAME,
'qos': RETYPE_QOS_SPECS,
'tpvv': True,
'tdvv': False,
'volume_type': volume_type
}
}
RETYPE_VOLUME_TYPE_1 = {
'name': 'white',
'id': RETYPE_VOLUME_TYPE_ID,
'extra_specs': {
'cpg': HP3PAR_CPG,
'snap_cpg': HP3PAR_CPG_SNAP,
'vvs': VVS_NAME,
'qos': QOS,
'tpvv': True,
'tdvv': False,
'volume_type': volume_type
}
}
RETYPE_VOLUME_TYPE_2 = {
'name': 'blue',
'id': RETYPE_VOLUME_TYPE_ID,
'extra_specs': {
'cpg': HP3PAR_CPG_QOS,
'snap_cpg': HP3PAR_CPG_SNAP,
'vvs': RETYPE_VVS_NAME,
'qos': RETYPE_QOS_SPECS,
'tpvv': True,
'tdvv': False,
'volume_type': volume_type
}
}
RETYPE_VOLUME_TYPE_3 = {
'name': 'purple',
'id': RETYPE_VOLUME_TYPE_ID,
'extra_specs': {
'cpg': HP3PAR_CPG_QOS,
'snap_cpg': HP3PAR_CPG_SNAP,
'vvs': RETYPE_VVS_NAME,
'qos': RETYPE_QOS_SPECS,
'tpvv': False,
'tdvv': True,
'volume_type': volume_type
}
}
RETYPE_VOLUME_TYPE_BAD_PERSONA = {
'name': 'bad_persona',
'id': 'any_id',
'extra_specs': {
'hp3par:persona': '99 - invalid'
}
}
RETYPE_VOLUME_TYPE_BAD_CPG = {
'name': 'bad_cpg',
'id': 'any_id',
'extra_specs': {
'cpg': 'bogus',
'snap_cpg': 'bogus',
'hp3par:persona': '2 - Generic-ALUA'
}
}
MANAGE_VOLUME_INFO = {
'userCPG': 'testUserCpg0',
'snapCPG': 'testSnapCpg0',
'provisioningType': 1,
'comment': "{'display_name': 'Foo Volume'}"
}
MV_INFO_WITH_NO_SNAPCPG = {
'userCPG': 'testUserCpg0',
'provisioningType': 1,
'comment': "{'display_name': 'Foo Volume'}"
}
RETYPE_TEST_COMMENT = "{'retype_test': 'test comment'}"
RETYPE_VOLUME_INFO_0 = {
'name': VOLUME_NAME,
'id': VOLUME_ID,
'display_name': 'Retype Vol0',
'size': 1,
'host': RETYPE_HOST,
'userCPG': 'testUserCpg0',
'snapCPG': 'testSnapCpg0',
'provisioningType': 1,
'comment': RETYPE_TEST_COMMENT
}
RETYPE_TEST_COMMENT_1 = "{'retype_test': 'test comment 1'}"
RETYPE_VOLUME_INFO_1 = {
'name': VOLUME_NAME,
'id': VOLUME_ID,
'display_name': 'Retype Vol1',
'size': 1,
'host': RETYPE_HOST,
'userCPG': HP3PAR_CPG,
'snapCPG': HP3PAR_CPG_SNAP,
'provisioningType': 1,
'comment': RETYPE_TEST_COMMENT
}
RETYPE_TEST_COMMENT_2 = "{'retype_test': 'test comment 2'}"
RETYPE_VOLUME_INFO_2 = {
'name': VOLUME_NAME,
'id': VOLUME_ID,
'display_name': 'Retype Vol2',
'size': 1,
'host': RETYPE_HOST,
'userCPG': HP3PAR_CPG,
'snapCPG': HP3PAR_CPG_SNAP,
'provisioningType': 3,
'comment': RETYPE_TEST_COMMENT
}
# Test for when we don't get a snapCPG.
RETYPE_VOLUME_INFO_NO_SNAP = {
'name': VOLUME_NAME,
'id': VOLUME_ID,
'display_name': 'Retype Vol2',
'size': 1,
'host': RETYPE_HOST,
'userCPG': 'testUserCpg2',
'provisioningType': 1,
'comment': '{}'
}
RETYPE_CONF = {
'TASK_ACTIVE': TASK_ACTIVE,
'TASK_DONE': TASK_DONE,
'getTask.return_value': STATUS_DONE,
'getStorageSystemInfo.return_value': {'serialNumber': '1234567'},
'getVolume.return_value': RETYPE_VOLUME_INFO_0,
'modifyVolume.return_value': ("anyResponse", {'taskid': 1})
}
# 3PAR retype currently doesn't use the diff. Existing code and fresh info
# from the array work better for the most part. Some use of the diff was
# intentionally removed to make _retype more usable for other use cases.
RETYPE_DIFF = None
wsapi_version_312 = {'major': 1,
'build': 30102422,
'minor': 3,
'revision': 1}
wsapi_version_for_dedup = {'major': 1,
'build': 30201120,
'minor': 4,
'revision': 1}
wsapi_version_for_flash_cache = {'major': 1,
'build': 30201200,
'minor': 4,
'revision': 2}
# Use this to point to latest version of wsapi
wsapi_version_latest = wsapi_version_for_flash_cache
standard_login = [
mock.call.login(HP3PAR_USER_NAME, HP3PAR_USER_PASS),
mock.call.setSSHOptions(
HP3PAR_SAN_IP,
HP3PAR_USER_NAME,
HP3PAR_USER_PASS,
missing_key_policy='AutoAddPolicy',
privatekey=HP3PAR_SAN_SSH_PRIVATE,
known_hosts_file=mock.ANY,
port=HP3PAR_SAN_SSH_PORT,
conn_timeout=HP3PAR_SAN_SSH_CON_TIMEOUT)]
standard_logout = [
mock.call.logout()]
def setup_configuration(self):
configuration = mock.Mock()
configuration.hp3par_debug = False
configuration.hp3par_username = HP3PAR_USER_NAME
configuration.hp3par_password = HP3PAR_USER_PASS
configuration.hp3par_api_url = 'https://1.1.1.1/api/v1'
configuration.hp3par_cpg = [HP3PAR_CPG, HP3PAR_CPG2]
configuration.hp3par_cpg_snap = HP3PAR_CPG_SNAP
configuration.iscsi_ip_address = '1.1.1.2'
configuration.iscsi_port = '1234'
configuration.san_ip = HP3PAR_SAN_IP
configuration.san_login = HP3PAR_USER_NAME
configuration.san_password = HP3PAR_USER_PASS
configuration.san_ssh_port = HP3PAR_SAN_SSH_PORT
configuration.ssh_conn_timeout = HP3PAR_SAN_SSH_CON_TIMEOUT
configuration.san_private_key = HP3PAR_SAN_SSH_PRIVATE
configuration.hp3par_snapshot_expiration = ""
configuration.hp3par_snapshot_retention = ""
configuration.hp3par_iscsi_ips = []
configuration.hp3par_iscsi_chap_enabled = False
configuration.goodness_function = GOODNESS_FUNCTION
configuration.filter_function = FILTER_FUNCTION
return configuration
    @mock.patch(
        'hp3parclient.client.HP3ParClient',
        spec=True,
    )
    def setup_mock_client(self, _m_client, driver, conf=None, m_conf=None):
        """Instantiate *driver* with a fully mocked HP3ParClient.

        :param _m_client: the patched HP3ParClient class injected by the
            decorator above; its return_value is the instance the driver
            will use.
        :param driver: driver class to construct (FC or iSCSI variant).
        :param conf: optional configuration; defaults to
            setup_configuration().
        :param m_conf: optional dict of mock attributes layered over
            mock_client_conf.
        :returns: the configured mock client instance.
        """
        _m_client = _m_client.return_value
        # Configure the base constants, defaults etc...
        _m_client.configure_mock(**self.mock_client_conf)
        # If m_conf, drop those over the top of the base_conf.
        if m_conf is not None:
            _m_client.configure_mock(**m_conf)
        if conf is None:
            conf = self.setup_configuration()
        self.driver = driver(configuration=conf)
        self.driver.do_setup(None)
        return _m_client
    @mock.patch('hp3parclient.version', "3.0.9")
    def test_unsupported_client_version(self):
        """A too-old hp3parclient must be rejected during driver setup."""
        self.assertRaises(exception.InvalidInput,
                          self.setup_driver)
    @mock.patch('hp3parclient.version', "3.1.2")
    def test_ssh_options(self):
        """With strict host key checking off, AutoAddPolicy is used."""
        expected_hosts_key_file = "test_hosts_key_file"
        # Save and restore the global SSH options around the driver setup.
        orig_ssh_hosts_key_file = CONF.ssh_hosts_key_file
        orig_strict_ssh_host_key_policy = CONF.strict_ssh_host_key_policy
        CONF.ssh_hosts_key_file = expected_hosts_key_file
        CONF.strict_ssh_host_key_policy = False
        self.ctxt = context.get_admin_context()
        mock_client = self.setup_mock_client(driver=hpfcdriver.HP3PARFCDriver)
        CONF.ssh_hosts_key_file = orig_ssh_hosts_key_file
        CONF.strict_ssh_host_key_policy = orig_strict_ssh_host_key_policy
        expected = [
            mock.call.login(HP3PAR_USER_NAME, HP3PAR_USER_PASS),
            mock.call.setSSHOptions(
                HP3PAR_SAN_IP,
                HP3PAR_USER_NAME,
                HP3PAR_USER_PASS,
                privatekey=HP3PAR_SAN_SSH_PRIVATE,
                known_hosts_file=expected_hosts_key_file,
                missing_key_policy="AutoAddPolicy",
                port=HP3PAR_SAN_SSH_PORT,
                conn_timeout=HP3PAR_SAN_SSH_CON_TIMEOUT),
            mock.call.getCPG(HP3PAR_CPG),
            mock.call.getCPG(HP3PAR_CPG2)]
        mock_client.assert_has_calls(
            expected +
            self.standard_logout)
    @mock.patch('hp3parclient.version', "3.1.2")
    def test_ssh_options_strict(self):
        """With strict host key checking on, RejectPolicy is used."""
        expected_hosts_key_file = "test_hosts_key_file"
        # Save and restore the global SSH options around the driver setup.
        orig_ssh_hosts_key_file = CONF.ssh_hosts_key_file
        orig_strict_ssh_host_key_policy = CONF.strict_ssh_host_key_policy
        CONF.ssh_hosts_key_file = expected_hosts_key_file
        CONF.strict_ssh_host_key_policy = True
        self.ctxt = context.get_admin_context()
        mock_client = self.setup_mock_client(driver=hpfcdriver.HP3PARFCDriver)
        CONF.ssh_hosts_key_file = orig_ssh_hosts_key_file
        CONF.strict_ssh_host_key_policy = orig_strict_ssh_host_key_policy
        expected = [
            mock.call.login(HP3PAR_USER_NAME, HP3PAR_USER_PASS),
            mock.call.setSSHOptions(
                HP3PAR_SAN_IP,
                HP3PAR_USER_NAME,
                HP3PAR_USER_PASS,
                privatekey=HP3PAR_SAN_SSH_PRIVATE,
                known_hosts_file=expected_hosts_key_file,
                missing_key_policy="RejectPolicy",
                port=HP3PAR_SAN_SSH_PORT,
                conn_timeout=HP3PAR_SAN_SSH_CON_TIMEOUT),
            mock.call.getCPG(HP3PAR_CPG),
            mock.call.getCPG(HP3PAR_CPG2)]
        mock_client.assert_has_calls(expected + self.standard_logout)
def test_task_waiter(self):
task_statuses = [self.STATUS_ACTIVE, self.STATUS_ACTIVE]
def side_effect(*args):
return task_statuses and task_statuses.pop(0) or self.STATUS_DONE
conf = {'getTask.side_effect': side_effect}
mock_client = self.setup_driver(mock_conf=conf)
task_id = 1234
interval = .001
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
waiter = common.TaskWaiter(mock_client, task_id, interval)
status = waiter.wait_for_task()
expected = [
mock.call.getTask(task_id),
mock.call.getTask(task_id),
mock.call.getTask(task_id)
]
mock_client.assert_has_calls(expected)
self.assertEqual(status, self.STATUS_DONE)
    def test_create_volume(self):
        """create_volume issues createVolume with the default CPG/comment."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.driver.create_volume(self.volume)
            # Exact JSON comment the driver is expected to store on the array.
            comment = (
                '{"display_name": "Foo Volume", "type": "OpenStack",'
                ' "name": "volume-d03338a9-9115-48a3-8dfc-35cdfcdc15a7",'
                ' "volume_id": "d03338a9-9115-48a3-8dfc-35cdfcdc15a7"}')
            expected = [
                mock.call.createVolume(
                    self.VOLUME_3PAR_NAME,
                    HP3PAR_CPG,
                    1907, {
                        'comment': comment,
                        'tpvv': True,
                        'tdvv': False,
                        'snapCPG': HP3PAR_CPG_SNAP})]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    def test_create_volume_in_pool(self):
        """A pool in the host string selects that CPG for createVolume."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            return_model = self.driver.create_volume(self.volume_pool)
            comment = (
                '{"display_name": "Foo Volume", "type": "OpenStack",'
                ' "name": "volume-d03338a9-9115-48a3-8dfc-35cdfcdc15a7",'
                ' "volume_id": "d03338a9-9115-48a3-8dfc-35cdfcdc15a7"}')
            expected = [
                mock.call.createVolume(
                    self.VOLUME_3PAR_NAME,
                    HP3PAR_CPG2,
                    1907, {
                        'comment': comment,
                        'tpvv': True,
                        'tdvv': False,
                        'snapCPG': HP3PAR_CPG_SNAP})]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertEqual(return_model, None)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_unsupported_dedup_volume_type(self, _mock_volume_types):
        """Dedup provisioning must be rejected on a pre-dedup WSAPI."""
        mock_client = self.setup_driver(wsapi_version=self.wsapi_version_312)
        _mock_volume_types.return_value = {
            'name': 'dedup',
            'extra_specs': {
                'cpg': HP3PAR_CPG_QOS,
                'snap_cpg': HP3PAR_CPG_SNAP,
                'vvs_name': self.VVS_NAME,
                'qos': self.QOS,
                'provisioning': 'dedup',
                'volume_type': self.volume_type_dedup}}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            self.assertRaises(exception.InvalidInput,
                              common.get_volume_settings_from_type_id,
                              self.VOLUME_TYPE_ID_DEDUP,
                              "mock")
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_get_snap_cpg_from_volume_type(self, _mock_volume_types):
        """snap_cpg from the volume type's extra specs wins."""
        mock_client = self.setup_driver()
        expected_type_snap_cpg = "type_snap_cpg"
        _mock_volume_types.return_value = {
            'name': 'gold',
            'extra_specs': {
                'cpg': HP3PAR_CPG,
                'snap_cpg': expected_type_snap_cpg,
                'volume_type': self.volume_type}}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            result = common.get_volume_settings_from_type_id(
                "mock", self.driver.configuration.hp3par_cpg)
            self.assertEqual(expected_type_snap_cpg, result['snap_cpg'])
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_get_snap_cpg_from_volume_type_cpg(self, _mock_volume_types):
        """Without a type snap_cpg, hp3par_cpg_snap from config is used."""
        mock_client = self.setup_driver()
        expected_cpg = 'use_extra_specs_cpg'
        _mock_volume_types.return_value = {
            'name': 'gold',
            'extra_specs': {
                'cpg': expected_cpg,
                'volume_type': self.volume_type}}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            result = common.get_volume_settings_from_type_id(
                "mock", self.driver.configuration.hp3par_cpg)
            self.assertEqual(self.driver.configuration.hp3par_cpg_snap,
                             result['snap_cpg'])
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_get_snap_cpg_from_volume_type_conf_snap_cpg(
            self, _mock_volume_types):
        """No type cpg/snap_cpg: configured hp3par_cpg_snap is used."""
        _mock_volume_types.return_value = {
            'name': 'gold',
            'extra_specs': {
                'volume_type': self.volume_type}}
        conf = self.setup_configuration()
        expected_snap_cpg = conf.hp3par_cpg_snap
        mock_client = self.setup_driver(config=conf)
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            result = common.get_volume_settings_from_type_id(
                "mock", self.driver.configuration.hp3par_cpg)
            self.assertEqual(expected_snap_cpg, result['snap_cpg'])
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_get_snap_cpg_from_volume_type_conf_cpg(
            self, _mock_volume_types):
        """No snap CPG anywhere: fall all the way back to hp3par_cpg."""
        _mock_volume_types.return_value = {
            'name': 'gold',
            'extra_specs': {
                'volume_type': self.volume_type}}
        conf = self.setup_configuration()
        # Clear the configured snap CPG so the fallback path is exercised.
        conf.hp3par_cpg_snap = None
        expected_cpg = conf.hp3par_cpg
        mock_client = self.setup_driver(config=conf)
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            result = common.get_volume_settings_from_type_id(
                "mock", self.driver.configuration.hp3par_cpg)
            self.assertEqual(expected_cpg, result['snap_cpg'])
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_create_volume_qos(self, _mock_volume_types):
        """Creating a QoS-typed volume records the type in the comment."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        _mock_volume_types.return_value = {
            'name': 'gold',
            'extra_specs': {
                'cpg': HP3PAR_CPG_QOS,
                'snap_cpg': HP3PAR_CPG_SNAP,
                'vvs_name': self.VVS_NAME,
                'qos': self.QOS,
                'tpvv': True,
                'tdvv': False,
                'volume_type': self.volume_type}}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            return_model = self.driver.create_volume(self.volume_qos)
            comment = (
                '{"volume_type_name": "gold", "display_name": "Foo Volume"'
                ', "name": "volume-d03338a9-9115-48a3-8dfc-35cdfcdc15a7'
                '", "volume_type_id": "gold", "volume_id": "d03338a9-91'
                '15-48a3-8dfc-35cdfcdc15a7", "qos": {}, "type": "OpenStack"}')
            expected = [
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.createVolume(
                    self.VOLUME_3PAR_NAME,
                    HP3PAR_CPG,
                    1907, {
                        'comment': comment,
                        'tpvv': True,
                        'tdvv': False,
                        'snapCPG': HP3PAR_CPG_SNAP})]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertEqual(return_model, None)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_create_volume_dedup(self, _mock_volume_types):
        """A dedup-typed volume is created with tdvv=True, tpvv=False."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        _mock_volume_types.return_value = {
            'name': 'dedup',
            'extra_specs': {
                'cpg': HP3PAR_CPG_QOS,
                'snap_cpg': HP3PAR_CPG_SNAP,
                'vvs_name': self.VVS_NAME,
                'qos': self.QOS,
                'provisioning': 'dedup',
                'volume_type': self.volume_type_dedup}}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            return_model = self.driver.create_volume(self.volume_dedup)
            comment = (
                '{"volume_type_name": "dedup", "display_name": "Foo Volume"'
                ', "name": "volume-d03338a9-9115-48a3-8dfc-35cdfcdc15a7'
                '", "volume_type_id": "d03338a9-9115-48a3-8dfc-11111111111"'
                ', "volume_id": "d03338a9-9115-48a3-8dfc-35cdfcdc15a7"'
                ', "qos": {}, "type": "OpenStack"}')
            expected = [
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.createVolume(
                    self.VOLUME_3PAR_NAME,
                    HP3PAR_CPG,
                    1907, {
                        'comment': comment,
                        'tpvv': False,
                        'tdvv': True,
                        'snapCPG': HP3PAR_CPG_SNAP})]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertEqual(return_model, None)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_create_volume_flash_cache(self, _mock_volume_types):
        """A flash-cache-typed volume also creates a VVS with the policy."""
        # Setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        _mock_volume_types.return_value = {
            'name': 'flash-cache-on',
            'extra_specs': {
                'cpg': HP3PAR_CPG2,
                'snap_cpg': HP3PAR_CPG_SNAP,
                'vvs_name': self.VVS_NAME,
                'qos': self.QOS,
                'tpvv': True,
                'tdvv': False,
                'hp3par:flash_cache': 'true',
                'volume_type': self.volume_type_flash_cache}}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            mock_client.getCPG.return_value = {'domain': None}
            # Mirror the client's flash cache policy constants on the mock.
            mock_client.FLASH_CACHE_ENABLED = FLASH_CACHE_ENABLED
            mock_client.FLASH_CACHE_DISABLED = FLASH_CACHE_DISABLED
            return_model = self.driver.create_volume(self.volume_flash_cache)
            comment = (
                '{"volume_type_name": "flash-cache-on", '
                '"display_name": "Foo Volume", '
                '"name": "volume-d03338a9-9115-48a3-8dfc-35cdfcdc15a7", '
                '"volume_type_id": "d03338a9-9115-48a3-8dfc-22222222222", '
                '"volume_id": "d03338a9-9115-48a3-8dfc-35cdfcdc15a7", '
                '"qos": {}, "type": "OpenStack"}')
            expected = [
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.createVolume(
                    self.VOLUME_3PAR_NAME,
                    HP3PAR_CPG,
                    1907, {
                        'comment': comment,
                        'tpvv': True,
                        'tdvv': False,
                        'snapCPG': HP3PAR_CPG_SNAP}),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.createVolumeSet('vvs-0DM4qZEVSKON-DXN-NwVpw', None),
                mock.call.createQoSRules(
                    'vvs-0DM4qZEVSKON-DXN-NwVpw',
                    {'priority': 2}
                ),
                mock.call.modifyVolumeSet(
                    'vvs-0DM4qZEVSKON-DXN-NwVpw', flashCachePolicy=1),
                mock.call.addVolumeToVolumeSet(
                    'vvs-0DM4qZEVSKON-DXN-NwVpw',
                    'osv-0DM4qZEVSKON-DXN-NwVpw')]
            mock_client.assert_has_calls(
                [mock.call.getWsApiVersion()] +
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertEqual(return_model, None)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_unsupported_flash_cache_volume(self, _mock_volume_types):
        """Flash cache must be rejected on a pre-flash-cache WSAPI."""
        mock_client = self.setup_driver(wsapi_version=self.wsapi_version_312)
        _mock_volume_types.return_value = {
            'name': 'flash-cache-on',
            'extra_specs': {
                'cpg': HP3PAR_CPG2,
                'snap_cpg': HP3PAR_CPG_SNAP,
                'vvs_name': self.VVS_NAME,
                'qos': self.QOS,
                'tpvv': True,
                'tdvv': False,
                'hp3par:flash_cache': 'true',
                'volume_type': self.volume_type_flash_cache}}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            self.assertRaises(exception.InvalidInput,
                              common.get_flash_cache_policy,
                              self.flash_cache_3par_keys)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_not_3par(self, _mock_volume_types):
        """Retype to a non-3PAR destination host raises InvalidHost."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.assertRaises(exception.InvalidHost,
                              self.driver.retype,
                              self.ctxt,
                              self.RETYPE_VOLUME_INFO_0,
                              self.RETYPE_VOLUME_TYPE_1,
                              self.RETYPE_DIFF,
                              self.RETYPE_HOST_NOT3PAR)
            expected = [mock.call.getVolume(self.VOLUME_3PAR_NAME)]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_volume_not_found(self, _mock_volume_types):
        """Retype propagates HTTPNotFound when the volume is missing."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        mock_client.getVolume.side_effect = hpexceptions.HTTPNotFound
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.assertRaises(hpexceptions.HTTPNotFound,
                              self.driver.retype,
                              self.ctxt,
                              self.RETYPE_VOLUME_INFO_0,
                              self.RETYPE_VOLUME_TYPE_1,
                              self.RETYPE_DIFF,
                              self.RETYPE_HOST)
            expected = [mock.call.getVolume(self.VOLUME_3PAR_NAME)]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_specs_error_reverts_snap_cpg(self, _mock_volume_types):
        """A QoS failure mid-retype must restore the original snap CPG."""
        _mock_volume_types.side_effect = [
            self.RETYPE_VOLUME_TYPE_1, self.RETYPE_VOLUME_TYPE_0]
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        mock_client.getVolume.return_value = self.RETYPE_VOLUME_INFO_0
        # Fail the QOS setting to test the revert of the snap CPG rename.
        mock_client.addVolumeToVolumeSet.side_effect = \
            hpexceptions.HTTPForbidden
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.assertRaises(hpexceptions.HTTPForbidden,
                              self.driver.retype,
                              self.ctxt,
                              {'id': self.VOLUME_ID},
                              self.RETYPE_VOLUME_TYPE_0,
                              self.RETYPE_DIFF,
                              self.RETYPE_HOST)
            old_settings = {
                'snapCPG': self.RETYPE_VOLUME_INFO_0['snapCPG'],
                'comment': self.RETYPE_VOLUME_INFO_0['comment']}
            new_settings = {
                'snapCPG': (
                    self.RETYPE_VOLUME_TYPE_1['extra_specs']['snap_cpg']),
                'comment': mock.ANY}
            # First the retype applies the new settings...
            expected = [
                mock.call.modifyVolume(self.VOLUME_3PAR_NAME, new_settings)
            ]
            mock_client.assert_has_calls(expected)
            # ...then the failure handler reverts to the old settings.
            expected = [
                mock.call.modifyVolume(self.VOLUME_3PAR_NAME, old_settings)
            ]
            mock_client.assert_has_calls(expected + self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_revert_comment(self, _mock_volume_types):
        """A VVS-delete failure mid-retype must restore the old comment."""
        _mock_volume_types.side_effect = [
            self.RETYPE_VOLUME_TYPE_2, self.RETYPE_VOLUME_TYPE_1]
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        mock_client.getVolume.return_value = self.RETYPE_VOLUME_INFO_1
        # Fail the QOS setting to test the revert of the snap CPG rename.
        mock_client.deleteVolumeSet.side_effect = hpexceptions.HTTPForbidden
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.assertRaises(hpexceptions.HTTPForbidden,
                              self.driver.retype,
                              self.ctxt,
                              {'id': self.VOLUME_ID},
                              self.RETYPE_VOLUME_TYPE_2,
                              self.RETYPE_DIFF,
                              self.RETYPE_HOST)
            original = {
                'snapCPG': self.RETYPE_VOLUME_INFO_1['snapCPG'],
                'comment': self.RETYPE_VOLUME_INFO_1['comment']}
            expected = [
                mock.call.modifyVolume('osv-0DM4qZEVSKON-DXN-NwVpw', original)]
            mock_client.assert_has_calls(expected + self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_different_array(self, _mock_volume_types):
        """Retype to a host backed by a different array serial must fail."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        # Serial differs from the '1234567' in RETYPE_HOST's location_info.
        mock_client.getStorageSystemInfo.return_value = {
            'serialNumber': 'XXXXXXX'}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.assertRaises(exception.InvalidHost,
                              self.driver.retype,
                              self.ctxt,
                              self.RETYPE_VOLUME_INFO_0,
                              self.RETYPE_VOLUME_TYPE_1,
                              self.RETYPE_DIFF,
                              self.RETYPE_HOST)
            expected = [
                mock.call.getVolume(self.VOLUME_3PAR_NAME),
                mock.call.getStorageSystemInfo()]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_across_cpg_domains(self, _mock_volume_types):
        """Retype between CPGs in different domains must fail."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        # Source and destination CPGs report different domains.
        mock_client.getCPG.side_effect = [
            {'domain': 'domain1'},
            {'domain': 'domain2'},
        ]
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.assertRaises(exception.Invalid3PARDomain,
                              self.driver.retype,
                              self.ctxt,
                              self.RETYPE_VOLUME_INFO_0,
                              self.RETYPE_VOLUME_TYPE_1,
                              self.RETYPE_DIFF,
                              self.RETYPE_HOST)
            expected = [
                mock.call.getVolume(self.VOLUME_3PAR_NAME),
                mock.call.getStorageSystemInfo(),
                mock.call.getCPG(self.RETYPE_VOLUME_INFO_0['userCPG']),
                mock.call.getCPG(
                    self.RETYPE_VOLUME_TYPE_1['extra_specs']['cpg'])
            ]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_across_snap_cpg_domains(self, _mock_volume_types):
        """Retype where only the snap CPG's domain differs must fail."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        # User CPGs match; the snap CPG is in a different domain.
        mock_client.getCPG.side_effect = [
            {'domain': 'cpg_domain'},
            {'domain': 'cpg_domain'},
            {'domain': 'snap_cpg_domain_1'},
        ]
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.assertRaises(exception.Invalid3PARDomain,
                              self.driver.retype,
                              self.ctxt,
                              self.RETYPE_VOLUME_INFO_0,
                              self.RETYPE_VOLUME_TYPE_1,
                              self.RETYPE_DIFF,
                              self.RETYPE_HOST)
            expected = [
                mock.call.getVolume(self.VOLUME_3PAR_NAME),
                mock.call.getStorageSystemInfo(),
                mock.call.getCPG(self.RETYPE_VOLUME_INFO_0['userCPG']),
                mock.call.getCPG(
                    self.RETYPE_VOLUME_TYPE_1['extra_specs']['cpg']),
                mock.call.getCPG(
                    self.RETYPE_VOLUME_TYPE_1['extra_specs']['snap_cpg'])
            ]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_to_bad_persona(self, _mock_volume_types):
        """Retype to a type with an invalid hp3par:persona must fail."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_BAD_PERSONA
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.assertRaises(exception.InvalidInput,
                              self.driver.retype,
                              self.ctxt,
                              self.RETYPE_VOLUME_INFO_0,
                              self.RETYPE_VOLUME_TYPE_BAD_PERSONA,
                              self.RETYPE_DIFF,
                              self.RETYPE_HOST)
            expected = [mock.call.getVolume(self.VOLUME_3PAR_NAME)]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_tune(self, _mock_volume_types):
        """A successful retype re-tunes the volume and swaps its VVS."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        # Build a real volume type with associated QoS specs in the test DB.
        qos_ref = qos_specs.create(self.ctxt, 'qos-specs-1', self.QOS)
        type_ref = volume_types.create(self.ctxt,
                                       "type1", {"qos:maxIOPS": "100",
                                                 "qos:maxBWS": "50",
                                                 "qos:minIOPS": "10",
                                                 "qos:minBWS": "20",
                                                 "qos:latency": "5",
                                                 "qos:priority": "high"})
        qos_specs.associate_qos_with_type(self.ctxt,
                                          qos_ref['id'],
                                          type_ref['id'])
        type_ref = volume_types.get_volume_type(self.ctxt, type_ref['id'])
        volume = {'id': HP3PARBaseDriver.CLONE_ID}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            retyped = self.driver.retype(
                self.ctxt, volume, type_ref, None, self.RETYPE_HOST)
            self.assertTrue(retyped)
            expected = [
                mock.call.modifyVolume('osv-0DM4qZEVSKON-AAAAAAAAA',
                                       {'comment': mock.ANY,
                                        'snapCPG': 'OpenStackCPGSnap'}),
                mock.call.deleteVolumeSet('vvs-0DM4qZEVSKON-AAAAAAAAA'),
                mock.call.addVolumeToVolumeSet('myvvs',
                                               'osv-0DM4qZEVSKON-AAAAAAAAA'),
                mock.call.modifyVolume('osv-0DM4qZEVSKON-AAAAAAAAA',
                                       {'action': 6,
                                        'userCPG': 'OpenStackCPG',
                                        'conversionOperation': 1,
                                        'tuneOperation': 1}),
                mock.call.getTask(1)
            ]
            mock_client.assert_has_calls(expected + self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_qos_spec(self, _mock_volume_types):
        """Retype with new QoS specs creates a VVS with the new QoS rules."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        cpg = "any_cpg"
        snap_cpg = "any_cpg"
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            common._retype(self.volume,
                           HP3PARBaseDriver.VOLUME_3PAR_NAME,
                           "old_type", "old_type_id",
                           HP3PARBaseDriver.RETYPE_HOST,
                           None, cpg, cpg, snap_cpg, snap_cpg,
                           True, False, False, True, None, None,
                           self.QOS_SPECS, self.RETYPE_QOS_SPECS,
                           None, None,
                           "{}")
            # KB values are the GB-based QoS specs converted to KiB.
            expected = [
                mock.call.createVolumeSet('vvs-0DM4qZEVSKON-DXN-NwVpw', None),
                mock.call.createQoSRules(
                    'vvs-0DM4qZEVSKON-DXN-NwVpw',
                    {'ioMinGoal': 100, 'ioMaxLimit': 1000,
                     'bwMinGoalKB': 25600, 'bwMaxLimitKB': 51200,
                     'priority': 3,
                     'latencyGoal': 25}
                ),
                mock.call.addVolumeToVolumeSet(
                    'vvs-0DM4qZEVSKON-DXN-NwVpw',
                    'osv-0DM4qZEVSKON-DXN-NwVpw')]
            mock_client.assert_has_calls(expected)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_dedup(self, _mock_volume_types):
        """Retype to a dedup type converts the volume (conversionOperation 3)."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_3
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        cpg = "any_cpg"
        snap_cpg = "any_cpg"
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            common._retype(self.volume,
                           HP3PARBaseDriver.VOLUME_3PAR_NAME,
                           "old_type", "old_type_id",
                           HP3PARBaseDriver.RETYPE_HOST,
                           None, cpg, cpg, snap_cpg, snap_cpg,
                           True, False, False, True, None, None,
                           self.QOS_SPECS, self.RETYPE_QOS_SPECS,
                           None, None,
                           "{}")
            expected = [
                mock.call.modifyVolume('osv-0DM4qZEVSKON-DXN-NwVpw',
                                       {'action': 6,
                                        'userCPG': 'any_cpg',
                                        'conversionOperation': 3,
                                        'tuneOperation': 1}),
                mock.call.getTask(1)]
            mock_client.assert_has_calls(expected)
def test_delete_volume(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
self.driver.delete_volume(self.volume)
expected = [mock.call.deleteVolume(self.VOLUME_3PAR_NAME)]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
    def test_create_cloned_volume(self):
        """Cloning uses an online copyVolume into the pool from the host field."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        mock_client.copyVolume.return_value = {'taskid': 1}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            volume = {'name': HP3PARBaseDriver.VOLUME_NAME,
                      'id': HP3PARBaseDriver.CLONE_ID,
                      'display_name': 'Foo Volume',
                      'size': 2,
                      'host': volume_utils.append_host(self.FAKE_HOST,
                                                       HP3PAR_CPG2),
                      'source_volid': HP3PARBaseDriver.VOLUME_ID}
            src_vref = {}
            model_update = self.driver.create_cloned_volume(volume, src_vref)
            self.assertIsNone(model_update)
            expected = [
                mock.call.copyVolume(
                    self.VOLUME_3PAR_NAME,
                    'osv-0DM4qZEVSKON-AAAAAAAAA',
                    HP3PAR_CPG2,
                    {'snapCPG': 'OpenStackCPGSnap', 'tpvv': True,
                     'tdvv': False, 'online': True})]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
@mock.patch.object(volume_types, 'get_volume_type')
def test_create_cloned_qos_volume(self, _mock_volume_types):
_mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_2
mock_client = self.setup_driver()
mock_client.copyVolume.return_value = {'taskid': 1}
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
src_vref = {}
volume = self.volume_qos.copy()
host = "TEST_HOST"
pool = "TEST_POOL"
volume_host = volume_utils.append_host(host, pool)
expected_cpg = pool
volume['id'] = HP3PARBaseDriver.CLONE_ID
volume['host'] = volume_host
volume['source_volid'] = HP3PARBaseDriver.VOLUME_ID
model_update = self.driver.create_cloned_volume(volume, src_vref)
self.assertEqual(model_update, None)
expected = [
mock.call.getCPG(expected_cpg),
mock.call.copyVolume(
self.VOLUME_3PAR_NAME,
'osv-0DM4qZEVSKON-AAAAAAAAA',
expected_cpg,
{'snapCPG': 'OpenStackCPGSnap', 'tpvv': True,
'tdvv': False, 'online': True})]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
    def test_migrate_volume(self):
        """Same-array migration retypes in place and returns (True, None)."""
        conf = {
            'getStorageSystemInfo.return_value': {
                'serialNumber': '1234'},
            'getTask.return_value': {
                'status': 1},
            'getCPG.return_value': {},
            'copyVolume.return_value': {'taskid': 1},
            'getVolume.return_value': self.RETYPE_VOLUME_INFO_1
        }
        mock_client = self.setup_driver(mock_conf=conf)
        mock_client.getVolume.return_value = self.MANAGE_VOLUME_INFO
        mock_client.modifyVolume.return_value = ("anyResponse", {'taskid': 1})
        mock_client.getTask.return_value = self.STATUS_DONE
        volume = {'name': HP3PARBaseDriver.VOLUME_NAME,
                  'id': HP3PARBaseDriver.CLONE_ID,
                  'display_name': 'Foo Volume',
                  'volume_type_id': None,
                  'size': 2,
                  'status': 'available',
                  'host': HP3PARBaseDriver.FAKE_HOST,
                  'source_volid': HP3PARBaseDriver.VOLUME_ID}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            volume_name_3par = common._encode_name(volume['id'])
            # Matching serial number '1234' marks the target as the same array.
            loc_info = 'HP3PARDriver:1234:CPG-FC1'
            host = {'host': 'stack@3parfc1#CPG-FC1',
                    'capabilities': {'location_info': loc_info}}
            result = self.driver.migrate_volume(context.get_admin_context(),
                                                volume, host)
            self.assertIsNotNone(result)
            self.assertEqual((True, None), result)
            osv_matcher = 'osv-' + volume_name_3par
            expected = [
                mock.call.modifyVolume(
                    osv_matcher,
                    {'comment': '{"qos": {}, "display_name": "Foo Volume"}',
                     'snapCPG': HP3PAR_CPG_SNAP}),
                mock.call.modifyVolume(osv_matcher,
                                       {'action': 6,
                                        'userCPG': 'CPG-FC1',
                                        'conversionOperation': 1,
                                        'tuneOperation': 1}),
                mock.call.getTask(mock.ANY)
            ]
            mock_client.assert_has_calls(expected + self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_migrate_volume_with_type(self, _mock_volume_types):
        """Migration of a typed volume keeps the type metadata in the comment."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_2
        conf = {
            'getStorageSystemInfo.return_value': {
                'serialNumber': '1234'},
            'getTask.return_value': {
                'status': 1},
            'getCPG.return_value': {},
            'copyVolume.return_value': {'taskid': 1},
            'getVolume.return_value': self.RETYPE_VOLUME_INFO_1
        }
        mock_client = self.setup_driver(mock_conf=conf)
        mock_client.getVolume.return_value = self.MANAGE_VOLUME_INFO
        mock_client.modifyVolume.return_value = ("anyResponse", {'taskid': 1})
        mock_client.getTask.return_value = self.STATUS_DONE
        display_name = 'Foo Volume'
        volume = {'name': HP3PARBaseDriver.VOLUME_NAME,
                  'id': HP3PARBaseDriver.CLONE_ID,
                  'display_name': display_name,
                  "volume_type_id": self.RETYPE_VOLUME_TYPE_2['id'],
                  'size': 2,
                  'status': 'available',
                  'host': HP3PARBaseDriver.FAKE_HOST,
                  'source_volid': HP3PARBaseDriver.VOLUME_ID}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            volume_name_3par = common._encode_name(volume['id'])
            loc_info = 'HP3PARDriver:1234:CPG-FC1'
            instance_host = 'stack@3parfc1#CPG-FC1'
            host = {'host': instance_host,
                    'capabilities': {'location_info': loc_info}}
            result = self.driver.migrate_volume(context.get_admin_context(),
                                                volume, host)
            self.assertIsNotNone(result)
            # when the host and pool are the same we'll get None
            self.assertEqual((True, None), result)
            osv_matcher = 'osv-' + volume_name_3par
            expected_comment = {
                "display_name": display_name,
                "volume_type_id": self.RETYPE_VOLUME_TYPE_2['id'],
                "volume_type_name": self.RETYPE_VOLUME_TYPE_2['name'],
                "vvs": self.RETYPE_VOLUME_TYPE_2['extra_specs']['vvs']
            }
            expected = [
                mock.call.modifyVolume(
                    osv_matcher,
                    {'comment': self.CommentMatcher(self.assertEqual,
                                                    expected_comment),
                     'snapCPG': self.RETYPE_VOLUME_TYPE_2
                     ['extra_specs']['snap_cpg']}),
                mock.call.modifyVolume(
                    osv_matcher,
                    {'action': 6,
                     'userCPG': 'CPG-FC1',
                     'conversionOperation': 1,
                     'tuneOperation': 1}),
                mock.call.getTask(mock.ANY)
            ]
            mock_client.assert_has_calls(
                expected +
                self.standard_logout)
    def test_migrate_volume_diff_host(self):
        """A different array serial number means migration is declined."""
        conf = {
            'getStorageSystemInfo.return_value': {
                'serialNumber': 'different'},
        }
        mock_client = self.setup_driver(mock_conf=conf)
        volume = {'name': HP3PARBaseDriver.VOLUME_NAME,
                  'id': HP3PARBaseDriver.CLONE_ID,
                  'display_name': 'Foo Volume',
                  'volume_type_id': None,
                  'size': 2,
                  'status': 'available',
                  'host': HP3PARBaseDriver.FAKE_HOST,
                  'source_volid': HP3PARBaseDriver.VOLUME_ID}
        # loc_info serial '1234' does not match 'different' above.
        loc_info = 'HP3PARDriver:1234:CPG-FC1'
        host = {'host': 'stack@3parfc1',
                'capabilities': {'location_info': loc_info}}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            result = self.driver.migrate_volume(context.get_admin_context(),
                                                volume, host)
            self.assertIsNotNone(result)
            self.assertEqual((False, None), result)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_migrate_volume_diff_domain(self, _mock_volume_types):
        """Migration within the same array succeeds even across CPG domains."""
        _mock_volume_types.return_value = self.volume_type
        conf = {
            'getStorageSystemInfo.return_value': {
                'serialNumber': '1234'},
            'getTask.return_value': {
                'status': 1},
            'getCPG.return_value': {},
            'copyVolume.return_value': {'taskid': 1},
            'getVolume.return_value': self.RETYPE_VOLUME_INFO_1
        }
        mock_client = self.setup_driver(mock_conf=conf)
        mock_client.getVolume.return_value = self.MANAGE_VOLUME_INFO
        mock_client.modifyVolume.return_value = ("anyResponse", {'taskid': 1})
        mock_client.getTask.return_value = self.STATUS_DONE
        volume = {'name': HP3PARBaseDriver.VOLUME_NAME,
                  'id': HP3PARBaseDriver.CLONE_ID,
                  'display_name': 'Foo Volume',
                  'volume_type_id': None,
                  'size': 2,
                  'status': 'available',
                  'host': HP3PARBaseDriver.FAKE_HOST,
                  'source_volid': HP3PARBaseDriver.VOLUME_ID}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            volume_name_3par = common._encode_name(volume['id'])
            loc_info = 'HP3PARDriver:1234:CPG-FC1'
            host = {'host': 'stack@3parfc1#CPG-FC1',
                    'capabilities': {'location_info': loc_info}}
            result = self.driver.migrate_volume(context.get_admin_context(),
                                                volume, host)
            self.assertIsNotNone(result)
            self.assertEqual((True, None), result)
            osv_matcher = 'osv-' + volume_name_3par
            expected = [
                mock.call.modifyVolume(
                    osv_matcher,
                    {'comment': '{"qos": {}, "display_name": "Foo Volume"}',
                     'snapCPG': HP3PAR_CPG_SNAP}),
                mock.call.modifyVolume(osv_matcher,
                                       {'action': 6,
                                        'userCPG': 'CPG-FC1',
                                        'conversionOperation': 1,
                                        'tuneOperation': 1}),
                mock.call.getTask(mock.ANY),
            ]
            mock_client.assert_has_calls(expected + self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_migrate_volume_attached(self, _mock_volume_types):
        """An in-use volume migrates via retype and reports the new pool host."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        volume = {'name': HP3PARBaseDriver.VOLUME_NAME,
                  'volume_type_id': None,
                  'id': HP3PARBaseDriver.CLONE_ID,
                  'display_name': 'Foo Volume',
                  'size': 2,
                  'status': 'in-use',
                  'host': HP3PARBaseDriver.FAKE_HOST,
                  'source_volid': HP3PARBaseDriver.VOLUME_ID}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            volume_name_3par = common._encode_name(volume['id'])
            osv_matcher = 'osv-' + volume_name_3par
            loc_info = 'HP3PARDriver:1234567:CPG-FC1'
            # This class is shared by FC and iSCSI subclasses; pick the
            # protocol that matches the driver under test.
            protocol = "FC"
            if self.properties['driver_volume_type'] == "iscsi":
                protocol = "iSCSI"
            host = {'host': 'stack@3parfc1',
                    'capabilities': {'location_info': loc_info,
                                     'storage_protocol': protocol}}
            result = self.driver.migrate_volume(context.get_admin_context(),
                                                volume, host)
            new_comment = {"qos": {},
                           "retype_test": "test comment"}
            expected = [
                mock.call.modifyVolume(osv_matcher,
                                       {'comment': self.CommentMatcher(
                                           self.assertEqual, new_comment),
                                        'snapCPG': 'OpenStackCPGSnap'}),
                mock.call.modifyVolume(osv_matcher,
                                       {'action': 6,
                                        'userCPG': 'OpenStackCPG',
                                        'conversionOperation': 1,
                                        'tuneOperation': 1}),
                mock.call.getTask(1),
                mock.call.logout()
            ]
            mock_client.assert_has_calls(expected)
            self.assertIsNotNone(result)
            self.assertEqual((True, {'host': 'stack@3parfc1#OpenStackCPG'}),
                             result)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_migrate_volume_attached_diff_protocol(self, _mock_volume_types):
        """Attached migration across storage protocols is declined untouched."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        protocol = "OTHER"
        volume = {'name': HP3PARBaseDriver.VOLUME_NAME,
                  'volume_type_id': None,
                  'id': HP3PARBaseDriver.CLONE_ID,
                  'display_name': 'Foo Volume',
                  'size': 2,
                  'status': 'in-use',
                  'host': HP3PARBaseDriver.FAKE_HOST,
                  'source_volid': HP3PARBaseDriver.VOLUME_ID}
        loc_info = 'HP3PARDriver:1234567:CPG-FC1'
        host = {'host': 'stack@3parfc1',
                'capabilities': {'location_info': loc_info,
                                 'storage_protocol': protocol}}
        result = self.driver.migrate_volume(context.get_admin_context(),
                                            volume, host)
        self.assertIsNotNone(result)
        self.assertEqual((False, None), result)
        # No array calls should be made when the protocol doesn't match.
        expected = []
        mock_client.assert_has_calls(expected)
    def test_attach_volume(self):
        """Attach records the instance uuid as 3PAR volume metadata."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.driver.attach_volume(context.get_admin_context(),
                                      self.volume,
                                      'abcdef',
                                      'newhost',
                                      '/dev/vdb')
            expected = [
                mock.call.setVolumeMetaData(
                    self.VOLUME_3PAR_NAME,
                    'HPQ-CS-instance_uuid',
                    'abcdef')]
            mock_client.assert_has_calls(expected)
            # test the exception
            mock_client.setVolumeMetaData.side_effect = Exception('Custom ex')
            self.assertRaises(exception.CinderException,
                              self.driver.attach_volume,
                              context.get_admin_context(),
                              self.volume,
                              'abcdef',
                              'newhost',
                              '/dev/vdb')
    def test_detach_volume(self):
        """Detach removes the instance uuid metadata from the 3PAR volume."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.driver.detach_volume(context.get_admin_context(), self.volume,
                                      None)
            expected = [
                mock.call.removeVolumeMetaData(
                    self.VOLUME_3PAR_NAME,
                    'HPQ-CS-instance_uuid')]
            mock_client.assert_has_calls(expected)
            # test the exception
            mock_client.removeVolumeMetaData.side_effect = Exception(
                'Custom ex')
            self.assertRaises(exception.CinderException,
                              self.driver.detach_volume,
                              context.get_admin_context(),
                              self.volume, None)
    def test_create_snapshot(self):
        """Snapshot creation issues a read-only createSnapshot with metadata."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.driver.create_snapshot(self.snapshot)
            comment = (
                '{"volume_id": "761fc5e5-5191-4ec7-aeba-33e36de44156",'
                ' "display_name": "fakesnap",'
                ' "description": "test description name",'
                ' "volume_name":'
                ' "volume-d03338a9-9115-48a3-8dfc-35cdfcdc15a7"}')
            expected = [
                mock.call.createSnapshot(
                    'oss-L4I73ONuTci9Fd4ceij-MQ',
                    'osv-dh-F5VGRTseuujPjbeRBVg',
                    {
                        'comment': comment,
                        'readOnly': True})]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
def test_delete_snapshot(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
self.driver.delete_snapshot(self.snapshot)
expected = [
mock.call.deleteVolume('oss-L4I73ONuTci9Fd4ceij-MQ')]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
    def test_delete_snapshot_in_use(self):
        """Deleting a snapshot with a dependent volume raises SnapshotIsBusy."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.driver.create_snapshot(self.snapshot)
            self.driver.create_volume_from_snapshot(self.volume, self.snapshot)
            # The array reports the conflict via an HTTP 409.
            ex = hpexceptions.HTTPConflict("In use")
            mock_client.deleteVolume = mock.Mock(side_effect=ex)
            # Deleting the snapshot that a volume is dependent on should fail
            self.assertRaises(exception.SnapshotIsBusy,
                              self.driver.delete_snapshot,
                              self.snapshot)
    def test_delete_snapshot_not_found(self):
        """Deleting an already-missing snapshot succeeds silently."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.driver.create_snapshot(self.snapshot)
            try:
                ex = hpexceptions.HTTPNotFound("not found")
                mock_client.deleteVolume = mock.Mock(side_effect=ex)
                self.driver.delete_snapshot(self.snapshot)
            except Exception:
                self.fail("Deleting a snapshot that is missing should act "
                          "as if it worked.")
    def test_create_volume_from_snapshot(self):
        """Same-size volume from snapshot is a writable snapshot of it."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            model_update = self.driver.create_volume_from_snapshot(
                self.volume,
                self.snapshot)
            self.assertIsNone(model_update)
            comment = (
                '{"snapshot_id": "2f823bdc-e36e-4dc8-bd15-de1c7a28ff31",'
                ' "display_name": "Foo Volume",'
                ' "volume_id": "d03338a9-9115-48a3-8dfc-35cdfcdc15a7"}')
            expected = [
                mock.call.createSnapshot(
                    self.VOLUME_3PAR_NAME,
                    'oss-L4I73ONuTci9Fd4ceij-MQ',
                    {
                        'comment': comment,
                        'readOnly': False})]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            # A target smaller than the snapshot's source must be rejected.
            volume = self.volume.copy()
            volume['size'] = 1
            self.assertRaises(exception.InvalidInput,
                              self.driver.create_volume_from_snapshot,
                              volume, self.snapshot)
def test_create_volume_from_snapshot_and_extend(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
conf = {
'getTask.return_value': {
'status': 1},
'copyVolume.return_value': {'taskid': 1},
'getVolume.return_value': {}
}
mock_client = self.setup_driver(mock_conf=conf)
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
volume = self.volume.copy()
volume['size'] = self.volume['size'] + 10
model_update = self.driver.create_volume_from_snapshot(
volume,
self.snapshot)
self.assertEqual(model_update, None)
comment = (
'{"snapshot_id": "2f823bdc-e36e-4dc8-bd15-de1c7a28ff31",'
' "display_name": "Foo Volume",'
' "volume_id": "d03338a9-9115-48a3-8dfc-35cdfcdc15a7"}')
volume_name_3par = common._encode_name(volume['id'])
osv_matcher = 'osv-' + volume_name_3par
omv_matcher = 'omv-' + volume_name_3par
expected = [
mock.call.createSnapshot(
self.VOLUME_3PAR_NAME,
'oss-L4I73ONuTci9Fd4ceij-MQ',
{
'comment': comment,
'readOnly': False}),
mock.call.copyVolume(
osv_matcher, omv_matcher, HP3PAR_CPG, mock.ANY),
mock.call.getTask(mock.ANY),
mock.call.getVolume(osv_matcher),
mock.call.deleteVolume(osv_matcher),
mock.call.modifyVolume(omv_matcher, {'newName': osv_matcher}),
mock.call.growVolume(osv_matcher, 10 * 1024)]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
@mock.patch.object(volume_types, 'get_volume_type')
def test_create_volume_from_snapshot_and_extend_with_qos(
self, _mock_volume_types):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
conf = {
'getTask.return_value': {
'status': 1},
'copyVolume.return_value': {'taskid': 1},
'getVolume.return_value': {}
}
mock_client = self.setup_driver(mock_conf=conf)
_mock_volume_types.return_value = {
'name': 'gold',
'extra_specs': {
'cpg': HP3PAR_CPG_QOS,
'snap_cpg': HP3PAR_CPG_SNAP,
'vvs_name': self.VVS_NAME,
'qos': self.QOS,
'tpvv': True,
'tdvv': False,
'volume_type': self.volume_type}}
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
volume = self.volume_qos.copy()
volume['size'] = self.volume['size'] + 10
model_update = self.driver.create_volume_from_snapshot(
volume,
self.snapshot)
self.assertEqual(model_update, None)
comment = (
'{"snapshot_id": "2f823bdc-e36e-4dc8-bd15-de1c7a28ff31",'
' "display_name": "Foo Volume",'
' "volume_id": "d03338a9-9115-48a3-8dfc-35cdfcdc15a7"}')
volume_name_3par = common._encode_name(volume['id'])
osv_matcher = 'osv-' + volume_name_3par
omv_matcher = 'omv-' + volume_name_3par
expected = [
mock.call.createSnapshot(
self.VOLUME_3PAR_NAME,
'oss-L4I73ONuTci9Fd4ceij-MQ',
{
'comment': comment,
'readOnly': False}),
mock.call.getCPG(HP3PAR_CPG),
mock.call.copyVolume(
osv_matcher, omv_matcher, HP3PAR_CPG, mock.ANY),
mock.call.getTask(mock.ANY),
mock.call.getVolume(osv_matcher),
mock.call.deleteVolume(osv_matcher),
mock.call.modifyVolume(omv_matcher, {'newName': osv_matcher}),
mock.call.growVolume(osv_matcher, 10 * 1024)]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
    def test_create_volume_from_snapshot_and_extend_copy_fail(self):
        """A failed copy task (status 4) surfaces as a CinderException."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        conf = {
            'getTask.return_value': {
                'status': 4,
                'failure message': 'out of disk space'},
            'copyVolume.return_value': {'taskid': 1},
            'getVolume.return_value': {}
        }
        mock_client = self.setup_driver(mock_conf=conf)
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            volume = self.volume.copy()
            volume['size'] = self.volume['size'] + 10
            self.assertRaises(exception.CinderException,
                              self.driver.create_volume_from_snapshot,
                              volume, self.snapshot)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_create_volume_from_snapshot_qos(self, _mock_volume_types):
        """Volume from snapshot with a QoS type still snapshots writable."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            _mock_volume_types.return_value = {
                'name': 'gold',
                'extra_specs': {
                    'cpg': HP3PAR_CPG,
                    'snap_cpg': HP3PAR_CPG_SNAP,
                    'vvs_name': self.VVS_NAME,
                    'qos': self.QOS,
                    'tpvv': True,
                    'tdvv': False,
                    'volume_type': self.volume_type}}
            self.driver.create_volume_from_snapshot(
                self.volume_qos,
                self.snapshot)
            comment = (
                '{"snapshot_id": "2f823bdc-e36e-4dc8-bd15-de1c7a28ff31",'
                ' "display_name": "Foo Volume",'
                ' "volume_id": "d03338a9-9115-48a3-8dfc-35cdfcdc15a7"}')
            expected = [
                mock.call.createSnapshot(
                    self.VOLUME_3PAR_NAME,
                    'oss-L4I73ONuTci9Fd4ceij-MQ', {
                        'comment': comment,
                        'readOnly': False})]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            # A target smaller than the snapshot's source must be rejected.
            volume = self.volume.copy()
            volume['size'] = 1
            self.assertRaises(exception.InvalidInput,
                              self.driver.create_volume_from_snapshot,
                              volume, self.snapshot)
    def test_terminate_connection(self):
        """Terminate deletes the VLUN/host and strips the CHAP metadata."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        mock_client.getHostVLUNs.return_value = [
            {'active': True,
             'volumeName': self.VOLUME_3PAR_NAME,
             'lun': None, 'type': 0}]
        mock_client.queryHost.return_value = {
            'members': [{
                'name': self.FAKE_HOST
            }]
        }
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.driver.terminate_connection(
                self.volume,
                self.connector,
                force=True)
            expected = [
                mock.call.queryHost(iqns=[self.connector['initiator']]),
                mock.call.getHostVLUNs(self.FAKE_HOST),
                mock.call.deleteVLUN(
                    self.VOLUME_3PAR_NAME,
                    None,
                    self.FAKE_HOST),
                mock.call.getHostVLUNs(self.FAKE_HOST),
                mock.call.deleteHost(self.FAKE_HOST),
                mock.call.removeVolumeMetaData(
                    self.VOLUME_3PAR_NAME, CHAP_USER_KEY),
                mock.call.removeVolumeMetaData(
                    self.VOLUME_3PAR_NAME, CHAP_PASS_KEY)]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    def test_update_volume_key_value_pair(self):
        """update_volume_key_value_pair sets metadata; failures are wrapped."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        key = 'a'
        value = 'b'
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            common.update_volume_key_value_pair(
                self.volume,
                key,
                value)
            expected = [
                mock.call.setVolumeMetaData(self.VOLUME_3PAR_NAME, key, value)]
            mock_client.assert_has_calls(expected)
            # check exception
            mock_client.setVolumeMetaData.side_effect = Exception('fake')
            self.assertRaises(exception.VolumeBackendAPIException,
                              common.update_volume_key_value_pair,
                              self.volume,
                              None,
                              'b')
    def test_clear_volume_key_value_pair(self):
        """clear_volume_key_value_pair removes metadata; failures are wrapped."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            key = 'a'
            common = self.driver._login()
            common.clear_volume_key_value_pair(self.volume, key)
            expected = [
                mock.call.removeVolumeMetaData(self.VOLUME_3PAR_NAME, key)]
            mock_client.assert_has_calls(expected)
            # check the exception
            mock_client.removeVolumeMetaData.side_effect = Exception('fake')
            self.assertRaises(exception.VolumeBackendAPIException,
                              common.clear_volume_key_value_pair,
                              self.volume,
                              None)
    def test_extend_volume(self):
        """Extend converts the GiB growth to MiB and calls growVolume."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            grow_size = 3
            old_size = self.volume['size']
            new_size = old_size + grow_size
            self.driver.extend_volume(self.volume, str(new_size))
            growth_size_mib = grow_size * units.Ki
            expected = [
                mock.call.growVolume(self.VOLUME_3PAR_NAME, growth_size_mib)]
            mock_client.assert_has_calls(expected)
    def test_extend_volume_non_base(self):
        """A forbidden grow on a non-base volume is retried after promotion."""
        extend_ex = hpexceptions.HTTPForbidden(error={'code': 150})
        conf = {
            'getTask.return_value': {
                'status': 1},
            'getCPG.return_value': {},
            'copyVolume.return_value': {'taskid': 1},
            'getVolume.return_value': {},
            # Throw an exception first time only
            'growVolume.side_effect': [extend_ex,
                                       None],
        }
        mock_client = self.setup_driver(mock_conf=conf)
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            grow_size = 3
            old_size = self.volume['size']
            new_size = old_size + grow_size
            self.driver.extend_volume(self.volume, str(new_size))
            self.assertEqual(2, mock_client.growVolume.call_count)
    def test_extend_volume_non_base_failure(self):
        """If growVolume keeps failing, the HTTPForbidden propagates."""
        extend_ex = hpexceptions.HTTPForbidden(error={'code': 150})
        conf = {
            'getTask.return_value': {
                'status': 1},
            'getCPG.return_value': {},
            'copyVolume.return_value': {'taskid': 1},
            'getVolume.return_value': {},
            # Always fail
            'growVolume.side_effect': extend_ex
        }
        mock_client = self.setup_driver(mock_conf=conf)
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            grow_size = 3
            old_size = self.volume['size']
            new_size = old_size + grow_size
            self.assertRaises(hpexceptions.HTTPForbidden,
                              self.driver.extend_volume,
                              self.volume,
                              str(new_size))
def test_get_ports(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
mock_client.getPorts.return_value = {
'members': [
{'portPos': {'node': 0, 'slot': 8, 'cardPort': 2},
'protocol': 2,
'IPAddr': '10.10.120.252',
'linkState': 4,
'device': [],
'iSCSIName': 'iqn.2000-05.com.3pardata:21810002ac00383d',
'mode': 2,
'HWAddr': '2C27D75375D2',
'type': 8},
{'portPos': {'node': 1, 'slot': 8, 'cardPort': 1},
'protocol': 2,
'IPAddr': '10.10.220.253',
'linkState': 4,
'device': [],
'iSCSIName': 'iqn.2000-05.com.3pardata:21810002ac00383d',
'mode': 2,
'HWAddr': '2C27D75375D6',
'type': 8},
{'portWWN': '20210002AC00383D',
'protocol': 1,
'linkState': 4,
'mode': 2,
'device': ['cage2'],
'nodeWWN': '20210002AC00383D',
'type': 2,
'portPos': {'node': 0, 'slot': 6, 'cardPort': 3}}]}
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
ports = common.get_ports()['members']
self.assertEqual(len(ports), 3)
    def test_get_by_qos_spec_with_scoping(self):
        """Associated qos_specs override the scoped type extra specs."""
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            qos_ref = qos_specs.create(self.ctxt, 'qos-specs-1', self.QOS)
            type_ref = volume_types.create(self.ctxt,
                                           "type1", {"qos:maxIOPS": "100",
                                                     "qos:maxBWS": "50",
                                                     "qos:minIOPS": "10",
                                                     "qos:minBWS": "20",
                                                     "qos:latency": "5",
                                                     "qos:priority": "high"})
            qos_specs.associate_qos_with_type(self.ctxt,
                                              qos_ref['id'],
                                              type_ref['id'])
            type_ref = volume_types.get_volume_type(self.ctxt, type_ref['id'])
            qos = common._get_qos_by_volume_type(type_ref)
            # The values come from self.QOS, not the "qos:" extra specs.
            self.assertEqual(qos, {'maxIOPS': '1000', 'maxBWS': '50',
                                   'minIOPS': '100', 'minBWS': '25',
                                   'latency': '25', 'priority': 'low'})
    def test_get_by_qos_spec(self):
        """Associated (unscoped) qos_specs win over the type's extra specs."""
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            qos_ref = qos_specs.create(
                self.ctxt,
                'qos-specs-1',
                self.QOS_SPECS)
            type_ref = volume_types.create(self.ctxt,
                                           "type1", {"qos:maxIOPS": "100",
                                                     "qos:maxBWS": "50",
                                                     "qos:minIOPS": "10",
                                                     "qos:minBWS": "20",
                                                     "qos:latency": "5",
                                                     "qos:priority": "high"})
            qos_specs.associate_qos_with_type(self.ctxt,
                                              qos_ref['id'],
                                              type_ref['id'])
            type_ref = volume_types.get_volume_type(self.ctxt, type_ref['id'])
            qos = common._get_qos_by_volume_type(type_ref)
            # The values come from self.QOS_SPECS, not the "qos:" extra specs.
            self.assertEqual(qos, {'maxIOPS': '1000', 'maxBWS': '50',
                                   'minIOPS': '100', 'minBWS': '25',
                                   'latency': '25', 'priority': 'low'})
    def test_get_by_qos_by_type_only(self):
        """Without associated qos_specs, the type's "qos:" extra specs apply."""
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            type_ref = volume_types.create(self.ctxt,
                                           "type1", {"qos:maxIOPS": "100",
                                                     "qos:maxBWS": "50",
                                                     "qos:minIOPS": "10",
                                                     "qos:minBWS": "20",
                                                     "qos:latency": "5",
                                                     "qos:priority": "high"})
            type_ref = volume_types.get_volume_type(self.ctxt, type_ref['id'])
            qos = common._get_qos_by_volume_type(type_ref)
            self.assertEqual(qos, {'maxIOPS': '100', 'maxBWS': '50',
                                   'minIOPS': '10', 'minBWS': '20',
                                   'latency': '5', 'priority': 'high'})
    def test_create_vlun(self):
        """_create_3par_vlun parses the createVLUN location, with/without nsp."""
        host = 'fake-host'
        lun_id = 11
        nsp = '1:2:3'
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            # Location string as the array returns it when an nsp is given.
            location = ("%(name)s,%(lunid)s,%(host)s,%(nsp)s" %
                        {'name': self.VOLUME_NAME,
                         'lunid': lun_id,
                         'host': host,
                         'nsp': nsp})
            mock_client.createVLUN.return_value = location
            expected_info = {'volume_name': self.VOLUME_NAME,
                             'lun_id': lun_id,
                             'host_name': host,
                             'nsp': nsp}
            common = self.driver._login()
            vlun_info = common._create_3par_vlun(
                self.VOLUME_NAME,
                host,
                nsp)
            self.assertEqual(expected_info, vlun_info)
            # Without an nsp, the location (and parsed info) omit that field.
            location = ("%(name)s,%(lunid)s,%(host)s" %
                        {'name': self.VOLUME_NAME,
                         'lunid': lun_id,
                         'host': host})
            mock_client.createVLUN.return_value = location
            expected_info = {'volume_name': self.VOLUME_NAME,
                             'lun_id': lun_id,
                             'host_name': host}
            vlun_info = common._create_3par_vlun(
                self.VOLUME_NAME,
                host,
                None)
            self.assertEqual(expected_info, vlun_info)
    def test__get_existing_volume_ref_name(self):
        """_get_existing_volume_ref_name handles all supported ref keys.

        'source-name' is used directly, 'source-id' is converted to the
        3PAR unmanaged name, and anything else raises
        ManageExistingInvalidReference.
        """
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()

            unm_matcher = common._get_3par_unm_name(self.volume['id'])
            existing_ref = {'source-name': unm_matcher}
            result = common._get_existing_volume_ref_name(existing_ref)
            self.assertEqual(unm_matcher, result)

            existing_ref = {'source-id': self.volume['id']}
            result = common._get_existing_volume_ref_name(existing_ref)
            self.assertEqual(unm_matcher, result)

            existing_ref = {'bad-key': 'foo'}
            self.assertRaises(
                exception.ManageExistingInvalidReference,
                common._get_existing_volume_ref_name,
                existing_ref)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_manage_existing(self, _mock_volume_types):
        """manage_existing renames the volume then retypes it.

        The unmanaged volume is renamed to the OpenStack 'osv-' name with a
        rebuilt comment; the retype to a QoS volume type then updates the
        comment/snapCPG, rebuilds the QoS volume set, and tunes the volume
        to the type's user CPG.
        """
        _mock_volume_types.return_value = self.volume_type
        mock_client = self.setup_driver()

        new_comment = {"display_name": "Foo Volume",
                       "name": "volume-007dbfce-7579-40bc-8f90-a20b3902283e",
                       "volume_id": "007dbfce-7579-40bc-8f90-a20b3902283e",
                       "type": "OpenStack"}
        volume = {'display_name': None,
                  'host': self.FAKE_CINDER_HOST,
                  'volume_type': 'gold',
                  'volume_type_id': 'acfa9fa4-54a0-4340-a3d8-bfcf19aea65e',
                  'id': '007dbfce-7579-40bc-8f90-a20b3902283e'}

        mock_client.getVolume.return_value = self.MANAGE_VOLUME_INFO
        mock_client.modifyVolume.return_value = ("anyResponse", {'taskid': 1})
        mock_client.getTask.return_value = self.STATUS_DONE

        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()

            unm_matcher = common._get_3par_unm_name(self.volume['id'])
            osv_matcher = common._get_3par_vol_name(volume['id'])
            vvs_matcher = common._get_3par_vvs_name(volume['id'])
            existing_ref = {'source-name': unm_matcher}

            expected_obj = {'display_name': 'Foo Volume'}

            obj = self.driver.manage_existing(volume, existing_ref)

            # Phase 1: rename to the managed ('osv-') name.
            expected_manage = [
                mock.call.getVolume(existing_ref['source-name']),
                mock.call.modifyVolume(existing_ref['source-name'],
                                       {'newName': osv_matcher,
                                        'comment': self.CommentMatcher(
                                            self.assertEqual, new_comment)}),
            ]

            retype_comment_qos = {
                "display_name": "Foo Volume",
                "volume_type_name": self.volume_type['name'],
                "volume_type_id": self.volume_type['id'],
                "qos": {
                    'maxIOPS': '1000',
                    'maxBWS': '50',
                    'minIOPS': '100',
                    'minBWS': '25',
                    'latency': '25',
                    'priority': 'low'
                }
            }

            expected_snap_cpg = HP3PAR_CPG_SNAP
            # Phase 2: comment/snapCPG updated and stale VVS removed.
            expected_retype_modify = [
                mock.call.modifyVolume(osv_matcher,
                                       {'comment': self.CommentMatcher(
                                           self.assertEqual,
                                           retype_comment_qos),
                                        'snapCPG': expected_snap_cpg}),
                mock.call.deleteVolumeSet(vvs_matcher),
            ]

            # Phase 3: QoS VVS rebuilt and volume tuned to the type's CPG.
            expected_retype_specs = [
                mock.call.createVolumeSet(vvs_matcher, None),
                mock.call.createQoSRules(
                    vvs_matcher,
                    {'ioMinGoal': 100, 'ioMaxLimit': 1000,
                     'bwMinGoalKB': 25600, 'priority': 1, 'latencyGoal': 25,
                     'bwMaxLimitKB': 51200}),
                mock.call.addVolumeToVolumeSet(vvs_matcher, osv_matcher),
                mock.call.modifyVolume(
                    osv_matcher,
                    {'action': 6,
                     'userCPG': HP3PAR_CPG,
                     'conversionOperation': 1, 'tuneOperation': 1}),
                mock.call.getTask(1)
            ]

            mock_client.assert_has_calls(self.standard_login + expected_manage)
            mock_client.assert_has_calls(expected_retype_modify)
            mock_client.assert_has_calls(
                expected_retype_specs +
                self.standard_logout)

            self.assertEqual(expected_obj, obj)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_manage_existing_with_no_snap_cpg(self, _mock_volume_types):
        """manage_existing backfills a missing snapCPG with the userCPG."""
        _mock_volume_types.return_value = self.volume_type
        mock_client = self.setup_driver()

        new_comment = {"display_name": "Foo Volume",
                       "name": "volume-007dbfce-7579-40bc-8f90-a20b3902283e",
                       "volume_id": "007dbfce-7579-40bc-8f90-a20b3902283e",
                       "type": "OpenStack"}
        volume = {'display_name': None,
                  'host': 'my-stack1@3parxxx#CPGNOTUSED',
                  'volume_type': 'gold',
                  'volume_type_id': 'acfa9fa4-54a0-4340-a3d8-bfcf19aea65e',
                  'id': '007dbfce-7579-40bc-8f90-a20b3902283e'}

        # Backend volume info deliberately lacks a snapCPG.
        mock_client.getVolume.return_value = self.MV_INFO_WITH_NO_SNAPCPG
        mock_client.modifyVolume.return_value = ("anyResponse", {'taskid': 1})
        mock_client.getTask.return_value = self.STATUS_DONE

        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()

            unm_matcher = common._get_3par_unm_name(self.volume['id'])
            osv_matcher = common._get_3par_vol_name(volume['id'])

            existing_ref = {'source-name': unm_matcher}

            expected_obj = {'display_name': 'Foo Volume'}

            obj = self.driver.manage_existing(volume, existing_ref)

            expected_manage = [
                mock.call.getVolume(existing_ref['source-name']),
                mock.call.modifyVolume(
                    existing_ref['source-name'],
                    {'newName': osv_matcher,
                     'comment': self.CommentMatcher(self.assertEqual,
                                                    new_comment),
                     # manage_existing() should be setting
                     # blank snapCPG to the userCPG
                     'snapCPG': 'testUserCpg0'})
            ]

            mock_client.assert_has_calls(self.standard_login + expected_manage)
            self.assertEqual(expected_obj, obj)
@mock.patch.object(volume_types, 'get_volume_type')
def test_manage_existing_vvs(self, _mock_volume_types):
test_volume_type = self.RETYPE_VOLUME_TYPE_2
vvs = test_volume_type['extra_specs']['vvs']
_mock_volume_types.return_value = test_volume_type
mock_client = self.setup_driver()
mock_client.getVolume.return_value = self.MANAGE_VOLUME_INFO
mock_client.modifyVolume.return_value = ("anyResponse", {'taskid': 1})
mock_client.getTask.return_value = self.STATUS_DONE
id = '007abcde-7579-40bc-8f90-a20b3902283e'
new_comment = {"display_name": "Test Volume",
"name": ("volume-%s" % id),
"volume_id": id,
"type": "OpenStack"}
volume = {'display_name': 'Test Volume',
'host': 'my-stack1@3parxxx#CPGNOTUSED',
'volume_type': 'gold',
'volume_type_id': 'acfa9fa4-54a0-4340-a3d8-bfcf19aea65e',
'id': id}
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
unm_matcher = common._get_3par_unm_name(self.volume['id'])
osv_matcher = common._get_3par_vol_name(volume['id'])
vvs_matcher = common._get_3par_vvs_name(volume['id'])
existing_ref = {'source-name': unm_matcher}
obj = self.driver.manage_existing(volume, existing_ref)
expected_obj = {'display_name': 'Test Volume'}
expected_manage = [
mock.call.getVolume(existing_ref['source-name']),
mock.call.modifyVolume(existing_ref['source-name'],
{'newName': osv_matcher,
'comment': self.CommentMatcher(
self.assertEqual, new_comment)})
]
retype_comment_vvs = {
"display_name": "Foo Volume",
"volume_type_name": test_volume_type['name'],
"volume_type_id": test_volume_type['id'],
"vvs": vvs
}
expected_retype = [
mock.call.modifyVolume(osv_matcher,
{'comment': self.CommentMatcher(
self.assertEqual,
retype_comment_vvs),
'snapCPG': 'OpenStackCPGSnap'}),
mock.call.deleteVolumeSet(vvs_matcher),
mock.call.addVolumeToVolumeSet(vvs, osv_matcher),
mock.call.modifyVolume(osv_matcher,
{'action': 6,
'userCPG': 'CPGNOTUSED',
'conversionOperation': 1,
'tuneOperation': 1}),
mock.call.getTask(1)
]
mock_client.assert_has_calls(self.standard_login + expected_manage)
mock_client.assert_has_calls(
expected_retype +
self.standard_logout)
self.assertEqual(expected_obj, obj)
    def test_manage_existing_no_volume_type(self):
        """manage_existing without a volume type only renames the volume.

        Exercised three ways: display name taken from the 3PAR comment,
        a Cinder display_name overriding it, and no comment at all.
        """
        mock_client = self.setup_driver()

        comment = (
            '{"display_name": "Foo Volume"}')
        new_comment = (
            '{"type": "OpenStack",'
            ' "display_name": "Foo Volume",'
            ' "name": "volume-007dbfce-7579-40bc-8f90-a20b3902283e",'
            ' "volume_id": "007dbfce-7579-40bc-8f90-a20b3902283e"}')
        volume = {'display_name': None,
                  'volume_type': None,
                  'volume_type_id': None,
                  'id': '007dbfce-7579-40bc-8f90-a20b3902283e'}

        mock_client.getVolume.return_value = {'comment': comment,
                                              'userCPG': 'testUserCpg0'}

        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()

            unm_matcher = common._get_3par_unm_name(self.volume['id'])
            osv_matcher = common._get_3par_vol_name(volume['id'])
            existing_ref = {'source-name': unm_matcher}

            # Case 1: display name comes from the backend comment.
            obj = self.driver.manage_existing(volume, existing_ref)

            expected_obj = {'display_name': 'Foo Volume'}
            expected = [
                mock.call.getVolume(existing_ref['source-name']),
                mock.call.modifyVolume(existing_ref['source-name'],
                                       {'newName': osv_matcher,
                                        'comment': new_comment,
                                        # manage_existing() should be setting
                                        # blank snapCPG to the userCPG
                                        'snapCPG': 'testUserCpg0'})
            ]

            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertEqual(expected_obj, obj)

            # Case 2: Cinder display_name wins over the backend comment.
            volume['display_name'] = 'Test Volume'

            obj = self.driver.manage_existing(volume, existing_ref)

            expected_obj = {'display_name': 'Test Volume'}
            expected = [
                mock.call.getVolume(existing_ref['source-name']),
                mock.call.modifyVolume(existing_ref['source-name'],
                                       {'newName': osv_matcher,
                                        'comment': new_comment,
                                        # manage_existing() should be setting
                                        # blank snapCPG to the userCPG
                                        'snapCPG': 'testUserCpg0'})
            ]

            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertEqual(expected_obj, obj)

            # Case 3: no comment on the backend and no display_name either.
            mock_client.getVolume.return_value = {'userCPG': 'testUserCpg0'}
            volume['display_name'] = None
            common = self.driver._login()

            obj = self.driver.manage_existing(volume, existing_ref)

            expected_obj = {'display_name': None}
            expected = [
                mock.call.getVolume(existing_ref['source-name']),
                mock.call.modifyVolume(existing_ref['source-name'],
                                       {'newName': osv_matcher,
                                        'comment': new_comment,
                                        # manage_existing() should be setting
                                        # blank snapCPG to the userCPG
                                        'snapCPG': 'testUserCpg0'})
            ]

            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertEqual(expected_obj, obj)
    def test_manage_existing_invalid_input(self):
        """A reference to a non-existent backend volume raises InvalidInput."""
        mock_client = self.setup_driver()

        volume = {'display_name': None,
                  'volume_type': None,
                  'id': '007dbfce-7579-40bc-8f90-a20b3902283e'}

        # Backend lookup fails -> driver must surface InvalidInput.
        mock_client.getVolume.side_effect = hpexceptions.HTTPNotFound('fake')

        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()

            unm_matcher = common._get_3par_unm_name(self.volume['id'])
            existing_ref = {'source-name': unm_matcher}

            self.assertRaises(exception.InvalidInput,
                              self.driver.manage_existing,
                              volume=volume,
                              existing_ref=existing_ref)

            expected = [mock.call.getVolume(existing_ref['source-name'])]

            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    def test_manage_existing_volume_type_exception(self):
        """An unresolvable volume type raises ManageExistingVolumeTypeMismatch."""
        mock_client = self.setup_driver()

        comment = (
            '{"display_name": "Foo Volume"}')
        volume = {'display_name': None,
                  'volume_type': 'gold',
                  'volume_type_id': 'bcfa9fa4-54a0-4340-a3d8-bfcf19aea65e',
                  'id': '007dbfce-7579-40bc-8f90-a20b3902283e'}

        mock_client.getVolume.return_value = {'comment': comment}

        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()

            unm_matcher = common._get_3par_unm_name(self.volume['id'])
            existing_ref = {'source-name': unm_matcher}

            self.assertRaises(exception.ManageExistingVolumeTypeMismatch,
                              self.driver.manage_existing,
                              volume=volume,
                              existing_ref=existing_ref)

            expected = [mock.call.getVolume(existing_ref['source-name'])]

            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_manage_existing_retype_exception(self, _mock_volume_types):
        """A failed retype rolls the volume back to its unmanaged name.

        getCPG returns a different domain on every call, so the retype
        fails with Invalid3PARDomain; the driver must restore the original
        volume name and comment.
        """
        mock_client = self.setup_driver()
        _mock_volume_types.return_value = {
            'name': 'gold',
            'id': 'gold-id',
            'extra_specs': {
                'cpg': HP3PAR_CPG,
                'snap_cpg': HP3PAR_CPG_SNAP,
                'vvs_name': self.VVS_NAME,
                'qos': self.QOS,
                'tpvv': True,
                'tdvv': False,
                'volume_type': self.volume_type}}

        volume = {'display_name': None,
                  'host': 'stack1@3pariscsi#POOL1',
                  'volume_type': 'gold',
                  'volume_type_id': 'bcfa9fa4-54a0-4340-a3d8-bfcf19aea65e',
                  'id': '007dbfce-7579-40bc-8f90-a20b3902283e'}

        mock_client.getVolume.return_value = self.MANAGE_VOLUME_INFO
        mock_client.modifyVolume.return_value = ("anyResponse", {'taskid': 1})
        mock_client.getTask.return_value = self.STATUS_DONE
        # Mismatched domains trigger the retype failure.
        mock_client.getCPG.side_effect = [
            {'domain': 'domain1'},
            {'domain': 'domain2'},
            {'domain': 'domain3'},
        ]

        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()

            unm_matcher = common._get_3par_unm_name(self.volume['id'])
            osv_matcher = common._get_3par_vol_name(volume['id'])

            existing_ref = {'source-name': unm_matcher}

            self.assertRaises(exception.Invalid3PARDomain,
                              self.driver.manage_existing,
                              volume=volume,
                              existing_ref=existing_ref)

            expected = [
                mock.call.getVolume(unm_matcher),
                mock.call.modifyVolume(
                    unm_matcher, {
                        'newName': osv_matcher,
                        'comment': mock.ANY}),
                mock.call.getCPG('POOL1'),
                mock.call.getVolume(osv_matcher),
                mock.call.getCPG('testUserCpg0'),
                mock.call.getCPG('POOL1'),
                # Rollback: original name and comment restored.
                mock.call.modifyVolume(
                    osv_matcher, {'newName': unm_matcher,
                                  'comment': self.MANAGE_VOLUME_INFO
                                  ['comment']})
            ]

            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    def test_manage_existing_get_size(self):
        """manage_existing_get_size converts the 3PAR MiB size to GiB."""
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'sizeMiB': 2048}

        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()

            unm_matcher = common._get_3par_unm_name(self.volume['id'])
            volume = {}
            existing_ref = {'source-name': unm_matcher}

            size = self.driver.manage_existing_get_size(volume, existing_ref)

            expected_size = 2  # 2048 MiB == 2 GiB
            expected = [mock.call.getVolume(existing_ref['source-name'])]

            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertEqual(expected_size, size)
    def test_manage_existing_get_size_invalid_reference(self):
        """References without a usable 'source-name' are rejected."""
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            volume = {}

            # self.VOLUME_3PAR_NAME is not a valid reference name here.
            existing_ref = {'source-name': self.VOLUME_3PAR_NAME}

            self.assertRaises(exception.ManageExistingInvalidReference,
                              self.driver.manage_existing_get_size,
                              volume=volume,
                              existing_ref=existing_ref)

            mock_client.assert_has_calls(
                self.standard_login +
                self.standard_logout)

            # An empty reference is rejected as well.
            existing_ref = {}

            self.assertRaises(exception.ManageExistingInvalidReference,
                              self.driver.manage_existing_get_size,
                              volume=volume,
                              existing_ref=existing_ref)

            mock_client.assert_has_calls(
                self.standard_login +
                self.standard_logout)
    def test_manage_existing_get_size_invalid_input(self):
        """A size query for a non-existent backend volume raises InvalidInput."""
        mock_client = self.setup_driver()
        mock_client.getVolume.side_effect = hpexceptions.HTTPNotFound('fake')

        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()

            unm_matcher = common._get_3par_unm_name(self.volume['id'])
            volume = {}
            existing_ref = {'source-name': unm_matcher}

            self.assertRaises(exception.InvalidInput,
                              self.driver.manage_existing_get_size,
                              volume=volume,
                              existing_ref=existing_ref)

            expected = [mock.call.getVolume(existing_ref['source-name'])]

            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    def test_unmanage(self):
        """unmanage renames the managed ('osv-') volume to the 'unm-' name."""
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()

            self.driver.unmanage(self.volume)

            osv_matcher = common._get_3par_vol_name(self.volume['id'])
            unm_matcher = common._get_3par_unm_name(self.volume['id'])

            expected = [
                mock.call.modifyVolume(osv_matcher, {'newName': unm_matcher})
            ]

            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
def test__safe_hostname(self):
long_hostname = "abc123abc123abc123abc123abc123abc123"
fixed_hostname = "abc123abc123abc123abc123abc123a"
common = hpcommon.HP3PARCommon(None)
safe_host = common._safe_hostname(long_hostname)
self.assertEqual(fixed_hostname, safe_host)
class TestHP3PARFCDriver(HP3PARBaseDriver, test.TestCase):
    """Unit tests for the HP 3PAR Fibre Channel driver."""

    # Expected connection info returned by initialize_connection() for the
    # default test connector: one LUN over two target WWNs, with an
    # initiator->target map covering both initiator ports.
    properties = {
        'driver_volume_type': 'fibre_channel',
        'data': {
            'encrypted': False,
            'target_lun': 90,
            'target_wwn': ['0987654321234', '123456789000987'],
            'target_discovered': True,
            'initiator_target_map': {'123456789012345':
                                     ['0987654321234', '123456789000987'],
                                     '123456789054321':
                                     ['0987654321234', '123456789000987'],
                                     }}}
def setup_driver(self, config=None, mock_conf=None, wsapi_version=None):
self.ctxt = context.get_admin_context()
mock_client = self.setup_mock_client(
conf=config,
m_conf=mock_conf,
driver=hpfcdriver.HP3PARFCDriver)
if wsapi_version:
mock_client.getWsApiVersion.return_value = (
wsapi_version)
else:
mock_client.getWsApiVersion.return_value = (
self.wsapi_version_latest)
expected = [
mock.call.getCPG(HP3PAR_CPG),
mock.call.getCPG(HP3PAR_CPG2)]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
mock_client.reset_mock()
return mock_client
    def test_initialize_connection(self):
        """initialize_connection exports the volume and returns FC properties.

        The host lookup first raises HTTPNotFound, queryHost then resolves
        it by WWN; a VLUN is created with auto LUN selection and the result
        must match ``self.properties``.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            hpexceptions.HTTPNotFound('fake'),
            {'name': self.FAKE_HOST,
             'FCPaths': [{'driverVersion': None,
                          'firmwareVersion': None,
                          'hostSpeed': 0,
                          'model': None,
                          'portPos': {'cardPort': 1, 'node': 1,
                                      'slot': 2},
                          'vendor': None,
                          'wwn': self.wwn[0]},
                         {'driverVersion': None,
                          'firmwareVersion': None,
                          'hostSpeed': 0,
                          'model': None,
                          'portPos': {'cardPort': 1, 'node': 0,
                                      'slot': 2},
                          'vendor': None,
                          'wwn': self.wwn[1]}]}]
        mock_client.queryHost.return_value = {
            'members': [{
                'name': self.FAKE_HOST
            }]
        }
        mock_client.getVLUN.side_effect = [
            hpexceptions.HTTPNotFound('fake')]
        mock_client.getHostVLUNs.return_value = [
            {'active': True,
             'volumeName': self.VOLUME_3PAR_NAME,
             'lun': 90, 'type': 0}]

        location = ("%(volume_name)s,%(lun_id)s,%(host)s,%(nsp)s" %
                    {'volume_name': self.VOLUME_3PAR_NAME,
                     'lun_id': 90,
                     'host': self.FAKE_HOST,
                     'nsp': 'something'})
        mock_client.createVLUN.return_value = location

        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            result = self.driver.initialize_connection(
                self.volume,
                self.connector)

            expected = [
                mock.call.getVolume(self.VOLUME_3PAR_NAME),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.queryHost(wwns=['123456789012345',
                                          '123456789054321']),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.getPorts(),
                mock.call.getVLUN(self.VOLUME_3PAR_NAME),
                mock.call.createVLUN(
                    self.VOLUME_3PAR_NAME,
                    auto=True,
                    hostname=self.FAKE_HOST),
                mock.call.getHostVLUNs(self.FAKE_HOST)]

            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)

            self.assertDictMatch(result, self.properties)
    @mock.patch('cinder.zonemanager.utils.create_lookup_service')
    def test_initialize_connection_with_lookup_single_nsp(self, mock_lookup):
        """initialize_connection uses the FC SAN lookup service when present.

        With a fabric lookup service, only the fabric-visible initiator and
        target WWNs are returned and the VLUN is created on the specific
        port (portPos) chosen for the single NSP.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        class fake_lookup_object(object):
            # Minimal stand-in for the zone-manager lookup service: maps one
            # initiator to one target on a single fabric.
            def get_device_mapping_from_network(self, connector, target_wwns):
                fake_map = {
                    'FAB_1': {
                        'target_port_wwn_list': ['0987654321234'],
                        'initiator_port_wwn_list': ['123456789012345']
                    }
                }
                return fake_map
        mock_lookup.return_value = fake_lookup_object()
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            hpexceptions.HTTPNotFound('fake'),
            {'name': self.FAKE_HOST,
             'FCPaths': [{'driverVersion': None,
                          'firmwareVersion': None,
                          'hostSpeed': 0,
                          'model': None,
                          'portPos': {'cardPort': 1, 'node': 1,
                                      'slot': 2},
                          'vendor': None,
                          'wwn': self.wwn[0]}]}]
        mock_client.queryHost.return_value = {
            'members': [{
                'name': self.FAKE_HOST
            }]
        }
        mock_client.getVLUN.side_effect = [
            hpexceptions.HTTPNotFound('fake')]
        mock_client.getHostVLUNs.return_value = [
            {'active': True,
             'volumeName': self.VOLUME_3PAR_NAME,
             'lun': 90, 'type': 0}]
        location = ("%(volume_name)s,%(lun_id)s,%(host)s,%(nsp)s" %
                    {'volume_name': self.VOLUME_3PAR_NAME,
                     'lun_id': 90,
                     'host': self.FAKE_HOST,
                     'nsp': 'something'})
        mock_client.createVLUN.return_value = location
        connector = {'ip': '10.0.0.2',
                     'initiator': 'iqn.1993-08.org.debian:01:222',
                     'wwpns': [self.wwn[0]],
                     'wwnns': ["223456789012345"],
                     'host': self.FAKE_HOST}
        expected_properties = {
            'driver_volume_type': 'fibre_channel',
            'data': {
                'encrypted': False,
                'target_lun': 90,
                'target_wwn': ['0987654321234'],
                'target_discovered': True,
                'initiator_target_map': {'123456789012345':
                                         ['0987654321234']
                                         }}}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            result = self.driver.initialize_connection(self.volume, connector)

            expected = [
                mock.call.getVolume(self.VOLUME_3PAR_NAME),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost(self.FAKE_HOST),
                mock.ANY,
                mock.call.getHost(self.FAKE_HOST),
                mock.call.getPorts(),
                mock.call.getVLUN(self.VOLUME_3PAR_NAME),
                mock.call.getPorts(),
                # VLUN pinned to a specific port when a single NSP is used.
                mock.call.createVLUN(
                    self.VOLUME_3PAR_NAME,
                    auto=True,
                    hostname=self.FAKE_HOST,
                    portPos={'node': 7, 'slot': 1, 'cardPort': 1}),
                mock.call.getHostVLUNs(self.FAKE_HOST)]

            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)

            self.assertDictMatch(result, expected_properties)
def test_initialize_connection_encrypted(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
mock_client.getCPG.return_value = {}
mock_client.getHost.side_effect = [
hpexceptions.HTTPNotFound('fake'),
{'name': self.FAKE_HOST,
'FCPaths': [{'driverVersion': None,
'firmwareVersion': None,
'hostSpeed': 0,
'model': None,
'portPos': {'cardPort': 1, 'node': 1,
'slot': 2},
'vendor': None,
'wwn': self.wwn[0]},
{'driverVersion': None,
'firmwareVersion': None,
'hostSpeed': 0,
'model': None,
'portPos': {'cardPort': 1, 'node': 0,
'slot': 2},
'vendor': None,
'wwn': self.wwn[1]}]}]
mock_client.queryHost.return_value = {
'members': [{
'name': self.FAKE_HOST
}]
}
mock_client.getVLUN.side_effect = [
hpexceptions.HTTPNotFound('fake')]
mock_client.getHostVLUNs.return_value = [
{'active': True,
'volumeName': self.VOLUME_3PAR_NAME,
'lun': 90, 'type': 0}]
location = ("%(volume_name)s,%(lun_id)s,%(host)s,%(nsp)s" %
{'volume_name': self.VOLUME_3PAR_NAME,
'lun_id': 90,
'host': self.FAKE_HOST,
'nsp': 'something'})
mock_client.createVLUN.return_value = location
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
result = self.driver.initialize_connection(
self.volume_encrypted,
self.connector)
expected = [
mock.call.getVolume(self.VOLUME_3PAR_NAME),
mock.call.getCPG(HP3PAR_CPG),
mock.call.getHost(self.FAKE_HOST),
mock.call.queryHost(wwns=['123456789012345',
'123456789054321']),
mock.call.getHost(self.FAKE_HOST),
mock.call.getPorts(),
mock.call.getVLUN(self.VOLUME_3PAR_NAME),
mock.call.createVLUN(
self.VOLUME_3PAR_NAME,
auto=True,
hostname=self.FAKE_HOST),
mock.call.getHostVLUNs(self.FAKE_HOST)]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
expected_properties = self.properties
expected_properties['data']['encrypted'] = True
self.assertDictMatch(result, expected_properties)
    def test_terminate_connection(self):
        """terminate_connection deletes the VLUN and then the empty host.

        Also re-runs the flow with deleteHost raising handled HTTPConflict
        errors ("has exported VLUN", "host is a member of a set") to verify
        those are tolerated.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()

        effects = [
            [{'active': True, 'volumeName': self.VOLUME_3PAR_NAME,
              'lun': None, 'type': 0}],
            hpexceptions.HTTPNotFound,
            hpexceptions.HTTPNotFound]

        mock_client.getHostVLUNs.side_effect = effects

        mock_client.queryHost.return_value = {
            'members': [{
                'name': self.FAKE_HOST
            }]
        }

        expected = [
            mock.call.queryHost(wwns=['123456789012345', '123456789054321']),
            mock.call.getHostVLUNs(self.FAKE_HOST),
            mock.call.deleteVLUN(
                self.VOLUME_3PAR_NAME,
                None,
                self.FAKE_HOST),
            mock.call.getHostVLUNs(self.FAKE_HOST),
            mock.call.deleteHost(self.FAKE_HOST),
            mock.call.getHostVLUNs(self.FAKE_HOST),
            mock.call.getPorts()]

        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            conn_info = self.driver.terminate_connection(self.volume,
                                                         self.connector)
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)

            # When the host is removed, the FC zone removal data is present.
            self.assertIn('data', conn_info)
            self.assertIn('initiator_target_map', conn_info['data'])
            mock_client.reset_mock()
            mock_client.getHostVLUNs.side_effect = effects

            # mock some deleteHost exceptions that are handled
            delete_with_vlun = hpexceptions.HTTPConflict(
                error={'message': "has exported VLUN"})
            delete_with_hostset = hpexceptions.HTTPConflict(
                error={'message': "host is a member of a set"})
            mock_client.deleteHost = mock.Mock(
                side_effect=[delete_with_vlun, delete_with_hostset])

            conn_info = self.driver.terminate_connection(self.volume,
                                                         self.connector)
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            mock_client.reset_mock()
            mock_client.getHostVLUNs.side_effect = effects

            conn_info = self.driver.terminate_connection(self.volume,
                                                         self.connector)
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    @mock.patch('cinder.zonemanager.utils.create_lookup_service')
    def test_terminate_connection_with_lookup(self, mock_lookup):
        """terminate_connection with a fabric lookup service available.

        Same VLUN/host teardown flow as test_terminate_connection, with the
        zone-manager lookup service mocked in.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        class fake_lookup_object(object):
            # Minimal stand-in for the zone-manager lookup service.
            def get_device_mapping_from_network(self, connector, target_wwns):
                fake_map = {
                    'FAB_1': {
                        'target_port_wwn_list': ['0987654321234'],
                        'initiator_port_wwn_list': ['123456789012345']
                    }
                }
                return fake_map
        mock_lookup.return_value = fake_lookup_object()
        mock_client = self.setup_driver()

        effects = [
            [{'active': True, 'volumeName': self.VOLUME_3PAR_NAME,
              'lun': None, 'type': 0}],
            hpexceptions.HTTPNotFound,
            hpexceptions.HTTPNotFound]

        mock_client.queryHost.return_value = {
            'members': [{
                'name': self.FAKE_HOST
            }]
        }

        mock_client.getHostVLUNs.side_effect = effects

        expected = [
            mock.call.queryHost(wwns=['123456789012345', '123456789054321']),
            mock.call.getHostVLUNs(self.FAKE_HOST),
            mock.call.deleteVLUN(
                self.VOLUME_3PAR_NAME,
                None,
                self.FAKE_HOST),
            mock.call.getHostVLUNs(self.FAKE_HOST),
            mock.call.deleteHost(self.FAKE_HOST),
            mock.call.getHostVLUNs(self.FAKE_HOST),
            mock.call.getPorts()]

        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            conn_info = self.driver.terminate_connection(self.volume,
                                                         self.connector)
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)

            self.assertIn('data', conn_info)
            self.assertIn('initiator_target_map', conn_info['data'])
            mock_client.reset_mock()
            mock_client.getHostVLUNs.side_effect = effects

            # mock some deleteHost exceptions that are handled
            delete_with_vlun = hpexceptions.HTTPConflict(
                error={'message': "has exported VLUN"})
            delete_with_hostset = hpexceptions.HTTPConflict(
                error={'message': "host is a member of a set"})
            mock_client.deleteHost = mock.Mock(
                side_effect=[delete_with_vlun, delete_with_hostset])

            conn_info = self.driver.terminate_connection(self.volume,
                                                         self.connector)
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            mock_client.reset_mock()
            mock_client.getHostVLUNs.side_effect = effects

            conn_info = self.driver.terminate_connection(self.volume,
                                                         self.connector)
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    def test_terminate_connection_more_vols(self):
        """With other VLUNs still exported, the host must not be deleted.

        Since the host retains exports, no FC zone removal data
        (initiator_target_map) is returned either.
        """
        mock_client = self.setup_driver()
        # mock more than one vlun on the host (don't even try to remove host)
        mock_client.getHostVLUNs.return_value = \
            [
                {'active': True,
                 'volumeName': self.VOLUME_3PAR_NAME,
                 'lun': None, 'type': 0},
                {'active': True,
                 'volumeName': 'there-is-another-volume',
                 'lun': None, 'type': 0},
            ]

        mock_client.queryHost.return_value = {
            'members': [{
                'name': self.FAKE_HOST
            }]
        }

        # Note: no deleteHost call expected.
        expect_less = [
            mock.call.queryHost(wwns=['123456789012345', '123456789054321']),
            mock.call.getHostVLUNs(self.FAKE_HOST),
            mock.call.deleteVLUN(
                self.VOLUME_3PAR_NAME,
                None,
                self.FAKE_HOST),
            mock.call.getHostVLUNs(self.FAKE_HOST),
            mock.call.getHostVLUNs(self.FAKE_HOST)]

        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            conn_info = self.driver.terminate_connection(self.volume,
                                                         self.connector)
            mock_client.assert_has_calls(
                self.standard_login +
                expect_less +
                self.standard_logout)
            self.assertNotIn('initiator_target_map', conn_info['data'])
    def test_get_volume_stats(self):
        """get_volume_stats reports per-pool capacity and scheduler hints.

        Checks the unlimited-CPG case (capacity from available-space query)
        and the limited-CPG case (capacity from the CPG's SDGrowth limit),
        including filter/goodness functions and utilization.
        """
        # setup_mock_client drive with the configuration
        # and return the mock HTTP 3PAR client
        config = self.setup_configuration()
        config.filter_function = FILTER_FUNCTION
        config.goodness_function = GOODNESS_FUNCTION
        mock_client = self.setup_driver(config=config)
        mock_client.getCPG.return_value = self.cpgs[0]
        mock_client.getStorageSystemInfo.return_value = {
            'serialNumber': '1234'
        }

        # cpg has no limit
        mock_client.getCPGAvailableSpace.return_value = {
            "capacityEfficiency": {u'compaction': 594.4},
            "rawFreeMiB": 1024.0 * 6,
            "usableFreeMiB": 1024.0 * 3
        }

        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()

            stats = self.driver.get_volume_stats(True)
            const = 0.0009765625  # MiB -> GiB conversion factor (1/1024)
            self.assertEqual(stats['storage_protocol'], 'FC')
            self.assertEqual(stats['total_capacity_gb'], 0)
            self.assertEqual(stats['free_capacity_gb'], 0)
            self.assertEqual(stats['pools'][0]['total_capacity_gb'], 24.0)
            self.assertEqual(stats['pools'][0]['free_capacity_gb'], 3.0)
            self.assertEqual(stats['pools'][0]['capacity_utilization'], 87.5)
            self.assertEqual(stats['pools'][0]['total_volumes'], 3)
            self.assertEqual(stats['pools'][0]['goodness_function'],
                             GOODNESS_FUNCTION)
            self.assertEqual(stats['pools'][0]['filter_function'],
                             FILTER_FUNCTION)

            expected = [
                mock.call.getStorageSystemInfo(),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getCPGAvailableSpace(HP3PAR_CPG),
                mock.call.getCPG(HP3PAR_CPG2),
                mock.call.getCPGAvailableSpace(HP3PAR_CPG2)]

            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            stats = self.driver.get_volume_stats(True)
            self.assertEqual(stats['storage_protocol'], 'FC')
            self.assertEqual(stats['total_capacity_gb'], 0)
            self.assertEqual(stats['free_capacity_gb'], 0)
            self.assertEqual(stats['pools'][0]['total_capacity_gb'], 24.0)
            self.assertEqual(stats['pools'][0]['free_capacity_gb'], 3.0)
            self.assertEqual(stats['pools'][0]['capacity_utilization'], 87.5)
            self.assertEqual(stats['pools'][0]['total_volumes'], 3)
            self.assertEqual(stats['pools'][0]['goodness_function'],
                             GOODNESS_FUNCTION)
            self.assertEqual(stats['pools'][0]['filter_function'],
                             FILTER_FUNCTION)

            # Now with a growth-limited CPG: capacity derives from limitMiB.
            cpg2 = self.cpgs[0].copy()
            cpg2.update({'SDGrowth': {'limitMiB': 8192}})
            mock_client.getCPG.return_value = cpg2
            stats = self.driver.get_volume_stats(True)
            self.assertEqual(stats['storage_protocol'], 'FC')
            total_capacity_gb = 8192 * const
            self.assertEqual(stats['total_capacity_gb'], 0)
            self.assertEqual(stats['pools'][0]['total_capacity_gb'],
                             total_capacity_gb)
            free_capacity_gb = int(
                (8192 - (self.cpgs[0]['UsrUsage']['usedMiB'] +
                         self.cpgs[0]['SDUsage']['usedMiB'])) * const)
            self.assertEqual(stats['free_capacity_gb'], 0)
            self.assertEqual(stats['pools'][0]['free_capacity_gb'],
                             free_capacity_gb)
            cap_util = (float(total_capacity_gb - free_capacity_gb) /
                        float(total_capacity_gb)) * 100
            self.assertEqual(stats['pools'][0]['capacity_utilization'],
                             cap_util)
            self.assertEqual(stats['pools'][0]['total_volumes'], 3)
            self.assertEqual(stats['pools'][0]['goodness_function'],
                             GOODNESS_FUNCTION)
            self.assertEqual(stats['pools'][0]['filter_function'],
                             FILTER_FUNCTION)
            # NOTE(review): these calls recreate the CPG on the mocked
            # client; presumably cleanup for subsequent checks -- confirm.
            common.client.deleteCPG(HP3PAR_CPG)
            common.client.createCPG(HP3PAR_CPG, {})
    def test_create_host(self):
        """Create a new FC host when no host exists on the array.

        getHost first raises HTTPNotFound, so the driver must create the
        host from the connector's WWNs and then re-read it.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        # First getHost call: host missing; second call: the host that
        # createHost is expected to have built, with both FC paths.
        mock_client.getHost.side_effect = [
            hpexceptions.HTTPNotFound('fake'),
            {'name': self.FAKE_HOST,
                'FCPaths': [{'driverVersion': None,
                             'firmwareVersion': None,
                             'hostSpeed': 0,
                             'model': None,
                             'portPos': {'cardPort': 1, 'node': 1,
                                         'slot': 2},
                             'vendor': None,
                             'wwn': self.wwn[0]},
                            {'driverVersion': None,
                             'firmwareVersion': None,
                             'hostSpeed': 0,
                             'model': None,
                             'portPos': {'cardPort': 1, 'node': 0,
                                         'slot': 2},
                             'vendor': None,
                             'wwn': self.wwn[1]}]}]
        # queryHost returns None: no other host owns these WWNs.
        mock_client.queryHost.return_value = None
        mock_client.getVLUN.return_value = {'lun': 186}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host = self.driver._create_host(
                common,
                self.volume,
                self.connector)
            # The driver must look up the volume/CPG, miss on getHost,
            # query by WWN, create the host, then read it back.
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.queryHost(wwns=['123456789012345',
                                          '123456789054321']),
                mock.call.createHost(
                    self.FAKE_HOST,
                    FCWwns=['123456789012345', '123456789054321'],
                    optional={'domain': None, 'persona': 2}),
                mock.call.getHost(self.FAKE_HOST)]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], self.FAKE_HOST)
    def test_create_invalid_host(self):
        """Reuse an existing host found by WWN under a different name.

        getHost for the expected name raises HTTPNotFound, but queryHost
        reports that 'fakehost.foo' already owns the connector's WWNs, so
        the driver must use that host instead of creating a new one.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            hpexceptions.HTTPNotFound('Host not found.'), {
                'name': 'fakehost.foo',
                'FCPaths': [{'wwn': '123456789012345'}, {
                    'wwn': '123456789054321'}]}]
        mock_client.queryHost.return_value = {
            'members': [{
                'name': 'fakehost.foo'
            }]
        }
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host = self.driver._create_host(
                common,
                self.volume,
                self.connector)
            # No createHost call: the queried host is fetched directly.
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost('fakehost'),
                mock.call.queryHost(wwns=['123456789012345',
                                          '123456789054321']),
                mock.call.getHost('fakehost.foo')]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], 'fakehost.foo')
    def test_create_modify_host(self):
        """Add FC paths to an existing host that has none.

        The host exists but reports an empty FCPaths list, so the driver
        must modify it to add both of the connector's WWNs.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        # First getHost: host with no FC paths; second: host after the
        # expected modifyHost call added both WWNs.
        mock_client.getHost.side_effect = [{
            'name': self.FAKE_HOST, 'FCPaths': []},
            {'name': self.FAKE_HOST,
                'FCPaths': [{'wwn': '123456789012345'}, {
                    'wwn': '123456789054321'}]}]
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host = self.driver._create_host(
                common,
                self.volume,
                self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost('fakehost'),
                mock.call.modifyHost(
                    'fakehost', {
                        'FCWWNs': ['123456789012345', '123456789054321'],
                        'pathOperation': 1}),
                mock.call.getHost('fakehost')]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], self.FAKE_HOST)
            self.assertEqual(len(host['FCPaths']), 2)
    def test_modify_host_with_new_wwn(self):
        """Add only the missing WWN to a host that already has one.

        The host already lists one of the connector's two WWNs, so
        modifyHost must be called with just the missing WWN.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        # Host before the modify: only the second WWN present.
        getHost_ret1 = {
            'name': self.FAKE_HOST,
            'FCPaths': [{'wwn': '123456789054321'}]}
        # Host after the modify: both WWNs present.
        getHost_ret2 = {
            'name': self.FAKE_HOST,
            'FCPaths': [{'wwn': '123456789012345'},
                        {'wwn': '123456789054321'}]}
        mock_client.getHost.side_effect = [getHost_ret1, getHost_ret2]
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host = self.driver._create_host(
                common,
                self.volume,
                self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost('fakehost'),
                mock.call.modifyHost(
                    'fakehost', {
                        'FCWWNs': ['123456789012345'], 'pathOperation': 1}),
                mock.call.getHost('fakehost')]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], self.FAKE_HOST)
            self.assertEqual(len(host['FCPaths']), 2)
    def test_modify_host_with_unknown_wwn_and_new_wwn(self):
        """Add a missing WWN while leaving an unrelated WWN untouched.

        The host already has one of the connector's WWNs plus a WWN that
        does not belong to this connector; only the missing connector WWN
        may be added, and the unknown one must be left alone.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        # Host before: one connector WWN plus a foreign WWN.
        getHost_ret1 = {
            'name': self.FAKE_HOST,
            'FCPaths': [{'wwn': '123456789054321'},
                        {'wwn': 'xxxxxxxxxxxxxxx'}]}
        # Host after: both connector WWNs, foreign WWN preserved.
        getHost_ret2 = {
            'name': self.FAKE_HOST,
            'FCPaths': [{'wwn': '123456789012345'},
                        {'wwn': '123456789054321'},
                        {'wwn': 'xxxxxxxxxxxxxxx'}]}
        mock_client.getHost.side_effect = [getHost_ret1, getHost_ret2]
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host = self.driver._create_host(
                common,
                self.volume,
                self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost('fakehost'),
                mock.call.modifyHost(
                    'fakehost', {
                        'FCWWNs': ['123456789012345'], 'pathOperation': 1}),
                mock.call.getHost('fakehost')]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], self.FAKE_HOST)
            self.assertEqual(len(host['FCPaths']), 3)
class TestHP3PARISCSIDriver(HP3PARBaseDriver, test.TestCase):
TARGET_IQN = 'iqn.2000-05.com.3pardata:21810002ac00383d'
TARGET_LUN = 186
properties = {
'driver_volume_type': 'iscsi',
'data':
{'encrypted': False,
'target_discovered': True,
'target_iqn': TARGET_IQN,
'target_lun': TARGET_LUN,
'target_portal': '1.1.1.2:1234'}}
    def setup_driver(self, config=None, mock_conf=None, wsapi_version=None):
        """Set up the iSCSI driver backed by a mock 3PAR client.

        :param config: optional driver configuration override
        :param mock_conf: optional mock-client configuration dict
        :param wsapi_version: WSAPI version the mock should report;
                              defaults to the latest supported version
        :returns: the mock client with its call history cleared, so each
                  test starts from a clean slate
        """
        self.ctxt = context.get_admin_context()
        mock_client = self.setup_mock_client(
            conf=config,
            m_conf=mock_conf,
            driver=hpdriver.HP3PARISCSIDriver)
        if wsapi_version:
            mock_client.getWsApiVersion.return_value = (
                wsapi_version)
        else:
            mock_client.getWsApiVersion.return_value = (
                self.wsapi_version_latest)
        # Driver startup validates the CPGs and scans the iSCSI ports;
        # verify those calls happened before wiping the history.
        expected_get_cpgs = [
            mock.call.getCPG(HP3PAR_CPG),
            mock.call.getCPG(HP3PAR_CPG2)]
        expected_get_ports = [mock.call.getPorts()]
        mock_client.assert_has_calls(
            self.standard_login +
            expected_get_cpgs +
            self.standard_logout +
            self.standard_login +
            expected_get_ports +
            self.standard_logout)
        mock_client.reset_mock()
        return mock_client
    def test_initialize_connection(self):
        """initialize_connection returns the expected iSCSI properties.

        The host does not exist under the expected name but is found via
        queryHost; the resulting connection info must match the
        class-level ``properties`` template.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            hpexceptions.HTTPNotFound('fake'),
            {'name': self.FAKE_HOST}]
        mock_client.queryHost.return_value = {
            'members': [{
                'name': self.FAKE_HOST
            }]
        }
        mock_client.getHostVLUNs.return_value = [
            {'active': True,
             'volumeName': self.VOLUME_3PAR_NAME,
             'lun': self.TARGET_LUN, 'type': 0}]
        mock_client.getVLUN.return_value = {
            'hostname': self.FAKE_HOST,
            'lun': self.TARGET_LUN,
            'portPos': {'node': 8, 'slot': 1, 'cardPort': 1}}
        # VLUN location string in "name,lun,host,nsp" form.
        location = ("%(volume_name)s,%(lun_id)s,%(host)s,%(nsp)s" %
                    {'volume_name': self.VOLUME_3PAR_NAME,
                     'lun_id': self.TARGET_LUN,
                     'host': self.FAKE_HOST,
                     'nsp': 'something'})
        mock_client.createVLUN.return_value = location
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            result = self.driver.initialize_connection(
                self.volume,
                self.connector)
            expected = [
                mock.call.getVolume(self.VOLUME_3PAR_NAME),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.queryHost(iqns=['iqn.1993-08.org.debian:01:222']),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.getVLUN(self.VOLUME_3PAR_NAME)]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertDictMatch(result, self.properties)
def test_initialize_connection_encrypted(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
mock_client.getCPG.return_value = {}
mock_client.getHost.side_effect = [
hpexceptions.HTTPNotFound('fake'),
{'name': self.FAKE_HOST}]
mock_client.queryHost.return_value = {
'members': [{
'name': self.FAKE_HOST
}]
}
mock_client.getHostVLUNs.return_value = [
{'active': True,
'volumeName': self.VOLUME_3PAR_NAME,
'lun': self.TARGET_LUN, 'type': 0}]
mock_client.getVLUN.return_value = {
'hostname': self.FAKE_HOST,
'lun': self.TARGET_LUN,
'portPos': {'node': 8, 'slot': 1, 'cardPort': 1}}
location = ("%(volume_name)s,%(lun_id)s,%(host)s,%(nsp)s" %
{'volume_name': self.VOLUME_3PAR_NAME,
'lun_id': self.TARGET_LUN,
'host': self.FAKE_HOST,
'nsp': 'something'})
mock_client.createVLUN.return_value = location
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
result = self.driver.initialize_connection(
self.volume_encrypted,
self.connector)
expected = [
mock.call.getVolume(self.VOLUME_3PAR_NAME),
mock.call.getCPG(HP3PAR_CPG),
mock.call.getHost(self.FAKE_HOST),
mock.call.queryHost(iqns=['iqn.1993-08.org.debian:01:222']),
mock.call.getHost(self.FAKE_HOST),
mock.call.getVLUN(self.VOLUME_3PAR_NAME)]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
expected_properties = self.properties
expected_properties['data']['encrypted'] = True
self.assertDictMatch(result, self.properties)
    def test_get_volume_stats(self):
        """get_volume_stats reports pool capacity for the iSCSI driver.

        First pass: the CPG has no growth limit, so capacity comes from
        getCPGAvailableSpace.  Second pass: an SDGrowth limit of 8192 MiB
        caps the pool's total capacity instead.
        """
        # setup_mock_client drive with the configuration
        # and return the mock HTTP 3PAR client
        config = self.setup_configuration()
        config.filter_function = FILTER_FUNCTION
        config.goodness_function = GOODNESS_FUNCTION
        mock_client = self.setup_driver(config=config)
        mock_client.getCPG.return_value = self.cpgs[0]
        mock_client.getStorageSystemInfo.return_value = {
            'serialNumber': '1234'
        }
        # cpg has no limit
        mock_client.getCPGAvailableSpace.return_value = {
            "capacityEfficiency": {u'compaction': 594.4},
            "rawFreeMiB": 1024.0 * 6,
            "usableFreeMiB": 1024.0 * 3
        }
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            stats = self.driver.get_volume_stats(True)
            # MiB -> GiB conversion factor (1 / 1024).
            const = 0.0009765625
            self.assertEqual(stats['storage_protocol'], 'iSCSI')
            self.assertEqual(stats['total_capacity_gb'], 0)
            self.assertEqual(stats['free_capacity_gb'], 0)
            self.assertEqual(stats['pools'][0]['total_capacity_gb'], 24.0)
            self.assertEqual(stats['pools'][0]['free_capacity_gb'], 3.0)
            self.assertEqual(stats['pools'][0]['capacity_utilization'], 87.5)
            self.assertEqual(stats['pools'][0]['total_volumes'], 3)
            self.assertEqual(stats['pools'][0]['goodness_function'],
                             GOODNESS_FUNCTION)
            self.assertEqual(stats['pools'][0]['filter_function'],
                             FILTER_FUNCTION)
            expected = [
                mock.call.getStorageSystemInfo(),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getCPGAvailableSpace(HP3PAR_CPG),
                mock.call.getCPG(HP3PAR_CPG2),
                mock.call.getCPGAvailableSpace(HP3PAR_CPG2)]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            # Second pass: a CPG growth limit caps the reported capacity.
            cpg2 = self.cpgs[0].copy()
            cpg2.update({'SDGrowth': {'limitMiB': 8192}})
            mock_client.getCPG.return_value = cpg2
            stats = self.driver.get_volume_stats(True)
            self.assertEqual(stats['storage_protocol'], 'iSCSI')
            total_capacity_gb = 8192 * const
            self.assertEqual(stats['total_capacity_gb'], 0)
            self.assertEqual(stats['pools'][0]['total_capacity_gb'],
                             total_capacity_gb)
            # Free space is the limit minus user + snapshot usage.
            free_capacity_gb = int(
                (8192 - (self.cpgs[0]['UsrUsage']['usedMiB'] +
                         self.cpgs[0]['SDUsage']['usedMiB'])) * const)
            self.assertEqual(stats['free_capacity_gb'], 0)
            self.assertEqual(stats['pools'][0]['free_capacity_gb'],
                             free_capacity_gb)
            cap_util = (float(total_capacity_gb - free_capacity_gb) /
                        float(total_capacity_gb)) * 100
            self.assertEqual(stats['pools'][0]['capacity_utilization'],
                             cap_util)
            self.assertEqual(stats['pools'][0]['total_volumes'], 3)
            self.assertEqual(stats['pools'][0]['goodness_function'],
                             GOODNESS_FUNCTION)
            self.assertEqual(stats['pools'][0]['filter_function'],
                             FILTER_FUNCTION)
def test_create_host(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
mock_client.getCPG.return_value = {}
mock_client.getHost.side_effect = [
hpexceptions.HTTPNotFound('fake'),
{'name': self.FAKE_HOST}]
mock_client.queryHost.return_value = None
mock_client.getVLUN.return_value = {'lun': self.TARGET_LUN}
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
host, auth_username, auth_password = self.driver._create_host(
common, self.volume, self.connector)
expected = [
mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
mock.call.getCPG(HP3PAR_CPG),
mock.call.getHost(self.FAKE_HOST),
mock.call.queryHost(iqns=['iqn.1993-08.org.debian:01:222']),
mock.call.createHost(
self.FAKE_HOST,
optional={'domain': None, 'persona': 2},
iscsiNames=['iqn.1993-08.org.debian:01:222']),
mock.call.getHost(self.FAKE_HOST)]
mock_client.assert_has_calls(expected)
self.assertEqual(host['name'], self.FAKE_HOST)
self.assertEqual(auth_username, None)
self.assertEqual(auth_password, None)
    def test_create_host_chap_enabled(self):
        """Host creation with CHAP enabled also sets CHAP credentials.

        The CHAP user/password stored in the volume metadata must be
        applied to the new host via modifyHost and returned to the
        caller.
        """
        # setup_mock_client drive with CHAP enabled configuration
        # and return the mock HTTP 3PAR client
        config = self.setup_configuration()
        config.hp3par_iscsi_chap_enabled = True
        mock_client = self.setup_driver(config=config)
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            hpexceptions.HTTPNotFound('fake'),
            {'name': self.FAKE_HOST}]
        mock_client.queryHost.return_value = None
        mock_client.getVLUN.return_value = {'lun': self.TARGET_LUN}
        expected_mod_request = {
            'chapOperation': mock_client.HOST_EDIT_ADD,
            'chapOperationMode': mock_client.CHAP_INITIATOR,
            'chapName': 'test-user',
            'chapSecret': 'test-pass'
        }
        # Serve the CHAP user/password from the volume metadata mock.
        def get_side_effect(*args):
            data = {'value': None}
            if args[1] == CHAP_USER_KEY:
                data['value'] = 'test-user'
            elif args[1] == CHAP_PASS_KEY:
                data['value'] = 'test-pass'
            return data
        mock_client.getVolumeMetaData.side_effect = get_side_effect
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host, auth_username, auth_password = self.driver._create_host(
                common, self.volume, self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getVolumeMetaData(
                    'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_USER_KEY),
                mock.call.getVolumeMetaData(
                    'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.queryHost(iqns=['iqn.1993-08.org.debian:01:222']),
                mock.call.createHost(
                    self.FAKE_HOST,
                    optional={'domain': None, 'persona': 2},
                    iscsiNames=['iqn.1993-08.org.debian:01:222']),
                mock.call.modifyHost(
                    'fakehost',
                    expected_mod_request),
                mock.call.getHost(self.FAKE_HOST)
            ]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], self.FAKE_HOST)
            self.assertEqual(auth_username, 'test-user')
            self.assertEqual(auth_password, 'test-pass')
def test_create_invalid_host(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
mock_client.getCPG.return_value = {}
mock_client.getHost.side_effect = [
hpexceptions.HTTPNotFound('Host not found.'),
{'name': 'fakehost.foo'}]
mock_client.queryHost.return_value = {
'members': [{
'name': 'fakehost.foo'
}]
}
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
host, auth_username, auth_password = self.driver._create_host(
common, self.volume, self.connector)
expected = [
mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
mock.call.getCPG(HP3PAR_CPG),
mock.call.getHost(self.FAKE_HOST),
mock.call.queryHost(iqns=['iqn.1993-08.org.debian:01:222']),
mock.call.getHost('fakehost.foo')]
mock_client.assert_has_calls(expected)
self.assertEqual(host['name'], 'fakehost.foo')
self.assertEqual(auth_username, None)
self.assertEqual(auth_password, None)
    def test_create_invalid_host_chap_enabled(self):
        """A reused host found by IQN still gets CHAP credentials set.

        Like test_create_invalid_host, but with CHAP enabled: modifyHost
        must be called on the found host ('fakehost.foo') with the
        metadata-stored CHAP user/password, which are also returned.
        """
        # setup_mock_client drive with CHAP enabled configuration
        # and return the mock HTTP 3PAR client
        config = self.setup_configuration()
        config.hp3par_iscsi_chap_enabled = True
        mock_client = self.setup_driver(config=config)
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            hpexceptions.HTTPNotFound('Host not found.'),
            {'name': 'fakehost.foo'}]
        mock_client.queryHost.return_value = {
            'members': [{
                'name': 'fakehost.foo'
            }]
        }
        # Serve the CHAP user/password from the volume metadata mock.
        def get_side_effect(*args):
            data = {'value': None}
            if args[1] == CHAP_USER_KEY:
                data['value'] = 'test-user'
            elif args[1] == CHAP_PASS_KEY:
                data['value'] = 'test-pass'
            return data
        mock_client.getVolumeMetaData.side_effect = get_side_effect
        expected_mod_request = {
            'chapOperation': mock_client.HOST_EDIT_ADD,
            'chapOperationMode': mock_client.CHAP_INITIATOR,
            'chapName': 'test-user',
            'chapSecret': 'test-pass'
        }
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host, auth_username, auth_password = self.driver._create_host(
                common, self.volume, self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getVolumeMetaData(
                    'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_USER_KEY),
                mock.call.getVolumeMetaData(
                    'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.queryHost(iqns=['iqn.1993-08.org.debian:01:222']),
                mock.call.modifyHost(
                    'fakehost.foo',
                    expected_mod_request),
                mock.call.getHost('fakehost.foo')
            ]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], 'fakehost.foo')
            self.assertEqual(auth_username, 'test-user')
            self.assertEqual(auth_password, 'test-pass')
def test_create_modify_host(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
mock_client.getCPG.return_value = {}
mock_client.getHost.side_effect = [
{'name': self.FAKE_HOST, 'FCPaths': []},
{'name': self.FAKE_HOST,
'FCPaths': [{'wwn': '123456789012345'},
{'wwn': '123456789054321'}]}]
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
host, auth_username, auth_password = self.driver._create_host(
common, self.volume, self.connector)
expected = [
mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
mock.call.getCPG(HP3PAR_CPG),
mock.call.getHost(self.FAKE_HOST),
mock.call.modifyHost(
self.FAKE_HOST,
{'pathOperation': 1,
'iSCSINames': ['iqn.1993-08.org.debian:01:222']}),
mock.call.getHost(self.FAKE_HOST)]
mock_client.assert_has_calls(expected)
self.assertEqual(host['name'], self.FAKE_HOST)
self.assertEqual(auth_username, None)
self.assertEqual(auth_password, None)
self.assertEqual(len(host['FCPaths']), 2)
    def test_create_modify_host_chap_enabled(self):
        """Modifying an existing host with CHAP enabled sets credentials.

        The host exists without iSCSI names: one modifyHost call adds the
        IQN and a second applies the CHAP user/password read from the
        volume metadata, which are also returned.
        """
        # setup_mock_client drive with CHAP enabled configuration
        # and return the mock HTTP 3PAR client
        config = self.setup_configuration()
        config.hp3par_iscsi_chap_enabled = True
        mock_client = self.setup_driver(config=config)
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            {'name': self.FAKE_HOST, 'FCPaths': []},
            {'name': self.FAKE_HOST,
             'FCPaths': [{'wwn': '123456789012345'},
                         {'wwn': '123456789054321'}]}]
        # Serve the CHAP user/password from the volume metadata mock.
        def get_side_effect(*args):
            data = {'value': None}
            if args[1] == CHAP_USER_KEY:
                data['value'] = 'test-user'
            elif args[1] == CHAP_PASS_KEY:
                data['value'] = 'test-pass'
            return data
        mock_client.getVolumeMetaData.side_effect = get_side_effect
        expected_mod_request = {
            'chapOperation': mock_client.HOST_EDIT_ADD,
            'chapOperationMode': mock_client.CHAP_INITIATOR,
            'chapName': 'test-user',
            'chapSecret': 'test-pass'
        }
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host, auth_username, auth_password = self.driver._create_host(
                common, self.volume, self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getVolumeMetaData(
                    'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_USER_KEY),
                mock.call.getVolumeMetaData(
                    'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.modifyHost(
                    self.FAKE_HOST,
                    {'pathOperation': 1,
                     'iSCSINames': ['iqn.1993-08.org.debian:01:222']}),
                mock.call.modifyHost(
                    self.FAKE_HOST,
                    expected_mod_request
                ),
                mock.call.getHost(self.FAKE_HOST)]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], self.FAKE_HOST)
            self.assertEqual(auth_username, 'test-user')
            self.assertEqual(auth_password, 'test-pass')
            self.assertEqual(len(host['FCPaths']), 2)
def test_get_least_used_nsp_for_host_single(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
mock_client.getPorts.return_value = PORTS_RET
mock_client.getVLUNs.return_value = VLUNS1_RET
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
# Setup a single ISCSI IP
iscsi_ips = ["10.10.220.253"]
self.driver.configuration.hp3par_iscsi_ips = iscsi_ips
self.driver.initialize_iscsi_ports(common)
nsp = self.driver._get_least_used_nsp_for_host(common, 'newhost')
self.assertEqual(nsp, "1:8:1")
def test_get_least_used_nsp_for_host_new(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
mock_client.getPorts.return_value = PORTS_RET
mock_client.getVLUNs.return_value = VLUNS1_RET
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
# Setup two ISCSI IPs
iscsi_ips = ["10.10.220.252", "10.10.220.253"]
self.driver.configuration.hp3par_iscsi_ips = iscsi_ips
self.driver.initialize_iscsi_ports(common)
# Host 'newhost' does not yet have any iscsi paths,
# so the 'least used' is returned
nsp = self.driver._get_least_used_nsp_for_host(common, 'newhost')
self.assertEqual(nsp, "1:8:2")
def test_get_least_used_nsp_for_host_reuse(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
mock_client.getPorts.return_value = PORTS_RET
mock_client.getVLUNs.return_value = VLUNS1_RET
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
# Setup two ISCSI IPs
iscsi_ips = ["10.10.220.252", "10.10.220.253"]
self.driver.configuration.hp3par_iscsi_ips = iscsi_ips
self.driver.initialize_iscsi_ports(common)
# hosts 'foo' and 'bar' already have active iscsi paths
# the same one should be used
nsp = self.driver._get_least_used_nsp_for_host(common, 'foo')
self.assertEqual(nsp, "1:8:2")
nsp = self.driver._get_least_used_nsp_for_host(common, 'bar')
self.assertEqual(nsp, "1:8:1")
def test_get_least_used_nps_for_host_fc(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
mock_client.getPorts.return_value = PORTS1_RET
mock_client.getVLUNs.return_value = VLUNS5_RET
# Setup two ISCSI IPs
iscsi_ips = ["10.10.220.252", "10.10.220.253"]
self.driver.configuration.hp3par_iscsi_ips = iscsi_ips
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
self.driver.initialize_iscsi_ports(common)
nsp = self.driver._get_least_used_nsp_for_host(common, 'newhost')
self.assertNotEqual(nsp, "0:6:3")
self.assertEqual(nsp, "1:8:1")
    def test_invalid_iscsi_ip(self):
        """Driver setup fails when no configured iSCSI IP matches a port.

        The configured IPs (10.10.220.250/.251 and the default
        iscsi_ip_address) do not appear among the array's iSCSI ports
        (.252/.253), so setup must raise InvalidInput.
        """
        config = self.setup_configuration()
        config.hp3par_iscsi_ips = ['10.10.220.250', '10.10.220.251']
        config.iscsi_ip_address = '10.10.10.10'
        mock_conf = {
            'getPorts.return_value': {
                'members': [
                    {'portPos': {'node': 1, 'slot': 8, 'cardPort': 2},
                     'protocol': 2,
                     'IPAddr': '10.10.220.252',
                     'linkState': 4,
                     'device': [],
                     'iSCSIName': self.TARGET_IQN,
                     'mode': 2,
                     'HWAddr': '2C27D75375D2',
                     'type': 8},
                    {'portPos': {'node': 1, 'slot': 8, 'cardPort': 1},
                     'protocol': 2,
                     'IPAddr': '10.10.220.253',
                     'linkState': 4,
                     'device': [],
                     'iSCSIName': self.TARGET_IQN,
                     'mode': 2,
                     'HWAddr': '2C27D75375D6',
                     'type': 8}]}}
        # no valid ip addr should be configured.
        self.assertRaises(exception.InvalidInput,
                          self.setup_driver,
                          config=config,
                          mock_conf=mock_conf)
def test_get_least_used_nsp(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
ports = [
{'portPos': {'node': 1, 'slot': 8, 'cardPort': 2}, 'active': True},
{'portPos': {'node': 1, 'slot': 8, 'cardPort': 1}, 'active': True},
{'portPos': {'node': 1, 'slot': 8, 'cardPort': 2}, 'active': True},
{'portPos': {'node': 0, 'slot': 2, 'cardPort': 2}, 'active': True},
{'portPos': {'node': 0, 'slot': 2, 'cardPort': 1}, 'active': True},
{'portPos': {'node': 0, 'slot': 2, 'cardPort': 1}, 'active': True},
{'portPos': {'node': 0, 'slot': 2, 'cardPort': 1}, 'active': True},
{'portPos': {'node': 0, 'slot': 2, 'cardPort': 1}, 'active': True}]
mock_client.getVLUNs.return_value = {'members': ports}
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
# in use count
vluns = common.client.getVLUNs()
nsp = self.driver._get_least_used_nsp(common, vluns['members'],
['0:2:1', '1:8:1'])
self.assertEqual(nsp, '1:8:1')
ports = [
{'portPos': {'node': 1, 'slot': 2, 'cardPort': 1},
'active': True},
{'portPos': {'node': 1, 'slot': 2, 'cardPort': 1},
'active': True},
{'portPos': {'node': 1, 'slot': 2, 'cardPort': 1},
'active': True},
{'portPos': {'node': 1, 'slot': 2, 'cardPort': 1},
'active': True},
{'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
'active': True},
{'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
'active': True},
{'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
'active': True},
{'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
'active': True},
{'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
'active': True}]
mock_client.getVLUNs.return_value = {'members': ports}
# in use count
common = self.driver._login()
vluns = common.client.getVLUNs()
nsp = self.driver._get_least_used_nsp(common, vluns['members'],
['0:2:1', '1:2:1'])
self.assertEqual(nsp, '1:2:1')
ports = [
{'portPos': {'node': 1, 'slot': 2, 'cardPort': 1},
'active': True},
{'portPos': {'node': 1, 'slot': 2, 'cardPort': 1},
'active': True},
{'portPos': {'node': 1, 'slot': 2, 'cardPort': 1},
'active': True},
{'portPos': {'node': 1, 'slot': 2, 'cardPort': 1},
'active': True},
{'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
'active': True},
{'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
'active': True},
{'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
'active': True},
{'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
'active': True},
{'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
'active': True}]
mock_client.getVLUNs.return_value = {'members': ports}
# in use count
common = self.driver._login()
vluns = common.client.getVLUNs()
nsp = self.driver._get_least_used_nsp(common, vluns['members'],
['1:1:1', '1:2:1'])
self.assertEqual(nsp, '1:1:1')
    def test_set_3par_chaps(self):
        """_set_3par_chaps is a no-op unless CHAP is enabled.

        With the default config no client call is made; with
        hp3par_iscsi_chap_enabled the host must be modified with the
        given CHAP name and secret.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            # CHAP disabled: no calls expected.
            expected = []
            self.driver._set_3par_chaps(
                common, 'test-host', 'test-vol', 'test-host', 'pass')
            mock_client.assert_has_calls(expected)
        # setup_mock_client drive with CHAP enabled configuration
        # and return the mock HTTP 3PAR client
        config = self.setup_configuration()
        config.hp3par_iscsi_chap_enabled = True
        mock_client = self.setup_driver(config=config)
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            expected_mod_request = {
                'chapOperation': mock_client.HOST_EDIT_ADD,
                'chapOperationMode': mock_client.CHAP_INITIATOR,
                'chapName': 'test-host',
                'chapSecret': 'fake'
            }
            expected = [
                mock.call.modifyHost('test-host', expected_mod_request)
            ]
            self.driver._set_3par_chaps(
                common, 'test-host', 'test-vol', 'test-host', 'fake')
            mock_client.assert_has_calls(expected)
    @mock.patch('cinder.volume.utils.generate_password')
    def test_do_export(self, mock_utils):
        """_do_export stores CHAP metadata only when CHAP is enabled.

        With the default config no provider_auth is produced and no
        client calls are made.  With CHAP enabled the generated password
        is stored in the volume metadata and returned as provider_auth.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        volume = {'host': 'test-host@3pariscsi',
                  'id': 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'}
        mock_utils.return_value = 'random-pass'
        mock_client.getHostVLUNs.return_value = [
            {'active': True,
             'volumeName': self.VOLUME_3PAR_NAME,
             'lun': None, 'type': 0,
             'remoteName': 'iqn.1993-08.org.debian:01:222'}
        ]
        mock_client.getHost.return_value = {
            'name': 'osv-0DM4qZEVSKON-DXN-NwVpw',
            'initiatorChapEnabled': True
        }
        mock_client.getVolumeMetaData.return_value = {
            'value': 'random-pass'
        }
        # CHAP disabled: nothing should be called or returned.
        expected = []
        expected_model = {'provider_auth': None}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            model = self.driver._do_export(common, volume)
            mock_client.assert_has_calls(expected)
            self.assertEqual(expected_model, model)
        mock_client.reset_mock()
        # setup_mock_client drive with CHAP enabled configuration
        # and return the mock HTTP 3PAR client
        config = self.setup_configuration()
        config.hp3par_iscsi_chap_enabled = True
        mock_client = self.setup_driver(config=config)
        volume = {'host': 'test-host@3pariscsi',
                  'id': 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'}
        mock_utils.return_value = 'random-pass'
        mock_client.getHostVLUNs.return_value = [
            {'active': True,
             'volumeName': self.VOLUME_3PAR_NAME,
             'lun': None, 'type': 0,
             'remoteName': 'iqn.1993-08.org.debian:01:222'}
        ]
        mock_client.getHost.return_value = {
            'name': 'osv-0DM4qZEVSKON-DXN-NwVpw',
            'initiatorChapEnabled': True
        }
        mock_client.getVolumeMetaData.return_value = {
            'value': 'random-pass'
        }
        # CHAP enabled: CHAP user/password are persisted as metadata.
        expected = [
            mock.call.getHostVLUNs('test-host'),
            mock.call.getHost('test-host'),
            mock.call.getVolumeMetaData(
                'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY),
            mock.call.setVolumeMetaData(
                'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_USER_KEY, 'test-host'),
            mock.call.setVolumeMetaData(
                'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY, 'random-pass')
        ]
        expected_model = {'provider_auth': 'CHAP test-host random-pass'}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            model = self.driver._do_export(common, volume)
            mock_client.assert_has_calls(expected)
            self.assertEqual(expected_model, model)
@mock.patch('cinder.volume.utils.generate_password')
def test_do_export_host_not_found(self, mock_utils):
    """CHAP export when the 3PAR host lookup raises HTTPNotFound."""
    # Build a driver with CHAP enabled, backed by a mock 3PAR client.
    conf = self.setup_configuration()
    conf.hp3par_iscsi_chap_enabled = True
    mock_client = self.setup_driver(config=conf)

    mock_utils.return_value = "random-pass"
    mock_client.getHostVLUNs.side_effect = hpexceptions.HTTPNotFound(
        'fake')
    mock_client.getVolumeMetaData.return_value = {
        'value': 'random-pass'
    }

    test_volume = {'host': 'test-host@3pariscsi',
                   'id': 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'}

    # Even without an existing host, _do_export should persist fresh
    # CHAP credentials in the volume metadata.
    expected_calls = [
        mock.call.getHostVLUNs('test-host'),
        mock.call.setVolumeMetaData(
            'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_USER_KEY, 'test-host'),
        mock.call.setVolumeMetaData(
            'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY, 'random-pass')
    ]
    expected_model = {'provider_auth': 'CHAP test-host random-pass'}

    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client
        common = self.driver._login()
        model = self.driver._do_export(common, test_volume)
        mock_client.assert_has_calls(expected_calls)
        self.assertEqual(expected_model, model)
@mock.patch('cinder.volume.utils.generate_password')
def test_do_export_host_chap_disabled(self, mock_utils):
    """Export still creates CHAP credentials if the host has CHAP off."""
    # Build a driver with CHAP enabled, backed by a mock 3PAR client.
    conf = self.setup_configuration()
    conf.hp3par_iscsi_chap_enabled = True
    mock_client = self.setup_driver(config=conf)

    mock_utils.return_value = 'random-pass'
    mock_client.getHostVLUNs.return_value = [
        {'active': True,
         'volumeName': self.VOLUME_3PAR_NAME,
         'lun': None, 'type': 0,
         'remoteName': 'iqn.1993-08.org.debian:01:222'}
    ]
    # The 3PAR host itself reports CHAP disabled.
    mock_client.getHost.return_value = {
        'name': 'fake-host',
        'initiatorChapEnabled': False
    }
    mock_client.getVolumeMetaData.return_value = {
        'value': 'random-pass'
    }

    test_volume = {'host': 'test-host@3pariscsi',
                   'id': 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'}
    expected_calls = [
        mock.call.getHostVLUNs('test-host'),
        mock.call.getHost('test-host'),
        mock.call.getVolumeMetaData(
            'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY),
        mock.call.setVolumeMetaData(
            'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_USER_KEY, 'test-host'),
        mock.call.setVolumeMetaData(
            'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY, 'random-pass')
    ]
    expected_model = {'provider_auth': 'CHAP test-host random-pass'}

    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client
        common = self.driver._login()
        model = self.driver._do_export(common, test_volume)
        mock_client.assert_has_calls(expected_calls)
        self.assertEqual(expected_model, model)
@mock.patch('cinder.volume.utils.generate_password')
def test_do_export_no_active_vluns(self, mock_utils):
    """Export with CHAP when the volume has no active VLUNs.

    Even though every VLUN is inactive, _do_export must still create
    CHAP credentials and store them in the volume metadata.
    """
    # setup_mock_client drive with CHAP enabled configuration
    # and return the mock HTTP 3PAR client
    config = self.setup_configuration()
    config.hp3par_iscsi_chap_enabled = True
    mock_client = self.setup_driver(config=config)
    volume = {'host': 'test-host@3pariscsi',
              'id': 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'}
    mock_utils.return_value = "random-pass"
    mock_client.getHostVLUNs.return_value = [
        {'active': False,
         'volumeName': self.VOLUME_3PAR_NAME,
         'lun': None, 'type': 0,
         'remoteName': 'iqn.1993-08.org.debian:01:222'}
    ]
    mock_client.getHost.return_value = {
        'name': 'fake-host',
        'initiatorChapEnabled': True
    }
    mock_client.getVolumeMetaData.return_value = {
        'value': 'random-pass'
    }
    expected = [
        mock.call.getHostVLUNs('test-host'),
        mock.call.getHost('test-host'),
        mock.call.setVolumeMetaData(
            'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_USER_KEY, 'test-host'),
        mock.call.setVolumeMetaData(
            'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY, 'random-pass')
    ]
    expected_model = {'provider_auth': 'CHAP test-host random-pass'}
    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client
        common = self.driver._login()
        model = self.driver._do_export(common, volume)
        mock_client.assert_has_calls(expected)
        # Expected value first, matching the other _do_export tests.
        self.assertEqual(expected_model, model)
def test_ensure_export(self):
    """ensure_export returns CHAP auth only when metadata already exists."""
    # setup_mock_client drive with default configuration
    # and return the mock HTTP 3PAR client
    mock_client = self.setup_driver()
    volume = {'host': 'test-host@3pariscsi',
              'id': 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'}
    # First pass: no CHAP metadata stored on the volume.
    mock_client.getAllVolumeMetaData.return_value = {
        'total': 0,
        'members': []
    }
    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client
        model = self.driver.ensure_export(None, volume)
        expected = [
            mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
            mock.call.getAllVolumeMetaData('osv-0DM4qZEVSKON-DXN-NwVpw')
        ]
        expected_model = {'provider_auth': None}
        mock_client.assert_has_calls(
            self.standard_login +
            expected +
            self.standard_logout)
        # Expected value first, per testing convention.
        self.assertEqual(expected_model, model)
        # Second pass: CHAP user/secret metadata present, so
        # ensure_export should rebuild the provider_auth string.
        mock_client.getAllVolumeMetaData.return_value = {
            'total': 2,
            'members': [
                {
                    'creationTimeSec': 1406074222,
                    'value': 'fake-host',
                    'key': CHAP_USER_KEY,
                    'creationTime8601': '2014-07-22T17:10:22-07:00'
                },
                {
                    'creationTimeSec': 1406074222,
                    'value': 'random-pass',
                    'key': CHAP_PASS_KEY,
                    'creationTime8601': '2014-07-22T17:10:22-07:00'
                }
            ]
        }
        model = self.driver.ensure_export(None, volume)
        expected = [
            mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
            mock.call.getAllVolumeMetaData('osv-0DM4qZEVSKON-DXN-NwVpw')
        ]
        expected_model = {'provider_auth': "CHAP fake-host random-pass"}
        mock_client.assert_has_calls(
            self.standard_login +
            expected +
            self.standard_logout)
        # Expected value first, per testing convention.
        self.assertEqual(expected_model, model)
def test_ensure_export_missing_volume(self):
    """ensure_export returns None when the backing volume is gone."""
    # setup_mock_client drive with default configuration
    # and return the mock HTTP 3PAR client
    mock_client = self.setup_driver()
    volume = {'host': 'test-host@3pariscsi',
              'id': 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'}
    mock_client.getVolume.side_effect = hpexceptions.HTTPNotFound(
        'fake')
    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client
        model = self.driver.ensure_export(None, volume)
        expected = [mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw')]
        mock_client.assert_has_calls(
            self.standard_login +
            expected +
            self.standard_logout)
        # No model update is possible for a missing volume.
        self.assertIsNone(model)
@mock.patch.object(volume_types, 'get_volume_type')
def test_get_volume_settings_default_pool(self, _mock_volume_types):
    """The pool in the volume host string is used as the CPG when the
    volume type has no extra specs."""
    _mock_volume_types.return_value = {
        'name': 'gold',
        'id': 'gold-id',
        'extra_specs': {}}
    mock_client = self.setup_driver()
    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client
        common = self.driver._login()
        volume = {'host': 'test-host@3pariscsi#pool_foo',
                  'id': 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'}
        pool = volume_utils.extract_host(volume['host'], 'pool')
        model = common.get_volume_settings_from_type_id('gold-id', pool)
        # Expected value first, per testing convention.
        self.assertEqual('pool_foo', model['cpg'])
def test_get_model_update(self):
    """_get_model_update rewrites the host's pool suffix to the new CPG."""
    mock_client = self.setup_driver()
    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client
        common = self.driver._login()
        model_update = common._get_model_update('xxx@yyy#zzz', 'CPG')
        # Expected value first, per testing convention.
        self.assertEqual({'host': 'xxx@yyy#CPG'}, model_update)
# Canned WSAPI-style replies (VLUN and port listings) used by tests
# elsewhere in this file.

# Two active VLUNs on ports 0:8:2 and 1:8:1.
VLUNS5_RET = ({'members':
               [{'portPos': {'node': 0, 'slot': 8, 'cardPort': 2},
                 'active': True},
                {'portPos': {'node': 1, 'slot': 8, 'cardPort': 1},
                 'active': True}]})

# Two ready (linkState 4) iSCSI (protocol 2) target ports.
PORTS_RET = ({'members':
              [{'portPos': {'node': 1, 'slot': 8, 'cardPort': 2},
                'protocol': 2,
                'IPAddr': '10.10.220.252',
                'linkState': 4,
                'device': [],
                'iSCSIName': 'iqn.2000-05.com.3pardata:21820002ac00383d',
                'mode': 2,
                'HWAddr': '2C27D75375D2',
                'type': 8},
               {'portPos': {'node': 1, 'slot': 8, 'cardPort': 1},
                'protocol': 2,
                'IPAddr': '10.10.220.253',
                'linkState': 4,
                'device': [],
                'iSCSIName': 'iqn.2000-05.com.3pardata:21810002ac00383d',
                'mode': 2,
                'HWAddr': '2C27D75375D6',
                'type': 8}]})

# Port 1:8:1 carries three active VLUNs while 1:8:2 carries one —
# presumably used to exercise least-used-port selection; confirm
# against the consuming test.
VLUNS1_RET = ({'members':
               [{'portPos': {'node': 1, 'slot': 8, 'cardPort': 2},
                 'hostname': 'foo', 'active': True},
                {'portPos': {'node': 1, 'slot': 8, 'cardPort': 1},
                 'hostname': 'bar', 'active': True},
                {'portPos': {'node': 1, 'slot': 8, 'cardPort': 1},
                 'hostname': 'bar', 'active': True},
                {'portPos': {'node': 1, 'slot': 8, 'cardPort': 1},
                 'hostname': 'bar', 'active': True}]})

# Two iSCSI target ports plus one FC (protocol 1) port.
PORTS1_RET = ({'members':
               [{'portPos': {'node': 0, 'slot': 8, 'cardPort': 2},
                 'protocol': 2,
                 'IPAddr': '10.10.120.252',
                 'linkState': 4,
                 'device': [],
                 'iSCSIName': 'iqn.2000-05.com.3pardata:21820002ac00383d',
                 'mode': 2,
                 'HWAddr': '2C27D75375D2',
                 'type': 8},
                {'portPos': {'node': 1, 'slot': 8, 'cardPort': 1},
                 'protocol': 2,
                 'IPAddr': '10.10.220.253',
                 'linkState': 4,
                 'device': [],
                 'iSCSIName': 'iqn.2000-05.com.3pardata:21810002ac00383d',
                 'mode': 2,
                 'HWAddr': '2C27D75375D6',
                 'type': 8},
                {'portWWN': '20210002AC00383D',
                 'protocol': 1,
                 'linkState': 4,
                 'mode': 2,
                 'device': ['cage2'],
                 'nodeWWN': '20210002AC00383D',
                 'type': 2,
                 'portPos': {'node': 0, 'slot': 6, 'cardPort': 3}}]})
| 41.652425 | 79 | 0.539518 |
import mock
import ast
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import units
from cinder import context
from cinder import exception
from cinder import test
from cinder.tests import fake_hp_3par_client as hp3parclient
from cinder.volume.drivers.san.hp import hp_3par_common as hpcommon
from cinder.volume.drivers.san.hp import hp_3par_fc as hpfcdriver
from cinder.volume.drivers.san.hp import hp_3par_iscsi as hpdriver
from cinder.volume import qos_specs
from cinder.volume import utils as volume_utils
from cinder.volume import volume_types
# Shortcut into the (fake) hp3parclient exception namespace.
hpexceptions = hp3parclient.hpexceptions

LOG = logging.getLogger(__name__)

CONF = cfg.CONF

# CPG (provisioning pool) names used by the mocked backend.
HP3PAR_CPG = 'OpenStackCPG'
HP3PAR_CPG2 = 'fakepool'
HP3PAR_CPG_QOS = 'qospool'
HP3PAR_CPG_SNAP = 'OpenStackCPGSnap'
# Credentials and SSH settings for the mocked 3PAR backend.
HP3PAR_USER_NAME = 'testUser'
HP3PAR_USER_PASS = 'testPassword'
HP3PAR_SAN_IP = '2.2.2.2'
HP3PAR_SAN_SSH_PORT = 999
HP3PAR_SAN_SSH_CON_TIMEOUT = 44
HP3PAR_SAN_SSH_PRIVATE = 'foobar'

# Scheduler goodness/filter function strings fed to the driver config.
GOODNESS_FUNCTION = \
    "stats.capacity_utilization < 0.6? 100:25"
FILTER_FUNCTION = \
    "stats.total_volumes < 400 && stats.capacity_utilization < 0.8"

# Volume metadata keys under which CHAP credentials are stored.
CHAP_USER_KEY = "HPQ-cinder-CHAP-name"
CHAP_PASS_KEY = "HPQ-cinder-CHAP-secret"

FLASH_CACHE_ENABLED = 1
FLASH_CACHE_DISABLED = 2
class HP3PARBaseDriver(object):
class CommentMatcher(object):
    """Equality matcher for 3PAR volume comment strings.

    Comments are stringified dicts whose key order is not fixed, so
    equality is asserted on the parsed dict rather than the raw
    string.  ``f`` is expected to be the test case's ``assertEqual``.
    """
    def __init__(self, f, expect):
        # Stored assertion callable and the dict the comment must
        # parse to.
        self.assertEqual = f
        self.expect = expect

    def __eq__(self, actual):
        # literal_eval safely parses the stringified dict; the
        # assertion raises on mismatch, so reaching the return means
        # the comment matched.
        actual_as_dict = dict(ast.literal_eval(actual))
        self.assertEqual(self.expect, actual_as_dict)
        return True
# OpenStack-side IDs and names shared by the fixtures below.
VOLUME_ID = 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'
CLONE_ID = 'd03338a9-9115-48a3-8dfc-000000000000'
VOLUME_TYPE_ID_DEDUP = 'd03338a9-9115-48a3-8dfc-11111111111'
VOLUME_TYPE_ID_FLASH_CACHE = 'd03338a9-9115-48a3-8dfc-22222222222'
VOLUME_NAME = 'volume-' + VOLUME_ID
VOLUME_NAME_3PAR = 'osv-0DM4qZEVSKON-DXN-NwVpw'
SNAPSHOT_ID = '2f823bdc-e36e-4dc8-bd15-de1c7a28ff31'
SNAPSHOT_NAME = 'snapshot-2f823bdc-e36e-4dc8-bd15-de1c7a28ff31'
# 3PAR-side names corresponding to the IDs above.
VOLUME_3PAR_NAME = 'osv-0DM4qZEVSKON-DXN-NwVpw'
SNAPSHOT_3PAR_NAME = 'oss-L4I73ONuTci9Fd4ceij-MQ'
FAKE_HOST = 'fakehost'
FAKE_CINDER_HOST = 'fakehost@foo#' + HP3PAR_CPG
USER_ID = '2689d9a913974c008b1d859013f23607'
PROJECT_ID = 'fac88235b9d64685a3530f73e490348f'
VOLUME_ID_SNAP = '761fc5e5-5191-4ec7-aeba-33e36de44156'
FAKE_DESC = 'test description name'
# Fake FC target ports (protocol 1, target mode 2, ready link state 4).
FAKE_FC_PORTS = [{'portPos': {'node': 7, 'slot': 1, 'cardPort': 1},
                  'portWWN': '0987654321234',
                  'protocol': 1,
                  'mode': 2,
                  'linkState': 4},
                 {'portPos': {'node': 6, 'slot': 1, 'cardPort': 1},
                  'portWWN': '123456789000987',
                  'protocol': 1,
                  'mode': 2,
                  'linkState': 4}]
# QoS settings as they appear in volume-type extra specs (qos: prefix)
# and as bare qos-specs.
QOS = {'qos:maxIOPS': '1000', 'qos:maxBWS': '50',
       'qos:minIOPS': '100', 'qos:minBWS': '25',
       'qos:latency': '25', 'qos:priority': 'low'}
QOS_SPECS = {'maxIOPS': '1000', 'maxBWS': '50',
             'minIOPS': '100', 'minBWS': '25',
             'latency': '25', 'priority': 'low'}
VVS_NAME = "myvvs"
# Fake iSCSI target port (protocol 2) matching the configured
# iscsi_ip_address '1.1.1.2'.
FAKE_ISCSI_PORT = {'portPos': {'node': 8, 'slot': 1, 'cardPort': 1},
                   'protocol': 2,
                   'mode': 2,
                   'IPAddr': '1.1.1.2',
                   'iSCSIName': ('iqn.2000-05.com.3pardata:'
                                 '21810002ac00383d'),
                   'linkState': 4}
# Baseline volume fixture (no volume type).
volume = {'name': VOLUME_NAME,
          'id': VOLUME_ID,
          'display_name': 'Foo Volume',
          'size': 2,
          'host': FAKE_CINDER_HOST,
          'volume_type': None,
          'volume_type_id': None}

# Same volume with an encryption key attached.
volume_encrypted = {'name': VOLUME_NAME,
                    'id': VOLUME_ID,
                    'display_name': 'Foo Volume',
                    'size': 2,
                    'host': FAKE_CINDER_HOST,
                    'volume_type': None,
                    'volume_type_id': None,
                    'encryption_key_id': 'fake_key'}

# Volume using the dedup volume type.
volume_dedup = {'name': VOLUME_NAME,
                'id': VOLUME_ID,
                'display_name': 'Foo Volume',
                'size': 2,
                'host': FAKE_CINDER_HOST,
                'volume_type': 'dedup',
                'volume_type_id': VOLUME_TYPE_ID_DEDUP}

# Volume whose host string carries an explicit pool (HP3PAR_CPG2).
volume_pool = {'name': VOLUME_NAME,
               'id': VOLUME_ID,
               'display_name': 'Foo Volume',
               'size': 2,
               'host': volume_utils.append_host(FAKE_HOST, HP3PAR_CPG2),
               'volume_type': None,
               'volume_type_id': None}

# Volume bound to the 'gold' (QoS) volume type.
volume_qos = {'name': VOLUME_NAME,
              'id': VOLUME_ID,
              'display_name': 'Foo Volume',
              'size': 2,
              'host': FAKE_CINDER_HOST,
              'volume_type': None,
              'volume_type_id': 'gold'}

# Volume bound to the flash-cache volume type.
volume_flash_cache = {'name': VOLUME_NAME,
                      'id': VOLUME_ID,
                      'display_name': 'Foo Volume',
                      'size': 2,
                      'host': FAKE_CINDER_HOST,
                      'volume_type': None,
                      'volume_type_id': VOLUME_TYPE_ID_FLASH_CACHE}

snapshot = {'name': SNAPSHOT_NAME,
            'id': SNAPSHOT_ID,
            'user_id': USER_ID,
            'project_id': PROJECT_ID,
            'volume_id': VOLUME_ID_SNAP,
            'volume_name': VOLUME_NAME,
            'status': 'creating',
            'progress': '0%',
            'volume_size': 2,
            'display_name': 'fakesnap',
            'display_description': FAKE_DESC}

# Initiator WWPNs referenced by the fake connector below.
wwn = ["123456789012345", "123456789054321"]

connector = {'ip': '10.0.0.2',
             'initiator': 'iqn.1993-08.org.debian:01:222',
             'wwpns': [wwn[0], wwn[1]],
             'wwnns': ["223456789012345", "223456789054321"],
             'host': FAKE_HOST}

# Volume types: 'gold' (QoS extra specs), dedup, and flash-cache.
volume_type = {'name': 'gold',
               'deleted': False,
               'updated_at': None,
               'extra_specs': {'cpg': HP3PAR_CPG2,
                               'qos:maxIOPS': '1000',
                               'qos:maxBWS': '50',
                               'qos:minIOPS': '100',
                               'qos:minBWS': '25',
                               'qos:latency': '25',
                               'qos:priority': 'low'},
               'deleted_at': None,
               'id': 'gold'}

volume_type_dedup = {'name': 'dedup',
                     'deleted': False,
                     'updated_at': None,
                     'extra_specs': {'cpg': HP3PAR_CPG2,
                                     'provisioning': 'dedup'},
                     'deleted_at': None,
                     'id': VOLUME_TYPE_ID_DEDUP}

volume_type_flash_cache = {'name': 'flash-cache-on',
                           'deleted': False,
                           'updated_at': None,
                           'extra_specs': {'cpg': HP3PAR_CPG2,
                                           'hp3par:flash_cache': 'true'},
                           'deleted_at': None,
                           'id': VOLUME_TYPE_ID_FLASH_CACHE}

flash_cache_3par_keys = {'flash_cache': 'true'}
# Canned CPG description (space usage in MiB, growth settings, state).
cpgs = [
    {'SAGrowth': {'LDLayout': {'diskPatterns': [{'diskType': 2}]},
                  'incrementMiB': 8192},
     'SAUsage': {'rawTotalMiB': 24576,
                 'rawUsedMiB': 768,
                 'totalMiB': 8192,
                 'usedMiB': 256},
     'SDGrowth': {'LDLayout': {'RAIDType': 4,
                               'diskPatterns': [{'diskType': 2}]},
                  'incrementMiB': 32768},
     'SDUsage': {'rawTotalMiB': 49152,
                 'rawUsedMiB': 1023,
                 'totalMiB': 36864,
                 'usedMiB': 1024 * 1},
     'UsrUsage': {'rawTotalMiB': 57344,
                  'rawUsedMiB': 43349,
                  'totalMiB': 43008,
                  'usedMiB': 1024 * 20},
     'additionalStates': [],
     'degradedStates': [],
     'failedStates': [],
     'id': 5,
     'name': HP3PAR_CPG,
     'numFPVVs': 2,
     'numTPVVs': 0,
     'numTDVVs': 1,
     'state': 1,
     'uuid': '29c214aa-62b9-41c8-b198-543f6cf24edf'}]

# 3PAR task states and the corresponding getTask replies.
TASK_DONE = 1
TASK_ACTIVE = 2
STATUS_DONE = {'status': 1}
STATUS_ACTIVE = {'status': 2}

# Default attribute set applied to every mocked HP3ParClient instance
# (see setup_mock_client).
mock_client_conf = {
    'PORT_MODE_TARGET': 2,
    'PORT_STATE_READY': 4,
    'PORT_PROTO_ISCSI': 2,
    'PORT_PROTO_FC': 1,
    'TASK_DONE': TASK_DONE,
    'TASK_ACTIVE': TASK_ACTIVE,
    'HOST_EDIT_ADD': 1,
    'CHAP_INITIATOR': 1,
    'CHAP_TARGET': 2,
    'getPorts.return_value': {
        'members': FAKE_FC_PORTS + [FAKE_ISCSI_PORT]
    }
}
# ---- Fixtures for the retype tests ----
RETYPE_VVS_NAME = "yourvvs"

# Destination host capabilities as reported by a 3PAR FC backend.
RETYPE_HOST = {
    u'host': u'mark-stack1@3parfc',
    u'capabilities': {
        'QoS_support': True,
        u'location_info': u'HP3PARDriver:1234567:MARK_TEST_CPG',
        u'timestamp': u'2014-06-04T19:03:32.485540',
        u'allocated_capacity_gb': 0,
        u'volume_backend_name': u'3parfc',
        u'free_capacity_gb': u'infinite',
        u'driver_version': u'2.0.3',
        u'total_capacity_gb': u'infinite',
        u'reserved_percentage': 0,
        u'vendor_name': u'Hewlett-Packard',
        u'storage_protocol': u'FC'
    }
}

# Host whose location_info does not come from the 3PAR driver.
RETYPE_HOST_NOT3PAR = {
    u'host': u'mark-stack1@3parfc',
    u'capabilities': {
        u'location_info': u'XXXDriverXXX:1610771:MARK_TEST_CPG',
    }
}

RETYPE_QOS_SPECS = {'maxIOPS': '1000', 'maxBWS': '50',
                    'minIOPS': '100', 'minBWS': '25',
                    'latency': '25', 'priority': 'high'}

RETYPE_VOLUME_TYPE_ID = "FakeVolId"

# Target volume types differing in CPG, VVS/QoS and provisioning
# (tpvv = thin, tdvv = dedup).
RETYPE_VOLUME_TYPE_0 = {
    'name': 'red',
    'id': RETYPE_VOLUME_TYPE_ID,
    'extra_specs': {
        'cpg': HP3PAR_CPG,
        'snap_cpg': HP3PAR_CPG_SNAP,
        'vvs': RETYPE_VVS_NAME,
        'qos': RETYPE_QOS_SPECS,
        'tpvv': True,
        'tdvv': False,
        'volume_type': volume_type
    }
}

RETYPE_VOLUME_TYPE_1 = {
    'name': 'white',
    'id': RETYPE_VOLUME_TYPE_ID,
    'extra_specs': {
        'cpg': HP3PAR_CPG,
        'snap_cpg': HP3PAR_CPG_SNAP,
        'vvs': VVS_NAME,
        'qos': QOS,
        'tpvv': True,
        'tdvv': False,
        'volume_type': volume_type
    }
}

RETYPE_VOLUME_TYPE_2 = {
    'name': 'blue',
    'id': RETYPE_VOLUME_TYPE_ID,
    'extra_specs': {
        'cpg': HP3PAR_CPG_QOS,
        'snap_cpg': HP3PAR_CPG_SNAP,
        'vvs': RETYPE_VVS_NAME,
        'qos': RETYPE_QOS_SPECS,
        'tpvv': True,
        'tdvv': False,
        'volume_type': volume_type
    }
}

RETYPE_VOLUME_TYPE_3 = {
    'name': 'purple',
    'id': RETYPE_VOLUME_TYPE_ID,
    'extra_specs': {
        'cpg': HP3PAR_CPG_QOS,
        'snap_cpg': HP3PAR_CPG_SNAP,
        'vvs': RETYPE_VVS_NAME,
        'qos': RETYPE_QOS_SPECS,
        'tpvv': False,
        'tdvv': True,
        'volume_type': volume_type
    }
}

# Types with invalid settings, used for retype failure paths.
RETYPE_VOLUME_TYPE_BAD_PERSONA = {
    'name': 'bad_persona',
    'id': 'any_id',
    'extra_specs': {
        'hp3par:persona': '99 - invalid'
    }
}

RETYPE_VOLUME_TYPE_BAD_CPG = {
    'name': 'bad_cpg',
    'id': 'any_id',
    'extra_specs': {
        'cpg': 'bogus',
        'snap_cpg': 'bogus',
        'hp3par:persona': '2 - Generic-ALUA'
    }
}
# getVolume-style replies used by the manage/retype tests.
MANAGE_VOLUME_INFO = {
    'userCPG': 'testUserCpg0',
    'snapCPG': 'testSnapCpg0',
    'provisioningType': 1,
    'comment': "{'display_name': 'Foo Volume'}"
}

# Same as above but with no snapCPG key present.
MV_INFO_WITH_NO_SNAPCPG = {
    'userCPG': 'testUserCpg0',
    'provisioningType': 1,
    'comment': "{'display_name': 'Foo Volume'}"
}

RETYPE_TEST_COMMENT = "{'retype_test': 'test comment'}"

RETYPE_VOLUME_INFO_0 = {
    'name': VOLUME_NAME,
    'id': VOLUME_ID,
    'display_name': 'Retype Vol0',
    'size': 1,
    'host': RETYPE_HOST,
    'userCPG': 'testUserCpg0',
    'snapCPG': 'testSnapCpg0',
    'provisioningType': 1,
    'comment': RETYPE_TEST_COMMENT
}

RETYPE_TEST_COMMENT_1 = "{'retype_test': 'test comment 1'}"

RETYPE_VOLUME_INFO_1 = {
    'name': VOLUME_NAME,
    'id': VOLUME_ID,
    'display_name': 'Retype Vol1',
    'size': 1,
    'host': RETYPE_HOST,
    'userCPG': HP3PAR_CPG,
    'snapCPG': HP3PAR_CPG_SNAP,
    'provisioningType': 1,
    'comment': RETYPE_TEST_COMMENT
}

RETYPE_TEST_COMMENT_2 = "{'retype_test': 'test comment 2'}"

# provisioningType 3 variant of the volume info above.
RETYPE_VOLUME_INFO_2 = {
    'name': VOLUME_NAME,
    'id': VOLUME_ID,
    'display_name': 'Retype Vol2',
    'size': 1,
    'host': RETYPE_HOST,
    'userCPG': HP3PAR_CPG,
    'snapCPG': HP3PAR_CPG_SNAP,
    'provisioningType': 3,
    'comment': RETYPE_TEST_COMMENT
}

RETYPE_VOLUME_INFO_NO_SNAP = {
    'name': VOLUME_NAME,
    'id': VOLUME_ID,
    'display_name': 'Retype Vol2',
    'size': 1,
    'host': RETYPE_HOST,
    'userCPG': 'testUserCpg2',
    'provisioningType': 1,
    'comment': '{}'
}

# Mock-client configuration for the retype tests.
RETYPE_CONF = {
    'TASK_ACTIVE': TASK_ACTIVE,
    'TASK_DONE': TASK_DONE,
    'getTask.return_value': STATUS_DONE,
    'getStorageSystemInfo.return_value': {'serialNumber': '1234567'},
    'getVolume.return_value': RETYPE_VOLUME_INFO_0,
    'modifyVolume.return_value': ("anyResponse", {'taskid': 1})
}

# 3PAR retype currently doesn't use the diff. Existing code and fresh info
RETYPE_DIFF = None

# WSAPI versions used to gate feature support in these tests
# (build 30201120+ for dedup, 30201200+ for flash cache).
wsapi_version_312 = {'major': 1,
                     'build': 30102422,
                     'minor': 3,
                     'revision': 1}

wsapi_version_for_dedup = {'major': 1,
                           'build': 30201120,
                           'minor': 4,
                           'revision': 1}

wsapi_version_for_flash_cache = {'major': 1,
                                 'build': 30201200,
                                 'minor': 4,
                                 'revision': 2}

wsapi_version_latest = wsapi_version_for_flash_cache

# Call sequences that bracket every driver operation.
standard_login = [
    mock.call.login(HP3PAR_USER_NAME, HP3PAR_USER_PASS),
    mock.call.setSSHOptions(
        HP3PAR_SAN_IP,
        HP3PAR_USER_NAME,
        HP3PAR_USER_PASS,
        missing_key_policy='AutoAddPolicy',
        privatekey=HP3PAR_SAN_SSH_PRIVATE,
        known_hosts_file=mock.ANY,
        port=HP3PAR_SAN_SSH_PORT,
        conn_timeout=HP3PAR_SAN_SSH_CON_TIMEOUT)]

standard_logout = [
    mock.call.logout()]
def setup_configuration(self):
    """Return a mock Cinder configuration for the 3PAR drivers."""
    configuration = mock.Mock()
    # One configure_mock call sets every attribute the drivers read.
    configuration.configure_mock(
        hp3par_debug=False,
        hp3par_username=HP3PAR_USER_NAME,
        hp3par_password=HP3PAR_USER_PASS,
        hp3par_api_url='https://1.1.1.1/api/v1',
        hp3par_cpg=[HP3PAR_CPG, HP3PAR_CPG2],
        hp3par_cpg_snap=HP3PAR_CPG_SNAP,
        hp3par_snapshot_expiration="",
        hp3par_snapshot_retention="",
        hp3par_iscsi_ips=[],
        hp3par_iscsi_chap_enabled=False,
        iscsi_ip_address='1.1.1.2',
        iscsi_port='1234',
        san_ip=HP3PAR_SAN_IP,
        san_login=HP3PAR_USER_NAME,
        san_password=HP3PAR_USER_PASS,
        san_ssh_port=HP3PAR_SAN_SSH_PORT,
        ssh_conn_timeout=HP3PAR_SAN_SSH_CON_TIMEOUT,
        san_private_key=HP3PAR_SAN_SSH_PRIVATE,
        goodness_function=GOODNESS_FUNCTION,
        filter_function=FILTER_FUNCTION,
    )
    return configuration
@mock.patch(
    'hp3parclient.client.HP3ParClient',
    spec=True,
)
def setup_mock_client(self, _m_client, driver, conf=None, m_conf=None):
    """Create ``self.driver`` backed by a mocked HP3ParClient.

    :param _m_client: the patched HP3ParClient class (injected by the
        decorator above).
    :param driver: driver class to instantiate.
    :param conf: optional driver configuration; defaults to
        ``setup_configuration()``.
    :param m_conf: optional extra mock attributes layered on top of
        ``mock_client_conf``.
    :returns: the configured mock client instance.
    """
    # Work with the instance the driver will get, not the class mock.
    _m_client = _m_client.return_value

    # Configure the base constants and canned replies, then any
    # test-specific overrides.
    _m_client.configure_mock(**self.mock_client_conf)

    if m_conf is not None:
        _m_client.configure_mock(**m_conf)

    if conf is None:
        conf = self.setup_configuration()
    self.driver = driver(configuration=conf)
    self.driver.do_setup(None)
    return _m_client
@mock.patch('hp3parclient.version', "3.0.9")
def test_unsupported_client_version(self):
    """Driver setup must reject an hp3parclient older than supported."""
    self.assertRaises(exception.InvalidInput,
                      self.setup_driver)
@mock.patch('hp3parclient.version', "3.1.2")
def test_ssh_options(self):
    """Non-strict host key policy is passed through as AutoAddPolicy."""
    expected_hosts_key_file = "test_hosts_key_file"
    orig_ssh_hosts_key_file = CONF.ssh_hosts_key_file
    orig_strict_ssh_host_key_policy = CONF.strict_ssh_host_key_policy

    CONF.ssh_hosts_key_file = expected_hosts_key_file
    CONF.strict_ssh_host_key_policy = False

    self.ctxt = context.get_admin_context()
    try:
        mock_client = self.setup_mock_client(
            driver=hpfcdriver.HP3PARFCDriver)
    finally:
        # Restore the global CONF even if driver setup fails, so a
        # failure here cannot leak test settings into other tests.
        CONF.ssh_hosts_key_file = orig_ssh_hosts_key_file
        CONF.strict_ssh_host_key_policy = orig_strict_ssh_host_key_policy

    expected = [
        mock.call.login(HP3PAR_USER_NAME, HP3PAR_USER_PASS),
        mock.call.setSSHOptions(
            HP3PAR_SAN_IP,
            HP3PAR_USER_NAME,
            HP3PAR_USER_PASS,
            privatekey=HP3PAR_SAN_SSH_PRIVATE,
            known_hosts_file=expected_hosts_key_file,
            missing_key_policy="AutoAddPolicy",
            port=HP3PAR_SAN_SSH_PORT,
            conn_timeout=HP3PAR_SAN_SSH_CON_TIMEOUT),
        mock.call.getCPG(HP3PAR_CPG),
        mock.call.getCPG(HP3PAR_CPG2)]
    mock_client.assert_has_calls(
        expected +
        self.standard_logout)
@mock.patch('hp3parclient.version', "3.1.2")
def test_ssh_options_strict(self):
    """Strict host key policy is passed through as RejectPolicy."""
    expected_hosts_key_file = "test_hosts_key_file"
    orig_ssh_hosts_key_file = CONF.ssh_hosts_key_file
    orig_strict_ssh_host_key_policy = CONF.strict_ssh_host_key_policy

    CONF.ssh_hosts_key_file = expected_hosts_key_file
    CONF.strict_ssh_host_key_policy = True

    self.ctxt = context.get_admin_context()
    try:
        mock_client = self.setup_mock_client(
            driver=hpfcdriver.HP3PARFCDriver)
    finally:
        # Restore the global CONF even if driver setup fails, so a
        # failure here cannot leak test settings into other tests.
        CONF.ssh_hosts_key_file = orig_ssh_hosts_key_file
        CONF.strict_ssh_host_key_policy = orig_strict_ssh_host_key_policy

    expected = [
        mock.call.login(HP3PAR_USER_NAME, HP3PAR_USER_PASS),
        mock.call.setSSHOptions(
            HP3PAR_SAN_IP,
            HP3PAR_USER_NAME,
            HP3PAR_USER_PASS,
            privatekey=HP3PAR_SAN_SSH_PRIVATE,
            known_hosts_file=expected_hosts_key_file,
            missing_key_policy="RejectPolicy",
            port=HP3PAR_SAN_SSH_PORT,
            conn_timeout=HP3PAR_SAN_SSH_CON_TIMEOUT),
        mock.call.getCPG(HP3PAR_CPG),
        mock.call.getCPG(HP3PAR_CPG2)]
    mock_client.assert_has_calls(expected + self.standard_logout)
def test_task_waiter(self):
    """TaskWaiter polls getTask until the task reports done."""
    # First two polls report an active task; any later poll is done.
    task_statuses = [self.STATUS_ACTIVE, self.STATUS_ACTIVE]

    def side_effect(*args):
        # Explicit conditional instead of the fragile `and/or` idiom,
        # which would misbehave if a queued status were falsy.
        if task_statuses:
            return task_statuses.pop(0)
        return self.STATUS_DONE

    conf = {'getTask.side_effect': side_effect}
    mock_client = self.setup_driver(mock_conf=conf)
    task_id = 1234
    interval = .001
    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client
        common = self.driver._login()
        waiter = common.TaskWaiter(mock_client, task_id, interval)
        status = waiter.wait_for_task()

        # Two active polls plus the final done poll.
        expected = [
            mock.call.getTask(task_id),
            mock.call.getTask(task_id),
            mock.call.getTask(task_id)
        ]
        mock_client.assert_has_calls(expected)
        # Expected value first, per testing convention.
        self.assertEqual(self.STATUS_DONE, status)
def test_create_volume(self):
    """A plain volume create lands in the default CPG, thin provisioned."""
    mock_client = self.setup_driver()

    # Comment the driver is expected to stamp onto the 3PAR volume.
    comment = (
        '{"display_name": "Foo Volume", "type": "OpenStack",'
        ' "name": "volume-d03338a9-9115-48a3-8dfc-35cdfcdc15a7",'
        ' "volume_id": "d03338a9-9115-48a3-8dfc-35cdfcdc15a7"}')
    expected_calls = [
        mock.call.createVolume(
            self.VOLUME_3PAR_NAME,
            HP3PAR_CPG,
            1907, {
                'comment': comment,
                'tpvv': True,
                'tdvv': False,
                'snapCPG': HP3PAR_CPG_SNAP})]

    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client
        self.driver.create_volume(self.volume)
        mock_client.assert_has_calls(
            self.standard_login +
            expected_calls +
            self.standard_logout)
def test_create_volume_in_pool(self):
    """A volume whose host carries a pool is created in that CPG."""
    mock_client = self.setup_driver()
    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client

        return_model = self.driver.create_volume(self.volume_pool)
        comment = (
            '{"display_name": "Foo Volume", "type": "OpenStack",'
            ' "name": "volume-d03338a9-9115-48a3-8dfc-35cdfcdc15a7",'
            ' "volume_id": "d03338a9-9115-48a3-8dfc-35cdfcdc15a7"}')

        # The pool from the host string (CPG2) replaces the default CPG.
        expected = [
            mock.call.createVolume(
                self.VOLUME_3PAR_NAME,
                HP3PAR_CPG2,
                1907, {
                    'comment': comment,
                    'tpvv': True,
                    'tdvv': False,
                    'snapCPG': HP3PAR_CPG_SNAP})]

        mock_client.assert_has_calls(
            self.standard_login +
            expected +
            self.standard_logout)
        # assertIsNone instead of assertEqual(x, None).
        self.assertIsNone(return_model)
@mock.patch.object(volume_types, 'get_volume_type')
def test_unsupported_dedup_volume_type(self, _mock_volume_types):
    """A dedup-provisioned type must be rejected on WSAPI 3.1.2.

    wsapi_version_312 predates wsapi_version_for_dedup (see the build
    numbers above), so settings lookup should raise InvalidInput.
    """
    mock_client = self.setup_driver(wsapi_version=self.wsapi_version_312)
    _mock_volume_types.return_value = {
        'name': 'dedup',
        'extra_specs': {
            'cpg': HP3PAR_CPG_QOS,
            'snap_cpg': HP3PAR_CPG_SNAP,
            'vvs_name': self.VVS_NAME,
            'qos': self.QOS,
            'provisioning': 'dedup',
            'volume_type': self.volume_type_dedup}}
    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client
        common = self.driver._login()
        self.assertRaises(exception.InvalidInput,
                          common.get_volume_settings_from_type_id,
                          self.VOLUME_TYPE_ID_DEDUP,
                          "mock")
@mock.patch.object(volume_types, 'get_volume_type')
def test_get_snap_cpg_from_volume_type(self, _mock_volume_types):
    """A snap_cpg in the type's extra specs wins over the config."""
    mock_client = self.setup_driver()
    type_snap_cpg = "type_snap_cpg"
    _mock_volume_types.return_value = {
        'name': 'gold',
        'extra_specs': {
            'cpg': HP3PAR_CPG,
            'snap_cpg': type_snap_cpg,
            'volume_type': self.volume_type}}
    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client
        common = self.driver._login()
        settings = common.get_volume_settings_from_type_id(
            "mock", self.driver.configuration.hp3par_cpg)
        self.assertEqual(type_snap_cpg, settings['snap_cpg'])
@mock.patch.object(volume_types, 'get_volume_type')
def test_get_snap_cpg_from_volume_type_cpg(self, _mock_volume_types):
    """With only a cpg in extra specs, snap_cpg comes from the config."""
    mock_client = self.setup_driver()
    expected_cpg = 'use_extra_specs_cpg'
    # Extra specs carry a cpg but no snap_cpg.
    _mock_volume_types.return_value = {
        'name': 'gold',
        'extra_specs': {
            'cpg': expected_cpg,
            'volume_type': self.volume_type}}
    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client
        common = self.driver._login()
        result = common.get_volume_settings_from_type_id(
            "mock", self.driver.configuration.hp3par_cpg)
        # snap_cpg falls back to the configured hp3par_cpg_snap.
        self.assertEqual(self.driver.configuration.hp3par_cpg_snap,
                         result['snap_cpg'])
@mock.patch.object(volume_types, 'get_volume_type')
def test_get_snap_cpg_from_volume_type_conf_snap_cpg(
        self, _mock_volume_types):
    """With no cpg/snap_cpg in extra specs, use configured snap CPG."""
    _mock_volume_types.return_value = {
        'name': 'gold',
        'extra_specs': {
            'volume_type': self.volume_type}}
    conf = self.setup_configuration()
    expected_snap_cpg = conf.hp3par_cpg_snap
    mock_client = self.setup_driver(config=conf)
    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client
        common = self.driver._login()
        result = common.get_volume_settings_from_type_id(
            "mock", self.driver.configuration.hp3par_cpg)
        self.assertEqual(expected_snap_cpg, result['snap_cpg'])
@mock.patch.object(volume_types, 'get_volume_type')
def test_get_snap_cpg_from_volume_type_conf_cpg(
        self, _mock_volume_types):
    """With no configured snap CPG either, snap_cpg falls back to the
    configured hp3par_cpg."""
    _mock_volume_types.return_value = {
        'name': 'gold',
        'extra_specs': {
            'volume_type': self.volume_type}}
    conf = self.setup_configuration()
    # Remove the configured snapshot CPG to force the final fallback.
    conf.hp3par_cpg_snap = None
    expected_cpg = conf.hp3par_cpg
    mock_client = self.setup_driver(config=conf)
    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client
        common = self.driver._login()
        result = common.get_volume_settings_from_type_id(
            "mock", self.driver.configuration.hp3par_cpg)
        self.assertEqual(expected_cpg, result['snap_cpg'])
@mock.patch.object(volume_types, 'get_volume_type')
def test_create_volume_qos(self, _mock_volume_types):
    """Creating a volume with a QoS volume type stamps the type info
    into the 3PAR comment."""
    mock_client = self.setup_driver()
    _mock_volume_types.return_value = {
        'name': 'gold',
        'extra_specs': {
            'cpg': HP3PAR_CPG_QOS,
            'snap_cpg': HP3PAR_CPG_SNAP,
            'vvs_name': self.VVS_NAME,
            'qos': self.QOS,
            'tpvv': True,
            'tdvv': False,
            'volume_type': self.volume_type}}
    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client

        return_model = self.driver.create_volume(self.volume_qos)
        comment = (
            '{"volume_type_name": "gold", "display_name": "Foo Volume"'
            ', "name": "volume-d03338a9-9115-48a3-8dfc-35cdfcdc15a7'
            '", "volume_type_id": "gold", "volume_id": "d03338a9-91'
            '15-48a3-8dfc-35cdfcdc15a7", "qos": {}, "type": "OpenStack"}')

        expected = [
            mock.call.getCPG(HP3PAR_CPG),
            mock.call.createVolume(
                self.VOLUME_3PAR_NAME,
                HP3PAR_CPG,
                1907, {
                    'comment': comment,
                    'tpvv': True,
                    'tdvv': False,
                    'snapCPG': HP3PAR_CPG_SNAP})]

        mock_client.assert_has_calls(
            self.standard_login +
            expected +
            self.standard_logout)
        # assertIsNone instead of assertEqual(x, None).
        self.assertIsNone(return_model)
@mock.patch.object(volume_types, 'get_volume_type')
def test_create_volume_dedup(self, _mock_volume_types):
    """A dedup volume type creates the volume with tdvv instead of tpvv."""
    mock_client = self.setup_driver()
    _mock_volume_types.return_value = {
        'name': 'dedup',
        'extra_specs': {
            'cpg': HP3PAR_CPG_QOS,
            'snap_cpg': HP3PAR_CPG_SNAP,
            'vvs_name': self.VVS_NAME,
            'qos': self.QOS,
            'provisioning': 'dedup',
            'volume_type': self.volume_type_dedup}}
    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client

        return_model = self.driver.create_volume(self.volume_dedup)
        comment = (
            '{"volume_type_name": "dedup", "display_name": "Foo Volume"'
            ', "name": "volume-d03338a9-9115-48a3-8dfc-35cdfcdc15a7'
            '", "volume_type_id": "d03338a9-9115-48a3-8dfc-11111111111"'
            ', "volume_id": "d03338a9-9115-48a3-8dfc-35cdfcdc15a7"'
            ', "qos": {}, "type": "OpenStack"}')

        # Dedup provisioning: tpvv False, tdvv True.
        expected = [
            mock.call.getCPG(HP3PAR_CPG),
            mock.call.createVolume(
                self.VOLUME_3PAR_NAME,
                HP3PAR_CPG,
                1907, {
                    'comment': comment,
                    'tpvv': False,
                    'tdvv': True,
                    'snapCPG': HP3PAR_CPG_SNAP})]

        mock_client.assert_has_calls(
            self.standard_login +
            expected +
            self.standard_logout)
        # assertIsNone instead of assertEqual(x, None).
        self.assertIsNone(return_model)
@mock.patch.object(volume_types, 'get_volume_type')
def test_create_volume_flash_cache(self, _mock_volume_types):
    """A flash-cache type creates a VVS with the flash cache policy
    enabled and adds the new volume to it."""
    mock_client = self.setup_driver()
    _mock_volume_types.return_value = {
        'name': 'flash-cache-on',
        'extra_specs': {
            'cpg': HP3PAR_CPG2,
            'snap_cpg': HP3PAR_CPG_SNAP,
            'vvs_name': self.VVS_NAME,
            'qos': self.QOS,
            'tpvv': True,
            'tdvv': False,
            'hp3par:flash_cache': 'true',
            'volume_type': self.volume_type_flash_cache}}
    with mock.patch.object(hpcommon.HP3PARCommon,
                           '_create_client') as mock_create_client:
        mock_create_client.return_value = mock_client
        mock_client.getCPG.return_value = {'domain': None}
        mock_client.FLASH_CACHE_ENABLED = FLASH_CACHE_ENABLED
        mock_client.FLASH_CACHE_DISABLED = FLASH_CACHE_DISABLED

        return_model = self.driver.create_volume(self.volume_flash_cache)
        comment = (
            '{"volume_type_name": "flash-cache-on", '
            '"display_name": "Foo Volume", '
            '"name": "volume-d03338a9-9115-48a3-8dfc-35cdfcdc15a7", '
            '"volume_type_id": "d03338a9-9115-48a3-8dfc-22222222222", '
            '"volume_id": "d03338a9-9115-48a3-8dfc-35cdfcdc15a7", '
            '"qos": {}, "type": "OpenStack"}')

        expected = [
            mock.call.getCPG(HP3PAR_CPG),
            mock.call.createVolume(
                self.VOLUME_3PAR_NAME,
                HP3PAR_CPG,
                1907, {
                    'comment': comment,
                    'tpvv': True,
                    'tdvv': False,
                    'snapCPG': HP3PAR_CPG_SNAP}),
            mock.call.getCPG(HP3PAR_CPG),
            mock.call.createVolumeSet('vvs-0DM4qZEVSKON-DXN-NwVpw', None),
            mock.call.createQoSRules(
                'vvs-0DM4qZEVSKON-DXN-NwVpw',
                {'priority': 2}
            ),
            mock.call.modifyVolumeSet(
                'vvs-0DM4qZEVSKON-DXN-NwVpw', flashCachePolicy=1),
            mock.call.addVolumeToVolumeSet(
                'vvs-0DM4qZEVSKON-DXN-NwVpw',
                'osv-0DM4qZEVSKON-DXN-NwVpw')]

        mock_client.assert_has_calls(
            [mock.call.getWsApiVersion()] +
            self.standard_login +
            expected +
            self.standard_logout)
        # assertIsNone instead of assertEqual(x, None).
        self.assertIsNone(return_model)
@mock.patch.object(volume_types, 'get_volume_type')
def test_unsupported_flash_cache_volume(self, _mock_volume_types):
mock_client = self.setup_driver(wsapi_version=self.wsapi_version_312)
_mock_volume_types.return_value = {
'name': 'flash-cache-on',
'extra_specs': {
'cpg': HP3PAR_CPG2,
'snap_cpg': HP3PAR_CPG_SNAP,
'vvs_name': self.VVS_NAME,
'qos': self.QOS,
'tpvv': True,
'tdvv': False,
'hp3par:flash_cache': 'true',
'volume_type': self.volume_type_flash_cache}}
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
self.assertRaises(exception.InvalidInput,
common.get_flash_cache_policy,
self.flash_cache_3par_keys)
@mock.patch.object(volume_types, 'get_volume_type')
def test_retype_not_3par(self, _mock_volume_types):
_mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
self.assertRaises(exception.InvalidHost,
self.driver.retype,
self.ctxt,
self.RETYPE_VOLUME_INFO_0,
self.RETYPE_VOLUME_TYPE_1,
self.RETYPE_DIFF,
self.RETYPE_HOST_NOT3PAR)
expected = [mock.call.getVolume(self.VOLUME_3PAR_NAME)]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
@mock.patch.object(volume_types, 'get_volume_type')
def test_retype_volume_not_found(self, _mock_volume_types):
_mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
mock_client.getVolume.side_effect = hpexceptions.HTTPNotFound
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
self.assertRaises(hpexceptions.HTTPNotFound,
self.driver.retype,
self.ctxt,
self.RETYPE_VOLUME_INFO_0,
self.RETYPE_VOLUME_TYPE_1,
self.RETYPE_DIFF,
self.RETYPE_HOST)
expected = [mock.call.getVolume(self.VOLUME_3PAR_NAME)]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_specs_error_reverts_snap_cpg(self, _mock_volume_types):
        """A failed retype must roll the volume back to its original snapCPG/comment."""
        _mock_volume_types.side_effect = [
            self.RETYPE_VOLUME_TYPE_1, self.RETYPE_VOLUME_TYPE_0]
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        mock_client.getVolume.return_value = self.RETYPE_VOLUME_INFO_0
        # Force the retype to blow up partway through.
        mock_client.addVolumeToVolumeSet.side_effect = \
            hpexceptions.HTTPForbidden
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.assertRaises(hpexceptions.HTTPForbidden,
                              self.driver.retype,
                              self.ctxt,
                              {'id': self.VOLUME_ID},
                              self.RETYPE_VOLUME_TYPE_0,
                              self.RETYPE_DIFF,
                              self.RETYPE_HOST)
            old_settings = {
                'snapCPG': self.RETYPE_VOLUME_INFO_0['snapCPG'],
                'comment': self.RETYPE_VOLUME_INFO_0['comment']}
            new_settings = {
                'snapCPG': (
                    self.RETYPE_VOLUME_TYPE_1['extra_specs']['snap_cpg']),
                'comment': mock.ANY}
            # First the new settings are applied ...
            expected = [
                mock.call.modifyVolume(self.VOLUME_3PAR_NAME, new_settings)
            ]
            mock_client.assert_has_calls(expected)
            # ... then reverted once the retype fails.
            expected = [
                mock.call.modifyVolume(self.VOLUME_3PAR_NAME, old_settings)
            ]
            mock_client.assert_has_calls(expected + self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_revert_comment(self, _mock_volume_types):
        """A failed retype must restore the volume's original comment/snapCPG."""
        _mock_volume_types.side_effect = [
            self.RETYPE_VOLUME_TYPE_2, self.RETYPE_VOLUME_TYPE_1]
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        mock_client.getVolume.return_value = self.RETYPE_VOLUME_INFO_1
        # Make the VVS cleanup step fail so the rollback path runs.
        mock_client.deleteVolumeSet.side_effect = hpexceptions.HTTPForbidden
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.assertRaises(hpexceptions.HTTPForbidden,
                              self.driver.retype,
                              self.ctxt,
                              {'id': self.VOLUME_ID},
                              self.RETYPE_VOLUME_TYPE_2,
                              self.RETYPE_DIFF,
                              self.RETYPE_HOST)
            original = {
                'snapCPG': self.RETYPE_VOLUME_INFO_1['snapCPG'],
                'comment': self.RETYPE_VOLUME_INFO_1['comment']}
            expected = [
                mock.call.modifyVolume('osv-0DM4qZEVSKON-DXN-NwVpw', original)]
            mock_client.assert_has_calls(expected + self.standard_logout)
@mock.patch.object(volume_types, 'get_volume_type')
def test_retype_different_array(self, _mock_volume_types):
_mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
mock_client.getStorageSystemInfo.return_value = {
'serialNumber': 'XXXXXXX'}
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
self.assertRaises(exception.InvalidHost,
self.driver.retype,
self.ctxt,
self.RETYPE_VOLUME_INFO_0,
self.RETYPE_VOLUME_TYPE_1,
self.RETYPE_DIFF,
self.RETYPE_HOST)
expected = [
mock.call.getVolume(self.VOLUME_3PAR_NAME),
mock.call.getStorageSystemInfo()]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_across_cpg_domains(self, _mock_volume_types):
        """Retype must fail when source and target user CPGs are in different domains."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        # First getCPG answers for the current CPG, second for the new one.
        mock_client.getCPG.side_effect = [
            {'domain': 'domain1'},
            {'domain': 'domain2'},
        ]
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.assertRaises(exception.Invalid3PARDomain,
                              self.driver.retype,
                              self.ctxt,
                              self.RETYPE_VOLUME_INFO_0,
                              self.RETYPE_VOLUME_TYPE_1,
                              self.RETYPE_DIFF,
                              self.RETYPE_HOST)
            expected = [
                mock.call.getVolume(self.VOLUME_3PAR_NAME),
                mock.call.getStorageSystemInfo(),
                mock.call.getCPG(self.RETYPE_VOLUME_INFO_0['userCPG']),
                mock.call.getCPG(
                    self.RETYPE_VOLUME_TYPE_1['extra_specs']['cpg'])
            ]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_across_snap_cpg_domains(self, _mock_volume_types):
        """Retype must fail when the target snap CPG lives in another domain."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        # User CPGs agree; the third lookup (snap CPG) is in a foreign domain.
        mock_client.getCPG.side_effect = [
            {'domain': 'cpg_domain'},
            {'domain': 'cpg_domain'},
            {'domain': 'snap_cpg_domain_1'},
        ]
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.assertRaises(exception.Invalid3PARDomain,
                              self.driver.retype,
                              self.ctxt,
                              self.RETYPE_VOLUME_INFO_0,
                              self.RETYPE_VOLUME_TYPE_1,
                              self.RETYPE_DIFF,
                              self.RETYPE_HOST)
            expected = [
                mock.call.getVolume(self.VOLUME_3PAR_NAME),
                mock.call.getStorageSystemInfo(),
                mock.call.getCPG(self.RETYPE_VOLUME_INFO_0['userCPG']),
                mock.call.getCPG(
                    self.RETYPE_VOLUME_TYPE_1['extra_specs']['cpg']),
                mock.call.getCPG(
                    self.RETYPE_VOLUME_TYPE_1['extra_specs']['snap_cpg'])
            ]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
@mock.patch.object(volume_types, 'get_volume_type')
def test_retype_to_bad_persona(self, _mock_volume_types):
_mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_BAD_PERSONA
mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
self.assertRaises(exception.InvalidInput,
self.driver.retype,
self.ctxt,
self.RETYPE_VOLUME_INFO_0,
self.RETYPE_VOLUME_TYPE_BAD_PERSONA,
self.RETYPE_DIFF,
self.RETYPE_HOST)
expected = [mock.call.getVolume(self.VOLUME_3PAR_NAME)]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_tune(self, _mock_volume_types):
        """A successful retype re-tunes the volume (modifyVolume action=6) and moves it to the new VVS."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        qos_ref = qos_specs.create(self.ctxt, 'qos-specs-1', self.QOS)
        type_ref = volume_types.create(self.ctxt,
                                       "type1", {"qos:maxIOPS": "100",
                                                 "qos:maxBWS": "50",
                                                 "qos:minIOPS": "10",
                                                 "qos:minBWS": "20",
                                                 "qos:latency": "5",
                                                 "qos:priority": "high"})
        qos_specs.associate_qos_with_type(self.ctxt,
                                          qos_ref['id'],
                                          type_ref['id'])
        type_ref = volume_types.get_volume_type(self.ctxt, type_ref['id'])
        volume = {'id': HP3PARBaseDriver.CLONE_ID}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            retyped = self.driver.retype(
                self.ctxt, volume, type_ref, None, self.RETYPE_HOST)
            self.assertTrue(retyped)
            expected = [
                mock.call.modifyVolume('osv-0DM4qZEVSKON-AAAAAAAAA',
                                       {'comment': mock.ANY,
                                        'snapCPG': 'OpenStackCPGSnap'}),
                mock.call.deleteVolumeSet('vvs-0DM4qZEVSKON-AAAAAAAAA'),
                mock.call.addVolumeToVolumeSet('myvvs',
                                               'osv-0DM4qZEVSKON-AAAAAAAAA'),
                # action=6 / tuneOperation=1 is the 3PAR online tune request.
                mock.call.modifyVolume('osv-0DM4qZEVSKON-AAAAAAAAA',
                                       {'action': 6,
                                        'userCPG': 'OpenStackCPG',
                                        'conversionOperation': 1,
                                        'tuneOperation': 1}),
                mock.call.getTask(1)
            ]
            mock_client.assert_has_calls(expected + self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_qos_spec(self, _mock_volume_types):
        """_retype with new QoS specs must create a VVS with the translated QoS rules."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        cpg = "any_cpg"
        snap_cpg = "any_cpg"
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            # Drive the internal _retype directly with old/new QoS specs.
            common._retype(self.volume,
                           HP3PARBaseDriver.VOLUME_3PAR_NAME,
                           "old_type", "old_type_id",
                           HP3PARBaseDriver.RETYPE_HOST,
                           None, cpg, cpg, snap_cpg, snap_cpg,
                           True, False, False, True, None, None,
                           self.QOS_SPECS, self.RETYPE_QOS_SPECS,
                           None, None,
                           "{}")
            expected = [
                mock.call.createVolumeSet('vvs-0DM4qZEVSKON-DXN-NwVpw', None),
                # IOPS/BW/latency/priority values come from RETYPE_QOS_SPECS.
                mock.call.createQoSRules(
                    'vvs-0DM4qZEVSKON-DXN-NwVpw',
                    {'ioMinGoal': 100, 'ioMaxLimit': 1000,
                     'bwMinGoalKB': 25600, 'bwMaxLimitKB': 51200,
                     'priority': 3,
                     'latencyGoal': 25}
                ),
                mock.call.addVolumeToVolumeSet(
                    'vvs-0DM4qZEVSKON-DXN-NwVpw',
                    'osv-0DM4qZEVSKON-DXN-NwVpw')]
            mock_client.assert_has_calls(expected)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_retype_dedup(self, _mock_volume_types):
        """_retype to a dedup type must issue a conversionOperation=3 tune."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_3
        mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
        cpg = "any_cpg"
        snap_cpg = "any_cpg"
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            common._retype(self.volume,
                           HP3PARBaseDriver.VOLUME_3PAR_NAME,
                           "old_type", "old_type_id",
                           HP3PARBaseDriver.RETYPE_HOST,
                           None, cpg, cpg, snap_cpg, snap_cpg,
                           True, False, False, True, None, None,
                           self.QOS_SPECS, self.RETYPE_QOS_SPECS,
                           None, None,
                           "{}")
            expected = [
                # conversionOperation=3 converts the volume to dedup.
                mock.call.modifyVolume('osv-0DM4qZEVSKON-DXN-NwVpw',
                                       {'action': 6,
                                        'userCPG': 'any_cpg',
                                        'conversionOperation': 3,
                                        'tuneOperation': 1}),
                mock.call.getTask(1)]
            mock_client.assert_has_calls(expected)
def test_delete_volume(self):
mock_client = self.setup_driver()
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
self.driver.delete_volume(self.volume)
expected = [mock.call.deleteVolume(self.VOLUME_3PAR_NAME)]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
    def test_create_cloned_volume(self):
        """Cloning a volume uses an online copyVolume into the pool from the host string."""
        mock_client = self.setup_driver()
        mock_client.copyVolume.return_value = {'taskid': 1}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            volume = {'name': HP3PARBaseDriver.VOLUME_NAME,
                      'id': HP3PARBaseDriver.CLONE_ID,
                      'display_name': 'Foo Volume',
                      'size': 2,
                      'host': volume_utils.append_host(self.FAKE_HOST,
                                                       HP3PAR_CPG2),
                      'source_volid': HP3PARBaseDriver.VOLUME_ID}
            src_vref = {}
            model_update = self.driver.create_cloned_volume(volume, src_vref)
            self.assertIsNone(model_update)
            expected = [
                mock.call.copyVolume(
                    self.VOLUME_3PAR_NAME,
                    'osv-0DM4qZEVSKON-AAAAAAAAA',
                    HP3PAR_CPG2,
                    {'snapCPG': 'OpenStackCPGSnap', 'tpvv': True,
                     'tdvv': False, 'online': True})]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_create_cloned_qos_volume(self, _mock_volume_types):
        """Cloning a QoS volume copies into the pool from the host, not the type's CPG."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_2
        mock_client = self.setup_driver()
        mock_client.copyVolume.return_value = {'taskid': 1}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            src_vref = {}
            volume = self.volume_qos.copy()
            host = "TEST_HOST"
            pool = "TEST_POOL"
            volume_host = volume_utils.append_host(host, pool)
            # The pool embedded in the host string wins as the target CPG.
            expected_cpg = pool
            volume['id'] = HP3PARBaseDriver.CLONE_ID
            volume['host'] = volume_host
            volume['source_volid'] = HP3PARBaseDriver.VOLUME_ID
            model_update = self.driver.create_cloned_volume(volume, src_vref)
            self.assertEqual(model_update, None)
            expected = [
                mock.call.getCPG(expected_cpg),
                mock.call.copyVolume(
                    self.VOLUME_3PAR_NAME,
                    'osv-0DM4qZEVSKON-AAAAAAAAA',
                    expected_cpg,
                    {'snapCPG': 'OpenStackCPGSnap', 'tpvv': True,
                     'tdvv': False, 'online': True})]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    def test_migrate_volume(self):
        """Same-array migration succeeds via a modifyVolume tune to the new CPG."""
        conf = {
            'getStorageSystemInfo.return_value': {
                'serialNumber': '1234'},
            'getTask.return_value': {
                'status': 1},
            'getCPG.return_value': {},
            'copyVolume.return_value': {'taskid': 1},
            'getVolume.return_value': self.RETYPE_VOLUME_INFO_1
        }
        mock_client = self.setup_driver(mock_conf=conf)
        mock_client.getVolume.return_value = self.MANAGE_VOLUME_INFO
        mock_client.modifyVolume.return_value = ("anyResponse", {'taskid': 1})
        mock_client.getTask.return_value = self.STATUS_DONE
        volume = {'name': HP3PARBaseDriver.VOLUME_NAME,
                  'id': HP3PARBaseDriver.CLONE_ID,
                  'display_name': 'Foo Volume',
                  'volume_type_id': None,
                  'size': 2,
                  'status': 'available',
                  'host': HP3PARBaseDriver.FAKE_HOST,
                  'source_volid': HP3PARBaseDriver.VOLUME_ID}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            volume_name_3par = common._encode_name(volume['id'])
            # Serial number '1234' in location_info matches the mock array.
            loc_info = 'HP3PARDriver:1234:CPG-FC1'
            host = {'host': 'stack@3parfc1#CPG-FC1',
                    'capabilities': {'location_info': loc_info}}
            result = self.driver.migrate_volume(context.get_admin_context(),
                                                volume, host)
            self.assertIsNotNone(result)
            self.assertEqual((True, None), result)
            osv_matcher = 'osv-' + volume_name_3par
            expected = [
                mock.call.modifyVolume(
                    osv_matcher,
                    {'comment': '{"qos": {}, "display_name": "Foo Volume"}',
                     'snapCPG': HP3PAR_CPG_SNAP}),
                mock.call.modifyVolume(osv_matcher,
                                       {'action': 6,
                                        'userCPG': 'CPG-FC1',
                                        'conversionOperation': 1,
                                        'tuneOperation': 1}),
                mock.call.getTask(mock.ANY)
            ]
            mock_client.assert_has_calls(expected + self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_migrate_volume_with_type(self, _mock_volume_types):
        """Migration of a typed volume keeps the type info in the 3PAR comment."""
        _mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_2
        conf = {
            'getStorageSystemInfo.return_value': {
                'serialNumber': '1234'},
            'getTask.return_value': {
                'status': 1},
            'getCPG.return_value': {},
            'copyVolume.return_value': {'taskid': 1},
            'getVolume.return_value': self.RETYPE_VOLUME_INFO_1
        }
        mock_client = self.setup_driver(mock_conf=conf)
        mock_client.getVolume.return_value = self.MANAGE_VOLUME_INFO
        mock_client.modifyVolume.return_value = ("anyResponse", {'taskid': 1})
        mock_client.getTask.return_value = self.STATUS_DONE
        display_name = 'Foo Volume'
        volume = {'name': HP3PARBaseDriver.VOLUME_NAME,
                  'id': HP3PARBaseDriver.CLONE_ID,
                  'display_name': display_name,
                  "volume_type_id": self.RETYPE_VOLUME_TYPE_2['id'],
                  'size': 2,
                  'status': 'available',
                  'host': HP3PARBaseDriver.FAKE_HOST,
                  'source_volid': HP3PARBaseDriver.VOLUME_ID}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            volume_name_3par = common._encode_name(volume['id'])
            loc_info = 'HP3PARDriver:1234:CPG-FC1'
            instance_host = 'stack@3parfc1#CPG-FC1'
            host = {'host': instance_host,
                    'capabilities': {'location_info': loc_info}}
            result = self.driver.migrate_volume(context.get_admin_context(),
                                                volume, host)
            self.assertIsNotNone(result)
            self.assertEqual((True, None), result)
            osv_matcher = 'osv-' + volume_name_3par
            # The comment must carry the type id/name and the VVS name.
            expected_comment = {
                "display_name": display_name,
                "volume_type_id": self.RETYPE_VOLUME_TYPE_2['id'],
                "volume_type_name": self.RETYPE_VOLUME_TYPE_2['name'],
                "vvs": self.RETYPE_VOLUME_TYPE_2['extra_specs']['vvs']
            }
            expected = [
                mock.call.modifyVolume(
                    osv_matcher,
                    {'comment': self.CommentMatcher(self.assertEqual,
                                                    expected_comment),
                     'snapCPG': self.RETYPE_VOLUME_TYPE_2
                     ['extra_specs']['snap_cpg']}),
                mock.call.modifyVolume(
                    osv_matcher,
                    {'action': 6,
                     'userCPG': 'CPG-FC1',
                     'conversionOperation': 1,
                     'tuneOperation': 1}),
                mock.call.getTask(mock.ANY)
            ]
            mock_client.assert_has_calls(
                expected +
                self.standard_logout)
def test_migrate_volume_diff_host(self):
conf = {
'getStorageSystemInfo.return_value': {
'serialNumber': 'different'},
}
mock_client = self.setup_driver(mock_conf=conf)
volume = {'name': HP3PARBaseDriver.VOLUME_NAME,
'id': HP3PARBaseDriver.CLONE_ID,
'display_name': 'Foo Volume',
'volume_type_id': None,
'size': 2,
'status': 'available',
'host': HP3PARBaseDriver.FAKE_HOST,
'source_volid': HP3PARBaseDriver.VOLUME_ID}
loc_info = 'HP3PARDriver:1234:CPG-FC1'
host = {'host': 'stack@3parfc1',
'capabilities': {'location_info': loc_info}}
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
result = self.driver.migrate_volume(context.get_admin_context(),
volume, host)
self.assertIsNotNone(result)
self.assertEqual((False, None), result)
@mock.patch.object(volume_types, 'get_volume_type')
def test_migrate_volume_diff_domain(self, _mock_volume_types):
_mock_volume_types.return_value = self.volume_type
conf = {
'getStorageSystemInfo.return_value': {
'serialNumber': '1234'},
'getTask.return_value': {
'status': 1},
'getCPG.return_value': {},
'copyVolume.return_value': {'taskid': 1},
'getVolume.return_value': self.RETYPE_VOLUME_INFO_1
}
mock_client = self.setup_driver(mock_conf=conf)
mock_client.getVolume.return_value = self.MANAGE_VOLUME_INFO
mock_client.modifyVolume.return_value = ("anyResponse", {'taskid': 1})
mock_client.getTask.return_value = self.STATUS_DONE
volume = {'name': HP3PARBaseDriver.VOLUME_NAME,
'id': HP3PARBaseDriver.CLONE_ID,
'display_name': 'Foo Volume',
'volume_type_id': None,
'size': 2,
'status': 'available',
'host': HP3PARBaseDriver.FAKE_HOST,
'source_volid': HP3PARBaseDriver.VOLUME_ID}
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
volume_name_3par = common._encode_name(volume['id'])
loc_info = 'HP3PARDriver:1234:CPG-FC1'
host = {'host': 'stack@3parfc1
'capabilities': {'location_info': loc_info}}
result = self.driver.migrate_volume(context.get_admin_context(),
volume, host)
self.assertIsNotNone(result)
self.assertEqual((True, None), result)
osv_matcher = 'osv-' + volume_name_3par
expected = [
mock.call.modifyVolume(
osv_matcher,
{'comment': '{"qos": {}, "display_name": "Foo Volume"}',
'snapCPG': HP3PAR_CPG_SNAP}),
mock.call.modifyVolume(osv_matcher,
{'action': 6,
'userCPG': 'CPG-FC1',
'conversionOperation': 1,
'tuneOperation': 1}),
mock.call.getTask(mock.ANY),
]
mock_client.assert_has_calls(expected + self.standard_logout)
@mock.patch.object(volume_types, 'get_volume_type')
def test_migrate_volume_attached(self, _mock_volume_types):
_mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
volume = {'name': HP3PARBaseDriver.VOLUME_NAME,
'volume_type_id': None,
'id': HP3PARBaseDriver.CLONE_ID,
'display_name': 'Foo Volume',
'size': 2,
'status': 'in-use',
'host': HP3PARBaseDriver.FAKE_HOST,
'source_volid': HP3PARBaseDriver.VOLUME_ID}
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
volume_name_3par = common._encode_name(volume['id'])
osv_matcher = 'osv-' + volume_name_3par
loc_info = 'HP3PARDriver:1234567:CPG-FC1'
protocol = "FC"
if self.properties['driver_volume_type'] == "iscsi":
protocol = "iSCSI"
host = {'host': 'stack@3parfc1',
'capabilities': {'location_info': loc_info,
'storage_protocol': protocol}}
result = self.driver.migrate_volume(context.get_admin_context(),
volume, host)
new_comment = {"qos": {},
"retype_test": "test comment"}
expected = [
mock.call.modifyVolume(osv_matcher,
{'comment': self.CommentMatcher(
self.assertEqual, new_comment),
'snapCPG': 'OpenStackCPGSnap'}),
mock.call.modifyVolume(osv_matcher,
{'action': 6,
'userCPG': 'OpenStackCPG',
'conversionOperation': 1,
'tuneOperation': 1}),
mock.call.getTask(1),
mock.call.logout()
]
mock_client.assert_has_calls(expected)
self.assertIsNotNone(result)
self.assertEqual((True, {'host': 'stack@3parfc1
result)
@mock.patch.object(volume_types, 'get_volume_type')
def test_migrate_volume_attached_diff_protocol(self, _mock_volume_types):
_mock_volume_types.return_value = self.RETYPE_VOLUME_TYPE_1
mock_client = self.setup_driver(mock_conf=self.RETYPE_CONF)
protocol = "OTHER"
volume = {'name': HP3PARBaseDriver.VOLUME_NAME,
'volume_type_id': None,
'id': HP3PARBaseDriver.CLONE_ID,
'display_name': 'Foo Volume',
'size': 2,
'status': 'in-use',
'host': HP3PARBaseDriver.FAKE_HOST,
'source_volid': HP3PARBaseDriver.VOLUME_ID}
loc_info = 'HP3PARDriver:1234567:CPG-FC1'
host = {'host': 'stack@3parfc1',
'capabilities': {'location_info': loc_info,
'storage_protocol': protocol}}
result = self.driver.migrate_volume(context.get_admin_context(),
volume, host)
self.assertIsNotNone(result)
self.assertEqual((False, None), result)
expected = []
mock_client.assert_has_calls(expected)
def test_attach_volume(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
self.driver.attach_volume(context.get_admin_context(),
self.volume,
'abcdef',
'newhost',
'/dev/vdb')
expected = [
mock.call.setVolumeMetaData(
self.VOLUME_3PAR_NAME,
'HPQ-CS-instance_uuid',
'abcdef')]
mock_client.assert_has_calls(expected)
# test the exception
mock_client.setVolumeMetaData.side_effect = Exception('Custom ex')
self.assertRaises(exception.CinderException,
self.driver.attach_volume,
context.get_admin_context(),
self.volume,
'abcdef',
'newhost',
'/dev/vdb')
def test_detach_volume(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
self.driver.detach_volume(context.get_admin_context(), self.volume,
None)
expected = [
mock.call.removeVolumeMetaData(
self.VOLUME_3PAR_NAME,
'HPQ-CS-instance_uuid')]
mock_client.assert_has_calls(expected)
# test the exception
mock_client.removeVolumeMetaData.side_effect = Exception(
'Custom ex')
self.assertRaises(exception.CinderException,
self.driver.detach_volume,
context.get_admin_context(),
self.volume, None)
    def test_create_snapshot(self):
        """create_snapshot issues a read-only createSnapshot with JSON metadata comment."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.driver.create_snapshot(self.snapshot)
            comment = (
                '{"volume_id": "761fc5e5-5191-4ec7-aeba-33e36de44156",'
                ' "display_name": "fakesnap",'
                ' "description": "test description name",'
                ' "volume_name":'
                ' "volume-d03338a9-9115-48a3-8dfc-35cdfcdc15a7"}')
            expected = [
                mock.call.createSnapshot(
                    'oss-L4I73ONuTci9Fd4ceij-MQ',
                    'osv-dh-F5VGRTseuujPjbeRBVg',
                    {
                        'comment': comment,
                        'readOnly': True})]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
def test_delete_snapshot(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
self.driver.delete_snapshot(self.snapshot)
expected = [
mock.call.deleteVolume('oss-L4I73ONuTci9Fd4ceij-MQ')]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
def test_delete_snapshot_in_use(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
self.driver.create_snapshot(self.snapshot)
self.driver.create_volume_from_snapshot(self.volume, self.snapshot)
ex = hpexceptions.HTTPConflict("In use")
mock_client.deleteVolume = mock.Mock(side_effect=ex)
# Deleting the snapshot that a volume is dependent on should fail
self.assertRaises(exception.SnapshotIsBusy,
self.driver.delete_snapshot,
self.snapshot)
def test_delete_snapshot_not_found(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
self.driver.create_snapshot(self.snapshot)
try:
ex = hpexceptions.HTTPNotFound("not found")
mock_client.deleteVolume = mock.Mock(side_effect=ex)
self.driver.delete_snapshot(self.snapshot)
except Exception:
self.fail("Deleting a snapshot that is missing should act "
"as if it worked.")
    def test_create_volume_from_snapshot(self):
        """Same-size create-from-snapshot makes a writable child snapshot; shrinking is rejected."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            model_update = self.driver.create_volume_from_snapshot(
                self.volume,
                self.snapshot)
            self.assertIsNone(model_update)
            comment = (
                '{"snapshot_id": "2f823bdc-e36e-4dc8-bd15-de1c7a28ff31",'
                ' "display_name": "Foo Volume",'
                ' "volume_id": "d03338a9-9115-48a3-8dfc-35cdfcdc15a7"}')
            expected = [
                mock.call.createSnapshot(
                    self.VOLUME_3PAR_NAME,
                    'oss-L4I73ONuTci9Fd4ceij-MQ',
                    {
                        'comment': comment,
                        'readOnly': False})]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            # A target smaller than the snapshot's source must be refused.
            volume = self.volume.copy()
            volume['size'] = 1
            self.assertRaises(exception.InvalidInput,
                              self.driver.create_volume_from_snapshot,
                              volume, self.snapshot)
    def test_create_volume_from_snapshot_and_extend(self):
        """Create-from-snapshot into a larger volume copies, renames, then grows."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        conf = {
            'getTask.return_value': {
                'status': 1},
            'copyVolume.return_value': {'taskid': 1},
            'getVolume.return_value': {}
        }
        mock_client = self.setup_driver(mock_conf=conf)
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            volume = self.volume.copy()
            volume['size'] = self.volume['size'] + 10
            model_update = self.driver.create_volume_from_snapshot(
                volume,
                self.snapshot)
            self.assertEqual(model_update, None)
            comment = (
                '{"snapshot_id": "2f823bdc-e36e-4dc8-bd15-de1c7a28ff31",'
                ' "display_name": "Foo Volume",'
                ' "volume_id": "d03338a9-9115-48a3-8dfc-35cdfcdc15a7"}')
            volume_name_3par = common._encode_name(volume['id'])
            osv_matcher = 'osv-' + volume_name_3par
            omv_matcher = 'omv-' + volume_name_3par
            # Snapshot -> physical copy (omv) -> replace osv -> grow by 10 GiB.
            expected = [
                mock.call.createSnapshot(
                    self.VOLUME_3PAR_NAME,
                    'oss-L4I73ONuTci9Fd4ceij-MQ',
                    {
                        'comment': comment,
                        'readOnly': False}),
                mock.call.copyVolume(
                    osv_matcher, omv_matcher, HP3PAR_CPG, mock.ANY),
                mock.call.getTask(mock.ANY),
                mock.call.getVolume(osv_matcher),
                mock.call.deleteVolume(osv_matcher),
                mock.call.modifyVolume(omv_matcher, {'newName': osv_matcher}),
                mock.call.growVolume(osv_matcher, 10 * 1024)]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_create_volume_from_snapshot_and_extend_with_qos(
            self, _mock_volume_types):
        """Extend-on-create with a QoS type also resolves the type's CPG first."""
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        conf = {
            'getTask.return_value': {
                'status': 1},
            'copyVolume.return_value': {'taskid': 1},
            'getVolume.return_value': {}
        }
        mock_client = self.setup_driver(mock_conf=conf)
        _mock_volume_types.return_value = {
            'name': 'gold',
            'extra_specs': {
                'cpg': HP3PAR_CPG_QOS,
                'snap_cpg': HP3PAR_CPG_SNAP,
                'vvs_name': self.VVS_NAME,
                'qos': self.QOS,
                'tpvv': True,
                'tdvv': False,
                'volume_type': self.volume_type}}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            volume = self.volume_qos.copy()
            volume['size'] = self.volume['size'] + 10
            model_update = self.driver.create_volume_from_snapshot(
                volume,
                self.snapshot)
            self.assertEqual(model_update, None)
            comment = (
                '{"snapshot_id": "2f823bdc-e36e-4dc8-bd15-de1c7a28ff31",'
                ' "display_name": "Foo Volume",'
                ' "volume_id": "d03338a9-9115-48a3-8dfc-35cdfcdc15a7"}')
            volume_name_3par = common._encode_name(volume['id'])
            osv_matcher = 'osv-' + volume_name_3par
            omv_matcher = 'omv-' + volume_name_3par
            expected = [
                mock.call.createSnapshot(
                    self.VOLUME_3PAR_NAME,
                    'oss-L4I73ONuTci9Fd4ceij-MQ',
                    {
                        'comment': comment,
                        'readOnly': False}),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.copyVolume(
                    osv_matcher, omv_matcher, HP3PAR_CPG, mock.ANY),
                mock.call.getTask(mock.ANY),
                mock.call.getVolume(osv_matcher),
                mock.call.deleteVolume(osv_matcher),
                mock.call.modifyVolume(omv_matcher, {'newName': osv_matcher}),
                mock.call.growVolume(osv_matcher, 10 * 1024)]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
def test_create_volume_from_snapshot_and_extend_copy_fail(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
conf = {
'getTask.return_value': {
'status': 4,
'failure message': 'out of disk space'},
'copyVolume.return_value': {'taskid': 1},
'getVolume.return_value': {}
}
mock_client = self.setup_driver(mock_conf=conf)
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
volume = self.volume.copy()
volume['size'] = self.volume['size'] + 10
self.assertRaises(exception.CinderException,
self.driver.create_volume_from_snapshot,
volume, self.snapshot)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_create_volume_from_snapshot_qos(self, _mock_volume_types):
        """Create a volume from a snapshot using a QoS volume type.

        Also verifies that a target size of 1 is rejected with
        InvalidInput (presumably smaller than the snapshot's source
        volume -- confirm against self.volume's size).
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            _mock_volume_types.return_value = {
                'name': 'gold',
                'extra_specs': {
                    'cpg': HP3PAR_CPG,
                    'snap_cpg': HP3PAR_CPG_SNAP,
                    'vvs_name': self.VVS_NAME,
                    'qos': self.QOS,
                    'tpvv': True,
                    'tdvv': False,
                    'volume_type': self.volume_type}}
            self.driver.create_volume_from_snapshot(
                self.volume_qos,
                self.snapshot)
            comment = (
                '{"snapshot_id": "2f823bdc-e36e-4dc8-bd15-de1c7a28ff31",'
                ' "display_name": "Foo Volume",'
                ' "volume_id": "d03338a9-9115-48a3-8dfc-35cdfcdc15a7"}')
            expected = [
                mock.call.createSnapshot(
                    self.VOLUME_3PAR_NAME,
                    'oss-L4I73ONuTci9Fd4ceij-MQ', {
                        'comment': comment,
                        'readOnly': False})]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            # A too-small target size must be rejected.
            volume = self.volume.copy()
            volume['size'] = 1
            self.assertRaises(exception.InvalidInput,
                              self.driver.create_volume_from_snapshot,
                              volume, self.snapshot)
    def test_terminate_connection(self):
        """terminate_connection(force=True) tears down VLUN, host, CHAP keys.

        With a single VLUN on the host, the driver deletes the VLUN,
        re-checks the host's VLUNs, deletes the host and removes the
        volume's CHAP metadata entries.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        mock_client.getHostVLUNs.return_value = [
            {'active': True,
             'volumeName': self.VOLUME_3PAR_NAME,
             'lun': None, 'type': 0}]
        mock_client.queryHost.return_value = {
            'members': [{
                'name': self.FAKE_HOST
            }]
        }
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            self.driver.terminate_connection(
                self.volume,
                self.connector,
                force=True)
            expected = [
                mock.call.queryHost(iqns=[self.connector['initiator']]),
                mock.call.getHostVLUNs(self.FAKE_HOST),
                mock.call.deleteVLUN(
                    self.VOLUME_3PAR_NAME,
                    None,
                    self.FAKE_HOST),
                mock.call.getHostVLUNs(self.FAKE_HOST),
                mock.call.deleteHost(self.FAKE_HOST),
                mock.call.removeVolumeMetaData(
                    self.VOLUME_3PAR_NAME, CHAP_USER_KEY),
                mock.call.removeVolumeMetaData(
                    self.VOLUME_3PAR_NAME, CHAP_PASS_KEY)]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
def test_update_volume_key_value_pair(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
key = 'a'
value = 'b'
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
common.update_volume_key_value_pair(
self.volume,
key,
value)
expected = [
mock.call.setVolumeMetaData(self.VOLUME_3PAR_NAME, key, value)]
mock_client.assert_has_calls(expected)
# check exception
mock_client.setVolumeMetaData.side_effect = Exception('fake')
self.assertRaises(exception.VolumeBackendAPIException,
common.update_volume_key_value_pair,
self.volume,
None,
'b')
def test_clear_volume_key_value_pair(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
key = 'a'
common = self.driver._login()
common.clear_volume_key_value_pair(self.volume, key)
expected = [
mock.call.removeVolumeMetaData(self.VOLUME_3PAR_NAME, key)]
mock_client.assert_has_calls(expected)
# check the exception
mock_client.removeVolumeMetaData.side_effect = Exception('fake')
self.assertRaises(exception.VolumeBackendAPIException,
common.clear_volume_key_value_pair,
self.volume,
None)
def test_extend_volume(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
grow_size = 3
old_size = self.volume['size']
new_size = old_size + grow_size
self.driver.extend_volume(self.volume, str(new_size))
growth_size_mib = grow_size * units.Ki
expected = [
mock.call.growVolume(self.VOLUME_3PAR_NAME, growth_size_mib)]
mock_client.assert_has_calls(expected)
    def test_extend_volume_non_base(self):
        """A first-attempt HTTPForbidden (code 150) is retried successfully.

        growVolume fails once with HTTP 403 code 150, then succeeds; the
        driver must end up having called growVolume twice.
        """
        extend_ex = hpexceptions.HTTPForbidden(error={'code': 150})
        conf = {
            'getTask.return_value': {
                'status': 1},
            'getCPG.return_value': {},
            'copyVolume.return_value': {'taskid': 1},
            'getVolume.return_value': {},
            # Throw an exception first time only
            'growVolume.side_effect': [extend_ex,
                                       None],
        }
        mock_client = self.setup_driver(mock_conf=conf)
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            grow_size = 3
            old_size = self.volume['size']
            new_size = old_size + grow_size
            self.driver.extend_volume(self.volume, str(new_size))
            # One failed attempt plus one successful retry.
            self.assertEqual(2, mock_client.growVolume.call_count)
    def test_extend_volume_non_base_failure(self):
        """A persistent HTTPForbidden on growVolume is re-raised."""
        extend_ex = hpexceptions.HTTPForbidden(error={'code': 150})
        conf = {
            'getTask.return_value': {
                'status': 1},
            'getCPG.return_value': {},
            'copyVolume.return_value': {'taskid': 1},
            'getVolume.return_value': {},
            # Always fail
            'growVolume.side_effect': extend_ex
        }
        mock_client = self.setup_driver(mock_conf=conf)
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            grow_size = 3
            old_size = self.volume['size']
            new_size = old_size + grow_size
            self.assertRaises(hpexceptions.HTTPForbidden,
                              self.driver.extend_volume,
                              self.volume,
                              str(new_size))
    def test_get_ports(self):
        """get_ports returns every port member reported by the backend.

        The mock backend reports two iSCSI ports and one FC port; all
        three must come back in the 'members' list.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        mock_client.getPorts.return_value = {
            'members': [
                {'portPos': {'node': 0, 'slot': 8, 'cardPort': 2},
                 'protocol': 2,
                 'IPAddr': '10.10.120.252',
                 'linkState': 4,
                 'device': [],
                 'iSCSIName': 'iqn.2000-05.com.3pardata:21810002ac00383d',
                 'mode': 2,
                 'HWAddr': '2C27D75375D2',
                 'type': 8},
                {'portPos': {'node': 1, 'slot': 8, 'cardPort': 1},
                 'protocol': 2,
                 'IPAddr': '10.10.220.253',
                 'linkState': 4,
                 'device': [],
                 'iSCSIName': 'iqn.2000-05.com.3pardata:21810002ac00383d',
                 'mode': 2,
                 'HWAddr': '2C27D75375D6',
                 'type': 8},
                {'portWWN': '20210002AC00383D',
                 'protocol': 1,
                 'linkState': 4,
                 'mode': 2,
                 'device': ['cage2'],
                 'nodeWWN': '20210002AC00383D',
                 'type': 2,
                 'portPos': {'node': 0, 'slot': 6, 'cardPort': 3}}]}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            ports = common.get_ports()['members']
            self.assertEqual(len(ports), 3)
    def test_get_by_qos_spec_with_scoping(self):
        """Associated qos_specs override the type's scoped qos: extra specs."""
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            qos_ref = qos_specs.create(self.ctxt, 'qos-specs-1', self.QOS)
            type_ref = volume_types.create(self.ctxt,
                                           "type1", {"qos:maxIOPS": "100",
                                                     "qos:maxBWS": "50",
                                                     "qos:minIOPS": "10",
                                                     "qos:minBWS": "20",
                                                     "qos:latency": "5",
                                                     "qos:priority": "high"})
            qos_specs.associate_qos_with_type(self.ctxt,
                                              qos_ref['id'],
                                              type_ref['id'])
            type_ref = volume_types.get_volume_type(self.ctxt, type_ref['id'])
            qos = common._get_qos_by_volume_type(type_ref)
            # The result comes from the associated qos_specs, not the
            # "qos:" scoped keys set directly on the volume type.
            self.assertEqual(qos, {'maxIOPS': '1000', 'maxBWS': '50',
                                   'minIOPS': '100', 'minBWS': '25',
                                   'latency': '25', 'priority': 'low'})
    def test_get_by_qos_spec(self):
        """Associated qos_specs (unscoped keys) override type extra specs."""
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            qos_ref = qos_specs.create(
                self.ctxt,
                'qos-specs-1',
                self.QOS_SPECS)
            type_ref = volume_types.create(self.ctxt,
                                           "type1", {"qos:maxIOPS": "100",
                                                     "qos:maxBWS": "50",
                                                     "qos:minIOPS": "10",
                                                     "qos:minBWS": "20",
                                                     "qos:latency": "5",
                                                     "qos:priority": "high"})
            qos_specs.associate_qos_with_type(self.ctxt,
                                              qos_ref['id'],
                                              type_ref['id'])
            type_ref = volume_types.get_volume_type(self.ctxt, type_ref['id'])
            qos = common._get_qos_by_volume_type(type_ref)
            # The result comes from the associated qos_specs, not the
            # "qos:" scoped keys set directly on the volume type.
            self.assertEqual(qos, {'maxIOPS': '1000', 'maxBWS': '50',
                                   'minIOPS': '100', 'minBWS': '25',
                                   'latency': '25', 'priority': 'low'})
    def test_get_by_qos_by_type_only(self):
        """Without associated qos_specs, qos: scoped extra specs are used."""
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            type_ref = volume_types.create(self.ctxt,
                                           "type1", {"qos:maxIOPS": "100",
                                                     "qos:maxBWS": "50",
                                                     "qos:minIOPS": "10",
                                                     "qos:minBWS": "20",
                                                     "qos:latency": "5",
                                                     "qos:priority": "high"})
            type_ref = volume_types.get_volume_type(self.ctxt, type_ref['id'])
            qos = common._get_qos_by_volume_type(type_ref)
            # Values match the type's scoped keys exactly (no override).
            self.assertEqual(qos, {'maxIOPS': '100', 'maxBWS': '50',
                                   'minIOPS': '10', 'minBWS': '20',
                                   'latency': '5', 'priority': 'high'})
    def test_create_vlun(self):
        """_create_3par_vlun parses the VLUN location with/without an NSP.

        The backend returns a comma-separated location string; the parsed
        dict must include 'nsp' only when the location carries one.
        """
        host = 'fake-host'
        lun_id = 11
        nsp = '1:2:3'
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            # Location including an NSP component.
            location = ("%(name)s,%(lunid)s,%(host)s,%(nsp)s" %
                        {'name': self.VOLUME_NAME,
                         'lunid': lun_id,
                         'host': host,
                         'nsp': nsp})
            mock_client.createVLUN.return_value = location
            expected_info = {'volume_name': self.VOLUME_NAME,
                             'lun_id': lun_id,
                             'host_name': host,
                             'nsp': nsp}
            common = self.driver._login()
            vlun_info = common._create_3par_vlun(
                self.VOLUME_NAME,
                host,
                nsp)
            self.assertEqual(expected_info, vlun_info)
            # Location without an NSP component.
            location = ("%(name)s,%(lunid)s,%(host)s" %
                        {'name': self.VOLUME_NAME,
                         'lunid': lun_id,
                         'host': host})
            mock_client.createVLUN.return_value = location
            expected_info = {'volume_name': self.VOLUME_NAME,
                             'lun_id': lun_id,
                             'host_name': host}
            vlun_info = common._create_3par_vlun(
                self.VOLUME_NAME,
                host,
                None)
            self.assertEqual(expected_info, vlun_info)
def test__get_existing_volume_ref_name(self):
mock_client = self.setup_driver()
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
unm_matcher = common._get_3par_unm_name(self.volume['id'])
existing_ref = {'source-name': unm_matcher}
result = common._get_existing_volume_ref_name(existing_ref)
self.assertEqual(unm_matcher, result)
existing_ref = {'source-id': self.volume['id']}
result = common._get_existing_volume_ref_name(existing_ref)
self.assertEqual(unm_matcher, result)
existing_ref = {'bad-key': 'foo'}
self.assertRaises(
exception.ManageExistingInvalidReference,
common._get_existing_volume_ref_name,
existing_ref)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_manage_existing(self, _mock_volume_types):
        """manage_existing renames the volume and retypes it to the type.

        Verifies the rename/comment update, the retype comment + snapCPG
        modification, and the QoS volume-set rebuild and tune calls.
        """
        _mock_volume_types.return_value = self.volume_type
        mock_client = self.setup_driver()
        new_comment = {"display_name": "Foo Volume",
                       "name": "volume-007dbfce-7579-40bc-8f90-a20b3902283e",
                       "volume_id": "007dbfce-7579-40bc-8f90-a20b3902283e",
                       "type": "OpenStack"}
        volume = {'display_name': None,
                  'host': self.FAKE_CINDER_HOST,
                  'volume_type': 'gold',
                  'volume_type_id': 'acfa9fa4-54a0-4340-a3d8-bfcf19aea65e',
                  'id': '007dbfce-7579-40bc-8f90-a20b3902283e'}
        mock_client.getVolume.return_value = self.MANAGE_VOLUME_INFO
        mock_client.modifyVolume.return_value = ("anyResponse", {'taskid': 1})
        mock_client.getTask.return_value = self.STATUS_DONE
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            unm_matcher = common._get_3par_unm_name(self.volume['id'])
            osv_matcher = common._get_3par_vol_name(volume['id'])
            vvs_matcher = common._get_3par_vvs_name(volume['id'])
            existing_ref = {'source-name': unm_matcher}
            expected_obj = {'display_name': 'Foo Volume'}
            obj = self.driver.manage_existing(volume, existing_ref)
            # Step 1: rename unm- -> osv- and stamp the OpenStack comment.
            expected_manage = [
                mock.call.getVolume(existing_ref['source-name']),
                mock.call.modifyVolume(existing_ref['source-name'],
                                       {'newName': osv_matcher,
                                        'comment': self.CommentMatcher(
                                            self.assertEqual, new_comment)}),
            ]
            retype_comment_qos = {
                "display_name": "Foo Volume",
                "volume_type_name": self.volume_type['name'],
                "volume_type_id": self.volume_type['id'],
                "qos": {
                    'maxIOPS': '1000',
                    'maxBWS': '50',
                    'minIOPS': '100',
                    'minBWS': '25',
                    'latency': '25',
                    'priority': 'low'
                }
            }
            expected_snap_cpg = HP3PAR_CPG_SNAP
            # Step 2: retype updates the comment/snapCPG and rebuilds the
            # QoS volume set.
            expected_retype_modify = [
                mock.call.modifyVolume(osv_matcher,
                                       {'comment': self.CommentMatcher(
                                           self.assertEqual,
                                           retype_comment_qos),
                                        'snapCPG': expected_snap_cpg}),
                mock.call.deleteVolumeSet(vvs_matcher),
            ]
            expected_retype_specs = [
                mock.call.createVolumeSet(vvs_matcher, None),
                mock.call.createQoSRules(
                    vvs_matcher,
                    {'ioMinGoal': 100, 'ioMaxLimit': 1000,
                     'bwMinGoalKB': 25600, 'priority': 1, 'latencyGoal': 25,
                     'bwMaxLimitKB': 51200}),
                mock.call.addVolumeToVolumeSet(vvs_matcher, osv_matcher),
                mock.call.modifyVolume(
                    osv_matcher,
                    {'action': 6,
                     'userCPG': HP3PAR_CPG,
                     'conversionOperation': 1, 'tuneOperation': 1}),
                mock.call.getTask(1)
            ]
            mock_client.assert_has_calls(self.standard_login + expected_manage)
            mock_client.assert_has_calls(expected_retype_modify)
            mock_client.assert_has_calls(
                expected_retype_specs +
                self.standard_logout)
            self.assertEqual(expected_obj, obj)
@mock.patch.object(volume_types, 'get_volume_type')
def test_manage_existing_with_no_snap_cpg(self, _mock_volume_types):
_mock_volume_types.return_value = self.volume_type
mock_client = self.setup_driver()
new_comment = {"display_name": "Foo Volume",
"name": "volume-007dbfce-7579-40bc-8f90-a20b3902283e",
"volume_id": "007dbfce-7579-40bc-8f90-a20b3902283e",
"type": "OpenStack"}
volume = {'display_name': None,
'host': 'my-stack1@3parxxx
'volume_type': 'gold',
'volume_type_id': 'acfa9fa4-54a0-4340-a3d8-bfcf19aea65e',
'id': '007dbfce-7579-40bc-8f90-a20b3902283e'}
mock_client.getVolume.return_value = self.MV_INFO_WITH_NO_SNAPCPG
mock_client.modifyVolume.return_value = ("anyResponse", {'taskid': 1})
mock_client.getTask.return_value = self.STATUS_DONE
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
unm_matcher = common._get_3par_unm_name(self.volume['id'])
osv_matcher = common._get_3par_vol_name(volume['id'])
existing_ref = {'source-name': unm_matcher}
expected_obj = {'display_name': 'Foo Volume'}
obj = self.driver.manage_existing(volume, existing_ref)
expected_manage = [
mock.call.getVolume(existing_ref['source-name']),
mock.call.modifyVolume(
existing_ref['source-name'],
{'newName': osv_matcher,
'comment': self.CommentMatcher(self.assertEqual,
new_comment),
# manage_existing() should be setting
# blank snapCPG to the userCPG
'snapCPG': 'testUserCpg0'})
]
mock_client.assert_has_calls(self.standard_login + expected_manage)
self.assertEqual(expected_obj, obj)
@mock.patch.object(volume_types, 'get_volume_type')
def test_manage_existing_vvs(self, _mock_volume_types):
test_volume_type = self.RETYPE_VOLUME_TYPE_2
vvs = test_volume_type['extra_specs']['vvs']
_mock_volume_types.return_value = test_volume_type
mock_client = self.setup_driver()
mock_client.getVolume.return_value = self.MANAGE_VOLUME_INFO
mock_client.modifyVolume.return_value = ("anyResponse", {'taskid': 1})
mock_client.getTask.return_value = self.STATUS_DONE
id = '007abcde-7579-40bc-8f90-a20b3902283e'
new_comment = {"display_name": "Test Volume",
"name": ("volume-%s" % id),
"volume_id": id,
"type": "OpenStack"}
volume = {'display_name': 'Test Volume',
'host': 'my-stack1@3parxxx
'volume_type': 'gold',
'volume_type_id': 'acfa9fa4-54a0-4340-a3d8-bfcf19aea65e',
'id': id}
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
unm_matcher = common._get_3par_unm_name(self.volume['id'])
osv_matcher = common._get_3par_vol_name(volume['id'])
vvs_matcher = common._get_3par_vvs_name(volume['id'])
existing_ref = {'source-name': unm_matcher}
obj = self.driver.manage_existing(volume, existing_ref)
expected_obj = {'display_name': 'Test Volume'}
expected_manage = [
mock.call.getVolume(existing_ref['source-name']),
mock.call.modifyVolume(existing_ref['source-name'],
{'newName': osv_matcher,
'comment': self.CommentMatcher(
self.assertEqual, new_comment)})
]
retype_comment_vvs = {
"display_name": "Foo Volume",
"volume_type_name": test_volume_type['name'],
"volume_type_id": test_volume_type['id'],
"vvs": vvs
}
expected_retype = [
mock.call.modifyVolume(osv_matcher,
{'comment': self.CommentMatcher(
self.assertEqual,
retype_comment_vvs),
'snapCPG': 'OpenStackCPGSnap'}),
mock.call.deleteVolumeSet(vvs_matcher),
mock.call.addVolumeToVolumeSet(vvs, osv_matcher),
mock.call.modifyVolume(osv_matcher,
{'action': 6,
'userCPG': 'CPGNOTUSED',
'conversionOperation': 1,
'tuneOperation': 1}),
mock.call.getTask(1)
]
mock_client.assert_has_calls(self.standard_login + expected_manage)
mock_client.assert_has_calls(
expected_retype +
self.standard_logout)
self.assertEqual(expected_obj, obj)
    def test_manage_existing_no_volume_type(self):
        """manage_existing without a volume type only renames and comments.

        Covers three cases: existing comment with a display name, a volume
        with its own display_name, and neither (display_name stays None).
        """
        mock_client = self.setup_driver()
        comment = (
            '{"display_name": "Foo Volume"}')
        new_comment = (
            '{"type": "OpenStack",'
            ' "display_name": "Foo Volume",'
            ' "name": "volume-007dbfce-7579-40bc-8f90-a20b3902283e",'
            ' "volume_id": "007dbfce-7579-40bc-8f90-a20b3902283e"}')
        volume = {'display_name': None,
                  'volume_type': None,
                  'volume_type_id': None,
                  'id': '007dbfce-7579-40bc-8f90-a20b3902283e'}
        mock_client.getVolume.return_value = {'comment': comment,
                                              'userCPG': 'testUserCpg0'}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            unm_matcher = common._get_3par_unm_name(self.volume['id'])
            osv_matcher = common._get_3par_vol_name(volume['id'])
            existing_ref = {'source-name': unm_matcher}
            # Case 1: display name taken from the existing 3PAR comment.
            obj = self.driver.manage_existing(volume, existing_ref)
            expected_obj = {'display_name': 'Foo Volume'}
            expected = [
                mock.call.getVolume(existing_ref['source-name']),
                mock.call.modifyVolume(existing_ref['source-name'],
                                       {'newName': osv_matcher,
                                        'comment': new_comment,
                                        # manage_existing() should be setting
                                        # blank snapCPG to the userCPG
                                        'snapCPG': 'testUserCpg0'})
            ]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertEqual(expected_obj, obj)
            # Case 2: the volume's own display_name takes precedence.
            volume['display_name'] = 'Test Volume'
            obj = self.driver.manage_existing(volume, existing_ref)
            expected_obj = {'display_name': 'Test Volume'}
            expected = [
                mock.call.getVolume(existing_ref['source-name']),
                mock.call.modifyVolume(existing_ref['source-name'],
                                       {'newName': osv_matcher,
                                        'comment': new_comment,
                                        # manage_existing() should be setting
                                        # blank snapCPG to the userCPG
                                        'snapCPG': 'testUserCpg0'})
            ]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertEqual(expected_obj, obj)
            # Case 3: no comment and no display_name -> stays None.
            mock_client.getVolume.return_value = {'userCPG': 'testUserCpg0'}
            volume['display_name'] = None
            common = self.driver._login()
            obj = self.driver.manage_existing(volume, existing_ref)
            expected_obj = {'display_name': None}
            expected = [
                mock.call.getVolume(existing_ref['source-name']),
                mock.call.modifyVolume(existing_ref['source-name'],
                                       {'newName': osv_matcher,
                                        'comment': new_comment,
                                        # manage_existing() should be setting
                                        # blank snapCPG to the userCPG
                                        'snapCPG': 'testUserCpg0'})
            ]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertEqual(expected_obj, obj)
    def test_manage_existing_invalid_input(self):
        """A nonexistent source volume raises InvalidInput."""
        mock_client = self.setup_driver()
        volume = {'display_name': None,
                  'volume_type': None,
                  'id': '007dbfce-7579-40bc-8f90-a20b3902283e'}
        # The backend cannot find the referenced volume.
        mock_client.getVolume.side_effect = hpexceptions.HTTPNotFound('fake')
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            unm_matcher = common._get_3par_unm_name(self.volume['id'])
            existing_ref = {'source-name': unm_matcher}
            self.assertRaises(exception.InvalidInput,
                              self.driver.manage_existing,
                              volume=volume,
                              existing_ref=existing_ref)
            expected = [mock.call.getVolume(existing_ref['source-name'])]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    def test_manage_existing_volume_type_exception(self):
        """A bad volume type on the target raises a type-mismatch error."""
        mock_client = self.setup_driver()
        comment = (
            '{"display_name": "Foo Volume"}')
        # volume_types.get_volume_type is NOT mocked here, so this type id
        # cannot be resolved.
        volume = {'display_name': None,
                  'volume_type': 'gold',
                  'volume_type_id': 'bcfa9fa4-54a0-4340-a3d8-bfcf19aea65e',
                  'id': '007dbfce-7579-40bc-8f90-a20b3902283e'}
        mock_client.getVolume.return_value = {'comment': comment}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            unm_matcher = common._get_3par_unm_name(self.volume['id'])
            existing_ref = {'source-name': unm_matcher}
            self.assertRaises(exception.ManageExistingVolumeTypeMismatch,
                              self.driver.manage_existing,
                              volume=volume,
                              existing_ref=existing_ref)
            expected = [mock.call.getVolume(existing_ref['source-name'])]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
@mock.patch.object(volume_types, 'get_volume_type')
def test_manage_existing_retype_exception(self, _mock_volume_types):
mock_client = self.setup_driver()
_mock_volume_types.return_value = {
'name': 'gold',
'id': 'gold-id',
'extra_specs': {
'cpg': HP3PAR_CPG,
'snap_cpg': HP3PAR_CPG_SNAP,
'vvs_name': self.VVS_NAME,
'qos': self.QOS,
'tpvv': True,
'tdvv': False,
'volume_type': self.volume_type}}
volume = {'display_name': None,
'host': 'stack1@3pariscsi
'volume_type': 'gold',
'volume_type_id': 'bcfa9fa4-54a0-4340-a3d8-bfcf19aea65e',
'id': '007dbfce-7579-40bc-8f90-a20b3902283e'}
mock_client.getVolume.return_value = self.MANAGE_VOLUME_INFO
mock_client.modifyVolume.return_value = ("anyResponse", {'taskid': 1})
mock_client.getTask.return_value = self.STATUS_DONE
mock_client.getCPG.side_effect = [
{'domain': 'domain1'},
{'domain': 'domain2'},
{'domain': 'domain3'},
]
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
unm_matcher = common._get_3par_unm_name(self.volume['id'])
osv_matcher = common._get_3par_vol_name(volume['id'])
existing_ref = {'source-name': unm_matcher}
self.assertRaises(exception.Invalid3PARDomain,
self.driver.manage_existing,
volume=volume,
existing_ref=existing_ref)
expected = [
mock.call.getVolume(unm_matcher),
mock.call.modifyVolume(
unm_matcher, {
'newName': osv_matcher,
'comment': mock.ANY}),
mock.call.getCPG('POOL1'),
mock.call.getVolume(osv_matcher),
mock.call.getCPG('testUserCpg0'),
mock.call.getCPG('POOL1'),
mock.call.modifyVolume(
osv_matcher, {'newName': unm_matcher,
'comment': self.MANAGE_VOLUME_INFO
['comment']})
]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
def test_manage_existing_get_size(self):
mock_client = self.setup_driver()
mock_client.getVolume.return_value = {'sizeMiB': 2048}
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
unm_matcher = common._get_3par_unm_name(self.volume['id'])
volume = {}
existing_ref = {'source-name': unm_matcher}
size = self.driver.manage_existing_get_size(volume, existing_ref)
expected_size = 2
expected = [mock.call.getVolume(existing_ref['source-name'])]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
self.assertEqual(expected_size, size)
    def test_manage_existing_get_size_invalid_reference(self):
        """References without a valid unmanaged source-name are rejected.

        Both an osv- (already managed) name and an empty reference must
        raise ManageExistingInvalidReference before any backend call.
        """
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            volume = {}
            existing_ref = {'source-name': self.VOLUME_3PAR_NAME}
            self.assertRaises(exception.ManageExistingInvalidReference,
                              self.driver.manage_existing_get_size,
                              volume=volume,
                              existing_ref=existing_ref)
            mock_client.assert_has_calls(
                self.standard_login +
                self.standard_logout)
            existing_ref = {}
            self.assertRaises(exception.ManageExistingInvalidReference,
                              self.driver.manage_existing_get_size,
                              volume=volume,
                              existing_ref=existing_ref)
            mock_client.assert_has_calls(
                self.standard_login +
                self.standard_logout)
    def test_manage_existing_get_size_invalid_input(self):
        """A nonexistent source volume raises InvalidInput on get_size."""
        mock_client = self.setup_driver()
        # The backend cannot find the referenced volume.
        mock_client.getVolume.side_effect = hpexceptions.HTTPNotFound('fake')
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            unm_matcher = common._get_3par_unm_name(self.volume['id'])
            volume = {}
            existing_ref = {'source-name': unm_matcher}
            self.assertRaises(exception.InvalidInput,
                              self.driver.manage_existing_get_size,
                              volume=volume,
                              existing_ref=existing_ref)
            expected = [mock.call.getVolume(existing_ref['source-name'])]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
def test_unmanage(self):
mock_client = self.setup_driver()
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
self.driver.unmanage(self.volume)
osv_matcher = common._get_3par_vol_name(self.volume['id'])
unm_matcher = common._get_3par_unm_name(self.volume['id'])
expected = [
mock.call.modifyVolume(osv_matcher, {'newName': unm_matcher})
]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
def test__safe_hostname(self):
long_hostname = "abc123abc123abc123abc123abc123abc123"
fixed_hostname = "abc123abc123abc123abc123abc123a"
common = hpcommon.HP3PARCommon(None)
safe_host = common._safe_hostname(long_hostname)
self.assertEqual(fixed_hostname, safe_host)
class TestHP3PARFCDriver(HP3PARBaseDriver, test.TestCase):
    """Tests for the HP 3PAR Fibre Channel driver."""

    # Expected initialize_connection result for the default connector
    # (two initiator WWPNs, each mapped to both target WWNs, LUN 90).
    properties = {
        'driver_volume_type': 'fibre_channel',
        'data': {
            'encrypted': False,
            'target_lun': 90,
            'target_wwn': ['0987654321234', '123456789000987'],
            'target_discovered': True,
            'initiator_target_map': {'123456789012345':
                                     ['0987654321234', '123456789000987'],
                                     '123456789054321':
                                     ['0987654321234', '123456789000987'],
                                     }}}
    def setup_driver(self, config=None, mock_conf=None, wsapi_version=None):
        """Set up the FC driver against a mock client.

        Verifies the startup CPG checks, then returns the mock client
        with its call history reset so tests only see their own calls.
        """
        self.ctxt = context.get_admin_context()
        mock_client = self.setup_mock_client(
            conf=config,
            m_conf=mock_conf,
            driver=hpfcdriver.HP3PARFCDriver)
        if wsapi_version:
            mock_client.getWsApiVersion.return_value = (
                wsapi_version)
        else:
            mock_client.getWsApiVersion.return_value = (
                self.wsapi_version_latest)
        expected = [
            mock.call.getCPG(HP3PAR_CPG),
            mock.call.getCPG(HP3PAR_CPG2)]
        mock_client.assert_has_calls(
            self.standard_login +
            expected +
            self.standard_logout)
        mock_client.reset_mock()
        return mock_client
    def test_initialize_connection(self):
        """FC initialize_connection creates the host and VLUN.

        The host lookup first misses, is then found via queryHost, and a
        new auto VLUN is created; the returned properties must match the
        class-level 'properties' fixture.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            hpexceptions.HTTPNotFound('fake'),
            {'name': self.FAKE_HOST,
                'FCPaths': [{'driverVersion': None,
                             'firmwareVersion': None,
                             'hostSpeed': 0,
                             'model': None,
                             'portPos': {'cardPort': 1, 'node': 1,
                                         'slot': 2},
                             'vendor': None,
                             'wwn': self.wwn[0]},
                            {'driverVersion': None,
                             'firmwareVersion': None,
                             'hostSpeed': 0,
                             'model': None,
                             'portPos': {'cardPort': 1, 'node': 0,
                                         'slot': 2},
                             'vendor': None,
                             'wwn': self.wwn[1]}]}]
        mock_client.queryHost.return_value = {
            'members': [{
                'name': self.FAKE_HOST
            }]
        }
        mock_client.getVLUN.side_effect = [
            hpexceptions.HTTPNotFound('fake')]
        mock_client.getHostVLUNs.return_value = [
            {'active': True,
             'volumeName': self.VOLUME_3PAR_NAME,
             'lun': 90, 'type': 0}]
        location = ("%(volume_name)s,%(lun_id)s,%(host)s,%(nsp)s" %
                    {'volume_name': self.VOLUME_3PAR_NAME,
                     'lun_id': 90,
                     'host': self.FAKE_HOST,
                     'nsp': 'something'})
        mock_client.createVLUN.return_value = location
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            result = self.driver.initialize_connection(
                self.volume,
                self.connector)
            expected = [
                mock.call.getVolume(self.VOLUME_3PAR_NAME),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.queryHost(wwns=['123456789012345',
                                          '123456789054321']),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.getPorts(),
                mock.call.getVLUN(self.VOLUME_3PAR_NAME),
                mock.call.createVLUN(
                    self.VOLUME_3PAR_NAME,
                    auto=True,
                    hostname=self.FAKE_HOST),
                mock.call.getHostVLUNs(self.FAKE_HOST)]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertDictMatch(result, self.properties)
    @mock.patch('cinder.zonemanager.utils.create_lookup_service')
    def test_initialize_connection_with_lookup_single_nsp(self, mock_lookup):
        """FC initialize_connection uses the fabric lookup service.

        With a zone lookup service available, the target map is limited
        to the fabric-visible pairs and the VLUN is created on a specific
        port (portPos) rather than fully auto.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        class fake_lookup_object(object):
            """Minimal stand-in for the FC SAN lookup service."""
            def get_device_mapping_from_network(self, connector, target_wwns):
                # One fabric containing a single initiator/target pair.
                fake_map = {
                    'FAB_1': {
                        'target_port_wwn_list': ['0987654321234'],
                        'initiator_port_wwn_list': ['123456789012345']
                    }
                }
                return fake_map
        mock_lookup.return_value = fake_lookup_object()
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            hpexceptions.HTTPNotFound('fake'),
            {'name': self.FAKE_HOST,
                'FCPaths': [{'driverVersion': None,
                             'firmwareVersion': None,
                             'hostSpeed': 0,
                             'model': None,
                             'portPos': {'cardPort': 1, 'node': 1,
                                         'slot': 2},
                             'vendor': None,
                             'wwn': self.wwn[0]}]}]
        mock_client.queryHost.return_value = {
            'members': [{
                'name': self.FAKE_HOST
            }]
        }
        mock_client.getVLUN.side_effect = [
            hpexceptions.HTTPNotFound('fake')]
        mock_client.getHostVLUNs.return_value = [
            {'active': True,
             'volumeName': self.VOLUME_3PAR_NAME,
             'lun': 90, 'type': 0}]
        location = ("%(volume_name)s,%(lun_id)s,%(host)s,%(nsp)s" %
                    {'volume_name': self.VOLUME_3PAR_NAME,
                     'lun_id': 90,
                     'host': self.FAKE_HOST,
                     'nsp': 'something'})
        mock_client.createVLUN.return_value = location
        connector = {'ip': '10.0.0.2',
                     'initiator': 'iqn.1993-08.org.debian:01:222',
                     'wwpns': [self.wwn[0]],
                     'wwnns': ["223456789012345"],
                     'host': self.FAKE_HOST}
        expected_properties = {
            'driver_volume_type': 'fibre_channel',
            'data': {
                'encrypted': False,
                'target_lun': 90,
                'target_wwn': ['0987654321234'],
                'target_discovered': True,
                'initiator_target_map': {'123456789012345':
                                         ['0987654321234']
                                         }}}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            result = self.driver.initialize_connection(self.volume, connector)
            expected = [
                mock.call.getVolume(self.VOLUME_3PAR_NAME),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost(self.FAKE_HOST),
                mock.ANY,
                mock.call.getHost(self.FAKE_HOST),
                mock.call.getPorts(),
                mock.call.getVLUN(self.VOLUME_3PAR_NAME),
                mock.call.getPorts(),
                mock.call.createVLUN(
                    self.VOLUME_3PAR_NAME,
                    auto=True,
                    hostname=self.FAKE_HOST,
                    portPos={'node': 7, 'slot': 1, 'cardPort': 1}),
                mock.call.getHostVLUNs(self.FAKE_HOST)]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertDictMatch(result, expected_properties)
def test_initialize_connection_encrypted(self):
# setup_mock_client drive with default configuration
# and return the mock HTTP 3PAR client
mock_client = self.setup_driver()
mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
mock_client.getCPG.return_value = {}
mock_client.getHost.side_effect = [
hpexceptions.HTTPNotFound('fake'),
{'name': self.FAKE_HOST,
'FCPaths': [{'driverVersion': None,
'firmwareVersion': None,
'hostSpeed': 0,
'model': None,
'portPos': {'cardPort': 1, 'node': 1,
'slot': 2},
'vendor': None,
'wwn': self.wwn[0]},
{'driverVersion': None,
'firmwareVersion': None,
'hostSpeed': 0,
'model': None,
'portPos': {'cardPort': 1, 'node': 0,
'slot': 2},
'vendor': None,
'wwn': self.wwn[1]}]}]
mock_client.queryHost.return_value = {
'members': [{
'name': self.FAKE_HOST
}]
}
mock_client.getVLUN.side_effect = [
hpexceptions.HTTPNotFound('fake')]
mock_client.getHostVLUNs.return_value = [
{'active': True,
'volumeName': self.VOLUME_3PAR_NAME,
'lun': 90, 'type': 0}]
location = ("%(volume_name)s,%(lun_id)s,%(host)s,%(nsp)s" %
{'volume_name': self.VOLUME_3PAR_NAME,
'lun_id': 90,
'host': self.FAKE_HOST,
'nsp': 'something'})
mock_client.createVLUN.return_value = location
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
result = self.driver.initialize_connection(
self.volume_encrypted,
self.connector)
expected = [
mock.call.getVolume(self.VOLUME_3PAR_NAME),
mock.call.getCPG(HP3PAR_CPG),
mock.call.getHost(self.FAKE_HOST),
mock.call.queryHost(wwns=['123456789012345',
'123456789054321']),
mock.call.getHost(self.FAKE_HOST),
mock.call.getPorts(),
mock.call.getVLUN(self.VOLUME_3PAR_NAME),
mock.call.createVLUN(
self.VOLUME_3PAR_NAME,
auto=True,
hostname=self.FAKE_HOST),
mock.call.getHostVLUNs(self.FAKE_HOST)]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
expected_properties = self.properties
expected_properties['data']['encrypted'] = True
self.assertDictMatch(result, expected_properties)
    def test_terminate_connection(self):
        """FC terminate_connection removes the VLUN and then the host.

        Exercised three times with the same getHostVLUNs side-effect
        sequence: a clean run, a run where deleteHost() raises handled
        HTTPConflict errors, and a final clean run.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        mock_client = self.setup_driver()
        # First call: one active VLUN; next two calls: host has no VLUNs
        # left (404), so the driver proceeds to delete the host.
        effects = [
            [{'active': True, 'volumeName': self.VOLUME_3PAR_NAME,
              'lun': None, 'type': 0}],
            hpexceptions.HTTPNotFound,
            hpexceptions.HTTPNotFound]
        mock_client.getHostVLUNs.side_effect = effects
        mock_client.queryHost.return_value = {
            'members': [{
                'name': self.FAKE_HOST
            }]
        }
        # Exact backend call sequence expected for every run below.
        expected = [
            mock.call.queryHost(wwns=['123456789012345', '123456789054321']),
            mock.call.getHostVLUNs(self.FAKE_HOST),
            mock.call.deleteVLUN(
                self.VOLUME_3PAR_NAME,
                None,
                self.FAKE_HOST),
            mock.call.getHostVLUNs(self.FAKE_HOST),
            mock.call.deleteHost(self.FAKE_HOST),
            mock.call.getHostVLUNs(self.FAKE_HOST),
            mock.call.getPorts()]
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            conn_info = self.driver.terminate_connection(self.volume,
                                                         self.connector)
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertIn('data', conn_info)
            self.assertIn('initiator_target_map', conn_info['data'])
            # Re-arm the side effects for the second run.
            mock_client.reset_mock()
            mock_client.getHostVLUNs.side_effect = effects
            # mock some deleteHost exceptions that are handled
            delete_with_vlun = hpexceptions.HTTPConflict(
                error={'message': "has exported VLUN"})
            delete_with_hostset = hpexceptions.HTTPConflict(
                error={'message': "host is a member of a set"})
            mock_client.deleteHost = mock.Mock(
                side_effect=[delete_with_vlun, delete_with_hostset])
            conn_info = self.driver.terminate_connection(self.volume,
                                                         self.connector)
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            # Third run: deleteHost succeeds again.
            mock_client.reset_mock()
            mock_client.getHostVLUNs.side_effect = effects
            conn_info = self.driver.terminate_connection(self.volume,
                                                         self.connector)
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    @mock.patch('cinder.zonemanager.utils.create_lookup_service')
    def test_terminate_connection_with_lookup(self, mock_lookup):
        """Same as test_terminate_connection, with an FC SAN lookup service.

        A fake zone-manager lookup object maps initiator to target WWNs so
        the driver builds the initiator_target_map through the fabric
        lookup path instead of the flat port list.
        """
        # setup_mock_client drive with default configuration
        # and return the mock HTTP 3PAR client
        class fake_lookup_object(object):
            # Minimal stand-in for the zone-manager lookup service.
            def get_device_mapping_from_network(self, connector, target_wwns):
                fake_map = {
                    'FAB_1': {
                        'target_port_wwn_list': ['0987654321234'],
                        'initiator_port_wwn_list': ['123456789012345']
                    }
                }
                return fake_map
        mock_lookup.return_value = fake_lookup_object()
        mock_client = self.setup_driver()
        # One active VLUN, then two 404s so host deletion is attempted.
        effects = [
            [{'active': True, 'volumeName': self.VOLUME_3PAR_NAME,
              'lun': None, 'type': 0}],
            hpexceptions.HTTPNotFound,
            hpexceptions.HTTPNotFound]
        mock_client.queryHost.return_value = {
            'members': [{
                'name': self.FAKE_HOST
            }]
        }
        mock_client.getHostVLUNs.side_effect = effects
        expected = [
            mock.call.queryHost(wwns=['123456789012345', '123456789054321']),
            mock.call.getHostVLUNs(self.FAKE_HOST),
            mock.call.deleteVLUN(
                self.VOLUME_3PAR_NAME,
                None,
                self.FAKE_HOST),
            mock.call.getHostVLUNs(self.FAKE_HOST),
            mock.call.deleteHost(self.FAKE_HOST),
            mock.call.getHostVLUNs(self.FAKE_HOST),
            mock.call.getPorts()]
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            conn_info = self.driver.terminate_connection(self.volume,
                                                         self.connector)
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertIn('data', conn_info)
            self.assertIn('initiator_target_map', conn_info['data'])
            # Second run: deleteHost raises handled HTTPConflict errors.
            mock_client.reset_mock()
            mock_client.getHostVLUNs.side_effect = effects
            # mock some deleteHost exceptions that are handled
            delete_with_vlun = hpexceptions.HTTPConflict(
                error={'message': "has exported VLUN"})
            delete_with_hostset = hpexceptions.HTTPConflict(
                error={'message': "host is a member of a set"})
            mock_client.deleteHost = mock.Mock(
                side_effect=[delete_with_vlun, delete_with_hostset])
            conn_info = self.driver.terminate_connection(self.volume,
                                                         self.connector)
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            # Third run: clean again.
            mock_client.reset_mock()
            mock_client.getHostVLUNs.side_effect = effects
            conn_info = self.driver.terminate_connection(self.volume,
                                                         self.connector)
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
    def test_terminate_connection_more_vols(self):
        """Host with other exported volumes must not be deleted.

        With a second VLUN still attached, terminate_connection deletes
        only this volume's VLUN (no deleteHost call) and returns no
        initiator_target_map, since the host keeps using the fabric.
        """
        mock_client = self.setup_driver()
        # mock more than one vlun on the host (don't even try to remove host)
        mock_client.getHostVLUNs.return_value = \
            [
                {'active': True,
                 'volumeName': self.VOLUME_3PAR_NAME,
                 'lun': None, 'type': 0},
                {'active': True,
                 'volumeName': 'there-is-another-volume',
                 'lun': None, 'type': 0},
            ]
        mock_client.queryHost.return_value = {
            'members': [{
                'name': self.FAKE_HOST
            }]
        }
        # Note: no deleteHost and no getPorts in the expected sequence.
        expect_less = [
            mock.call.queryHost(wwns=['123456789012345', '123456789054321']),
            mock.call.getHostVLUNs(self.FAKE_HOST),
            mock.call.deleteVLUN(
                self.VOLUME_3PAR_NAME,
                None,
                self.FAKE_HOST),
            mock.call.getHostVLUNs(self.FAKE_HOST),
            mock.call.getHostVLUNs(self.FAKE_HOST)]
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            conn_info = self.driver.terminate_connection(self.volume,
                                                         self.connector)
            mock_client.assert_has_calls(
                self.standard_login +
                expect_less +
                self.standard_logout)
            self.assertNotIn('initiator_target_map', conn_info['data'])
    def test_get_volume_stats(self):
        """FC get_volume_stats reports per-pool capacity and functions.

        Verifies protocol, per-pool capacity/utilization figures derived
        from the mocked CPG data, and that the configured filter/goodness
        functions are passed through.  A second pass adds an SDGrowth
        limit to the CPG and re-checks the derived capacity numbers.
        """
        config = self.setup_configuration()
        config.filter_function = FILTER_FUNCTION
        config.goodness_function = GOODNESS_FUNCTION
        mock_client = self.setup_driver(config=config)
        mock_client.getCPG.return_value = self.cpgs[0]
        mock_client.getStorageSystemInfo.return_value = {
            'serialNumber': '1234'
        }
        mock_client.getCPGAvailableSpace.return_value = {
            "capacityEfficiency": {u'compaction': 594.4},
            "rawFreeMiB": 1024.0 * 6,
            "usableFreeMiB": 1024.0 * 3
        }
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            stats = self.driver.get_volume_stats(True)
            # MiB -> GiB conversion factor (1/1024).
            const = 0.0009765625
            self.assertEqual(stats['storage_protocol'], 'FC')
            self.assertEqual(stats['total_capacity_gb'], 0)
            self.assertEqual(stats['free_capacity_gb'], 0)
            self.assertEqual(stats['pools'][0]['total_capacity_gb'], 24.0)
            self.assertEqual(stats['pools'][0]['free_capacity_gb'], 3.0)
            self.assertEqual(stats['pools'][0]['capacity_utilization'], 87.5)
            self.assertEqual(stats['pools'][0]['total_volumes'], 3)
            self.assertEqual(stats['pools'][0]['goodness_function'],
                             GOODNESS_FUNCTION)
            self.assertEqual(stats['pools'][0]['filter_function'],
                             FILTER_FUNCTION)
            expected = [
                mock.call.getStorageSystemInfo(),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getCPGAvailableSpace(HP3PAR_CPG),
                mock.call.getCPG(HP3PAR_CPG2),
                mock.call.getCPGAvailableSpace(HP3PAR_CPG2)]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            # Second call: same mocked data, same expectations.
            stats = self.driver.get_volume_stats(True)
            self.assertEqual(stats['storage_protocol'], 'FC')
            self.assertEqual(stats['total_capacity_gb'], 0)
            self.assertEqual(stats['free_capacity_gb'], 0)
            self.assertEqual(stats['pools'][0]['total_capacity_gb'], 24.0)
            self.assertEqual(stats['pools'][0]['free_capacity_gb'], 3.0)
            self.assertEqual(stats['pools'][0]['capacity_utilization'], 87.5)
            self.assertEqual(stats['pools'][0]['total_volumes'], 3)
            self.assertEqual(stats['pools'][0]['goodness_function'],
                             GOODNESS_FUNCTION)
            self.assertEqual(stats['pools'][0]['filter_function'],
                             FILTER_FUNCTION)
            # Third pass: cap the CPG with an SDGrowth limit and verify
            # the capacity figures are derived from that limit.
            cpg2 = self.cpgs[0].copy()
            cpg2.update({'SDGrowth': {'limitMiB': 8192}})
            mock_client.getCPG.return_value = cpg2
            stats = self.driver.get_volume_stats(True)
            self.assertEqual(stats['storage_protocol'], 'FC')
            total_capacity_gb = 8192 * const
            self.assertEqual(stats['total_capacity_gb'], 0)
            self.assertEqual(stats['pools'][0]['total_capacity_gb'],
                             total_capacity_gb)
            free_capacity_gb = int(
                (8192 - (self.cpgs[0]['UsrUsage']['usedMiB'] +
                         self.cpgs[0]['SDUsage']['usedMiB'])) * const)
            self.assertEqual(stats['free_capacity_gb'], 0)
            self.assertEqual(stats['pools'][0]['free_capacity_gb'],
                             free_capacity_gb)
            cap_util = (float(total_capacity_gb - free_capacity_gb) /
                        float(total_capacity_gb)) * 100
            self.assertEqual(stats['pools'][0]['capacity_utilization'],
                             cap_util)
            self.assertEqual(stats['pools'][0]['total_volumes'], 3)
            self.assertEqual(stats['pools'][0]['goodness_function'],
                             GOODNESS_FUNCTION)
            self.assertEqual(stats['pools'][0]['filter_function'],
                             FILTER_FUNCTION)
            # NOTE(review): these hit the mock only (common.client is
            # mock_client here) — appears to be leftover fixture cleanup;
            # confirm whether it is still needed.
            common.client.deleteCPG(HP3PAR_CPG)
            common.client.createCPG(HP3PAR_CPG, {})
    def test_create_host(self):
        """_create_host (FC) creates a new host when none is found.

        getHost first raises 404 and queryHost finds no match, so the
        driver must call createHost with both connector WWNs and then
        re-read the host.
        """
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        # 404 on first lookup; second lookup returns the created host
        # with both FC paths.
        mock_client.getHost.side_effect = [
            hpexceptions.HTTPNotFound('fake'),
            {'name': self.FAKE_HOST,
                'FCPaths': [{'driverVersion': None,
                             'firmwareVersion': None,
                             'hostSpeed': 0,
                             'model': None,
                             'portPos': {'cardPort': 1, 'node': 1,
                                         'slot': 2},
                             'vendor': None,
                             'wwn': self.wwn[0]},
                            {'driverVersion': None,
                             'firmwareVersion': None,
                             'hostSpeed': 0,
                             'model': None,
                             'portPos': {'cardPort': 1, 'node': 0,
                                         'slot': 2},
                             'vendor': None,
                             'wwn': self.wwn[1]}]}]
        # No existing host matches the WWNs.
        mock_client.queryHost.return_value = None
        mock_client.getVLUN.return_value = {'lun': 186}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host = self.driver._create_host(
                common,
                self.volume,
                self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.queryHost(wwns=['123456789012345',
                                          '123456789054321']),
                mock.call.createHost(
                    self.FAKE_HOST,
                    FCWwns=['123456789012345', '123456789054321'],
                    optional={'domain': None, 'persona': 2}),
                mock.call.getHost(self.FAKE_HOST)]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], self.FAKE_HOST)
    def test_create_invalid_host(self):
        """_create_host (FC) reuses a host registered under another name.

        getHost for the expected name 404s, but queryHost matches the
        WWNs to 'fakehost.foo'; the driver must adopt that existing host
        instead of creating a new one.
        """
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            hpexceptions.HTTPNotFound('Host not found.'), {
                'name': 'fakehost.foo',
                'FCPaths': [{'wwn': '123456789012345'}, {
                    'wwn': '123456789054321'}]}]
        mock_client.queryHost.return_value = {
            'members': [{
                'name': 'fakehost.foo'
            }]
        }
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host = self.driver._create_host(
                common,
                self.volume,
                self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost('fakehost'),
                mock.call.queryHost(wwns=['123456789012345',
                                          '123456789054321']),
                mock.call.getHost('fakehost.foo')]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], 'fakehost.foo')
    def test_create_modify_host(self):
        """_create_host (FC) adds missing WWNs to an existing host.

        The host exists but has no FC paths, so the driver must call
        modifyHost with pathOperation=1 (add) and both connector WWNs.
        """
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        # First lookup: host with no FC paths; second: both paths added.
        mock_client.getHost.side_effect = [{
            'name': self.FAKE_HOST, 'FCPaths': []},
            {'name': self.FAKE_HOST,
             'FCPaths': [{'wwn': '123456789012345'}, {
                 'wwn': '123456789054321'}]}]
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host = self.driver._create_host(
                common,
                self.volume,
                self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost('fakehost'),
                mock.call.modifyHost(
                    'fakehost', {
                        'FCWWNs': ['123456789012345', '123456789054321'],
                        'pathOperation': 1}),
                mock.call.getHost('fakehost')]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], self.FAKE_HOST)
            self.assertEqual(len(host['FCPaths']), 2)
    def test_modify_host_with_new_wwn(self):
        """Only the WWN missing from the host is added via modifyHost."""
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        # Host already has one of the two connector WWNs.
        getHost_ret1 = {
            'name': self.FAKE_HOST,
            'FCPaths': [{'wwn': '123456789054321'}]}
        # After modifyHost the host holds both WWNs.
        getHost_ret2 = {
            'name': self.FAKE_HOST,
            'FCPaths': [{'wwn': '123456789012345'},
                        {'wwn': '123456789054321'}]}
        mock_client.getHost.side_effect = [getHost_ret1, getHost_ret2]
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host = self.driver._create_host(
                common,
                self.volume,
                self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost('fakehost'),
                mock.call.modifyHost(
                    'fakehost', {
                        'FCWWNs': ['123456789012345'], 'pathOperation': 1}),
                mock.call.getHost('fakehost')]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], self.FAKE_HOST)
            self.assertEqual(len(host['FCPaths']), 2)
    def test_modify_host_with_unknown_wwn_and_new_wwn(self):
        """Unknown third-party WWNs on the host are left untouched.

        The host carries an unrelated WWN ('xxxxxxxxxxxxxxx'); only the
        connector WWN missing from the host is added.
        """
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        # Host has one connector WWN plus an unknown foreign WWN.
        getHost_ret1 = {
            'name': self.FAKE_HOST,
            'FCPaths': [{'wwn': '123456789054321'},
                        {'wwn': 'xxxxxxxxxxxxxxx'}]}
        # After the modify, all three WWNs are present.
        getHost_ret2 = {
            'name': self.FAKE_HOST,
            'FCPaths': [{'wwn': '123456789012345'},
                        {'wwn': '123456789054321'},
                        {'wwn': 'xxxxxxxxxxxxxxx'}]}
        mock_client.getHost.side_effect = [getHost_ret1, getHost_ret2]
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host = self.driver._create_host(
                common,
                self.volume,
                self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost('fakehost'),
                mock.call.modifyHost(
                    'fakehost', {
                        'FCWWNs': ['123456789012345'], 'pathOperation': 1}),
                mock.call.getHost('fakehost')]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], self.FAKE_HOST)
            self.assertEqual(len(host['FCPaths']), 3)
class TestHP3PARISCSIDriver(HP3PARBaseDriver, test.TestCase):
    """Unit tests for the HP 3PAR iSCSI volume driver."""

    # iSCSI target identifiers the mocked 3PAR backend advertises.
    TARGET_IQN = 'iqn.2000-05.com.3pardata:21810002ac00383d'
    TARGET_LUN = 186

    # Baseline connection-info dict expected from initialize_connection.
    # NOTE(review): this is a shared, mutable class attribute — tests
    # should copy it before modifying to avoid cross-test pollution.
    properties = {
        'driver_volume_type': 'iscsi',
        'data':
        {'encrypted': False,
            'target_discovered': True,
            'target_iqn': TARGET_IQN,
            'target_lun': TARGET_LUN,
            'target_portal': '1.1.1.2:1234'}}
    def setup_driver(self, config=None, mock_conf=None, wsapi_version=None):
        """Build the iSCSI driver over a mocked 3PAR client.

        :param config: optional driver configuration override
        :param mock_conf: optional mock.Mock configuration dict
        :param wsapi_version: optional WSAPI version dict; defaults to
            the latest supported version
        :returns: the mock client, reset and ready for per-test setup
        """
        self.ctxt = context.get_admin_context()
        mock_client = self.setup_mock_client(
            conf=config,
            m_conf=mock_conf,
            driver=hpdriver.HP3PARISCSIDriver)
        if wsapi_version:
            mock_client.getWsApiVersion.return_value = (
                wsapi_version)
        else:
            mock_client.getWsApiVersion.return_value = (
                self.wsapi_version_latest)
        # Verify the calls made during driver setup, then clear them so
        # each test asserts only its own call sequence.
        expected_get_cpgs = [
            mock.call.getCPG(HP3PAR_CPG),
            mock.call.getCPG(HP3PAR_CPG2)]
        expected_get_ports = [mock.call.getPorts()]
        mock_client.assert_has_calls(
            self.standard_login +
            expected_get_cpgs +
            self.standard_logout +
            self.standard_login +
            expected_get_ports +
            self.standard_logout)
        mock_client.reset_mock()
        return mock_client
    def test_initialize_connection(self):
        """iSCSI initialize_connection returns the expected properties.

        First host lookup 404s, queryHost matches by IQN, and an active
        VLUN already exists, so no createVLUN call appears in the
        expected sequence.
        """
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            hpexceptions.HTTPNotFound('fake'),
            {'name': self.FAKE_HOST}]
        mock_client.queryHost.return_value = {
            'members': [{
                'name': self.FAKE_HOST
            }]
        }
        mock_client.getHostVLUNs.return_value = [
            {'active': True,
             'volumeName': self.VOLUME_3PAR_NAME,
             'lun': self.TARGET_LUN, 'type': 0}]
        mock_client.getVLUN.return_value = {
            'hostname': self.FAKE_HOST,
            'lun': self.TARGET_LUN,
            'portPos': {'node': 8, 'slot': 1, 'cardPort': 1}}
        location = ("%(volume_name)s,%(lun_id)s,%(host)s,%(nsp)s" %
                    {'volume_name': self.VOLUME_3PAR_NAME,
                     'lun_id': self.TARGET_LUN,
                     'host': self.FAKE_HOST,
                     'nsp': 'something'})
        mock_client.createVLUN.return_value = location
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            result = self.driver.initialize_connection(
                self.volume,
                self.connector)
            expected = [
                mock.call.getVolume(self.VOLUME_3PAR_NAME),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.queryHost(iqns=['iqn.1993-08.org.debian:01:222']),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.getVLUN(self.VOLUME_3PAR_NAME)]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertDictMatch(result, self.properties)
def test_initialize_connection_encrypted(self):
mock_client = self.setup_driver()
mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
mock_client.getCPG.return_value = {}
mock_client.getHost.side_effect = [
hpexceptions.HTTPNotFound('fake'),
{'name': self.FAKE_HOST}]
mock_client.queryHost.return_value = {
'members': [{
'name': self.FAKE_HOST
}]
}
mock_client.getHostVLUNs.return_value = [
{'active': True,
'volumeName': self.VOLUME_3PAR_NAME,
'lun': self.TARGET_LUN, 'type': 0}]
mock_client.getVLUN.return_value = {
'hostname': self.FAKE_HOST,
'lun': self.TARGET_LUN,
'portPos': {'node': 8, 'slot': 1, 'cardPort': 1}}
location = ("%(volume_name)s,%(lun_id)s,%(host)s,%(nsp)s" %
{'volume_name': self.VOLUME_3PAR_NAME,
'lun_id': self.TARGET_LUN,
'host': self.FAKE_HOST,
'nsp': 'something'})
mock_client.createVLUN.return_value = location
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
result = self.driver.initialize_connection(
self.volume_encrypted,
self.connector)
expected = [
mock.call.getVolume(self.VOLUME_3PAR_NAME),
mock.call.getCPG(HP3PAR_CPG),
mock.call.getHost(self.FAKE_HOST),
mock.call.queryHost(iqns=['iqn.1993-08.org.debian:01:222']),
mock.call.getHost(self.FAKE_HOST),
mock.call.getVLUN(self.VOLUME_3PAR_NAME)]
mock_client.assert_has_calls(
self.standard_login +
expected +
self.standard_logout)
expected_properties = self.properties
expected_properties['data']['encrypted'] = True
self.assertDictMatch(result, self.properties)
    def test_get_volume_stats(self):
        """iSCSI get_volume_stats reports per-pool capacity and functions.

        Mirrors the FC variant: checks protocol, per-pool capacity and
        utilization numbers from the mocked CPG data, pass-through of the
        filter/goodness functions, then re-checks after adding an
        SDGrowth limit to the CPG.
        """
        config = self.setup_configuration()
        config.filter_function = FILTER_FUNCTION
        config.goodness_function = GOODNESS_FUNCTION
        mock_client = self.setup_driver(config=config)
        mock_client.getCPG.return_value = self.cpgs[0]
        mock_client.getStorageSystemInfo.return_value = {
            'serialNumber': '1234'
        }
        mock_client.getCPGAvailableSpace.return_value = {
            "capacityEfficiency": {u'compaction': 594.4},
            "rawFreeMiB": 1024.0 * 6,
            "usableFreeMiB": 1024.0 * 3
        }
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            stats = self.driver.get_volume_stats(True)
            # MiB -> GiB conversion factor (1/1024).
            const = 0.0009765625
            self.assertEqual(stats['storage_protocol'], 'iSCSI')
            self.assertEqual(stats['total_capacity_gb'], 0)
            self.assertEqual(stats['free_capacity_gb'], 0)
            self.assertEqual(stats['pools'][0]['total_capacity_gb'], 24.0)
            self.assertEqual(stats['pools'][0]['free_capacity_gb'], 3.0)
            self.assertEqual(stats['pools'][0]['capacity_utilization'], 87.5)
            self.assertEqual(stats['pools'][0]['total_volumes'], 3)
            self.assertEqual(stats['pools'][0]['goodness_function'],
                             GOODNESS_FUNCTION)
            self.assertEqual(stats['pools'][0]['filter_function'],
                             FILTER_FUNCTION)
            expected = [
                mock.call.getStorageSystemInfo(),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getCPGAvailableSpace(HP3PAR_CPG),
                mock.call.getCPG(HP3PAR_CPG2),
                mock.call.getCPGAvailableSpace(HP3PAR_CPG2)]
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            # Second pass: CPG capped by an SDGrowth limit; capacity
            # figures must be derived from that limit.
            cpg2 = self.cpgs[0].copy()
            cpg2.update({'SDGrowth': {'limitMiB': 8192}})
            mock_client.getCPG.return_value = cpg2
            stats = self.driver.get_volume_stats(True)
            self.assertEqual(stats['storage_protocol'], 'iSCSI')
            total_capacity_gb = 8192 * const
            self.assertEqual(stats['total_capacity_gb'], 0)
            self.assertEqual(stats['pools'][0]['total_capacity_gb'],
                             total_capacity_gb)
            free_capacity_gb = int(
                (8192 - (self.cpgs[0]['UsrUsage']['usedMiB'] +
                         self.cpgs[0]['SDUsage']['usedMiB'])) * const)
            self.assertEqual(stats['free_capacity_gb'], 0)
            self.assertEqual(stats['pools'][0]['free_capacity_gb'],
                             free_capacity_gb)
            cap_util = (float(total_capacity_gb - free_capacity_gb) /
                        float(total_capacity_gb)) * 100
            self.assertEqual(stats['pools'][0]['capacity_utilization'],
                             cap_util)
            self.assertEqual(stats['pools'][0]['total_volumes'], 3)
            self.assertEqual(stats['pools'][0]['goodness_function'],
                             GOODNESS_FUNCTION)
            self.assertEqual(stats['pools'][0]['filter_function'],
                             FILTER_FUNCTION)
    def test_create_host(self):
        """_create_host (iSCSI) creates a host when none matches the IQN.

        getHost 404s and queryHost returns no match, so createHost is
        called with the connector IQN.  No CHAP is configured, so the
        returned credentials are both None.
        """
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            hpexceptions.HTTPNotFound('fake'),
            {'name': self.FAKE_HOST}]
        mock_client.queryHost.return_value = None
        mock_client.getVLUN.return_value = {'lun': self.TARGET_LUN}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host, auth_username, auth_password = self.driver._create_host(
                common, self.volume, self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.queryHost(iqns=['iqn.1993-08.org.debian:01:222']),
                mock.call.createHost(
                    self.FAKE_HOST,
                    optional={'domain': None, 'persona': 2},
                    iscsiNames=['iqn.1993-08.org.debian:01:222']),
                mock.call.getHost(self.FAKE_HOST)]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], self.FAKE_HOST)
            self.assertEqual(auth_username, None)
            self.assertEqual(auth_password, None)
    def test_create_host_chap_enabled(self):
        """With CHAP enabled, host creation also pushes CHAP credentials.

        CHAP user/password come from volume metadata; after createHost
        the driver must issue a modifyHost with the CHAP settings and
        return the credentials.
        """
        config = self.setup_configuration()
        config.hp3par_iscsi_chap_enabled = True
        mock_client = self.setup_driver(config=config)
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            hpexceptions.HTTPNotFound('fake'),
            {'name': self.FAKE_HOST}]
        mock_client.queryHost.return_value = None
        mock_client.getVLUN.return_value = {'lun': self.TARGET_LUN}
        expected_mod_request = {
            'chapOperation': mock_client.HOST_EDIT_ADD,
            'chapOperationMode': mock_client.CHAP_INITIATOR,
            'chapName': 'test-user',
            'chapSecret': 'test-pass'
        }
        def get_side_effect(*args):
            # Serve CHAP credentials from the volume metadata keys.
            data = {'value': None}
            if args[1] == CHAP_USER_KEY:
                data['value'] = 'test-user'
            elif args[1] == CHAP_PASS_KEY:
                data['value'] = 'test-pass'
            return data
        mock_client.getVolumeMetaData.side_effect = get_side_effect
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host, auth_username, auth_password = self.driver._create_host(
                common, self.volume, self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getVolumeMetaData(
                    'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_USER_KEY),
                mock.call.getVolumeMetaData(
                    'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.queryHost(iqns=['iqn.1993-08.org.debian:01:222']),
                mock.call.createHost(
                    self.FAKE_HOST,
                    optional={'domain': None, 'persona': 2},
                    iscsiNames=['iqn.1993-08.org.debian:01:222']),
                mock.call.modifyHost(
                    'fakehost',
                    expected_mod_request),
                mock.call.getHost(self.FAKE_HOST)
            ]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], self.FAKE_HOST)
            self.assertEqual(auth_username, 'test-user')
            self.assertEqual(auth_password, 'test-pass')
    def test_create_invalid_host(self):
        """_create_host (iSCSI) reuses a host registered under another name.

        getHost for the expected name 404s, but queryHost matches the
        IQN to 'fakehost.foo'; the driver adopts that host and returns
        None CHAP credentials (CHAP disabled).
        """
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            hpexceptions.HTTPNotFound('Host not found.'),
            {'name': 'fakehost.foo'}]
        mock_client.queryHost.return_value = {
            'members': [{
                'name': 'fakehost.foo'
            }]
        }
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host, auth_username, auth_password = self.driver._create_host(
                common, self.volume, self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.queryHost(iqns=['iqn.1993-08.org.debian:01:222']),
                mock.call.getHost('fakehost.foo')]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], 'fakehost.foo')
            self.assertEqual(auth_username, None)
            self.assertEqual(auth_password, None)
    def test_create_invalid_host_chap_enabled(self):
        """Adopting a differently-named host still applies CHAP settings.

        Like test_create_invalid_host, but with CHAP enabled: the driver
        must modifyHost the adopted 'fakehost.foo' with the CHAP
        credentials pulled from volume metadata.
        """
        config = self.setup_configuration()
        config.hp3par_iscsi_chap_enabled = True
        mock_client = self.setup_driver(config=config)
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            hpexceptions.HTTPNotFound('Host not found.'),
            {'name': 'fakehost.foo'}]
        mock_client.queryHost.return_value = {
            'members': [{
                'name': 'fakehost.foo'
            }]
        }
        def get_side_effect(*args):
            # Serve CHAP credentials from the volume metadata keys.
            data = {'value': None}
            if args[1] == CHAP_USER_KEY:
                data['value'] = 'test-user'
            elif args[1] == CHAP_PASS_KEY:
                data['value'] = 'test-pass'
            return data
        mock_client.getVolumeMetaData.side_effect = get_side_effect
        expected_mod_request = {
            'chapOperation': mock_client.HOST_EDIT_ADD,
            'chapOperationMode': mock_client.CHAP_INITIATOR,
            'chapName': 'test-user',
            'chapSecret': 'test-pass'
        }
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host, auth_username, auth_password = self.driver._create_host(
                common, self.volume, self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getVolumeMetaData(
                    'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_USER_KEY),
                mock.call.getVolumeMetaData(
                    'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.queryHost(iqns=['iqn.1993-08.org.debian:01:222']),
                mock.call.modifyHost(
                    'fakehost.foo',
                    expected_mod_request),
                mock.call.getHost('fakehost.foo')
            ]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], 'fakehost.foo')
            self.assertEqual(auth_username, 'test-user')
            self.assertEqual(auth_password, 'test-pass')
    def test_create_modify_host(self):
        """_create_host (iSCSI) adds the IQN to an existing host.

        The host exists without iSCSI paths, so modifyHost is called
        with pathOperation=1 (add) and the connector IQN.
        """
        mock_client = self.setup_driver()
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            {'name': self.FAKE_HOST, 'FCPaths': []},
            {'name': self.FAKE_HOST,
             'FCPaths': [{'wwn': '123456789012345'},
                         {'wwn': '123456789054321'}]}]
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host, auth_username, auth_password = self.driver._create_host(
                common, self.volume, self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.modifyHost(
                    self.FAKE_HOST,
                    {'pathOperation': 1,
                     'iSCSINames': ['iqn.1993-08.org.debian:01:222']}),
                mock.call.getHost(self.FAKE_HOST)]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], self.FAKE_HOST)
            self.assertEqual(auth_username, None)
            self.assertEqual(auth_password, None)
            self.assertEqual(len(host['FCPaths']), 2)
    def test_create_modify_host_chap_enabled(self):
        """Modifying an existing host with CHAP enabled sets credentials.

        Two modifyHost calls are expected: one adding the IQN path, one
        applying the CHAP credentials from volume metadata.
        """
        config = self.setup_configuration()
        config.hp3par_iscsi_chap_enabled = True
        mock_client = self.setup_driver(config=config)
        mock_client.getVolume.return_value = {'userCPG': HP3PAR_CPG}
        mock_client.getCPG.return_value = {}
        mock_client.getHost.side_effect = [
            {'name': self.FAKE_HOST, 'FCPaths': []},
            {'name': self.FAKE_HOST,
             'FCPaths': [{'wwn': '123456789012345'},
                         {'wwn': '123456789054321'}]}]
        def get_side_effect(*args):
            # Serve CHAP credentials from the volume metadata keys.
            data = {'value': None}
            if args[1] == CHAP_USER_KEY:
                data['value'] = 'test-user'
            elif args[1] == CHAP_PASS_KEY:
                data['value'] = 'test-pass'
            return data
        mock_client.getVolumeMetaData.side_effect = get_side_effect
        expected_mod_request = {
            'chapOperation': mock_client.HOST_EDIT_ADD,
            'chapOperationMode': mock_client.CHAP_INITIATOR,
            'chapName': 'test-user',
            'chapSecret': 'test-pass'
        }
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            host, auth_username, auth_password = self.driver._create_host(
                common, self.volume, self.connector)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getCPG(HP3PAR_CPG),
                mock.call.getVolumeMetaData(
                    'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_USER_KEY),
                mock.call.getVolumeMetaData(
                    'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY),
                mock.call.getHost(self.FAKE_HOST),
                mock.call.modifyHost(
                    self.FAKE_HOST,
                    {'pathOperation': 1,
                     'iSCSINames': ['iqn.1993-08.org.debian:01:222']}),
                mock.call.modifyHost(
                    self.FAKE_HOST,
                    expected_mod_request
                ),
                mock.call.getHost(self.FAKE_HOST)]
            mock_client.assert_has_calls(expected)
            self.assertEqual(host['name'], self.FAKE_HOST)
            self.assertEqual(auth_username, 'test-user')
            self.assertEqual(auth_password, 'test-pass')
            self.assertEqual(len(host['FCPaths']), 2)
def test_get_least_used_nsp_for_host_single(self):
mock_client = self.setup_driver()
mock_client.getPorts.return_value = PORTS_RET
mock_client.getVLUNs.return_value = VLUNS1_RET
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
iscsi_ips = ["10.10.220.253"]
self.driver.configuration.hp3par_iscsi_ips = iscsi_ips
self.driver.initialize_iscsi_ports(common)
nsp = self.driver._get_least_used_nsp_for_host(common, 'newhost')
self.assertEqual(nsp, "1:8:1")
def test_get_least_used_nsp_for_host_new(self):
mock_client = self.setup_driver()
mock_client.getPorts.return_value = PORTS_RET
mock_client.getVLUNs.return_value = VLUNS1_RET
with mock.patch.object(hpcommon.HP3PARCommon,
'_create_client') as mock_create_client:
mock_create_client.return_value = mock_client
common = self.driver._login()
iscsi_ips = ["10.10.220.252", "10.10.220.253"]
self.driver.configuration.hp3par_iscsi_ips = iscsi_ips
self.driver.initialize_iscsi_ports(common)
nsp = self.driver._get_least_used_nsp_for_host(common, 'newhost')
self.assertEqual(nsp, "1:8:2")
    def test_get_least_used_nsp_for_host_reuse(self):
        # A host that already has VLUNs keeps its existing port: per
        # VLUNS1_RET, 'foo' sits on 1:8:2 and 'bar' on 1:8:1.
        mock_client = self.setup_driver()
        mock_client.getPorts.return_value = PORTS_RET
        mock_client.getVLUNs.return_value = VLUNS1_RET
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            iscsi_ips = ["10.10.220.252", "10.10.220.253"]
            self.driver.configuration.hp3par_iscsi_ips = iscsi_ips
            self.driver.initialize_iscsi_ports(common)
            nsp = self.driver._get_least_used_nsp_for_host(common, 'foo')
            self.assertEqual(nsp, "1:8:2")
            nsp = self.driver._get_least_used_nsp_for_host(common, 'bar')
            self.assertEqual(nsp, "1:8:1")
    def test_get_least_used_nps_for_host_fc(self):
        # NOTE(review): 'nps' looks like a typo for 'nsp'; the name is kept
        # because test methods are discovered by name.
        # PORTS1_RET contains a Fibre Channel port at 0:6:3 (protocol 1);
        # it must never be selected for an iSCSI host.
        mock_client = self.setup_driver()
        mock_client.getPorts.return_value = PORTS1_RET
        mock_client.getVLUNs.return_value = VLUNS5_RET
        iscsi_ips = ["10.10.220.252", "10.10.220.253"]
        self.driver.configuration.hp3par_iscsi_ips = iscsi_ips
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            self.driver.initialize_iscsi_ports(common)
            nsp = self.driver._get_least_used_nsp_for_host(common, 'newhost')
            self.assertNotEqual(nsp, "0:6:3")
            self.assertEqual(nsp, "1:8:1")
    def test_invalid_iscsi_ip(self):
        # None of the configured IPs (.250/.251/.10) exist on the array's
        # iSCSI ports (.252/.253), so driver setup must raise InvalidInput.
        config = self.setup_configuration()
        config.hp3par_iscsi_ips = ['10.10.220.250', '10.10.220.251']
        config.iscsi_ip_address = '10.10.10.10'
        mock_conf = {
            'getPorts.return_value': {
                'members': [
                    {'portPos': {'node': 1, 'slot': 8, 'cardPort': 2},
                     'protocol': 2,
                     'IPAddr': '10.10.220.252',
                     'linkState': 4,
                     'device': [],
                     'iSCSIName': self.TARGET_IQN,
                     'mode': 2,
                     'HWAddr': '2C27D75375D2',
                     'type': 8},
                    {'portPos': {'node': 1, 'slot': 8, 'cardPort': 1},
                     'protocol': 2,
                     'IPAddr': '10.10.220.253',
                     'linkState': 4,
                     'device': [],
                     'iSCSIName': self.TARGET_IQN,
                     'mode': 2,
                     'HWAddr': '2C27D75375D6',
                     'type': 8}]}}
        self.assertRaises(exception.InvalidInput,
                          self.setup_driver,
                          config=config,
                          mock_conf=mock_conf)
    def test_get_least_used_nsp(self):
        # _get_least_used_nsp picks, among the candidate NSPs, the one with
        # the fewest active VLUNs.
        mock_client = self.setup_driver()
        # 0:2:1 appears four times, 1:8:1 once -> 1:8:1 wins.
        ports = [
            {'portPos': {'node': 1, 'slot': 8, 'cardPort': 2}, 'active': True},
            {'portPos': {'node': 1, 'slot': 8, 'cardPort': 1}, 'active': True},
            {'portPos': {'node': 1, 'slot': 8, 'cardPort': 2}, 'active': True},
            {'portPos': {'node': 0, 'slot': 2, 'cardPort': 2}, 'active': True},
            {'portPos': {'node': 0, 'slot': 2, 'cardPort': 1}, 'active': True},
            {'portPos': {'node': 0, 'slot': 2, 'cardPort': 1}, 'active': True},
            {'portPos': {'node': 0, 'slot': 2, 'cardPort': 1}, 'active': True},
            {'portPos': {'node': 0, 'slot': 2, 'cardPort': 1}, 'active': True}]
        mock_client.getVLUNs.return_value = {'members': ports}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            vluns = common.client.getVLUNs()
            nsp = self.driver._get_least_used_nsp(common, vluns['members'],
                                                  ['0:2:1', '1:8:1'])
            self.assertEqual(nsp, '1:8:1')
            # 1:2:1 appears four times, 0:2:1 five times -> 1:2:1 wins.
            ports = [
                {'portPos': {'node': 1, 'slot': 2, 'cardPort': 1},
                 'active': True},
                {'portPos': {'node': 1, 'slot': 2, 'cardPort': 1},
                 'active': True},
                {'portPos': {'node': 1, 'slot': 2, 'cardPort': 1},
                 'active': True},
                {'portPos': {'node': 1, 'slot': 2, 'cardPort': 1},
                 'active': True},
                {'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
                 'active': True},
                {'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
                 'active': True},
                {'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
                 'active': True},
                {'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
                 'active': True},
                {'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
                 'active': True}]
            mock_client.getVLUNs.return_value = {'members': ports}
            common = self.driver._login()
            vluns = common.client.getVLUNs()
            nsp = self.driver._get_least_used_nsp(common, vluns['members'],
                                                  ['0:2:1', '1:2:1'])
            self.assertEqual(nsp, '1:2:1')
            # 1:1:1 carries no VLUNs at all -> it beats 1:2:1.
            ports = [
                {'portPos': {'node': 1, 'slot': 2, 'cardPort': 1},
                 'active': True},
                {'portPos': {'node': 1, 'slot': 2, 'cardPort': 1},
                 'active': True},
                {'portPos': {'node': 1, 'slot': 2, 'cardPort': 1},
                 'active': True},
                {'portPos': {'node': 1, 'slot': 2, 'cardPort': 1},
                 'active': True},
                {'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
                 'active': True},
                {'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
                 'active': True},
                {'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
                 'active': True},
                {'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
                 'active': True},
                {'portPos': {'node': 0, 'slot': 2, 'cardPort': 1},
                 'active': True}]
            mock_client.getVLUNs.return_value = {'members': ports}
            common = self.driver._login()
            vluns = common.client.getVLUNs()
            nsp = self.driver._get_least_used_nsp(common, vluns['members'],
                                                  ['1:1:1', '1:2:1'])
            self.assertEqual(nsp, '1:1:1')
    def test_set_3par_chaps(self):
        # With CHAP disabled (default config) _set_3par_chaps must not issue
        # any backend calls.
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            expected = []
            self.driver._set_3par_chaps(
                common, 'test-host', 'test-vol', 'test-host', 'pass')
            mock_client.assert_has_calls(expected)
        # With hp3par_iscsi_chap_enabled the host must be modified with the
        # initiator CHAP name/secret.
        config = self.setup_configuration()
        config.hp3par_iscsi_chap_enabled = True
        mock_client = self.setup_driver(config=config)
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            expected_mod_request = {
                'chapOperation': mock_client.HOST_EDIT_ADD,
                'chapOperationMode': mock_client.CHAP_INITIATOR,
                'chapName': 'test-host',
                'chapSecret': 'fake'
            }
            expected = [
                mock.call.modifyHost('test-host', expected_mod_request)
            ]
            self.driver._set_3par_chaps(
                common, 'test-host', 'test-vol', 'test-host', 'fake')
            mock_client.assert_has_calls(expected)
    @mock.patch('cinder.volume.utils.generate_password')
    def test_do_export(self, mock_utils):
        # CHAP disabled: _do_export touches no volume metadata and returns a
        # model with provider_auth set to None.
        mock_client = self.setup_driver()
        volume = {'host': 'test-host@3pariscsi',
                  'id': 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'}
        mock_utils.return_value = 'random-pass'
        mock_client.getHostVLUNs.return_value = [
            {'active': True,
             'volumeName': self.VOLUME_3PAR_NAME,
             'lun': None, 'type': 0,
             'remoteName': 'iqn.1993-08.org.debian:01:222'}
        ]
        mock_client.getHost.return_value = {
            'name': 'osv-0DM4qZEVSKON-DXN-NwVpw',
            'initiatorChapEnabled': True
        }
        mock_client.getVolumeMetaData.return_value = {
            'value': 'random-pass'
        }
        expected = []
        expected_model = {'provider_auth': None}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            model = self.driver._do_export(common, volume)
            mock_client.assert_has_calls(expected)
            self.assertEqual(expected_model, model)
        mock_client.reset_mock()
        # CHAP enabled: the CHAP user/password are persisted as volume
        # metadata and reported back through provider_auth.
        config = self.setup_configuration()
        config.hp3par_iscsi_chap_enabled = True
        mock_client = self.setup_driver(config=config)
        volume = {'host': 'test-host@3pariscsi',
                  'id': 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'}
        mock_utils.return_value = 'random-pass'
        mock_client.getHostVLUNs.return_value = [
            {'active': True,
             'volumeName': self.VOLUME_3PAR_NAME,
             'lun': None, 'type': 0,
             'remoteName': 'iqn.1993-08.org.debian:01:222'}
        ]
        mock_client.getHost.return_value = {
            'name': 'osv-0DM4qZEVSKON-DXN-NwVpw',
            'initiatorChapEnabled': True
        }
        mock_client.getVolumeMetaData.return_value = {
            'value': 'random-pass'
        }
        expected = [
            mock.call.getHostVLUNs('test-host'),
            mock.call.getHost('test-host'),
            mock.call.getVolumeMetaData(
                'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY),
            mock.call.setVolumeMetaData(
                'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_USER_KEY, 'test-host'),
            mock.call.setVolumeMetaData(
                'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY, 'random-pass')
        ]
        expected_model = {'provider_auth': 'CHAP test-host random-pass'}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            model = self.driver._do_export(common, volume)
            mock_client.assert_has_calls(expected)
            self.assertEqual(expected_model, model)
    @mock.patch('cinder.volume.utils.generate_password')
    def test_do_export_host_not_found(self, mock_utils):
        # When getHostVLUNs raises HTTPNotFound (host not created yet), CHAP
        # credentials must still be generated and stored on the volume.
        config = self.setup_configuration()
        config.hp3par_iscsi_chap_enabled = True
        mock_client = self.setup_driver(config=config)
        volume = {'host': 'test-host@3pariscsi',
                  'id': 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'}
        mock_utils.return_value = "random-pass"
        mock_client.getHostVLUNs.side_effect = hpexceptions.HTTPNotFound(
            'fake')
        mock_client.getVolumeMetaData.return_value = {
            'value': 'random-pass'
        }
        expected = [
            mock.call.getHostVLUNs('test-host'),
            mock.call.setVolumeMetaData(
                'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_USER_KEY, 'test-host'),
            mock.call.setVolumeMetaData(
                'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY, 'random-pass')
        ]
        expected_model = {'provider_auth': 'CHAP test-host random-pass'}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            model = self.driver._do_export(common, volume)
            mock_client.assert_has_calls(expected)
            self.assertEqual(expected_model, model)
    @mock.patch('cinder.volume.utils.generate_password')
    def test_do_export_host_chap_disabled(self, mock_utils):
        # Even when the existing backend host reports initiatorChapEnabled
        # False, fresh CHAP credentials are stored and returned.
        config = self.setup_configuration()
        config.hp3par_iscsi_chap_enabled = True
        mock_client = self.setup_driver(config=config)
        volume = {'host': 'test-host@3pariscsi',
                  'id': 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'}
        mock_utils.return_value = 'random-pass'
        mock_client.getHostVLUNs.return_value = [
            {'active': True,
             'volumeName': self.VOLUME_3PAR_NAME,
             'lun': None, 'type': 0,
             'remoteName': 'iqn.1993-08.org.debian:01:222'}
        ]
        mock_client.getHost.return_value = {
            'name': 'fake-host',
            'initiatorChapEnabled': False
        }
        mock_client.getVolumeMetaData.return_value = {
            'value': 'random-pass'
        }
        expected = [
            mock.call.getHostVLUNs('test-host'),
            mock.call.getHost('test-host'),
            mock.call.getVolumeMetaData(
                'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY),
            mock.call.setVolumeMetaData(
                'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_USER_KEY, 'test-host'),
            mock.call.setVolumeMetaData(
                'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY, 'random-pass')
        ]
        expected_model = {'provider_auth': 'CHAP test-host random-pass'}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            model = self.driver._do_export(common, volume)
            mock_client.assert_has_calls(expected)
            self.assertEqual(expected_model, model)
    @mock.patch('cinder.volume.utils.generate_password')
    def test_do_export_no_active_vluns(self, mock_utils):
        # With only inactive VLUNs the stored CHAP password is not read back
        # (no getVolumeMetaData in 'expected'); a new one is generated and set.
        config = self.setup_configuration()
        config.hp3par_iscsi_chap_enabled = True
        mock_client = self.setup_driver(config=config)
        volume = {'host': 'test-host@3pariscsi',
                  'id': 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'}
        mock_utils.return_value = "random-pass"
        mock_client.getHostVLUNs.return_value = [
            {'active': False,
             'volumeName': self.VOLUME_3PAR_NAME,
             'lun': None, 'type': 0,
             'remoteName': 'iqn.1993-08.org.debian:01:222'}
        ]
        mock_client.getHost.return_value = {
            'name': 'fake-host',
            'initiatorChapEnabled': True
        }
        mock_client.getVolumeMetaData.return_value = {
            'value': 'random-pass'
        }
        expected = [
            mock.call.getHostVLUNs('test-host'),
            mock.call.getHost('test-host'),
            mock.call.setVolumeMetaData(
                'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_USER_KEY, 'test-host'),
            mock.call.setVolumeMetaData(
                'osv-0DM4qZEVSKON-DXN-NwVpw', CHAP_PASS_KEY, 'random-pass')
        ]
        expected_model = {'provider_auth': 'CHAP test-host random-pass'}
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            model = self.driver._do_export(common, volume)
            mock_client.assert_has_calls(expected)
            self.assertEqual(model, expected_model)
    def test_ensure_export(self):
        # ensure_export with no stored CHAP metadata -> provider_auth is None;
        # with stored CHAP user/pass metadata -> provider_auth is rebuilt.
        mock_client = self.setup_driver()
        volume = {'host': 'test-host@3pariscsi',
                  'id': 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'}
        mock_client.getAllVolumeMetaData.return_value = {
            'total': 0,
            'members': []
        }
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            model = self.driver.ensure_export(None, volume)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getAllVolumeMetaData('osv-0DM4qZEVSKON-DXN-NwVpw')
            ]
            expected_model = {'provider_auth': None}
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertEqual(model, expected_model)
            # Second pass: the volume now carries CHAP metadata.
            mock_client.getAllVolumeMetaData.return_value = {
                'total': 2,
                'members': [
                    {
                        'creationTimeSec': 1406074222,
                        'value': 'fake-host',
                        'key': CHAP_USER_KEY,
                        'creationTime8601': '2014-07-22T17:10:22-07:00'
                    },
                    {
                        'creationTimeSec': 1406074222,
                        'value': 'random-pass',
                        'key': CHAP_PASS_KEY,
                        'creationTime8601': '2014-07-22T17:10:22-07:00'
                    }
                ]
            }
            model = self.driver.ensure_export(None, volume)
            expected = [
                mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw'),
                mock.call.getAllVolumeMetaData('osv-0DM4qZEVSKON-DXN-NwVpw')
            ]
            expected_model = {'provider_auth': "CHAP fake-host random-pass"}
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertEqual(model, expected_model)
    def test_ensure_export_missing_volume(self):
        # If the backend volume no longer exists (getVolume -> HTTPNotFound),
        # ensure_export returns None and performs no further calls.
        mock_client = self.setup_driver()
        volume = {'host': 'test-host@3pariscsi',
                  'id': 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'}
        mock_client.getVolume.side_effect = hpexceptions.HTTPNotFound(
            'fake')
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            model = self.driver.ensure_export(None, volume)
            expected = [mock.call.getVolume('osv-0DM4qZEVSKON-DXN-NwVpw')]
            expected_model = None
            mock_client.assert_has_calls(
                self.standard_login +
                expected +
                self.standard_logout)
            self.assertEqual(model, expected_model)
    @mock.patch.object(volume_types, 'get_volume_type')
    def test_get_volume_settings_default_pool(self, _mock_volume_types):
        # A volume type with no extra specs falls back to the pool encoded in
        # the volume's host string ('...#pool_foo') for its CPG.
        _mock_volume_types.return_value = {
            'name': 'gold',
            'id': 'gold-id',
            'extra_specs': {}}
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            volume = {'host': 'test-host@3pariscsi#pool_foo',
                      'id': 'd03338a9-9115-48a3-8dfc-35cdfcdc15a7'}
            pool = volume_utils.extract_host(volume['host'], 'pool')
            model = common.get_volume_settings_from_type_id('gold-id', pool)
            self.assertEqual(model['cpg'], 'pool_foo')
    def test_get_model_update(self):
        # _get_model_update replaces the pool part of 'host@backend#pool'
        # with the given CPG name.
        mock_client = self.setup_driver()
        with mock.patch.object(hpcommon.HP3PARCommon,
                               '_create_client') as mock_create_client:
            mock_create_client.return_value = mock_client
            common = self.driver._login()
            model_update = common._get_model_update('xxx@yyy#zzz', 'CPG')
            self.assertEqual(model_update, {'host': 'xxx@yyy#CPG'})
# ---------------------------------------------------------------------------
# Canned 3PAR REST payloads shared by the NSP-selection tests above.
# ---------------------------------------------------------------------------
# Two active VLUNs, one on port 0:8:2 and one on 1:8:1.
VLUNS5_RET = {
    'members': [
        {'portPos': {'node': 0, 'slot': 8, 'cardPort': 2}, 'active': True},
        {'portPos': {'node': 1, 'slot': 8, 'cardPort': 1}, 'active': True},
    ],
}
# Two iSCSI target ports: 10.10.220.252 on 1:8:2 and 10.10.220.253 on 1:8:1.
PORTS_RET = {
    'members': [
        {
            'portPos': {'node': 1, 'slot': 8, 'cardPort': 2},
            'protocol': 2,
            'IPAddr': '10.10.220.252',
            'linkState': 4,
            'device': [],
            'iSCSIName': 'iqn.2000-05.com.3pardata:21820002ac00383d',
            'mode': 2,
            'HWAddr': '2C27D75375D2',
            'type': 8,
        },
        {
            'portPos': {'node': 1, 'slot': 8, 'cardPort': 1},
            'protocol': 2,
            'IPAddr': '10.10.220.253',
            'linkState': 4,
            'device': [],
            'iSCSIName': 'iqn.2000-05.com.3pardata:21810002ac00383d',
            'mode': 2,
            'HWAddr': '2C27D75375D6',
            'type': 8,
        },
    ],
}
# Host 'foo' has one VLUN on 1:8:2; host 'bar' has three on 1:8:1.
VLUNS1_RET = {
    'members': [
        {'portPos': {'node': 1, 'slot': 8, 'cardPort': 2},
         'hostname': 'foo', 'active': True},
        {'portPos': {'node': 1, 'slot': 8, 'cardPort': 1},
         'hostname': 'bar', 'active': True},
        {'portPos': {'node': 1, 'slot': 8, 'cardPort': 1},
         'hostname': 'bar', 'active': True},
        {'portPos': {'node': 1, 'slot': 8, 'cardPort': 1},
         'hostname': 'bar', 'active': True},
    ],
}
# Like PORTS_RET but with a Fibre Channel port (protocol 1) at 0:6:3, which
# must never be selected for iSCSI traffic.
PORTS1_RET = {
    'members': [
        {
            'portPos': {'node': 0, 'slot': 8, 'cardPort': 2},
            'protocol': 2,
            'IPAddr': '10.10.120.252',
            'linkState': 4,
            'device': [],
            'iSCSIName': 'iqn.2000-05.com.3pardata:21820002ac00383d',
            'mode': 2,
            'HWAddr': '2C27D75375D2',
            'type': 8,
        },
        {
            'portPos': {'node': 1, 'slot': 8, 'cardPort': 1},
            'protocol': 2,
            'IPAddr': '10.10.220.253',
            'linkState': 4,
            'device': [],
            'iSCSIName': 'iqn.2000-05.com.3pardata:21810002ac00383d',
            'mode': 2,
            'HWAddr': '2C27D75375D6',
            'type': 8,
        },
        {
            'portWWN': '20210002AC00383D',
            'protocol': 1,
            'linkState': 4,
            'mode': 2,
            'device': ['cage2'],
            'nodeWWN': '20210002AC00383D',
            'type': 2,
            'portPos': {'node': 0, 'slot': 6, 'cardPort': 3},
        },
    ],
}
| true | true |
f7249cfe31de6802563053234016e283c91f0986 | 878 | py | Python | tests/conftest.py | lycantropos/bentley_ottmann | 988075aada80e5d5c8d53d513de130004b69c3b9 | [
"MIT"
] | 13 | 2020-04-03T04:43:44.000Z | 2022-01-18T10:40:40.000Z | tests/conftest.py | lycantropos/bentley_ottmann | 988075aada80e5d5c8d53d513de130004b69c3b9 | [
"MIT"
] | 19 | 2020-01-31T05:25:42.000Z | 2021-04-01T13:20:05.000Z | tests/conftest.py | lycantropos/bentley_ottmann | 988075aada80e5d5c8d53d513de130004b69c3b9 | [
"MIT"
] | 3 | 2020-06-08T11:15:32.000Z | 2021-02-15T12:37:01.000Z | import os
import platform
import pytest
from ground.base import (Context,
get_context)
from hypothesis import (HealthCheck,
settings)
# Azure Pipelines agents set the TF_BUILD environment variable.
on_azure_pipelines = bool(os.getenv('TF_BUILD', False))
is_pypy = platform.python_implementation() == 'PyPy'
# On CI the Hypothesis example budget is reduced (divided by 4 on PyPy, kept
# as-is on CPython only when not on CI); deadlines are disabled and the
# filter_too_much/too_slow health checks are suppressed so slow strategies do
# not fail the build.
settings.register_profile('default',
                          deadline=None,
                          max_examples=(settings.default.max_examples
                                        // (1 + 3 * is_pypy)
                                        if on_azure_pipelines
                                        else settings.default.max_examples),
                          suppress_health_check=[HealthCheck.filter_too_much,
                                                 HealthCheck.too_slow])
@pytest.fixture(scope='session')
def context() -> Context:
    """Session-wide ``ground`` context shared by all tests."""
    return get_context()
| 35.12 | 77 | 0.525057 | import os
import platform
import pytest
from ground.base import (Context,
get_context)
from hypothesis import (HealthCheck,
settings)
on_azure_pipelines = bool(os.getenv('TF_BUILD', False))
is_pypy = platform.python_implementation() == 'PyPy'
settings.register_profile('default',
deadline=None,
max_examples=(settings.default.max_examples
// (1 + 3 * is_pypy)
if on_azure_pipelines
else settings.default.max_examples),
suppress_health_check=[HealthCheck.filter_too_much,
HealthCheck.too_slow])
@pytest.fixture(scope='session')
def context() -> Context:
return get_context()
| true | true |
f7249ecb6643e8a4d8abef519c48499b5a5bb0e4 | 630 | py | Python | Django/Video_Project/Day05/SqlToModel/manage.py | pyforspider/LearningLog | ac5988d7fbb0d07d6e7485f9050250af5bcba089 | [
"MIT"
] | null | null | null | Django/Video_Project/Day05/SqlToModel/manage.py | pyforspider/LearningLog | ac5988d7fbb0d07d6e7485f9050250af5bcba089 | [
"MIT"
] | 18 | 2020-02-12T01:18:12.000Z | 2022-03-12T00:42:15.000Z | Django/Video_Project/Day05/SqlToModel/manage.py | pyforspider/LearningLog | ac5988d7fbb0d07d6e7485f9050250af5bcba089 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run administrative tasks for the 'SqlToModel' Django project."""
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'SqlToModel.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
# Standard Django entry point: dispatch to the management command runner.
if __name__ == '__main__':
    main()
| 28.636364 | 74 | 0.684127 |
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'SqlToModel.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| true | true |
f7249f59bdaa349ad040c4306eb1c2ca214840ed | 5,504 | py | Python | accounts/tests.py | mgovoni-devel/MatD3 | 5b68d147f886bce427f92bb560159e62cec2d4e7 | [
"BSD-2-Clause-FreeBSD"
] | 7 | 2019-09-14T07:24:09.000Z | 2021-06-15T16:15:05.000Z | accounts/tests.py | mgovoni-devel/MatD3 | 5b68d147f886bce427f92bb560159e62cec2d4e7 | [
"BSD-2-Clause-FreeBSD"
] | 14 | 2019-12-05T01:49:19.000Z | 2021-06-23T18:34:51.000Z | accounts/tests.py | mgovoni-devel/MatD3 | 5b68d147f886bce427f92bb560159e62cec2d4e7 | [
"BSD-2-Clause-FreeBSD"
] | 2 | 2019-11-06T21:16:57.000Z | 2019-11-30T10:51:44.000Z | # This file is covered by the BSD license. See LICENSE in the root directory.
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.core import mail
from django.test import TestCase
from django.urls import reverse
from rest_framework import status
User = get_user_model()
# Shared fixture values used by the account test cases below.
USERNAME = 'testuser'
PASSWORD = '28&}>z1-%ZY|0ATwGU+7I!F7pJ:+(E'
FIRSTNAME = 'first'
LASTNAME = 'last'
EMAIL = 'mail@example.com'
DESCRIPTION = 'description'
INSTITUTION = 'institution'
WEBSITE = 'http://example.com'
class UserCreationTestCase(TestCase):
    """Registration, activation, profile editing and password changes."""
    @classmethod
    def setUpTestData(cls):
        # A superuser must exist (it is e.g. the recipient of admin mail on
        # registration) along with the 'users' group new accounts join.
        User.objects.create(
            username='superuser', email=EMAIL, is_superuser=True)
        Group.objects.create(name='users')
    def test_success(self):
        # Register, then follow the activation link from the confirmation
        # email; the account becomes active but not staff.
        response = self.client.post(reverse('accounts:register'), {
            'username': USERNAME,
            'email': EMAIL,
            'password1': PASSWORD,
            'password2': PASSWORD,
        })
        self.assertFalse(User.objects.last().is_active)
        self.assertContains(response, 'Confirmation email has been sent.')
        # Extract the first http(s) link from the email body.
        for line in mail.outbox[0].body.splitlines():
            line_stripped = line.lstrip()
            if line_stripped.startswith('http'):
                activation_url = line_stripped
                break
        response = self.client.get(activation_url, follow=True)
        self.assertRedirects(response, reverse('accounts:profile'))
        self.assertContains(response, 'Account confirmed.')
        self.assertTrue(User.objects.last().is_active)
        self.assertFalse(User.objects.last().is_staff)
        self.assertEqual(len(mail.outbox), 2)
    def test_no_email_or_username(self):
        # Both username and email are required; no new user may be created.
        response = self.client.post(reverse('accounts:register'), {
            'username': USERNAME, 'password1': PASSWORD, 'password2': PASSWORD,
        })
        self.assertContains(response, 'This field is required')
        response = self.client.post(reverse('accounts:register'), {
            'email': EMAIL, 'password1': PASSWORD, 'password2': PASSWORD,
        })
        self.assertContains(response, 'This field is required')
        self.assertEqual(User.objects.count(), 1)
    def test_incorrect_activation(self):
        # A bogus uid/token pair must be rejected.
        uid = 'MMM'
        token = '00a-'+20*'0'
        response = self.client.get(
            reverse('accounts:activate', kwargs={'uid': uid, 'token': token}),
            follow=True)
        self.assertContains(response, 'Activation link is invalid!')
    def test_user_profile(self):
        # The profile page requires login, then accepts profile updates.
        user = User.objects.create(username=USERNAME, email=EMAIL)
        user.set_password(PASSWORD)
        user.save()
        response = self.client.get(reverse('accounts:profile'))
        self.assertEqual(response.status_code, status.HTTP_302_FOUND)
        self.client.force_login(user)
        response = self.client.get(reverse('accounts:profile'))
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        response = self.client.post(reverse('accounts:profile'), {
            'first_name': FIRSTNAME,
            'last_name': LASTNAME,
            'email': EMAIL,
            'description': DESCRIPTION,
            'institution': INSTITUTION,
            'website': WEBSITE,
        }, follow=True)
        user = User.objects.last()
        self.assertEqual(user.first_name, FIRSTNAME)
        self.assertEqual(user.last_name, LASTNAME)
        self.assertEqual(user.userprofile.description, DESCRIPTION)
        self.assertEqual(user.userprofile.institution, INSTITUTION)
        self.assertEqual(user.userprofile.website, WEBSITE)
    def test_change_password(self):
        # Anonymous users are redirected; a wrong/missing new password is
        # rejected; a matching pair succeeds.
        response = self.client.post(reverse('accounts:change_password'))
        self.assertEqual(response.status_code, status.HTTP_302_FOUND)
        user = User.objects.first()
        user.set_password(PASSWORD)
        user.save()
        self.client.force_login(user)
        response = self.client.post(reverse('accounts:change_password'),
                                    {'old_password': PASSWORD})
        self.assertContains(response,
                            'Incorrect password or new passwords not matching')
        response = self.client.post(reverse('accounts:change_password'), {
            'old_password': PASSWORD,
            'new_password1': PASSWORD,
            'new_password2': PASSWORD,
        }, follow=True)
        self.assertNotContains(
            response, 'Incorrect password or new passwords not matching')
        self.assertContains(response, 'Password successfully changed')
class TemplateTestCase(TestCase):
    """Navigation buttons rendered for anonymous, regular and staff users."""
    @classmethod
    def setUpTestData(cls):
        User.objects.create(
            username='superuser', email=EMAIL, is_superuser=True)
        cls.user = User.objects.create(
            username=USERNAME, is_active=True)
    def test_buttons(self):
        # Anonymous -> 'Register'; logged in -> 'Profile' but no 'Add Data';
        # staff -> 'Add Data' appears.
        response = self.client.get('')
        self.assertContains(response, 'Register')
        self.client.force_login(self.user)
        response = self.client.get('')
        self.assertContains(response, 'Profile')
        self.assertNotContains(response, 'Add Data')
        self.user.is_staff = True
        self.user.save()
        response = self.client.get('')
        self.assertContains(response, 'Add Data')
class AnonymousUserTestCase(TestCase):
    """Pages that require authentication redirect anonymous visitors."""
    def test_load_pages(self):
        # An unauthenticated GET of the profile page must answer with 302.
        profile_url = reverse('accounts:profile')
        response = self.client.get(profile_url)
        self.assertEqual(response.status_code, status.HTTP_302_FOUND)
| 40.175182 | 79 | 0.649891 |
from django.contrib.auth import get_user_model
from django.contrib.auth.models import Group
from django.core import mail
from django.test import TestCase
from django.urls import reverse
from rest_framework import status
User = get_user_model()
USERNAME = 'testuser'
PASSWORD = '28&}>z1-%ZY|0ATwGU+7I!F7pJ:+(E'
FIRSTNAME = 'first'
LASTNAME = 'last'
EMAIL = 'mail@example.com'
DESCRIPTION = 'description'
INSTITUTION = 'institution'
WEBSITE = 'http://example.com'
class UserCreationTestCase(TestCase):
@classmethod
def setUpTestData(cls):
User.objects.create(
username='superuser', email=EMAIL, is_superuser=True)
Group.objects.create(name='users')
def test_success(self):
response = self.client.post(reverse('accounts:register'), {
'username': USERNAME,
'email': EMAIL,
'password1': PASSWORD,
'password2': PASSWORD,
})
self.assertFalse(User.objects.last().is_active)
self.assertContains(response, 'Confirmation email has been sent.')
for line in mail.outbox[0].body.splitlines():
line_stripped = line.lstrip()
if line_stripped.startswith('http'):
activation_url = line_stripped
break
response = self.client.get(activation_url, follow=True)
self.assertRedirects(response, reverse('accounts:profile'))
self.assertContains(response, 'Account confirmed.')
self.assertTrue(User.objects.last().is_active)
self.assertFalse(User.objects.last().is_staff)
self.assertEqual(len(mail.outbox), 2)
def test_no_email_or_username(self):
response = self.client.post(reverse('accounts:register'), {
'username': USERNAME, 'password1': PASSWORD, 'password2': PASSWORD,
})
self.assertContains(response, 'This field is required')
response = self.client.post(reverse('accounts:register'), {
'email': EMAIL, 'password1': PASSWORD, 'password2': PASSWORD,
})
self.assertContains(response, 'This field is required')
self.assertEqual(User.objects.count(), 1)
def test_incorrect_activation(self):
uid = 'MMM'
token = '00a-'+20*'0'
response = self.client.get(
reverse('accounts:activate', kwargs={'uid': uid, 'token': token}),
follow=True)
self.assertContains(response, 'Activation link is invalid!')
def test_user_profile(self):
user = User.objects.create(username=USERNAME, email=EMAIL)
user.set_password(PASSWORD)
user.save()
response = self.client.get(reverse('accounts:profile'))
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.client.force_login(user)
response = self.client.get(reverse('accounts:profile'))
self.assertEqual(response.status_code, status.HTTP_200_OK)
response = self.client.post(reverse('accounts:profile'), {
'first_name': FIRSTNAME,
'last_name': LASTNAME,
'email': EMAIL,
'description': DESCRIPTION,
'institution': INSTITUTION,
'website': WEBSITE,
}, follow=True)
user = User.objects.last()
self.assertEqual(user.first_name, FIRSTNAME)
self.assertEqual(user.last_name, LASTNAME)
self.assertEqual(user.userprofile.description, DESCRIPTION)
self.assertEqual(user.userprofile.institution, INSTITUTION)
self.assertEqual(user.userprofile.website, WEBSITE)
def test_change_password(self):
response = self.client.post(reverse('accounts:change_password'))
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
user = User.objects.first()
user.set_password(PASSWORD)
user.save()
self.client.force_login(user)
response = self.client.post(reverse('accounts:change_password'),
{'old_password': PASSWORD})
self.assertContains(response,
'Incorrect password or new passwords not matching')
response = self.client.post(reverse('accounts:change_password'), {
'old_password': PASSWORD,
'new_password1': PASSWORD,
'new_password2': PASSWORD,
}, follow=True)
self.assertNotContains(
response, 'Incorrect password or new passwords not matching')
self.assertContains(response, 'Password successfully changed')
class TemplateTestCase(TestCase):
@classmethod
def setUpTestData(cls):
User.objects.create(
username='superuser', email=EMAIL, is_superuser=True)
cls.user = User.objects.create(
username=USERNAME, is_active=True)
def test_buttons(self):
response = self.client.get('')
self.assertContains(response, 'Register')
self.client.force_login(self.user)
response = self.client.get('')
self.assertContains(response, 'Profile')
self.assertNotContains(response, 'Add Data')
self.user.is_staff = True
self.user.save()
response = self.client.get('')
self.assertContains(response, 'Add Data')
class AnonymousUserTestCase(TestCase):
def test_load_pages(self):
response = self.client.get(reverse('accounts:profile'))
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
| true | true |
f724a0579c82ae147fc51ad5b6508680a848d799 | 27,245 | py | Python | app/lib/dns/import_manager.py | grepleria/SnitchDNS | 24f98b01fd5fca9aa2c660d6ee15742f2e44915c | [
"MIT"
] | 152 | 2020-12-07T13:26:53.000Z | 2022-03-23T02:00:04.000Z | app/lib/dns/import_manager.py | grepleria/SnitchDNS | 24f98b01fd5fca9aa2c660d6ee15742f2e44915c | [
"MIT"
] | 16 | 2020-12-07T17:04:36.000Z | 2022-03-10T11:12:52.000Z | app/lib/dns/import_manager.py | grepleria/SnitchDNS | 24f98b01fd5fca9aa2c660d6ee15742f2e44915c | [
"MIT"
] | 36 | 2020-12-09T13:04:40.000Z | 2022-03-12T18:14:36.000Z | from app.lib.dns.helpers.shared import SharedHelper
import os
import datetime
import json
import progressbar
from app import db
class DNSImportManager(SharedHelper):
IMPORT_TYPE_ZONE = 1
IMPORT_TYPE_RECORD = 2
    @property
    def last_error(self):
        """Message describing the most recent failure; '' when none."""
        return self.__last_error
    @last_error.setter
    def last_error(self, value):
        # Set by the validation/identification methods to report problems.
        self.__last_error = value
    def __init__(self, dns_zones, dns_records, users):
        # dns_zones / dns_records: presumably the zone and record managers;
        # users must expose get_user(user_id) (used in review()) - confirm
        # with callers.
        self.__last_error = ''
        self.__dns_zones = dns_zones
        self.__dns_records = dns_records
        # Required CSV columns for a ZONE export file.
        self.__zone_headers = ['domain', 'active', 'catch_all', 'forwarding', 'regex', 'master', 'tags']
        # Required CSV columns for a RECORD export file.
        self.__record_headers = ['domain', 'id', 'ttl', 'cls', 'type', 'active', 'data', 'is_conditional', 'conditional_count', 'conditional_limit', 'conditional_reset', 'conditional_data']
        self.__users = users
def identify(self, csvfile):
self.last_error = ''
if not os.path.isfile(csvfile):
self.last_error = 'CSV file does not exist'
return False
header = self._load_csv_header(csvfile)
zone_header_count = 0
record_header_count = 0
for column in header:
if column in self.__zone_headers:
zone_header_count += 1
if column in self.__record_headers:
record_header_count += 1
if zone_header_count == len(self.__zone_headers):
return self.IMPORT_TYPE_ZONE
elif record_header_count == len(self.__record_headers):
return self.IMPORT_TYPE_RECORD
self.last_error = 'If you are uploading a ZONE file these are the required columns: {0}. If you are uploading a RECORD file then the required columns are: {1}.'.format(', '.join(self.__zone_headers), ', '.join(self.__record_headers))
return False
    def review(self, csvfile, type, user_id, show_progressbar=False):
        """Parse and validate a CSV without writing anything to the database.

        Returns {'data': normalised_rows, 'errors': [{'row': n, 'error': msg}, ...]}
        on success, or False (with self.last_error set) on a fatal problem:
        missing file, empty CSV, or unknown user.
        """
        self.last_error = ''
        if not os.path.isfile(csvfile):
            self.last_error = 'CSV file does not exist'
            return False
        lines = self._load_csv(csvfile)
        if len(lines) == 0:
            self.last_error = 'CSV is empty'
            return False
        user = self.__users.get_user(user_id)
        if not user:
            self.last_error = 'Could not find user with ID {0}'.format(user_id)
            return False
        all_errors = []
        errors = []
        rows = []
        if type == self.IMPORT_TYPE_ZONE:
            rows = self.__categorise_rows(lines, type)
            rows, errors = self.__process_zones(rows, user, show_progressbar=show_progressbar)
        elif type == self.IMPORT_TYPE_RECORD:
            rows = self.__categorise_rows(lines, type)
            rows, errors = self.__process_records(rows, user, show_progressbar=show_progressbar)
        all_errors += errors
        # Sort errors per row number.
        all_errors = sorted(all_errors, key=lambda k: k['row'])
        return {
            'data': rows,
            'errors': all_errors
        }
    def run(self, data, type, user_id, show_progressbar=False):
        """Execute a previously reviewed import.

        Returns True on success, otherwise the list of error strings. Note:
        only the record path appends to `errors` here; the zone path always
        results in True.
        """
        errors = []
        if type == self.IMPORT_TYPE_ZONE:
            self.__import_zones(data, user_id, show_progressbar=show_progressbar)
        elif type == self.IMPORT_TYPE_RECORD:
            self.__import_records(data, user_id, errors, show_progressbar=show_progressbar)
        return errors if len(errors) > 0 else True
    def __import_zones(self, zones, user_id, show_progressbar=False, batch_size=100):
        """
        Bulk-insert/update zones, then re-map ids and rebuild tags.

        Heavily optimised for very large imports (250k+ domains): rows are
        written with autocommit=False and committed every `batch_size` rows.
        Assumes all validation has already been completed by review().
        """
        widget = [
            progressbar.FormatLabel(''),
            ' ',
            progressbar.Percentage(),
            ' ',
            progressbar.Bar('#'),
            ' ',
            progressbar.RotatingMarker(),
            ' ',
            progressbar.ETA()
        ]
        count = 0
        unique_tags = []
        if show_progressbar:
            widget[0] = progressbar.FormatLabel('Importing zones')
            bar = progressbar.ProgressBar(max_value=len(zones), widgets=widget)
        # Pass 1: write all zones in batches, collecting the distinct tag names.
        for zone_to_import in list(zones):
            count += 1
            bar.update(count) if show_progressbar else False
            self.__zone_update_or_create(
                zone_to_import['domain'],
                zone_to_import['active'],
                zone_to_import['catch_all'],
                zone_to_import['forwarding'],
                zone_to_import['regex'],
                zone_to_import['master'],
                user_id,
                id=zone_to_import['id'],
                autocommit=False
            )
            if count % batch_size == 0:
                db.session.commit()
            unique_tags = list(set(unique_tags + zone_to_import['tags']))
        db.session.commit()
        if show_progressbar:
            widget[0] = progressbar.FormatLabel('Re-mapping zones')
            bar = progressbar.ProgressBar(max_value=len(zones), widgets=widget)
        # Pass 2: newly inserted zones received ids from the DB - reload the
        # domain -> id mapping and backfill each in-memory row's id.
        domain_mapping = self.__get_domain_mapping(user_id)
        zone_ids = []
        i = 0
        for zone_to_import in list(zones):
            i += 1
            bar.update(i) if show_progressbar else False
            zone_to_import['id'] = domain_mapping[zone_to_import['domain']] if zone_to_import['domain'] in domain_mapping else 0
            zone_ids.append(zone_to_import['id'])
        self.__zone_clear_tags(zone_ids, show_progressbar=show_progressbar, widget=widget)
        if show_progressbar:
            widget[0] = progressbar.FormatLabel('Importing tags')
            bar = progressbar.ProgressBar(max_value=len(zones), widgets=widget)
        # Pass 3: create any missing tags, then re-attach tags to every zone.
        self.__tags_create(user_id, unique_tags)
        tag_mapping = self.__get_tag_mapping(user_id)
        count = 0
        for zone_to_import in list(zones):
            count += 1
            bar.update(count) if show_progressbar else False
            tags = {}
            for tag in zone_to_import['tags']:
                # NOTE(review): raises KeyError if a tag differs from the
                # stored name only by case - see note on __tags_create.
                tags[tag] = tag_mapping[tag]
            self.__zone_save_tags(zone_to_import['id'], tags, autocommit=False)
            if count % batch_size == 0:
                db.session.commit()
        db.session.commit()
        return True
    def __import_records(self, records, user_id, errors, show_progressbar=False, batch_size = 100):
        """Bulk-insert/update validated record rows, committing every `batch_size` rows.

        `errors` is mutated in place: a message is appended (and the record
        skipped) for any row whose zone cannot be resolved.
        """
        domain_mapping = self.__get_domain_mapping(user_id)
        widget = [
            progressbar.FormatLabel(''),
            ' ',
            progressbar.Percentage(),
            ' ',
            progressbar.Bar('#'),
            ' ',
            progressbar.RotatingMarker(),
            ' ',
            progressbar.ETA()
        ]
        if show_progressbar:
            widget[0] = progressbar.FormatLabel('Importing records')
            bar = progressbar.ProgressBar(max_value=len(records), widgets=widget)
        count = 0
        for record_to_import in records:
            count += 1
            bar.update(count) if show_progressbar else False
            # First, get the zone.
            zone_id = domain_mapping[record_to_import['domain']] if record_to_import['domain'] in domain_mapping else None
            if not zone_id:
                # At this point all zones should exist.
                errors.append('Could not find zone: {0}'.format(record_to_import['domain']))
                continue
            # dict payloads are serialised to JSON strings for storage.
            data = json.dumps(record_to_import['data']) if isinstance(record_to_import['data'], dict) else record_to_import['data']
            conditional_data = json.dumps(record_to_import['conditional_data']) if isinstance(record_to_import['conditional_data'], dict) else record_to_import['conditional_data']
            self.__record_update_or_create(
                zone_id,
                record_to_import['ttl'],
                record_to_import['cls'],
                record_to_import['type'],
                record_to_import['active'],
                data,
                record_to_import['is_conditional'],
                record_to_import['conditional_count'],
                record_to_import['conditional_limit'],
                record_to_import['conditional_reset'],
                conditional_data,
                id=record_to_import['record_id'],
                autocommit=False
            )
            if count % batch_size == 0:
                db.session.commit()
        db.session.commit()
        return True
def __process_zones(self, zones, user, show_progressbar=False):
errors = []
items = []
widget = [
progressbar.FormatLabel(''),
' ',
progressbar.Percentage(),
' ',
progressbar.Bar('#'),
' ',
progressbar.RotatingMarker(),
' ',
progressbar.ETA()
]
if show_progressbar:
widget[0] = progressbar.FormatLabel('Processing zones')
bar = progressbar.ProgressBar(max_value=len(zones), widgets=widget)
domain_mapping = self.__get_domain_mapping(user.id)
user_base_domain = '.' + self.__dns_zones.get_base_domain(user.admin, user.username)
count = 0
for zone in zones:
count += 1
bar.update(count) if show_progressbar else False
active = True if zone['active'] in ['1', 'yes', 'true'] else False
catch_all = True if zone['catch_all'] in ['1', 'yes', 'true'] else False
forwarding = True if zone['forwarding'] in ['1', 'yes', 'true'] else False
regex = True if zone['regex'] in ['1', 'yes', 'true'] else False
master = True if zone['master'] in ['1', 'yes', 'true'] else False
tags = zone['tags'].split(',')
# Trim each element.
map(str.strip, tags)
# Remove empty elements.
tags = list(filter(None, tags))
is_valid = True
if not user.admin:
if zone['domain'][-len(user_base_domain):] != user_base_domain and user_base_domain != '.' + zone['domain']:
is_valid = False
errors.append({'row': zone['row'], 'error': 'Zone {0} does not match your assigned master domain'.format(zone['domain'])})
if is_valid:
domain = {
'id': domain_mapping[zone['domain']] if zone['domain'] in domain_mapping else 0,
'domain': zone['domain'],
'active': active,
'catch_all': catch_all,
'forwarding': forwarding,
'regex': regex,
'master': master,
'tags': tags
}
items.append(domain)
return items, errors
    def __process_records(self, records, user, show_progressbar=False):
        """Validate categorised record rows and normalise them for import.

        Returns (items, errors): items are dicts ready for __import_records;
        a row is included only if *all* its field validations pass, otherwise
        its errors are collected instead.
        """
        errors = []
        items = []
        widget = [
            progressbar.FormatLabel(''),
            ' ',
            progressbar.Percentage(),
            ' ',
            progressbar.Bar('#'),
            ' ',
            progressbar.RotatingMarker(),
            ' ',
            progressbar.ETA()
        ]
        if show_progressbar:
            widget[0] = progressbar.FormatLabel('Processing records')
            bar = progressbar.ProgressBar(max_value=len(records), widgets=widget)
        # Forward mapping resolves domain -> zone id; the reverse one is used
        # by __process_record_id to confirm a record id belongs to its zone.
        domain_mapping = self.__get_domain_mapping(user.id)
        domain_mapping_reverse = self.__get_domain_mapping(user.id, reverse=True)
        count = 0
        for record in records:
            count += 1
            bar.update(count) if show_progressbar else False
            record_errors = []
            active = True if record['active'] in ['1', 'yes', 'true'] else False
            zone_id = self.__process_record_zone(record, record_errors, domain_mapping)
            record_id = self.__process_record_id(record, zone_id, record_errors, domain_mapping_reverse)
            ttl = self.__process_record_ttl(record, record_errors)
            cls = self.__process_record_cls(record, record_errors)
            type = self.__process_record_type(record, record_errors)
            is_conditional = True if record['is_conditional'] in ['1', 'yes', 'true'] else False
            conditional_reset = True if record['conditional_reset'] in ['1', 'yes', 'true'] else False
            conditional_count = self.__process_number(record, record_errors, 'conditional_count')
            conditional_limit = self.__process_number(record, record_errors, 'conditional_limit')
            data = {}
            conditional_data = {}
            # Data can only be validated once the record type is known.
            if len(type) > 0:
                data = self.__process_record_data(record, type, record_errors)
                if is_conditional:
                    conditional_data = self.__process_record_data(record, type, record_errors, is_conditional=True)
            if len(record_errors) == 0:
                items.append({
                    'record_id': record_id,
                    'zone_id': zone_id,
                    'domain': record['domain'],
                    'active': active,
                    'ttl': ttl,
                    'cls': cls,
                    'type': type,
                    'data': data,
                    'is_conditional': is_conditional,
                    'conditional_count': conditional_count,
                    'conditional_limit': conditional_limit,
                    'conditional_reset': conditional_reset,
                    'conditional_data': conditional_data
                })
            else:
                errors += record_errors
        return items, errors
def __process_number(self, record, errors, attribute):
value = record[attribute]
if len(value) == 0 or value.isdigit() is False:
errors.append({'row': record['row'], 'error': 'Invalid attribute {0} value: {1}'.format(record[attribute], value)})
return 0
return int(value)
def __process_record_id(self, record, zone_id, errors, domain_mapping):
zone_id = zone_id if zone_id > 0 else None
record_id = 0
if len(record['id']) > 0:
if not record['id'].isdigit():
errors.append({'row': record['row'], 'error': 'Invalid record id: {0}'.format(record['id'])})
return 0
record_id = int(record['id'])
if record_id > 0:
record_exists = self.__record_exists(record_id, dns_zone_id=zone_id)
if not record_exists:
# Record not found - treat as new.
return 0
if zone_id > 0:
domain = domain_mapping[zone_id] if zone_id in domain_mapping else None
if not domain:
errors.append({'row': record['row'], 'error': 'Zone {0} not found'.format(record['domain'])})
return 0
if record['domain'] != domain:
errors.append({'row': record['row'], 'error': 'Record {0} does not belong to zone {1}'.format(record_id, zone_id)})
return 0
return record_id
def __process_record_zone(self, record, errors, domain_mapping):
zone_id = domain_mapping[record['domain']] if record['domain'] in domain_mapping else 0
if zone_id == 0:
errors.append({'row': record['row'], 'error': 'Zone not found: {0}'.format(record['domain'])})
return zone_id
def __record_exists(self, dns_record_id, dns_zone_id=None):
params = {'id': dns_record_id}
sql = "SELECT COUNT(id) AS c FROM dns_records WHERE id = :id"
if dns_zone_id is not None:
params['dns_zone_id'] = dns_zone_id
sql += " AND dns_zone_id = :dns_zone_id"
result = db.session.execute(sql, params).first()
return result[0] > 0 if result is not None else False
def __process_record_ttl(self, record, errors):
ttl = 0
if not record['ttl'].isdigit():
errors.append({'row': record['row'], 'error': 'Invalid TTL: {0}'.format(record['ttl'])})
else:
ttl = int(record['ttl'])
if ttl < 0:
errors.append({'row': record['row'], 'error': 'Invalid TTL: {0}'.format(record['ttl'])})
return ttl
def __process_record_cls(self, record, errors):
cls = ''
if not record['cls'] in self.__dns_records.get_classes():
errors.append({'row': record['row'], 'error': 'Invalid class: {0}'.format(record['cls'])})
else:
cls = record['cls']
return cls
def __process_record_type(self, record, errors):
type = ''
if not record['type'] in self.__dns_records.get_types():
errors.append({'row': record['row'], 'error': 'Invalid type: {0}'.format(record['type'])})
else:
type = record['type']
return type
def __properties_to_dict(self, record, errors, is_conditional=False):
attribute = 'conditional_data' if is_conditional else 'data'
rows = record[attribute].split("\n")
properties = {}
for row in rows:
parts = row.split('=', 1)
if len(parts) != 2:
errors.append({'row': record['row'], 'error': 'Invalid record property: {0}'.format(row)})
continue
name = parts[0].lower().strip()
value = parts[1].strip()
properties[name] = value
return properties
    def __process_record_data(self, record, type, errors, is_conditional=False):
        """Validate the (conditional) data column against the record type's property schema.

        Returns {property_name: typed_value}; problems are appended to
        `errors` and the offending property skipped.
        """
        record_properties = self.__properties_to_dict(record, errors, is_conditional=is_conditional)
        required_properties = self.__dns_records.get_record_type_properties(type, clean=True)
        data = {}
        for property_name, property_type in required_properties.items():
            if not property_name in record_properties:
                errors.append({'row': record['row'], 'error': 'Missing record property: {0}'.format(property_name)})
                continue
            value = record_properties[property_name]
            # int-typed properties arrive as strings; convert after a digit check.
            if (property_type == 'int') and (isinstance(value, str)):
                if not value.isdigit():
                    errors.append({'row': record['row'], 'error': "Invalid value '{0}' for property '{1}'".format(value, property_name)})
                    continue
                value = int(value)
            # Empty strings are rejected for 'str' properties. The negative-int
            # branch below cannot trigger here (isdigit() rejects signs) but is
            # kept as written.
            if (property_type == 'str') and (len(value) == 0):
                errors.append({'row': record['row'], 'error': "Invalid value '{0}' for property '{1}'".format(value, property_name)})
                continue
            elif (property_type == 'int') and (value < 0):
                errors.append({'row': record['row'], 'error': "Invalid value '{0}' for property '{1}'".format(value, property_name)})
                continue
            data[property_name] = value
        return data
    def __categorise_rows(self, rows, type):
        """Normalise raw CSV rows (strip / case-fold fields) and attach a 1-based row number."""
        data = []
        for i, row in enumerate(rows):
            # Error row is +1 because the first row is the header which was removed.
            actual_row = i + 1
            if type == self.IMPORT_TYPE_ZONE:
                data.append({
                    'row': actual_row,
                    'domain': row['domain'].strip().lower(),
                    'active': row['active'].strip().lower(),
                    'catch_all': row['catch_all'].strip().lower(),
                    'forwarding': row['forwarding'].strip().lower(),
                    'regex': row['regex'].strip().lower(),
                    'master': row['master'].strip().lower(),
                    'tags': row['tags'].strip()
                })
            elif type == self.IMPORT_TYPE_RECORD:
                data.append({
                    'row': actual_row,
                    'domain': row['domain'].strip().lower(),
                    'id': row['id'].strip(),
                    'ttl': row['ttl'].strip().lower(),
                    'cls': row['cls'].strip().upper(),
                    'type': row['type'].strip().upper(),
                    'active': row['active'].strip().lower(),
                    'data': row['data'].strip(),
                    'is_conditional': row['is_conditional'].strip().lower(),
                    'conditional_count': row['conditional_count'].strip().lower(),
                    'conditional_limit': row['conditional_limit'].strip().lower(),
                    'conditional_reset': row['conditional_reset'].strip().lower(),
                    'conditional_data': row['conditional_data'].strip(),
                })
        return data
def __get_domain_mapping(self, user_id, reverse=False):
result = db.session.execute(
"SELECT id, domain FROM dns_zones WHERE user_id = :user_id",
{'user_id': user_id}
)
mapping = {}
for row in result:
if reverse:
mapping[row[0]] = row[1]
else:
mapping[row[1]] = row[0]
return mapping
def __get_tag_mapping(self, user_id):
result = db.session.execute(
"SELECT id, name FROM tags WHERE user_id = :user_id",
{'user_id': user_id}
)
mapping = {}
for row in result:
mapping[row[1]] = row[0]
return mapping
    def __zone_update_or_create(self, domain, active, catch_all, forwarding, regex, master, user_id, id=None, autocommit=True):
        """INSERT a new dns_zones row (when id is None/0) or UPDATE the existing one.

        With autocommit=False the caller is responsible for committing
        (used for batched bulk imports).
        """
        params = {
            'domain': domain,
            'active': active,
            'catch_all': catch_all,
            'forwarding': forwarding,
            'regex': regex,
            'master': master,
            'user_id': user_id,
            'updated_at': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        }
        if (id is None) or (id == 0):
            params['created_at'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            # NOTE(review): the concatenation yields ")VALUES(" with no space;
            # SQL parsers accept this, but it is worth confirming intent.
            sql = "INSERT INTO dns_zones (domain, active, catch_all, forwarding, regex, master, user_id, updated_at, created_at)" \
                  "VALUES(:domain, :active, :catch_all, :forwarding, :regex, :master, :user_id, :updated_at, :created_at)"
        else:
            params['id'] = id
            sql = "UPDATE dns_zones SET domain = :domain, active = :active, catch_all = :catch_all, forwarding = :forwarding, regex = :regex, master = :master, user_id = :user_id, updated_at = :updated_at WHERE id = :id"
        result = db.session.execute(sql, params)
        if autocommit:
            db.session.commit()
        return True
    def __record_update_or_create(self, zone_id, ttl, cls, type, active, data, is_conditional, conditional_count,
                                  conditional_limit, conditional_reset, conditional_data, id=None, autocommit=True):
        """INSERT a new dns_records row (when id is None/0) or UPDATE the existing one.

        `data` / `conditional_data` are expected to already be serialised
        strings (see __import_records). With autocommit=False the caller is
        responsible for committing.
        """
        params = {
            'zone_id': zone_id,
            'ttl': ttl,
            'cls': cls,
            'type': type,
            'active': active,
            'data': data,
            'has_conditional_responses': is_conditional,
            'conditional_count': conditional_count,
            'conditional_limit': conditional_limit,
            'conditional_reset': conditional_reset,
            'conditional_data': conditional_data,
            'updated_at': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        }
        if (id is None) or (id == 0):
            params['created_at'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            sql = "INSERT INTO dns_records (dns_zone_id, ttl, cls, type, data, active, has_conditional_responses, conditional_count, conditional_limit, conditional_reset, conditional_data, updated_at, created_at) " \
                  "VALUES(:zone_id, :ttl, :cls, :type, :data, :active, :has_conditional_responses, :conditional_count, :conditional_limit, :conditional_reset, :conditional_data, :updated_at, :created_at)"
        else:
            params['id'] = id
            sql = "UPDATE dns_records SET dns_zone_id = :zone_id, ttl = :ttl, cls = :cls, type = :type, data = :data, active = :active, has_conditional_responses = :has_conditional_responses, conditional_count = :conditional_count, conditional_limit = :conditional_limit, conditional_reset = :conditional_reset, conditional_data = :conditional_data, updated_at = :updated_at WHERE id = :id"
        result = db.session.execute(sql, params)
        if autocommit:
            db.session.commit()
        return True
    def __tags_create(self, user_id, tags):
        """Create any of `tags` that do not already exist for this user.

        NOTE(review): existence is checked against the stripped/lower-cased
        name, but the INSERT stores the original `tag` verbatim - a
        mixed-case duplicate of an existing lower-case tag is skipped rather
        than normalised, which can later break tag_mapping lookups. Confirm
        whether tags should be normalised on insert.
        """
        for tag in tags:
            name = tag.strip().lower()
            result = db.session.execute(
                "SELECT id FROM tags WHERE name = :name AND user_id = :user_id",
                {'name': name, 'user_id': user_id}
            ).first()
            if result is None:
                params = {
                    'user_id': user_id,
                    'name': tag,
                    'created_at': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
                    'updated_at': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
                }
                sql = "INSERT INTO tags (user_id, name, created_at, updated_at) VALUES(:user_id, :name, :created_at, :updated_at)"
                db.session.execute(sql, params)
        db.session.commit()
        return True
    def __zone_save_tags(self, zone_id, tags, autocommit=True):
        """Attach tags to a zone; `tags` maps tag name -> tag id.

        Rows are inserted unconditionally - callers are expected to have
        cleared existing dns_zone_tags rows first (see __zone_clear_tags).
        """
        for name, id in tags.items():
            params = {
                'dns_zone_id': zone_id,
                'tag_id': id,
                'created_at': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
                'updated_at': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
            }
            sql = "INSERT INTO dns_zone_tags (dns_zone_id, tag_id, created_at, updated_at) VALUES(:dns_zone_id, :tag_id, :created_at, :updated_at)"
            db.session.execute(sql, params)
        if autocommit:
            db.session.commit()
        return True
    def __zone_clear_tags(self, zone_ids, batch_size=100, show_progressbar=False, widget=None):
        """Delete existing dns_zone_tags rows for the given zone ids, in batches.

        `widget` is the shared progressbar widget list; only widget[0] (the
        label) is replaced, so it must be non-None when show_progressbar=True.
        """
        batches = list(self.__chunks(zone_ids, batch_size))
        if show_progressbar:
            widget[0] = progressbar.FormatLabel('Removing existing tags')
            bar = progressbar.ProgressBar(max_value=len(batches), widgets=widget)
        count = 0
        for batch in batches:
            count += 1
            bar.update(count) if show_progressbar else False
            i = 0
            params = {}
            for id in batch:
                i += 1
                params['param' + str(i)] = id
            # Build ':param1, :param2, ...' placeholders for the IN clause.
            bind = [':' + v for v in params.keys()]
            sql = "DELETE FROM dns_zone_tags WHERE dns_zone_id IN({0})".format(', '.join(bind))
            db.session.execute(sql, params)
        db.session.commit()
        return True
def __chunks(self, data, size):
# From https://stackoverflow.com/questions/312443/how-do-you-split-a-list-into-evenly-sized-chunks
for i in range(0, len(data), size):
yield data[i:i + size]
| 39.89019 | 390 | 0.562342 | from app.lib.dns.helpers.shared import SharedHelper
import os
import datetime
import json
import progressbar
from app import db
class DNSImportManager(SharedHelper):
IMPORT_TYPE_ZONE = 1
IMPORT_TYPE_RECORD = 2
@property
def last_error(self):
return self.__last_error
@last_error.setter
def last_error(self, value):
self.__last_error = value
def __init__(self, dns_zones, dns_records, users):
self.__last_error = ''
self.__dns_zones = dns_zones
self.__dns_records = dns_records
self.__zone_headers = ['domain', 'active', 'catch_all', 'forwarding', 'regex', 'master', 'tags']
self.__record_headers = ['domain', 'id', 'ttl', 'cls', 'type', 'active', 'data', 'is_conditional', 'conditional_count', 'conditional_limit', 'conditional_reset', 'conditional_data']
self.__users = users
def identify(self, csvfile):
self.last_error = ''
if not os.path.isfile(csvfile):
self.last_error = 'CSV file does not exist'
return False
header = self._load_csv_header(csvfile)
zone_header_count = 0
record_header_count = 0
for column in header:
if column in self.__zone_headers:
zone_header_count += 1
if column in self.__record_headers:
record_header_count += 1
if zone_header_count == len(self.__zone_headers):
return self.IMPORT_TYPE_ZONE
elif record_header_count == len(self.__record_headers):
return self.IMPORT_TYPE_RECORD
self.last_error = 'If you are uploading a ZONE file these are the required columns: {0}. If you are uploading a RECORD file then the required columns are: {1}.'.format(', '.join(self.__zone_headers), ', '.join(self.__record_headers))
return False
def review(self, csvfile, type, user_id, show_progressbar=False):
self.last_error = ''
if not os.path.isfile(csvfile):
self.last_error = 'CSV file does not exist'
return False
lines = self._load_csv(csvfile)
if len(lines) == 0:
self.last_error = 'CSV is empty'
return False
user = self.__users.get_user(user_id)
if not user:
self.last_error = 'Could not find user with ID {0}'.format(user_id)
return False
all_errors = []
errors = []
rows = []
if type == self.IMPORT_TYPE_ZONE:
rows = self.__categorise_rows(lines, type)
rows, errors = self.__process_zones(rows, user, show_progressbar=show_progressbar)
elif type == self.IMPORT_TYPE_RECORD:
rows = self.__categorise_rows(lines, type)
rows, errors = self.__process_records(rows, user, show_progressbar=show_progressbar)
all_errors += errors
all_errors = sorted(all_errors, key=lambda k: k['row'])
return {
'data': rows,
'errors': all_errors
}
def run(self, data, type, user_id, show_progressbar=False):
errors = []
if type == self.IMPORT_TYPE_ZONE:
self.__import_zones(data, user_id, show_progressbar=show_progressbar)
elif type == self.IMPORT_TYPE_RECORD:
self.__import_records(data, user_id, errors, show_progressbar=show_progressbar)
return errors if len(errors) > 0 else True
def __import_zones(self, zones, user_id, show_progressbar=False, batch_size=100):
widget = [
progressbar.FormatLabel(''),
' ',
progressbar.Percentage(),
' ',
progressbar.Bar('#'),
' ',
progressbar.RotatingMarker(),
' ',
progressbar.ETA()
]
count = 0
unique_tags = []
if show_progressbar:
widget[0] = progressbar.FormatLabel('Importing zones')
bar = progressbar.ProgressBar(max_value=len(zones), widgets=widget)
for zone_to_import in list(zones):
count += 1
bar.update(count) if show_progressbar else False
self.__zone_update_or_create(
zone_to_import['domain'],
zone_to_import['active'],
zone_to_import['catch_all'],
zone_to_import['forwarding'],
zone_to_import['regex'],
zone_to_import['master'],
user_id,
id=zone_to_import['id'],
autocommit=False
)
if count % batch_size == 0:
db.session.commit()
unique_tags = list(set(unique_tags + zone_to_import['tags']))
db.session.commit()
if show_progressbar:
widget[0] = progressbar.FormatLabel('Re-mapping zones')
bar = progressbar.ProgressBar(max_value=len(zones), widgets=widget)
domain_mapping = self.__get_domain_mapping(user_id)
zone_ids = []
i = 0
for zone_to_import in list(zones):
i += 1
bar.update(i) if show_progressbar else False
zone_to_import['id'] = domain_mapping[zone_to_import['domain']] if zone_to_import['domain'] in domain_mapping else 0
zone_ids.append(zone_to_import['id'])
self.__zone_clear_tags(zone_ids, show_progressbar=show_progressbar, widget=widget)
if show_progressbar:
widget[0] = progressbar.FormatLabel('Importing tags')
bar = progressbar.ProgressBar(max_value=len(zones), widgets=widget)
self.__tags_create(user_id, unique_tags)
tag_mapping = self.__get_tag_mapping(user_id)
count = 0
for zone_to_import in list(zones):
count += 1
bar.update(count) if show_progressbar else False
tags = {}
for tag in zone_to_import['tags']:
tags[tag] = tag_mapping[tag]
self.__zone_save_tags(zone_to_import['id'], tags, autocommit=False)
if count % batch_size == 0:
db.session.commit()
db.session.commit()
return True
def __import_records(self, records, user_id, errors, show_progressbar=False, batch_size = 100):
domain_mapping = self.__get_domain_mapping(user_id)
widget = [
progressbar.FormatLabel(''),
' ',
progressbar.Percentage(),
' ',
progressbar.Bar('#'),
' ',
progressbar.RotatingMarker(),
' ',
progressbar.ETA()
]
if show_progressbar:
widget[0] = progressbar.FormatLabel('Importing records')
bar = progressbar.ProgressBar(max_value=len(records), widgets=widget)
count = 0
for record_to_import in records:
count += 1
bar.update(count) if show_progressbar else False
zone_id = domain_mapping[record_to_import['domain']] if record_to_import['domain'] in domain_mapping else None
if not zone_id:
errors.append('Could not find zone: {0}'.format(record_to_import['domain']))
continue
data = json.dumps(record_to_import['data']) if isinstance(record_to_import['data'], dict) else record_to_import['data']
conditional_data = json.dumps(record_to_import['conditional_data']) if isinstance(record_to_import['conditional_data'], dict) else record_to_import['conditional_data']
self.__record_update_or_create(
zone_id,
record_to_import['ttl'],
record_to_import['cls'],
record_to_import['type'],
record_to_import['active'],
data,
record_to_import['is_conditional'],
record_to_import['conditional_count'],
record_to_import['conditional_limit'],
record_to_import['conditional_reset'],
conditional_data,
id=record_to_import['record_id'],
autocommit=False
)
if count % batch_size == 0:
db.session.commit()
db.session.commit()
return True
def __process_zones(self, zones, user, show_progressbar=False):
errors = []
items = []
widget = [
progressbar.FormatLabel(''),
' ',
progressbar.Percentage(),
' ',
progressbar.Bar('#'),
' ',
progressbar.RotatingMarker(),
' ',
progressbar.ETA()
]
if show_progressbar:
widget[0] = progressbar.FormatLabel('Processing zones')
bar = progressbar.ProgressBar(max_value=len(zones), widgets=widget)
domain_mapping = self.__get_domain_mapping(user.id)
user_base_domain = '.' + self.__dns_zones.get_base_domain(user.admin, user.username)
count = 0
for zone in zones:
count += 1
bar.update(count) if show_progressbar else False
active = True if zone['active'] in ['1', 'yes', 'true'] else False
catch_all = True if zone['catch_all'] in ['1', 'yes', 'true'] else False
forwarding = True if zone['forwarding'] in ['1', 'yes', 'true'] else False
regex = True if zone['regex'] in ['1', 'yes', 'true'] else False
master = True if zone['master'] in ['1', 'yes', 'true'] else False
tags = zone['tags'].split(',')
map(str.strip, tags)
tags = list(filter(None, tags))
is_valid = True
if not user.admin:
if zone['domain'][-len(user_base_domain):] != user_base_domain and user_base_domain != '.' + zone['domain']:
is_valid = False
errors.append({'row': zone['row'], 'error': 'Zone {0} does not match your assigned master domain'.format(zone['domain'])})
if is_valid:
domain = {
'id': domain_mapping[zone['domain']] if zone['domain'] in domain_mapping else 0,
'domain': zone['domain'],
'active': active,
'catch_all': catch_all,
'forwarding': forwarding,
'regex': regex,
'master': master,
'tags': tags
}
items.append(domain)
return items, errors
def __process_records(self, records, user, show_progressbar=False):
errors = []
items = []
widget = [
progressbar.FormatLabel(''),
' ',
progressbar.Percentage(),
' ',
progressbar.Bar('#'),
' ',
progressbar.RotatingMarker(),
' ',
progressbar.ETA()
]
if show_progressbar:
widget[0] = progressbar.FormatLabel('Processing records')
bar = progressbar.ProgressBar(max_value=len(records), widgets=widget)
domain_mapping = self.__get_domain_mapping(user.id)
domain_mapping_reverse = self.__get_domain_mapping(user.id, reverse=True)
count = 0
for record in records:
count += 1
bar.update(count) if show_progressbar else False
record_errors = []
active = True if record['active'] in ['1', 'yes', 'true'] else False
zone_id = self.__process_record_zone(record, record_errors, domain_mapping)
record_id = self.__process_record_id(record, zone_id, record_errors, domain_mapping_reverse)
ttl = self.__process_record_ttl(record, record_errors)
cls = self.__process_record_cls(record, record_errors)
type = self.__process_record_type(record, record_errors)
is_conditional = True if record['is_conditional'] in ['1', 'yes', 'true'] else False
conditional_reset = True if record['conditional_reset'] in ['1', 'yes', 'true'] else False
conditional_count = self.__process_number(record, record_errors, 'conditional_count')
conditional_limit = self.__process_number(record, record_errors, 'conditional_limit')
data = {}
conditional_data = {}
if len(type) > 0:
data = self.__process_record_data(record, type, record_errors)
if is_conditional:
conditional_data = self.__process_record_data(record, type, record_errors, is_conditional=True)
if len(record_errors) == 0:
items.append({
'record_id': record_id,
'zone_id': zone_id,
'domain': record['domain'],
'active': active,
'ttl': ttl,
'cls': cls,
'type': type,
'data': data,
'is_conditional': is_conditional,
'conditional_count': conditional_count,
'conditional_limit': conditional_limit,
'conditional_reset': conditional_reset,
'conditional_data': conditional_data
})
else:
errors += record_errors
return items, errors
def __process_number(self, record, errors, attribute):
value = record[attribute]
if len(value) == 0 or value.isdigit() is False:
errors.append({'row': record['row'], 'error': 'Invalid attribute {0} value: {1}'.format(record[attribute], value)})
return 0
return int(value)
def __process_record_id(self, record, zone_id, errors, domain_mapping):
zone_id = zone_id if zone_id > 0 else None
record_id = 0
if len(record['id']) > 0:
if not record['id'].isdigit():
errors.append({'row': record['row'], 'error': 'Invalid record id: {0}'.format(record['id'])})
return 0
record_id = int(record['id'])
if record_id > 0:
record_exists = self.__record_exists(record_id, dns_zone_id=zone_id)
if not record_exists:
return 0
if zone_id > 0:
domain = domain_mapping[zone_id] if zone_id in domain_mapping else None
if not domain:
errors.append({'row': record['row'], 'error': 'Zone {0} not found'.format(record['domain'])})
return 0
if record['domain'] != domain:
errors.append({'row': record['row'], 'error': 'Record {0} does not belong to zone {1}'.format(record_id, zone_id)})
return 0
return record_id
def __process_record_zone(self, record, errors, domain_mapping):
zone_id = domain_mapping[record['domain']] if record['domain'] in domain_mapping else 0
if zone_id == 0:
errors.append({'row': record['row'], 'error': 'Zone not found: {0}'.format(record['domain'])})
return zone_id
def __record_exists(self, dns_record_id, dns_zone_id=None):
params = {'id': dns_record_id}
sql = "SELECT COUNT(id) AS c FROM dns_records WHERE id = :id"
if dns_zone_id is not None:
params['dns_zone_id'] = dns_zone_id
sql += " AND dns_zone_id = :dns_zone_id"
result = db.session.execute(sql, params).first()
return result[0] > 0 if result is not None else False
def __process_record_ttl(self, record, errors):
ttl = 0
if not record['ttl'].isdigit():
errors.append({'row': record['row'], 'error': 'Invalid TTL: {0}'.format(record['ttl'])})
else:
ttl = int(record['ttl'])
if ttl < 0:
errors.append({'row': record['row'], 'error': 'Invalid TTL: {0}'.format(record['ttl'])})
return ttl
def __process_record_cls(self, record, errors):
cls = ''
if not record['cls'] in self.__dns_records.get_classes():
errors.append({'row': record['row'], 'error': 'Invalid class: {0}'.format(record['cls'])})
else:
cls = record['cls']
return cls
def __process_record_type(self, record, errors):
type = ''
if not record['type'] in self.__dns_records.get_types():
errors.append({'row': record['row'], 'error': 'Invalid type: {0}'.format(record['type'])})
else:
type = record['type']
return type
def __properties_to_dict(self, record, errors, is_conditional=False):
attribute = 'conditional_data' if is_conditional else 'data'
rows = record[attribute].split("\n")
properties = {}
for row in rows:
parts = row.split('=', 1)
if len(parts) != 2:
errors.append({'row': record['row'], 'error': 'Invalid record property: {0}'.format(row)})
continue
name = parts[0].lower().strip()
value = parts[1].strip()
properties[name] = value
return properties
def __process_record_data(self, record, type, errors, is_conditional=False):
record_properties = self.__properties_to_dict(record, errors, is_conditional=is_conditional)
required_properties = self.__dns_records.get_record_type_properties(type, clean=True)
data = {}
for property_name, property_type in required_properties.items():
if not property_name in record_properties:
errors.append({'row': record['row'], 'error': 'Missing record property: {0}'.format(property_name)})
continue
value = record_properties[property_name]
if (property_type == 'int') and (isinstance(value, str)):
if not value.isdigit():
errors.append({'row': record['row'], 'error': "Invalid value '{0}' for property '{1}'".format(value, property_name)})
continue
value = int(value)
if (property_type == 'str') and (len(value) == 0):
errors.append({'row': record['row'], 'error': "Invalid value '{0}' for property '{1}'".format(value, property_name)})
continue
elif (property_type == 'int') and (value < 0):
errors.append({'row': record['row'], 'error': "Invalid value '{0}' for property '{1}'".format(value, property_name)})
continue
data[property_name] = value
return data
def __categorise_rows(self, rows, type):
data = []
for i, row in enumerate(rows):
actual_row = i + 1
if type == self.IMPORT_TYPE_ZONE:
data.append({
'row': actual_row,
'domain': row['domain'].strip().lower(),
'active': row['active'].strip().lower(),
'catch_all': row['catch_all'].strip().lower(),
'forwarding': row['forwarding'].strip().lower(),
'regex': row['regex'].strip().lower(),
'master': row['master'].strip().lower(),
'tags': row['tags'].strip()
})
elif type == self.IMPORT_TYPE_RECORD:
data.append({
'row': actual_row,
'domain': row['domain'].strip().lower(),
'id': row['id'].strip(),
'ttl': row['ttl'].strip().lower(),
'cls': row['cls'].strip().upper(),
'type': row['type'].strip().upper(),
'active': row['active'].strip().lower(),
'data': row['data'].strip(),
'is_conditional': row['is_conditional'].strip().lower(),
'conditional_count': row['conditional_count'].strip().lower(),
'conditional_limit': row['conditional_limit'].strip().lower(),
'conditional_reset': row['conditional_reset'].strip().lower(),
'conditional_data': row['conditional_data'].strip(),
})
return data
def __get_domain_mapping(self, user_id, reverse=False):
result = db.session.execute(
"SELECT id, domain FROM dns_zones WHERE user_id = :user_id",
{'user_id': user_id}
)
mapping = {}
for row in result:
if reverse:
mapping[row[0]] = row[1]
else:
mapping[row[1]] = row[0]
return mapping
def __get_tag_mapping(self, user_id):
result = db.session.execute(
"SELECT id, name FROM tags WHERE user_id = :user_id",
{'user_id': user_id}
)
mapping = {}
for row in result:
mapping[row[1]] = row[0]
return mapping
def __zone_update_or_create(self, domain, active, catch_all, forwarding, regex, master, user_id, id=None, autocommit=True):
params = {
'domain': domain,
'active': active,
'catch_all': catch_all,
'forwarding': forwarding,
'regex': regex,
'master': master,
'user_id': user_id,
'updated_at': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
}
if (id is None) or (id == 0):
params['created_at'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
sql = "INSERT INTO dns_zones (domain, active, catch_all, forwarding, regex, master, user_id, updated_at, created_at)" \
"VALUES(:domain, :active, :catch_all, :forwarding, :regex, :master, :user_id, :updated_at, :created_at)"
else:
params['id'] = id
sql = "UPDATE dns_zones SET domain = :domain, active = :active, catch_all = :catch_all, forwarding = :forwarding, regex = :regex, master = :master, user_id = :user_id, updated_at = :updated_at WHERE id = :id"
result = db.session.execute(sql, params)
if autocommit:
db.session.commit()
return True
def __record_update_or_create(self, zone_id, ttl, cls, type, active, data, is_conditional, conditional_count,
conditional_limit, conditional_reset, conditional_data, id=None, autocommit=True):
params = {
'zone_id': zone_id,
'ttl': ttl,
'cls': cls,
'type': type,
'active': active,
'data': data,
'has_conditional_responses': is_conditional,
'conditional_count': conditional_count,
'conditional_limit': conditional_limit,
'conditional_reset': conditional_reset,
'conditional_data': conditional_data,
'updated_at': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
}
if (id is None) or (id == 0):
params['created_at'] = datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
sql = "INSERT INTO dns_records (dns_zone_id, ttl, cls, type, data, active, has_conditional_responses, conditional_count, conditional_limit, conditional_reset, conditional_data, updated_at, created_at) " \
"VALUES(:zone_id, :ttl, :cls, :type, :data, :active, :has_conditional_responses, :conditional_count, :conditional_limit, :conditional_reset, :conditional_data, :updated_at, :created_at)"
else:
params['id'] = id
sql = "UPDATE dns_records SET dns_zone_id = :zone_id, ttl = :ttl, cls = :cls, type = :type, data = :data, active = :active, has_conditional_responses = :has_conditional_responses, conditional_count = :conditional_count, conditional_limit = :conditional_limit, conditional_reset = :conditional_reset, conditional_data = :conditional_data, updated_at = :updated_at WHERE id = :id"
result = db.session.execute(sql, params)
if autocommit:
db.session.commit()
return True
def __tags_create(self, user_id, tags):
for tag in tags:
name = tag.strip().lower()
result = db.session.execute(
"SELECT id FROM tags WHERE name = :name AND user_id = :user_id",
{'name': name, 'user_id': user_id}
).first()
if result is None:
params = {
'user_id': user_id,
'name': tag,
'created_at': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
'updated_at': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
}
sql = "INSERT INTO tags (user_id, name, created_at, updated_at) VALUES(:user_id, :name, :created_at, :updated_at)"
db.session.execute(sql, params)
db.session.commit()
return True
def __zone_save_tags(self, zone_id, tags, autocommit=True):
for name, id in tags.items():
params = {
'dns_zone_id': zone_id,
'tag_id': id,
'created_at': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),
'updated_at': datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')
}
sql = "INSERT INTO dns_zone_tags (dns_zone_id, tag_id, created_at, updated_at) VALUES(:dns_zone_id, :tag_id, :created_at, :updated_at)"
db.session.execute(sql, params)
if autocommit:
db.session.commit()
return True
def __zone_clear_tags(self, zone_ids, batch_size=100, show_progressbar=False, widget=None):
batches = list(self.__chunks(zone_ids, batch_size))
if show_progressbar:
widget[0] = progressbar.FormatLabel('Removing existing tags')
bar = progressbar.ProgressBar(max_value=len(batches), widgets=widget)
count = 0
for batch in batches:
count += 1
bar.update(count) if show_progressbar else False
i = 0
params = {}
for id in batch:
i += 1
params['param' + str(i)] = id
bind = [':' + v for v in params.keys()]
sql = "DELETE FROM dns_zone_tags WHERE dns_zone_id IN({0})".format(', '.join(bind))
db.session.execute(sql, params)
db.session.commit()
return True
def __chunks(self, data, size):
for i in range(0, len(data), size):
yield data[i:i + size]
| true | true |
f724a1382e8af5cf9306be07c058f0768f836073 | 807 | py | Python | VetsApp/migrations/0001_initial.py | Sabrinax3/Pet-Clinic-1 | 776955d118a46c8d4eaa74de22ea0280b82debc9 | [
"MIT"
] | 2 | 2020-04-13T14:26:54.000Z | 2022-01-19T01:30:25.000Z | VetsApp/migrations/0001_initial.py | Sabrinax3/Pet-Clinic-1 | 776955d118a46c8d4eaa74de22ea0280b82debc9 | [
"MIT"
] | 2 | 2020-05-29T18:52:55.000Z | 2020-05-30T02:06:28.000Z | VetsApp/migrations/0001_initial.py | Sabrinax3/Pet-Clinic-1 | 776955d118a46c8d4eaa74de22ea0280b82debc9 | [
"MIT"
] | 8 | 2020-04-11T08:30:44.000Z | 2020-05-30T03:26:13.000Z | # Generated by Django 3.0.5 on 2020-04-10 10:01
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema migration: creates the Vets table."""

    # First migration of the app -- no earlier migrations to depend on.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Vets',
            fields=[
                # Auto-increment primary key added implicitly by Django.
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('Name', models.CharField(max_length=100)),
                ('ContactNo', models.CharField(max_length=30)),
                ('Address', models.CharField(max_length=200)),
                ('University', models.CharField(max_length=100)),
                ('HighestDegree', models.CharField(max_length=50)),
                # Stored as plain text; presumably a URL or file path to the
                # vet's picture -- TODO confirm against the app's views.
                ('Image', models.CharField(max_length=1000)),
            ],
        ),
    ]
| 29.888889 | 114 | 0.570012 |
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial schema migration: creates the Vets table."""

    # First migration of the app -- no earlier migrations to depend on.
    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Vets',
            fields=[
                # Auto-increment primary key added implicitly by Django.
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('Name', models.CharField(max_length=100)),
                ('ContactNo', models.CharField(max_length=30)),
                ('Address', models.CharField(max_length=200)),
                ('University', models.CharField(max_length=100)),
                ('HighestDegree', models.CharField(max_length=50)),
                # Stored as plain text; presumably a URL or file path to the
                # vet's picture -- TODO confirm against the app's views.
                ('Image', models.CharField(max_length=1000)),
            ],
        ),
    ]
| true | true |
f724a2c802c8c96a90155f3e5f0760fede5be41e | 4,704 | py | Python | stanCode_Projects/name_searching_system/babynames.py | calvin0123/sc-projects | 88ac98e3543a1399387c2033f36dc5c6b86c488c | [
"MIT"
] | null | null | null | stanCode_Projects/name_searching_system/babynames.py | calvin0123/sc-projects | 88ac98e3543a1399387c2033f36dc5c6b86c488c | [
"MIT"
] | null | null | null | stanCode_Projects/name_searching_system/babynames.py | calvin0123/sc-projects | 88ac98e3543a1399387c2033f36dc5c6b86c488c | [
"MIT"
] | 1 | 2021-12-04T22:50:23.000Z | 2021-12-04T22:50:23.000Z | """
SC101 Baby Names Project
Adapted from Nick Parlante's Baby Names assignment by
Jerry Liao.
------------------------------------------
File: babynames.py
Name: Calvin Chen
This file reads the most famous baby names from 1900 to
2010 in the US and stores the .txt into the dictionary
to provide the information for the babygraphics.py.
"""
import sys
def add_data_for_name(name_data, year, rank, name):
    """
    Adds the given year and rank to the associated name in the name_data dict.

    A name's rank for a year is only replaced when the new rank is at least
    as good (numerically lower or equal), so duplicate entries keep the best
    ranking seen.

    Input:
        name_data (dict): dict holding baby name data
        year (str): the year of the data entry to add
        rank (str): the rank of the data entry to add
        name (str): the name of the data entry to add

    Output:
        Modifies name_data in place and returns None. (The old code
        returned name_data from one branch and None from the others; no
        caller used the value.)
    """
    years = name_data.setdefault(name, {})
    # Overwrite unless the stored rank is strictly better (smaller number).
    if year not in years or int(rank) <= int(years[year]):
        years[year] = rank
def add_file(name_data, filename):
    """
    Reads the information from the specified file and populates the
    name_data dict with the data found in the file.

    The file begins with a year line; each following "rank,name1,name2"
    line is recorded against the most recently seen year.

    Input:
        name_data (dict): dict holding baby name data
        filename (str): name of the file holding baby name data

    Output:
        Modifies name_data in place; returns nothing.
    """
    with open(filename, 'r') as source:
        for raw_line in source:
            fields = [field.strip() for field in raw_line.split(',')]
            if len(fields) == 1:
                # A lone value is the year header for the lines below it.
                year = fields[0]
            else:
                rank = fields[0]
                add_data_for_name(name_data, year, rank, fields[1])
                add_data_for_name(name_data, year, rank, fields[2])
def read_files(filenames):
    """
    Reads the data from all files specified in the provided list into a
    single name_data dict and then returns that dict.

    Input:
        filenames (List[str]): a list of filenames containing baby name data

    Returns:
        name_data (dict): all baby name data, keyed by name
    """
    name_data = {}
    for filename in filenames:
        add_file(name_data, filename)
    return name_data
def search_names(name_data, target):
    """
    Given a name_data dict that stores baby name information and a target
    string, returns a list of all names in the dict that contain the target
    string, compared case-insensitively.

    Input:
        name_data (dict): a dict containing baby name data organized by name
        target (str): a string to look for in the names within name_data

    Returns:
        matching_names (List[str]): all names from name_data containing target
    """
    # Lower-case BOTH sides: the old code lowered only the names, so an
    # upper-case target could never match despite the documented
    # case-insensitive contract.
    needle = target.lower()
    return [name for name in name_data if needle in name.lower()]
def print_names(name_data):
    """
    (provided, DO NOT MODIFY)
    Given a name_data dict, print out all its data, one name per line.
    The names are printed in alphabetical order,
    with the corresponding years data displayed in increasing order.

    Input:
        name_data (dict): a dict containing baby name data organized by name

    Returns:
        This function does not return anything
    """
    # sorted() over dict items orders output by name; sorting the inner
    # items orders the (year, rank) pairs -- string years compare
    # chronologically here since all years have four digits.
    for key, value in sorted(name_data.items()):
        print(key, sorted(value.items()))
def main():
    """
    Command-line entry point: read the given data files and either print
    every name or, with "-search target", print only the matching names.
    """
    # (provided, DO NOT MODIFY)
    args = sys.argv[1:]
    # Two command line forms
    # 1. file1 file2 file3 ..
    # 2. -search target file1 file2 file3 ..

    # Assume no search, so list of filenames to read
    # is the args list
    filenames = args

    # Check if we are doing search, set target variable
    target = ''
    if len(args) >= 2 and args[0] == '-search':
        target = args[1]
        filenames = args[2:]  # Update filenames to skip first 2

    # Read in all the filenames: baby-1990.txt, baby-2000.txt, ...
    names = read_files(filenames)

    # Either we do a search or just print everything.
    if len(target) > 0:
        search_results = search_names(names, target)
        for name in search_results:
            print(name)
    else:
        print_names(names)


if __name__ == '__main__':
    main()
| 29.961783 | 84 | 0.624787 |
import sys
def add_data_for_name(name_data, year, rank, name):
    """Record *rank* for *name* in *year*, keeping the best (lowest) rank.

    Modifies name_data in place and returns None. (The old code returned
    name_data from one branch and None from the others; no caller used the
    value.)
    """
    years = name_data.setdefault(name, {})
    # Overwrite unless the stored rank is strictly better (smaller number).
    if year not in years or int(rank) <= int(years[year]):
        years[year] = rank
def add_file(name_data, filename):
    """Read one baby-names file into *name_data*.

    The file begins with a year line; each following "rank,name1,name2"
    line is recorded against the most recently seen year.
    """
    with open(filename, 'r') as source:
        for raw_line in source:
            fields = [field.strip() for field in raw_line.split(',')]
            if len(fields) == 1:
                # A lone value is the year header for the lines below it.
                year = fields[0]
            else:
                rank = fields[0]
                add_data_for_name(name_data, year, rank, fields[1])
                add_data_for_name(name_data, year, rank, fields[2])
def read_files(filenames):
    """Aggregate baby-name data from every listed file into one dict."""
    name_data = {}
    for filename in filenames:
        add_file(name_data, filename)
    return name_data
def search_names(name_data, target):
    """Return all names containing *target*, compared case-insensitively.

    The old code lowered only the names, so an upper-case target could
    never match.
    """
    needle = target.lower()
    return [name for name in name_data if needle in name.lower()]
def print_names(name_data):
for key, value in sorted(name_data.items()):
print(key, sorted(value.items()))
def main():
    """
    Command-line entry point.

    Invocations:
        babynames.py file1 file2 ...
        babynames.py -search target file1 file2 ...
    """
    args = sys.argv[1:]
    # With no -search flag, every argument is a data file.
    filenames = args
    target = ''
    if len(args) >= 2 and args[0] == '-search':
        target = args[1]
        # Skip the flag and the target; the rest are data files.
        filenames = args[2:]
    names = read_files(filenames)
    # Either print only the matching names or dump everything.
    if len(target) > 0:
        search_results = search_names(names, target)
        for name in search_results:
            print(name)
    else:
        print_names(names)


if __name__ == '__main__':
    main()
| true | true |
f724a33ad866379ea4c6e3d1a4ecff9dfb612aba | 16,136 | py | Python | flower/api/tasks.py | jiangrz/flower | 4d6fad197e97c9c36f8052345a348345ef4505a3 | [
"BSD-3-Clause"
] | null | null | null | flower/api/tasks.py | jiangrz/flower | 4d6fad197e97c9c36f8052345a348345ef4505a3 | [
"BSD-3-Clause"
] | null | null | null | flower/api/tasks.py | jiangrz/flower | 4d6fad197e97c9c36f8052345a348345ef4505a3 | [
"BSD-3-Clause"
] | 1 | 2021-04-23T17:34:09.000Z | 2021-04-23T17:34:09.000Z | from __future__ import absolute_import
import json
import logging
from datetime import datetime
from threading import Thread
from tornado import web
from tornado import gen
from tornado.escape import json_decode
from tornado.web import HTTPError
from celery import states
from celery.result import AsyncResult
from celery.contrib.abortable import AbortableAsyncResult
from celery.backends.base import DisabledBackend
from ..utils import tasks
from ..views import BaseHandler
from ..utils.broker import Broker
from ..api.control import ControlHandler
logger = logging.getLogger(__name__)
class BaseTaskHandler(BaseHandler):
    """Shared helpers for the task API handlers."""

    # Timestamp format accepted for the 'eta' option and string 'expires'.
    # Defined on the base class because normalize_options() reads
    # self.DATE_FORMAT and is used by TaskApply as well as TaskAsyncApply --
    # previously only TaskAsyncApply defined it, so TaskApply raised
    # AttributeError whenever a request supplied 'eta' or a non-numeric
    # 'expires'.
    DATE_FORMAT = '%Y-%m-%d %H:%M:%S.%f'

    def get_task_args(self):
        """Decode the JSON request body into (args, kwargs, options).

        Raises HTTPError(400) on malformed JSON or a non-array "args".
        """
        try:
            body = self.request.body
            options = json_decode(body) if body else {}
        except ValueError as e:
            raise HTTPError(400, str(e))
        args = options.pop('args', [])
        kwargs = options.pop('kwargs', {})
        if not isinstance(args, (list, tuple)):
            raise HTTPError(400, 'args must be an array')
        return args, kwargs, options

    @staticmethod
    def backend_configured(result):
        """True when a real result backend is configured for *result*."""
        return not isinstance(result.backend, DisabledBackend)

    def write_error(self, status_code, **kwargs):
        # Error responses carry only the status code, no HTML body.
        self.set_status(status_code)

    def update_response_result(self, response, result):
        """Copy the task result (plus traceback on failure) into *response*."""
        if result.state == states.FAILURE:
            response.update({'result': self.safe_result(result.result),
                             'traceback': result.traceback})
        else:
            response.update({'result': self.safe_result(result.result)})

    def normalize_options(self, options):
        """Coerce 'eta'/'countdown'/'expires' options to their native types.

        Raises ValueError when a value cannot be parsed.
        """
        if 'eta' in options:
            options['eta'] = datetime.strptime(options['eta'],
                                               self.DATE_FORMAT)
        if 'countdown' in options:
            options['countdown'] = float(options['countdown'])
        if 'expires' in options:
            expires = options['expires']
            try:
                # 'expires' may be a number of seconds...
                expires = float(expires)
            except ValueError:
                # ...or an absolute timestamp string.
                expires = datetime.strptime(expires, self.DATE_FORMAT)
            options['expires'] = expires

    def safe_result(self, result):
        """Return *result* if it is JSON-encodable, else its repr()."""
        try:
            json.dumps(result)
        except TypeError:
            return repr(result)
        else:
            return result
class TaskApply(BaseTaskHandler):
    @web.authenticated
    @web.asynchronous
    def post(self, taskname):
        """
        Execute a task by name and wait for its result.

        The JSON body may carry "args" (list), "kwargs" (dict) and
        apply_async options such as "eta", "countdown" and "expires".
        The response carries the task id, the final state (when a result
        backend is configured) and the result value.

        :query args: a list of arguments
        :query kwargs: a dictionary of arguments
        :reqheader Authorization: optional OAuth token to authenticate
        :statuscode 200: no error
        :statuscode 401: unauthorized request
        :statuscode 404: unknown task
        """
        args, kwargs, options = self.get_task_args()
        logger.debug("Invoking a task '%s' with '%s' and '%s'",
                     taskname, args, kwargs)
        try:
            task = self.capp.tasks[taskname]
        except KeyError:
            raise HTTPError(404, "Unknown task '%s'" % taskname)
        try:
            self.normalize_options(options)
        except ValueError:
            raise HTTPError(400, 'Invalid option')
        result = task.apply_async(args=args, kwargs=kwargs, **options)
        response = {'task-id': result.task_id}
        # In tornado for not blocking event loop we must return results
        # from other thread by self.finish()
        th = Thread(target=self.wait_results, args=(result, response, ))
        th.start()
        # So just exit

    def wait_results(self, result, response):
        """Runs on the worker thread spawned by post(); blocks until the
        task completes, then finishes the HTTP response."""
        # Wait until task finished and do not raise anything
        result.get(propagate=False)
        # Write results and finish async function
        self.update_response_result(response, result)
        if self.backend_configured(result):
            response.update(state=result.state)
        # NOTE(review): finish() is called from this non-IOLoop thread;
        # Tornado documents IOLoop.add_callback as its only thread-safe
        # method -- confirm this is safe for the Tornado version in use.
        self.finish(response)
class TaskAsyncApply(BaseTaskHandler):
    """Queue a known task and respond immediately with its id and state."""

    DATE_FORMAT = '%Y-%m-%d %H:%M:%S.%f'

    @web.authenticated
    def post(self, taskname):
        """
        Execute a task without waiting for its result.

        The JSON body may carry "args" (list), "kwargs" (dict) and
        apply_async options ("eta", "countdown", "expires", ...). The
        response carries the task id and, when a result backend is
        configured, the task state.

        :query args: a list of arguments
        :query kwargs: a dictionary of arguments
        :query options: a dictionary of `apply_async` keyword arguments
        :reqheader Authorization: optional OAuth token to authenticate
        :statuscode 200: no error
        :statuscode 401: unauthorized request
        :statuscode 404: unknown task
        """
        args, kwargs, options = self.get_task_args()
        logger.debug("Invoking a task '%s' with '%s' and '%s'",
                     taskname, args, kwargs)
        try:
            task = self.capp.tasks[taskname]
        except KeyError:
            raise HTTPError(404, "Unknown task '%s'" % taskname)
        try:
            self.normalize_options(options)
        except ValueError:
            raise HTTPError(400, 'Invalid option')
        async_result = task.apply_async(args=args, kwargs=kwargs, **options)
        response = {'task-id': async_result.task_id}
        if self.backend_configured(async_result):
            response['state'] = async_result.state
        self.write(response)
class TaskSend(BaseTaskHandler):
    """Queue a task by name without needing its source to be importable."""

    @web.authenticated
    def post(self, taskname):
        """
        Execute a task by name (doesn't require task sources).

        The JSON body may carry "args" (list), "kwargs" (dict) and
        send_task options. The response carries the task id and, when a
        result backend is configured, the task state.

        :query args: a list of arguments
        :query kwargs: a dictionary of arguments
        :reqheader Authorization: optional OAuth token to authenticate
        :statuscode 200: no error
        :statuscode 401: unauthorized request
        """
        args, kwargs, options = self.get_task_args()
        logger.debug("Invoking task '%s' with '%s' and '%s'",
                     taskname, args, kwargs)
        async_result = self.capp.send_task(
            taskname, args=args, kwargs=kwargs, **options)
        response = {'task-id': async_result.task_id}
        if self.backend_configured(async_result):
            response['state'] = async_result.state
        self.write(response)
class TaskResult(BaseTaskHandler):
    """Report the state (and, when ready, the result) of a task."""

    @web.authenticated
    def get(self, taskid):
        """
        Get a task result.

        :query timeout: how long to wait, in seconds, before the operation
                        times out
        :reqheader Authorization: optional OAuth token to authenticate
        :statuscode 200: no error
        :statuscode 401: unauthorized request
        :statuscode 503: result backend is not configured
        """
        raw_timeout = self.get_argument('timeout', None)
        timeout = None if raw_timeout is None else float(raw_timeout)
        result = AsyncResult(taskid)
        if not self.backend_configured(result):
            raise HTTPError(503)
        response = {'task-id': taskid, 'state': result.state}
        if timeout:
            # Block (up to timeout) for the result; failures are reported
            # in the payload rather than raised.
            result.get(timeout=timeout, propagate=False)
            self.update_response_result(response, result)
        elif result.ready():
            self.update_response_result(response, result)
        self.write(response)
class TaskAbort(BaseTaskHandler):
    """Request abortion of a running (abortable) task."""

    @web.authenticated
    def post(self, taskid):
        """
        Abort a running task.

        :reqheader Authorization: optional OAuth token to authenticate
        :statuscode 200: no error
        :statuscode 401: unauthorized request
        :statuscode 503: result backend is not configured
        """
        logger.info("Aborting task '%s'", taskid)
        abortable = AbortableAsyncResult(taskid)
        if not self.backend_configured(abortable):
            raise HTTPError(503)
        abortable.abort()
        self.write({'message': "Aborted '%s'" % taskid})
class GetQueueLengths(BaseTaskHandler):
    """Report the active queues and their lengths."""

    @web.authenticated
    @gen.coroutine
    def get(self):
        """
        List the active queues with their message counts, querying the
        broker directly (via the management HTTP API for amqp brokers when
        one is configured).
        """
        app = self.application
        broker_options = self.capp.conf.BROKER_TRANSPORT_OPTIONS
        http_api = None
        if app.transport == 'amqp' and app.options.broker_api:
            http_api = app.options.broker_api
        broker = Broker(app.capp.connection().as_uri(include_password=True),
                        http_api=http_api, broker_options=broker_options)
        queue_names = ControlHandler.get_active_queue_names()
        if not queue_names:
            # Fall back to the default queue when no worker reported any
            # active queues. Set literal instead of the old set([...]).
            queue_names = {self.capp.conf.CELERY_DEFAULT_QUEUE}
        queues = yield broker.queues(sorted(queue_names))
        self.write({'active_queues': queues})
class ListTasks(BaseTaskHandler):
    """List the tasks currently known to the event stream."""

    @web.authenticated
    def get(self):
        """
        List tasks.

        Responds with a JSON object mapping task ids to their attribute
        dicts (the internal worker reference is removed from each entry).

        :query limit: maximum number of tasks
        :query workername: filter task by workername
        :query taskname: filter tasks by taskname
        :query state: filter tasks by state
        :reqheader Authorization: optional OAuth token to authenticate
        :statuscode 200: no error
        :statuscode 401: unauthorized request
        """
        app = self.application
        limit = self.get_argument('limit', None)
        worker = self.get_argument('workername', None)
        # Local renamed from 'type' so the builtin is not shadowed; the
        # keyword passed to iter_tasks() is unchanged.
        task_type = self.get_argument('taskname', None)
        state = self.get_argument('state', None)

        # Falsy limit (None or '') passes through unchanged, as before.
        limit = int(limit) if limit else limit
        # 'All' means "no filter" -- presumably sent by the UI dropdowns.
        worker = worker if worker != 'All' else None
        task_type = task_type if task_type != 'All' else None
        state = state if state != 'All' else None

        result = []
        for task_id, task in tasks.iter_tasks(
                app.events, limit=limit, type=task_type,
                worker=worker, state=state):
            task = tasks.as_dict(task)
            # Drop the worker object so the payload holds plain values only.
            task.pop('worker', None)
            result.append((task_id, task))
        self.write(dict(result))
class ListTaskTypes(BaseTaskHandler):
    """Expose the set of task names seen on the event stream."""

    @web.authenticated
    def get(self):
        """
        List (seen) task types.

        :reqheader Authorization: optional OAuth token to authenticate
        :statuscode 200: no error
        :statuscode 401: unauthorized request
        """
        seen_task_types = self.application.events.state.task_types()
        self.write({'task-types': seen_task_types})
class TaskInfo(BaseTaskHandler):
    """Return every known attribute of a single task."""

    @web.authenticated
    def get(self, taskid):
        """
        Get a task info by id.

        :reqheader Authorization: optional OAuth token to authenticate
        :statuscode 200: no error
        :statuscode 401: unauthorized request
        :statuscode 404: unknown task
        """
        task = tasks.get_task_by_id(self.application.events, taskid)
        if not task:
            raise HTTPError(404, "Unknown task '%s'" % taskid)
        # 'uuid' and 'worker' are reported under dedicated keys below.
        response = {name: getattr(task, name, None)
                    for name in task._fields
                    if name not in ('uuid', 'worker')}
        response['task-id'] = task.uuid
        if task.worker is not None:
            response['worker'] = task.worker.hostname
        self.write(response)
| 26.715232 | 76 | 0.622707 | from __future__ import absolute_import
import json
import logging
from datetime import datetime
from threading import Thread
from tornado import web
from tornado import gen
from tornado.escape import json_decode
from tornado.web import HTTPError
from celery import states
from celery.result import AsyncResult
from celery.contrib.abortable import AbortableAsyncResult
from celery.backends.base import DisabledBackend
from ..utils import tasks
from ..views import BaseHandler
from ..utils.broker import Broker
from ..api.control import ControlHandler
logger = logging.getLogger(__name__)
class BaseTaskHandler(BaseHandler):
    """Shared helpers for the task API handlers."""

    # Timestamp format accepted for the 'eta' option and string 'expires'.
    # Defined here because normalize_options() reads self.DATE_FORMAT and
    # is used by TaskApply too -- previously only TaskAsyncApply defined
    # it, so TaskApply raised AttributeError on 'eta'/string 'expires'.
    DATE_FORMAT = '%Y-%m-%d %H:%M:%S.%f'

    def get_task_args(self):
        """Decode the JSON request body into (args, kwargs, options).

        Raises HTTPError(400) on malformed JSON or a non-array "args".
        """
        try:
            body = self.request.body
            options = json_decode(body) if body else {}
        except ValueError as e:
            raise HTTPError(400, str(e))
        args = options.pop('args', [])
        kwargs = options.pop('kwargs', {})
        if not isinstance(args, (list, tuple)):
            raise HTTPError(400, 'args must be an array')
        return args, kwargs, options

    @staticmethod
    def backend_configured(result):
        """True when a real result backend is configured for *result*."""
        return not isinstance(result.backend, DisabledBackend)

    def write_error(self, status_code, **kwargs):
        # Error responses carry only the status code, no HTML body.
        self.set_status(status_code)

    def update_response_result(self, response, result):
        """Copy the task result (plus traceback on failure) into *response*."""
        if result.state == states.FAILURE:
            response.update({'result': self.safe_result(result.result),
                             'traceback': result.traceback})
        else:
            response.update({'result': self.safe_result(result.result)})

    def normalize_options(self, options):
        """Coerce 'eta'/'countdown'/'expires' options to their native types.

        Raises ValueError when a value cannot be parsed.
        """
        if 'eta' in options:
            options['eta'] = datetime.strptime(options['eta'],
                                               self.DATE_FORMAT)
        if 'countdown' in options:
            options['countdown'] = float(options['countdown'])
        if 'expires' in options:
            expires = options['expires']
            try:
                # 'expires' may be a number of seconds...
                expires = float(expires)
            except ValueError:
                # ...or an absolute timestamp string.
                expires = datetime.strptime(expires, self.DATE_FORMAT)
            options['expires'] = expires

    def safe_result(self, result):
        """Return *result* if it is JSON-encodable, else its repr()."""
        try:
            json.dumps(result)
        except TypeError:
            return repr(result)
        else:
            return result
class TaskApply(BaseTaskHandler):
    @web.authenticated
    @web.asynchronous
    def post(self, taskname):
        """
        Execute a task by name and wait for its result.

        :statuscode 200: no error
        :statuscode 401: unauthorized request
        :statuscode 404: unknown task
        """
        args, kwargs, options = self.get_task_args()
        logger.debug("Invoking a task '%s' with '%s' and '%s'",
                     taskname, args, kwargs)
        try:
            task = self.capp.tasks[taskname]
        except KeyError:
            raise HTTPError(404, "Unknown task '%s'" % taskname)
        try:
            self.normalize_options(options)
        except ValueError:
            raise HTTPError(400, 'Invalid option')
        result = task.apply_async(args=args, kwargs=kwargs, **options)
        response = {'task-id': result.task_id}
        # Hand the blocking wait off to a thread so the IOLoop stays free;
        # the response is finished from wait_results().
        th = Thread(target=self.wait_results, args=(result, response, ))
        th.start()

    def wait_results(self, result, response):
        """Block until the task completes, then finish the HTTP response."""
        result.get(propagate=False)
        self.update_response_result(response, result)
        if self.backend_configured(result):
            response.update(state=result.state)
        # NOTE(review): finish() runs on this worker thread; Tornado's
        # documented thread-safe entry point is IOLoop.add_callback --
        # confirm this pattern is safe here.
        self.finish(response)
class TaskAsyncApply(BaseTaskHandler):
    """Queue a known task and respond immediately with its id and state."""

    DATE_FORMAT = '%Y-%m-%d %H:%M:%S.%f'

    @web.authenticated
    def post(self, taskname):
        """Execute a task asynchronously; 404 when the task is unknown."""
        args, kwargs, options = self.get_task_args()
        logger.debug("Invoking a task '%s' with '%s' and '%s'",
                     taskname, args, kwargs)
        try:
            task = self.capp.tasks[taskname]
        except KeyError:
            raise HTTPError(404, "Unknown task '%s'" % taskname)
        try:
            self.normalize_options(options)
        except ValueError:
            raise HTTPError(400, 'Invalid option')
        async_result = task.apply_async(args=args, kwargs=kwargs, **options)
        response = {'task-id': async_result.task_id}
        if self.backend_configured(async_result):
            response['state'] = async_result.state
        self.write(response)
class TaskSend(BaseTaskHandler):
    @web.authenticated
    def post(self, taskname):
        """Send a task by name without requiring local registration.

        Unlike TaskApply/TaskAsyncApply, send_task does not validate the
        task name against the registry, so no 404 is possible here.
        """
        args, kwargs, options = self.get_task_args()
        logger.debug("Invoking task '%s' with '%s' and '%s'",
                     taskname, args, kwargs)
        result = self.capp.send_task(
            taskname, args=args, kwargs=kwargs, **options)
        response = {'task-id': result.task_id}
        if self.backend_configured(result):
            response.update(state=result.state)
        self.write(response)
class TaskResult(BaseTaskHandler):
    @web.authenticated
    def get(self, taskid):
        """Report the state (and result, when available) of a task.

        Responds 503 when no result backend is configured.  An optional
        'timeout' query argument (seconds, float) blocks until the
        result arrives or the timeout expires.
        """
        timeout = self.get_argument('timeout', None)
        timeout = float(timeout) if timeout is not None else None
        result = AsyncResult(taskid)
        if not self.backend_configured(result):
            raise HTTPError(503)
        response = {'task-id': taskid, 'state': result.state}
        if timeout:
            # propagate=False: a failed task is reported, not re-raised.
            result.get(timeout=timeout, propagate=False)
            self.update_response_result(response, result)
        elif result.ready():
            self.update_response_result(response, result)
        self.write(response)
class TaskAbort(BaseTaskHandler):
    @web.authenticated
    def post(self, taskid):
        """Request abortion of an abortable task.

        Responds 503 when no result backend is configured.  The reply
        only confirms the abort request was sent, not that it succeeded.
        """
        logger.info("Aborting task '%s'", taskid)
        result = AbortableAsyncResult(taskid)
        if not self.backend_configured(result):
            raise HTTPError(503)
        result.abort()
        self.write(dict(message="Aborted '%s'" % taskid))
class GetQueueLengths(BaseTaskHandler):
    @web.authenticated
    @gen.coroutine
    def get(self):
        """Report message counts for the active (or default) queues."""
        app = self.application
        broker_options = self.capp.conf.BROKER_TRANSPORT_OPTIONS
        http_api = None
        # The broker HTTP API is only consulted for the amqp transport.
        if app.transport == 'amqp' and app.options.broker_api:
            http_api = app.options.broker_api
        broker = Broker(app.capp.connection().as_uri(include_password=True),
                        http_api=http_api, broker_options=broker_options)
        queue_names = ControlHandler.get_active_queue_names()
        if not queue_names:
            # No worker reported any queue: fall back to the default queue.
            queue_names = set([self.capp.conf.CELERY_DEFAULT_QUEUE])
        queues = yield broker.queues(sorted(queue_names))
        self.write({'active_queues': queues})
class ListTasks(BaseTaskHandler):
    @web.authenticated
    def get(self):
        """List monitored tasks as a {task_id: task_dict} mapping.

        Query arguments 'limit', 'workername', 'taskname' and 'state'
        filter the listing; the literal value 'All' (or absence of the
        argument) disables the corresponding filter.
        """
        app = self.application
        limit = self.get_argument('limit', None)
        worker = self.get_argument('workername', None)
        # Renamed from 'type' to avoid shadowing the builtin; the keyword
        # argument passed to iter_tasks() below is unchanged.
        task_type = self.get_argument('taskname', None)
        state = self.get_argument('state', None)
        limit = limit and int(limit)
        worker = worker if worker != 'All' else None
        task_type = task_type if task_type != 'All' else None
        state = state if state != 'All' else None
        result = []
        for task_id, task in tasks.iter_tasks(
                app.events, limit=limit, type=task_type,
                worker=worker, state=state):
            task = tasks.as_dict(task)
            # Drop the worker entry from the serialized payload.
            task.pop('worker', None)
            result.append((task_id, task))
        self.write(dict(result))
class ListTaskTypes(BaseTaskHandler):
    @web.authenticated
    def get(self):
        """Respond with the task type names seen in the event stream."""
        seen_task_types = self.application.events.state.task_types()
        self.write({'task-types': seen_task_types})
class TaskInfo(BaseTaskHandler):
    @web.authenticated
    def get(self, taskid):
        """Return every recorded field of a single monitored task.

        404 when the task id is unknown.  The 'uuid' field is exposed
        as 'task-id' and 'worker' is flattened to its hostname.
        """
        task = tasks.get_task_by_id(self.application.events, taskid)
        if not task:
            raise HTTPError(404, "Unknown task '%s'" % taskid)
        response = {}
        for name in task._fields:
            if name not in ['uuid', 'worker']:
                response[name] = getattr(task, name, None)
        response['task-id'] = task.uuid
        if task.worker is not None:
            response['worker'] = task.worker.hostname
        self.write(response)
| true | true |
f724a3584071ba22c1c4ba5bdfbe1f6a5f6bdc1b | 3,428 | py | Python | source/draw_ising_ph.py | OminiaVincit/qphase-trans | 40e0c078dcd74282e8d8f44690433bf670bff8cb | [
"MIT"
] | null | null | null | source/draw_ising_ph.py | OminiaVincit/qphase-trans | 40e0c078dcd74282e8d8f44690433bf670bff8cb | [
"MIT"
] | null | null | null | source/draw_ising_ph.py | OminiaVincit/qphase-trans | 40e0c078dcd74282e8d8f44690433bf670bff8cb | [
"MIT"
] | null | null | null | import sys
import numpy as np
import matplotlib.pyplot as plt
import sys
import os
import time
import argparse
from visual_utils import generate_listcol
import seaborn as sns
def calculate_npent(death_scales):
    """Return the normalized Shannon entropy of *death_scales*.

    Each value is converted into a ratio of the total, the Shannon
    entropy of those ratios is accumulated, and the result is divided
    by log(total) for normalization.
    """
    total = np.sum(death_scales)
    ratios = [d / total for d in death_scales]
    entropy = -sum(r * np.log(r) for r in ratios)
    return entropy / np.log(total)
if __name__ == '__main__':
    # Check for command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('--basename', type=str, default='exp_20200217_ising')
    parser.add_argument('--res', type=str, default='results')
    parser.add_argument('--dim', type=int, default=0)
    args = parser.parse_args()
    print(args)
    resname, basename, d = args.res, args.basename, args.dim
    # NOTE(review): the 'seaborn-colorblind' style name was renamed in
    # newer matplotlib releases -- confirm the pinned matplotlib version.
    plt.style.use('seaborn-colorblind')
    #cycles = plt.rcParams['axes.prop_cycle'].by_key()['color']
    cycles = generate_listcol(option=3)
    print(cycles)
    plt.rc('font', family='serif')
    plt.rc('mathtext', fontset='cm')
    plt.rcParams['font.size'] = 16
    # Transverse-field values: one subplot panel per g.
    gs = [0.2, 0.8, 1.0, 1.2, 1.8]
    N = len(gs)
    fig, axs = plt.subplots(1, N, figsize=(3*N, 2.8), squeeze=False, sharey=True)
    axs = axs.ravel()
    #ax.set_xlabel(r"Transverse Field " r"$g$", fontsize=24)
    mk = '_'
    lstyle = 'dashed'
    sz=80
    alpha=1.0
    # System sizes; one KDE curve per L inside each panel.
    Ls = [32, 64, 128, 256, 512, 1024]
    for j in range(len(gs)):
        ax = axs[j]
        g = gs[j]
        # NOTE(review): maps g to a row index assuming a 0.05 grid
        # starting at 0.1 -- confirm against how the data was generated.
        gidx = int((g - 0.1) / 0.05)
        for i in range(len(Ls)):
            L = Ls[i]
            phfile = '{}_L_{}_ph_dim_{}.txt'.format(basename, L, d)
            phfile = os.path.join(resname, phfile)
            print(phfile)
            if os.path.isfile(phfile):
                arr = np.loadtxt(phfile)
                # Column 1: death scales; column 3: g-index per row.
                death_scales, nlist = arr[:, 1], arr[:, 3]
                ids1 = (death_scales != np.inf)
                ids2 = (nlist == gidx)
                # Elementwise AND of the two boolean masks.
                ids = ids1 * ids2
                death_scales = death_scales[ids]
                npent = calculate_npent(death_scales)
                print(arr.shape, gidx, len(death_scales), npent)
                sns.kdeplot(death_scales, legend=False, shade=True, color=cycles[i], ax=ax, label='$L$={}'.format(L))
                #sns.displot(death_scales[ids], bins=20, ax=ax)
        #ax.plot(glist, npent_list, linestyle=lstyle, label = 'e-{}'.format(L))
        #ax.plot(glist, pnorm_list, linestyle=lstyle, label = 'p-{}'.format(L))
        #ax.plot(glist, vals_list, linestyle='solid', marker='o', color=cols[i], alpha=alpha, linewidth=1.0, markersize=8, label='L={}'.format(L))
        #ax.scatter(glist, vals_list, s=sz, alpha=alpha, edgecolor='k', linewidths='1', label = 'L-{}'.format(L))
        #ax.scatter(glist, pnorm_list, s=sz, alpha=alpha, label = 'p-{}'.format(L))
        #ax.set_xlabel('Birth-scale')
        ax.set_ylabel('')
        ax.set_xticks([0.0, 0.5])
        ax.tick_params(direction='out', length=8)
        ax.set_xlim([0.0, 0.6])
        ax.set_ylim([0, 60])
        # NOTE(review): npent here is whatever the last file produced;
        # a NameError occurs if no data file matched for the first panel.
        ax.set_title('$g$={},E={:.3f}'.format(g, npent))
    axs[0].legend(fontsize=10)
    #axs[0].set_ylabel('Density')
    for figtype in ['png', 'pdf', 'svg']:
        fig_ofile = os.path.join(resname, '{}_diagram_d_{}.{}'.format(basename,d, figtype))
        plt.savefig(fig_ofile, bbox_inches='tight', format=figtype)
    plt.show()
| 36.860215 | 154 | 0.57147 | import sys
import numpy as np
import matplotlib.pyplot as plt
import sys
import os
import time
import argparse
from visual_utils import generate_listcol
import seaborn as sns
def calculate_npent(death_scales):
    """Return the normalized Shannon entropy of *death_scales*."""
    sd = np.sum(death_scales)
    npent = 0
    for d in death_scales:
        dr = d/sd
        npent -= dr*np.log(dr)
    # Normalize by log of the total so comparable across scales.
    npent = npent/np.log(sd)
    return npent
if __name__ == '__main__':
    # Parse output directory, file basename and homology dimension.
    parser = argparse.ArgumentParser()
    parser.add_argument('--basename', type=str, default='exp_20200217_ising')
    parser.add_argument('--res', type=str, default='results')
    parser.add_argument('--dim', type=int, default=0)
    args = parser.parse_args()
    print(args)
    resname, basename, d = args.res, args.basename, args.dim
    plt.style.use('seaborn-colorblind')
    cycles = generate_listcol(option=3)
    print(cycles)
    plt.rc('font', family='serif')
    plt.rc('mathtext', fontset='cm')
    plt.rcParams['font.size'] = 16
    # One subplot panel per transverse-field value g.
    gs = [0.2, 0.8, 1.0, 1.2, 1.8]
    N = len(gs)
    fig, axs = plt.subplots(1, N, figsize=(3*N, 2.8), squeeze=False, sharey=True)
    axs = axs.ravel()
    mk = '_'
    lstyle = 'dashed'
    sz=80
    alpha=1.0
    # System sizes; one KDE curve per L inside each panel.
    Ls = [32, 64, 128, 256, 512, 1024]
    for j in range(len(gs)):
        ax = axs[j]
        g = gs[j]
        # NOTE(review): assumes g lies on a 0.05 grid starting at 0.1.
        gidx = int((g - 0.1) / 0.05)
        for i in range(len(Ls)):
            L = Ls[i]
            phfile = '{}_L_{}_ph_dim_{}.txt'.format(basename, L, d)
            phfile = os.path.join(resname, phfile)
            print(phfile)
            if os.path.isfile(phfile):
                arr = np.loadtxt(phfile)
                # Column 1: death scales; column 3: g-index per row.
                death_scales, nlist = arr[:, 1], arr[:, 3]
                ids1 = (death_scales != np.inf)
                ids2 = (nlist == gidx)
                ids = ids1 * ids2
                death_scales = death_scales[ids]
                npent = calculate_npent(death_scales)
                print(arr.shape, gidx, len(death_scales), npent)
                sns.kdeplot(death_scales, legend=False, shade=True, color=cycles[i], ax=ax, label='$L$={}'.format(L))
        ax.set_ylabel('')
        ax.set_xticks([0.0, 0.5])
        ax.tick_params(direction='out', length=8)
        ax.set_xlim([0.0, 0.6])
        ax.set_ylim([0, 60])
        # NOTE(review): npent comes from the last loop iteration;
        # NameError if no data file matched for the first panel.
        ax.set_title('$g$={},E={:.3f}'.format(g, npent))
    axs[0].legend(fontsize=10)
    for figtype in ['png', 'pdf', 'svg']:
        fig_ofile = os.path.join(resname, '{}_diagram_d_{}.{}'.format(basename,d, figtype))
        plt.savefig(fig_ofile, bbox_inches='tight', format=figtype)
    plt.show()
| true | true |
f724a3bb3cfd2e2c82e1c443ccd2a3266923c550 | 5,090 | py | Python | src/niveristand/clientapi/realtimesequencedefinition.py | arnoldcsorvasi/niveristand-python | 39e5593e10bb372c801d6fa521e8fc166dab8cfe | [
"MIT"
] | 6 | 2018-07-04T10:59:43.000Z | 2022-03-24T13:34:33.000Z | src/niveristand/clientapi/realtimesequencedefinition.py | arnoldcsorvasi/niveristand-python | 39e5593e10bb372c801d6fa521e8fc166dab8cfe | [
"MIT"
] | 14 | 2018-11-05T20:05:33.000Z | 2022-03-10T12:54:58.000Z | src/niveristand/clientapi/realtimesequencedefinition.py | arnoldcsorvasi/niveristand-python | 39e5593e10bb372c801d6fa521e8fc166dab8cfe | [
"MIT"
] | 15 | 2018-07-04T07:58:49.000Z | 2022-02-22T16:35:26.000Z | import os
from niveristand import _errormessages, errors
from niveristand import _internal
from niveristand._translation.py2rtseq.utils import _py_param_name_to_rtseq_param_name
from niveristand.clientapi import stimulusprofileapi
from niveristand.clientapi._factory import _DefaultGatewayFactory
from niveristand.clientapi._sequencecallinfo import _SequenceCallInfoFactory
from niveristand.clientapi._sequenceparameterassignmentinfo import _SequenceParameterAssignmentInfoFactory
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import Expression # noqa: I100
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import ForEachLoop
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import ForLoop
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import GenerateError
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import IfElse
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import LocalDeclaration
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import Multitask
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import RealTimeSequence
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import ReturnDeclaration
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import StopTask
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import Task
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import WhileLoop
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import Yield
from System.IO import IOException
# NOTE(review): presumably forces _internal's import-time side effects /
# keeps the import from being flagged unused -- confirm.
_internal.dummy()
# Module-level placeholders; assigned elsewhere at runtime.
factory = None
workspace = None
def add_local_variable(rt_seq, name, value):
    """Declare a uniquely named local variable on *rt_seq*; return its name."""
    name = _create_unique_lv_name(name)
    local_declaration = LocalDeclaration(name, value._data_value)
    rt_seq.Variables.LocalVariables.AddLocalVariable(local_declaration)
    return name
def add_assignment(block, dest_name, source_name):
    """Append an assignment statement ``dest_name = source_name`` to *block*."""
    add_expression(block, '%s = %s' % (dest_name, source_name))
def add_expression(block, expression):
    """Append *expression* to *block* as an Expression statement."""
    block.AddStatement(Expression('%s' % expression))
def add_yield(block):
    """Append a Yield statement to *block*."""
    block.AddStatement(Yield())
def add_if_else(block, test_condition):
    """Append an IfElse guarded by *test_condition*; return it for population."""
    if_else = IfElse(Expression(test_condition))
    block.AddStatement(if_else)
    return if_else
def add_for_loop(block, loop_variable, iterations):
    """Append a ForLoop running *iterations* times; return it."""
    for_loop = ForLoop(loop_variable, Expression(str(iterations)), False)
    block.AddStatement(for_loop)
    return for_loop
def add_foreach_loop(block, loop_variable, iterations):
    """Append a ForEachLoop over *iterations*; return it."""
    foreach_loop = ForEachLoop(loop_variable, Expression(str(iterations)), False)
    block.AddStatement(foreach_loop)
    return foreach_loop
def add_while(block, test_condition):
    """Append a WhileLoop guarded by *test_condition*; return it."""
    while_block = WhileLoop(Expression(test_condition), False)
    block.AddStatement(while_block)
    return while_block
def add_multi_task(block):
    """Append a Multitask container to *block*; return it."""
    multi_task = Multitask()
    block.AddStatement(multi_task)
    return multi_task
def add_task(multi_task, name):
    """Add a named Task to *multi_task*; return the task's body block."""
    task = Task(name)
    multi_task.AddTask(task)
    return task.Body
def create_real_time_sequence():
    """Return a new, empty RealTimeSequence."""
    return RealTimeSequence()
def add_return_variable(rtseq, name, default_value):
    """Declare the sequence return variable; return its generated name."""
    name = _create_unique_lv_name(name)
    return_declaration = ReturnDeclaration(name, default_value._data_value)
    rtseq.Variables.ReturnType = return_declaration
    return name
def add_generate_error(block, code, message, action):
    """Append a GenerateError statement (code, message, action) to *block*."""
    block.AddStatement(GenerateError(code, message, action))
def add_stop_task(block, taskname):
    """Append a StopTask statement for *taskname* to *block*."""
    block.AddStatement(StopTask(taskname))
def save_real_time_sequence(rtseq, filepath):
    """Persist *rtseq* to *filepath*; re-raise .NET IOException as IOError."""
    try:
        # NOTE(review): os.path.join with one argument is a no-op --
        # possibly a leftover from an earlier multi-part path.
        rtseq.SaveSequence(os.path.join(filepath))
    except(IOException) as e:
        raise IOError(e.Message)
def _create_unique_lv_name(name):
try:
_create_unique_lv_name.lv_cnt += 1
except AttributeError:
_create_unique_lv_name.lv_cnt = 0
if name is None:
name = ''
name = 'lv_' + name + '_' + str(_create_unique_lv_name.lv_cnt)
_create_unique_lv_name.lv_cnt += 1
return name
def to_channel_ref_name(name):
    """Prefix *name* with ``ch_`` to form a channel-reference identifier."""
    prefix = "ch_"
    return prefix + name
def _get_channel_node_info(name, node_info_list):
for channel in node_info_list:
if channel.FullPath == name:
return channel
raise errors.VeristandError(_errormessages.channel_not_found % name)
def run_rt_sequence(rt_sequence_path, rtseq_params):
    """Deploy and start the real-time sequence at *rt_sequence_path*.

    *rtseq_params* maps Python-style parameter names to values; each is
    converted into an RT-sequence parameter assignment.  Returns the
    StimulusProfileState whose completion handler is wired to the
    sequence control.
    """
    rtseq_params = \
        [_SequenceParameterAssignmentInfoFactory.create(_py_param_name_to_rtseq_param_name(key), rtseq_params[key])
         for key in rtseq_params]
    # NOTE(review): 100000 looks like a timeout passed to the call info --
    # confirm the units and meaning against _SequenceCallInfoFactory.
    seq_call_info = _SequenceCallInfoFactory.create(rt_sequence_path, None, rtseq_params, False, 100000)
    session = _DefaultGatewayFactory.get_new_stimulus_profile_session(rt_sequence_path, [seq_call_info], "")
    # The control is keyed by "<sequence file stem>:1" inside the session.
    sequence_control = session[os.path.splitext(os.path.basename(rt_sequence_path))[0] + ":1"]
    state = stimulusprofileapi.StimulusProfileState(session)
    sequence_control.register_sequence_complete_event_handler(state._sequence_complete_event_handler)
    session.deploy(True)
    return state
| 34.863014 | 115 | 0.803929 | import os
from niveristand import _errormessages, errors
from niveristand import _internal
from niveristand._translation.py2rtseq.utils import _py_param_name_to_rtseq_param_name
from niveristand.clientapi import stimulusprofileapi
from niveristand.clientapi._factory import _DefaultGatewayFactory
from niveristand.clientapi._sequencecallinfo import _SequenceCallInfoFactory
from niveristand.clientapi._sequenceparameterassignmentinfo import _SequenceParameterAssignmentInfoFactory
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import Expression
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import ForEachLoop
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import ForLoop
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import GenerateError
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import IfElse
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import LocalDeclaration
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import Multitask
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import RealTimeSequence
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import ReturnDeclaration
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import StopTask
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import Task
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import WhileLoop
from NationalInstruments.VeriStand.RealTimeSequenceDefinitionApi import Yield
from System.IO import IOException
# NOTE(review): presumably triggers _internal's side effects -- confirm.
_internal.dummy()
# Module-level placeholders; assigned elsewhere at runtime.
factory = None
workspace = None
def add_local_variable(rt_seq, name, value):
    """Declare a uniquely named local variable on *rt_seq*; return its name."""
    name = _create_unique_lv_name(name)
    local_declaration = LocalDeclaration(name, value._data_value)
    rt_seq.Variables.LocalVariables.AddLocalVariable(local_declaration)
    return name
def add_assignment(block, dest_name, source_name):
    """Append an assignment statement ``dest_name = source_name``."""
    add_expression(block, '%s = %s' % (dest_name, source_name))
def add_expression(block, expression):
    """Append *expression* to *block* as an Expression statement."""
    block.AddStatement(Expression('%s' % expression))
def add_yield(block):
    """Append a Yield statement to *block*."""
    block.AddStatement(Yield())
def add_if_else(block, test_condition):
    """Append an IfElse guarded by *test_condition*; return it."""
    if_else = IfElse(Expression(test_condition))
    block.AddStatement(if_else)
    return if_else
def add_for_loop(block, loop_variable, iterations):
    """Append a ForLoop running *iterations* times; return it."""
    for_loop = ForLoop(loop_variable, Expression(str(iterations)), False)
    block.AddStatement(for_loop)
    return for_loop
def add_foreach_loop(block, loop_variable, iterations):
    """Append a ForEachLoop over *iterations*; return it."""
    foreach_loop = ForEachLoop(loop_variable, Expression(str(iterations)), False)
    block.AddStatement(foreach_loop)
    return foreach_loop
def add_while(block, test_condition):
    """Append a WhileLoop guarded by *test_condition*; return it."""
    while_block = WhileLoop(Expression(test_condition), False)
    block.AddStatement(while_block)
    return while_block
def add_multi_task(block):
    """Append a Multitask container to *block*; return it."""
    multi_task = Multitask()
    block.AddStatement(multi_task)
    return multi_task
def add_task(multi_task, name):
    """Add a named Task to *multi_task*; return the task's body block."""
    task = Task(name)
    multi_task.AddTask(task)
    return task.Body
def create_real_time_sequence():
    """Return a new, empty RealTimeSequence."""
    return RealTimeSequence()
def add_return_variable(rtseq, name, default_value):
    """Declare the sequence return variable; return its generated name."""
    name = _create_unique_lv_name(name)
    return_declaration = ReturnDeclaration(name, default_value._data_value)
    rtseq.Variables.ReturnType = return_declaration
    return name
def add_generate_error(block, code, message, action):
    """Append a GenerateError statement (code, message, action)."""
    block.AddStatement(GenerateError(code, message, action))
def add_stop_task(block, taskname):
    """Append a StopTask statement for *taskname*."""
    block.AddStatement(StopTask(taskname))
def save_real_time_sequence(rtseq, filepath):
    """Persist *rtseq* to *filepath*; re-raise .NET IOException as IOError."""
    try:
        rtseq.SaveSequence(os.path.join(filepath))
    except(IOException) as e:
        raise IOError(e.Message)
def _create_unique_lv_name(name):
    """Return a unique local-variable name ``lv_<name>_<n>``.

    The counter lives as a function attribute.  Note it is advanced twice
    per call (before and after use), so values are skipped but names stay
    unique.
    """
    try:
        _create_unique_lv_name.lv_cnt += 1
    except AttributeError:
        _create_unique_lv_name.lv_cnt = 0
    if name is None:
        name = ''
    name = 'lv_' + name + '_' + str(_create_unique_lv_name.lv_cnt)
    _create_unique_lv_name.lv_cnt += 1
    return name
def to_channel_ref_name(name):
    """Prefix *name* with ``ch_`` to form a channel-reference identifier."""
    return "ch_" + name
def _get_channel_node_info(name, node_info_list):
    """Return the first node whose FullPath equals *name*; raise otherwise."""
    for channel in node_info_list:
        if channel.FullPath == name:
            return channel
    raise errors.VeristandError(_errormessages.channel_not_found % name)
def run_rt_sequence(rt_sequence_path, rtseq_params):
    """Deploy and start the real-time sequence at *rt_sequence_path*."""
    rtseq_params = \
        [_SequenceParameterAssignmentInfoFactory.create(_py_param_name_to_rtseq_param_name(key), rtseq_params[key])
         for key in rtseq_params]
    seq_call_info = _SequenceCallInfoFactory.create(rt_sequence_path, None, rtseq_params, False, 100000)
    session = _DefaultGatewayFactory.get_new_stimulus_profile_session(rt_sequence_path, [seq_call_info], "")
    # The control is keyed by "<sequence file stem>:1" inside the session.
    sequence_control = session[os.path.splitext(os.path.basename(rt_sequence_path))[0] + ":1"]
    state = stimulusprofileapi.StimulusProfileState(session)
    sequence_control.register_sequence_complete_event_handler(state._sequence_complete_event_handler)
    session.deploy(True)
    return state
| true | true |
f724a43ca95266c2f6ed70bcb679da48a9313bcc | 1,756 | py | Python | dargor/colored_tracebacks.py | dargor/dargor-py | 54b97ac5aaeadd0535fdc492407015c770a5fd67 | [
"0BSD"
] | null | null | null | dargor/colored_tracebacks.py | dargor/dargor-py | 54b97ac5aaeadd0535fdc492407015c770a5fd67 | [
"0BSD"
] | null | null | null | dargor/colored_tracebacks.py | dargor/dargor-py | 54b97ac5aaeadd0535fdc492407015c770a5fd67 | [
"0BSD"
] | null | null | null | #
# Copyright (c) 2020, Gabriel Linder <linder.gabriel@gmail.com>
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
# PERFORMANCE OF THIS SOFTWARE.
#
import asyncio
import sys
import traceback
from contextlib import suppress
from pygments import highlight
from pygments.formatters import Terminal256Formatter
from pygments.lexers import Python3TracebackLexer
def excepthook(exc_type, exc_value, exc_traceback):
    """Print a syntax-highlighted version of the traceback to stderr."""
    lines = traceback.format_exception(exc_type,
                                       exc_value,
                                       exc_traceback)
    text = ''.join(lines)
    lexer = Python3TracebackLexer(stripall=True, tabsize=4)
    formatter = Terminal256Formatter(style='vim', bg='dark')
    colored = highlight(text, lexer, formatter)
    print(colored.strip(), file=sys.stderr)
def asyncio_exception_handler(loop, context):
    """Colorize the traceback of an unhandled asyncio exception, then
    delegate to the loop's default handler.
    """
    try:
        exc = context['exception']
        excepthook(type(exc), exc, exc.__traceback__)
    except KeyError:
        # No exception recorded in this context; nothing to colorize.
        pass
    loop.default_exception_handler(context)
def install():
    """Install colored traceback printing for sync code and asyncio.

    NOTE(review): asyncio.get_event_loop() is deprecated when called
    without a running event loop (Python 3.10+) -- confirm the Python
    versions this package supports.
    """
    sys.excepthook = excepthook
    loop = asyncio.get_event_loop()
    loop.set_exception_handler(asyncio_exception_handler)
| 37.361702 | 79 | 0.735763 |
import asyncio
import sys
import traceback
from contextlib import suppress
from pygments import highlight
from pygments.formatters import Terminal256Formatter
from pygments.lexers import Python3TracebackLexer
def excepthook(exc_type, exc_value, exc_traceback):
    """Print a syntax-highlighted version of the traceback to stderr."""
    tb = ''.join(traceback.format_exception(exc_type,
                                            exc_value,
                                            exc_traceback))
    lexer = Python3TracebackLexer(stripall=True, tabsize=4)
    formatter = Terminal256Formatter(style='vim', bg='dark')
    print(highlight(tb, lexer, formatter).strip(), file=sys.stderr)
def asyncio_exception_handler(loop, context):
    """Colorize unhandled asyncio exceptions, then run the default handler."""
    with suppress(KeyError):
        e = context['exception']
        excepthook(type(e), e, e.__traceback__)
    loop.default_exception_handler(context)
def install():
    """Install colored tracebacks for sync code and the asyncio loop.

    NOTE(review): asyncio.get_event_loop() is deprecated without a
    running loop on Python 3.10+ -- confirm supported versions.
    """
    sys.excepthook = excepthook
    loop = asyncio.get_event_loop()
    loop.set_exception_handler(asyncio_exception_handler)
| true | true |
f724a4c8c450af916d151c5bf8044ebed35a78ce | 5,230 | py | Python | NASA SPACEAPPS CHALLENGE/Solution/Software part/Astronomical Data and Python Libraries/Openpyxl/openpyxl-2.4.0-a1/openpyxl/writer/worksheet.py | sahirsharma/Martian | 062e9b47849512863c16713811f347ad7e121b56 | [
"MIT"
] | 7 | 2016-12-12T02:29:42.000Z | 2020-05-12T21:21:21.000Z | NASA SPACEAPPS CHALLENGE/Solution/Software part/Astronomical Data and Python Libraries/Openpyxl/openpyxl-2.4.0-a1/openpyxl/writer/worksheet.py | sahirsharma/Martian | 062e9b47849512863c16713811f347ad7e121b56 | [
"MIT"
] | 31 | 2017-01-05T06:07:28.000Z | 2018-05-27T13:13:06.000Z | NASA SPACEAPPS CHALLENGE/Solution/Software part/Astronomical Data and Python Libraries/Openpyxl/openpyxl-2.4.0-a1/openpyxl/writer/worksheet.py | sahirsharma/Martian | 062e9b47849512863c16713811f347ad7e121b56 | [
"MIT"
] | 3 | 2017-12-21T23:30:12.000Z | 2019-01-03T20:51:52.000Z | from __future__ import absolute_import
# Copyright (c) 2010-2016 openpyxl
"""Write worksheets to xml representations."""
# Python stdlib imports
from io import BytesIO
from openpyxl import LXML
# package imports
from openpyxl.xml.functions import (
Element,
xmlfile,
)
from openpyxl.xml.constants import SHEET_MAIN_NS
from openpyxl.formatting import ConditionalFormatting
from openpyxl.styles.differential import DifferentialStyle
from openpyxl.packaging.relationship import Relationship
from openpyxl.worksheet.merge import MergeCells, MergeCell
from openpyxl.worksheet.properties import WorksheetProperties
from openpyxl.worksheet.hyperlink import Hyperlink
from openpyxl.worksheet.related import Related
from openpyxl.worksheet.header_footer import HeaderFooter
from openpyxl.worksheet.dimensions import (
SheetFormatProperties,
SheetDimension,
)
from .etree_worksheet import write_cell
def write_mergecells(worksheet):
    """Serialize the sheet's merged cell ranges; None when there are none."""
    refs = worksheet._merged_cells
    if not refs:
        return
    cells = [MergeCell(ref) for ref in refs]
    return MergeCells(mergeCell=cells).to_tree()
def write_conditional_formatting(worksheet):
    """Yield one <conditionalFormatting> element per rule range.

    Any rule carrying a non-default differential style is appended to
    the workbook's shared style list and referenced by index (dxfId);
    the order of appends therefore determines the indices.
    """
    wb = worksheet.parent
    for range_string, rules in worksheet.conditional_formatting.cf_rules.items():
        cf = Element('conditionalFormatting', {'sqref': range_string})
        for rule in rules:
            if rule.dxf is not None:
                # Only register styles that differ from the default.
                if rule.dxf != DifferentialStyle():
                    rule.dxfId = len(wb._differential_styles)
                    wb._differential_styles.append(rule.dxf)
            cf.append(rule.to_tree())
        yield cf
def write_hyperlinks(worksheet):
    """Serialize worksheet hyperlinks; None when the sheet has none.

    External links (those with a target) are registered as sheet
    relationships and given a matching rId; links without a target
    (internal references) get no relationship.
    """
    if not worksheet._hyperlinks:
        return
    tag = Element('hyperlinks')
    for link in worksheet._hyperlinks:
        if link.target:
            rel = Relationship(type="hyperlink", TargetMode="External", Target=link.target)
            worksheet._rels.append(rel)
            # The relationship id is its 1-based position in _rels.
            link.id = "rId{0}".format(len(worksheet._rels))
        tag.append(link.to_tree())
    return tag
def write_drawing(worksheet):
    """Emit a <drawing> relation element when the sheet has charts or
    images, registering the relationship on the sheet; None otherwise.
    """
    if not (worksheet._charts or worksheet._images):
        return None
    rel = Relationship(type="drawing", Target="")
    worksheet._rels.append(rel)
    drawing = Related()
    drawing.id = "rId%s" % len(worksheet._rels)
    return drawing.to_tree("drawing")
def write_worksheet(worksheet, shared_strings):
    """Serialize a worksheet to XML bytes.

    Elements are emitted in the order the OOXML schema requires.
    NOTE(review): shared_strings is accepted but never referenced here --
    presumably kept for interface parity with callers; confirm.
    """
    ws = worksheet
    ws._rels = []
    ws._hyperlinks = []
    # Pick the faster lxml writer when lxml is available.
    if LXML is True:
        from .lxml_worksheet import write_cell, write_rows
    else:
        from .etree_worksheet import write_cell, write_rows
    out = BytesIO()
    with xmlfile(out) as xf:
        with xf.element('worksheet', xmlns=SHEET_MAIN_NS):
            props = ws.sheet_properties.to_tree()
            xf.write(props)
            dim = SheetDimension(ref=ws.calculate_dimension())
            xf.write(dim.to_tree())
            xf.write(ws.views.to_tree())
            cols = ws.column_dimensions.to_tree()
            # Column outline level must be synced before sheetFormatPr.
            ws.sheet_format.outlineLevelCol = ws.column_dimensions.max_outline
            xf.write(ws.sheet_format.to_tree())
            if cols is not None:
                xf.write(cols)
            # write data
            write_rows(xf, ws)
            if ws.protection.sheet:
                xf.write(ws.protection.to_tree())
            if ws.auto_filter:
                xf.write(ws.auto_filter.to_tree())
            if ws.sort_state:
                xf.write(ws.sort_state.to_tree())
            merge = write_mergecells(ws)
            if merge is not None:
                xf.write(merge)
            cfs = write_conditional_formatting(ws)
            for cf in cfs:
                xf.write(cf)
            if ws.data_validations:
                xf.write(ws.data_validations.to_tree())
            hyper = write_hyperlinks(ws)
            if hyper is not None:
                xf.write(hyper)
            options = ws.print_options
            # dict(...) is truthy only when at least one option is set.
            if dict(options):
                new_element = options.to_tree()
                xf.write(new_element)
            margins = ws.page_margins.to_tree()
            xf.write(margins)
            setup = ws.page_setup
            if dict(setup):
                new_element = setup.to_tree()
                xf.write(new_element)
            if bool(ws.HeaderFooter):
                xf.write(ws.HeaderFooter.to_tree())
            drawing = write_drawing(ws)
            if drawing is not None:
                xf.write(drawing)
            # if there is an existing vml file associated with this sheet or if there
            # are any comments we need to add a legacyDrawing relation to the vml file.
            if (ws.legacy_drawing is not None or ws._comments):
                legacyDrawing = Related(id="anysvml")
                xml = legacyDrawing.to_tree("legacyDrawing")
                xf.write(xml)
            if ws.page_breaks:
                xf.write(ws.page_breaks.to_tree())
    xml = out.getvalue()
    out.close()
    return xml
| 28.895028 | 91 | 0.624092 | from __future__ import absolute_import
from io import BytesIO
from openpyxl import LXML
from openpyxl.xml.functions import (
Element,
xmlfile,
)
from openpyxl.xml.constants import SHEET_MAIN_NS
from openpyxl.formatting import ConditionalFormatting
from openpyxl.styles.differential import DifferentialStyle
from openpyxl.packaging.relationship import Relationship
from openpyxl.worksheet.merge import MergeCells, MergeCell
from openpyxl.worksheet.properties import WorksheetProperties
from openpyxl.worksheet.hyperlink import Hyperlink
from openpyxl.worksheet.related import Related
from openpyxl.worksheet.header_footer import HeaderFooter
from openpyxl.worksheet.dimensions import (
SheetFormatProperties,
SheetDimension,
)
from .etree_worksheet import write_cell
def write_mergecells(worksheet):
    """Serialize the sheet's merged cell ranges; None when there are none."""
    merged = [MergeCell(ref) for ref in worksheet._merged_cells]
    if not merged:
        return
    return MergeCells(mergeCell=merged).to_tree()
def write_conditional_formatting(worksheet):
    """Yield one <conditionalFormatting> element per rule range.

    Non-default differential styles are appended to the workbook's
    shared style list and referenced by index (dxfId).
    """
    wb = worksheet.parent
    for range_string, rules in worksheet.conditional_formatting.cf_rules.items():
        cf = Element('conditionalFormatting', {'sqref': range_string})
        for rule in rules:
            if rule.dxf is not None:
                if rule.dxf != DifferentialStyle():
                    rule.dxfId = len(wb._differential_styles)
                    wb._differential_styles.append(rule.dxf)
            cf.append(rule.to_tree())
        yield cf
def write_hyperlinks(worksheet):
    """Serialize hyperlinks, registering external targets as relationships."""
    if not worksheet._hyperlinks:
        return
    tag = Element('hyperlinks')
    for link in worksheet._hyperlinks:
        if link.target:
            rel = Relationship(type="hyperlink", TargetMode="External", Target=link.target)
            worksheet._rels.append(rel)
            # The relationship id is its 1-based position in _rels.
            link.id = "rId{0}".format(len(worksheet._rels))
        tag.append(link.to_tree())
    return tag
def write_drawing(worksheet):
    """Emit a drawing relation when the sheet has charts or images."""
    if worksheet._charts or worksheet._images:
        rel = Relationship(type="drawing", Target="")
        worksheet._rels.append(rel)
        drawing = Related()
        drawing.id = "rId%s" % len(worksheet._rels)
        return drawing.to_tree("drawing")
def write_worksheet(worksheet, shared_strings):
    """Serialize a worksheet to XML bytes.

    NOTE(review): shared_strings is accepted but never referenced --
    presumably kept for interface parity with callers; confirm.
    """
    ws = worksheet
    ws._rels = []
    ws._hyperlinks = []
    # Pick the faster lxml writer when lxml is available.
    if LXML is True:
        from .lxml_worksheet import write_cell, write_rows
    else:
        from .etree_worksheet import write_cell, write_rows
    out = BytesIO()
    with xmlfile(out) as xf:
        with xf.element('worksheet', xmlns=SHEET_MAIN_NS):
            props = ws.sheet_properties.to_tree()
            xf.write(props)
            dim = SheetDimension(ref=ws.calculate_dimension())
            xf.write(dim.to_tree())
            xf.write(ws.views.to_tree())
            cols = ws.column_dimensions.to_tree()
            # Column outline level must be synced before sheetFormatPr.
            ws.sheet_format.outlineLevelCol = ws.column_dimensions.max_outline
            xf.write(ws.sheet_format.to_tree())
            if cols is not None:
                xf.write(cols)
            write_rows(xf, ws)
            if ws.protection.sheet:
                xf.write(ws.protection.to_tree())
            if ws.auto_filter:
                xf.write(ws.auto_filter.to_tree())
            if ws.sort_state:
                xf.write(ws.sort_state.to_tree())
            merge = write_mergecells(ws)
            if merge is not None:
                xf.write(merge)
            cfs = write_conditional_formatting(ws)
            for cf in cfs:
                xf.write(cf)
            if ws.data_validations:
                xf.write(ws.data_validations.to_tree())
            hyper = write_hyperlinks(ws)
            if hyper is not None:
                xf.write(hyper)
            options = ws.print_options
            # dict(...) is truthy only when at least one option is set.
            if dict(options):
                new_element = options.to_tree()
                xf.write(new_element)
            margins = ws.page_margins.to_tree()
            xf.write(margins)
            setup = ws.page_setup
            if dict(setup):
                new_element = setup.to_tree()
                xf.write(new_element)
            if bool(ws.HeaderFooter):
                xf.write(ws.HeaderFooter.to_tree())
            drawing = write_drawing(ws)
            if drawing is not None:
                xf.write(drawing)
            # A legacyDrawing relation is needed for existing VML/comments.
            if (ws.legacy_drawing is not None or ws._comments):
                legacyDrawing = Related(id="anysvml")
                xml = legacyDrawing.to_tree("legacyDrawing")
                xf.write(xml)
            if ws.page_breaks:
                xf.write(ws.page_breaks.to_tree())
    xml = out.getvalue()
    out.close()
    return xml
| true | true |
f724a5ea1a9b0a0227598c547ffae0a1b2f3abd4 | 14,218 | py | Python | experimental/webserver/common/wsgi.py | manuparra/oc2dm | 5459c1fdde909fdd4d59b3ad29d7d5c962b23694 | [
"MIT"
] | 1 | 2019-11-27T18:34:12.000Z | 2019-11-27T18:34:12.000Z | experimental/webserver/common/wsgi.py | manuparra/oc2dm | 5459c1fdde909fdd4d59b3ad29d7d5c962b23694 | [
"MIT"
] | null | null | null | experimental/webserver/common/wsgi.py | manuparra/oc2dm | 5459c1fdde909fdd4d59b3ad29d7d5c962b23694 | [
"MIT"
] | 1 | 2020-04-10T14:44:44.000Z | 2020-04-10T14:44:44.000Z | # Copyright 2017 DiCTIS UGR
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Utility methods for working with WSGI servers
"""
from __future__ import print_function
import errno
import functools
import os
import signal
import sys
import eventlet
import eventlet.greenio
import eventlet.wsgi
from eventlet.green import socket
from omlcc_catalog.common import config
from omlcc_catalog.common import exception
# oslo.config option definitions for the WSGI layer.
# NOTE(review): `cfg`, `_` and `logging` are not imported in this chunk —
# presumably provided by oslo.config / oslo.log / i18n imports elsewhere; verify.
wsgi_opts = [
    cfg.StrOpt('secure_proxy_ssl_header',
               deprecated_for_removal=True,
               deprecated_reason=_('Use the http_proxy_to_wsgi middleware '
                                   'instead.'),
               help=_('The HTTP header used to determine the scheme for the '
                      'original request, even if it was removed by an SSL '
                      'terminating proxy. Typical value is '
                      '"HTTP_X_FORWARDED_PROTO".')),
]
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
# Register every option group with the global config object.
# NOTE(review): bind_opts, socket_opts and eventlet_opts are not defined in
# this chunk — confirm they are declared elsewhere in the module.
CONF.register_opts(bind_opts)
CONF.register_opts(socket_opts)
CONF.register_opts(eventlet_opts)
CONF.register_opts(wsgi_opts)
def set_eventlet_hub():
    """Select an eventlet hub, preferring 'poll' with 'selects' as fallback.

    Raises exception.WorkerCreationFailure when neither hub is usable on
    this platform.
    """
    for hub_name in ('poll', 'selects'):
        try:
            eventlet.hubs.use_hub(hub_name)
            return
        except Exception:
            continue
    msg = _("eventlet 'poll' nor 'selects' hubs are available "
            "on this platform")
    raise exception.WorkerCreationFailure(reason=msg)
class Server(object):
    """Server class to manage multiple WSGI sockets and applications.
    This class requires initialize_glance_store set to True if
    glance store needs to be initialized.
    """
    def __init__(self, threads=1000, initialize_glance_store=False):
        """Prepare worker bookkeeping and claim a process group.

        :param threads: greenthread pool size for each worker's WSGI server
        :param initialize_glance_store: re-initialize glance_store on reload
        """
        os.umask(0o27)  # ensure files are created with the correct privileges
        self._logger = logging.getLogger("eventlet.wsgi.server")
        self.threads = threads
        self.children = set()  # pids of live worker processes
        self.stale_children = set()  # pids of workers draining after a reload
        self.running = True
        # NOTE(abhishek): Allows us to only re-initialize glance_store when
        # the API's configuration reloads.
        self.initialize_glance_store = initialize_glance_store
        self.pgid = os.getpid()
        try:
            # NOTE(flaper87): Make sure this process
            # runs in its own process group.
            os.setpgid(self.pgid, self.pgid)
        except OSError:
            # NOTE(flaper87): When running glance-control,
            # (glance's functional tests, for example)
            # setpgid fails with EPERM as glance-control
            # creates a fresh session, of which the newly
            # launched service becomes the leader (session
            # leaders may not change process groups)
            #
            # Running glance-(api|registry) is safe and
            # shouldn't raise any error here.
            self.pgid = 0
    def hup(self, *args):
        """
        Reloads configuration files with zero down time
        """
        signal.signal(signal.SIGHUP, signal.SIG_IGN)
        raise exception.SIGHUPInterrupt
    def kill_children(self, *args):
        """Kills the entire process group."""
        signal.signal(signal.SIGTERM, signal.SIG_IGN)
        signal.signal(signal.SIGINT, signal.SIG_IGN)
        self.running = False
        os.killpg(self.pgid, signal.SIGTERM)
    def start(self, application, default_port):
        """
        Run a WSGI server with the given application.
        :param application: The application to be run in the WSGI server
        :param default_port: Port to bind to if none is specified in conf
        """
        self.application = application
        self.default_port = default_port
        self.configure()
        self.start_wsgi()
    def start_wsgi(self):
        """Spawn worker children, or serve in-process when workers == 0."""
        workers = get_num_workers()
        if workers == 0:
            # Useful for profiling, test, debug etc.
            self.pool = self.create_pool()
            self.pool.spawn_n(self._single_run, self.application, self.sock)
            return
        else:
            LOG.info(_LI("Starting %d workers"), workers)
            signal.signal(signal.SIGTERM, self.kill_children)
            signal.signal(signal.SIGINT, self.kill_children)
            signal.signal(signal.SIGHUP, self.hup)
            while len(self.children) < workers:
                self.run_child()
    def create_pool(self):
        """Return a new eventlet pool sized by self.threads."""
        return get_asynchronous_eventlet_pool(size=self.threads)
    def _remove_children(self, pid):
        """Forget a reaped child pid, whether live, stale, or unknown."""
        if pid in self.children:
            self.children.remove(pid)
            LOG.info(_LI('Removed dead child %s'), pid)
        elif pid in self.stale_children:
            self.stale_children.remove(pid)
            LOG.info(_LI('Removed stale child %s'), pid)
        else:
            LOG.warn(_LW('Unrecognised child %s') % pid)
    def _verify_and_respawn_children(self, pid, status):
        """Respawn a dead worker unless it exited with a non-zero status."""
        if len(self.stale_children) == 0:
            LOG.debug('No stale children')
        if os.WIFEXITED(status) and os.WEXITSTATUS(status) != 0:
            LOG.error(_LE('Not respawning child %d, cannot '
                          'recover from termination') % pid)
            if not self.children and not self.stale_children:
                LOG.info(
                    _LI('All workers have terminated. Exiting'))
                self.running = False
        else:
            if len(self.children) < get_num_workers():
                self.run_child()
    def wait_on_children(self):
        """Reap children until shutdown, respawning or reloading as needed."""
        while self.running:
            try:
                pid, status = os.wait()
                if os.WIFEXITED(status) or os.WIFSIGNALED(status):
                    self._remove_children(pid)
                    self._verify_and_respawn_children(pid, status)
            except OSError as err:
                if err.errno not in (errno.EINTR, errno.ECHILD):
                    raise
            except KeyboardInterrupt:
                LOG.info(_LI('Caught keyboard interrupt. Exiting.'))
                break
            except exception.SIGHUPInterrupt:
                self.reload()
                continue
        eventlet.greenio.shutdown_safe(self.sock)
        self.sock.close()
        LOG.debug('Exited')
    def configure(self, old_conf=None, has_changed=None):
        """
        Apply configuration settings
        :param old_conf: Cached old configuration settings (if any)
        :param has changed: callable to determine if a parameter has changed
        """
        eventlet.wsgi.MAX_HEADER_LINE = CONF.max_header_line
        self.client_socket_timeout = CONF.client_socket_timeout or None
        self.configure_socket(old_conf, has_changed)
        if self.initialize_glance_store:
            initialize_glance_store()
    def reload(self):
        """
        Reload and re-apply configuration settings
        Existing child processes are sent a SIGHUP signal
        and will exit after completing existing requests.
        New child processes, which will have the updated
        configuration, are spawned. This allows preventing
        interruption to the service.
        """
        def _has_changed(old, new, param):
            # Compare a cached (stashed) value against the freshly loaded one.
            old = old.get(param)
            new = getattr(new, param)
            return (new != old)
        old_conf = utils.stash_conf_values()
        has_changed = functools.partial(_has_changed, old_conf, CONF)
        CONF.reload_config_files()
        os.killpg(self.pgid, signal.SIGHUP)
        self.stale_children = self.children
        self.children = set()
        # Ensure any logging config changes are picked up
        logging.setup(CONF, 'glance')
        config.set_config_defaults()
        self.configure(old_conf, has_changed)
        self.start_wsgi()
    def wait(self):
        """Wait until all servers have completed running."""
        try:
            if self.children:
                self.wait_on_children()
            else:
                self.pool.waitall()
        except KeyboardInterrupt:
            pass
    def run_child(self):
        """Fork a worker process; the child runs the WSGI server loop."""
        def child_hup(*args):
            """Shuts down child processes, existing requests are handled."""
            signal.signal(signal.SIGHUP, signal.SIG_IGN)
            eventlet.wsgi.is_accepting = False
            self.sock.close()
        pid = os.fork()
        if pid == 0:
            signal.signal(signal.SIGHUP, child_hup)
            signal.signal(signal.SIGTERM, signal.SIG_DFL)
            # ignore the interrupt signal to avoid a race whereby
            # a child worker receives the signal before the parent
            # and is respawned unnecessarily as a result
            signal.signal(signal.SIGINT, signal.SIG_IGN)
            # The child has no need to stash the unwrapped
            # socket, and the reference prevents a clean
            # exit on sighup
            self._sock = None
            self.run_server()
            LOG.info(_LI('Child %d exiting normally'), os.getpid())
            # self.pool.waitall() is now called in wsgi's server so
            # it's safe to exit here
            sys.exit(0)
        else:
            LOG.info(_LI('Started child %s'), pid)
            self.children.add(pid)
    def run_server(self):
        """Run a WSGI server."""
        if cfg.CONF.pydev_worker_debug_host:
            utils.setup_remote_pydev_debug(cfg.CONF.pydev_worker_debug_host,
                                           cfg.CONF.pydev_worker_debug_port)
        eventlet.wsgi.HttpProtocol.default_request_version = "HTTP/1.0"
        self.pool = self.create_pool()
        try:
            eventlet.wsgi.server(self.sock,
                                 self.application,
                                 log=self._logger,
                                 custom_pool=self.pool,
                                 debug=False,
                                 keepalive=CONF.http_keepalive,
                                 socket_timeout=self.client_socket_timeout)
        except socket.error as err:
            # NOTE(review): err[0] subscripting matches the py2-style
            # socket.error tuple; on py3 this would need err.errno — confirm
            # the target runtime.
            if err[0] != errno.EINVAL:
                raise
        # waiting on async pools
        if ASYNC_EVENTLET_THREAD_POOL_LIST:
            for pool in ASYNC_EVENTLET_THREAD_POOL_LIST:
                pool.waitall()
    def _single_run(self, application, sock):
        """Start a WSGI server in a new green thread."""
        LOG.info(_LI("Starting single process server"))
        eventlet.wsgi.server(sock, application, custom_pool=self.pool,
                             log=self._logger,
                             debug=False,
                             keepalive=CONF.http_keepalive,
                             socket_timeout=self.client_socket_timeout)
    def configure_socket(self, old_conf=None, has_changed=None):
        """
        Ensure a socket exists and is appropriately configured.
        This function is called on start up, and can also be
        called in the event of a configuration reload.
        When called for the first time a new socket is created.
        If reloading and either bind_host or bind port have been
        changed the existing socket must be closed and a new
        socket opened (laws of physics).
        In all other cases (bind_host/bind_port have not changed)
        the existing socket is reused.
        :param old_conf: Cached old configuration settings (if any)
        :param has changed: callable to determine if a parameter has changed
        """
        # Do we need a fresh socket?
        new_sock = (old_conf is None or (
            has_changed('bind_host') or
            has_changed('bind_port')))
        # Will we be using https?
        use_ssl = not (not CONF.cert_file or not CONF.key_file)
        # Were we using https before?
        old_use_ssl = (old_conf is not None and not (
            not old_conf.get('key_file') or
            not old_conf.get('cert_file')))
        # Do we now need to perform an SSL wrap on the socket?
        wrap_sock = use_ssl is True and (old_use_ssl is False or new_sock)
        # Do we now need to perform an SSL unwrap on the socket?
        unwrap_sock = use_ssl is False and old_use_ssl is True
        if new_sock:
            self._sock = None
            if old_conf is not None:
                self.sock.close()
            _sock = get_socket(self.default_port)
            _sock.setsockopt(socket.SOL_SOCKET,
                             socket.SO_REUSEADDR, 1)
            # sockets can hang around forever without keepalive
            _sock.setsockopt(socket.SOL_SOCKET,
                             socket.SO_KEEPALIVE, 1)
            self._sock = _sock
        if wrap_sock:
            self.sock = ssl_wrap_socket(self._sock)
        if unwrap_sock:
            self.sock = self._sock
        if new_sock and not use_ssl:
            self.sock = self._sock
        # Pick up newly deployed certs
        if old_conf is not None and use_ssl is True and old_use_ssl is True:
            if has_changed('cert_file') or has_changed('key_file'):
                utils.validate_key_cert(CONF.key_file, CONF.cert_file)
            if has_changed('cert_file'):
                self.sock.certfile = CONF.cert_file
            if has_changed('key_file'):
                self.sock.keyfile = CONF.key_file
        if new_sock or (old_conf is not None and has_changed('tcp_keepidle')):
            # This option isn't available in the OS X version of eventlet
            if hasattr(socket, 'TCP_KEEPIDLE'):
                self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE,
                                     CONF.tcp_keepidle)
        if old_conf is not None and has_changed('backlog'):
            self.sock.listen(CONF.backlog)
| 38.531165 | 78 | 0.602546 |
from __future__ import print_function
import errno
import functools
import os
import signal
import sys
import eventlet
import eventlet.greenio
import eventlet.wsgi
from eventlet.green import socket
from omlcc_catalog.common import config
from omlcc_catalog.common import exception
# oslo.config option definitions for the WSGI layer.
# NOTE(review): `cfg`, `_` and `logging` are not imported in this chunk —
# presumably provided by oslo.config / oslo.log / i18n imports elsewhere; verify.
wsgi_opts = [
    cfg.StrOpt('secure_proxy_ssl_header',
               deprecated_for_removal=True,
               deprecated_reason=_('Use the http_proxy_to_wsgi middleware '
                                   'instead.'),
               help=_('The HTTP header used to determine the scheme for the '
                      'original request, even if it was removed by an SSL '
                      'terminating proxy. Typical value is '
                      '"HTTP_X_FORWARDED_PROTO".')),
]
LOG = logging.getLogger(__name__)
CONF = cfg.CONF
# Register every option group with the global config object.
# NOTE(review): bind_opts, socket_opts and eventlet_opts are not defined in
# this chunk — confirm they are declared elsewhere in the module.
CONF.register_opts(bind_opts)
CONF.register_opts(socket_opts)
CONF.register_opts(eventlet_opts)
CONF.register_opts(wsgi_opts)
def set_eventlet_hub():
    """Select an eventlet hub, preferring 'poll' with 'selects' as fallback.

    Raises exception.WorkerCreationFailure when neither hub is usable on
    this platform.
    """
    for hub_name in ('poll', 'selects'):
        try:
            eventlet.hubs.use_hub(hub_name)
            return
        except Exception:
            continue
    msg = _("eventlet 'poll' nor 'selects' hubs are available "
            "on this platform")
    raise exception.WorkerCreationFailure(reason=msg)
class Server(object):
    """Manage multiple WSGI worker processes sharing one listening socket."""
    def __init__(self, threads=1000, initialize_glance_store=False):
        """Prepare worker bookkeeping and claim a process group.

        :param threads: greenthread pool size for each worker's WSGI server
        :param initialize_glance_store: re-initialize glance_store on reload
        """
        os.umask(0o27)  # ensure files are created with restrictive privileges
        self._logger = logging.getLogger("eventlet.wsgi.server")
        self.threads = threads
        self.children = set()  # pids of live worker processes
        self.stale_children = set()  # pids of workers draining after a reload
        self.running = True
        self.initialize_glance_store = initialize_glance_store
        self.pgid = os.getpid()
        try:
            # NOTE(flaper87): Make sure this process
            # runs in its own process group.
            os.setpgid(self.pgid, self.pgid)
        except OSError:
            # NOTE(flaper87): When running glance-control,
            # (glance's functional tests, for example)
            # setpgid can fail; fall back to pgid 0 (no group-wide signals).
            self.pgid = 0
    def hup(self, *args):
        """Handle SIGHUP: trigger a zero-downtime configuration reload."""
        signal.signal(signal.SIGHUP, signal.SIG_IGN)
        raise exception.SIGHUPInterrupt
    def kill_children(self, *args):
        """Kill the entire process group and stop the reap loop."""
        signal.signal(signal.SIGTERM, signal.SIG_IGN)
        signal.signal(signal.SIGINT, signal.SIG_IGN)
        self.running = False
        os.killpg(self.pgid, signal.SIGTERM)
    def start(self, application, default_port):
        """Configure the socket and run *application* in the WSGI server.

        :param application: the WSGI application to serve
        :param default_port: port to bind to if none is specified in conf
        """
        self.application = application
        self.default_port = default_port
        self.configure()
        self.start_wsgi()
    def start_wsgi(self):
        """Spawn worker children, or serve in-process when workers == 0."""
        workers = get_num_workers()
        if workers == 0:
            # Useful for profiling, test, debug etc.
            self.pool = self.create_pool()
            self.pool.spawn_n(self._single_run, self.application, self.sock)
            return
        else:
            LOG.info(_LI("Starting %d workers"), workers)
            signal.signal(signal.SIGTERM, self.kill_children)
            signal.signal(signal.SIGINT, self.kill_children)
            signal.signal(signal.SIGHUP, self.hup)
            while len(self.children) < workers:
                self.run_child()
    def create_pool(self):
        """Return a new eventlet pool sized by self.threads."""
        return get_asynchronous_eventlet_pool(size=self.threads)
    def _remove_children(self, pid):
        """Forget a reaped child pid, whether live, stale, or unknown."""
        if pid in self.children:
            self.children.remove(pid)
            LOG.info(_LI('Removed dead child %s'), pid)
        elif pid in self.stale_children:
            self.stale_children.remove(pid)
            LOG.info(_LI('Removed stale child %s'), pid)
        else:
            LOG.warn(_LW('Unrecognised child %s') % pid)
    def _verify_and_respawn_children(self, pid, status):
        """Respawn a dead worker unless it exited with a non-zero status."""
        if len(self.stale_children) == 0:
            LOG.debug('No stale children')
        if os.WIFEXITED(status) and os.WEXITSTATUS(status) != 0:
            LOG.error(_LE('Not respawning child %d, cannot '
                          'recover from termination') % pid)
            if not self.children and not self.stale_children:
                LOG.info(
                    _LI('All workers have terminated. Exiting'))
                self.running = False
        else:
            if len(self.children) < get_num_workers():
                self.run_child()
    def wait_on_children(self):
        """Reap children until shutdown, respawning or reloading as needed."""
        while self.running:
            try:
                pid, status = os.wait()
                if os.WIFEXITED(status) or os.WIFSIGNALED(status):
                    self._remove_children(pid)
                    self._verify_and_respawn_children(pid, status)
            except OSError as err:
                if err.errno not in (errno.EINTR, errno.ECHILD):
                    raise
            except KeyboardInterrupt:
                LOG.info(_LI('Caught keyboard interrupt. Exiting.'))
                break
            except exception.SIGHUPInterrupt:
                self.reload()
                continue
        eventlet.greenio.shutdown_safe(self.sock)
        self.sock.close()
        LOG.debug('Exited')
    def configure(self, old_conf=None, has_changed=None):
        """Apply configuration settings.

        :param old_conf: cached old configuration settings (if any)
        :param has_changed: callable to determine if a parameter has changed
        """
        eventlet.wsgi.MAX_HEADER_LINE = CONF.max_header_line
        self.client_socket_timeout = CONF.client_socket_timeout or None
        self.configure_socket(old_conf, has_changed)
        if self.initialize_glance_store:
            initialize_glance_store()
    def reload(self):
        """Reload configuration with zero downtime.

        Existing children are sent SIGHUP and drain their requests while
        freshly configured children are spawned in their place.
        """
        def _has_changed(old, new, param):
            # Compare a cached (stashed) value against the freshly loaded one.
            old = old.get(param)
            new = getattr(new, param)
            return (new != old)
        old_conf = utils.stash_conf_values()
        has_changed = functools.partial(_has_changed, old_conf, CONF)
        CONF.reload_config_files()
        os.killpg(self.pgid, signal.SIGHUP)
        self.stale_children = self.children
        self.children = set()
        # Ensure any logging config changes are picked up
        logging.setup(CONF, 'glance')
        config.set_config_defaults()
        self.configure(old_conf, has_changed)
        self.start_wsgi()
    def wait(self):
        """Block until all servers have completed running."""
        try:
            if self.children:
                self.wait_on_children()
            else:
                self.pool.waitall()
        except KeyboardInterrupt:
            pass
    def run_child(self):
        """Fork a worker process; the child runs the WSGI server loop."""
        def child_hup(*args):
            """Stop accepting in this child; in-flight requests finish."""
            signal.signal(signal.SIGHUP, signal.SIG_IGN)
            eventlet.wsgi.is_accepting = False
            self.sock.close()
        pid = os.fork()
        if pid == 0:
            signal.signal(signal.SIGHUP, child_hup)
            signal.signal(signal.SIGTERM, signal.SIG_DFL)
            # ignore the interrupt signal to avoid a race whereby
            # a child worker receives the signal before the parent
            # and is respawned unnecessarily as a result
            signal.signal(signal.SIGINT, signal.SIG_IGN)
            # The child has no need to stash the unwrapped
            # socket, and the reference prevents a clean
            # exit on sighup
            self._sock = None
            self.run_server()
            LOG.info(_LI('Child %d exiting normally'), os.getpid())
            # self.pool.waitall() is now called in wsgi's server so
            sys.exit(0)
        else:
            LOG.info(_LI('Started child %s'), pid)
            self.children.add(pid)
    def run_server(self):
        """Run the eventlet WSGI server loop for this process."""
        if cfg.CONF.pydev_worker_debug_host:
            utils.setup_remote_pydev_debug(cfg.CONF.pydev_worker_debug_host,
                                           cfg.CONF.pydev_worker_debug_port)
        eventlet.wsgi.HttpProtocol.default_request_version = "HTTP/1.0"
        self.pool = self.create_pool()
        try:
            eventlet.wsgi.server(self.sock,
                                 self.application,
                                 log=self._logger,
                                 custom_pool=self.pool,
                                 debug=False,
                                 keepalive=CONF.http_keepalive,
                                 socket_timeout=self.client_socket_timeout)
        except socket.error as err:
            # NOTE(review): err[0] subscripting matches the py2-style
            # socket.error tuple; on py3 this would need err.errno — confirm
            # the target runtime.
            if err[0] != errno.EINVAL:
                raise
        # waiting on async pools
        if ASYNC_EVENTLET_THREAD_POOL_LIST:
            for pool in ASYNC_EVENTLET_THREAD_POOL_LIST:
                pool.waitall()
    def _single_run(self, application, sock):
        """Start a WSGI server in a new green thread (no forking)."""
        LOG.info(_LI("Starting single process server"))
        eventlet.wsgi.server(sock, application, custom_pool=self.pool,
                             log=self._logger,
                             debug=False,
                             keepalive=CONF.http_keepalive,
                             socket_timeout=self.client_socket_timeout)
    def configure_socket(self, old_conf=None, has_changed=None):
        """Ensure a listening socket exists and matches current config.

        Reuses the existing socket unless bind_host/bind_port changed, and
        wraps/unwraps SSL as the cert/key configuration dictates.

        :param old_conf: cached old configuration settings (if any)
        :param has_changed: callable to determine if a parameter has changed
        """
        # Do we need a fresh socket?
        new_sock = (old_conf is None or (
            has_changed('bind_host') or
            has_changed('bind_port')))
        # Will we be using https?
        use_ssl = not (not CONF.cert_file or not CONF.key_file)
        # Were we using https before?
        old_use_ssl = (old_conf is not None and not (
            not old_conf.get('key_file') or
            not old_conf.get('cert_file')))
        # Do we now need to perform an SSL wrap on the socket?
        wrap_sock = use_ssl is True and (old_use_ssl is False or new_sock)
        # Do we now need to perform an SSL unwrap on the socket?
        unwrap_sock = use_ssl is False and old_use_ssl is True
        if new_sock:
            self._sock = None
            if old_conf is not None:
                self.sock.close()
            _sock = get_socket(self.default_port)
            _sock.setsockopt(socket.SOL_SOCKET,
                             socket.SO_REUSEADDR, 1)
            # sockets can hang around forever without keepalive
            _sock.setsockopt(socket.SOL_SOCKET,
                             socket.SO_KEEPALIVE, 1)
            self._sock = _sock
        if wrap_sock:
            self.sock = ssl_wrap_socket(self._sock)
        if unwrap_sock:
            self.sock = self._sock
        if new_sock and not use_ssl:
            self.sock = self._sock
        # Pick up newly deployed certs
        if old_conf is not None and use_ssl is True and old_use_ssl is True:
            if has_changed('cert_file') or has_changed('key_file'):
                utils.validate_key_cert(CONF.key_file, CONF.cert_file)
            if has_changed('cert_file'):
                self.sock.certfile = CONF.cert_file
            if has_changed('key_file'):
                self.sock.keyfile = CONF.key_file
        if new_sock or (old_conf is not None and has_changed('tcp_keepidle')):
            # This option isn't available in the OS X version of eventlet
            if hasattr(socket, 'TCP_KEEPIDLE'):
                self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE,
                                     CONF.tcp_keepidle)
        if old_conf is not None and has_changed('backlog'):
            self.sock.listen(CONF.backlog)
| true | true |
f724a61cd3bb5219a49f1a660296b229d468a428 | 2,178 | py | Python | A03_Wellst.py | spring-2019-csc-226/a03-master | 7be446b28d5aebfc4c4635cc418db0c0c0fadbca | [
"MIT"
] | null | null | null | A03_Wellst.py | spring-2019-csc-226/a03-master | 7be446b28d5aebfc4c4635cc418db0c0c0fadbca | [
"MIT"
] | 4 | 2019-02-07T06:26:00.000Z | 2019-02-19T00:08:43.000Z | A03_Wellst.py | spring-2019-csc-226/a03-master | 7be446b28d5aebfc4c4635cc418db0c0c0fadbca | [
"MIT"
] | 1 | 2019-02-07T02:23:37.000Z | 2019-02-07T02:23:37.000Z | #######################################################################################################################
# Taran Wells
# Wellst
# https://docs.google.com/document/d/1RBeOXjYBBjZ507wVeQVIPBrU7gBvTNJi8BYGDvtC53w/edit?usp=sharing
#######################################################################################################################
import turtle  # stdlib turtle graphics

# Shared screen: 255-based colormode so color() accepts (r, g, b) integer
# triples; red background behind the house.
wn = turtle.Screen()
wn.colormode(255)
# Set up the turtles used by the drawing functions below; hide their cursors.
base = turtle.Turtle()
base.hideturtle()
roof = turtle.Turtle()
roof.hideturtle()
# Bug fix: was `glass = turtle.Turtle` (missing parentheses), which bound the
# class itself rather than creating a turtle instance. Hidden like the others;
# currently unused by main().
glass = turtle.Turtle()
glass.hideturtle()
wn.bgcolor("red")
def house_base(t, sz):
    """Draw the filled square body of the house with side length ``sz``."""
    t.color(250, 165, 10)  # orange walls
    t.pendown()
    t.begin_fill()
    # A square is four equal sides with 90-degree right turns.
    for _ in range(4):
        t.forward(sz)
        t.right(90)
    t.end_fill()
    t.penup()
def house_roof(t1, sz):
    """Draw the filled equilateral-triangle roof with side length ``sz``."""
    t1.color(135, 30, 160)  # purple roof
    t1.begin_fill()
    sides_left = 3
    while sides_left:  # three sides, 120-degree left turns
        t1.forward(sz)
        t1.left(120)
        sides_left -= 1
    t1.end_fill()
    t1.penup()
def placement(t2, sz):
    """Walk the turtle to the window start: ahead ``sz``, turn right, ahead ``sz``."""
    moves = ((t2.fd, sz), (t2.right, 90), (t2.fd, sz))
    for action, amount in moves:
        action(amount)
def house_window(t3):
    """Draw one light-blue 35x35 window square at the turtle's position."""
    t3.begin_fill()
    t3.pendown()
    t3.pencolor('black')
    side_count = 0
    while side_count < 4:
        t3.fd(35)
        t3.right(90)
        side_count += 1
    # The fill colour is only consumed by end_fill(), so setting it after
    # tracing the outline still fills the square light blue.
    t3.fillcolor(30, 135, 160)
    t3.end_fill()
def main():
    """Assemble the scene: roof, square base, four windows, and green ground."""
    # Offset the roof turtle before drawing so the triangle sits over the base.
    roof.penup()
    roof.back(30)
    roof.pendown()
    house_base(base, 140)
    placement(base, 70)
    house_roof(roof, 200)
    # Draw four windows, rotating the base turtle 90 degrees between each.
    house_window(base)
    base.left(90)
    house_window(base)
    base.left(90)
    house_window(base)
    base.left(90)
    house_window(base)
    # Move below the house, then trace and fill a large green rectangle
    # as the ground.
    base.pu()
    base.left(90)
    base.fd(70)
    base.right(90)
    base.pd()
    base.begin_fill()
    for grass in range(2):
        base.fd(1000)
        base.left(90)
    base.fd(2000)
    base.left(90)
    base.fd(1000)
    base.left(90)
    base.fd(1000)
    base.fillcolor(0, 255, 0)  # green ground, applied by end_fill()
    base.end_fill()
main()  # draw the whole scene
wn.exitonclick()  # keep the window open until the user clicks it
| 21.78 | 119 | 0.524334 | true | true | |
f724a823d84a94955722b0212528e735eddb241d | 1,343 | py | Python | setup.py | GianmarcoFolchi/basketball_reference_scraper | 9d286b66bd2856f3fe0ba255552c5b81b2f87148 | [
"MIT"
] | null | null | null | setup.py | GianmarcoFolchi/basketball_reference_scraper | 9d286b66bd2856f3fe0ba255552c5b81b2f87148 | [
"MIT"
] | null | null | null | setup.py | GianmarcoFolchi/basketball_reference_scraper | 9d286b66bd2856f3fe0ba255552c5b81b2f87148 | [
"MIT"
] | null | null | null | import setuptools
# Bug fix: read the README as UTF-8 explicitly so installation does not fail
# on platforms whose default locale encoding cannot decode it.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()
# Package metadata for basketball_reference_scraper.
setuptools.setup(
    name="basketball_reference_scraper",
    version="1.0.28",
    author="Vishaal Agartha",
    author_email="vishaalagartha@gmail.com",
    license="MIT",
    description="A Python client for scraping stats and data from Basketball Reference",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/vishaalagartha/basketball_reference_scraper",
    packages=setuptools.find_packages(),
    package_data={'basketball_reference_scraper': ['*.txt']},
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires=">=3.6",
    # NOTE(review): exact `==` pins; consider `>=` ranges so the library can
    # coexist with other packages. Left unchanged here.
    install_requires=[
        'beautifulsoup4==4.8.2',
        'bs4==0.0.1',
        'lxml==4.6.3',
        'numpy==1.18.1',
        'pandas==0.25.3',
        'python-dateutil==2.8.1',
        'pytz==2019.3',
        'requests==2.22.0',
        'six==1.13.0',
        'soupsieve==1.9.5'
    ],
    extras_require={
        # NOTE(review): `unittest` is part of the standard library; declaring
        # it here makes pip fetch an unrelated placeholder distribution from
        # PyPI. It should almost certainly be removed — flagged, not changed.
        'test': ['unittest'],
    },
    keywords=[
        "nba",
        "sports",
        "data mining",
        "basketball",
        "basketball reference",
        "basketball-reference.com",
    ],
)
| 27.979167 | 88 | 0.590469 | import setuptools
# Bug fix: read the README as UTF-8 explicitly so installation does not fail
# on platforms whose default locale encoding cannot decode it.
with open("README.md", "r", encoding="utf-8") as fh:
    long_description = fh.read()
# Package metadata for basketball_reference_scraper.
setuptools.setup(
    name="basketball_reference_scraper",
    version="1.0.28",
    author="Vishaal Agartha",
    author_email="vishaalagartha@gmail.com",
    license="MIT",
    description="A Python client for scraping stats and data from Basketball Reference",
    long_description=long_description,
    long_description_content_type="text/markdown",
    url="https://github.com/vishaalagartha/basketball_reference_scraper",
    packages=setuptools.find_packages(),
    package_data={'basketball_reference_scraper': ['*.txt']},
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    python_requires=">=3.6",
    # NOTE(review): exact `==` pins; consider `>=` ranges so the library can
    # coexist with other packages. Left unchanged here.
    install_requires=[
        'beautifulsoup4==4.8.2',
        'bs4==0.0.1',
        'lxml==4.6.3',
        'numpy==1.18.1',
        'pandas==0.25.3',
        'python-dateutil==2.8.1',
        'pytz==2019.3',
        'requests==2.22.0',
        'six==1.13.0',
        'soupsieve==1.9.5'
    ],
    extras_require={
        # NOTE(review): `unittest` is part of the standard library; declaring
        # it here makes pip fetch an unrelated placeholder distribution from
        # PyPI. It should almost certainly be removed — flagged, not changed.
        'test': ['unittest'],
    },
    keywords=[
        "nba",
        "sports",
        "data mining",
        "basketball",
        "basketball reference",
        "basketball-reference.com",
    ],
)
| true | true |
f724a86f608abd7e0fcfb80a41433b1d6e143ea0 | 3,471 | py | Python | setup.py | pbellec/SUITPy | a0450518100d3f5f86423f48d2b7f22c68deebe9 | [
"MIT"
] | null | null | null | setup.py | pbellec/SUITPy | a0450518100d3f5f86423f48d2b7f22c68deebe9 | [
"MIT"
] | null | null | null | setup.py | pbellec/SUITPy | a0450518100d3f5f86423f48d2b7f22c68deebe9 | [
"MIT"
] | null | null | null | #! /usr/bin/env python
"""
@author: maedbhking
based heavily on flexible functionality of nilearn `setup.py`
"""
# Short package blurb; the rest of this script uses DESCRIPTION instead, so
# this constant is currently unused here.
descr = """A python package for cerebellar neuroimaging..."""
import sys
import os
from setuptools import setup, find_packages
def load_version():
    """Execute ``SUITPy/version.py`` and return its resulting globals dict.

    Importing SUITPy directly is not an option because dependencies such as
    nibabel may not be installed yet while setup.py is running.
    """
    version_path = os.path.join('SUITPy', 'version.py')
    namespace = {}
    with open(version_path) as fp:
        source = fp.read()
    exec(source, namespace)
    return namespace
def is_installing():
    """Return a truthy set when the command line requests install/develop.

    Allows command lines such as ``python setup.py build install``.
    """
    return {'install', 'develop'} & set(sys.argv)
def list_required_packages():
    """Return ``"<pkg>>=<min_version>"`` requirement strings.

    Built from ``REQUIRED_MODULE_METADATA`` in the executed version module.
    """
    # Idiom fix: the original built one list and then copied it element by
    # element into a second list for no benefit; a single comprehension is
    # equivalent and still returns a fresh list on every call.
    return ['%s>=%s' % (mod, meta['min_version'])
            for mod, meta in _VERSION_GLOBALS['REQUIRED_MODULE_METADATA']]
# Make sources available using relative paths from this file's directory.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Globals produced by executing SUITPy/version.py (version + dependency data).
_VERSION_GLOBALS = load_version()
DISTNAME = 'SUITPy'
DESCRIPTION = 'Mapping and plotting cerebellar fMRI data in Python'
# Bug fix: read the long description as UTF-8 explicitly so installation does
# not fail on platforms whose default locale encoding cannot decode README.rst.
with open('README.rst', encoding='utf-8') as fp:
    LONG_DESCRIPTION = fp.read()
MAINTAINER = 'Maedbh King'
MAINTAINER_EMAIL = 'maedbhking@berkeley.edu'
URL = 'https://github.com/DiedrichsenLab/SUITPy'
LICENSE = 'MIT'
DOWNLOAD_URL = 'https://github.com/DiedrichsenLab/SUITPy/archive/refs/tags/v1.0.3.tar.gz'
VERSION = _VERSION_GLOBALS['__version__']
if __name__ == "__main__":
    # Only verify runtime dependencies when actually installing, so plain
    # metadata commands (sdist, egg_info, ...) do not require them.
    if is_installing():
        module_check_fn = _VERSION_GLOBALS['_check_module_dependencies']
        module_check_fn(is_SUITPy_installing=True)
    setup(name=DISTNAME,
          maintainer=MAINTAINER,
          maintainer_email=MAINTAINER_EMAIL,
          description=DESCRIPTION,
          license=LICENSE,
          url=URL,
          version=VERSION,
          download_url=DOWNLOAD_URL,
          long_description=LONG_DESCRIPTION,
          zip_safe=False,  # the package can run out of an .egg file
          classifiers=[
              'Intended Audience :: Science/Research',
              'Intended Audience :: Developers',
              'License :: OSI Approved',
              'Programming Language :: Python',
              'Topic :: Software Development',
              'Topic :: Scientific/Engineering',
              'Operating System :: Microsoft :: Windows',
              'Operating System :: POSIX',
              'Operating System :: Unix',
              'Operating System :: MacOS',
              'Programming Language :: Python :: 3.6',
              'Programming Language :: Python :: 3.7',
              'Programming Language :: Python :: 3.8',
              'Programming Language :: Python :: 3.9',
          ],
          packages=find_packages(),
          # Ship the bundled cerebellar surface/scene data files.
          package_data={
              'SUITPy.surfaces': ['*.surf.gii', '*.C.scene', '*.shape.gii', '*.txt'],
          },
          install_requires=list_required_packages(),
          python_requires='>=3.6',
          )
| 34.366337 | 89 | 0.63123 |
# Short package blurb; the rest of this script uses DESCRIPTION instead, so
# this constant is currently unused here.
descr = """A python package for cerebellar neuroimaging..."""
import sys
import os
from setuptools import setup, find_packages
def load_version():
    """Execute ``SUITPy/version.py`` and return its resulting globals dict."""
    version_path = os.path.join('SUITPy', 'version.py')
    namespace = {}
    with open(version_path) as fp:
        source = fp.read()
    exec(source, namespace)
    return namespace
def is_installing():
    """Return a truthy set when the command line requests install/develop.

    Allows command lines such as ``python setup.py build install``.
    """
    return {'install', 'develop'} & set(sys.argv)
def list_required_packages():
    """Return ``"<pkg>>=<min_version>"`` requirement strings.

    Built from ``REQUIRED_MODULE_METADATA`` in the executed version module.
    """
    # Idiom fix: the original built one list and then copied it element by
    # element into a second list for no benefit; a single comprehension is
    # equivalent and still returns a fresh list on every call.
    return ['%s>=%s' % (mod, meta['min_version'])
            for mod, meta in _VERSION_GLOBALS['REQUIRED_MODULE_METADATA']]
# Make sources available using relative paths from this file's directory.
os.chdir(os.path.dirname(os.path.abspath(__file__)))
# Globals produced by executing SUITPy/version.py (version + dependency data).
_VERSION_GLOBALS = load_version()
DISTNAME = 'SUITPy'
DESCRIPTION = 'Mapping and plotting cerebellar fMRI data in Python'
# Bug fix: read the long description as UTF-8 explicitly so installation does
# not fail on platforms whose default locale encoding cannot decode README.rst.
with open('README.rst', encoding='utf-8') as fp:
    LONG_DESCRIPTION = fp.read()
MAINTAINER = 'Maedbh King'
MAINTAINER_EMAIL = 'maedbhking@berkeley.edu'
URL = 'https://github.com/DiedrichsenLab/SUITPy'
LICENSE = 'MIT'
DOWNLOAD_URL = 'https://github.com/DiedrichsenLab/SUITPy/archive/refs/tags/v1.0.3.tar.gz'
VERSION = _VERSION_GLOBALS['__version__']
if __name__ == "__main__":
    # Only verify runtime dependencies when actually installing, so plain
    # metadata commands (sdist, egg_info, ...) do not require them.
    if is_installing():
        module_check_fn = _VERSION_GLOBALS['_check_module_dependencies']
        module_check_fn(is_SUITPy_installing=True)
    setup(name=DISTNAME,
          maintainer=MAINTAINER,
          maintainer_email=MAINTAINER_EMAIL,
          description=DESCRIPTION,
          license=LICENSE,
          url=URL,
          version=VERSION,
          download_url=DOWNLOAD_URL,
          long_description=LONG_DESCRIPTION,
          zip_safe=False,  # the package can run out of an .egg file
          classifiers=[
              'Intended Audience :: Science/Research',
              'Intended Audience :: Developers',
              'License :: OSI Approved',
              'Programming Language :: Python',
              'Topic :: Software Development',
              'Topic :: Scientific/Engineering',
              'Operating System :: Microsoft :: Windows',
              'Operating System :: POSIX',
              'Operating System :: Unix',
              'Operating System :: MacOS',
              'Programming Language :: Python :: 3.6',
              'Programming Language :: Python :: 3.7',
              'Programming Language :: Python :: 3.8',
              'Programming Language :: Python :: 3.9',
          ],
          packages=find_packages(),
          # Ship the bundled cerebellar surface/scene data files.
          package_data={
              'SUITPy.surfaces': ['*.surf.gii', '*.C.scene', '*.shape.gii', '*.txt'],
          },
          install_requires=list_required_packages(),
          python_requires='>=3.6',
          )
)
| true | true |
f724a880d570332da47359919de6f56a2a986caf | 821 | py | Python | deckz/cli/watch.py | m09/deckz | 0f97ef2a43c2c714ac18173a4fe3266cccba31e2 | [
"Apache-2.0"
] | null | null | null | deckz/cli/watch.py | m09/deckz | 0f97ef2a43c2c714ac18173a4fe3266cccba31e2 | [
"Apache-2.0"
] | 41 | 2020-04-06T13:49:18.000Z | 2020-12-24T11:14:47.000Z | deckz/cli/watch.py | m09/deckz | 0f97ef2a43c2c714ac18173a4fe3266cccba31e2 | [
"Apache-2.0"
] | null | null | null | from logging import getLogger
from pathlib import Path
from typing import List, Optional
from typer import Argument
from deckz.cli import app
from deckz.paths import Paths
from deckz.watching import watch as watching_watch
# Module-level logger for this CLI command.
_logger = getLogger(__name__)
@app.command()
def watch(
    targets: Optional[List[str]] = Argument(None),
    handout: bool = False,
    presentation: bool = True,
    # NOTE(review): this parameter shadows the builtin `print`; renaming it
    # would rename the generated --print CLI flag, so it is left as-is.
    print: bool = False,
    minimum_delay: int = 5,
    deck_path: Path = Path("."),
) -> None:
    """Compile on change.

    Watches the current and shared directories and rebuilds the selected
    outputs (handout/presentation/print) for the given targets, waiting at
    least ``minimum_delay`` between rebuilds.
    """
    _logger.info("Watching current and shared directories")
    watching_watch(
        minimum_delay=minimum_delay,
        paths=Paths.from_defaults(deck_path),
        build_handout=handout,
        build_presentation=presentation,
        build_print=print,
        target_whitelist=targets,
    )
| 24.878788 | 59 | 0.699147 | from logging import getLogger
from pathlib import Path
from typing import List, Optional
from typer import Argument
from deckz.cli import app
from deckz.paths import Paths
from deckz.watching import watch as watching_watch
_logger = getLogger(__name__)
@app.command()
def watch(
targets: Optional[List[str]] = Argument(None),
handout: bool = False,
presentation: bool = True,
print: bool = False,
minimum_delay: int = 5,
deck_path: Path = Path("."),
) -> None:
_logger.info("Watching current and shared directories")
watching_watch(
minimum_delay=minimum_delay,
paths=Paths.from_defaults(deck_path),
build_handout=handout,
build_presentation=presentation,
build_print=print,
target_whitelist=targets,
)
| true | true |
f724a8d819de226898e0d3a10a65ce1725cf1a9d | 1,629 | py | Python | tce/tcloud/cvm/ResetInstancesInternetMaxBandwidth.py | liangzhengkang/tencentcloud-sdk-python | c8f990b33f3701e04149a3d613538829a88269eb | [
"Apache-2.0"
] | null | null | null | tce/tcloud/cvm/ResetInstancesInternetMaxBandwidth.py | liangzhengkang/tencentcloud-sdk-python | c8f990b33f3701e04149a3d613538829a88269eb | [
"Apache-2.0"
] | null | null | null | tce/tcloud/cvm/ResetInstancesInternetMaxBandwidth.py | liangzhengkang/tencentcloud-sdk-python | c8f990b33f3701e04149a3d613538829a88269eb | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import os
from tencentcloud.common import credential
from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException
# Import the client and models for the corresponding product module.
from tencentcloud.cvm.v20170312 import cvm_client, models
import json
# Import the optional configuration classes.
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.common.profile.http_profile import HttpProfile
import ssl
# WARNING: this disables TLS certificate verification for every HTTPS
# request made by this process. Acceptable only against this internal
# test endpoint, never for production traffic.
ssl._create_default_https_context = ssl._create_unverified_context
try:
    # Instantiate a credential object from the Tencent Cloud account's
    # secretId and secretKey.
    # SECURITY NOTE(review): credentials are hard-coded here; they should
    # be loaded from the environment or a secrets store instead.
    cred = credential.Credential("AKIDylMjqkOq7Azay9Nq8D5kCSVM1Sfft4Sd", "K8lBONAk7IEzXt30kGXcS5UfbJm0zkG4")
    httpProfile = HttpProfile()
    httpProfile.endpoint = "cvm.api3.test.403a.tcecqpoc.fsphere.cn"
    clientProfile = ClientProfile()
    clientProfile.httpProfile = httpProfile
    # Instantiate a client for the target product (CVM here); the
    # clientProfile argument is optional.
    client = cvm_client.CvmClient(cred, "shanghai", clientProfile)
    # Every API call has a matching request object.
    req = models.ResetInstancesInternetMaxBandwidthRequest()
    # Request parameters may also be supplied as a standard JSON string;
    # this is equivalent to assigning the request attributes one by one.
    params = '{"InstanceIds":["ins-gwggvy39"],"InternetAccessible":{"InternetMaxBandwidthOut":30}}'
    req.from_json_string(params)
    # Invoke the API through the client. The method name matches the
    # request object, and the returned resp matches the request type.
    resp = client.ResetInstancesInternetMaxBandwidth(req)
    # Print the response as a JSON-formatted string.
    print(resp.to_json_string())
    # Individual fields can also be read as attributes; see the API
    # documentation or the response class definition for the fields.
    # print(resp.TotalCount)
except TencentCloudSDKException as err:
print(err) | 33.9375 | 108 | 0.796808 |
import os
from tencentcloud.common import credential
from tencentcloud.common.exception.tencent_cloud_sdk_exception import TencentCloudSDKException
from tencentcloud.cvm.v20170312 import cvm_client, models
import json
from tencentcloud.common.profile.client_profile import ClientProfile
from tencentcloud.common.profile.http_profile import HttpProfile
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
try:
cred = credential.Credential("AKIDylMjqkOq7Azay9Nq8D5kCSVM1Sfft4Sd", "K8lBONAk7IEzXt30kGXcS5UfbJm0zkG4")
httpProfile = HttpProfile()
httpProfile.endpoint = "cvm.api3.test.403a.tcecqpoc.fsphere.cn"
clientProfile = ClientProfile()
clientProfile.httpProfile = httpProfile
client = cvm_client.CvmClient(cred, "shanghai", clientProfile)
req = models.ResetInstancesInternetMaxBandwidthRequest()
params = '{"InstanceIds":["ins-gwggvy39"],"InternetAccessible":{"InternetMaxBandwidthOut":30}}'
req.from_json_string(params)
resp = client.ResetInstancesInternetMaxBandwidth(req)
print(resp.to_json_string())
except TencentCloudSDKException as err:
print(err) | true | true |
f724a945f1fb6084253128c597e56a1ed312286b | 654 | py | Python | src/contas/models.py | br-monteiro/learning-django | 68f16f17e0b4357d15a5b7e9c9a66da2bccd7a63 | [
"MIT"
] | null | null | null | src/contas/models.py | br-monteiro/learning-django | 68f16f17e0b4357d15a5b7e9c9a66da2bccd7a63 | [
"MIT"
] | null | null | null | src/contas/models.py | br-monteiro/learning-django | 68f16f17e0b4357d15a5b7e9c9a66da2bccd7a63 | [
"MIT"
] | null | null | null | from django.db import models
class Category(models.Model):
    """A category used to group financial transactions."""
    # Human-readable category name, shown in admin lists via __str__.
    name = models.CharField(max_length=100)
    # Stamped once, automatically, when the row is first created.
    created_at = models.DateTimeField(auto_now_add=True)
    def __str__(self):
        """Represent the category by its name."""
        return self.name
    class Meta:
        # Portuguese plural label used by the Django admin.
        verbose_name_plural = 'Categorias'
class Transactions(models.Model):
date = models.DateField()
description = models.CharField(max_length=100)
value = models.DecimalField(max_digits=7, decimal_places=2)
observations = models.TextField(null=True, blank=True)
category=models.ForeignKey(Category, on_delete=models.CASCADE)
def __str__(self):
return self.description
class Meta:
verbose_name_plural = 'Transações' | 26.16 | 64 | 0.759939 | from django.db import models
class Category(models.Model):
name = models.CharField(max_length=100)
created_at = models.DateTimeField(auto_now_add=True)
def __str__(self):
return self.name
class Meta:
verbose_name_plural = 'Categorias'
class Transactions(models.Model):
date = models.DateField()
description = models.CharField(max_length=100)
value = models.DecimalField(max_digits=7, decimal_places=2)
observations = models.TextField(null=True, blank=True)
category=models.ForeignKey(Category, on_delete=models.CASCADE)
def __str__(self):
return self.description
class Meta:
verbose_name_plural = 'Transações' | true | true |
f724aaa61baa5d75e1ec4a635d7608adf9883661 | 3,135 | py | Python | src/dms-preview/azext_dms/vendored_sdks/datamigration/models/migrate_sql_server_sql_mi_task_output_agent_job_level.py | mayank88mahajan/azure-cli-extensions | 8bd389a1877bffd14052bec5519ce75dc6fc34cf | [
"MIT"
] | 1 | 2019-05-10T19:58:09.000Z | 2019-05-10T19:58:09.000Z | src/dms-preview/azext_dms/vendored_sdks/datamigration/models/migrate_sql_server_sql_mi_task_output_agent_job_level.py | mayank88mahajan/azure-cli-extensions | 8bd389a1877bffd14052bec5519ce75dc6fc34cf | [
"MIT"
] | 2 | 2019-10-02T23:37:38.000Z | 2020-10-02T01:17:31.000Z | src/dms-preview/azext_dms/vendored_sdks/datamigration/models/migrate_sql_server_sql_mi_task_output_agent_job_level.py | mayank88mahajan/azure-cli-extensions | 8bd389a1877bffd14052bec5519ce75dc6fc34cf | [
"MIT"
] | 1 | 2018-08-28T14:36:47.000Z | 2018-08-28T14:36:47.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .migrate_sql_server_sql_mi_task_output import MigrateSqlServerSqlMITaskOutput
class MigrateSqlServerSqlMITaskOutputAgentJobLevel(MigrateSqlServerSqlMITaskOutput):
    """Agent-Job-level output of a SQL Server to SQL Managed Instance
    migration task (AutoRest-generated model — regeneration may overwrite
    hand edits).
    Variables are only populated by the server, and will be ignored when
    sending a request.
    All required parameters must be populated in order to send to Azure.
    :ivar id: Result identifier
    :vartype id: str
    :param result_type: Required. Constant filled by server.
    :type result_type: str
    :ivar name: Agent Job name.
    :vartype name: str
    :ivar is_enabled: The state of the original Agent Job.
    :vartype is_enabled: bool
    :ivar state: Current state of migration. Possible values include: 'None',
     'InProgress', 'Failed', 'Warning', 'Completed', 'Skipped', 'Stopped'
    :vartype state: str or ~azure.mgmt.datamigration.models.MigrationState
    :ivar started_on: Migration start time
    :vartype started_on: datetime
    :ivar ended_on: Migration end time
    :vartype ended_on: datetime
    :ivar message: Migration progress message
    :vartype message: str
    :ivar exceptions_and_warnings: Migration errors and warnings per job
    :vartype exceptions_and_warnings:
     list[~azure.mgmt.datamigration.models.ReportableException]
    """
    # Every output field is read-only (server-populated); only the
    # polymorphic discriminator 'result_type' is required on requests.
    _validation = {
        'id': {'readonly': True},
        'result_type': {'required': True},
        'name': {'readonly': True},
        'is_enabled': {'readonly': True},
        'state': {'readonly': True},
        'started_on': {'readonly': True},
        'ended_on': {'readonly': True},
        'message': {'readonly': True},
        'exceptions_and_warnings': {'readonly': True},
    }
    # Maps Python attribute names to wire-format JSON keys and msrest types.
    _attribute_map = {
        'id': {'key': 'id', 'type': 'str'},
        'result_type': {'key': 'resultType', 'type': 'str'},
        'name': {'key': 'name', 'type': 'str'},
        'is_enabled': {'key': 'isEnabled', 'type': 'bool'},
        'state': {'key': 'state', 'type': 'str'},
        'started_on': {'key': 'startedOn', 'type': 'iso-8601'},
        'ended_on': {'key': 'endedOn', 'type': 'iso-8601'},
        'message': {'key': 'message', 'type': 'str'},
        'exceptions_and_warnings': {'key': 'exceptionsAndWarnings', 'type': '[ReportableException]'},
    }
    def __init__(self, **kwargs):
        super(MigrateSqlServerSqlMITaskOutputAgentJobLevel, self).__init__(**kwargs)
        # Server-populated outputs start unset; the service fills them in
        # during deserialization of a response.
        self.name = None
        self.is_enabled = None
        self.state = None
        self.started_on = None
        self.ended_on = None
        self.message = None
        self.exceptions_and_warnings = None
        # Polymorphic discriminator identifying this output variant.
        self.result_type = 'AgentJobLevelOutput'
| 39.683544 | 101 | 0.625199 |
from .migrate_sql_server_sql_mi_task_output import MigrateSqlServerSqlMITaskOutput
class MigrateSqlServerSqlMITaskOutputAgentJobLevel(MigrateSqlServerSqlMITaskOutput):
_validation = {
'id': {'readonly': True},
'result_type': {'required': True},
'name': {'readonly': True},
'is_enabled': {'readonly': True},
'state': {'readonly': True},
'started_on': {'readonly': True},
'ended_on': {'readonly': True},
'message': {'readonly': True},
'exceptions_and_warnings': {'readonly': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'result_type': {'key': 'resultType', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'is_enabled': {'key': 'isEnabled', 'type': 'bool'},
'state': {'key': 'state', 'type': 'str'},
'started_on': {'key': 'startedOn', 'type': 'iso-8601'},
'ended_on': {'key': 'endedOn', 'type': 'iso-8601'},
'message': {'key': 'message', 'type': 'str'},
'exceptions_and_warnings': {'key': 'exceptionsAndWarnings', 'type': '[ReportableException]'},
}
def __init__(self, **kwargs):
super(MigrateSqlServerSqlMITaskOutputAgentJobLevel, self).__init__(**kwargs)
self.name = None
self.is_enabled = None
self.state = None
self.started_on = None
self.ended_on = None
self.message = None
self.exceptions_and_warnings = None
self.result_type = 'AgentJobLevelOutput'
| true | true |
f724ab3e48fa4fed5007bb73f609343695c1c2e7 | 2,530 | py | Python | GPflow/test_gplvm.py | blutooth/dgp | bedbbc3595fbe124d7a06c3d6d64f9009304491e | [
"Apache-2.0"
] | 1 | 2018-09-06T04:42:37.000Z | 2018-09-06T04:42:37.000Z | GPflow/test_gplvm.py | blutooth/dgp | bedbbc3595fbe124d7a06c3d6d64f9009304491e | [
"Apache-2.0"
] | null | null | null | GPflow/test_gplvm.py | blutooth/dgp | bedbbc3595fbe124d7a06c3d6d64f9009304491e | [
"Apache-2.0"
] | null | null | null | from __future__ import print_function
import kernels
import numpy as np
import unittest
import gplvm
class TestBayesianGPLVM(unittest.TestCase):
    """Smoke tests for the Bayesian GP-LVM implementation."""
    def setUp(self):
        """Build a small Bayesian GP-LVM on fixed-seed random data."""
        N = 10 # number of data points
        D = 1 # latent dimensions
        M = 5 # inducings points
        R = 2 # data dimension
        k = kernels.RBF(D)
        Z = np.linspace(0,1,M)
        Z = np.expand_dims(Z, D)
        rng = np.random.RandomState(1)
        Y = rng.randn(N,R)
        self.m = gplvm.BayesianGPLVM(X_mean = np.zeros((N,D)),
                                     X_var=np.ones((N,D)), Y=Y, kern=k, Z=Z)
    def test_linearSolution(self):
        """Placeholder: a linear-kernel GPLVM should recover PCA.

        Not implemented — comparing to PCA would require handling
        rotations and linear rescalings of the latent space.
        """
        pass
    def test_GPLVM_BGPLVM_Equivalence(self):
        """With X_var fixed, adding explicit zero-mean/unit-variance priors
        must not change the value of compute_log_likelihood, and doubling
        the inducing points must not decrease it."""
        # BGPLVM with variance to 0 is same as GPLVM
        N = 10 # number of data points
        Q = 1 # latent dimensions
        M = 5 # inducing points
        D = 2 # data dimension
        k = kernels.RBF(Q)
        Z = np.linspace(0, 1, M)
        Z = np.expand_dims(Z, Q)
        rng = np.random.RandomState(1)
        Y = rng.randn(N, Q)
        XInit = rng.rand(N, Q)
        # use 0 variance for BGPLVM
        m = gplvm.BayesianGPLVM(X_mean=XInit, X_var=np.ones((N, Q)), Y=Y, kern=k, Z=Z)
        print(m)
        m.X_var.fixed = True
        ll = m.compute_log_likelihood()
        print(ll)
        # Same model with the priors spelled out explicitly; the likelihood
        # value should be identical to the default-prior model above.
        m = gplvm.BayesianGPLVM(X_mean=XInit, X_var=np.ones((N, Q)), Y=Y, kern=k, Z=Z, X_prior_mean=np.zeros((N,Q)), X_prior_var = np.ones((N,Q)))
        llprior = m.compute_log_likelihood()
        print(m)
        print(llprior)
        assert ll == llprior
        # More inducing points: the bound should only improve.
        Z = np.linspace(0, 1, M*2)
        Z = np.expand_dims(Z, Q)
        m = gplvm.BayesianGPLVM(X_mean=XInit, X_var=np.ones((N, Q)), Y=Y, kern=k, Z=Z, X_prior_mean=np.zeros((N,Q)), X_prior_var = np.ones((N,Q)))
        llmoreZ = m.compute_log_likelihood()
        print(llmoreZ)
        assert llmoreZ > ll
        # Disabled follow-up (optimisation diverges, f = +Infinity):
        # m.optimize()
        # mGPLVM = GPflow.gplvm.GPLVM(Y=Y, Q=Q, kern=k, XInit=XInit)
        # mGPLVM.optimize()
        # assert np.allclose(m.X_mean.value, mGPLVM.X.value)
        # this does not work - f= +Infinity!
    def test_gplvmOptimization(self):
        """Placeholder for an optimisation smoke test (optimize() disabled)."""
        print('Run optimisation')
        # self.m.optimize()
if __name__ == "__main__":
unittest.main()
| 33.733333 | 146 | 0.581818 | from __future__ import print_function
import kernels
import numpy as np
import unittest
import gplvm
class TestBayesianGPLVM(unittest.TestCase):
def setUp(self):
N = 10
D = 1
M = 5
R = 2
k = kernels.RBF(D)
Z = np.linspace(0,1,M)
Z = np.expand_dims(Z, D)
rng = np.random.RandomState(1)
Y = rng.randn(N,R)
self.m = gplvm.BayesianGPLVM(X_mean = np.zeros((N,D)),
X_var=np.ones((N,D)), Y=Y, kern=k, Z=Z)
def test_linearSolution(self):
pass
def test_GPLVM_BGPLVM_Equivalence(self):
# BGPLVM with variance to 0 is same as GPLVM
N = 10 # number of data points
Q = 1 # latent dimensions
M = 5 # inducing points
D = 2 # data dimension
k = kernels.RBF(Q)
Z = np.linspace(0, 1, M)
Z = np.expand_dims(Z, Q)
rng = np.random.RandomState(1)
Y = rng.randn(N, Q)
XInit = rng.rand(N, Q)
# use 0 variance for BGPLVM
m = gplvm.BayesianGPLVM(X_mean=XInit, X_var=np.ones((N, Q)), Y=Y, kern=k, Z=Z)
print(m)
m.X_var.fixed = True
ll = m.compute_log_likelihood()
print(ll)
m = gplvm.BayesianGPLVM(X_mean=XInit, X_var=np.ones((N, Q)), Y=Y, kern=k, Z=Z, X_prior_mean=np.zeros((N,Q)), X_prior_var = np.ones((N,Q)))
llprior = m.compute_log_likelihood()
print(m)
print(llprior)
assert ll == llprior
Z = np.linspace(0, 1, M*2)
Z = np.expand_dims(Z, Q)
m = gplvm.BayesianGPLVM(X_mean=XInit, X_var=np.ones((N, Q)), Y=Y, kern=k, Z=Z, X_prior_mean=np.zeros((N,Q)), X_prior_var = np.ones((N,Q)))
llmoreZ = m.compute_log_likelihood()
print(llmoreZ)
assert llmoreZ > ll
# m.optimize()
# mGPLVM = GPflow.gplvm.GPLVM(Y=Y, Q=Q, kern=k, XInit=XInit)
# mGPLVM.optimize()
# assert np.allclose(m.X_mean.value, mGPLVM.X.value)
# this does not work - f= +Infinity!
def test_gplvmOptimization(self):
print('Run optimisation')
# self.m.optimize()
if __name__ == "__main__":
unittest.main()
| true | true |
f724ae22374b14f37ab0977ec53f524308417895 | 1,422 | py | Python | demo/wp_scalogram.py | astromaddie/pywavelets-py3 | 9d434929cb748eb44be86a4b712d8f3009326693 | [
"MIT"
] | 1 | 2018-03-13T10:44:47.000Z | 2018-03-13T10:44:47.000Z | demo/wp_scalogram.py | astromaddie/pywavelets-py3 | 9d434929cb748eb44be86a4b712d8f3009326693 | [
"MIT"
] | null | null | null | demo/wp_scalogram.py | astromaddie/pywavelets-py3 | 9d434929cb748eb44be86a4b712d8f3009326693 | [
"MIT"
] | 1 | 2018-03-13T10:44:54.000Z | 2018-03-13T10:44:54.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
import pywt
# Linear-chirp test signal: instantaneous frequency grows linearly with x.
x = np.linspace(0, 1, num=512)
data = np.sin(250 * np.pi * x**2)
wavelet = 'db2'
level = 4
order = "freq"  # other option is "normal"
interpolation = 'nearest'
cmap = plt.cm.cool
# Construct wavelet packet and collect the leaf coefficients at `level`.
wp = pywt.WaveletPacket(data, wavelet, 'sym', maxlevel=level)
nodes = wp.get_level(level, order=order)
labels = [n.path for n in nodes]
values = np.array([n.data for n in nodes], 'd')
values = abs(values)
# Show signal and wavelet packet coefficients
fig = plt.figure()
fig.subplots_adjust(hspace=0.2, bottom=.03, left=.07, right=.97, top=.92)
ax = fig.add_subplot(2, 1, 1)
ax.set_title("linchirp signal")
ax.plot(x, data, 'b')
ax.set_xlim(0, x[-1])
ax = fig.add_subplot(2, 1, 2)
ax.set_title("Wavelet packet coefficients at level %d" % level)
ax.imshow(values, interpolation=interpolation, cmap=cmap, aspect="auto",
          origin="lower", extent=[0, 1, 0, len(values)])
# BUGFIX: on Matplotlib < 3.5 the second positional argument of
# set_yticks() is ``minor``, not ``labels``, so the label list was
# silently misinterpreted. Setting ticks and labels separately is
# correct on every Matplotlib version.
ax.set_yticks(np.arange(0.5, len(labels) + 0.5))
ax.set_yticklabels(labels)
# Show spectrogram and wavelet packet coefficients
fig2 = plt.figure()
ax2 = fig2.add_subplot(211)
ax2.specgram(data, NFFT=64, noverlap=32, cmap=cmap)
ax2.set_title("Spectrogram of signal")
ax3 = fig2.add_subplot(212)
ax3.imshow(values, origin='upper', extent=[-1,1,-1,1],
           interpolation='nearest')
ax3.set_title("Wavelet packet coefficients")
plt.show()
| 27.346154 | 73 | 0.696203 |
import numpy as np
import matplotlib.pyplot as plt
import pywt
x = np.linspace(0, 1, num=512)
data = np.sin(250 * np.pi * x**2)
wavelet = 'db2'
level = 4
order = "freq"
interpolation = 'nearest'
cmap = plt.cm.cool
wp = pywt.WaveletPacket(data, wavelet, 'sym', maxlevel=level)
nodes = wp.get_level(level, order=order)
labels = [n.path for n in nodes]
values = np.array([n.data for n in nodes], 'd')
values = abs(values)
fig = plt.figure()
fig.subplots_adjust(hspace=0.2, bottom=.03, left=.07, right=.97, top=.92)
ax = fig.add_subplot(2, 1, 1)
ax.set_title("linchirp signal")
ax.plot(x, data, 'b')
ax.set_xlim(0, x[-1])
ax = fig.add_subplot(2, 1, 2)
ax.set_title("Wavelet packet coefficients at level %d" % level)
ax.imshow(values, interpolation=interpolation, cmap=cmap, aspect="auto",
origin="lower", extent=[0, 1, 0, len(values)])
ax.set_yticks(np.arange(0.5, len(labels) + 0.5), labels)
fig2 = plt.figure()
ax2 = fig2.add_subplot(211)
ax2.specgram(data, NFFT=64, noverlap=32, cmap=cmap)
ax2.set_title("Spectrogram of signal")
ax3 = fig2.add_subplot(212)
ax3.imshow(values, origin='upper', extent=[-1,1,-1,1],
interpolation='nearest')
ax3.set_title("Wavelet packet coefficients")
plt.show()
| true | true |
f724ae68aed162517af1cec117aa39d242e353b8 | 6,048 | py | Python | scripts/init_theano_settings.py | dylanirion/wbia-plugin-cnn | cd0018b829de3f077ca289551492cdad84806ed6 | [
"Apache-2.0"
] | null | null | null | scripts/init_theano_settings.py | dylanirion/wbia-plugin-cnn | cd0018b829de3f077ca289551492cdad84806ed6 | [
"Apache-2.0"
] | 4 | 2020-07-02T19:25:43.000Z | 2020-08-27T18:05:15.000Z | scripts/init_theano_settings.py | dylanirion/wbia-plugin-cnn | cd0018b829de3f077ca289551492cdad84806ed6 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""
References:
http://deeplearning.net/software/theano/library/config.html
Check Settings:
python -c 'import theano; print theano.config' | less
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import utool as ut
import os
from os.path import join
(print, rrr, profile) = ut.inject2(__name__)
"""
CommandLine:
cd %CODE_DIR%/wbia_cnn/code
cd $CODE_DIR/wbia_cnn/code
code
cd wbia_cnn/code
python train.py
Purge from system and environ:
cd
python -c "import utool as ut; ut.total_purge_developed_repo('~/code/pylearn2')"
python -c "import utool as ut; ut.total_purge_developed_repo('~/code/Theano')"
python -c "import utool as ut; ut.total_purge_developed_repo('~/code/Lasagne')"
# Remove pylearn2 scripts
sudo rm /home/joncrall/venv/bin/pylearn2-*
sudo rm /usr/local/bin/pylearn2-*
locate pylearn2 | grep -v /home/joncrall/code/pylearn2 | grep -v /home/jason/code/pylearn2
pip uninstall theano
pip uninstall lasagne
pip uninstall pylearn2
sudo -H pip uninstall theano
sudo -H pip uninstall lasagne
sudo -H pip uninstall pylearn2
sudo pip uninstall theano
sudo pip uninstall lasagne
sudo pip uninstall pylearn2
# If they do try chowning to current user
sudo chown -R $USER:$USER ~/code/pylearn2
sudo chown -R $USER:$USER ~/code/Theano
sudo chown -R $USER:$USER ~/code/Lasagne
export GLOBAL_SITE_PKGS=$(python -c "import utool as ut; print(ut.get_global_dist_packages_dir())")
export LOCAL_SITE_PKGS=$(python -c "import utool as ut; print(ut.get_local_dist_packages_dir())")
export VENV_SITE_PKGS=$(python -c "import utool as ut; print(ut.get_site_packages_dir())")
# Test that they dont exist
python -c "import pylearn2; print(pylearn2.__file__)"
python -c "import theano; print(theano.__version__)"
python -c "import lasagne; print(lasagne.__version__)"
PythonPrereqs:
co
git clone git://github.com/lisa-lab/pylearn2.git
git clone https://github.com/Theano/Theano.git
git clone https://github.com/Erotemic/Lasagne.git
cd ~/code/pylearn2 && git pull && python setup.py develop
cd ~/code/Theano && git pull && python setup.py develop
cd ~/code/Lasagne && git pull && python setup.py develop
python -c "import pylearn2; print(pylearn2.__file__)"
python -c "import theano; print(theano.__version__)"
python -c "import lasagne; print(lasagne.__version__)"
git checkout 8758ac1434175159e5c1f30123041799c2b6098a
OLD:
git clone https://github.com/Lasagne/Lasagne.git
pip install theano
pip install git+https://github.com/Lasagne/Lasagne.git
pip install git+git://github.com/lisa-lab/pylearn2.git
#pip install lasagne
#pip install pylearn2
Ensure CuDNN is installed
http://lasagne.readthedocs.io/en/latest/user/installation.html#cudnn
# Test if Theano Works with CUDNN
python -c "from theano.sandbox.cuda.dnn import dnn_available as d; print(d() or d.msg)"
# Need to register with nvidia
https://developer.nvidia.com/rdp/cudnn-download
# Check cuda version
nvcc --version
# Check if cuda is globally installed
ls -al /usr/local/cuda
# Check if CUDNN is globally installed
ls -al /usr/local/cuda/include/cudnn.h
ls -al /usr/local/cuda/lib64/cudnn*
# Download approprate version
cd ~/Downloads
# doesnt work if you dont sign in
# wget https://developer.nvidia.com/compute/machine-learning/cudnn/secure/v5.1/rc/7.5/cudnn-7.5-linux-x64-v5.1-rc-tgz
# Unpack appropriate version
cd ~/Downloads
7z x cudnn-7.5-linux-x64-v5.1-rc.tgz && 7z x -ocudnn5.1 cudnn-7.5-linux-x64-v5.1-rc.tar
7z x cudnn-7.5-linux-x64-v5.0-ga.tgz && 7z x -ocudnn5.0 cudnn-7.5-linux-x64-v5.0-ga.tar
7z x cudnn-7.0-linux-x64-v4.0-prod.tgz && 7z x -ocudnn4.0 cudnn-7.0-linux-x64-v4.0-prod.tar
tree ~/Downloads/cudnn5.1/
tree ~/Downloads/cudnn4/
# DEFINE WHERE CUDA LIVES
export CUDADIR=/usr/local/cuda
export TARGET_CUDNN_VERSION=5.1
MAIN_CUDNN_VERSION="$(echo $TARGET_CUDNN_VERSION | head -c 1)"
# Check CUDNN Install
ls -al $CUDADIR/include/cudnn.h
ls -al $CUDADIR/lib64/libcudnn*
#Look at other cuda install permissions
ls -al $CUDADIR/include/cublas.h
ls -al $CUDADIR/lib64/libcublas*
# REMOVE / UNINSTALL OLD CUDNN
sudo rm -rf $CUDADIR/include/cudnn.h
sudo rm -rf $CUDADIR/lib64/libcudnn*
# Extract into folder called cuda, need to move it to wherever cuda is installed
# cudnn consists of one header and 4 libraries
sudo cp -rv ~/Downloads/cudnn$TARGET_CUDNN_VERSION/cuda/include/cudnn.h $CUDADIR/include/cudnn.h
sudo cp -rv ~/Downloads/cudnn$TARGET_CUDNN_VERSION/cuda/lib64/libcudnn.so.$TARGET_CUDNN_VERSION* $CUDADIR/lib64/
sudo cp -rv ~/Downloads/cudnn$TARGET_CUDNN_VERSION/cuda/lib64/libcudnn_static.a $CUDADIR/lib64/
# Manually make symlinks (ones nvidia ships are broken)
sudo ln -s $CUDADIR/lib64/libcudnn.so.$TARGET_CUDNN_VERSION* $CUDADIR/lib64/libcudnn.so.$MAIN_CUDNN_VERSION
sudo ln -s $CUDADIR/lib64/libcudnn.so.$MAIN_CUDNN_VERSION $CUDADIR/lib64/libcudnn.so
# Set permissions to reflect cuda install
sudo chmod 755 /usr/local/cuda/lib64/libcudnn.so.$TARGET_CUDNN_VERSION*
# Check CUDNN Install
ls -al $CUDADIR/include/cudnn.h
ls -al $CUDADIR/lib64/libcudnn*
# Test if Theano Works with CUDNN
python -c "from theano.sandbox.cuda.dnn import dnn_available as d; print(d() or d.msg)"
"""
def init_theanorc():
    """Write a default ``.theanorc`` into the user's home directory.

    Asks for confirmation before overwriting an existing file. Relies on
    the ``utool`` helpers (``ut``) imported at module level for dedenting,
    path checks, the prompt, and the actual write.
    """
    # BUGFIX: os.getenv('HOME') is unset on Windows (and in some minimal
    # environments), which made join() raise TypeError. expanduser('~')
    # resolves the home directory portably on every platform.
    theanorc_fpath = join(os.path.expanduser('~'), '.theanorc')
    theanorc_text = ut.codeblock(
        """
        [global]
        floatX = float32
        device = gpu0
        openmp = True
        [nvcc]
        fastmath = True
        """
    )
    # Refuse to clobber an existing config unless the user confirms.
    if ut.checkpath(theanorc_fpath, verbose=True):
        if not ut.arg_you_sure('overwrite?'):
            return
    ut.write_to(theanorc_fpath, theanorc_text)
if __name__ == '__main__':
init_theanorc()
| 33.787709 | 121 | 0.700562 |
from __future__ import absolute_import, division, print_function, unicode_literals
import utool as ut
import os
from os.path import join
(print, rrr, profile) = ut.inject2(__name__)
def init_theanorc():
theanorc_fpath = join(os.getenv('HOME'), '.theanorc')
theanorc_text = ut.codeblock(
"""
[global]
floatX = float32
device = gpu0
openmp = True
[nvcc]
fastmath = True
"""
)
if ut.checkpath(theanorc_fpath, verbose=True):
if not ut.arg_you_sure('overwrite?'):
return
ut.write_to(theanorc_fpath, theanorc_text)
if __name__ == '__main__':
init_theanorc()
| true | true |
f724aebbd5407f292e974e0c128452257538cb40 | 3,516 | py | Python | bindings/python/ensmallen/datasets/string/desulfotomaculumcopahuensis.py | AnacletoLAB/ensmallen_graph | b2c1b18fb1e5801712852bcc239f239e03076f09 | [
"MIT"
] | 5 | 2021-02-17T00:44:45.000Z | 2021-08-09T16:41:47.000Z | bindings/python/ensmallen/datasets/string/desulfotomaculumcopahuensis.py | AnacletoLAB/ensmallen_graph | b2c1b18fb1e5801712852bcc239f239e03076f09 | [
"MIT"
] | 18 | 2021-01-07T16:47:39.000Z | 2021-08-12T21:51:32.000Z | bindings/python/ensmallen/datasets/string/desulfotomaculumcopahuensis.py | AnacletoLAB/ensmallen | b2c1b18fb1e5801712852bcc239f239e03076f09 | [
"MIT"
] | 3 | 2021-01-14T02:20:59.000Z | 2021-08-04T19:09:52.000Z | """
This file offers the methods to automatically retrieve the graph Desulfotomaculum copahuensis.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def DesulfotomaculumCopahuensis(
    directed: bool = False,
    preprocess: bool = True,
    load_nodes: bool = True,
    verbose: int = 2,
    cache: bool = True,
    cache_path: str = "graphs/string",
    version: str = "links.v11.5",
    **additional_graph_kwargs: Dict
) -> Graph:
    """Retrieve the Desulfotomaculum copahuensis graph from STRING.

    Parameters
    ----------
    directed: bool = False
        Load the graph as directed instead of undirected.
    preprocess: bool = True
        Preprocess the downloaded files for faster, leaner loading.
    load_nodes: bool = True
        Load the node vocabulary rather than a plain numeric range.
    verbose: int = 2
        Verbosity of the progress bars during retrieval and building.
    cache: bool = True
        Download and preprocess each file only once.
    cache_path: str = "graphs/string"
        Directory where the downloaded graphs are stored.
    version: str = "links.v11.5"
        Graph version to retrieve; available versions are
        "homology.v11.5", "physical.links.v11.5" and "links.v11.5".
    additional_graph_kwargs: Dict
        Extra keyword arguments forwarded to the graph constructor.

    Returns
    -------
    A Graph instance of Desulfotomaculum copahuensis.

    References
    ----------
    Please cite: Szklarczyk et al., "STRING v11: protein-protein
    association networks with increased coverage", Nucleic Acids
    Research, 47(D1):D607-D613, 2019, Oxford University Press.
    """
    # Assemble the retrieval configuration, then build and invoke the
    # retriever (calling it performs the download/load and returns the
    # Graph instance).
    retrieval_arguments = dict(
        graph_name="DesulfotomaculumCopahuensis",
        repository="string",
        version=version,
        directed=directed,
        preprocess=preprocess,
        load_nodes=load_nodes,
        verbose=verbose,
        cache=cache,
        cache_path=cache_path,
        additional_graph_kwargs=additional_graph_kwargs,
    )
    retriever = AutomaticallyRetrievedGraph(**retrieval_arguments)
    return retriever()
| 33.485714 | 223 | 0.681741 | from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph
def DesulfotomaculumCopahuensis(
directed: bool = False,
preprocess: bool = True,
load_nodes: bool = True,
verbose: int = 2,
cache: bool = True,
cache_path: str = "graphs/string",
version: str = "links.v11.5",
**additional_graph_kwargs: Dict
) -> Graph:
return AutomaticallyRetrievedGraph(
graph_name="DesulfotomaculumCopahuensis",
repository="string",
version=version,
directed=directed,
preprocess=preprocess,
load_nodes=load_nodes,
verbose=verbose,
cache=cache,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
| true | true |
f724af0aaa79792c4cd22f016025f032f8255a1c | 1,504 | py | Python | updater/update/ratelimit.py | codl/status.chitter.xyz | ed86a6163306c938a49bdef5dea4928ef7cd09cc | [
"BSD-3-Clause"
] | 1 | 2020-06-02T13:21:59.000Z | 2020-06-02T13:21:59.000Z | updater/update/ratelimit.py | codl/status.chitter.xyz | ed86a6163306c938a49bdef5dea4928ef7cd09cc | [
"BSD-3-Clause"
] | 678 | 2018-05-27T21:47:21.000Z | 2022-02-03T14:41:28.000Z | updater/update/ratelimit.py | codl/status.chitter.xyz | ed86a6163306c938a49bdef5dea4928ef7cd09cc | [
"BSD-3-Clause"
] | null | null | null | from redis import StrictRedis as Redis
from pathlib import Path
import hashlib
import time
lua_script_path = Path(__file__).parent / 'ratelimit.lua'
with open(lua_script_path) as f:
LUA_SCRIPT = f.read()
del lua_script_path # don't want it polluting the module
class RateLimit(object):
def __init__(self,
redis_url='redis://',
redis_key_prefix='ratelimit',
bucket_size=50,
bucket_period=30):
self.redis = Redis.from_url(redis_url)
self.script = self.redis.register_script(LUA_SCRIPT)
self.redis_key_prefix = redis_key_prefix
self.bucket_size = bucket_size
self.bucket_period = bucket_period
def _exec(self, identifier, clear=False):
identifier_h = hashlib.blake2s(
identifier.encode('utf-8'), digest_size=6).hexdigest()
token_count_key = "{}:{}:count".format(self.redis_key_prefix,
identifier_h)
token_last_add_key = "{}:{}:last-add".format(self.redis_key_prefix,
identifier_h)
keys = [token_count_key, token_last_add_key]
argv = [self.bucket_size, self.bucket_period, int(time.time())]
if clear:
argv += [True]
return self.script(keys, argv)
def hit(self, identifier):
return int(self._exec(identifier))
def clear(self, identifier):
self._exec(identifier, clear=True)
| 32.695652 | 75 | 0.611037 | from redis import StrictRedis as Redis
from pathlib import Path
import hashlib
import time
lua_script_path = Path(__file__).parent / 'ratelimit.lua'
with open(lua_script_path) as f:
LUA_SCRIPT = f.read()
del lua_script_path
class RateLimit(object):
def __init__(self,
redis_url='redis://',
redis_key_prefix='ratelimit',
bucket_size=50,
bucket_period=30):
self.redis = Redis.from_url(redis_url)
self.script = self.redis.register_script(LUA_SCRIPT)
self.redis_key_prefix = redis_key_prefix
self.bucket_size = bucket_size
self.bucket_period = bucket_period
def _exec(self, identifier, clear=False):
identifier_h = hashlib.blake2s(
identifier.encode('utf-8'), digest_size=6).hexdigest()
token_count_key = "{}:{}:count".format(self.redis_key_prefix,
identifier_h)
token_last_add_key = "{}:{}:last-add".format(self.redis_key_prefix,
identifier_h)
keys = [token_count_key, token_last_add_key]
argv = [self.bucket_size, self.bucket_period, int(time.time())]
if clear:
argv += [True]
return self.script(keys, argv)
def hit(self, identifier):
return int(self._exec(identifier))
def clear(self, identifier):
self._exec(identifier, clear=True)
| true | true |
f724af6e5df81a66575ddc711157d44b2bd75cca | 190 | py | Python | optimus/version.py | Pcosmin/Optimus | ef3306d1b752bbfb1959ddb9103786acb8e9b9ba | [
"Apache-2.0"
] | 1 | 2020-09-22T13:04:37.000Z | 2020-09-22T13:04:37.000Z | optimus/version.py | rafaelang/Optimus | 809088f41588c968b2e30210f98a494a497b07ff | [
"Apache-2.0"
] | null | null | null | optimus/version.py | rafaelang/Optimus | 809088f41588c968b2e30210f98a494a497b07ff | [
"Apache-2.0"
] | null | null | null | def _safe_int(string):
try:
return int(string)
except ValueError:
return string
__version__ = '3.0.6'
VERSION = tuple(_safe_int(x) for x in __version__.split('.'))
| 19 | 61 | 0.642105 | def _safe_int(string):
try:
return int(string)
except ValueError:
return string
__version__ = '3.0.6'
VERSION = tuple(_safe_int(x) for x in __version__.split('.'))
| true | true |
f724af7352302d77818ce7630117090761337ead | 1,853 | py | Python | server/plato/test/test_user_model.py | zhlooking/plato | 9daf0dfd8b376603453eadf2d981c71d3adb2632 | [
"MIT"
] | null | null | null | server/plato/test/test_user_model.py | zhlooking/plato | 9daf0dfd8b376603453eadf2d981c71d3adb2632 | [
"MIT"
] | null | null | null | server/plato/test/test_user_model.py | zhlooking/plato | 9daf0dfd8b376603453eadf2d981c71d3adb2632 | [
"MIT"
] | null | null | null | from plato.test.base import BaseTestCase
from sqlalchemy.exc import IntegrityError
from plato import db
from plato.model.user import User
from plato.test.utils import add_user
class TestUserModel(BaseTestCase):
    """Unit tests for the User model: defaults, uniqueness, hashing, JWTs."""

    def test_user_model(self):
        """A freshly added user carries the expected default field values."""
        user = add_user('foo', 'foo@bar.com', 'test_pwd')
        self.assertTrue(user.id)
        self.assertEqual('foo', user.username)
        self.assertEqual('foo@bar.com', user.email)
        self.assertTrue(user.active)
        self.assertTrue(user.created_at)
        self.assertTrue(user.password)
        # Users must be non-admin by default (was assertTrue(admin == False)).
        self.assertFalse(user.admin)

    def test_add_user_duplicate_username(self):
        """Committing a second user with the same username must fail."""
        add_user('foo', 'foo@bar.com', 'test_pwd')
        duplicate_user = User('foo', 'foo_1@bar.com', 'test_pwd')
        db.session.add(duplicate_user)
        self.assertRaises(IntegrityError, db.session.commit)

    def test_add_user_duplicate_email(self):
        """Committing a second user with the same email must fail."""
        add_user('foo', 'foo@bar.com', 'test_pwd')
        duplicate_user = User('foo_1', 'foo@bar.com', 'test_pwd')
        db.session.add(duplicate_user)
        self.assertRaises(IntegrityError, db.session.commit)

    def test_passwords_are_random(self):
        """Identical plaintexts must hash differently (per-user salting)."""
        user_foo = add_user('foo', 'foo@bar.com', 'test_pwd')
        user_bar = add_user('bar', 'bar@bar.com', 'test_pwd')
        self.assertNotEqual(user_foo.password, user_bar.password)

    def test_encode_auth_token(self):
        """encode_auth_token returns a bytes token."""
        user = add_user('test@test.com', 'test@test.com', 'test')
        auth_token = user.encode_auth_token(user.id)
        self.assertTrue(isinstance(auth_token, bytes))

    def test_decode_auth_token(self):
        """Decoding a freshly encoded token yields the original user id."""
        user = add_user('test@test.com', 'test@test.com', 'test')
        auth_token = user.encode_auth_token(user.id)
        self.assertTrue(isinstance(auth_token, bytes))
        # BUG FIX: original used assertTrue(x, y) where y is only the failure
        # *message*, so the id comparison was never actually performed.
        self.assertEqual(User.decode_auth_token(auth_token), user.id)
| 39.425532 | 68 | 0.683216 | from plato.test.base import BaseTestCase
from sqlalchemy.exc import IntegrityError
from plato import db
from plato.model.user import User
from plato.test.utils import add_user
class TestUserModel(BaseTestCase):
def test_user_model(self):
user = add_user('foo', 'foo@bar.com', 'test_pwd')
self.assertTrue(user.id)
self.assertEqual('foo', user.username)
self.assertEqual('foo@bar.com', user.email)
self.assertTrue(user.active)
self.assertTrue(user.created_at)
self.assertTrue(user.password)
self.assertTrue(user.admin == False)
def test_add_user_duplicate_username(self):
add_user('foo', 'foo@bar.com', 'test_pwd')
duplicate_user = User('foo', 'foo_1@bar.com', 'test_pwd')
db.session.add(duplicate_user)
self.assertRaises(IntegrityError, db.session.commit)
def test_add_user_duplicate_email(self):
add_user('foo', 'foo@bar.com', 'test_pwd')
duplicate_user = User('foo_1', 'foo@bar.com', 'test_pwd')
db.session.add(duplicate_user)
self.assertRaises(IntegrityError, db.session.commit)
def test_passwords_are_random(self):
user_foo = add_user('foo', 'foo@bar.com', 'test_pwd')
user_bar = add_user('bar', 'bar@bar.com', 'test_pwd')
self.assertNotEqual(user_foo.password, user_bar.password)
def test_encode_auth_token(self):
user = add_user('test@test.com', 'test@test.com', 'test')
auth_token = user.encode_auth_token(user.id)
self.assertTrue(isinstance(auth_token, bytes))
def test_decode_auth_token(self):
user = add_user('test@test.com', 'test@test.com', 'test')
auth_token = user.encode_auth_token(user.id)
self.assertTrue(isinstance(auth_token, bytes))
self.assertTrue(User.decode_auth_token(auth_token), user.id)
| true | true |
f724aff84f870ddcfdf9843c043f46bf8a185053 | 1,034 | py | Python | Activities Week 7 (social analytics)/Social_Analytics_Part3/Day3/ChatterBot.py | lraynes/ClassActivities | 920df2331f39c8a89477ab73e4393675a299d02d | [
"MIT"
] | null | null | null | Activities Week 7 (social analytics)/Social_Analytics_Part3/Day3/ChatterBot.py | lraynes/ClassActivities | 920df2331f39c8a89477ab73e4393675a299d02d | [
"MIT"
] | null | null | null | Activities Week 7 (social analytics)/Social_Analytics_Part3/Day3/ChatterBot.py | lraynes/ClassActivities | 920df2331f39c8a89477ab73e4393675a299d02d | [
"MIT"
] | null | null | null | # Dependencies
import tweepy
import time
import json
from config import consumer_key, consumer_secret, access_token, access_token_secret
# Twitter API credentials. Re-binding the imported names below is a no-op
# kept from the original tutorial code; `json` is likewise unused here.
consumer_key = consumer_key
consumer_secret = consumer_secret
access_token = access_token
access_token_secret = access_token_secret
# Authenticate with Twitter and build an API client that parses responses
# as JSON.
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, parser=tweepy.parsers.JSONParser())
# Post a single numbered status update (side effect: publishes a tweet on
# the authenticated account).
def TweetOut(tweet_number):
    api.update_status(
        "Can't stop. Won't stop. Chatting! This is Tweet #%s!" %
        tweet_number)
# Tweet counter; incremented once per loop iteration below.
counter = 0
# Infinite loop: tweet once a minute, forever (interrupt to stop).
while(True):
    # Publish tweet number `counter`.
    TweetOut(counter)
    # Wait 60 seconds between tweets — presumably also keeps the account
    # under Twitter's posting rate limits; confirm.
    time.sleep(60)
    # Advance the counter before the next iteration.
counter = counter + 1 | 25.85 | 83 | 0.766925 |
import tweepy
import time
import json
from config import consumer_key, consumer_secret, access_token, access_token_secret
consumer_key = consumer_key
consumer_secret = consumer_secret
access_token = access_token
access_token_secret = access_token_secret
auth = tweepy.OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
api = tweepy.API(auth, parser=tweepy.parsers.JSONParser())
def TweetOut(tweet_number):
api.update_status(
"Can't stop. Won't stop. Chatting! This is Tweet #%s!" %
tweet_number)
counter = 0
while(True):
TweetOut(counter)
time.sleep(60)
counter = counter + 1 | true | true |
f724b011af2d3189f97b37e16bda3a9e70054f15 | 410 | py | Python | scripts/wk/cfg/log.py | 2Shirt/WizardK | 82a2e7f85c80a52f892c1553e7a45ec0174e7bc6 | [
"MIT"
] | null | null | null | scripts/wk/cfg/log.py | 2Shirt/WizardK | 82a2e7f85c80a52f892c1553e7a45ec0174e7bc6 | [
"MIT"
] | 178 | 2017-11-17T19:14:31.000Z | 2021-12-15T07:43:29.000Z | scripts/wk/cfg/log.py | 2Shirt/WizardK | 82a2e7f85c80a52f892c1553e7a45ec0174e7bc6 | [
"MIT"
] | 1 | 2017-11-17T19:32:36.000Z | 2017-11-17T19:32:36.000Z | """WizardKit: Config - Log"""
# vim: sts=2 sw=2 ts=2
# Verbose logging configuration: DEBUG level, and the format also records
# the logger name and the calling function. Presumably consumed as kwargs
# for logging configuration elsewhere in WizardKit — confirm at call site.
DEBUG = {
  'level': 'DEBUG',
  'format': '[%(asctime)s %(levelname)s] [%(name)s.%(funcName)s] %(message)s',
  # NOTE(review): time-of-day has no separators (HHMMSS) — looks intentional
  # for compactness; confirm before "fixing".
  'datefmt': '%Y-%m-%d %H%M%S%z',
  }
# Default (quieter) configuration: INFO level, message only, minute
# resolution timestamps.
DEFAULT = {
  'level': 'INFO',
  'format': '[%(asctime)s %(levelname)s] %(message)s',
  'datefmt': '%Y-%m-%d %H%M%z',
  }
# This module only holds config data; warn if executed as a script.
if __name__ == '__main__':
  print("This file is not meant to be called directly.")
| 21.578947 | 78 | 0.55122 |
DEBUG = {
'level': 'DEBUG',
'format': '[%(asctime)s %(levelname)s] [%(name)s.%(funcName)s] %(message)s',
'datefmt': '%Y-%m-%d %H%M%S%z',
}
DEFAULT = {
'level': 'INFO',
'format': '[%(asctime)s %(levelname)s] %(message)s',
'datefmt': '%Y-%m-%d %H%M%z',
}
if __name__ == '__main__':
print("This file is not meant to be called directly.")
| true | true |
f724b08ac071745f08342d655971d0e5d1d90152 | 10,921 | py | Python | tests/system/test_integration.py | kaiyan-sheng/apm-server | fe1db82a1043508088e5db7c057b84b1e3ce474f | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | tests/system/test_integration.py | kaiyan-sheng/apm-server | fe1db82a1043508088e5db7c057b84b1e3ce474f | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | tests/system/test_integration.py | kaiyan-sheng/apm-server | fe1db82a1043508088e5db7c057b84b1e3ce474f | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | import time
from apmserver import integration_test
from apmserver import ClientSideBaseTest, ElasticTest, ExpvarBaseTest, ProcStartupFailureTest
from helper import wait_until
from es_helper import index_metric, index_transaction, index_error, index_span, index_onboarding, index_name
@integration_test
class Test(ElasticTest):

    def test_template(self):
        """Start the beat and verify the index template was loaded into ES."""
        wait_until(lambda: self.es.indices.exists(index_onboarding))
        loaded = self.es.indices.get_template(index_name)
        assert len(loaded) == 1
        template = loaded[index_name]
        fields_limit = template['settings']['index']['mapping']['total_fields']['limit']
        assert fields_limit == "2000", fields_limit

    def test_tags_type(self):
        """Tag fields must be mapped according to their JSON value type."""
        self.load_docs_with_template(self.get_payload_path("transactions_spans.ndjson"),
                                     self.intake_url, 'transaction', 8)
        self.assert_no_logged_warnings()
        mappings = self.es.indices.get_field_mapping(index=index_transaction, fields="context.tags.*")
        field_mappings = mappings["{}-000001".format(index_transaction)]["mappings"]
        for name, info in field_mappings.items():
            fullname = info["full_name"]
            # expected ES type is determined by the tag-name prefix
            if fullname.startswith("context.tags.bool"):
                expected = "boolean"
            elif fullname.startswith("context.tags.number"):
                expected = "scaled_float"
            else:
                expected = "keyword"
            for mapping in info["mapping"].values():
                mtype = mapping["type"]
                assert mtype == expected, "{} mapped as {}, not {}".format(name, mtype, expected)

    def test_load_docs_with_template_and_add_transaction(self):
        """
        Send transaction data through a started beat and verify every
        document makes it into ES, i.e. the data is compatible with the
        loaded template and has the expected shape.
        """
        self.load_docs_with_template(self.get_payload_path("transactions_spans.ndjson"),
                                     self.intake_url, 'transaction', 8)
        self.assert_no_logged_warnings()
        # transactions: compare indexed documents against approved fixtures
        transaction_docs = self.wait_for_events('transaction', 3, index=index_transaction)
        self.approve_docs('transaction', transaction_docs)
        # spans: same comparison
        span_docs = self.wait_for_events('transaction', 5, index=index_span)
        self.approve_docs('spans', span_docs)

    def test_load_docs_with_template_and_add_error(self):
        """
        Send error data through a started beat and verify it is indexed,
        i.e. compatible with the loaded template.
        """
        self.load_docs_with_template(self.get_error_payload_path(), self.intake_url, 'error', 4)
        self.assert_no_logged_warnings()
        error_docs = self.wait_for_events('error', 4, index=index_error)
        self.approve_docs('error', error_docs)
@integration_test
class EnrichEventIntegrationTest(ClientSideBaseTest, ElasticTest):
    """Server-side enrichment checks for RUM vs. backend events:
    library_frame marking on stacktraces and error grouping keys."""
    def test_backend_error(self):
        # for backend events library_frame information should not be changed,
        # as no regex pattern is defined.
        self.load_docs_with_template(self.get_backend_error_payload_path(),
                                     self.backend_intake_url,
                                     'error',
                                     4)
        self.check_library_frames({"true": 1, "false": 0, "empty": 3}, index_error)
    def test_rum_error(self):
        # RUM errors go through the RUM endpoint — presumably library_frame
        # is set from a configured pattern here; see test_backend_error.
        self.load_docs_with_template(self.get_error_payload_path(),
                                     self.intake_url,
                                     'error',
                                     1)
        self.check_library_frames({"true": 5, "false": 0, "empty": 1}, index_error)
    def test_rum_transaction(self):
        # Spans of a RUM transaction get the same library_frame treatment.
        self.load_docs_with_template(self.get_transaction_payload_path(),
                                     self.intake_url,
                                     'transaction',
                                     2)
        self.check_library_frames({"true": 1, "false": 0, "empty": 1}, index_span)
    def test_grouping_key_for_error(self):
        # upload the same error, once via rum, once via backend endpoint
        # check they don't have the same grouping key, as the
        # `rum.exclude_from_grouping` should only be applied to the rum error.
        self.load_docs_with_template(self.get_error_payload_path(),
                                     self.intake_url,
                                     'error',
                                     1)
        self.load_docs_with_template(self.get_error_payload_path(),
                                     self.backend_intake_url,
                                     'error',
                                     2)
        # NOTE(review): assumes the search returns exactly these two docs;
        # their relative order does not matter for the inequality below.
        rs = self.es.search(index=index_error)
        docs = rs['hits']['hits']
        grouping_key1 = docs[0]["_source"]["error"]["grouping_key"]
        grouping_key2 = docs[1]["_source"]["error"]["grouping_key"]
        assert grouping_key1 != grouping_key2
    def check_library_frames(self, library_frames, index_name):
        # Tally library_frame flags across all indexed error/span docs and
        # compare against *library_frames* ({"true": n, "false": n, "empty": n}).
        rs = self.es.search(index=index_name)
        l_frames = {"true": 0, "false": 0, "empty": 0}
        for doc in rs['hits']['hits']:
            if "error" in doc["_source"]:
                err = doc["_source"]["error"]
                # both exception entries and the log entry carry stacktraces
                for exception in err.get("exception", []):
                    self.count_library_frames(exception, l_frames)
                if "log" in err:
                    self.count_library_frames(err["log"], l_frames)
            elif "span" in doc["_source"]:
                span = doc["_source"]["span"]
                self.count_library_frames(span, l_frames)
        assert l_frames == library_frames, "found {}, expected {}".format(
            l_frames, library_frames)
    @staticmethod
    def count_library_frames(doc, lf):
        # Bucket every stacktrace frame of *doc* into *lf* by its
        # library_frame flag: "true"/"false" when present, "empty" when the
        # key is missing entirely.
        if "stacktrace" not in doc:
            return
        for frame in doc["stacktrace"]:
            if "library_frame" in frame:
                k = "true" if frame["library_frame"] else "false"
                lf[k] += 1
            else:
                lf["empty"] += 1
@integration_test
class ILMDisabledIntegrationTest(ElasticTest):
    """With ILM disabled, events must land in date-suffixed indices
    rather than ILM-managed write aliases."""
    config_overrides = {"ilm_enabled": "false"}
    def test_override_indices_config(self):
        # load error and transaction document to ES
        # query_index pins the day-stamped index name; 2017.05.09 is
        # presumably the fixture's event date — confirm against the payload.
        self.load_docs_with_template(self.get_error_payload_path(),
                                     self.intake_url,
                                     'error',
                                     4,
                                     query_index="{}-2017.05.09".format(index_error))
class OverrideIndicesTest(ElasticTest):
    """Shared base: point the server at a fixed override index/template."""

    def config(self):
        settings = super(OverrideIndicesTest, self).config()
        settings.update(override_index=index_name,
                        override_template=index_name)
        return settings
@integration_test
class OverrideIndicesIntegrationTest(OverrideIndicesTest):
    # default ILM=auto disables ILM when custom indices given
    def test_override_indices_config(self):
        # load error and transaction document to ES
        self.load_docs_with_template(self.get_error_payload_path(),
                                     self.intake_url,
                                     'error',
                                     4,
                                     query_index=index_name)
        self.load_docs_with_template(self.get_payload_path("transactions_spans_rum.ndjson"),
                                     self.intake_url,
                                     'transaction',
                                     2,
                                     query_index=index_name)
        # check that every document is indexed once in the expected index (incl.1 onboarding doc)
        # 4 errors + 2 transaction-payload events + 1 onboarding document
        assert 4+2+1 == self.es.count(index=index_name)['count']
@integration_test
class OverrideIndicesILMFalseIntegrationTest(OverrideIndicesTest):
    """Explicit ilm_enabled=false with an override index: everything is
    written directly to the override index."""
    config_overrides = {"ilm_enabled": "false"}
    def test_override_indices_config(self):
        # load error and transaction document to ES
        self.load_docs_with_template(self.get_error_payload_path(),
                                     self.intake_url,
                                     'error',
                                     4,
                                     query_index=index_name)
        # 4 error events + 1 onboarding document
        assert 4+1 == self.es.count(index=index_name)['count']
@integration_test
class OverrideIndicesILMTrueIntegrationTest(OverrideIndicesTest):
    """Explicit ilm_enabled=true wins over the override index: events go
    to the ILM-managed error index instead."""
    config_overrides = {"ilm_enabled": "true"}
    def test_override_indices_config(self):
        # load error and transaction document to ES
        self.load_docs_with_template(self.get_error_payload_path(),
                                     self.intake_url,
                                     'error',
                                     4,
                                     query_index=self.ilm_index(index_error))
        # only the 4 error events; onboarding doc is not in the ILM index
        assert 4 == self.es.count(index=self.ilm_index(index_error))['count']
@integration_test
class OverrideIndicesFailureIntegrationTest(ProcStartupFailureTest):
    """An override index without matching template name/pattern settings
    must abort server startup with an explanatory error."""
    config_overrides = {
        "override_index": "apm-foo",
        "elasticsearch_host": "localhost:8200",
        "file_enabled": "false",
    }
    def test_template_setup_error(self):
        # the exact exit message the server must log before terminating
        loaded_msg = "Exiting: `setup.template.name` and `setup.template.pattern` have to be set"
        wait_until(lambda: self.log_contains(loaded_msg), max_timeout=5)
@integration_test
class ExpvarDisabledIntegrationTest(ExpvarBaseTest):
    """With expvar turned off, the debug-vars endpoint must return 404."""
    config_overrides = {"expvar_enabled": "false"}

    def test_expvar_exists(self):
        response = self.get_debug_vars()
        assert response.status_code == 404, response.status_code
@integration_test
class ExpvarEnabledIntegrationTest(ExpvarBaseTest):
    """With expvar turned on, the debug-vars endpoint must return 200."""
    config_overrides = {"expvar_enabled": "true"}

    def test_expvar_exists(self):
        response = self.get_debug_vars()
        assert response.status_code == 200, response.status_code
@integration_test
class ExpvarCustomUrlIntegrationTest(ExpvarBaseTest):
    """expvar served from a custom URL path must still return 200."""
    config_overrides = {"expvar_enabled": "true", "expvar_url": "/foo"}
    expvar_url = ExpvarBaseTest.expvar_url.replace("/debug/vars", "/foo")

    def test_expvar_exists(self):
        response = self.get_debug_vars()
        assert response.status_code == 200, response.status_code
| 42.996063 | 108 | 0.597839 | import time
from apmserver import integration_test
from apmserver import ClientSideBaseTest, ElasticTest, ExpvarBaseTest, ProcStartupFailureTest
from helper import wait_until
from es_helper import index_metric, index_transaction, index_error, index_span, index_onboarding, index_name
@integration_test
class Test(ElasticTest):
def test_template(self):
wait_until(lambda: self.es.indices.exists(index_onboarding))
templates = self.es.indices.get_template(index_name)
assert len(templates) == 1
t = templates[index_name]
total_fields_limit = t['settings']['index']['mapping']['total_fields']['limit']
assert total_fields_limit == "2000", total_fields_limit
def test_tags_type(self):
self.load_docs_with_template(self.get_payload_path("transactions_spans.ndjson"),
self.intake_url, 'transaction', 8)
self.assert_no_logged_warnings()
mappings = self.es.indices.get_field_mapping(index=index_transaction, fields="context.tags.*")
for name, metric in mappings["{}-000001".format(index_transaction)]["mappings"].items():
fullname = metric["full_name"]
for mapping in metric["mapping"].values():
mtype = mapping["type"]
if fullname.startswith("context.tags.bool"):
assert mtype == "boolean", name + " mapped as " + mtype + ", not boolean"
elif fullname.startswith("context.tags.number"):
assert mtype == "scaled_float", name + " mapped as " + mtype + ", not scaled_float"
else:
assert mtype == "keyword", name + " mapped as " + mtype + ", not keyword"
def test_load_docs_with_template_and_add_transaction(self):
self.load_docs_with_template(self.get_payload_path("transactions_spans.ndjson"),
self.intake_url, 'transaction', 8)
self.assert_no_logged_warnings()
transaction_docs = self.wait_for_events('transaction', 3, index=index_transaction)
self.approve_docs('transaction', transaction_docs)
span_docs = self.wait_for_events('transaction', 5, index=index_span)
self.approve_docs('spans', span_docs)
def test_load_docs_with_template_and_add_error(self):
self.load_docs_with_template(self.get_error_payload_path(), self.intake_url, 'error', 4)
self.assert_no_logged_warnings()
error_docs = self.wait_for_events('error', 4, index=index_error)
self.approve_docs('error', error_docs)
@integration_test
class EnrichEventIntegrationTest(ClientSideBaseTest, ElasticTest):
def test_backend_error(self):
self.load_docs_with_template(self.get_backend_error_payload_path(),
self.backend_intake_url,
'error',
4)
self.check_library_frames({"true": 1, "false": 0, "empty": 3}, index_error)
def test_rum_error(self):
self.load_docs_with_template(self.get_error_payload_path(),
self.intake_url,
'error',
1)
self.check_library_frames({"true": 5, "false": 0, "empty": 1}, index_error)
def test_rum_transaction(self):
self.load_docs_with_template(self.get_transaction_payload_path(),
self.intake_url,
'transaction',
2)
self.check_library_frames({"true": 1, "false": 0, "empty": 1}, index_span)
def test_grouping_key_for_error(self):
# `rum.exclude_from_grouping` should only be applied to the rum error.
self.load_docs_with_template(self.get_error_payload_path(),
self.intake_url,
'error',
1)
self.load_docs_with_template(self.get_error_payload_path(),
self.backend_intake_url,
'error',
2)
rs = self.es.search(index=index_error)
docs = rs['hits']['hits']
grouping_key1 = docs[0]["_source"]["error"]["grouping_key"]
grouping_key2 = docs[1]["_source"]["error"]["grouping_key"]
assert grouping_key1 != grouping_key2
def check_library_frames(self, library_frames, index_name):
rs = self.es.search(index=index_name)
l_frames = {"true": 0, "false": 0, "empty": 0}
for doc in rs['hits']['hits']:
if "error" in doc["_source"]:
err = doc["_source"]["error"]
for exception in err.get("exception", []):
self.count_library_frames(exception, l_frames)
if "log" in err:
self.count_library_frames(err["log"], l_frames)
elif "span" in doc["_source"]:
span = doc["_source"]["span"]
self.count_library_frames(span, l_frames)
assert l_frames == library_frames, "found {}, expected {}".format(
l_frames, library_frames)
@staticmethod
def count_library_frames(doc, lf):
if "stacktrace" not in doc:
return
for frame in doc["stacktrace"]:
if "library_frame" in frame:
k = "true" if frame["library_frame"] else "false"
lf[k] += 1
else:
lf["empty"] += 1
@integration_test
class ILMDisabledIntegrationTest(ElasticTest):
config_overrides = {"ilm_enabled": "false"}
def test_override_indices_config(self):
# load error and transaction document to ES
self.load_docs_with_template(self.get_error_payload_path(),
self.intake_url,
'error',
4,
query_index="{}-2017.05.09".format(index_error))
class OverrideIndicesTest(ElasticTest):
def config(self):
cfg = super(OverrideIndicesTest, self).config()
cfg.update({"override_index": index_name,
"override_template": index_name})
return cfg
@integration_test
class OverrideIndicesIntegrationTest(OverrideIndicesTest):
# default ILM=auto disables ILM when custom indices given
def test_override_indices_config(self):
# load error and transaction document to ES
self.load_docs_with_template(self.get_error_payload_path(),
self.intake_url,
'error',
4,
query_index=index_name)
self.load_docs_with_template(self.get_payload_path("transactions_spans_rum.ndjson"),
self.intake_url,
'transaction',
2,
query_index=index_name)
# check that every document is indexed once in the expected index (incl.1 onboarding doc)
assert 4+2+1 == self.es.count(index=index_name)['count']
@integration_test
class OverrideIndicesILMFalseIntegrationTest(OverrideIndicesTest):
config_overrides = {"ilm_enabled": "false"}
def test_override_indices_config(self):
# load error and transaction document to ES
self.load_docs_with_template(self.get_error_payload_path(),
self.intake_url,
'error',
4,
query_index=index_name)
assert 4+1 == self.es.count(index=index_name)['count']
@integration_test
class OverrideIndicesILMTrueIntegrationTest(OverrideIndicesTest):
config_overrides = {"ilm_enabled": "true"}
def test_override_indices_config(self):
# load error and transaction document to ES
self.load_docs_with_template(self.get_error_payload_path(),
self.intake_url,
'error',
4,
query_index=self.ilm_index(index_error))
assert 4 == self.es.count(index=self.ilm_index(index_error))['count']
@integration_test
class OverrideIndicesFailureIntegrationTest(ProcStartupFailureTest):
config_overrides = {
"override_index": "apm-foo",
"elasticsearch_host": "localhost:8200",
"file_enabled": "false",
}
def test_template_setup_error(self):
loaded_msg = "Exiting: `setup.template.name` and `setup.template.pattern` have to be set"
wait_until(lambda: self.log_contains(loaded_msg), max_timeout=5)
@integration_test
class ExpvarDisabledIntegrationTest(ExpvarBaseTest):
config_overrides = {"expvar_enabled": "false"}
def test_expvar_exists(self):
r = self.get_debug_vars()
assert r.status_code == 404, r.status_code
@integration_test
class ExpvarEnabledIntegrationTest(ExpvarBaseTest):
config_overrides = {"expvar_enabled": "true"}
def test_expvar_exists(self):
r = self.get_debug_vars()
assert r.status_code == 200, r.status_code
@integration_test
class ExpvarCustomUrlIntegrationTest(ExpvarBaseTest):
config_overrides = {"expvar_enabled": "true", "expvar_url": "/foo"}
expvar_url = ExpvarBaseTest.expvar_url.replace("/debug/vars", "/foo")
def test_expvar_exists(self):
r = self.get_debug_vars()
assert r.status_code == 200, r.status_code
| true | true |
f724b0962b84b5c8a44fd19d50552d5624391c1e | 1,408 | py | Python | bcs-ui/backend/tests/container_service/observability/log_stream/test_utils.py | laodiu/bk-bcs | 2a956a42101ff6487ff521fb3ef429805bfa7e26 | [
"Apache-2.0"
] | 599 | 2019-06-25T03:20:46.000Z | 2022-03-31T12:14:33.000Z | bcs-ui/backend/tests/container_service/observability/log_stream/test_utils.py | laodiu/bk-bcs | 2a956a42101ff6487ff521fb3ef429805bfa7e26 | [
"Apache-2.0"
] | 537 | 2019-06-27T06:03:44.000Z | 2022-03-31T12:10:01.000Z | bcs-ui/backend/tests/container_service/observability/log_stream/test_utils.py | laodiu/bk-bcs | 2a956a42101ff6487ff521fb3ef429805bfa7e26 | [
"Apache-2.0"
] | 214 | 2019-06-25T03:26:05.000Z | 2022-03-31T07:52:03.000Z | # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from backend.container_service.observability.log_stream import utils
def test_refine_k8s_logs(log_content):
    # `log_content` is presumably a pytest fixture with raw k8s log text —
    # see conftest. refine_k8s_logs should split it into 10 timestamped
    # entries; the first entry's RFC3339Nano timestamp is pinned to the
    # fixture content.
    logs = utils.refine_k8s_logs(log_content, None)
    assert len(logs) == 10
    assert logs[0].time == '2021-05-19T12:03:52.516011121Z'
def test_calc_since_time(log_content):
    """calc_since_time derives a since-timestamp from the first/last entries."""
    logs = utils.refine_k8s_logs(log_content, None)
    # NOTE(review): `sine_time` is a typo for `since_time`; kept unchanged.
    sine_time = utils.calc_since_time(logs[0].time, logs[-1].time)
    assert sine_time == '2021-05-19T12:03:10.125788125Z'
def test_calc_previous_page(log_content):
    """calc_previous_page returns a non-empty previous-page marker."""
    logs = utils.refine_k8s_logs(log_content, None)
    # second arg mimics the view's query params; third is the session id (?)
    # — confirm the parameter meaning against calc_previous_page's signature.
    page = utils.calc_previous_page(logs, {'container_name': "", "previous": ""}, "")
    assert page != ""
| 41.411765 | 115 | 0.757102 |
from backend.container_service.observability.log_stream import utils
def test_refine_k8s_logs(log_content):
logs = utils.refine_k8s_logs(log_content, None)
assert len(logs) == 10
assert logs[0].time == '2021-05-19T12:03:52.516011121Z'
def test_calc_since_time(log_content):
logs = utils.refine_k8s_logs(log_content, None)
sine_time = utils.calc_since_time(logs[0].time, logs[-1].time)
assert sine_time == '2021-05-19T12:03:10.125788125Z'
def test_calc_previous_page(log_content):
logs = utils.refine_k8s_logs(log_content, None)
page = utils.calc_previous_page(logs, {'container_name': "", "previous": ""}, "")
assert page != ""
| true | true |
f724b0ddd809230cda004541ff8a28dab8b18b75 | 1,462 | py | Python | python/logs/example-logging.py | jgordo04/housinginsights_temp | 588e912de31b7f50f7239af0bd4dfeaa693616bd | [
"MIT"
] | null | null | null | python/logs/example-logging.py | jgordo04/housinginsights_temp | 588e912de31b7f50f7239af0bd4dfeaa693616bd | [
"MIT"
] | null | null | null | python/logs/example-logging.py | jgordo04/housinginsights_temp | 588e912de31b7f50f7239af0bd4dfeaa693616bd | [
"MIT"
] | null | null | null | import logging
# Configure the root logger: write everything at DEBUG and above to a file.
logging_filename = "../logs/example.log"
logging.basicConfig(filename=logging_filename, level=logging.DEBUG)
# ----------------
# Example logging
# ----------------
# Instead of print() (visible only on the command line), use the logging
# module to record messages in a log file.
# Benefit: easier to sift through lots of diagnostic output while chasing
# a bug.
# Downside: remember to delete log files occasionally — they keep growing
# and are recreated on the next run.
# Run this file, then open the newly created example.log.
# Re-running *appends* messages; deleting the log file starts it fresh.
logging.warning("--------------------starting module------------------")
logging.error("My error message")
logging.critical("My super bad error message")
logging.warning("This is a message that would always be written to logs")
# The two records below appear because basicConfig set level=logging.DEBUG;
# at the default WARNING level, INFO and DEBUG records would be dropped.
logging.info("This message only comes through when level=logging.DEBUG")
# stack_info=True appends the call-site stack to the record, like a traceback.
logging.debug("this is a debug message", stack_info=True)
print("Example logging complete! Open example.log to see what happened.")
| 44.30303 | 116 | 0.725718 | import logging
logging_filename = "../logs/example.log"
logging.basicConfig(filename=logging_filename, level=logging.DEBUG)
# They will be recreated next time you start the program.
# To see logging in action, run this file and then look in the newly created example.log file
# Every time you re-run this file, messages will be *added* to the log file
# Every time you delete the log file, and then re-run this file it will be created fresh.
logging.warning("--------------------starting module------------------")
logging.error("My error message")
logging.critical("My super bad error message")
logging.warning("This is a message that would always be written to logs")
logging.info("This message only comes through when level=logging.DEBUG")
#adding stack_info=True makes a log also report where it was called from (e.g. line 29), like a regular python error
logging.debug("this is a debug message", stack_info=True)
print("Example logging complete! Open example.log to see what happened.")
| true | true |
f724b12ab738483eea567de9687c75d90ba7a949 | 788 | py | Python | OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GLX/NV/multigpu_context.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GLX/NV/multigpu_context.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | OpenGLWrapper_JE/venv/Lib/site-packages/OpenGL/GLX/NV/multigpu_context.py | JE-Chen/je_old_repo | a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5 | [
"MIT"
] | null | null | null | '''OpenGL extension NV.multigpu_context
This module customises the behaviour of the
OpenGL.raw.GLX.NV.multigpu_context to provide a more
Python-friendly API
The official definition of this extension is available here:
http://www.opengl.org/registry/specs/NV/multigpu_context.txt
'''
from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GLX import _types, _glgets
from OpenGL.raw.GLX.NV.multigpu_context import *
from OpenGL.raw.GLX.NV.multigpu_context import _EXTENSION_NAME
def glInitMultigpuContextNV():
    '''Report whether the running context exposes GLX_NV_multigpu_context.'''
    # local import mirrors the autogenerated wrapper's lazy-import style
    from OpenGL import extensions as ext_registry
    return ext_registry.hasGLExtension(_EXTENSION_NAME)
### END AUTOGENERATED SECTION | 34.26087 | 72 | 0.785533 | from OpenGL import platform, constant, arrays
from OpenGL import extensions, wrapper
import ctypes
from OpenGL.raw.GLX import _types, _glgets
from OpenGL.raw.GLX.NV.multigpu_context import *
from OpenGL.raw.GLX.NV.multigpu_context import _EXTENSION_NAME
def glInitMultigpuContextNV():
from OpenGL import extensions
return extensions.hasGLExtension( _EXTENSION_NAME )
| true | true |
f724b18933f6ba44d5bf81c468e375a0ec86b207 | 2,314 | py | Python | creategroup.py | vnitinv/JChat | a6fee1b8cb07f8ed4bcda404e0519e0e580646da | [
"Apache-2.0"
] | 3 | 2016-04-20T01:33:32.000Z | 2016-07-19T12:24:27.000Z | creategroup.py | vnitinv/JChat | a6fee1b8cb07f8ed4bcda404e0519e0e580646da | [
"Apache-2.0"
] | null | null | null | creategroup.py | vnitinv/JChat | a6fee1b8cb07f8ed4bcda404e0519e0e580646da | [
"Apache-2.0"
] | null | null | null | from PyQt4 import QtCore, QtGui
from boxes import ConnectedDevice
# PyQt4 compatibility shims (typical of pyuic-generated code).
try:
    # API v1: QString.fromUtf8 converts a Python byte string to QString.
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # API v2 / Python 3: QString is gone; strings pass through unchanged.
    def _fromUtf8(s):
        return s
try:
    # Older translate() signature takes an explicit encoding argument.
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer PyQt dropped the encoding parameter from translate().
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_CreateGroup(object):
    """pyuic-style UI builder for the "create group" dialog (PyQt4)."""
    def setupUi(self, CreateGroup):
        # ConnectedDevice holds the group registry updated by updateGroups;
        # presumably shared state across the app — confirm.
        self.cd_cg = ConnectedDevice()
        CreateGroup.setObjectName(_fromUtf8("CreateGroup"))
        CreateGroup.resize(405, 114)
        self.gridLayout = QtGui.QGridLayout(CreateGroup)
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        # Row with the "Group Name:" label and its input field.
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        self.grou_name_label = QtGui.QLabel(CreateGroup)
        self.grou_name_label.setObjectName(_fromUtf8("grou_name_label"))
        self.horizontalLayout.addWidget(self.grou_name_label)
        self.group_name_lineEdit = QtGui.QLineEdit(CreateGroup)
        self.group_name_lineEdit.setDragEnabled(True)
        self.group_name_lineEdit.setObjectName(_fromUtf8("group_name_lineEdit"))
        self.horizontalLayout.addWidget(self.group_name_lineEdit)
        self.gridLayout.addLayout(self.horizontalLayout, 0, 0, 1, 1)
        # "Create" button. NOTE(review): objectName "pushButton" does not
        # match the attribute name cg_pushButton — confirm nothing looks the
        # widget up by object name before changing either.
        self.cg_pushButton = QtGui.QPushButton(CreateGroup)
        self.cg_pushButton.setObjectName(_fromUtf8("pushButton"))
        self.gridLayout.addWidget(self.cg_pushButton, 1, 0, 1, 1)
        self.retranslateUi(CreateGroup)
        # Old-style signal/slot: clicking "Create" registers the group.
        QtCore.QObject.connect(self.cg_pushButton, QtCore.SIGNAL(_fromUtf8("clicked()")), self.updateGroups)
        QtCore.QMetaObject.connectSlotsByName(CreateGroup)
    def updateGroups(self):
        # Register the typed group name with an empty member list, once;
        # existing groups are left untouched.
        host = str(self.group_name_lineEdit.text())
        if host not in self.cd_cg.groups:
            self.cd_cg.groups[host] = []
    def retranslateUi(self, CreateGroup):
        # Set user-visible strings (translatable through Qt's tr machinery).
        CreateGroup.setWindowTitle(_translate("CreateGroup", "Group", None))
        self.grou_name_label.setText(_translate("CreateGroup", "Group Name:", None))
        self.cg_pushButton.setText(_translate("CreateGroup", "Create", None))
| 43.660377 | 108 | 0.723423 | from PyQt4 import QtCore, QtGui
from boxes import ConnectedDevice
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_CreateGroup(object):
def setupUi(self, CreateGroup):
self.cd_cg = ConnectedDevice()
CreateGroup.setObjectName(_fromUtf8("CreateGroup"))
CreateGroup.resize(405, 114)
self.gridLayout = QtGui.QGridLayout(CreateGroup)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.grou_name_label = QtGui.QLabel(CreateGroup)
self.grou_name_label.setObjectName(_fromUtf8("grou_name_label"))
self.horizontalLayout.addWidget(self.grou_name_label)
self.group_name_lineEdit = QtGui.QLineEdit(CreateGroup)
self.group_name_lineEdit.setDragEnabled(True)
self.group_name_lineEdit.setObjectName(_fromUtf8("group_name_lineEdit"))
self.horizontalLayout.addWidget(self.group_name_lineEdit)
self.gridLayout.addLayout(self.horizontalLayout, 0, 0, 1, 1)
self.cg_pushButton = QtGui.QPushButton(CreateGroup)
self.cg_pushButton.setObjectName(_fromUtf8("pushButton"))
self.gridLayout.addWidget(self.cg_pushButton, 1, 0, 1, 1)
self.retranslateUi(CreateGroup)
QtCore.QObject.connect(self.cg_pushButton, QtCore.SIGNAL(_fromUtf8("clicked()")), self.updateGroups)
QtCore.QMetaObject.connectSlotsByName(CreateGroup)
def updateGroups(self):
host = str(self.group_name_lineEdit.text())
if host not in self.cd_cg.groups:
self.cd_cg.groups[host] = []
def retranslateUi(self, CreateGroup):
CreateGroup.setWindowTitle(_translate("CreateGroup", "Group", None))
self.grou_name_label.setText(_translate("CreateGroup", "Group Name:", None))
self.cg_pushButton.setText(_translate("CreateGroup", "Create", None))
| true | true |
f724b33b9c2842b58bfecbe90baa6d08d858641f | 201 | py | Python | test.py | komireddys/Python_flask | c2e9747064f4b1a4b6d79f729d8ab1ec62e0d706 | [
"MIT"
] | null | null | null | test.py | komireddys/Python_flask | c2e9747064f4b1a4b6d79f729d8ab1ec62e0d706 | [
"MIT"
] | null | null | null | test.py | komireddys/Python_flask | c2e9747064f4b1a4b6d79f729d8ab1ec62e0d706 | [
"MIT"
] | null | null | null | from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "<h1>Welcome to Python Flask App!</h1> <p1>hello this sample page</p1>"
if __name__ == "__main__":
app.run()
| 20.1 | 82 | 0.656716 | from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "<h1>Welcome to Python Flask App!</h1> <p1>hello this sample page</p1>"
if __name__ == "__main__":
app.run()
| true | true |
f724b3e43741d780155776413bbee552df6dfdf4 | 3,183 | py | Python | optax/_src/constrain.py | VE-FORBRYDERNE/optax | 0d5421240cc7d4bf18fbed44fc5e5e2382a6e884 | [
"Apache-2.0"
] | null | null | null | optax/_src/constrain.py | VE-FORBRYDERNE/optax | 0d5421240cc7d4bf18fbed44fc5e5e2382a6e884 | [
"Apache-2.0"
] | null | null | null | optax/_src/constrain.py | VE-FORBRYDERNE/optax | 0d5421240cc7d4bf18fbed44fc5e5e2382a6e884 | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 DeepMind Technologies Limited. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Gradient transformations used to enforce specific constraints."""
from typing import Any, NamedTuple
import jax
import jax.numpy as jnp
from optax._src import base
# pylint:disable=no-value-for-parameter
NonNegativeParamsState = base.EmptyState
def keep_params_nonnegative() -> base.GradientTransformation:
"""Modifies the updates to keep parameters non-negative, i.e. >= 0.
This transformation ensures that parameters after the update will be
larger than or equal to zero.
In a chain of transformations, this should be the last one.
WARNING: the transformation expects input params to be non-negative.
When params is negative the transformed update will move them to 0.
Returns:
An (init_fn, update_fn) tuple.
"""
def init_fn(_):
return NonNegativeParamsState()
def update_fn(updates, state, params):
if params is None:
raise ValueError(base.NO_PARAMS_MSG)
updates = jax.tree_multimap(
lambda p, u: jnp.where((p + u) < 0., -p, u), params, updates)
return updates, state
return base.GradientTransformation(init_fn, update_fn)
class ZeroNansState(NamedTuple):
"""Contains a tree.
The entry `found_nan` has the same tree structure as that of the parameters.
Each leaf is a single boolean which contains True iff a NaN was detected in
the corresponding parameter array at the last call to `update`.
"""
found_nan: Any
def zero_nans() -> base.GradientTransformation:
"""A transformation which replaces NaNs with 0.
Zeroing values in gradients is guaranteed to produce a direction of
non-increasing loss.
The state of the transformation has the same tree structure as that of the
parameters. Each leaf is a single boolean which contains True iff a NaN was
detected in the corresponding parameter array at the last call to `update`.
This state is not used by the transformation internally, but lets users be
aware when NaNs have been zeroed out.
Returns:
A `GradientTransformation`.
"""
def init_fn(params):
return ZeroNansState(
jax.tree_map(lambda p: jnp.array(False, dtype=jnp.bool_), params))
def update_fn(updates, opt_state, params=None):
del params
opt_state = ZeroNansState(
jax.tree_map(lambda p: jnp.any(jnp.isnan(p)), updates))
updates = jax.tree_map(
lambda p: jnp.where(jnp.isnan(p), jnp.zeros_like(p), p), updates)
return updates, opt_state
return base.GradientTransformation(init=init_fn, update=update_fn)
| 32.814433 | 80 | 0.723217 |
from typing import Any, NamedTuple
import jax
import jax.numpy as jnp
from optax._src import base
NonNegativeParamsState = base.EmptyState
def keep_params_nonnegative() -> base.GradientTransformation:
def init_fn(_):
return NonNegativeParamsState()
def update_fn(updates, state, params):
if params is None:
raise ValueError(base.NO_PARAMS_MSG)
updates = jax.tree_multimap(
lambda p, u: jnp.where((p + u) < 0., -p, u), params, updates)
return updates, state
return base.GradientTransformation(init_fn, update_fn)
class ZeroNansState(NamedTuple):
found_nan: Any
def zero_nans() -> base.GradientTransformation:
def init_fn(params):
return ZeroNansState(
jax.tree_map(lambda p: jnp.array(False, dtype=jnp.bool_), params))
def update_fn(updates, opt_state, params=None):
del params
opt_state = ZeroNansState(
jax.tree_map(lambda p: jnp.any(jnp.isnan(p)), updates))
updates = jax.tree_map(
lambda p: jnp.where(jnp.isnan(p), jnp.zeros_like(p), p), updates)
return updates, opt_state
return base.GradientTransformation(init=init_fn, update=update_fn)
| true | true |
f724b40a71f16bb470b2cc5786eac0d4c6031f53 | 20,284 | py | Python | tests/auth/test_init.py | JariInc/home-assistant | 24a8d60566fb5cf62942d042d38965a705d1bc65 | [
"Apache-2.0"
] | null | null | null | tests/auth/test_init.py | JariInc/home-assistant | 24a8d60566fb5cf62942d042d38965a705d1bc65 | [
"Apache-2.0"
] | null | null | null | tests/auth/test_init.py | JariInc/home-assistant | 24a8d60566fb5cf62942d042d38965a705d1bc65 | [
"Apache-2.0"
] | 3 | 2018-09-14T07:34:09.000Z | 2018-09-29T12:57:10.000Z | """Tests for the Home Assistant auth module."""
from datetime import timedelta
from unittest.mock import Mock, patch
import pytest
from homeassistant import auth, data_entry_flow
from homeassistant.auth import (
models as auth_models, auth_store, const as auth_const)
from homeassistant.auth.mfa_modules import SESSION_EXPIRATION
from homeassistant.util import dt as dt_util
from tests.common import (
MockUser, ensure_auth_manager_loaded, flush_store, CLIENT_ID)
@pytest.fixture
def mock_hass(loop):
"""Hass mock with minimum amount of data set to make it work with auth."""
hass = Mock()
hass.config.skip_pip = True
return hass
async def test_auth_manager_from_config_validates_config_and_id(mock_hass):
"""Test get auth providers."""
manager = await auth.auth_manager_from_config(mock_hass, [{
'name': 'Test Name',
'type': 'insecure_example',
'users': [],
}, {
'name': 'Invalid config because no users',
'type': 'insecure_example',
'id': 'invalid_config',
}, {
'name': 'Test Name 2',
'type': 'insecure_example',
'id': 'another',
'users': [],
}, {
'name': 'Wrong because duplicate ID',
'type': 'insecure_example',
'id': 'another',
'users': [],
}], [])
providers = [{
'name': provider.name,
'id': provider.id,
'type': provider.type,
} for provider in manager.auth_providers]
assert providers == [{
'name': 'Test Name',
'type': 'insecure_example',
'id': None,
}, {
'name': 'Test Name 2',
'type': 'insecure_example',
'id': 'another',
}]
async def test_auth_manager_from_config_auth_modules(mock_hass):
"""Test get auth modules."""
manager = await auth.auth_manager_from_config(mock_hass, [{
'name': 'Test Name',
'type': 'insecure_example',
'users': [],
}, {
'name': 'Test Name 2',
'type': 'insecure_example',
'id': 'another',
'users': [],
}], [{
'name': 'Module 1',
'type': 'insecure_example',
'data': [],
}, {
'name': 'Module 2',
'type': 'insecure_example',
'id': 'another',
'data': [],
}, {
'name': 'Duplicate ID',
'type': 'insecure_example',
'id': 'another',
'data': [],
}])
providers = [{
'name': provider.name,
'type': provider.type,
'id': provider.id,
} for provider in manager.auth_providers]
assert providers == [{
'name': 'Test Name',
'type': 'insecure_example',
'id': None,
}, {
'name': 'Test Name 2',
'type': 'insecure_example',
'id': 'another',
}]
modules = [{
'name': module.name,
'type': module.type,
'id': module.id,
} for module in manager.auth_mfa_modules]
assert modules == [{
'name': 'Module 1',
'type': 'insecure_example',
'id': 'insecure_example',
}, {
'name': 'Module 2',
'type': 'insecure_example',
'id': 'another',
}]
async def test_create_new_user(hass):
"""Test creating new user."""
manager = await auth.auth_manager_from_config(hass, [{
'type': 'insecure_example',
'users': [{
'username': 'test-user',
'password': 'test-pass',
'name': 'Test Name'
}]
}], [])
step = await manager.login_flow.async_init(('insecure_example', None))
assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
step = await manager.login_flow.async_configure(step['flow_id'], {
'username': 'test-user',
'password': 'test-pass',
})
assert step['type'] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
user = step['result']
assert user is not None
assert user.is_owner is False
assert user.name == 'Test Name'
async def test_login_as_existing_user(mock_hass):
"""Test login as existing user."""
manager = await auth.auth_manager_from_config(mock_hass, [{
'type': 'insecure_example',
'users': [{
'username': 'test-user',
'password': 'test-pass',
'name': 'Test Name'
}]
}], [])
mock_hass.auth = manager
ensure_auth_manager_loaded(manager)
# Add a fake user that we're not going to log in with
user = MockUser(
id='mock-user2',
is_owner=False,
is_active=False,
name='Not user',
).add_to_auth_manager(manager)
user.credentials.append(auth_models.Credentials(
id='mock-id2',
auth_provider_type='insecure_example',
auth_provider_id=None,
data={'username': 'other-user'},
is_new=False,
))
# Add fake user with credentials for example auth provider.
user = MockUser(
id='mock-user',
is_owner=False,
is_active=False,
name='Paulus',
).add_to_auth_manager(manager)
user.credentials.append(auth_models.Credentials(
id='mock-id',
auth_provider_type='insecure_example',
auth_provider_id=None,
data={'username': 'test-user'},
is_new=False,
))
step = await manager.login_flow.async_init(('insecure_example', None))
assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
step = await manager.login_flow.async_configure(step['flow_id'], {
'username': 'test-user',
'password': 'test-pass',
})
assert step['type'] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
user = step['result']
assert user is not None
assert user.id == 'mock-user'
assert user.is_owner is False
assert user.is_active is False
assert user.name == 'Paulus'
async def test_linking_user_to_two_auth_providers(hass, hass_storage):
"""Test linking user to two auth providers."""
manager = await auth.auth_manager_from_config(hass, [{
'type': 'insecure_example',
'users': [{
'username': 'test-user',
'password': 'test-pass',
}]
}, {
'type': 'insecure_example',
'id': 'another-provider',
'users': [{
'username': 'another-user',
'password': 'another-password',
}]
}], [])
step = await manager.login_flow.async_init(('insecure_example', None))
step = await manager.login_flow.async_configure(step['flow_id'], {
'username': 'test-user',
'password': 'test-pass',
})
user = step['result']
assert user is not None
step = await manager.login_flow.async_init(
('insecure_example', 'another-provider'),
context={'credential_only': True})
step = await manager.login_flow.async_configure(step['flow_id'], {
'username': 'another-user',
'password': 'another-password',
})
new_credential = step['result']
await manager.async_link_user(user, new_credential)
assert len(user.credentials) == 2
async def test_saving_loading(hass, hass_storage):
"""Test storing and saving data.
Creates one of each type that we store to test we restore correctly.
"""
manager = await auth.auth_manager_from_config(hass, [{
'type': 'insecure_example',
'users': [{
'username': 'test-user',
'password': 'test-pass',
}]
}], [])
step = await manager.login_flow.async_init(('insecure_example', None))
step = await manager.login_flow.async_configure(step['flow_id'], {
'username': 'test-user',
'password': 'test-pass',
})
user = step['result']
await manager.async_activate_user(user)
await manager.async_create_refresh_token(user, CLIENT_ID)
await flush_store(manager._store._store)
store2 = auth_store.AuthStore(hass)
users = await store2.async_get_users()
assert len(users) == 1
assert users[0] == user
async def test_cannot_retrieve_expired_access_token(hass):
"""Test that we cannot retrieve expired access tokens."""
manager = await auth.auth_manager_from_config(hass, [], [])
user = MockUser().add_to_auth_manager(manager)
refresh_token = await manager.async_create_refresh_token(user, CLIENT_ID)
assert refresh_token.user.id is user.id
assert refresh_token.client_id == CLIENT_ID
access_token = manager.async_create_access_token(refresh_token)
assert (
await manager.async_validate_access_token(access_token)
is refresh_token
)
with patch('homeassistant.util.dt.utcnow',
return_value=dt_util.utcnow() -
auth_const.ACCESS_TOKEN_EXPIRATION - timedelta(seconds=11)):
access_token = manager.async_create_access_token(refresh_token)
assert (
await manager.async_validate_access_token(access_token)
is None
)
async def test_generating_system_user(hass):
"""Test that we can add a system user."""
manager = await auth.auth_manager_from_config(hass, [], [])
user = await manager.async_create_system_user('Hass.io')
token = await manager.async_create_refresh_token(user)
assert user.system_generated
assert token is not None
assert token.client_id is None
async def test_refresh_token_requires_client_for_user(hass):
"""Test that we can add a system user."""
manager = await auth.auth_manager_from_config(hass, [], [])
user = MockUser().add_to_auth_manager(manager)
assert user.system_generated is False
with pytest.raises(ValueError):
await manager.async_create_refresh_token(user)
token = await manager.async_create_refresh_token(user, CLIENT_ID)
assert token is not None
assert token.client_id == CLIENT_ID
async def test_refresh_token_not_requires_client_for_system_user(hass):
"""Test that we can add a system user."""
manager = await auth.auth_manager_from_config(hass, [], [])
user = await manager.async_create_system_user('Hass.io')
assert user.system_generated is True
with pytest.raises(ValueError):
await manager.async_create_refresh_token(user, CLIENT_ID)
token = await manager.async_create_refresh_token(user)
assert token is not None
assert token.client_id is None
async def test_cannot_deactive_owner(mock_hass):
"""Test that we cannot deactive the owner."""
manager = await auth.auth_manager_from_config(mock_hass, [], [])
owner = MockUser(
is_owner=True,
).add_to_auth_manager(manager)
with pytest.raises(ValueError):
await manager.async_deactivate_user(owner)
async def test_remove_refresh_token(mock_hass):
"""Test that we can remove a refresh token."""
manager = await auth.auth_manager_from_config(mock_hass, [], [])
user = MockUser().add_to_auth_manager(manager)
refresh_token = await manager.async_create_refresh_token(user, CLIENT_ID)
access_token = manager.async_create_access_token(refresh_token)
await manager.async_remove_refresh_token(refresh_token)
assert (
await manager.async_get_refresh_token(refresh_token.id) is None
)
assert (
await manager.async_validate_access_token(access_token) is None
)
async def test_login_with_auth_module(mock_hass):
"""Test login as existing user with auth module."""
manager = await auth.auth_manager_from_config(mock_hass, [{
'type': 'insecure_example',
'users': [{
'username': 'test-user',
'password': 'test-pass',
'name': 'Test Name'
}],
}], [{
'type': 'insecure_example',
'data': [{
'user_id': 'mock-user',
'pin': 'test-pin'
}]
}])
mock_hass.auth = manager
ensure_auth_manager_loaded(manager)
# Add fake user with credentials for example auth provider.
user = MockUser(
id='mock-user',
is_owner=False,
is_active=False,
name='Paulus',
).add_to_auth_manager(manager)
user.credentials.append(auth_models.Credentials(
id='mock-id',
auth_provider_type='insecure_example',
auth_provider_id=None,
data={'username': 'test-user'},
is_new=False,
))
step = await manager.login_flow.async_init(('insecure_example', None))
assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
step = await manager.login_flow.async_configure(step['flow_id'], {
'username': 'test-user',
'password': 'test-pass',
})
# After auth_provider validated, request auth module input form
assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
assert step['step_id'] == 'mfa'
step = await manager.login_flow.async_configure(step['flow_id'], {
'pin': 'invalid-pin',
})
# Invalid auth error
assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
assert step['step_id'] == 'mfa'
assert step['errors'] == {'base': 'invalid_auth'}
step = await manager.login_flow.async_configure(step['flow_id'], {
'pin': 'test-pin',
})
# Finally passed, get user
assert step['type'] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
user = step['result']
assert user is not None
assert user.id == 'mock-user'
assert user.is_owner is False
assert user.is_active is False
assert user.name == 'Paulus'
async def test_login_with_multi_auth_module(mock_hass):
"""Test login as existing user with multiple auth modules."""
manager = await auth.auth_manager_from_config(mock_hass, [{
'type': 'insecure_example',
'users': [{
'username': 'test-user',
'password': 'test-pass',
'name': 'Test Name'
}],
}], [{
'type': 'insecure_example',
'data': [{
'user_id': 'mock-user',
'pin': 'test-pin'
}]
}, {
'type': 'insecure_example',
'id': 'module2',
'data': [{
'user_id': 'mock-user',
'pin': 'test-pin2'
}]
}])
mock_hass.auth = manager
ensure_auth_manager_loaded(manager)
# Add fake user with credentials for example auth provider.
user = MockUser(
id='mock-user',
is_owner=False,
is_active=False,
name='Paulus',
).add_to_auth_manager(manager)
user.credentials.append(auth_models.Credentials(
id='mock-id',
auth_provider_type='insecure_example',
auth_provider_id=None,
data={'username': 'test-user'},
is_new=False,
))
step = await manager.login_flow.async_init(('insecure_example', None))
assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
step = await manager.login_flow.async_configure(step['flow_id'], {
'username': 'test-user',
'password': 'test-pass',
})
# After auth_provider validated, request select auth module
assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
assert step['step_id'] == 'select_mfa_module'
step = await manager.login_flow.async_configure(step['flow_id'], {
'multi_factor_auth_module': 'module2',
})
assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
assert step['step_id'] == 'mfa'
step = await manager.login_flow.async_configure(step['flow_id'], {
'pin': 'test-pin2',
})
# Finally passed, get user
assert step['type'] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
user = step['result']
assert user is not None
assert user.id == 'mock-user'
assert user.is_owner is False
assert user.is_active is False
assert user.name == 'Paulus'
async def test_auth_module_expired_session(mock_hass):
"""Test login as existing user."""
manager = await auth.auth_manager_from_config(mock_hass, [{
'type': 'insecure_example',
'users': [{
'username': 'test-user',
'password': 'test-pass',
'name': 'Test Name'
}],
}], [{
'type': 'insecure_example',
'data': [{
'user_id': 'mock-user',
'pin': 'test-pin'
}]
}])
mock_hass.auth = manager
ensure_auth_manager_loaded(manager)
# Add fake user with credentials for example auth provider.
user = MockUser(
id='mock-user',
is_owner=False,
is_active=False,
name='Paulus',
).add_to_auth_manager(manager)
user.credentials.append(auth_models.Credentials(
id='mock-id',
auth_provider_type='insecure_example',
auth_provider_id=None,
data={'username': 'test-user'},
is_new=False,
))
step = await manager.login_flow.async_init(('insecure_example', None))
assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
step = await manager.login_flow.async_configure(step['flow_id'], {
'username': 'test-user',
'password': 'test-pass',
})
assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
assert step['step_id'] == 'mfa'
with patch('homeassistant.util.dt.utcnow',
return_value=dt_util.utcnow() + SESSION_EXPIRATION):
step = await manager.login_flow.async_configure(step['flow_id'], {
'pin': 'test-pin',
})
# Invalid auth due session timeout
assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
assert step['step_id'] == 'mfa'
assert step['errors']['base'] == 'login_expired'
# The second try will fail as well
step = await manager.login_flow.async_configure(step['flow_id'], {
'pin': 'test-pin',
})
assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
assert step['step_id'] == 'mfa'
assert step['errors']['base'] == 'login_expired'
async def test_enable_mfa_for_user(hass, hass_storage):
"""Test enable mfa module for user."""
manager = await auth.auth_manager_from_config(hass, [{
'type': 'insecure_example',
'users': [{
'username': 'test-user',
'password': 'test-pass',
}]
}], [{
'type': 'insecure_example',
'data': [],
}])
step = await manager.login_flow.async_init(('insecure_example', None))
step = await manager.login_flow.async_configure(step['flow_id'], {
'username': 'test-user',
'password': 'test-pass',
})
user = step['result']
assert user is not None
# new user don't have mfa enabled
modules = await manager.async_get_enabled_mfa(user)
assert len(modules) == 0
module = manager.get_auth_mfa_module('insecure_example')
# mfa module don't have data
assert bool(module._data) is False
# test enable mfa for user
await manager.async_enable_user_mfa(user, 'insecure_example',
{'pin': 'test-pin'})
assert len(module._data) == 1
assert module._data[0] == {'user_id': user.id, 'pin': 'test-pin'}
# test get enabled mfa
modules = await manager.async_get_enabled_mfa(user)
assert len(modules) == 1
assert 'insecure_example' in modules
# re-enable mfa for user will override
await manager.async_enable_user_mfa(user, 'insecure_example',
{'pin': 'test-pin-new'})
assert len(module._data) == 1
assert module._data[0] == {'user_id': user.id, 'pin': 'test-pin-new'}
modules = await manager.async_get_enabled_mfa(user)
assert len(modules) == 1
assert 'insecure_example' in modules
# system user cannot enable mfa
system_user = await manager.async_create_system_user('system-user')
with pytest.raises(ValueError):
await manager.async_enable_user_mfa(system_user, 'insecure_example',
{'pin': 'test-pin'})
assert len(module._data) == 1
modules = await manager.async_get_enabled_mfa(system_user)
assert len(modules) == 0
# disable mfa for user
await manager.async_disable_user_mfa(user, 'insecure_example')
assert bool(module._data) is False
# test get enabled mfa
modules = await manager.async_get_enabled_mfa(user)
assert len(modules) == 0
# disable mfa for user don't enabled just silent fail
await manager.async_disable_user_mfa(user, 'insecure_example')
| 31.793103 | 78 | 0.622215 | from datetime import timedelta
from unittest.mock import Mock, patch
import pytest
from homeassistant import auth, data_entry_flow
from homeassistant.auth import (
models as auth_models, auth_store, const as auth_const)
from homeassistant.auth.mfa_modules import SESSION_EXPIRATION
from homeassistant.util import dt as dt_util
from tests.common import (
MockUser, ensure_auth_manager_loaded, flush_store, CLIENT_ID)
@pytest.fixture
def mock_hass(loop):
hass = Mock()
hass.config.skip_pip = True
return hass
async def test_auth_manager_from_config_validates_config_and_id(mock_hass):
manager = await auth.auth_manager_from_config(mock_hass, [{
'name': 'Test Name',
'type': 'insecure_example',
'users': [],
}, {
'name': 'Invalid config because no users',
'type': 'insecure_example',
'id': 'invalid_config',
}, {
'name': 'Test Name 2',
'type': 'insecure_example',
'id': 'another',
'users': [],
}, {
'name': 'Wrong because duplicate ID',
'type': 'insecure_example',
'id': 'another',
'users': [],
}], [])
providers = [{
'name': provider.name,
'id': provider.id,
'type': provider.type,
} for provider in manager.auth_providers]
assert providers == [{
'name': 'Test Name',
'type': 'insecure_example',
'id': None,
}, {
'name': 'Test Name 2',
'type': 'insecure_example',
'id': 'another',
}]
async def test_auth_manager_from_config_auth_modules(mock_hass):
manager = await auth.auth_manager_from_config(mock_hass, [{
'name': 'Test Name',
'type': 'insecure_example',
'users': [],
}, {
'name': 'Test Name 2',
'type': 'insecure_example',
'id': 'another',
'users': [],
}], [{
'name': 'Module 1',
'type': 'insecure_example',
'data': [],
}, {
'name': 'Module 2',
'type': 'insecure_example',
'id': 'another',
'data': [],
}, {
'name': 'Duplicate ID',
'type': 'insecure_example',
'id': 'another',
'data': [],
}])
providers = [{
'name': provider.name,
'type': provider.type,
'id': provider.id,
} for provider in manager.auth_providers]
assert providers == [{
'name': 'Test Name',
'type': 'insecure_example',
'id': None,
}, {
'name': 'Test Name 2',
'type': 'insecure_example',
'id': 'another',
}]
modules = [{
'name': module.name,
'type': module.type,
'id': module.id,
} for module in manager.auth_mfa_modules]
assert modules == [{
'name': 'Module 1',
'type': 'insecure_example',
'id': 'insecure_example',
}, {
'name': 'Module 2',
'type': 'insecure_example',
'id': 'another',
}]
async def test_create_new_user(hass):
manager = await auth.auth_manager_from_config(hass, [{
'type': 'insecure_example',
'users': [{
'username': 'test-user',
'password': 'test-pass',
'name': 'Test Name'
}]
}], [])
step = await manager.login_flow.async_init(('insecure_example', None))
assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
step = await manager.login_flow.async_configure(step['flow_id'], {
'username': 'test-user',
'password': 'test-pass',
})
assert step['type'] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
user = step['result']
assert user is not None
assert user.is_owner is False
assert user.name == 'Test Name'
async def test_login_as_existing_user(mock_hass):
manager = await auth.auth_manager_from_config(mock_hass, [{
'type': 'insecure_example',
'users': [{
'username': 'test-user',
'password': 'test-pass',
'name': 'Test Name'
}]
}], [])
mock_hass.auth = manager
ensure_auth_manager_loaded(manager)
user = MockUser(
id='mock-user2',
is_owner=False,
is_active=False,
name='Not user',
).add_to_auth_manager(manager)
user.credentials.append(auth_models.Credentials(
id='mock-id2',
auth_provider_type='insecure_example',
auth_provider_id=None,
data={'username': 'other-user'},
is_new=False,
))
# Add fake user with credentials for example auth provider.
user = MockUser(
id='mock-user',
is_owner=False,
is_active=False,
name='Paulus',
).add_to_auth_manager(manager)
user.credentials.append(auth_models.Credentials(
id='mock-id',
auth_provider_type='insecure_example',
auth_provider_id=None,
data={'username': 'test-user'},
is_new=False,
))
step = await manager.login_flow.async_init(('insecure_example', None))
assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
step = await manager.login_flow.async_configure(step['flow_id'], {
'username': 'test-user',
'password': 'test-pass',
})
assert step['type'] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
user = step['result']
assert user is not None
assert user.id == 'mock-user'
assert user.is_owner is False
assert user.is_active is False
assert user.name == 'Paulus'
async def test_linking_user_to_two_auth_providers(hass, hass_storage):
manager = await auth.auth_manager_from_config(hass, [{
'type': 'insecure_example',
'users': [{
'username': 'test-user',
'password': 'test-pass',
}]
}, {
'type': 'insecure_example',
'id': 'another-provider',
'users': [{
'username': 'another-user',
'password': 'another-password',
}]
}], [])
step = await manager.login_flow.async_init(('insecure_example', None))
step = await manager.login_flow.async_configure(step['flow_id'], {
'username': 'test-user',
'password': 'test-pass',
})
user = step['result']
assert user is not None
step = await manager.login_flow.async_init(
('insecure_example', 'another-provider'),
context={'credential_only': True})
step = await manager.login_flow.async_configure(step['flow_id'], {
'username': 'another-user',
'password': 'another-password',
})
new_credential = step['result']
await manager.async_link_user(user, new_credential)
assert len(user.credentials) == 2
async def test_saving_loading(hass, hass_storage):
manager = await auth.auth_manager_from_config(hass, [{
'type': 'insecure_example',
'users': [{
'username': 'test-user',
'password': 'test-pass',
}]
}], [])
step = await manager.login_flow.async_init(('insecure_example', None))
step = await manager.login_flow.async_configure(step['flow_id'], {
'username': 'test-user',
'password': 'test-pass',
})
user = step['result']
await manager.async_activate_user(user)
await manager.async_create_refresh_token(user, CLIENT_ID)
await flush_store(manager._store._store)
store2 = auth_store.AuthStore(hass)
users = await store2.async_get_users()
assert len(users) == 1
assert users[0] == user
async def test_cannot_retrieve_expired_access_token(hass):
manager = await auth.auth_manager_from_config(hass, [], [])
user = MockUser().add_to_auth_manager(manager)
refresh_token = await manager.async_create_refresh_token(user, CLIENT_ID)
assert refresh_token.user.id is user.id
assert refresh_token.client_id == CLIENT_ID
access_token = manager.async_create_access_token(refresh_token)
assert (
await manager.async_validate_access_token(access_token)
is refresh_token
)
with patch('homeassistant.util.dt.utcnow',
return_value=dt_util.utcnow() -
auth_const.ACCESS_TOKEN_EXPIRATION - timedelta(seconds=11)):
access_token = manager.async_create_access_token(refresh_token)
assert (
await manager.async_validate_access_token(access_token)
is None
)
async def test_generating_system_user(hass):
    """System users are flagged as generated and get client-less tokens."""
    auth_mgr = await auth.auth_manager_from_config(hass, [], [])
    system_user = await auth_mgr.async_create_system_user('Hass.io')
    refresh_token = await auth_mgr.async_create_refresh_token(system_user)
    # A system user carries the generated flag; its token has no client id.
    assert system_user.system_generated
    assert refresh_token is not None
    assert refresh_token.client_id is None
async def test_refresh_token_requires_client_for_user(hass):
    """Non-system users must supply a client id for refresh tokens."""
    auth_mgr = await auth.auth_manager_from_config(hass, [], [])
    normal_user = MockUser().add_to_auth_manager(auth_mgr)
    assert normal_user.system_generated is False
    # Omitting the client id for a regular user must be rejected.
    with pytest.raises(ValueError):
        await auth_mgr.async_create_refresh_token(normal_user)
    refresh_token = await auth_mgr.async_create_refresh_token(
        normal_user, CLIENT_ID)
    assert refresh_token is not None
    assert refresh_token.client_id == CLIENT_ID
async def test_refresh_token_not_requires_client_for_system_user(hass):
    """System users must get refresh tokens without (and never with) a client id."""
    auth_mgr = await auth.auth_manager_from_config(hass, [], [])
    system_user = await auth_mgr.async_create_system_user('Hass.io')
    assert system_user.system_generated is True
    # Supplying a client id for a system user is an error.
    with pytest.raises(ValueError):
        await auth_mgr.async_create_refresh_token(system_user, CLIENT_ID)
    refresh_token = await auth_mgr.async_create_refresh_token(system_user)
    assert refresh_token is not None
    assert refresh_token.client_id is None
async def test_cannot_deactive_owner(mock_hass):
    """Deactivating the owner account must raise ValueError."""
    auth_mgr = await auth.auth_manager_from_config(mock_hass, [], [])
    owner_user = MockUser(is_owner=True).add_to_auth_manager(auth_mgr)
    with pytest.raises(ValueError):
        await auth_mgr.async_deactivate_user(owner_user)
async def test_remove_refresh_token(mock_hass):
    """Test removing a refresh token also revokes access tokens minted from it."""
    manager = await auth.auth_manager_from_config(mock_hass, [], [])
    user = MockUser().add_to_auth_manager(manager)
    refresh_token = await manager.async_create_refresh_token(user, CLIENT_ID)
    access_token = manager.async_create_access_token(refresh_token)
    await manager.async_remove_refresh_token(refresh_token)
    # Neither the refresh token nor its derived access token resolve anymore.
    assert (
        await manager.async_get_refresh_token(refresh_token.id) is None
    )
    assert (
        await manager.async_validate_access_token(access_token) is None
    )
async def test_login_with_auth_module(mock_hass):
    """Test the login flow with a single MFA module configured.

    After the provider accepts the credentials the flow asks for the
    module pin: a wrong pin re-renders the 'mfa' form with an
    invalid_auth error, and the right pin completes the flow, returning
    the pre-registered mock user unchanged.
    """
    manager = await auth.auth_manager_from_config(mock_hass, [{
        'type': 'insecure_example',
        'users': [{
            'username': 'test-user',
            'password': 'test-pass',
            'name': 'Test Name'
        }],
    }], [{
        'type': 'insecure_example',
        'data': [{
            'user_id': 'mock-user',
            'pin': 'test-pin'
        }]
    }])
    mock_hass.auth = manager
    ensure_auth_manager_loaded(manager)
    # Add fake user with credentials for example auth provider.
    user = MockUser(
        id='mock-user',
        is_owner=False,
        is_active=False,
        name='Paulus',
    ).add_to_auth_manager(manager)
    user.credentials.append(auth_models.Credentials(
        id='mock-id',
        auth_provider_type='insecure_example',
        auth_provider_id=None,
        data={'username': 'test-user'},
        is_new=False,
    ))
    step = await manager.login_flow.async_init(('insecure_example', None))
    assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
    step = await manager.login_flow.async_configure(step['flow_id'], {
        'username': 'test-user',
        'password': 'test-pass',
    })
    # After auth_provider validated, request auth module input form
    assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
    assert step['step_id'] == 'mfa'
    step = await manager.login_flow.async_configure(step['flow_id'], {
        'pin': 'invalid-pin',
    })
    # Invalid auth error
    assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
    assert step['step_id'] == 'mfa'
    assert step['errors'] == {'base': 'invalid_auth'}
    step = await manager.login_flow.async_configure(step['flow_id'], {
        'pin': 'test-pin',
    })
    # Finally passed, get user
    assert step['type'] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    user = step['result']
    assert user is not None
    assert user.id == 'mock-user'
    assert user.is_owner is False
    assert user.is_active is False
    assert user.name == 'Paulus'
async def test_login_with_multi_auth_module(mock_hass):
    """Test the login flow when the user has multiple MFA modules enabled.

    With two insecure_example modules configured (the default and
    'module2'), a successful provider login leads to a
    'select_mfa_module' step first; choosing 'module2' then prompts for
    that module's pin ('test-pin2') and completes the flow.
    """
    manager = await auth.auth_manager_from_config(mock_hass, [{
        'type': 'insecure_example',
        'users': [{
            'username': 'test-user',
            'password': 'test-pass',
            'name': 'Test Name'
        }],
    }], [{
        'type': 'insecure_example',
        'data': [{
            'user_id': 'mock-user',
            'pin': 'test-pin'
        }]
    }, {
        'type': 'insecure_example',
        'id': 'module2',
        'data': [{
            'user_id': 'mock-user',
            'pin': 'test-pin2'
        }]
    }])
    mock_hass.auth = manager
    ensure_auth_manager_loaded(manager)
    # Add fake user with credentials for example auth provider.
    user = MockUser(
        id='mock-user',
        is_owner=False,
        is_active=False,
        name='Paulus',
    ).add_to_auth_manager(manager)
    user.credentials.append(auth_models.Credentials(
        id='mock-id',
        auth_provider_type='insecure_example',
        auth_provider_id=None,
        data={'username': 'test-user'},
        is_new=False,
    ))
    step = await manager.login_flow.async_init(('insecure_example', None))
    assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
    step = await manager.login_flow.async_configure(step['flow_id'], {
        'username': 'test-user',
        'password': 'test-pass',
    })
    # After auth_provider validated, request select auth module
    assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
    assert step['step_id'] == 'select_mfa_module'
    step = await manager.login_flow.async_configure(step['flow_id'], {
        'multi_factor_auth_module': 'module2',
    })
    assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
    assert step['step_id'] == 'mfa'
    step = await manager.login_flow.async_configure(step['flow_id'], {
        'pin': 'test-pin2',
    })
    # Finally passed, get user
    assert step['type'] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY
    user = step['result']
    assert user is not None
    assert user.id == 'mock-user'
    assert user.is_owner is False
    assert user.is_active is False
    assert user.name == 'Paulus'
async def test_auth_module_expired_session(mock_hass):
    """Test that an expired login session fails the MFA step permanently.

    The pin is submitted with utcnow patched forward by
    SESSION_EXPIRATION, so even the correct pin must fail with
    'login_expired'; retrying on the same flow fails the same way.
    """
    manager = await auth.auth_manager_from_config(mock_hass, [{
        'type': 'insecure_example',
        'users': [{
            'username': 'test-user',
            'password': 'test-pass',
            'name': 'Test Name'
        }],
    }], [{
        'type': 'insecure_example',
        'data': [{
            'user_id': 'mock-user',
            'pin': 'test-pin'
        }]
    }])
    mock_hass.auth = manager
    ensure_auth_manager_loaded(manager)
    # Add fake user with credentials for example auth provider.
    user = MockUser(
        id='mock-user',
        is_owner=False,
        is_active=False,
        name='Paulus',
    ).add_to_auth_manager(manager)
    user.credentials.append(auth_models.Credentials(
        id='mock-id',
        auth_provider_type='insecure_example',
        auth_provider_id=None,
        data={'username': 'test-user'},
        is_new=False,
    ))
    step = await manager.login_flow.async_init(('insecure_example', None))
    assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
    step = await manager.login_flow.async_configure(step['flow_id'], {
        'username': 'test-user',
        'password': 'test-pass',
    })
    assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
    assert step['step_id'] == 'mfa'
    # Submit the (correct) pin after the session window has elapsed.
    with patch('homeassistant.util.dt.utcnow',
               return_value=dt_util.utcnow() + SESSION_EXPIRATION):
        step = await manager.login_flow.async_configure(step['flow_id'], {
            'pin': 'test-pin',
        })
        # Invalid auth due session timeout
        assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
        assert step['step_id'] == 'mfa'
        assert step['errors']['base'] == 'login_expired'
        # The second try will fail as well
        step = await manager.login_flow.async_configure(step['flow_id'], {
            'pin': 'test-pin',
        })
        assert step['type'] == data_entry_flow.RESULT_TYPE_FORM
        assert step['step_id'] == 'mfa'
        assert step['errors']['base'] == 'login_expired'
async def test_enable_mfa_for_user(hass, hass_storage):
    """Test the MFA enable/disable lifecycle for a user.

    Covers: a new user starts with no MFA; enabling stores the pin in
    the module; re-enabling overrides the stored data; system users are
    rejected; disabling clears the data; and disabling when nothing is
    enabled fails silently.
    """
    manager = await auth.auth_manager_from_config(hass, [{
        'type': 'insecure_example',
        'users': [{
            'username': 'test-user',
            'password': 'test-pass',
        }]
    }], [{
        'type': 'insecure_example',
        'data': [],
    }])
    step = await manager.login_flow.async_init(('insecure_example', None))
    step = await manager.login_flow.async_configure(step['flow_id'], {
        'username': 'test-user',
        'password': 'test-pass',
    })
    user = step['result']
    assert user is not None
    # new user don't have mfa enabled
    modules = await manager.async_get_enabled_mfa(user)
    assert len(modules) == 0
    module = manager.get_auth_mfa_module('insecure_example')
    assert bool(module._data) is False
    # test enable mfa for user
    await manager.async_enable_user_mfa(user, 'insecure_example',
                                        {'pin': 'test-pin'})
    assert len(module._data) == 1
    assert module._data[0] == {'user_id': user.id, 'pin': 'test-pin'}
    # test get enabled mfa
    modules = await manager.async_get_enabled_mfa(user)
    assert len(modules) == 1
    assert 'insecure_example' in modules
    # re-enable mfa for user will override
    await manager.async_enable_user_mfa(user, 'insecure_example',
                                        {'pin': 'test-pin-new'})
    assert len(module._data) == 1
    assert module._data[0] == {'user_id': user.id, 'pin': 'test-pin-new'}
    modules = await manager.async_get_enabled_mfa(user)
    assert len(modules) == 1
    assert 'insecure_example' in modules
    # system user cannot enable mfa
    system_user = await manager.async_create_system_user('system-user')
    with pytest.raises(ValueError):
        await manager.async_enable_user_mfa(system_user, 'insecure_example',
                                            {'pin': 'test-pin'})
    assert len(module._data) == 1
    modules = await manager.async_get_enabled_mfa(system_user)
    assert len(modules) == 0
    # disable mfa for user
    await manager.async_disable_user_mfa(user, 'insecure_example')
    assert bool(module._data) is False
    # test get enabled mfa
    modules = await manager.async_get_enabled_mfa(user)
    assert len(modules) == 0
    # disable mfa for user don't enabled just silent fail
    await manager.async_disable_user_mfa(user, 'insecure_example')
| true | true |
f724b4c6733fdd2f61ff3c2a9eaddf0e2852a32c | 1,262 | py | Python | 1200-1300q/1266.py | rampup01/Leetcode | 8450a95a966ef83b24ffe6450f06ce8de92b3efb | [
"MIT"
] | 990 | 2018-06-05T11:49:22.000Z | 2022-03-31T08:59:17.000Z | 1200-1300q/1266.py | rampup01/Leetcode | 8450a95a966ef83b24ffe6450f06ce8de92b3efb | [
"MIT"
] | 1 | 2021-11-01T01:29:38.000Z | 2021-11-01T01:29:38.000Z | 1200-1300q/1266.py | rampup01/Leetcode | 8450a95a966ef83b24ffe6450f06ce8de92b3efb | [
"MIT"
] | 482 | 2018-06-12T22:16:53.000Z | 2022-03-29T00:23:29.000Z | '''
On a plane there are n points with integer coordinates points[i] = [xi, yi]. Your task is to find the minimum time in seconds to visit all points.
You can move according to the next rules:
In one second always you can either move vertically, horizontally by one unit or diagonally (it means to move one unit vertically and one unit horizontally in one second).
You have to visit the points in the same order as they appear in the array.
Input: points = [[1,1],[3,4],[-1,0]]
Output: 7
Explanation: One optimal path is [1,1] -> [2,2] -> [3,3] -> [3,4] -> [2,3] -> [1,2] -> [0,1] -> [-1,0]
Time from [1,1] to [3,4] = 3 seconds
Time from [3,4] to [-1,0] = 4 seconds
Total time = 7 seconds
Example 2:
Input: points = [[3,2],[-2,2]]
Output: 5
Constraints:
points.length == n
1 <= n <= 100
points[i].length == 2
-1000 <= points[i][0], points[i][1] <= 1000
'''
class Solution(object):
    def minTimeToVisitAllPoints(self, points):
        """Return the minimum seconds to visit the points in order.

        A diagonal move advances both axes in one second, so the time
        between two consecutive points is their Chebyshev distance:
        max(|dx|, |dy|).

        :type points: List[List[int]]
        :rtype: int
        """
        if not points:
            return 0
        return sum(
            max(abs(x2 - x1), abs(y2 - y1))
            for (x1, y1), (x2, y2) in zip(points, points[1:])
        )
| 30.780488 | 171 | 0.616482 |
class Solution(object):
    def minTimeToVisitAllPoints(self, points):
        """Return the minimum seconds to visit the points in the given order.

        Each hop costs the Chebyshev distance max(|dx|, |dy|) because a
        diagonal move covers one unit on both axes per second.
        """
        if not points:
            return 0
        result = 0
        # Accumulate the Chebyshev distance between consecutive points.
        for index in range(1, len(points)):
            result += max(abs(points[index][0]-points[index-1][0]), abs(points[index][1]-points[index-1][1]))
        return result
| true | true |
f724b5246de70e1ed75d10855245cd87b8034692 | 2,628 | py | Python | app/selenium_ui/jira_ui.py | Unleash/jira-performance-tests | 3c84d65cbf70e2db5ea4c1af303d6dab4a218771 | [
"Apache-2.0"
] | null | null | null | app/selenium_ui/jira_ui.py | Unleash/jira-performance-tests | 3c84d65cbf70e2db5ea4c1af303d6dab4a218771 | [
"Apache-2.0"
] | null | null | null | app/selenium_ui/jira_ui.py | Unleash/jira-performance-tests | 3c84d65cbf70e2db5ea4c1af303d6dab4a218771 | [
"Apache-2.0"
] | null | null | null | from selenium_ui.jira import modules
from extension.jira import extension_ui # noqa F401
# this action should be the first one
def test_0_selenium_a_login(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.login(jira_webdriver, jira_datasets)
# Each test_1_* wrapper below delegates one UI flow to selenium_ui.jira.modules,
# passing the shared webdriver/dataset fixtures.  The 0/1/2 prefixes (plus the
# a_/z_ letters) appear intended to keep login first and log-out last under
# pytest's alphabetical collection — see the first/last comments.
def test_1_selenium_browse_projects_list(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.browse_projects_list(jira_webdriver, jira_datasets)
def test_1_selenium_browse_boards_list(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.browse_boards_list(jira_webdriver, jira_datasets)
def test_1_selenium_create_issue(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.create_issue(jira_webdriver, jira_datasets)
def test_1_selenium_edit_issue(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.edit_issue(jira_webdriver, jira_datasets)
def test_1_selenium_save_comment(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.save_comment(jira_webdriver, jira_datasets)
def test_1_selenium_search_jql(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.search_jql(jira_webdriver, jira_datasets)
def test_1_selenium_view_backlog_for_scrum_board(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.view_backlog_for_scrum_board(jira_webdriver, jira_datasets)
def test_1_selenium_view_scrum_board(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.view_scrum_board(jira_webdriver, jira_datasets)
def test_1_selenium_view_kanban_board(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.view_kanban_board(jira_webdriver, jira_datasets)
def test_1_selenium_view_dashboard(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.view_dashboard(jira_webdriver, jira_datasets)
def test_1_selenium_view_issue(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.view_issue(jira_webdriver, jira_datasets)
def test_1_selenium_view_project_summary(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.view_project_summary(jira_webdriver, jira_datasets)
"""
Add custom actions anywhere between login and log out action. Move this to a different line as needed.
Write your custom selenium scripts in `app/extension/jira/extension_ui.py`.
Refer to `app/selenium_ui/jira/modules.py` for examples.
"""
# Custom app-specific action supplied by the extension module.
def test_1_selenium_view_bind_unleash_toggle(jira_webdriver, jira_datasets, jira_screen_shots):
    extension_ui.app_specific_action(jira_webdriver, jira_datasets)
# this action should be the last one
def test_2_selenium_z_log_out(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.log_out(jira_webdriver, jira_datasets)
| 38.086957 | 102 | 0.847412 | from selenium_ui.jira import modules
from extension.jira import extension_ui
# Selenium action wrappers: each delegates one Jira UI flow to
# selenium_ui.jira.modules using the shared webdriver/dataset fixtures.
# Naming presumably orders execution (0 = login, 1 = actions, 2 = log out)
# under pytest's alphabetical collection — verify against the test runner.
def test_0_selenium_a_login(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.login(jira_webdriver, jira_datasets)
def test_1_selenium_browse_projects_list(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.browse_projects_list(jira_webdriver, jira_datasets)
def test_1_selenium_browse_boards_list(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.browse_boards_list(jira_webdriver, jira_datasets)
def test_1_selenium_create_issue(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.create_issue(jira_webdriver, jira_datasets)
def test_1_selenium_edit_issue(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.edit_issue(jira_webdriver, jira_datasets)
def test_1_selenium_save_comment(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.save_comment(jira_webdriver, jira_datasets)
def test_1_selenium_search_jql(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.search_jql(jira_webdriver, jira_datasets)
def test_1_selenium_view_backlog_for_scrum_board(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.view_backlog_for_scrum_board(jira_webdriver, jira_datasets)
def test_1_selenium_view_scrum_board(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.view_scrum_board(jira_webdriver, jira_datasets)
def test_1_selenium_view_kanban_board(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.view_kanban_board(jira_webdriver, jira_datasets)
def test_1_selenium_view_dashboard(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.view_dashboard(jira_webdriver, jira_datasets)
def test_1_selenium_view_issue(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.view_issue(jira_webdriver, jira_datasets)
def test_1_selenium_view_project_summary(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.view_project_summary(jira_webdriver, jira_datasets)
# App-specific action provided by the extension module.
def test_1_selenium_view_bind_unleash_toggle(jira_webdriver, jira_datasets, jira_screen_shots):
    extension_ui.app_specific_action(jira_webdriver, jira_datasets)
def test_2_selenium_z_log_out(jira_webdriver, jira_datasets, jira_screen_shots):
    modules.log_out(jira_webdriver, jira_datasets)
| true | true |
f724b67a605f31ce4eecd96e689074ae628c81ea | 12,486 | py | Python | mailchimp3/entities/lists.py | MyMusicTaste/async-python-mailchimp | 10a53dc6f1406fa23abca89da142a51e123dd966 | [
"MIT"
] | 9 | 2018-05-15T06:49:26.000Z | 2020-12-20T13:43:56.000Z | mailchimp3/entities/lists.py | MyMusicTaste/async-python-mailchimp | 10a53dc6f1406fa23abca89da142a51e123dd966 | [
"MIT"
] | null | null | null | mailchimp3/entities/lists.py | MyMusicTaste/async-python-mailchimp | 10a53dc6f1406fa23abca89da142a51e123dd966 | [
"MIT"
] | null | null | null | # coding=utf-8
"""
The Lists API endpoint
Documentation: http://developer.mailchimp.com/documentation/mailchimp/reference/lists/
Schema: https://api.mailchimp.com/schema/3.0/Lists/Instance.json
"""
from __future__ import unicode_literals
from mailchimp3.baseapi import BaseApi
from mailchimp3.entities.listabusereports import ListAbuseReports
from mailchimp3.entities.listactivity import ListActivity
from mailchimp3.entities.listclients import ListClients
from mailchimp3.entities.listgrowthhistory import ListGrowthHistory
from mailchimp3.entities.listinterestcategories import ListInterestCategories
from mailchimp3.entities.listmembers import ListMembers
from mailchimp3.entities.listmergefields import ListMergeFields
from mailchimp3.entities.listsegments import ListSegments
from mailchimp3.entities.listsignupforms import ListSignupForms
from mailchimp3.entities.listwebhooks import ListWebhooks
from mailchimp3.helpers import check_email
class Lists(BaseApi):
    """
    A MailChimp list is a powerful and flexible tool that helps you manage your contacts.
    """
    def __init__(self, *args, **kwargs):
        """
        Initialize the endpoint and the chained list sub-endpoints.
        """
        super(Lists, self).__init__(*args, **kwargs)
        self.endpoint = 'lists'
        self.list_id = None
        self.abuse_reports = ListAbuseReports(self)
        self.activity = ListActivity(self)
        self.clients = ListClients(self)
        self.growth_history = ListGrowthHistory(self)
        self.interest_categories = ListInterestCategories(self)
        self.members = ListMembers(self)
        self.merge_fields = ListMergeFields(self)
        self.segments = ListSegments(self)
        self.signup_forms = ListSignupForms(self)
        self.webhooks = ListWebhooks(self)

    @staticmethod
    def _validate_list_data(data):
        """
        Validate the request body shared by create and update, raising on
        the first missing or invalid field.  (Previously this ~30-line
        check was duplicated verbatim in both methods.)

        data = {
            "name": string*,
            "contact": object*
            {
                "company": string*,
                "address1": string*,
                "city": string*,
                "state": string*,
                "zip": string*,
                "country": string*
            },
            "permission_reminder": string*,
            "campaign_defaults": object*
            {
                "from_name": string*,
                "from_email": string*,
                "subject": string*,
                "language": string*
            },
            "email_type_option": boolean
        }

        :param data: The request body parameters
        :type data: :py:class:`dict`
        :raises KeyError: if a required field is missing
        :raises TypeError: if email_type_option is not True or False
        """
        if 'name' not in data:
            raise KeyError('The list must have a name')
        if 'contact' not in data:
            raise KeyError('The list must have a contact')
        # Every contact field is mandatory.
        for field in ('company', 'address1', 'city', 'state', 'zip', 'country'):
            if field not in data['contact']:
                raise KeyError('The list contact must have a ' + field)
        if 'permission_reminder' not in data:
            raise KeyError('The list must have a permission_reminder')
        if 'campaign_defaults' not in data:
            raise KeyError('The list must have a campaign_defaults')
        if 'from_name' not in data['campaign_defaults']:
            raise KeyError('The list campaign_defaults must have a from_name')
        if 'from_email' not in data['campaign_defaults']:
            raise KeyError('The list campaign_defaults must have a from_email')
        # Validate the sender address before checking the remaining fields,
        # preserving the original order in which errors were raised.
        check_email(data['campaign_defaults']['from_email'])
        if 'subject' not in data['campaign_defaults']:
            raise KeyError('The list campaign_defaults must have a subject')
        if 'language' not in data['campaign_defaults']:
            raise KeyError('The list campaign_defaults must have a language')
        if 'email_type_option' not in data:
            raise KeyError('The list must have an email_type_option')
        if data['email_type_option'] not in [True, False]:
            raise TypeError('The list email_type_option must be True or False')

    async def create(self, data):
        """
        Create a new list in your MailChimp account.

        See :meth:`_validate_list_data` for the required request body.
        On success the new list's id is remembered on ``self.list_id`` so
        the chained sub-endpoints can use it.

        :param data: The request body parameters
        :type data: :py:class:`dict`
        """
        self._validate_list_data(data)
        response = await self._mc_client._post(url=self._build_path(), data=data)
        if response is not None:
            self.list_id = response['id']
        else:
            self.list_id = None
        return response

    async def update_members(self, list_id, data):
        """
        Batch subscribe or unsubscribe list members.

        Only the members array is required in the request body parameters.
        Within the members array, each member requires an email_address
        and either a status or status_if_new. The update_existing parameter
        will also be considered required to help prevent accidental updates
        to existing members and will default to false if not present.

        :param list_id: The unique id for the list.
        :type list_id: :py:class:`str`
        :param data: The request body parameters
        :type data: :py:class:`dict`
        data = {
            "members": array*
            [
                {
                    "email_address": string*,
                    "status": string* (Must be one of 'subscribed', 'unsubscribed', 'cleaned', or 'pending'),
                    "status_if_new": string* (Must be one of 'subscribed', 'unsubscribed', 'cleaned', or 'pending')
                }
            ],
            "update_existing": boolean*
        }
        """
        self.list_id = list_id
        if 'members' not in data:
            raise KeyError('The update must have at least one member')
        if not len(data['members']) <= 500:
            raise ValueError('You may only batch sub/unsub 500 members at a time')
        valid_statuses = ['subscribed', 'unsubscribed', 'cleaned', 'pending']
        for member in data['members']:
            if 'email_address' not in member:
                raise KeyError('Each list member must have an email_address')
            check_email(member['email_address'])
            if 'status' not in member and 'status_if_new' not in member:
                raise KeyError('Each list member must have either a status or a status_if_new')
            if 'status' in member and member['status'] not in valid_statuses:
                raise ValueError('The list member status must be one of "subscribed", "unsubscribed", "cleaned", or '
                                 '"pending"')
            if 'status_if_new' in member and member['status_if_new'] not in valid_statuses:
                raise ValueError('The list member status_if_new must be one of "subscribed", "unsubscribed", '
                                 '"cleaned", or "pending"')
        if 'update_existing' not in data:
            # NOTE: intentionally mutates the caller's dict, matching the
            # historical behavior of this endpoint.
            data['update_existing'] = False
        return await self._mc_client._post(url=self._build_path(list_id), data=data)

    async def all(self, get_all=False, **queryparams):
        """
        Get information about all lists in the account.

        :param get_all: Should the query get all results
        :type get_all: :py:class:`bool`
        :param queryparams: The query string parameters
        queryparams['fields'] = []
        queryparams['exclude_fields'] = []
        queryparams['count'] = integer
        queryparams['offset'] = integer
        queryparams['before_date_created'] = string
        queryparams['since_date_created'] = string
        queryparams['before_campaign_last_sent'] = string
        queryparams['since_campaign_last_sent'] = string
        queryparams['email'] = string
        queryparams['sort_field'] = string (Must be 'date_created')
        queryparams['sort_dir'] = string (Must be one of 'ASC' or 'DESC')
        """
        self.list_id = None
        if get_all:
            return await self._iterate(url=self._build_path(), **queryparams)
        else:
            return await self._mc_client._get(url=self._build_path(), **queryparams)

    async def get(self, list_id, **queryparams):
        """
        Get information about a specific list in your MailChimp account.
        Results include list members who have signed up but haven't confirmed
        their subscription yet and unsubscribed or cleaned.

        :param list_id: The unique id for the list.
        :type list_id: :py:class:`str`
        :param queryparams: The query string parameters
        queryparams['fields'] = []
        queryparams['exclude_fields'] = []
        """
        self.list_id = list_id
        return await self._mc_client._get(url=self._build_path(list_id), **queryparams)

    async def update(self, list_id, data):
        """
        Update the settings for a specific list.

        See :meth:`_validate_list_data` for the required request body.

        :param list_id: The unique id for the list.
        :type list_id: :py:class:`str`
        :param data: The request body parameters
        :type data: :py:class:`dict`
        """
        self.list_id = list_id
        self._validate_list_data(data)
        return await self._mc_client._patch(url=self._build_path(list_id), data=data)

    async def delete(self, list_id):
        """
        Delete a list from your MailChimp account. If you delete a list,
        you'll lose the list history - including subscriber activity,
        unsubscribes, complaints, and bounces. You'll also lose subscribers'
        email addresses, unless you exported and backed up your list.

        :param list_id: The unique id for the list.
        :type list_id: :py:class:`str`
        """
        self.list_id = list_id
        return await self._mc_client._delete(url=self._build_path(list_id))
| 44.276596 | 117 | 0.614448 |
from __future__ import unicode_literals
from mailchimp3.baseapi import BaseApi
from mailchimp3.entities.listabusereports import ListAbuseReports
from mailchimp3.entities.listactivity import ListActivity
from mailchimp3.entities.listclients import ListClients
from mailchimp3.entities.listgrowthhistory import ListGrowthHistory
from mailchimp3.entities.listinterestcategories import ListInterestCategories
from mailchimp3.entities.listmembers import ListMembers
from mailchimp3.entities.listmergefields import ListMergeFields
from mailchimp3.entities.listsegments import ListSegments
from mailchimp3.entities.listsignupforms import ListSignupForms
from mailchimp3.entities.listwebhooks import ListWebhooks
from mailchimp3.helpers import check_email
class Lists(BaseApi):
    """MailChimp Lists endpoint: create, read, update, delete lists and batch-manage members."""

    def __init__(self, *args, **kwargs):
        """Set the endpoint path and attach the chained list sub-endpoints."""
        super(Lists, self).__init__(*args, **kwargs)
        self.endpoint = 'lists'
        self.list_id = None
        self.abuse_reports = ListAbuseReports(self)
        self.activity = ListActivity(self)
        self.clients = ListClients(self)
        self.growth_history = ListGrowthHistory(self)
        self.interest_categories = ListInterestCategories(self)
        self.members = ListMembers(self)
        self.merge_fields = ListMergeFields(self)
        self.segments = ListSegments(self)
        self.signup_forms = ListSignupForms(self)
        self.webhooks = ListWebhooks(self)
    async def create(self, data):
        """Create a new list; validates the required fields, then POSTs.

        On success, remembers the new list's id on ``self.list_id``.
        Raises KeyError for missing fields and TypeError for a
        non-boolean email_type_option.
        """
        if 'name' not in data:
            raise KeyError('The list must have a name')
        if 'contact' not in data:
            raise KeyError('The list must have a contact')
        if 'company' not in data['contact']:
            raise KeyError('The list contact must have a company')
        if 'address1' not in data['contact']:
            raise KeyError('The list contact must have a address1')
        if 'city' not in data['contact']:
            raise KeyError('The list contact must have a city')
        if 'state' not in data['contact']:
            raise KeyError('The list contact must have a state')
        if 'zip' not in data['contact']:
            raise KeyError('The list contact must have a zip')
        if 'country' not in data['contact']:
            raise KeyError('The list contact must have a country')
        if 'permission_reminder' not in data:
            raise KeyError('The list must have a permission_reminder')
        if 'campaign_defaults' not in data:
            raise KeyError('The list must have a campaign_defaults')
        if 'from_name' not in data['campaign_defaults']:
            raise KeyError('The list campaign_defaults must have a from_name')
        if 'from_email' not in data['campaign_defaults']:
            raise KeyError('The list campaign_defaults must have a from_email')
        check_email(data['campaign_defaults']['from_email'])
        if 'subject' not in data['campaign_defaults']:
            raise KeyError('The list campaign_defaults must have a subject')
        if 'language' not in data['campaign_defaults']:
            raise KeyError('The list campaign_defaults must have a language')
        if 'email_type_option' not in data:
            raise KeyError('The list must have an email_type_option')
        if data['email_type_option'] not in [True, False]:
            raise TypeError('The list email_type_option must be True or False')
        response = await self._mc_client._post(url=self._build_path(), data=data)
        # Remember the new list id (or clear it when the client returned nothing).
        if response is not None:
            self.list_id = response['id']
        else:
            self.list_id = None
        return response
    async def update_members(self, list_id, data):
        """Batch subscribe/unsubscribe up to 500 members of a list.

        Each member needs an email_address plus a status or
        status_if_new; update_existing defaults to False (written into
        the caller's dict) to avoid accidental updates.
        """
        self.list_id = list_id
        if 'members' not in data:
            raise KeyError('The update must have at least one member')
        else:
            if not len(data['members']) <= 500:
                raise ValueError('You may only batch sub/unsub 500 members at a time')
        for member in data['members']:
            if 'email_address' not in member:
                raise KeyError('Each list member must have an email_address')
            check_email(member['email_address'])
            if 'status' not in member and 'status_if_new' not in member:
                raise KeyError('Each list member must have either a status or a status_if_new')
            valid_statuses = ['subscribed', 'unsubscribed', 'cleaned', 'pending']
            if 'status' in member and member['status'] not in valid_statuses:
                raise ValueError('The list member status must be one of "subscribed", "unsubscribed", "cleaned", or '
                                 '"pending"')
            if 'status_if_new' in member and member['status_if_new'] not in valid_statuses:
                raise ValueError('The list member status_if_new must be one of "subscribed", "unsubscribed", '
                                 '"cleaned", or "pending"')
        if 'update_existing' not in data:
            data['update_existing'] = False
        return await self._mc_client._post(url=self._build_path(list_id), data=data)
    async def all(self, get_all=False, **queryparams):
        """Get all lists in the account; iterate every page when get_all is True."""
        self.list_id = None
        if get_all:
            return await self._iterate(url=self._build_path(), **queryparams)
        else:
            return await self._mc_client._get(url=self._build_path(), **queryparams)
    async def get(self, list_id, **queryparams):
        """Get information about a single list by id."""
        self.list_id = list_id
        return await self._mc_client._get(url=self._build_path(list_id), **queryparams)
    async def update(self, list_id, data):
        """Update a list's settings; same required fields (and errors) as create."""
        self.list_id = list_id
        if 'name' not in data:
            raise KeyError('The list must have a name')
        if 'contact' not in data:
            raise KeyError('The list must have a contact')
        if 'company' not in data['contact']:
            raise KeyError('The list contact must have a company')
        if 'address1' not in data['contact']:
            raise KeyError('The list contact must have a address1')
        if 'city' not in data['contact']:
            raise KeyError('The list contact must have a city')
        if 'state' not in data['contact']:
            raise KeyError('The list contact must have a state')
        if 'zip' not in data['contact']:
            raise KeyError('The list contact must have a zip')
        if 'country' not in data['contact']:
            raise KeyError('The list contact must have a country')
        if 'permission_reminder' not in data:
            raise KeyError('The list must have a permission_reminder')
        if 'campaign_defaults' not in data:
            raise KeyError('The list must have a campaign_defaults')
        if 'from_name' not in data['campaign_defaults']:
            raise KeyError('The list campaign_defaults must have a from_name')
        if 'from_email' not in data['campaign_defaults']:
            raise KeyError('The list campaign_defaults must have a from_email')
        check_email(data['campaign_defaults']['from_email'])
        if 'subject' not in data['campaign_defaults']:
            raise KeyError('The list campaign_defaults must have a subject')
        if 'language' not in data['campaign_defaults']:
            raise KeyError('The list campaign_defaults must have a language')
        if 'email_type_option' not in data:
            raise KeyError('The list must have an email_type_option')
        if data['email_type_option'] not in [True, False]:
            raise TypeError('The list email_type_option must be True or False')
        return await self._mc_client._patch(url=self._build_path(list_id), data=data)
    async def delete(self, list_id):
        """Delete a list by id; list history and subscriber data are lost."""
        self.list_id = list_id
        return await self._mc_client._delete(url=self._build_path(list_id))
| true | true |
f724b6f57b0beae0774790afba29b953900b5714 | 58,559 | py | Python | pilotnet/viz_gamepy.py | Pronton2001/carla_pilotnet | 813ca14e04eccd405fde5fff350fe23c6ada5657 | [
"MIT"
] | null | null | null | pilotnet/viz_gamepy.py | Pronton2001/carla_pilotnet | 813ca14e04eccd405fde5fff350fe23c6ada5657 | [
"MIT"
] | null | null | null | pilotnet/viz_gamepy.py | Pronton2001/carla_pilotnet | 813ca14e04eccd405fde5fff350fe23c6ada5657 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# Copyright (c) 2019 Computer Vision Center (CVC) at the Universitat Autonoma de
# Barcelona (UAB).
#
# This work is licensed under the terms of the MIT license.
# For a copy, see <https://opensource.org/licenses/MIT>.
# Allows controlling a vehicle with a keyboard. For a simpler and more
# documented example, please take a look at tutorial.py.
"""
Welcome to CARLA manual control.
Use ARROWS or WASD keys for control.
W : throttle
S : brake
A/D : steer left/right
Q : toggle reverse
Space : hand-brake
P : toggle autopilot
M : toggle manual transmission
,/. : gear up/down
CTRL + W : toggle constant velocity mode at 60 km/h
L : toggle next light type
SHIFT + L : toggle high beam
Z/X : toggle right/left blinker
I : toggle interior light
TAB : change sensor position
` or N : next sensor
[1-9] : change to sensor [1-9]
G : toggle radar visualization
C : change weather (Shift+C reverse)
Backspace : change vehicle
V : Select next map layer (Shift+V reverse)
B : Load current selected map layer (Shift+B to unload)
R : toggle recording images to disk
T : toggle vehicle's telemetry
CTRL + R : toggle recording of simulation (replacing any previous)
CTRL + P : start replaying last recorded simulation
CTRL + + : increments the start time of the replay by 1 second (+SHIFT = 10 seconds)
CTRL + - : decrements the start time of the replay by 1 second (+SHIFT = 10 seconds)
F1 : toggle HUD
H/? : toggle help
ESC : quit
"""
from __future__ import print_function
# ==============================================================================
# -- find carla module ---------------------------------------------------------
# ==============================================================================
import glob
import os
import sys
try:
    # Make the locally built CARLA egg importable: pick the artifact that
    # matches this interpreter's version and the host platform.
    sys.path.append(glob.glob('../carla/dist/carla-*%d.%d-%s.egg' % (
        sys.version_info.major,
        sys.version_info.minor,
        'win-amd64' if os.name == 'nt' else 'linux-x86_64'))[0])
except IndexError:
    # No egg found -- fall back to a system-wide carla installation.
    pass
# ==============================================================================
# -- imports -------------------------------------------------------------------
# ==============================================================================
import carla
from carla import ColorConverter as cc
import argparse
import collections
import datetime
import logging
import math
import random
import re
import weakref
##################### Added by me #####################
import tensorflow as tf
from tensorflow import expand_dims
from tensorflow.image import resize
import cv2
import model as md
# Build the PilotNet steering model and restore its trained weights.
# NOTE(review): the weights path is relative to the CWD -- presumably the
# repo root; confirm before running from elsewhere.
model = md.getPilotNetModel()
model.load_weights('model/model-weights.h5')
try:
import pygame
from pygame.locals import KMOD_CTRL
from pygame.locals import KMOD_SHIFT
from pygame.locals import K_0
from pygame.locals import K_9
from pygame.locals import K_BACKQUOTE
from pygame.locals import K_BACKSPACE
from pygame.locals import K_COMMA
from pygame.locals import K_DOWN
from pygame.locals import K_ESCAPE
from pygame.locals import K_F1
from pygame.locals import K_LEFT
from pygame.locals import K_PERIOD
from pygame.locals import K_RIGHT
from pygame.locals import K_SLASH
from pygame.locals import K_SPACE
from pygame.locals import K_TAB
from pygame.locals import K_UP
from pygame.locals import K_a
from pygame.locals import K_b
from pygame.locals import K_c
from pygame.locals import K_d
from pygame.locals import K_g
from pygame.locals import K_h
from pygame.locals import K_i
from pygame.locals import K_l
from pygame.locals import K_m
from pygame.locals import K_n
from pygame.locals import K_p
from pygame.locals import K_q
from pygame.locals import K_r
from pygame.locals import K_s
from pygame.locals import K_t
from pygame.locals import K_v
from pygame.locals import K_w
from pygame.locals import K_x
from pygame.locals import K_z
from pygame.locals import K_MINUS
from pygame.locals import K_EQUALS
except ImportError:
raise RuntimeError('cannot import pygame, make sure pygame package is installed')
try:
import numpy as np
except ImportError:
raise RuntimeError('cannot import numpy, make sure numpy package is installed')
# ==============================================================================
# -- Global functions ----------------------------------------------------------
# ==============================================================================
def find_weather_presets():
    """Return (carla.WeatherParameters, display name) pairs for every preset."""
    # Splits CamelCase identifiers into space-separated words.
    camel_case = re.compile('.+?(?:(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=[A-Z][a-z])|$)')

    def pretty(ident):
        return ' '.join(match.group(0) for match in camel_case.finditer(ident))

    presets = []
    for attr in dir(carla.WeatherParameters):
        if re.match('[A-Z].+', attr):
            presets.append((getattr(carla.WeatherParameters, attr), pretty(attr)))
    return presets
def get_actor_display_name(actor, truncate=250):
    """Build a human-readable name from an actor's type_id, e.g.
    'vehicle.tesla.model3' -> 'Tesla Model3'; names longer than *truncate*
    are cut and suffixed with an ellipsis."""
    parts = actor.type_id.replace('_', '.').title().split('.')
    display = ' '.join(parts[1:])
    if len(display) > truncate:
        return display[:truncate - 1] + u'\u2026'
    return display
def get_actor_blueprints(world, filter, generation):
    """Return the blueprints matching *filter*, optionally restricted to an
    actor *generation* ("1", "2", or "all").

    :param world: carla.World (or anything exposing get_blueprint_library())
    :param filter: wildcard pattern passed to the blueprint library
    :param generation: "all", or a string parseable as int 1 or 2
    :return: a (possibly empty) sequence of blueprints
    """
    bps = world.get_blueprint_library().filter(filter)
    if generation.lower() == "all":
        return bps
    # If the filter returns only one bp, we assume that this one needed
    # and therefore, we ignore the generation
    if len(bps) == 1:
        return bps
    try:
        int_generation = int(generation)
        # Check if generation is in available generations
        if int_generation in [1, 2]:
            bps = [x for x in bps if int(x.get_attribute('generation')) == int_generation]
            return bps
        else:
            print(" Warning! Actor Generation is not valid. No actor will be spawned.")
            return []
    except ValueError:
        # was a bare `except:` that swallowed every error; only the int()
        # conversions above can legitimately fail here
        print(" Warning! Actor Generation is not valid. No actor will be spawned.")
        return []
# ==============================================================================
# -- World ---------------------------------------------------------------------
# ==============================================================================
class World(object):
    """Client-side world state: owns the player actor, its helper sensors,
    the HUD, and the current weather / map-layer selection."""
    def __init__(self, carla_world, hud, args):
        self.world = carla_world
        self.sync = args.sync
        self.actor_role_name = args.rolename
        try:
            self.map = self.world.get_map()
        except RuntimeError as error:
            # Without a map nothing can run; abort with a hint for the user.
            print('RuntimeError: {}'.format(error))
            print(' The server could not send the OpenDRIVE (.xodr) file:')
            print(' Make sure it exists, has the same name of your town, and is correct.')
            sys.exit(1)
        self.hud = hud
        self.player = None
        self.collision_sensor = None
        self.lane_invasion_sensor = None
        self.gnss_sensor = None
        self.imu_sensor = None
        self.radar_sensor = None
        self.camera_manager = None
        self._weather_presets = find_weather_presets()
        self._weather_index = 0
        self._actor_filter = args.filter
        self._actor_generation = args.generation
        self._gamma = args.gamma
        # restart() spawns the player and all sensors.
        self.restart()
        self.world.on_tick(hud.on_world_tick)
        self.recording_enabled = False
        self.recording_start = 0
        self.constant_velocity_enabled = False
        self.show_vehicle_telemetry = False
        self.current_map_layer = 0
        self.map_layer_names = [
            carla.MapLayer.NONE,
            carla.MapLayer.Buildings,
            carla.MapLayer.Decals,
            carla.MapLayer.Foliage,
            carla.MapLayer.Ground,
            carla.MapLayer.ParkedVehicles,
            carla.MapLayer.Particles,
            carla.MapLayer.Props,
            carla.MapLayer.StreetLights,
            carla.MapLayer.Walls,
            carla.MapLayer.All
        ]
    def restart(self):
        """(Re)spawn the player with a random blueprint and rebuild every
        attached sensor, keeping the previous camera configuration."""
        self.player_max_speed = 1.589
        self.player_max_speed_fast = 3.713
        # Keep same camera config if the camera manager exists.
        cam_index = self.camera_manager.index if self.camera_manager is not None else 0
        cam_pos_index = self.camera_manager.transform_index if self.camera_manager is not None else 0
        # Get a random blueprint.
        blueprint = random.choice(get_actor_blueprints(self.world, self._actor_filter, self._actor_generation))
        blueprint.set_attribute('role_name', self.actor_role_name)
        if blueprint.has_attribute('color'):
            color = random.choice(blueprint.get_attribute('color').recommended_values)
            blueprint.set_attribute('color', color)
        if blueprint.has_attribute('driver_id'):
            driver_id = random.choice(blueprint.get_attribute('driver_id').recommended_values)
            blueprint.set_attribute('driver_id', driver_id)
        if blueprint.has_attribute('is_invincible'):
            blueprint.set_attribute('is_invincible', 'true')
        # set the max speed
        if blueprint.has_attribute('speed'):
            self.player_max_speed = float(blueprint.get_attribute('speed').recommended_values[1])
            self.player_max_speed_fast = float(blueprint.get_attribute('speed').recommended_values[2])
        # Spawn the player.
        if self.player is not None:
            # Respawn slightly above the old transform with roll/pitch reset.
            spawn_point = self.player.get_transform()
            spawn_point.location.z += 2.0
            spawn_point.rotation.roll = 0.0
            spawn_point.rotation.pitch = 0.0
            self.destroy()
            self.player = self.world.try_spawn_actor(blueprint, spawn_point)
            self.modify_vehicle_physics(self.player)
        while self.player is None:
            # try_spawn_actor may fail on an occupied point; keep trying.
            if not self.map.get_spawn_points():
                print('There are no spawn points available in your map/town.')
                print('Please add some Vehicle Spawn Point to your UE4 scene.')
                sys.exit(1)
            spawn_points = self.map.get_spawn_points()
            spawn_point = random.choice(spawn_points) if spawn_points else carla.Transform()
            self.player = self.world.try_spawn_actor(blueprint, spawn_point)
            self.modify_vehicle_physics(self.player)
        # Set up the sensors.
        self.collision_sensor = CollisionSensor(self.player, self.hud)
        self.lane_invasion_sensor = LaneInvasionSensor(self.player, self.hud)
        self.gnss_sensor = GnssSensor(self.player)
        self.imu_sensor = IMUSensor(self.player)
        self.camera_manager = CameraManager(self.player, self.hud, self._gamma)
        self.camera_manager.transform_index = cam_pos_index
        self.camera_manager.set_sensor(cam_index, notify=False)
        actor_type = get_actor_display_name(self.player)
        self.hud.notification(actor_type)
        if self.sync:
            self.world.tick()
        else:
            self.world.wait_for_tick()
    def next_weather(self, reverse=False):
        """Cycle to the next (or previous) weather preset."""
        self._weather_index += -1 if reverse else 1
        self._weather_index %= len(self._weather_presets)
        preset = self._weather_presets[self._weather_index]
        self.hud.notification('Weather: %s' % preset[1])
        self.player.get_world().set_weather(preset[0])
    def next_map_layer(self, reverse=False):
        """Select the next (or previous) map layer without loading it."""
        self.current_map_layer += -1 if reverse else 1
        self.current_map_layer %= len(self.map_layer_names)
        selected = self.map_layer_names[self.current_map_layer]
        self.hud.notification('LayerMap selected: %s' % selected)
    def load_map_layer(self, unload=False):
        """Load (or unload, when *unload* is True) the selected map layer."""
        selected = self.map_layer_names[self.current_map_layer]
        if unload:
            self.hud.notification('Unloading map layer: %s' % selected)
            self.world.unload_map_layer(selected)
        else:
            self.hud.notification('Loading map layer: %s' % selected)
            self.world.load_map_layer(selected)
    def toggle_radar(self):
        """Attach the radar sensor, or destroy it if already attached."""
        if self.radar_sensor is None:
            self.radar_sensor = RadarSensor(self.player)
        elif self.radar_sensor.sensor is not None:
            self.radar_sensor.sensor.destroy()
            self.radar_sensor = None
    def modify_vehicle_physics(self, actor):
        #If actor is not a vehicle, we cannot use the physics control
        try:
            physics_control = actor.get_physics_control()
            physics_control.use_sweep_wheel_collision = True
            actor.apply_physics_control(physics_control)
        except Exception:
            pass
    def tick(self, clock):
        """Advance HUD bookkeeping for this client frame."""
        self.hud.tick(self, clock)
    def render(self, display):
        """Draw the camera view first, then the HUD overlay on top."""
        self.camera_manager.render(display)
        self.hud.render(display)
    def destroy_sensors(self):
        """Tear down only the camera sensor (used around replays)."""
        self.camera_manager.sensor.destroy()
        self.camera_manager.sensor = None
        self.camera_manager.index = None
    def destroy(self):
        """Stop and destroy every sensor, then the player actor."""
        if self.radar_sensor is not None:
            self.toggle_radar()
        sensors = [
            self.camera_manager.sensor,
            self.collision_sensor.sensor,
            self.lane_invasion_sensor.sensor,
            self.gnss_sensor.sensor,
            self.imu_sensor.sensor]
        for sensor in sensors:
            if sensor is not None:
                sensor.stop()
                sensor.destroy()
        if self.player is not None:
            self.player.destroy()
# ==============================================================================
# -- KeyboardControl -----------------------------------------------------------
# ==============================================================================
class KeyboardControl(object):
    """Class that handles keyboard input."""
    def __init__(self, world, start_in_autopilot):
        self._autopilot_enabled = start_in_autopilot
        if isinstance(world.player, carla.Vehicle):
            self._control = carla.VehicleControl()
            self._lights = carla.VehicleLightState.NONE
            world.player.set_autopilot(self._autopilot_enabled)
            world.player.set_light_state(self._lights)
        elif isinstance(world.player, carla.Walker):
            self._control = carla.WalkerControl()
            self._autopilot_enabled = False
            self._rotation = world.player.get_transform().rotation
        else:
            raise NotImplementedError("Actor type not supported")
        self._steer_cache = 0.0
        world.hud.notification("Press 'H' or '?' for help.", seconds=4.0)
    def parse_events(self, client, world, clock, sync_mode):
        """Process one pygame event batch; returns True when quitting."""
        if isinstance(self._control, carla.VehicleControl):
            current_lights = self._lights
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                return True
            elif event.type == pygame.KEYUP:
                if self._is_quit_shortcut(event.key):
                    return True
                elif event.key == K_BACKSPACE:
                    if self._autopilot_enabled:
                        world.player.set_autopilot(False)
                        world.restart()
                        world.player.set_autopilot(True)
                    else:
                        world.restart()
                elif event.key == K_F1:
                    world.hud.toggle_info()
                elif event.key == K_v and pygame.key.get_mods() & KMOD_SHIFT:
                    world.next_map_layer(reverse=True)
                elif event.key == K_v:
                    world.next_map_layer()
                elif event.key == K_b and pygame.key.get_mods() & KMOD_SHIFT:
                    world.load_map_layer(unload=True)
                elif event.key == K_b:
                    # BUGFIX: plain 'B' must LOAD the selected layer; it was
                    # passing unload=True, making B and Shift+B identical.
                    world.load_map_layer(unload=False)
                elif event.key == K_h or (event.key == K_SLASH and pygame.key.get_mods() & KMOD_SHIFT):
                    world.hud.help.toggle()
                elif event.key == K_TAB:
                    world.camera_manager.toggle_camera()
                elif event.key == K_c and pygame.key.get_mods() & KMOD_SHIFT:
                    world.next_weather(reverse=True)
                elif event.key == K_c:
                    world.next_weather()
                elif event.key == K_g:
                    world.toggle_radar()
                elif event.key == K_BACKQUOTE:
                    world.camera_manager.next_sensor()
                elif event.key == K_n:
                    world.camera_manager.next_sensor()
                elif event.key == K_w and (pygame.key.get_mods() & KMOD_CTRL):
                    if world.constant_velocity_enabled:
                        world.player.disable_constant_velocity()
                        world.constant_velocity_enabled = False
                        world.hud.notification("Disabled Constant Velocity Mode")
                    else:
                        # 17 m/s ~= 60 km/h
                        world.player.enable_constant_velocity(carla.Vector3D(17, 0, 0))
                        world.constant_velocity_enabled = True
                        world.hud.notification("Enabled Constant Velocity Mode at 60 km/h")
                elif event.key == K_t:
                    if world.show_vehicle_telemetry:
                        world.player.show_debug_telemetry(False)
                        world.show_vehicle_telemetry = False
                        world.hud.notification("Disabled Vehicle Telemetry")
                    else:
                        try:
                            world.player.show_debug_telemetry(True)
                            world.show_vehicle_telemetry = True
                            world.hud.notification("Enabled Vehicle Telemetry")
                        except Exception:
                            pass
                elif event.key > K_0 and event.key <= K_9:
                    index_ctrl = 0
                    if pygame.key.get_mods() & KMOD_CTRL:
                        index_ctrl = 9
                    world.camera_manager.set_sensor(event.key - 1 - K_0 + index_ctrl)
                elif event.key == K_r and not (pygame.key.get_mods() & KMOD_CTRL):
                    world.camera_manager.toggle_recording()
                elif event.key == K_r and (pygame.key.get_mods() & KMOD_CTRL):
                    if (world.recording_enabled):
                        client.stop_recorder()
                        world.recording_enabled = False
                        world.hud.notification("Recorder is OFF")
                    else:
                        client.start_recorder("manual_recording.rec")
                        world.recording_enabled = True
                        world.hud.notification("Recorder is ON")
                elif event.key == K_p and (pygame.key.get_mods() & KMOD_CTRL):
                    # stop recorder
                    client.stop_recorder()
                    world.recording_enabled = False
                    # work around to fix camera at start of replaying
                    current_index = world.camera_manager.index
                    world.destroy_sensors()
                    # disable autopilot
                    self._autopilot_enabled = False
                    world.player.set_autopilot(self._autopilot_enabled)
                    world.hud.notification("Replaying file 'manual_recording.rec'")
                    # replayer
                    client.replay_file("manual_recording.rec", world.recording_start, 0, 0)
                    world.camera_manager.set_sensor(current_index)
                elif event.key == K_MINUS and (pygame.key.get_mods() & KMOD_CTRL):
                    if pygame.key.get_mods() & KMOD_SHIFT:
                        world.recording_start -= 10
                    else:
                        world.recording_start -= 1
                    world.hud.notification("Recording start time is %d" % (world.recording_start))
                elif event.key == K_EQUALS and (pygame.key.get_mods() & KMOD_CTRL):
                    if pygame.key.get_mods() & KMOD_SHIFT:
                        world.recording_start += 10
                    else:
                        world.recording_start += 1
                    world.hud.notification("Recording start time is %d" % (world.recording_start))
                if isinstance(self._control, carla.VehicleControl):
                    if event.key == K_q:
                        self._control.gear = 1 if self._control.reverse else -1
                    elif event.key == K_m:
                        self._control.manual_gear_shift = not self._control.manual_gear_shift
                        self._control.gear = world.player.get_control().gear
                        world.hud.notification('%s Transmission' %
                                               ('Manual' if self._control.manual_gear_shift else 'Automatic'))
                    elif self._control.manual_gear_shift and event.key == K_COMMA:
                        self._control.gear = max(-1, self._control.gear - 1)
                    elif self._control.manual_gear_shift and event.key == K_PERIOD:
                        self._control.gear = self._control.gear + 1
                    elif event.key == K_p and not pygame.key.get_mods() & KMOD_CTRL:
                        if not self._autopilot_enabled and not sync_mode:
                            print("WARNING: You are currently in asynchronous mode and could "
                                  "experience some issues with the traffic simulation")
                        self._autopilot_enabled = not self._autopilot_enabled
                        world.player.set_autopilot(self._autopilot_enabled)
                        world.hud.notification(
                            'Autopilot %s' % ('On' if self._autopilot_enabled else 'Off'))
                    elif event.key == K_l and pygame.key.get_mods() & KMOD_CTRL:
                        current_lights ^= carla.VehicleLightState.Special1
                    elif event.key == K_l and pygame.key.get_mods() & KMOD_SHIFT:
                        current_lights ^= carla.VehicleLightState.HighBeam
                    elif event.key == K_l:
                        # Use 'L' key to switch between lights:
                        # closed -> position -> low beam -> fog
                        if not self._lights & carla.VehicleLightState.Position:
                            world.hud.notification("Position lights")
                            current_lights |= carla.VehicleLightState.Position
                        else:
                            world.hud.notification("Low beam lights")
                            current_lights |= carla.VehicleLightState.LowBeam
                        if self._lights & carla.VehicleLightState.LowBeam:
                            world.hud.notification("Fog lights")
                            current_lights |= carla.VehicleLightState.Fog
                        if self._lights & carla.VehicleLightState.Fog:
                            world.hud.notification("Lights off")
                            current_lights ^= carla.VehicleLightState.Position
                            current_lights ^= carla.VehicleLightState.LowBeam
                            current_lights ^= carla.VehicleLightState.Fog
                    elif event.key == K_i:
                        current_lights ^= carla.VehicleLightState.Interior
                    elif event.key == K_z:
                        current_lights ^= carla.VehicleLightState.LeftBlinker
                    elif event.key == K_x:
                        current_lights ^= carla.VehicleLightState.RightBlinker
        if not self._autopilot_enabled:
            if isinstance(self._control, carla.VehicleControl):
                self._parse_vehicle_keys(pygame.key.get_pressed(), clock.get_time())
                self._control.reverse = self._control.gear < 0
                # Set automatic control-related vehicle lights
                if self._control.brake:
                    current_lights |= carla.VehicleLightState.Brake
                else: # Remove the Brake flag
                    current_lights &= ~carla.VehicleLightState.Brake
                if self._control.reverse:
                    current_lights |= carla.VehicleLightState.Reverse
                else: # Remove the Reverse flag
                    current_lights &= ~carla.VehicleLightState.Reverse
                if current_lights != self._lights: # Change the light state only if necessary
                    self._lights = current_lights
                    world.player.set_light_state(carla.VehicleLightState(self._lights))
            elif isinstance(self._control, carla.WalkerControl):
                self._parse_walker_keys(pygame.key.get_pressed(), clock.get_time(), world)
            world.player.apply_control(self._control)
    def _parse_vehicle_keys(self, keys, milliseconds):
        """Translate held keys into throttle/brake/steer for a vehicle."""
        if keys[K_UP] or keys[K_w]:
            self._control.throttle = min(self._control.throttle + 0.01, 1.00)
        else:
            self._control.throttle = 0.0
        if keys[K_DOWN] or keys[K_s]:
            self._control.brake = min(self._control.brake + 0.2, 1)
        else:
            self._control.brake = 0
        steer_increment = 5e-4 * milliseconds
        if keys[K_LEFT] or keys[K_a]:
            if self._steer_cache > 0:
                self._steer_cache = 0
            else:
                self._steer_cache -= steer_increment
        elif keys[K_RIGHT] or keys[K_d]:
            if self._steer_cache < 0:
                self._steer_cache = 0
            else:
                self._steer_cache += steer_increment
        else:
            self._steer_cache = 0.0
        self._steer_cache = min(0.7, max(-0.7, self._steer_cache))
        self._control.steer = round(self._steer_cache, 1)
        self._control.hand_brake = keys[K_SPACE]
    def _parse_walker_keys(self, keys, milliseconds, world):
        """Translate held keys into speed/heading for a walker."""
        self._control.speed = 0.0
        if keys[K_DOWN] or keys[K_s]:
            self._control.speed = 0.0
        if keys[K_LEFT] or keys[K_a]:
            self._control.speed = .01
            self._rotation.yaw -= 0.08 * milliseconds
        if keys[K_RIGHT] or keys[K_d]:
            self._control.speed = .01
            self._rotation.yaw += 0.08 * milliseconds
        if keys[K_UP] or keys[K_w]:
            self._control.speed = world.player_max_speed_fast if pygame.key.get_mods() & KMOD_SHIFT else world.player_max_speed
        self._control.jump = keys[K_SPACE]
        self._rotation.yaw = round(self._rotation.yaw, 1)
        self._control.direction = self._rotation.get_forward_vector()
    @staticmethod
    def _is_quit_shortcut(key):
        """ESC or Ctrl+Q quits."""
        return (key == K_ESCAPE) or (key == K_q and pygame.key.get_mods() & KMOD_CTRL)
# ==============================================================================
# -- HUD -----------------------------------------------------------------------
# ==============================================================================
class HUD(object):
    """On-screen heads-up display: telemetry panel, fading notifications,
    and the help overlay."""
    def __init__(self, width, height):
        self.dim = (width, height)
        font = pygame.font.Font(pygame.font.get_default_font(), 20)
        # Prefer a monospaced system font for the telemetry column.
        font_name = 'courier' if os.name == 'nt' else 'mono'
        fonts = [x for x in pygame.font.get_fonts() if font_name in x]
        default_font = 'ubuntumono'
        mono = default_font if default_font in fonts else fonts[0]
        mono = pygame.font.match_font(mono)
        self._font_mono = pygame.font.Font(mono, 12 if os.name == 'nt' else 14)
        self._notifications = FadingText(font, (width, 40), (0, height - 40))
        self.help = HelpText(pygame.font.Font(mono, 16), width, height)
        self.server_fps = 0
        self.frame = 0
        self.simulation_time = 0
        self._show_info = True
        self._info_text = []
        self._server_clock = pygame.time.Clock()
    def on_world_tick(self, timestamp):
        """Server-side tick callback: track server FPS and sim time."""
        self._server_clock.tick()
        self.server_fps = self._server_clock.get_fps()
        self.frame = timestamp.frame
        self.simulation_time = timestamp.elapsed_seconds
    def tick(self, world, clock):
        """Rebuild the telemetry text/bars for the current frame."""
        self._notifications.tick(world, clock)
        if not self._show_info:
            return
        t = world.player.get_transform()
        v = world.player.get_velocity()
        c = world.player.get_control()
        compass = world.imu_sensor.compass
        heading = 'N' if compass > 270.5 or compass < 89.5 else ''
        heading += 'S' if 90.5 < compass < 269.5 else ''
        heading += 'E' if 0.5 < compass < 179.5 else ''
        heading += 'W' if 180.5 < compass < 359.5 else ''
        # Normalized collision intensities for the last 200 frames.
        colhist = world.collision_sensor.get_collision_history()
        collision = [colhist[x + self.frame - 200] for x in range(0, 200)]
        max_col = max(1.0, max(collision))
        collision = [x / max_col for x in collision]
        vehicles = world.world.get_actors().filter('vehicle.*')
        self._info_text = [
            'Server: % 16.0f FPS' % self.server_fps,
            'Client: % 16.0f FPS' % clock.get_fps(),
            '',
            'Vehicle: % 20s' % get_actor_display_name(world.player, truncate=20),
            'Map: % 20s' % world.map.name.split('/')[-1],
            'Simulation time: % 12s' % datetime.timedelta(seconds=int(self.simulation_time)),
            '',
            'Speed: % 15.0f km/h' % (3.6 * math.sqrt(v.x**2 + v.y**2 + v.z**2)),
            u'Compass:% 17.0f\N{DEGREE SIGN} % 2s' % (compass, heading),
            'Accelero: (%5.1f,%5.1f,%5.1f)' % (world.imu_sensor.accelerometer),
            'Gyroscop: (%5.1f,%5.1f,%5.1f)' % (world.imu_sensor.gyroscope),
            'Location:% 20s' % ('(% 5.1f, % 5.1f)' % (t.location.x, t.location.y)),
            'GNSS:% 24s' % ('(% 2.6f, % 3.6f)' % (world.gnss_sensor.lat, world.gnss_sensor.lon)),
            'Height: % 18.0f m' % t.location.z,
            '']
        if isinstance(c, carla.VehicleControl):
            self._info_text += [
                ('Throttle:', c.throttle, 0.0, 1.0),
                ('Steer:', c.steer, -1.0, 1.0),
                ('Brake:', c.brake, 0.0, 1.0),
                ('Reverse:', c.reverse),
                ('Hand brake:', c.hand_brake),
                ('Manual:', c.manual_gear_shift),
                'Gear: %s' % {-1: 'R', 0: 'N'}.get(c.gear, c.gear)]
        elif isinstance(c, carla.WalkerControl):
            self._info_text += [
                ('Speed:', c.speed, 0.0, 5.556),
                ('Jump:', c.jump)]
        self._info_text += [
            '',
            'Collision:',
            collision,
            '',
            'Number of vehicles: % 8d' % len(vehicles)]
        if len(vehicles) > 1:
            self._info_text += ['Nearby vehicles:']
            distance = lambda l: math.sqrt((l.x - t.location.x)**2 + (l.y - t.location.y)**2 + (l.z - t.location.z)**2)
            vehicles = [(distance(x.get_location()), x) for x in vehicles if x.id != world.player.id]
            for d, vehicle in sorted(vehicles, key=lambda vehicles: vehicles[0]):
                if d > 200.0:
                    break
                vehicle_type = get_actor_display_name(vehicle, truncate=22)
                self._info_text.append('% 4dm %s' % (d, vehicle_type))
    def toggle_info(self):
        """Show/hide the telemetry panel."""
        self._show_info = not self._show_info
    def notification(self, text, seconds=2.0):
        """Show a fading notification at the bottom of the screen."""
        self._notifications.set_text(text, seconds=seconds)
    def error(self, text):
        """Show a red error notification."""
        self._notifications.set_text('Error: %s' % text, (255, 0, 0))
    def render(self, display):
        """Draw the telemetry panel, notifications and help overlay."""
        if self._show_info:
            info_surface = pygame.Surface((220, self.dim[1]))
            info_surface.set_alpha(100)
            display.blit(info_surface, (0, 0))
            v_offset = 4
            bar_h_offset = 100
            bar_width = 106
            for item in self._info_text:
                if v_offset + 18 > self.dim[1]:
                    break
                if isinstance(item, list):
                    # A list is rendered as a line graph (collision history).
                    if len(item) > 1:
                        points = [(x + 8, v_offset + 8 + (1.0 - y) * 30) for x, y in enumerate(item)]
                        pygame.draw.lines(display, (255, 136, 0), False, points, 2)
                    item = None
                    v_offset += 18
                elif isinstance(item, tuple):
                    if isinstance(item[1], bool):
                        # Boolean -> small checkbox.
                        rect = pygame.Rect((bar_h_offset, v_offset + 8), (6, 6))
                        pygame.draw.rect(display, (255, 255, 255), rect, 0 if item[1] else 1)
                    else:
                        # (label, value, min, max) -> horizontal bar.
                        rect_border = pygame.Rect((bar_h_offset, v_offset + 8), (bar_width, 6))
                        pygame.draw.rect(display, (255, 255, 255), rect_border, 1)
                        f = (item[1] - item[2]) / (item[3] - item[2])
                        if item[2] < 0.0:
                            rect = pygame.Rect((bar_h_offset + f * (bar_width - 6), v_offset + 8), (6, 6))
                        else:
                            rect = pygame.Rect((bar_h_offset, v_offset + 8), (f * bar_width, 6))
                        pygame.draw.rect(display, (255, 255, 255), rect)
                    item = item[0]
                if item: # At this point has to be a str.
                    surface = self._font_mono.render(item, True, (255, 255, 255))
                    display.blit(surface, (8, v_offset))
                v_offset += 18
        self._notifications.render(display)
        self.help.render(display)
# ==============================================================================
# -- FadingText ----------------------------------------------------------------
# ==============================================================================
class FadingText(object):
    """Single-line notification that fades out over a few seconds."""
    def __init__(self, font, dim, pos):
        self.font = font
        self.dim = dim
        self.pos = pos
        self.seconds_left = 0
        self.surface = pygame.Surface(self.dim)
    def set_text(self, text, color=(255, 255, 255), seconds=2.0):
        """Render *text* onto a fresh surface and restart the fade timer."""
        rendered = self.font.render(text, True, color)
        self.surface = pygame.Surface(self.dim)
        self.seconds_left = seconds
        self.surface.fill((0, 0, 0, 0))
        self.surface.blit(rendered, (10, 11))
    def tick(self, _, clock):
        """Decrease the remaining time and dim the surface accordingly."""
        elapsed = 1e-3 * clock.get_time()
        self.seconds_left = max(0.0, self.seconds_left - elapsed)
        self.surface.set_alpha(500.0 * self.seconds_left)
    def render(self, display):
        display.blit(self.surface, self.pos)
# ==============================================================================
# -- HelpText ------------------------------------------------------------------
# ==============================================================================
class HelpText(object):
    """Helper class to handle text output using pygame"""
    def __init__(self, font, width, height):
        lines = __doc__.split('\n')
        self.font = font
        self.line_space = 18
        self.dim = (780, len(lines) * self.line_space + 12)
        # Center the panel on the screen.
        self.pos = (0.5 * width - 0.5 * self.dim[0], 0.5 * height - 0.5 * self.dim[1])
        self.seconds_left = 0
        self.surface = pygame.Surface(self.dim)
        self.surface.fill((0, 0, 0, 0))
        for row, line in enumerate(lines):
            rendered = self.font.render(line, True, (255, 255, 255))
            self.surface.blit(rendered, (22, row * self.line_space))
        self._render = False
        self.surface.set_alpha(220)
    def toggle(self):
        """Show or hide the help overlay."""
        self._render = not self._render
    def render(self, display):
        if self._render:
            display.blit(self.surface, self.pos)
# ==============================================================================
# -- CollisionSensor -----------------------------------------------------------
# ==============================================================================
class CollisionSensor(object):
    """Attaches a collision sensor to an actor and keeps a rolling history
    of collision intensities for the HUD graph."""
    def __init__(self, parent_actor, hud):
        self.sensor = None
        self.history = []  # (frame, intensity) tuples, newest last
        self._parent = parent_actor
        self.hud = hud
        world = self._parent.get_world()
        blueprint = world.get_blueprint_library().find('sensor.other.collision')
        self.sensor = world.spawn_actor(blueprint, carla.Transform(), attach_to=self._parent)
        # We need to pass the lambda a weak reference to self to avoid circular
        # reference.
        weak_self = weakref.ref(self)
        self.sensor.listen(lambda event: CollisionSensor._on_collision(weak_self, event))
    def get_collision_history(self):
        """Return total recorded intensity per frame as a defaultdict."""
        per_frame = collections.defaultdict(int)
        for frame, intensity in self.history:
            per_frame[frame] += intensity
        return per_frame
    @staticmethod
    def _on_collision(weak_self, event):
        self = weak_self()
        if not self:
            return
        other = get_actor_display_name(event.other_actor)
        self.hud.notification('Collision with %r' % other)
        impulse = event.normal_impulse
        intensity = math.sqrt(impulse.x**2 + impulse.y**2 + impulse.z**2)
        self.history.append((event.frame, intensity))
        # Keep the history bounded.
        if len(self.history) > 4000:
            self.history.pop(0)
# ==============================================================================
# -- LaneInvasionSensor --------------------------------------------------------
# ==============================================================================
class LaneInvasionSensor(object):
    """Attaches a lane-invasion sensor to a vehicle and reports crossed
    lane markings on the HUD."""
    def __init__(self, parent_actor, hud):
        self.sensor = None
        # If the spawn object is not a vehicle, we cannot use the Lane Invasion Sensor
        if parent_actor.type_id.startswith("vehicle."):
            self._parent = parent_actor
            self.hud = hud
            world = self._parent.get_world()
            blueprint = world.get_blueprint_library().find('sensor.other.lane_invasion')
            self.sensor = world.spawn_actor(blueprint, carla.Transform(), attach_to=self._parent)
            # We need to pass the lambda a weak reference to self to avoid circular
            # reference.
            weak_self = weakref.ref(self)
            self.sensor.listen(lambda event: LaneInvasionSensor._on_invasion(weak_self, event))
    @staticmethod
    def _on_invasion(weak_self, event):
        self = weak_self()
        if not self:
            return
        crossed = set(marking.type for marking in event.crossed_lane_markings)
        labels = ['%r' % str(kind).split()[-1] for kind in crossed]
        self.hud.notification('Crossed line %s' % ' and '.join(labels))
# ==============================================================================
# -- GnssSensor ----------------------------------------------------------------
# ==============================================================================
class GnssSensor(object):
    """Keeps the most recent GNSS latitude/longitude reported for an actor."""

    def __init__(self, parent_actor):
        self.sensor = None
        self._parent = parent_actor
        self.lat = 0.0
        self.lon = 0.0
        sim_world = self._parent.get_world()
        blueprint = sim_world.get_blueprint_library().find('sensor.other.gnss')
        self.sensor = sim_world.spawn_actor(
            blueprint,
            carla.Transform(carla.Location(x=1.0, z=2.8)),
            attach_to=self._parent)
        # Weak reference avoids a reference cycle through the callback.
        weak_self = weakref.ref(self)
        self.sensor.listen(lambda event: GnssSensor._on_gnss_event(weak_self, event))

    @staticmethod
    def _on_gnss_event(weak_self, event):
        # Overwrite the cached fix with the latest reading.
        self = weak_self()
        if not self:
            return
        self.lat = event.latitude
        self.lon = event.longitude
# ==============================================================================
# -- IMUSensor -----------------------------------------------------------------
# ==============================================================================
class IMUSensor(object):
    """Caches the latest accelerometer/gyroscope/compass readings of an actor."""

    def __init__(self, parent_actor):
        self.sensor = None
        self._parent = parent_actor
        self.accelerometer = (0.0, 0.0, 0.0)
        self.gyroscope = (0.0, 0.0, 0.0)
        self.compass = 0.0
        sim_world = self._parent.get_world()
        blueprint = sim_world.get_blueprint_library().find('sensor.other.imu')
        self.sensor = sim_world.spawn_actor(
            blueprint, carla.Transform(), attach_to=self._parent)
        # Weak reference avoids a reference cycle through the callback.
        weak_self = weakref.ref(self)
        self.sensor.listen(
            lambda sensor_data: IMUSensor._IMU_callback(weak_self, sensor_data))

    @staticmethod
    def _IMU_callback(weak_self, sensor_data):
        self = weak_self()
        if not self:
            return
        lo, hi = -99.9, 99.9

        def clamp(value):
            # Keep HUD-displayed readings within a fixed range.
            return max(lo, min(hi, value))

        accel = sensor_data.accelerometer
        gyro = sensor_data.gyroscope
        self.accelerometer = (clamp(accel.x), clamp(accel.y), clamp(accel.z))
        # Gyroscope values are converted to degrees before clamping.
        self.gyroscope = (
            clamp(math.degrees(gyro.x)),
            clamp(math.degrees(gyro.y)),
            clamp(math.degrees(gyro.z)))
        self.compass = math.degrees(sensor_data.compass)
# ==============================================================================
# -- RadarSensor ---------------------------------------------------------------
# ==============================================================================
class RadarSensor(object):
    """Attaches a radar to an actor and draws each detection as a colored
    debug point in the world (color encodes approach velocity)."""

    def __init__(self, parent_actor):
        self.sensor = None
        self._parent = parent_actor
        # Offsets derived from the actor's bounding box so the radar sits
        # just outside the bodywork.  (bound_y is computed but unused here.)
        bound_x = 0.5 + self._parent.bounding_box.extent.x
        bound_y = 0.5 + self._parent.bounding_box.extent.y
        bound_z = 0.5 + self._parent.bounding_box.extent.z
        self.velocity_range = 7.5 # m/s
        world = self._parent.get_world()
        self.debug = world.debug
        bp = world.get_blueprint_library().find('sensor.other.radar')
        bp.set_attribute('horizontal_fov', str(35))
        bp.set_attribute('vertical_fov', str(20))
        self.sensor = world.spawn_actor(
            bp,
            carla.Transform(
                carla.Location(x=bound_x + 0.05, z=bound_z+0.05),
                carla.Rotation(pitch=5)),
            attach_to=self._parent)
        # We need a weak reference to self to avoid circular reference.
        weak_self = weakref.ref(self)
        self.sensor.listen(
            lambda radar_data: RadarSensor._Radar_callback(weak_self, radar_data))

    @staticmethod
    def _Radar_callback(weak_self, radar_data):
        """Draw every detection in this radar sweep as a debug point."""
        self = weak_self()
        if not self:
            return
        # To get a numpy [[vel, altitude, azimuth, depth],...[,,,]]:
        # points = np.frombuffer(radar_data.raw_data, dtype=np.dtype('f4'))
        # points = np.reshape(points, (len(radar_data), 4))
        current_rot = radar_data.transform.rotation
        for detect in radar_data:
            azi = math.degrees(detect.azimuth)
            alt = math.degrees(detect.altitude)
            # The 0.25 adjusts a bit the distance so the dots can
            # be properly seen
            fw_vec = carla.Vector3D(x=detect.depth - 0.25)
            # Rotate fw_vec in place by the sensor rotation plus the
            # detection's azimuth/altitude to place the point in the world.
            carla.Transform(
                carla.Location(),
                carla.Rotation(
                    pitch=current_rot.pitch + alt,
                    yaw=current_rot.yaw + azi,
                    roll=current_rot.roll)).transform(fw_vec)
            def clamp(min_v, max_v, value):
                return max(min_v, min(value, max_v))
            # Color encodes normalized approach velocity:
            # red for approaching, blue for receding, white near zero.
            norm_velocity = detect.velocity / self.velocity_range # range [-1, 1]
            r = int(clamp(0.0, 1.0, 1.0 - norm_velocity) * 255.0)
            g = int(clamp(0.0, 1.0, 1.0 - abs(norm_velocity)) * 255.0)
            b = int(abs(clamp(- 1.0, 0.0, - 1.0 - norm_velocity)) * 255.0)
            self.debug.draw_point(
                radar_data.transform.location + fw_vec,
                size=0.075,
                life_time=0.06,
                persistent_lines=False,
                color=carla.Color(r, g, b))
# ==============================================================================
# -- CameraManager -------------------------------------------------------------
# ==============================================================================
class CameraManager(object):
    """Owns the player's display sensors (cameras, lidar, DVS, optical
    flow), converts each incoming frame into a pygame surface and, on the
    plain RGB-camera path, feeds the frame through the module-level
    ``model`` to predict a steering angle that is applied to the parent
    vehicle."""

    def __init__(self, parent_actor, hud, gamma_correction):
        self.sensor = None
        self.surface = None
        self._parent = parent_actor
        self.hud = hud
        self.recording = False
        # Camera mount points scale with the actor's bounding box.
        bound_x = 0.5 + self._parent.bounding_box.extent.x
        bound_y = 0.5 + self._parent.bounding_box.extent.y
        bound_z = 0.5 + self._parent.bounding_box.extent.z
        Attachment = carla.AttachmentType

        if not self._parent.type_id.startswith("walker.pedestrian"):
            self._camera_transforms = [
                (carla.Transform(carla.Location(x=-2.0*bound_x, y=+0.0*bound_y, z=2.0*bound_z), carla.Rotation(pitch=8.0)), Attachment.SpringArm),
                (carla.Transform(carla.Location(x=+0.8*bound_x, y=+0.0*bound_y, z=1.3*bound_z)), Attachment.Rigid),
                (carla.Transform(carla.Location(x=+1.9*bound_x, y=+1.0*bound_y, z=1.2*bound_z)), Attachment.SpringArm),
                (carla.Transform(carla.Location(x=-2.8*bound_x, y=+0.0*bound_y, z=4.6*bound_z), carla.Rotation(pitch=6.0)), Attachment.SpringArm),
                (carla.Transform(carla.Location(x=-1.0, y=-1.0*bound_y, z=0.4*bound_z)), Attachment.Rigid)]
        else:
            self._camera_transforms = [
                (carla.Transform(carla.Location(x=-2.5, z=0.0), carla.Rotation(pitch=-8.0)), Attachment.SpringArm),
                (carla.Transform(carla.Location(x=1.6, z=1.7)), Attachment.Rigid),
                (carla.Transform(carla.Location(x=2.5, y=0.5, z=0.0), carla.Rotation(pitch=-8.0)), Attachment.SpringArm),
                (carla.Transform(carla.Location(x=-4.0, z=2.0), carla.Rotation(pitch=6.0)), Attachment.SpringArm),
                (carla.Transform(carla.Location(x=0, y=-2.5, z=-0.0), carla.Rotation(yaw=90.0)), Attachment.Rigid)]

        self.transform_index = 1
        # Each entry: [blueprint id, color converter, display name, extra attrs].
        self.sensors = [
            ['sensor.camera.rgb', cc.Raw, 'Camera RGB', {}],
            ['sensor.camera.depth', cc.Raw, 'Camera Depth (Raw)', {}],
            ['sensor.camera.depth', cc.Depth, 'Camera Depth (Gray Scale)', {}],
            ['sensor.camera.depth', cc.LogarithmicDepth, 'Camera Depth (Logarithmic Gray Scale)', {}],
            ['sensor.camera.semantic_segmentation', cc.Raw, 'Camera Semantic Segmentation (Raw)', {}],
            ['sensor.camera.semantic_segmentation', cc.CityScapesPalette,
                'Camera Semantic Segmentation (CityScapes Palette)', {}],
            ['sensor.lidar.ray_cast', None, 'Lidar (Ray-Cast)', {'range': '50'}],
            ['sensor.camera.dvs', cc.Raw, 'Dynamic Vision Sensor', {}],
            ['sensor.camera.rgb', cc.Raw, 'Camera RGB Distorted',
                {'lens_circle_multiplier': '3.0',
                'lens_circle_falloff': '3.0',
                'chromatic_aberration_intensity': '0.5',
                'chromatic_aberration_offset': '0'}],
            ['sensor.camera.optical_flow', cc.Raw, 'Optical Flow', {}],
        ]
        world = self._parent.get_world()
        bp_library = world.get_blueprint_library()
        for item in self.sensors:
            bp = bp_library.find(item[0])
            if item[0].startswith('sensor.camera'):
                bp.set_attribute('image_size_x', str(hud.dim[0]))
                bp.set_attribute('image_size_y', str(hud.dim[1]))
                if bp.has_attribute('gamma'):
                    bp.set_attribute('gamma', str(gamma_correction))
                for attr_name, attr_value in item[3].items():
                    bp.set_attribute(attr_name, attr_value)
            elif item[0].startswith('sensor.lidar'):
                self.lidar_range = 50
                for attr_name, attr_value in item[3].items():
                    bp.set_attribute(attr_name, attr_value)
                    if attr_name == 'range':
                        self.lidar_range = float(attr_value)
            # The configured blueprint is appended so set_sensor can spawn it.
            item.append(bp)
        self.index = None

    def toggle_camera(self):
        """Cycle to the next mount transform, keeping the current sensor."""
        self.transform_index = (self.transform_index + 1) % len(self._camera_transforms)
        self.set_sensor(self.index, notify=False, force_respawn=True)

    def set_sensor(self, index, notify=True, force_respawn=False):
        """Activate sensor *index* (modulo the sensor list), respawning the
        sensor actor only when the display name changes or on request."""
        index = index % len(self.sensors)
        needs_respawn = True if self.index is None else \
            (force_respawn or (self.sensors[index][2] != self.sensors[self.index][2]))
        if needs_respawn:
            if self.sensor is not None:
                self.sensor.destroy()
                self.surface = None
            self.sensor = self._parent.get_world().spawn_actor(
                self.sensors[index][-1],
                self._camera_transforms[self.transform_index][0],
                attach_to=self._parent,
                attachment_type=self._camera_transforms[self.transform_index][1])
            # We need to pass the lambda a weak reference to self to avoid
            # circular reference.
            weak_self = weakref.ref(self)
            self.sensor.listen(lambda image: CameraManager._parse_image(weak_self, image))
        if notify:
            self.hud.notification(self.sensors[index][2])
        self.index = index

    def next_sensor(self):
        """Activate the next sensor in the list."""
        self.set_sensor(self.index + 1)

    def toggle_recording(self):
        """Toggle saving of incoming frames to disk."""
        self.recording = not self.recording
        self.hud.notification('Recording %s' % ('On' if self.recording else 'Off'))

    def render(self, display):
        """Blit the most recent frame (if any) onto the pygame display."""
        if self.surface is not None:
            display.blit(self.surface, (0, 0))

    @staticmethod
    def scale_steer(steer):
        """Clamp a predicted steering value into CARLA's [-1, 1] range."""
        if (steer >= 1):
            return 1
        elif (steer <= -1):
            return -1
        else:
            return steer

    @staticmethod
    def predict_steering(img_rgb):
        """Run the steering model on one RGB frame; return the raw prediction."""
        img_size = (66, 200, 3)
        input_img = resize(img_rgb, img_size[:2])
        input_img = expand_dims(input_img, 0)  # Create batch axis
        steering_pred = model.predict(input_img)[0][0]
        return steering_pred

    @staticmethod
    def _parse_image(weak_self, image):
        """Sensor callback: convert the raw frame for display and, on the
        plain RGB-camera path, steer the vehicle from the model prediction."""
        self = weak_self()
        if not self:
            return
        if self.sensors[self.index][0].startswith('sensor.lidar'):
            points = np.frombuffer(image.raw_data, dtype=np.dtype('f4'))
            points = np.reshape(points, (int(points.shape[0] / 4), 4))
            lidar_data = np.array(points[:, :2])
            lidar_data *= min(self.hud.dim) / (2.0 * self.lidar_range)
            lidar_data += (0.5 * self.hud.dim[0], 0.5 * self.hud.dim[1])
            lidar_data = np.fabs(lidar_data)  # pylint: disable=E1111
            lidar_data = lidar_data.astype(np.int32)
            lidar_data = np.reshape(lidar_data, (-1, 2))
            lidar_img_size = (self.hud.dim[0], self.hud.dim[1], 3)
            lidar_img = np.zeros((lidar_img_size), dtype=np.uint8)
            lidar_img[tuple(lidar_data.T)] = (255, 255, 255)
            self.surface = pygame.surfarray.make_surface(lidar_img)
        elif self.sensors[self.index][0].startswith('sensor.camera.dvs'):
            # Example of converting the raw_data from a carla.DVSEventArray
            # sensor into a NumPy array and using it as an image.
            # Fix: np.bool was removed in NumPy 1.24; the builtin bool is
            # the correct dtype for the polarity field.
            dvs_events = np.frombuffer(image.raw_data, dtype=np.dtype([
                ('x', np.uint16), ('y', np.uint16), ('t', np.int64), ('pol', bool)]))
            dvs_img = np.zeros((image.height, image.width, 3), dtype=np.uint8)
            # Blue is positive, red is negative
            dvs_img[dvs_events[:]['y'], dvs_events[:]['x'], dvs_events[:]['pol'] * 2] = 255
            self.surface = pygame.surfarray.make_surface(dvs_img.swapaxes(0, 1))
        elif self.sensors[self.index][0].startswith('sensor.camera.optical_flow'):
            image = image.get_color_coded_flow()
            array = np.frombuffer(image.raw_data, dtype=np.dtype("uint8"))
            array = np.reshape(array, (image.height, image.width, 4))
            array = array[:, :, :3]
            array = array[:, :, ::-1]
            self.surface = pygame.surfarray.make_surface(array.swapaxes(0, 1))
        else:
            image.convert(self.sensors[self.index][1])
            array = np.frombuffer(image.raw_data, dtype=np.dtype("uint8"))
            array = np.reshape(array, (image.height, image.width, 4))  # RGBA
            array = array[:, :, :3]  # RGBA -> RGB
            # array = array[:, :, ::-1]
            # Downscale to the resolution the prediction pipeline expects.
            img_rgb = cv2.resize(np.float32(array), (320, 180))
            pred_steering = CameraManager.predict_steering(img_rgb)
            print('before scale', pred_steering)
            # Empirical normalization of the raw network output.
            # TODO confirm the /70 factor against the model's training setup.
            pred_steering /= 70
            print('after scale', pred_steering)
            pred_steering = CameraManager.scale_steer(pred_steering)
            print("Predicted steering: ", pred_steering)
            # Bug fix: the clamped prediction was computed but a constant
            # steer=1 was applied; apply the prediction instead.
            self._parent.apply_control(
                carla.VehicleControl(throttle=0.9, steer=float(pred_steering)))
            self.surface = pygame.surfarray.make_surface(array.swapaxes(0, 1))
        if self.recording:
            image.save_to_disk('_out/%08d' % image.frame)
# ==============================================================================
# -- game_loop() ---------------------------------------------------------------
# ==============================================================================
def game_loop(args):
    """Create the pygame window, connect to the CARLA server and run the
    input/render loop until the user quits.

    In --sync mode this client drives the simulation at a fixed 0.05 s
    step; the server's original settings are restored on exit, and the
    player actor and pygame are always torn down in the finally block.
    """
    pygame.init()
    pygame.font.init()
    world = None
    original_settings = None
    try:
        client = carla.Client(args.host, args.port)
        client.set_timeout(20.0)
        sim_world = client.get_world()
        if args.sync:
            # Remember the server settings so they can be restored later.
            original_settings = sim_world.get_settings()
            settings = sim_world.get_settings()
            if not settings.synchronous_mode:
                settings.synchronous_mode = True
                settings.fixed_delta_seconds = 0.05
            sim_world.apply_settings(settings)
            traffic_manager = client.get_trafficmanager()
            traffic_manager.set_synchronous_mode(True)
        if args.autopilot and not sim_world.get_settings().synchronous_mode:
            print("WARNING: You are currently in asynchronous mode and could "
                  "experience some issues with the traffic simulation")
        # Remove all layer for fast rendering
        sim_world.unload_map_layer(carla.MapLayer.All)
        # settings = sim_world.get_settings()
        # settings.fixed_delta_seconds = None # Set a variable time-step
        # sim_world.apply_settings(settings)
        display = pygame.display.set_mode(
            (args.width, args.height),
            pygame.HWSURFACE | pygame.DOUBLEBUF)
        display.fill((0,0,0))
        pygame.display.flip()
        hud = HUD(args.width, args.height)
        world = World(sim_world, hud, args)
        controller = KeyboardControl(world, args.autopilot)
        # Advance one frame so sensors produce data before the loop starts.
        if args.sync:
            sim_world.tick()
        else:
            sim_world.wait_for_tick()
        clock = pygame.time.Clock()
        while True:
            if args.sync:
                sim_world.tick()
            clock.tick_busy_loop(60)
            if controller.parse_events(client, world, clock, args.sync):
                return
            world.tick(clock)
            world.render(display)
            pygame.display.flip()
    finally:
        # Put the server back the way we found it before tearing down.
        if original_settings:
            sim_world.apply_settings(original_settings)
        if (world and world.recording_enabled):
            client.stop_recorder()
        if world is not None:
            world.destroy()
        pygame.quit()
# ==============================================================================
# -- main() --------------------------------------------------------------------
# ==============================================================================
def main():
    """Parse command-line options and run the CARLA client game loop."""
    argparser = argparse.ArgumentParser(
        description='CARLA Manual Control Client')
    # Declarative option table: (flags, add_argument keyword arguments).
    options = [
        (('-v', '--verbose'),
         dict(action='store_true', dest='debug',
              help='print debug information')),
        (('--host',),
         dict(metavar='H', default='127.0.0.1',
              help='IP of the host server (default: 127.0.0.1)')),
        (('-p', '--port'),
         dict(metavar='P', default=2000, type=int,
              help='TCP port to listen to (default: 2000)')),
        (('-a', '--autopilot'),
         dict(action='store_true',
              help='enable autopilot')),
        (('--res',),
         dict(metavar='WIDTHxHEIGHT', default='1280x720',
              help='window resolution (default: 1280x720)')),
        (('--filter',),
         dict(metavar='PATTERN', default='vehicle.*',
              help='actor filter (default: "vehicle.*")')),
        (('--generation',),
         dict(metavar='G', default='2',
              help='restrict to certain actor generation (values: "1","2","All" - default: "2")')),
        (('--rolename',),
         dict(metavar='NAME', default='hero',
              help='actor role name (default: "hero")')),
        (('--gamma',),
         dict(default=2.2, type=float,
              help='Gamma correction of the camera (default: 2.2)')),
        (('--sync',),
         dict(action='store_true',
              help='Activate synchronous mode execution')),
    ]
    for flags, kwargs in options:
        argparser.add_argument(*flags, **kwargs)
    args = argparser.parse_args()

    # Derive window dimensions from the WIDTHxHEIGHT resolution string.
    args.width, args.height = (int(value) for value in args.res.split('x'))

    log_level = logging.DEBUG if args.debug else logging.INFO
    logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level)
    logging.info('listening to server %s:%s', args.host, args.port)

    print(__doc__)

    try:
        game_loop(args)
    except KeyboardInterrupt:
        print('\nCancelled by user. Bye!')


if __name__ == '__main__':
    main()
| 43.409192 | 146 | 0.54878 |
from __future__ import print_function
import glob
import os
import sys
try:
sys.path.append(glob.glob('../carla/dist/carla-*%d.%d-%s.egg' % (
sys.version_info.major,
sys.version_info.minor,
'win-amd64' if os.name == 'nt' else 'linux-x86_64'))[0])
except IndexError:
pass
import carla
from carla import ColorConverter as cc
import argparse
import collections
import datetime
import logging
import math
import random
import re
import weakref
.locals import K_l
from pygame.locals import K_m
from pygame.locals import K_n
from pygame.locals import K_p
from pygame.locals import K_q
from pygame.locals import K_r
from pygame.locals import K_s
from pygame.locals import K_t
from pygame.locals import K_v
from pygame.locals import K_w
from pygame.locals import K_x
from pygame.locals import K_z
from pygame.locals import K_MINUS
from pygame.locals import K_EQUALS
except ImportError:
raise RuntimeError('cannot import pygame, make sure pygame package is installed')
try:
import numpy as np
except ImportError:
raise RuntimeError('cannot import numpy, make sure numpy package is installed')
def find_weather_presets():
    """Return [(carla.WeatherParameters preset, 'Pretty Name'), ...]."""
    # Splits CamelCase identifiers into their component words.
    camel_split = re.compile('.+?(?:(?<=[a-z])(?=[A-Z])|(?<=[A-Z])(?=[A-Z][a-z])|$)')

    def pretty(identifier):
        return ' '.join(match.group(0) for match in camel_split.finditer(identifier))

    preset_names = [n for n in dir(carla.WeatherParameters) if re.match('[A-Z].+', n)]
    return [(getattr(carla.WeatherParameters, n), pretty(n)) for n in preset_names]
def get_actor_display_name(actor, truncate=250):
    """Human-readable name from an actor's type_id, truncated with an
    ellipsis when longer than *truncate* characters."""
    words = actor.type_id.replace('_', '.').title().split('.')[1:]
    name = ' '.join(words)
    if len(name) > truncate:
        return name[:truncate - 1] + u'\u2026'
    return name
def get_actor_blueprints(world, filter, generation):
    """Return blueprints matching *filter*, optionally restricted to an
    actor *generation* ("1", "2" or "All", case-insensitive).

    A single-match filter result is returned as-is regardless of its
    generation.  An unrecognised *generation* yields an empty list with a
    printed warning.
    """
    bps = world.get_blueprint_library().filter(filter)

    if generation.lower() == "all":
        return bps

    # If the filter returns only one blueprint, assume that is the one the
    # caller intends and skip the generation check.
    if len(bps) == 1:
        return bps

    try:
        int_generation = int(generation)
        if int_generation in [1, 2]:
            bps = [x for x in bps if int(x.get_attribute('generation')) == int_generation]
            return bps
        else:
            print("   Warning! Actor Generation is not valid. No actor will be spawned.")
            return []
    except (ValueError, TypeError):
        # Narrowed from a bare ``except`` so that SystemExit/KeyboardInterrupt
        # and real programming errors are no longer silently swallowed.
        print("   Warning! Actor Generation is not valid. No actor will be spawned.")
        return []
class World(object):
    """Client-side wrapper around the CARLA world: owns the player actor,
    its sensor suite, weather and map-layer cycling, and restart logic."""

    def __init__(self, carla_world, hud, args):
        self.world = carla_world
        self.sync = args.sync
        self.actor_role_name = args.rolename
        try:
            self.map = self.world.get_map()
        except RuntimeError as error:
            print('RuntimeError: {}'.format(error))
            print('  The server could not send the OpenDRIVE (.xodr) file:')
            print('  Make sure it exists, has the same name of your town, and is correct.')
            sys.exit(1)
        self.hud = hud
        self.player = None
        self.collision_sensor = None
        self.lane_invasion_sensor = None
        self.gnss_sensor = None
        self.imu_sensor = None
        self.radar_sensor = None
        self.camera_manager = None
        self._weather_presets = find_weather_presets()
        self._weather_index = 0
        self._actor_filter = args.filter
        self._actor_generation = args.generation
        self._gamma = args.gamma
        # restart() spawns the player and attaches all sensors.
        self.restart()
        self.world.on_tick(hud.on_world_tick)
        self.recording_enabled = False
        self.recording_start = 0
        self.constant_velocity_enabled = False
        self.show_vehicle_telemetry = False
        self.current_map_layer = 0
        self.map_layer_names = [
            carla.MapLayer.NONE,
            carla.MapLayer.Buildings,
            carla.MapLayer.Decals,
            carla.MapLayer.Foliage,
            carla.MapLayer.Ground,
            carla.MapLayer.ParkedVehicles,
            carla.MapLayer.Particles,
            carla.MapLayer.Props,
            carla.MapLayer.StreetLights,
            carla.MapLayer.Walls,
            carla.MapLayer.All
        ]

    def restart(self):
        """(Re)spawn the player vehicle and re-attach every sensor,
        preserving the previously selected camera index/transform."""
        self.player_max_speed = 1.589
        self.player_max_speed_fast = 3.713
        # Keep the current camera settings across the respawn.
        cam_index = self.camera_manager.index if self.camera_manager is not None else 0
        cam_pos_index = self.camera_manager.transform_index if self.camera_manager is not None else 0
        blueprint = random.choice(get_actor_blueprints(self.world, self._actor_filter, self._actor_generation))
        blueprint.set_attribute('role_name', self.actor_role_name)
        if blueprint.has_attribute('color'):
            color = random.choice(blueprint.get_attribute('color').recommended_values)
            blueprint.set_attribute('color', color)
        if blueprint.has_attribute('driver_id'):
            driver_id = random.choice(blueprint.get_attribute('driver_id').recommended_values)
            blueprint.set_attribute('driver_id', driver_id)
        if blueprint.has_attribute('is_invincible'):
            blueprint.set_attribute('is_invincible', 'true')
        # Blueprint-recommended walking/running speeds (walkers only).
        if blueprint.has_attribute('speed'):
            self.player_max_speed = float(blueprint.get_attribute('speed').recommended_values[1])
            self.player_max_speed_fast = float(blueprint.get_attribute('speed').recommended_values[2])
        # Respawn slightly above the old position when a player exists.
        if self.player is not None:
            spawn_point = self.player.get_transform()
            spawn_point.location.z += 2.0
            spawn_point.rotation.roll = 0.0
            spawn_point.rotation.pitch = 0.0
            self.destroy()
            self.player = self.world.try_spawn_actor(blueprint, spawn_point)
            self.modify_vehicle_physics(self.player)
        # Retry random spawn points until the spawn succeeds.
        while self.player is None:
            if not self.map.get_spawn_points():
                print('There are no spawn points available in your map/town.')
                print('Please add some Vehicle Spawn Point to your UE4 scene.')
                sys.exit(1)
            spawn_points = self.map.get_spawn_points()
            spawn_point = random.choice(spawn_points) if spawn_points else carla.Transform()
            self.player = self.world.try_spawn_actor(blueprint, spawn_point)
            self.modify_vehicle_physics(self.player)
        # Set up the sensors attached to the new player.
        self.collision_sensor = CollisionSensor(self.player, self.hud)
        self.lane_invasion_sensor = LaneInvasionSensor(self.player, self.hud)
        self.gnss_sensor = GnssSensor(self.player)
        self.imu_sensor = IMUSensor(self.player)
        self.camera_manager = CameraManager(self.player, self.hud, self._gamma)
        self.camera_manager.transform_index = cam_pos_index
        self.camera_manager.set_sensor(cam_index, notify=False)
        actor_type = get_actor_display_name(self.player)
        self.hud.notification(actor_type)
        if self.sync:
            self.world.tick()
        else:
            self.world.wait_for_tick()

    def next_weather(self, reverse=False):
        """Cycle to the next (or previous) weather preset."""
        self._weather_index += -1 if reverse else 1
        self._weather_index %= len(self._weather_presets)
        preset = self._weather_presets[self._weather_index]
        self.hud.notification('Weather: %s' % preset[1])
        self.player.get_world().set_weather(preset[0])

    def next_map_layer(self, reverse=False):
        """Select the next (or previous) map layer without loading it."""
        self.current_map_layer += -1 if reverse else 1
        self.current_map_layer %= len(self.map_layer_names)
        selected = self.map_layer_names[self.current_map_layer]
        self.hud.notification('LayerMap selected: %s' % selected)

    def load_map_layer(self, unload=False):
        """Load (or unload) the currently selected map layer."""
        selected = self.map_layer_names[self.current_map_layer]
        if unload:
            self.hud.notification('Unloading map layer: %s' % selected)
            self.world.unload_map_layer(selected)
        else:
            self.hud.notification('Loading map layer: %s' % selected)
            self.world.load_map_layer(selected)

    def toggle_radar(self):
        """Attach the radar sensor if absent, otherwise destroy it."""
        if self.radar_sensor is None:
            self.radar_sensor = RadarSensor(self.player)
        elif self.radar_sensor.sensor is not None:
            self.radar_sensor.sensor.destroy()
            self.radar_sensor = None

    def modify_vehicle_physics(self, actor):
        # If actor is not a vehicle (or spawn failed), physics control is
        # unavailable; silently skip in that case.
        try:
            physics_control = actor.get_physics_control()
            physics_control.use_sweep_wheel_collision = True
            actor.apply_physics_control(physics_control)
        except Exception:
            pass

    def tick(self, clock):
        """Forward the per-frame tick to the HUD."""
        self.hud.tick(self, clock)

    def render(self, display):
        """Draw the active camera view, then the HUD overlay."""
        self.camera_manager.render(display)
        self.hud.render(display)

    def destroy_sensors(self):
        """Destroy only the camera sensor (used when switching for replay)."""
        self.camera_manager.sensor.destroy()
        self.camera_manager.sensor = None
        self.camera_manager.index = None

    def destroy(self):
        """Stop and destroy every attached sensor, then the player actor."""
        if self.radar_sensor is not None:
            self.toggle_radar()
        sensors = [
            self.camera_manager.sensor,
            self.collision_sensor.sensor,
            self.lane_invasion_sensor.sensor,
            self.gnss_sensor.sensor,
            self.imu_sensor.sensor]
        for sensor in sensors:
            if sensor is not None:
                sensor.stop()
                sensor.destroy()
        if self.player is not None:
            self.player.destroy()
class KeyboardControl(object):
    """Translates pygame keyboard events into vehicle/walker control,
    including autopilot, light-state, recording and replay shortcuts."""

    def __init__(self, world, start_in_autopilot):
        self._autopilot_enabled = start_in_autopilot
        if isinstance(world.player, carla.Vehicle):
            self._control = carla.VehicleControl()
            self._lights = carla.VehicleLightState.NONE
            world.player.set_autopilot(self._autopilot_enabled)
            world.player.set_light_state(self._lights)
        elif isinstance(world.player, carla.Walker):
            self._control = carla.WalkerControl()
            # Walkers have no autopilot.
            self._autopilot_enabled = False
            self._rotation = world.player.get_transform().rotation
        else:
            raise NotImplementedError("Actor type not supported")
        self._steer_cache = 0.0
        world.hud.notification("Press 'H' or '?' for help.", seconds=4.0)

    def parse_events(self, client, world, clock, sync_mode):
        """Handle one frame of pygame events; return True when quitting."""
        if isinstance(self._control, carla.VehicleControl):
            current_lights = self._lights
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                return True
            elif event.type == pygame.KEYUP:
                if self._is_quit_shortcut(event.key):
                    return True
                elif event.key == K_BACKSPACE:
                    if self._autopilot_enabled:
                        world.player.set_autopilot(False)
                        world.restart()
                        world.player.set_autopilot(True)
                    else:
                        world.restart()
                elif event.key == K_F1:
                    world.hud.toggle_info()
                elif event.key == K_v and pygame.key.get_mods() & KMOD_SHIFT:
                    world.next_map_layer(reverse=True)
                elif event.key == K_v:
                    world.next_map_layer()
                elif event.key == K_b and pygame.key.get_mods() & KMOD_SHIFT:
                    world.load_map_layer(unload=True)
                elif event.key == K_b:
                    # NOTE(review): this also unloads (identical to Shift+B);
                    # upstream manual_control uses load here - confirm intent.
                    world.load_map_layer(unload = True)
                elif event.key == K_h or (event.key == K_SLASH and pygame.key.get_mods() & KMOD_SHIFT):
                    world.hud.help.toggle()
                elif event.key == K_TAB:
                    world.camera_manager.toggle_camera()
                elif event.key == K_c and pygame.key.get_mods() & KMOD_SHIFT:
                    world.next_weather(reverse=True)
                elif event.key == K_c:
                    world.next_weather()
                elif event.key == K_g:
                    world.toggle_radar()
                elif event.key == K_BACKQUOTE:
                    world.camera_manager.next_sensor()
                elif event.key == K_n:
                    world.camera_manager.next_sensor()
                elif event.key == K_w and (pygame.key.get_mods() & KMOD_CTRL):
                    if world.constant_velocity_enabled:
                        world.player.disable_constant_velocity()
                        world.constant_velocity_enabled = False
                        world.hud.notification("Disabled Constant Velocity Mode")
                    else:
                        # 17 m/s forward velocity (~60 km/h).
                        world.player.enable_constant_velocity(carla.Vector3D(17, 0, 0))
                        world.constant_velocity_enabled = True
                        world.hud.notification("Enabled Constant Velocity Mode at 60 km/h")
                elif event.key == K_t:
                    if world.show_vehicle_telemetry:
                        world.player.show_debug_telemetry(False)
                        world.show_vehicle_telemetry = False
                        world.hud.notification("Disabled Vehicle Telemetry")
                    else:
                        try:
                            world.player.show_debug_telemetry(True)
                            world.show_vehicle_telemetry = True
                            world.hud.notification("Enabled Vehicle Telemetry")
                        except Exception:
                            pass
                elif event.key > K_0 and event.key <= K_9:
                    # Digit keys select sensors; Ctrl offsets by 9.
                    index_ctrl = 0
                    if pygame.key.get_mods() & KMOD_CTRL:
                        index_ctrl = 9
                    world.camera_manager.set_sensor(event.key - 1 - K_0 + index_ctrl)
                elif event.key == K_r and not (pygame.key.get_mods() & KMOD_CTRL):
                    world.camera_manager.toggle_recording()
                elif event.key == K_r and (pygame.key.get_mods() & KMOD_CTRL):
                    if (world.recording_enabled):
                        client.stop_recorder()
                        world.recording_enabled = False
                        world.hud.notification("Recorder is OFF")
                    else:
                        client.start_recorder("manual_recording.rec")
                        world.recording_enabled = True
                        world.hud.notification("Recorder is ON")
                elif event.key == K_p and (pygame.key.get_mods() & KMOD_CTRL):
                    # Ctrl+P: stop recording and replay the recorded file,
                    # re-selecting the current camera afterwards.
                    client.stop_recorder()
                    world.recording_enabled = False
                    current_index = world.camera_manager.index
                    world.destroy_sensors()
                    self._autopilot_enabled = False
                    world.player.set_autopilot(self._autopilot_enabled)
                    world.hud.notification("Replaying file 'manual_recording.rec'")
                    client.replay_file("manual_recording.rec", world.recording_start, 0, 0)
                    world.camera_manager.set_sensor(current_index)
                elif event.key == K_MINUS and (pygame.key.get_mods() & KMOD_CTRL):
                    # Ctrl+- rewinds the replay start time (Shift: by 10 s).
                    if pygame.key.get_mods() & KMOD_SHIFT:
                        world.recording_start -= 10
                    else:
                        world.recording_start -= 1
                    world.hud.notification("Recording start time is %d" % (world.recording_start))
                elif event.key == K_EQUALS and (pygame.key.get_mods() & KMOD_CTRL):
                    if pygame.key.get_mods() & KMOD_SHIFT:
                        world.recording_start += 10
                    else:
                        world.recording_start += 1
                    world.hud.notification("Recording start time is %d" % (world.recording_start))
                if isinstance(self._control, carla.VehicleControl):
                    # Vehicle-only shortcuts: gears, autopilot and lights.
                    if event.key == K_q:
                        self._control.gear = 1 if self._control.reverse else -1
                    elif event.key == K_m:
                        self._control.manual_gear_shift = not self._control.manual_gear_shift
                        self._control.gear = world.player.get_control().gear
                        world.hud.notification('%s Transmission' %
                                               ('Manual' if self._control.manual_gear_shift else 'Automatic'))
                    elif self._control.manual_gear_shift and event.key == K_COMMA:
                        self._control.gear = max(-1, self._control.gear - 1)
                    elif self._control.manual_gear_shift and event.key == K_PERIOD:
                        self._control.gear = self._control.gear + 1
                    elif event.key == K_p and not pygame.key.get_mods() & KMOD_CTRL:
                        if not self._autopilot_enabled and not sync_mode:
                            print("WARNING: You are currently in asynchronous mode and could "
                                  "experience some issues with the traffic simulation")
                        self._autopilot_enabled = not self._autopilot_enabled
                        world.player.set_autopilot(self._autopilot_enabled)
                        world.hud.notification(
                            'Autopilot %s' % ('On' if self._autopilot_enabled else 'Off'))
                    elif event.key == K_l and pygame.key.get_mods() & KMOD_CTRL:
                        current_lights ^= carla.VehicleLightState.Special1
                    elif event.key == K_l and pygame.key.get_mods() & KMOD_SHIFT:
                        current_lights ^= carla.VehicleLightState.HighBeam
                    elif event.key == K_l:
                        # 'L' cycles: off -> position -> low beam -> fog -> off.
                        if not self._lights & carla.VehicleLightState.Position:
                            world.hud.notification("Position lights")
                            current_lights |= carla.VehicleLightState.Position
                        else:
                            world.hud.notification("Low beam lights")
                            current_lights |= carla.VehicleLightState.LowBeam
                        if self._lights & carla.VehicleLightState.LowBeam:
                            world.hud.notification("Fog lights")
                            current_lights |= carla.VehicleLightState.Fog
                        if self._lights & carla.VehicleLightState.Fog:
                            world.hud.notification("Lights off")
                            current_lights ^= carla.VehicleLightState.Position
                            current_lights ^= carla.VehicleLightState.LowBeam
                            current_lights ^= carla.VehicleLightState.Fog
                    elif event.key == K_i:
                        current_lights ^= carla.VehicleLightState.Interior
                    elif event.key == K_z:
                        current_lights ^= carla.VehicleLightState.LeftBlinker
                    elif event.key == K_x:
                        current_lights ^= carla.VehicleLightState.RightBlinker
        if not self._autopilot_enabled:
            if isinstance(self._control, carla.VehicleControl):
                self._parse_vehicle_keys(pygame.key.get_pressed(), clock.get_time())
                self._control.reverse = self._control.gear < 0
                # Brake/Reverse lights follow the applied control.
                if self._control.brake:
                    current_lights |= carla.VehicleLightState.Brake
                else:
                    current_lights &= ~carla.VehicleLightState.Brake
                if self._control.reverse:
                    current_lights |= carla.VehicleLightState.Reverse
                else:
                    current_lights &= ~carla.VehicleLightState.Reverse
                # Only push the light state to the server when it changed.
                if current_lights != self._lights:
                    self._lights = current_lights
                    world.player.set_light_state(carla.VehicleLightState(self._lights))
            elif isinstance(self._control, carla.WalkerControl):
                self._parse_walker_keys(pygame.key.get_pressed(), clock.get_time(), world)
            world.player.apply_control(self._control)

    def _parse_vehicle_keys(self, keys, milliseconds):
        """Map held-down keys to throttle/brake/steer for a vehicle."""
        if keys[K_UP] or keys[K_w]:
            self._control.throttle = min(self._control.throttle + 0.01, 1.00)
        else:
            self._control.throttle = 0.0
        if keys[K_DOWN] or keys[K_s]:
            self._control.brake = min(self._control.brake + 0.2, 1)
        else:
            self._control.brake = 0
        # Steering ramps with frame time; flipping direction resets it.
        steer_increment = 5e-4 * milliseconds
        if keys[K_LEFT] or keys[K_a]:
            if self._steer_cache > 0:
                self._steer_cache = 0
            else:
                self._steer_cache -= steer_increment
        elif keys[K_RIGHT] or keys[K_d]:
            if self._steer_cache < 0:
                self._steer_cache = 0
            else:
                self._steer_cache += steer_increment
        else:
            self._steer_cache = 0.0
        self._steer_cache = min(0.7, max(-0.7, self._steer_cache))
        self._control.steer = round(self._steer_cache, 1)
        self._control.hand_brake = keys[K_SPACE]

    def _parse_walker_keys(self, keys, milliseconds, world):
        """Map held-down keys to walker speed/heading/jump."""
        self._control.speed = 0.0
        if keys[K_DOWN] or keys[K_s]:
            self._control.speed = 0.0
        if keys[K_LEFT] or keys[K_a]:
            self._control.speed = .01
            self._rotation.yaw -= 0.08 * milliseconds
        if keys[K_RIGHT] or keys[K_d]:
            self._control.speed = .01
            self._rotation.yaw += 0.08 * milliseconds
        if keys[K_UP] or keys[K_w]:
            self._control.speed = world.player_max_speed_fast if pygame.key.get_mods() & KMOD_SHIFT else world.player_max_speed
        self._control.jump = keys[K_SPACE]
        self._rotation.yaw = round(self._rotation.yaw, 1)
        self._control.direction = self._rotation.get_forward_vector()

    @staticmethod
    def _is_quit_shortcut(key):
        """True for ESC or Ctrl+Q."""
        return (key == K_ESCAPE) or (key == K_q and pygame.key.get_mods() & KMOD_CTRL)
class HUD(object):
    """On-screen head-up display: telemetry panel, notifications and help."""
    def __init__(self, width, height):
        # (width, height) of the pygame window, in pixels.
        self.dim = (width, height)
        font = pygame.font.Font(pygame.font.get_default_font(), 20)
        # Pick a monospaced font; fall back to the first match if
        # 'ubuntumono' is not installed on this system.
        font_name = 'courier' if os.name == 'nt' else 'mono'
        fonts = [x for x in pygame.font.get_fonts() if font_name in x]
        default_font = 'ubuntumono'
        mono = default_font if default_font in fonts else fonts[0]
        mono = pygame.font.match_font(mono)
        self._font_mono = pygame.font.Font(mono, 12 if os.name == 'nt' else 14)
        self._notifications = FadingText(font, (width, 40), (0, height - 40))
        self.help = HelpText(pygame.font.Font(mono, 16), width, height)
        self.server_fps = 0
        self.frame = 0
        self.simulation_time = 0
        self._show_info = True
        self._info_text = []
        # Ticked from the server-side world callback to measure server FPS.
        self._server_clock = pygame.time.Clock()
    def on_world_tick(self, timestamp):
        """World on_tick callback: record server-side frame/time statistics."""
        self._server_clock.tick()
        self.server_fps = self._server_clock.get_fps()
        self.frame = timestamp.frame
        self.simulation_time = timestamp.elapsed_seconds
    def tick(self, world, clock):
        """Rebuild the telemetry lines for the current frame."""
        self._notifications.tick(world, clock)
        if not self._show_info:
            return
        t = world.player.get_transform()
        v = world.player.get_velocity()
        c = world.player.get_control()
        # Convert the compass reading (degrees) into a cardinal-point label.
        compass = world.imu_sensor.compass
        heading = 'N' if compass > 270.5 or compass < 89.5 else ''
        heading += 'S' if 90.5 < compass < 269.5 else ''
        heading += 'E' if 0.5 < compass < 179.5 else ''
        heading += 'W' if 180.5 < compass < 359.5 else ''
        # Normalised collision intensity over the last 200 frames, used as a
        # sparkline (a list entry in _info_text is drawn as a line graph).
        colhist = world.collision_sensor.get_collision_history()
        collision = [colhist[x + self.frame - 200] for x in range(0, 200)]
        max_col = max(1.0, max(collision))
        collision = [x / max_col for x in collision]
        vehicles = world.world.get_actors().filter('vehicle.*')
        self._info_text = [
            'Server: % 16.0f FPS' % self.server_fps,
            'Client: % 16.0f FPS' % clock.get_fps(),
            '',
            'Vehicle: % 20s' % get_actor_display_name(world.player, truncate=20),
            'Map: % 20s' % world.map.name.split('/')[-1],
            'Simulation time: % 12s' % datetime.timedelta(seconds=int(self.simulation_time)),
            '',
            'Speed: % 15.0f km/h' % (3.6 * math.sqrt(v.x**2 + v.y**2 + v.z**2)),
            u'Compass:% 17.0f\N{DEGREE SIGN} % 2s' % (compass, heading),
            'Accelero: (%5.1f,%5.1f,%5.1f)' % (world.imu_sensor.accelerometer),
            'Gyroscop: (%5.1f,%5.1f,%5.1f)' % (world.imu_sensor.gyroscope),
            'Location:% 20s' % ('(% 5.1f, % 5.1f)' % (t.location.x, t.location.y)),
            'GNSS:% 24s' % ('(% 2.6f, % 3.6f)' % (world.gnss_sensor.lat, world.gnss_sensor.lon)),
            'Height: % 18.0f m' % t.location.z,
            '']
        # Tuple entries render as labelled gauges/checkboxes (see render()).
        if isinstance(c, carla.VehicleControl):
            self._info_text += [
                ('Throttle:', c.throttle, 0.0, 1.0),
                ('Steer:', c.steer, -1.0, 1.0),
                ('Brake:', c.brake, 0.0, 1.0),
                ('Reverse:', c.reverse),
                ('Hand brake:', c.hand_brake),
                ('Manual:', c.manual_gear_shift),
                'Gear: %s' % {-1: 'R', 0: 'N'}.get(c.gear, c.gear)]
        elif isinstance(c, carla.WalkerControl):
            self._info_text += [
                ('Speed:', c.speed, 0.0, 5.556),
                ('Jump:', c.jump)]
        self._info_text += [
            '',
            'Collision:',
            collision,
            '',
            'Number of vehicles: % 8d' % len(vehicles)]
        if len(vehicles) > 1:
            self._info_text += ['Nearby vehicles:']
            distance = lambda l: math.sqrt((l.x - t.location.x)**2 + (l.y - t.location.y)**2 + (l.z - t.location.z)**2)
            vehicles = [(distance(x.get_location()), x) for x in vehicles if x.id != world.player.id]
            # List other vehicles by distance, closest first, within 200 m.
            for d, vehicle in sorted(vehicles, key=lambda vehicles: vehicles[0]):
                if d > 200.0:
                    break
                vehicle_type = get_actor_display_name(vehicle, truncate=22)
                self._info_text.append('% 4dm %s' % (d, vehicle_type))
    def toggle_info(self):
        """Show/hide the telemetry panel."""
        self._show_info = not self._show_info
    def notification(self, text, seconds=2.0):
        """Display a transient notification at the bottom of the screen."""
        self._notifications.set_text(text, seconds=seconds)
    def error(self, text):
        """Display a transient error notification in red."""
        self._notifications.set_text('Error: %s' % text, (255, 0, 0))
    def render(self, display):
        """Draw the info panel, notifications and (if toggled) help overlay."""
        if self._show_info:
            info_surface = pygame.Surface((220, self.dim[1]))
            info_surface.set_alpha(100)
            display.blit(info_surface, (0, 0))
            v_offset = 4
            bar_h_offset = 100
            bar_width = 106
            for item in self._info_text:
                if v_offset + 18 > self.dim[1]:
                    break
                if isinstance(item, list):
                    # A list of normalised values renders as a sparkline.
                    if len(item) > 1:
                        points = [(x + 8, v_offset + 8 + (1.0 - y) * 30) for x, y in enumerate(item)]
                        pygame.draw.lines(display, (255, 136, 0), False, points, 2)
                    item = None
                    v_offset += 18
                elif isinstance(item, tuple):
                    if isinstance(item[1], bool):
                        # (label, bool) renders as a checkbox.
                        rect = pygame.Rect((bar_h_offset, v_offset + 8), (6, 6))
                        pygame.draw.rect(display, (255, 255, 255), rect, 0 if item[1] else 1)
                    else:
                        # (label, value, min, max) renders as a gauge bar.
                        rect_border = pygame.Rect((bar_h_offset, v_offset + 8), (bar_width, 6))
                        pygame.draw.rect(display, (255, 255, 255), rect_border, 1)
                        f = (item[1] - item[2]) / (item[3] - item[2])
                        if item[2] < 0.0:
                            rect = pygame.Rect((bar_h_offset + f * (bar_width - 6), v_offset + 8), (6, 6))
                        else:
                            rect = pygame.Rect((bar_h_offset, v_offset + 8), (f * bar_width, 6))
                        pygame.draw.rect(display, (255, 255, 255), rect)
                    item = item[0]
                if item:
                    surface = self._font_mono.render(item, True, (255, 255, 255))
                    display.blit(surface, (8, v_offset))
                v_offset += 18
        self._notifications.render(display)
        self.help.render(display)
class FadingText(object):
    """Short-lived notification text that fades out over time."""

    def __init__(self, font, dim, pos):
        self.font = font
        self.dim = dim
        self.pos = pos
        self.seconds_left = 0
        self.surface = pygame.Surface(self.dim)

    def set_text(self, text, color=(255, 255, 255), seconds=2.0):
        """Replace the current notification and restart its fade timer."""
        rendered = self.font.render(text, True, color)
        self.surface = pygame.Surface(self.dim)
        self.seconds_left = seconds
        self.surface.fill((0, 0, 0, 0))
        self.surface.blit(rendered, (10, 11))

    def tick(self, _, clock):
        """Advance the fade by the wall-clock duration of the last frame."""
        elapsed = 1e-3 * clock.get_time()
        self.seconds_left = max(0.0, self.seconds_left - elapsed)
        # Alpha saturates at 255, so the text stays fully opaque until the
        # final ~0.5 s, then fades linearly to transparent.
        self.surface.set_alpha(500.0 * self.seconds_left)

    def render(self, display):
        display.blit(self.surface, self.pos)
class HelpText(object):
    """Centered overlay that renders the module docstring as a help screen."""

    def __init__(self, font, width, height):
        doc_lines = __doc__.split('\n')
        self.font = font
        self.line_space = 18
        self.dim = (780, len(doc_lines) * self.line_space + 12)
        # Centre the panel inside a width x height window.
        self.pos = (0.5 * width - 0.5 * self.dim[0], 0.5 * height - 0.5 * self.dim[1])
        self.seconds_left = 0
        self.surface = pygame.Surface(self.dim)
        self.surface.fill((0, 0, 0, 0))
        for row, line in enumerate(doc_lines):
            rendered = self.font.render(line, True, (255, 255, 255))
            self.surface.blit(rendered, (22, row * self.line_space))
        self._render = False
        self.surface.set_alpha(220)

    def toggle(self):
        """Show/hide the help overlay."""
        self._render = not self._render

    def render(self, display):
        if self._render:
            display.blit(self.surface, self.pos)
class CollisionSensor(object):
    """Attaches a collision sensor to an actor and records impact history."""

    def __init__(self, parent_actor, hud):
        self.sensor = None
        self.history = []
        self._parent = parent_actor
        self.hud = hud
        world = self._parent.get_world()
        blueprint = world.get_blueprint_library().find('sensor.other.collision')
        self.sensor = world.spawn_actor(blueprint, carla.Transform(), attach_to=self._parent)
        # A weak reference in the callback avoids a reference cycle between
        # this object and the sensor, which would keep both alive forever.
        weak_self = weakref.ref(self)
        self.sensor.listen(lambda event: CollisionSensor._on_collision(weak_self, event))

    def get_collision_history(self):
        """Return a defaultdict mapping frame number -> summed intensity."""
        per_frame = collections.defaultdict(int)
        for frame, intensity in self.history:
            per_frame[frame] += intensity
        return per_frame

    @staticmethod
    def _on_collision(weak_self, event):
        self = weak_self()
        if not self:
            return
        actor_type = get_actor_display_name(event.other_actor)
        self.hud.notification('Collision with %r' % actor_type)
        impulse = event.normal_impulse
        intensity = math.sqrt(impulse.x**2 + impulse.y**2 + impulse.z**2)
        self.history.append((event.frame, intensity))
        # Keep the history bounded to the most recent 4000 impacts.
        if len(self.history) > 4000:
            self.history.pop(0)
class LaneInvasionSensor(object):
    """Notifies on the HUD whenever the parent vehicle crosses a lane marking."""

    def __init__(self, parent_actor, hud):
        self.sensor = None
        # Lane invasion only makes sense for vehicles, not walkers.
        if parent_actor.type_id.startswith("vehicle."):
            self._parent = parent_actor
            self.hud = hud
            world = self._parent.get_world()
            blueprint = world.get_blueprint_library().find('sensor.other.lane_invasion')
            self.sensor = world.spawn_actor(blueprint, carla.Transform(), attach_to=self._parent)
            # Weak reference avoids a self<->sensor reference cycle.
            weak_self = weakref.ref(self)
            self.sensor.listen(lambda event: LaneInvasionSensor._on_invasion(weak_self, event))

    @staticmethod
    def _on_invasion(weak_self, event):
        self = weak_self()
        if not self:
            return
        lane_types = set(marking.type for marking in event.crossed_lane_markings)
        labels = ['%r' % str(lane_type).split()[-1] for lane_type in lane_types]
        self.hud.notification('Crossed line %s' % ' and '.join(labels))
class GnssSensor(object):
    """Keeps the latest GNSS latitude/longitude reading for the parent actor."""

    def __init__(self, parent_actor):
        self.sensor = None
        self._parent = parent_actor
        self.lat = 0.0
        self.lon = 0.0
        world = self._parent.get_world()
        blueprint = world.get_blueprint_library().find('sensor.other.gnss')
        mount = carla.Transform(carla.Location(x=1.0, z=2.8))
        self.sensor = world.spawn_actor(blueprint, mount, attach_to=self._parent)
        # Weak reference avoids a self<->sensor reference cycle.
        weak_self = weakref.ref(self)
        self.sensor.listen(lambda event: GnssSensor._on_gnss_event(weak_self, event))

    @staticmethod
    def _on_gnss_event(weak_self, event):
        self = weak_self()
        if not self:
            return
        self.lat = event.latitude
        self.lon = event.longitude
class IMUSensor(object):
    """Keeps the latest accelerometer/gyroscope/compass readings."""

    def __init__(self, parent_actor):
        self.sensor = None
        self._parent = parent_actor
        self.accelerometer = (0.0, 0.0, 0.0)
        self.gyroscope = (0.0, 0.0, 0.0)
        self.compass = 0.0
        world = self._parent.get_world()
        blueprint = world.get_blueprint_library().find('sensor.other.imu')
        self.sensor = world.spawn_actor(
            blueprint, carla.Transform(), attach_to=self._parent)
        # Weak reference avoids a self<->sensor reference cycle.
        weak_self = weakref.ref(self)
        self.sensor.listen(
            lambda sensor_data: IMUSensor._IMU_callback(weak_self, sensor_data))

    @staticmethod
    def _IMU_callback(weak_self, sensor_data):
        self = weak_self()
        if not self:
            return
        # Clamp every channel into [-99.9, 99.9] so the HUD format fits.
        lo, hi = -99.9, 99.9
        clamp = lambda value: max(lo, min(hi, value))
        accel = sensor_data.accelerometer
        self.accelerometer = (clamp(accel.x), clamp(accel.y), clamp(accel.z))
        gyro = sensor_data.gyroscope
        self.gyroscope = (
            clamp(math.degrees(gyro.x)),
            clamp(math.degrees(gyro.y)),
            clamp(math.degrees(gyro.z)))
        self.compass = math.degrees(sensor_data.compass)
class RadarSensor(object):
    """Visualises radar detections as coloured debug points in the world."""

    def __init__(self, parent_actor):
        self.sensor = None
        self._parent = parent_actor
        bound_x = 0.5 + self._parent.bounding_box.extent.x
        bound_y = 0.5 + self._parent.bounding_box.extent.y
        bound_z = 0.5 + self._parent.bounding_box.extent.z
        # Detections faster than this (m/s) saturate the colour scale.
        self.velocity_range = 7.5
        world = self._parent.get_world()
        self.debug = world.debug
        blueprint = world.get_blueprint_library().find('sensor.other.radar')
        blueprint.set_attribute('horizontal_fov', str(35))
        blueprint.set_attribute('vertical_fov', str(20))
        mount = carla.Transform(
            carla.Location(x=bound_x + 0.05, z=bound_z+0.05),
            carla.Rotation(pitch=5))
        self.sensor = world.spawn_actor(blueprint, mount, attach_to=self._parent)
        # Weak reference avoids a self<->sensor reference cycle.
        weak_self = weakref.ref(self)
        self.sensor.listen(
            lambda radar_data: RadarSensor._Radar_callback(weak_self, radar_data))

    @staticmethod
    def _Radar_callback(weak_self, radar_data):
        self = weak_self()
        if not self:
            return
        current_rot = radar_data.transform.rotation
        for detect in radar_data:
            azi = math.degrees(detect.azimuth)
            alt = math.degrees(detect.altitude)
            # Rotate a forward vector of the detection's depth into the
            # detection's direction (transform mutates fw_vec in place).
            fw_vec = carla.Vector3D(x=detect.depth - 0.25)
            carla.Transform(
                carla.Location(),
                carla.Rotation(
                    pitch=current_rot.pitch + alt,
                    yaw=current_rot.yaw + azi,
                    roll=current_rot.roll)).transform(fw_vec)

            def clamp(min_v, max_v, value):
                return max(min_v, min(value, max_v))

            # Colour encodes radial velocity: red approaching, blue receding.
            norm_velocity = detect.velocity / self.velocity_range
            r = int(clamp(0.0, 1.0, 1.0 - norm_velocity) * 255.0)
            g = int(clamp(0.0, 1.0, 1.0 - abs(norm_velocity)) * 255.0)
            b = int(abs(clamp(- 1.0, 0.0, - 1.0 - norm_velocity)) * 255.0)
            self.debug.draw_point(
                radar_data.transform.location + fw_vec,
                size=0.075,
                life_time=0.06,
                persistent_lines=False,
                color=carla.Color(r, g, b))
class CameraManager(object):
    """Owns the active display sensor (camera/lidar/etc.), converts each frame
    for rendering, and drives the vehicle from the steering model's output.

    Bug fixes relative to the original:
      * the predicted steering is now actually applied (the original computed,
        scaled and clamped ``pred_steering`` and then sent ``steer=1``, i.e. a
        permanent full-right lock);
      * ``np.bool`` (removed in NumPy >= 1.24) replaced by the builtin ``bool``.
    """

    # Empirical scale dividing the raw network output before clamping.
    STEER_SCALE = 70.0
    # Constant throttle applied while the model is steering.
    MODEL_THROTTLE = 0.9

    def __init__(self, parent_actor, hud, gamma_correction):
        self.sensor = None
        self.surface = None
        self._parent = parent_actor
        self.hud = hud
        self.recording = False
        bound_x = 0.5 + self._parent.bounding_box.extent.x
        bound_y = 0.5 + self._parent.bounding_box.extent.y
        bound_z = 0.5 + self._parent.bounding_box.extent.z
        Attachment = carla.AttachmentType
        # Camera rigs differ between vehicles and pedestrians.
        if not self._parent.type_id.startswith("walker.pedestrian"):
            self._camera_transforms = [
                (carla.Transform(carla.Location(x=-2.0*bound_x, y=+0.0*bound_y, z=2.0*bound_z), carla.Rotation(pitch=8.0)), Attachment.SpringArm),
                (carla.Transform(carla.Location(x=+0.8*bound_x, y=+0.0*bound_y, z=1.3*bound_z)), Attachment.Rigid),
                (carla.Transform(carla.Location(x=+1.9*bound_x, y=+1.0*bound_y, z=1.2*bound_z)), Attachment.SpringArm),
                (carla.Transform(carla.Location(x=-2.8*bound_x, y=+0.0*bound_y, z=4.6*bound_z), carla.Rotation(pitch=6.0)), Attachment.SpringArm),
                (carla.Transform(carla.Location(x=-1.0, y=-1.0*bound_y, z=0.4*bound_z)), Attachment.Rigid)]
        else:
            self._camera_transforms = [
                (carla.Transform(carla.Location(x=-2.5, z=0.0), carla.Rotation(pitch=-8.0)), Attachment.SpringArm),
                (carla.Transform(carla.Location(x=1.6, z=1.7)), Attachment.Rigid),
                (carla.Transform(carla.Location(x=2.5, y=0.5, z=0.0), carla.Rotation(pitch=-8.0)), Attachment.SpringArm),
                (carla.Transform(carla.Location(x=-4.0, z=2.0), carla.Rotation(pitch=6.0)), Attachment.SpringArm),
                (carla.Transform(carla.Location(x=0, y=-2.5, z=-0.0), carla.Rotation(yaw=90.0)), Attachment.Rigid)]
        self.transform_index = 1
        # [blueprint id, colour converter, display name, extra attributes]
        self.sensors = [
            ['sensor.camera.rgb', cc.Raw, 'Camera RGB', {}],
            ['sensor.camera.depth', cc.Raw, 'Camera Depth (Raw)', {}],
            ['sensor.camera.depth', cc.Depth, 'Camera Depth (Gray Scale)', {}],
            ['sensor.camera.depth', cc.LogarithmicDepth, 'Camera Depth (Logarithmic Gray Scale)', {}],
            ['sensor.camera.semantic_segmentation', cc.Raw, 'Camera Semantic Segmentation (Raw)', {}],
            ['sensor.camera.semantic_segmentation', cc.CityScapesPalette,
                'Camera Semantic Segmentation (CityScapes Palette)', {}],
            ['sensor.lidar.ray_cast', None, 'Lidar (Ray-Cast)', {'range': '50'}],
            ['sensor.camera.dvs', cc.Raw, 'Dynamic Vision Sensor', {}],
            ['sensor.camera.rgb', cc.Raw, 'Camera RGB Distorted',
                {'lens_circle_multiplier': '3.0',
                'lens_circle_falloff': '3.0',
                'chromatic_aberration_intensity': '0.5',
                'chromatic_aberration_offset': '0'}],
            ['sensor.camera.optical_flow', cc.Raw, 'Optical Flow', {}],
        ]
        world = self._parent.get_world()
        bp_library = world.get_blueprint_library()
        for item in self.sensors:
            bp = bp_library.find(item[0])
            if item[0].startswith('sensor.camera'):
                bp.set_attribute('image_size_x', str(hud.dim[0]))
                bp.set_attribute('image_size_y', str(hud.dim[1]))
                if bp.has_attribute('gamma'):
                    bp.set_attribute('gamma', str(gamma_correction))
                for attr_name, attr_value in item[3].items():
                    bp.set_attribute(attr_name, attr_value)
            elif item[0].startswith('sensor.lidar'):
                self.lidar_range = 50
                for attr_name, attr_value in item[3].items():
                    bp.set_attribute(attr_name, attr_value)
                    if attr_name == 'range':
                        self.lidar_range = float(attr_value)
            # Cache the configured blueprint alongside its spec entry.
            item.append(bp)
        self.index = None

    def toggle_camera(self):
        """Cycle to the next camera mount point, respawning the sensor."""
        self.transform_index = (self.transform_index + 1) % len(self._camera_transforms)
        self.set_sensor(self.index, notify=False, force_respawn=True)

    def set_sensor(self, index, notify=True, force_respawn=False):
        """Activate sensor *index*, respawning the actor only when needed."""
        index = index % len(self.sensors)
        needs_respawn = True if self.index is None else \
            (force_respawn or (self.sensors[index][2] != self.sensors[self.index][2]))
        if needs_respawn:
            if self.sensor is not None:
                self.sensor.destroy()
                self.surface = None
            self.sensor = self._parent.get_world().spawn_actor(
                self.sensors[index][-1],
                self._camera_transforms[self.transform_index][0],
                attach_to=self._parent,
                attachment_type=self._camera_transforms[self.transform_index][1])
            # Weak reference avoids a self<->sensor reference cycle.
            weak_self = weakref.ref(self)
            self.sensor.listen(lambda image: CameraManager._parse_image(weak_self, image))
        if notify:
            self.hud.notification(self.sensors[index][2])
        self.index = index

    def next_sensor(self):
        """Activate the next sensor in the list."""
        self.set_sensor(self.index + 1)

    def toggle_recording(self):
        """Toggle saving of incoming frames to disk."""
        self.recording = not self.recording
        self.hud.notification('Recording %s' % ('On' if self.recording else 'Off'))

    def render(self, display):
        if self.surface is not None:
            display.blit(self.surface, (0, 0))

    @staticmethod
    def scale_steer(steer):
        """Clamp a predicted steering value into CARLA's valid [-1, 1] range."""
        if steer >= 1:
            return 1
        elif steer <= -1:
            return -1
        else:
            return steer

    @staticmethod
    def predict_steering(img_rgb):
        """Run the (module-level) steering model on one RGB frame.

        NOTE(review): relies on module-level ``resize``, ``expand_dims`` and
        ``model`` defined elsewhere in this file; the network expects a
        66x200x3 input — confirm against the training pipeline.
        """
        img_size = (66, 200, 3)
        input_img = resize(img_rgb, img_size[:2])
        input_img = expand_dims(input_img, 0)
        steering_pred = model.predict(input_img)[0][0]
        return steering_pred

    @staticmethod
    def _parse_image(weak_self, image):
        """Sensor callback: convert the frame for display and, for plain
        camera frames, steer the vehicle from the model's prediction."""
        self = weak_self()
        if not self:
            return
        if self.sensors[self.index][0].startswith('sensor.lidar'):
            # Project the point cloud onto a top-down 2D image.
            points = np.frombuffer(image.raw_data, dtype=np.dtype('f4'))
            points = np.reshape(points, (int(points.shape[0] / 4), 4))
            lidar_data = np.array(points[:, :2])
            lidar_data *= min(self.hud.dim) / (2.0 * self.lidar_range)
            lidar_data += (0.5 * self.hud.dim[0], 0.5 * self.hud.dim[1])
            lidar_data = np.fabs(lidar_data)
            lidar_data = lidar_data.astype(np.int32)
            lidar_data = np.reshape(lidar_data, (-1, 2))
            lidar_img_size = (self.hud.dim[0], self.hud.dim[1], 3)
            lidar_img = np.zeros((lidar_img_size), dtype=np.uint8)
            lidar_img[tuple(lidar_data.T)] = (255, 255, 255)
            self.surface = pygame.surfarray.make_surface(lidar_img)
        elif self.sensors[self.index][0].startswith('sensor.camera.dvs'):
            # 'np.bool' was removed from NumPy; the builtin bool is correct.
            dvs_events = np.frombuffer(image.raw_data, dtype=np.dtype([
                ('x', np.uint16), ('y', np.uint16), ('t', np.int64), ('pol', bool)]))
            dvs_img = np.zeros((image.height, image.width, 3), dtype=np.uint8)
            # Blue for negative polarity, red for positive.
            dvs_img[dvs_events[:]['y'], dvs_events[:]['x'], dvs_events[:]['pol'] * 2] = 255
            self.surface = pygame.surfarray.make_surface(dvs_img.swapaxes(0, 1))
        elif self.sensors[self.index][0].startswith('sensor.camera.optical_flow'):
            image = image.get_color_coded_flow()
            array = np.frombuffer(image.raw_data, dtype=np.dtype("uint8"))
            array = np.reshape(array, (image.height, image.width, 4))
            array = array[:, :, :3]
            array = array[:, :, ::-1]
            self.surface = pygame.surfarray.make_surface(array.swapaxes(0, 1))
        else:
            image.convert(self.sensors[self.index][1])
            array = np.frombuffer(image.raw_data, dtype=np.dtype("uint8"))
            array = np.reshape(array, (image.height, image.width, 4))
            array = array[:, :, :3]
            img_rgb = cv2.resize(np.float32(array), (320, 180))
            pred_steering = CameraManager.predict_steering(img_rgb)
            pred_steering /= CameraManager.STEER_SCALE
            pred_steering = CameraManager.scale_steer(pred_steering)
            print("Predicted steering: ", pred_steering)
            # BUG FIX: apply the prediction instead of a hard-coded steer=1.
            self._parent.apply_control(carla.VehicleControl(
                throttle=CameraManager.MODEL_THROTTLE, steer=pred_steering))
            self.surface = pygame.surfarray.make_surface(array.swapaxes(0, 1))
        if self.recording:
            image.save_to_disk('_out/%08d' % image.frame)
def game_loop(args):
    """Run the pygame/CARLA client loop until the user quits.

    Connects to the simulator, optionally switches it into synchronous mode,
    builds the HUD/world/controller triple and then ticks + renders at up to
    60 FPS.  Simulator settings are restored and actors destroyed on exit.

    :param args: parsed argparse namespace from main().
    """
    pygame.init()
    pygame.font.init()
    world = None
    original_settings = None
    try:
        client = carla.Client(args.host, args.port)
        client.set_timeout(20.0)
        sim_world = client.get_world()
        if args.sync:
            # Remember the server settings so they can be restored on exit.
            original_settings = sim_world.get_settings()
            settings = sim_world.get_settings()
            if not settings.synchronous_mode:
                settings.synchronous_mode = True
                settings.fixed_delta_seconds = 0.05
            sim_world.apply_settings(settings)
            traffic_manager = client.get_trafficmanager()
            traffic_manager.set_synchronous_mode(True)
        if args.autopilot and not sim_world.get_settings().synchronous_mode:
            print("WARNING: You are currently in asynchronous mode and could "
                  "experience some issues with the traffic simulation")
        sim_world.unload_map_layer(carla.MapLayer.All)
        # BUG FIX: the surface was never bound (the line read
        # '= pygame.display.set_mode(...)'), so 'display' was undefined below.
        display = pygame.display.set_mode(
            (args.width, args.height),
            pygame.HWSURFACE | pygame.DOUBLEBUF)
        display.fill((0, 0, 0))
        pygame.display.flip()
        hud = HUD(args.width, args.height)
        world = World(sim_world, hud, args)
        controller = KeyboardControl(world, args.autopilot)
        # Prime the simulation once before entering the loop.
        if args.sync:
            sim_world.tick()
        else:
            sim_world.wait_for_tick()
        clock = pygame.time.Clock()
        while True:
            if args.sync:
                sim_world.tick()
            clock.tick_busy_loop(60)
            # parse_events returns truthy when the user asked to quit.
            if controller.parse_events(client, world, clock, args.sync):
                return
            world.tick(clock)
            world.render(display)
            pygame.display.flip()
    finally:
        # Always restore server state and clean up spawned actors.
        if original_settings:
            sim_world.apply_settings(original_settings)
        if (world and world.recording_enabled):
            client.stop_recorder()
        if world is not None:
            world.destroy()
        pygame.quit()
def main():
    """Parse command-line options, configure logging and start game_loop()."""
    argparser = argparse.ArgumentParser(
        description='CARLA Manual Control Client')
    argparser.add_argument(
        '-v', '--verbose',
        action='store_true',
        dest='debug',
        help='print debug information')
    argparser.add_argument(
        '--host',
        metavar='H',
        default='127.0.0.1',
        help='IP of the host server (default: 127.0.0.1)')
    argparser.add_argument(
        '-p', '--port',
        metavar='P',
        default=2000,
        type=int,
        help='TCP port to listen to (default: 2000)')
    argparser.add_argument(
        '-a', '--autopilot',
        action='store_true',
        help='enable autopilot')
    argparser.add_argument(
        '--res',
        metavar='WIDTHxHEIGHT',
        default='1280x720',
        help='window resolution (default: 1280x720)')
    argparser.add_argument(
        '--filter',
        metavar='PATTERN',
        default='vehicle.*',
        help='actor filter (default: "vehicle.*")')
    argparser.add_argument(
        '--generation',
        metavar='G',
        default='2',
        help='restrict to certain actor generation (values: "1","2","All" - default: "2")')
    argparser.add_argument(
        '--rolename',
        metavar='NAME',
        default='hero',
        help='actor role name (default: "hero")')
    argparser.add_argument(
        '--gamma',
        default=2.2,
        type=float,
        help='Gamma correction of the camera (default: 2.2)')
    argparser.add_argument(
        '--sync',
        action='store_true',
        help='Activate synchronous mode execution')
    args = argparser.parse_args()
    # Split the 'WIDTHxHEIGHT' resolution string into two ints.
    args.width, args.height = [int(x) for x in args.res.split('x')]
    log_level = logging.DEBUG if args.debug else logging.INFO
    logging.basicConfig(format='%(levelname)s: %(message)s', level=log_level)
    logging.info('listening to server %s:%s', args.host, args.port)
    # Show the module docstring (control reference) on startup.
    print(__doc__)
    try:
        game_loop(args)
    except KeyboardInterrupt:
        print('\nCancelled by user. Bye!')
# Script entry point: parse arguments and start the client loop.
if __name__ == '__main__':
    main()
| true | true |
f724b704c5f9a3e24eab2468a1fc1abdf02661db | 3,636 | py | Python | geist/backends/_x11_common.py | kebarr/Geist | 9bda3bc4e11ef06ebf8cb7c2b0ecd666ef40491a | [
"MIT"
] | 5 | 2015-05-01T15:58:48.000Z | 2017-04-19T03:38:25.000Z | geist/backends/_x11_common.py | tonysimpson/Geist | a1ef16d8b4c3777735008b671a50acfde3ce7bf1 | [
"MIT"
] | 1 | 2016-08-05T17:05:02.000Z | 2016-08-05T17:05:02.000Z | geist/backends/_x11_common.py | tonysimpson/Geist | a1ef16d8b4c3777735008b671a50acfde3ce7bf1 | [
"MIT"
] | 2 | 2016-09-27T13:45:31.000Z | 2017-05-21T14:08:57.000Z | from __future__ import division, absolute_import, print_function
from ooxcb.protocol import (
xtest,
)
from ooxcb.constant import (
ButtonPress,
ButtonRelease,
KeyPress,
KeyRelease,
MotionNotify
)
import ooxcb
from ooxcb.keysymdef import keysyms
import subprocess
import os
from ._common import BackendActionBuilder
xtest.mixin()
class _ActionsTransaction(object):
    """Context manager that queues backend input actions and replays them
    all at once when the with-block exits."""

    def __init__(self, backend):
        self._actions_builder = BackendActionBuilder(backend)
        self._conn = backend._conn

    def __enter__(self):
        # Callers record their actions on the returned builder.
        return self._actions_builder

    def __exit__(self, *args):
        # Flush every queued action; never suppress exceptions.
        self._actions_builder.execute()
        return False
class GeistXBase(object):
    """Shared X11 backend: synthesises keyboard/mouse input through the
    XTEST extension and spawns processes bound to this backend's display.
    """

    # Keysym name -> keysym value, as published by ooxcb.
    KEY_NAME_TO_CODE = keysyms
    # Case-insensitive fallback lookup table.
    # FIX: use .items() instead of the Python-2-only .iteritems() so the
    # module also imports on Python 3 (the __future__ imports above show
    # 2/3 compatibility is intended); .items() works on both versions.
    KEY_NAME_TO_CODE_IGNORE_CASE = {name.lower(): value
                                    for name, value in keysyms.items()}

    def __init__(self, **kwargs):
        # Accepts an optional 'display' keyword (e.g. ':0').
        display = kwargs.get('display', ':0')
        self._display = display
        self._conn = ooxcb.connect(display)
        self._root = self._conn.setup.roots[self._conn.pref_screen].root

    @property
    def display(self):
        """The X display string this backend is connected to (e.g. ':0')."""
        return self._display

    def create_process(self, command, shell=True, stdout=None, stderr=None,
                       env=None):
        """
        Execute a process using subprocess.Popen, setting the backend's DISPLAY
        """
        env = env if env is not None else dict(os.environ)
        env['DISPLAY'] = self.display
        return subprocess.Popen(command, shell=shell,
                                stdout=stdout, stderr=stderr,
                                env=env)

    def actions_transaction(self):
        """Return a context manager that batches input actions."""
        return _ActionsTransaction(self)

    def _get_key_code_from_name(self, name):
        """Map a key name to an X keycode.

        Lookup order: the special alias 'shift' (left shift), then an exact
        keysym-name match, then a case-insensitive match.  Raises ValueError
        for unknown names.
        """
        if name == 'shift':
            symb = GeistXBase.KEY_NAME_TO_CODE['Shift_L']
        elif name in GeistXBase.KEY_NAME_TO_CODE:
            symb = GeistXBase.KEY_NAME_TO_CODE[name]
        elif name.lower() in GeistXBase.KEY_NAME_TO_CODE_IGNORE_CASE:
            symb = GeistXBase.KEY_NAME_TO_CODE_IGNORE_CASE[name.lower()]
        else:
            raise ValueError('unhandled key %r' % (name,))
        return self._conn.keysyms.get_keycode(symb)

    def key_down(self, name):
        """Send a fake key-press event for the named key."""
        key_code = self._get_key_code_from_name(name)
        with self._conn.bunch():
            self._conn.xtest.fake_input_checked(
                KeyPress,
                detail=key_code
            )

    def key_up(self, name):
        """Send a fake key-release event for the named key."""
        key_code = self._get_key_code_from_name(name)
        with self._conn.bunch():
            self._conn.xtest.fake_input_checked(
                KeyRelease,
                detail=key_code
            )

    def button_down(self, button_num):
        """Send a fake mouse-button press (1=left, 2=middle, 3=right)."""
        with self._conn.bunch():
            self._conn.xtest.fake_input_checked(
                ButtonPress,
                detail=button_num
            )

    def button_up(self, button_num):
        """Send a fake mouse-button release."""
        with self._conn.bunch():
            self._conn.xtest.fake_input_checked(
                ButtonRelease,
                detail=button_num
            )

    def move(self, point):
        """Warp the pointer to (x, y) in root-window coordinates."""
        x, y = point
        with self._conn.bunch():
            self._conn.xtest.fake_input_checked(
                MotionNotify,
                rootX=x,
                rootY=y,
            )

    def cursor_position(self):
        """Return the pointer's current (x, y) root-window coordinates."""
        reply = self._root.query_pointer().reply()
        return reply.root_x, reply.root_y

    def close(self):
        """Disconnect from the X server; safe to call more than once."""
        if hasattr(self, '_conn'):
            self._conn.disconnect()
            del self._conn

    def __del__(self):
        self.close()
| 28.186047 | 79 | 0.59791 | from __future__ import division, absolute_import, print_function
from ooxcb.protocol import (
xtest,
)
from ooxcb.constant import (
ButtonPress,
ButtonRelease,
KeyPress,
KeyRelease,
MotionNotify
)
import ooxcb
from ooxcb.keysymdef import keysyms
import subprocess
import os
from ._common import BackendActionBuilder
xtest.mixin()
class _ActionsTransaction(object):
def __init__(self, backend):
self._conn = backend._conn
self._actions_builder = BackendActionBuilder(backend)
def __enter__(self):
return self._actions_builder
def __exit__(self, *args):
self._actions_builder.execute()
return False
class GeistXBase(object):
KEY_NAME_TO_CODE = keysyms
KEY_NAME_TO_CODE_IGNORE_CASE = {name.lower(): value
for name, value in keysyms.iteritems()}
def __init__(self, **kwargs):
display = kwargs.get('display', ':0')
self._display = display
self._conn = ooxcb.connect(display)
self._root = self._conn.setup.roots[self._conn.pref_screen].root
@property
def display(self):
return self._display
def create_process(self, command, shell=True, stdout=None, stderr=None,
env=None):
env = env if env is not None else dict(os.environ)
env['DISPLAY'] = self.display
return subprocess.Popen(command, shell=shell,
stdout=stdout, stderr=stderr,
env=env)
def actions_transaction(self):
return _ActionsTransaction(self)
def _get_key_code_from_name(self, name):
if name == 'shift':
symb = GeistXBase.KEY_NAME_TO_CODE['Shift_L']
elif name in GeistXBase.KEY_NAME_TO_CODE:
symb = GeistXBase.KEY_NAME_TO_CODE[name]
elif name.lower() in GeistXBase.KEY_NAME_TO_CODE_IGNORE_CASE:
symb = GeistXBase.KEY_NAME_TO_CODE_IGNORE_CASE[name]
else:
raise ValueError('unhandled key %r' % (name,))
return self._conn.keysyms.get_keycode(symb)
def key_down(self, name):
key_code = self._get_key_code_from_name(name)
with self._conn.bunch():
self._conn.xtest.fake_input_checked(
KeyPress,
detail=key_code
)
def key_up(self, name):
key_code = self._get_key_code_from_name(name)
with self._conn.bunch():
self._conn.xtest.fake_input_checked(
KeyRelease,
detail=key_code
)
def button_down(self, button_num):
with self._conn.bunch():
self._conn.xtest.fake_input_checked(
ButtonPress,
detail=button_num
)
def button_up(self, button_num):
with self._conn.bunch():
self._conn.xtest.fake_input_checked(
ButtonRelease,
detail=button_num
)
def move(self, point):
x, y = point
with self._conn.bunch():
self._conn.xtest.fake_input_checked(
MotionNotify,
rootX=x,
rootY=y,
)
def cursor_position(self):
reply = self._root.query_pointer().reply()
return reply.root_x, reply.root_y
def close(self):
if hasattr(self, '_conn'):
self._conn.disconnect()
del self._conn
def __del__(self):
self.close()
| true | true |
f724b7df3714254f6a67ad6934798425f668dd4c | 565 | bzl | Python | src/test/java/com/google/devtools/build/skydoc/testdata/java_basic_test/input.bzl | ArielleA/bazel | f7be80e47445a1ddf301b1af0dda2f97b5a271ad | [
"Apache-2.0"
] | null | null | null | src/test/java/com/google/devtools/build/skydoc/testdata/java_basic_test/input.bzl | ArielleA/bazel | f7be80e47445a1ddf301b1af0dda2f97b5a271ad | [
"Apache-2.0"
] | null | null | null | src/test/java/com/google/devtools/build/skydoc/testdata/java_basic_test/input.bzl | ArielleA/bazel | f7be80e47445a1ddf301b1af0dda2f97b5a271ad | [
"Apache-2.0"
] | null | null | null | def exercise_the_api():
var1 = java_common.JavaRuntimeInfo
var2 = JavaInfo
var3 = java_proto_common
exercise_the_api()
# Minimal rule implementation: produces no providers beyond an empty struct.
def my_rule_impl(ctx):
    return struct()
# Rule under test: its generated documentation should contain the 'doc'
# string and one entry per attribute declared below.
java_related_rule = rule(
    implementation = my_rule_impl,
    doc = "This rule does java-related things.",
    attrs = {
        "first": attr.label(mandatory = True, allow_files = True, single_file = True),
        "second": attr.string_dict(mandatory = True),
        "third": attr.output(mandatory = True),
        "fourth": attr.bool(default = False, mandatory = False),
    },
)
| 25.681818 | 86 | 0.656637 | def exercise_the_api():
var1 = java_common.JavaRuntimeInfo
var2 = JavaInfo
var3 = java_proto_common
exercise_the_api()
def my_rule_impl(ctx):
return struct()
java_related_rule = rule(
implementation = my_rule_impl,
doc = "This rule does java-related things.",
attrs = {
"first": attr.label(mandatory = True, allow_files = True, single_file = True),
"second": attr.string_dict(mandatory = True),
"third": attr.output(mandatory = True),
"fourth": attr.bool(default = False, mandatory = False),
},
)
| true | true |
f724b8126766f5f31898fdfe933c66db94b09011 | 18,759 | py | Python | rootfs/usr/lib/python3/dist-packages/numpy/polynomial/tests/test_chebyshev.py | kappaIO-Dev/kappaIO-sdk-armhf-crosscompile | 66fc5fc21e6235f7a3be72a7ccac68e2224b7fb2 | [
"MIT"
] | null | null | null | rootfs/usr/lib/python3/dist-packages/numpy/polynomial/tests/test_chebyshev.py | kappaIO-Dev/kappaIO-sdk-armhf-crosscompile | 66fc5fc21e6235f7a3be72a7ccac68e2224b7fb2 | [
"MIT"
] | null | null | null | rootfs/usr/lib/python3/dist-packages/numpy/polynomial/tests/test_chebyshev.py | kappaIO-Dev/kappaIO-sdk-armhf-crosscompile | 66fc5fc21e6235f7a3be72a7ccac68e2224b7fb2 | [
"MIT"
] | null | null | null | """Tests for chebyshev module.
"""
import numpy as np
import numpy.polynomial.chebyshev as ch
from numpy.testing import *
def trim(x):
    """Strip trailing Chebyshev coefficients smaller than 1e-6."""
    return ch.chebtrim(x, tol=1e-6)
# Power-basis coefficients (lowest degree first) of the Chebyshev
# polynomials T_0 ... T_9, used as ground-truth values in the tests below.
T0 = [ 1]
T1 = [ 0, 1]
T2 = [-1, 0, 2]
T3 = [ 0, -3, 0, 4]
T4 = [ 1, 0, -8, 0, 8]
T5 = [ 0, 5, 0, -20, 0, 16]
T6 = [-1, 0, 18, 0, -48, 0, 32]
T7 = [ 0, -7, 0, 56, 0, -112, 0, 64]
T8 = [ 1, 0, -32, 0, 160, 0, -256, 0, 128]
T9 = [ 0, 9, 0, -120, 0, 432, 0, -576, 0, 256]
Tlist = [T0, T1, T2, T3, T4, T5, T6, T7, T8, T9]
class TestPrivate(TestCase):
    """Round-trip checks for the private c-series <-> z-series helpers."""

    def test__cseries_to_zseries(self):
        # A c-series [2, 1, ..., 1] maps to the symmetric z-series
        # [.5]*i + [2] + [.5]*i.
        for i in range(5):
            cseries = np.array([2] + [1] * i, np.double)
            expected = np.array([.5] * i + [2] + [.5] * i, np.double)
            assert_equal(ch._cseries_to_zseries(cseries), expected)

    def test__zseries_to_cseries(self):
        # Inverse mapping of the case above.
        for i in range(5):
            zseries = np.array([.5] * i + [2] + [.5] * i, np.double)
            expected = np.array([2] + [1] * i, np.double)
            assert_equal(ch._zseries_to_cseries(zseries), expected)
class TestConstants(TestCase):
    """The module-level Chebyshev constants have their documented values."""

    def test_chebdomain(self):
        # Chebyshev polynomials are defined on [-1, 1].
        assert_equal(ch.chebdomain, [-1, 1])

    def test_chebzero(self):
        assert_equal(ch.chebzero, [0])

    def test_chebone(self):
        assert_equal(ch.chebone, [1])

    def test_chebx(self):
        # The identity series: x == T_1(x).
        assert_equal(ch.chebx, [0, 1])
class TestArithmetic(TestCase) :
    """Exercise basic Chebyshev-series arithmetic (add/sub/mul/div/eval)."""
    def test_chebadd(self) :
        """Adding unit series T_i + T_j gives a 1 at index i and j."""
        for i in range(5) :
            for j in range(5) :
                msg = "At i=%d, j=%d" % (i,j)
                tgt = np.zeros(max(i,j) + 1)
                tgt[i] += 1
                tgt[j] += 1
                res = ch.chebadd([0]*i + [1], [0]*j + [1])
                assert_equal(trim(res), trim(tgt), err_msg=msg)
    def test_chebsub(self) :
        """Subtracting unit series T_i - T_j gives +1 at i and -1 at j."""
        for i in range(5) :
            for j in range(5) :
                msg = "At i=%d, j=%d" % (i,j)
                tgt = np.zeros(max(i,j) + 1)
                tgt[i] += 1
                tgt[j] -= 1
                res = ch.chebsub([0]*i + [1], [0]*j + [1])
                assert_equal(trim(res), trim(tgt), err_msg=msg)
    def test_chebmulx(self):
        """x*T_i = (T_{i-1} + T_{i+1})/2 for i >= 1."""
        assert_equal(ch.chebmulx([0]), [0])
        assert_equal(ch.chebmulx([1]), [0,1])
        for i in range(1, 5):
            ser = [0]*i + [1]
            tgt = [0]*(i - 1) + [.5, 0, .5]
            assert_equal(ch.chebmulx(ser), tgt)
    def test_chebmul(self) :
        """Product identity: T_i * T_j = (T_{i+j} + T_{|i-j|})/2."""
        for i in range(5) :
            for j in range(5) :
                msg = "At i=%d, j=%d" % (i,j)
                tgt = np.zeros(i + j + 1)
                tgt[i + j] += .5
                tgt[abs(i - j)] += .5
                res = ch.chebmul([0]*i + [1], [0]*j + [1])
                assert_equal(trim(res), trim(tgt), err_msg=msg)
    def test_chebdiv(self) :
        """Division round-trips: quo*divisor + rem reproduces the dividend."""
        for i in range(5) :
            for j in range(5) :
                msg = "At i=%d, j=%d" % (i,j)
                ci = [0]*i + [1]
                cj = [0]*j + [1]
                tgt = ch.chebadd(ci, cj)
                quo, rem = ch.chebdiv(tgt, ci)
                res = ch.chebadd(ch.chebmul(quo, ci), rem)
                assert_equal(trim(res), trim(tgt), err_msg=msg)
    def test_chebval(self) :
        def f(x) :
            return x*(x**2 - 1)
        # check empty input
        assert_equal(ch.chebval([], [1]).size, 0)
        # check normal input: T_i(1) == 1, T_i(-1) == (-1)**i, and T_i
        # vanishes at its i interior Chebyshev nodes
        for i in range(5) :
            tgt = 1
            res = ch.chebval(1, [0]*i + [1])
            assert_almost_equal(res, tgt)
            tgt = (-1)**i
            res = ch.chebval(-1, [0]*i + [1])
            assert_almost_equal(res, tgt)
            zeros = np.cos(np.linspace(-np.pi, 0, 2*i + 1)[1::2])
            tgt = 0
            res = ch.chebval(zeros, [0]*i + [1])
            assert_almost_equal(res, tgt)
        # x*(x**2 - 1) expressed in the Chebyshev basis is -.25*T_1 + .25*T_3
        x = np.linspace(-1,1)
        tgt = f(x)
        res = ch.chebval(x, [0, -.25, 0, .25])
        assert_almost_equal(res, tgt)
        # check that shape is preserved
        for i in range(3) :
            dims = [2]*i
            x = np.zeros(dims)
            assert_equal(ch.chebval(x, [1]).shape, dims)
            assert_equal(ch.chebval(x, [1,0]).shape, dims)
            assert_equal(ch.chebval(x, [1,0,0]).shape, dims)
class TestCalculus(TestCase) :
def test_chebint(self) :
# check exceptions
assert_raises(ValueError, ch.chebint, [0], .5)
assert_raises(ValueError, ch.chebint, [0], -1)
assert_raises(ValueError, ch.chebint, [0], 1, [0,0])
# test integration of zero polynomial
for i in range(2, 5):
k = [0]*(i - 2) + [1]
res = ch.chebint([0], m=i, k=k)
assert_almost_equal(res, [0, 1])
# check single integration with integration constant
for i in range(5) :
scl = i + 1
pol = [0]*i + [1]
tgt = [i] + [0]*i + [1/scl]
chebpol = ch.poly2cheb(pol)
chebint = ch.chebint(chebpol, m=1, k=[i])
res = ch.cheb2poly(chebint)
assert_almost_equal(trim(res), trim(tgt))
# check single integration with integration constant and lbnd
for i in range(5) :
scl = i + 1
pol = [0]*i + [1]
chebpol = ch.poly2cheb(pol)
chebint = ch.chebint(chebpol, m=1, k=[i], lbnd=-1)
assert_almost_equal(ch.chebval(-1, chebint), i)
# check single integration with integration constant and scaling
for i in range(5) :
scl = i + 1
pol = [0]*i + [1]
tgt = [i] + [0]*i + [2/scl]
chebpol = ch.poly2cheb(pol)
chebint = ch.chebint(chebpol, m=1, k=[i], scl=2)
res = ch.cheb2poly(chebint)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with default k
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = ch.chebint(tgt, m=1)
res = ch.chebint(pol, m=j)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with defined k
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = ch.chebint(tgt, m=1, k=[k])
res = ch.chebint(pol, m=j, k=list(range(j)))
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with lbnd
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = ch.chebint(tgt, m=1, k=[k], lbnd=-1)
res = ch.chebint(pol, m=j, k=list(range(j)), lbnd=-1)
assert_almost_equal(trim(res), trim(tgt))
# check multiple integrations with scaling
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = ch.chebint(tgt, m=1, k=[k], scl=2)
res = ch.chebint(pol, m=j, k=list(range(j)), scl=2)
assert_almost_equal(trim(res), trim(tgt))
def test_chebder(self) :
# check exceptions
assert_raises(ValueError, ch.chebder, [0], .5)
assert_raises(ValueError, ch.chebder, [0], -1)
# check that zeroth deriviative does nothing
for i in range(5) :
tgt = [1] + [0]*i
res = ch.chebder(tgt, m=0)
assert_equal(trim(res), trim(tgt))
# check that derivation is the inverse of integration
for i in range(5) :
for j in range(2,5) :
tgt = [1] + [0]*i
res = ch.chebder(ch.chebint(tgt, m=j), m=j)
assert_almost_equal(trim(res), trim(tgt))
# check derivation with scaling
for i in range(5) :
for j in range(2,5) :
tgt = [1] + [0]*i
res = ch.chebder(ch.chebint(tgt, m=j, scl=2), m=j, scl=.5)
assert_almost_equal(trim(res), trim(tgt))
class TestMisc(TestCase) :
def test_chebfromroots(self) :
res = ch.chebfromroots([])
assert_almost_equal(trim(res), [1])
for i in range(1,5) :
roots = np.cos(np.linspace(-np.pi, 0, 2*i + 1)[1::2])
tgt = [0]*i + [1]
res = ch.chebfromroots(roots)*2**(i-1)
assert_almost_equal(trim(res),trim(tgt))
def test_chebroots(self) :
assert_almost_equal(ch.chebroots([1]), [])
assert_almost_equal(ch.chebroots([1, 2]), [-.5])
for i in range(2,5) :
tgt = np.linspace(-1, 1, i)
res = ch.chebroots(ch.chebfromroots(tgt))
assert_almost_equal(trim(res), trim(tgt))
def test_chebvander(self) :
# check for 1d x
x = np.arange(3)
v = ch.chebvander(x, 3)
assert_(v.shape == (3,4))
for i in range(4) :
coef = [0]*i + [1]
assert_almost_equal(v[...,i], ch.chebval(x, coef))
# check for 2d x
x = np.array([[1,2],[3,4],[5,6]])
v = ch.chebvander(x, 3)
assert_(v.shape == (3,2,4))
for i in range(4) :
coef = [0]*i + [1]
assert_almost_equal(v[...,i], ch.chebval(x, coef))
def test_chebfit(self) :
def f(x) :
return x*(x - 1)*(x - 2)
# Test exceptions
assert_raises(ValueError, ch.chebfit, [1], [1], -1)
assert_raises(TypeError, ch.chebfit, [[1]], [1], 0)
assert_raises(TypeError, ch.chebfit, [], [1], 0)
assert_raises(TypeError, ch.chebfit, [1], [[[1]]], 0)
assert_raises(TypeError, ch.chebfit, [1, 2], [1], 0)
assert_raises(TypeError, ch.chebfit, [1], [1, 2], 0)
assert_raises(TypeError, ch.chebfit, [1], [1], 0, w=[[1]])
assert_raises(TypeError, ch.chebfit, [1], [1], 0, w=[1,1])
# Test fit
x = np.linspace(0,2)
y = f(x)
#
coef3 = ch.chebfit(x, y, 3)
assert_equal(len(coef3), 4)
assert_almost_equal(ch.chebval(x, coef3), y)
#
coef4 = ch.chebfit(x, y, 4)
assert_equal(len(coef4), 5)
assert_almost_equal(ch.chebval(x, coef4), y)
#
coef2d = ch.chebfit(x, np.array([y,y]).T, 3)
assert_almost_equal(coef2d, np.array([coef3,coef3]).T)
# test weighting
w = np.zeros_like(x)
yw = y.copy()
w[1::2] = 1
y[0::2] = 0
wcoef3 = ch.chebfit(x, yw, 3, w=w)
assert_almost_equal(wcoef3, coef3)
#
wcoef2d = ch.chebfit(x, np.array([yw,yw]).T, 3, w=w)
assert_almost_equal(wcoef2d, np.array([coef3,coef3]).T)
def test_chebtrim(self) :
coef = [2, -1, 1, 0]
# Test exceptions
assert_raises(ValueError, ch.chebtrim, coef, -1)
# Test results
assert_equal(ch.chebtrim(coef), coef[:-1])
assert_equal(ch.chebtrim(coef, 1), coef[:-3])
assert_equal(ch.chebtrim(coef, 2), [0])
def test_chebline(self) :
assert_equal(ch.chebline(3,4), [3, 4])
def test_cheb2poly(self) :
for i in range(10) :
assert_almost_equal(ch.cheb2poly([0]*i + [1]), Tlist[i])
def test_poly2cheb(self) :
for i in range(10) :
assert_almost_equal(ch.poly2cheb(Tlist[i]), [0]*i + [1])
def test_chebpts1(self):
#test exceptions
assert_raises(ValueError, ch.chebpts1, 1.5)
assert_raises(ValueError, ch.chebpts1, 0)
#test points
tgt = [0]
assert_almost_equal(ch.chebpts1(1), tgt)
tgt = [-0.70710678118654746, 0.70710678118654746]
assert_almost_equal(ch.chebpts1(2), tgt)
tgt = [-0.86602540378443871, 0, 0.86602540378443871]
assert_almost_equal(ch.chebpts1(3), tgt)
tgt = [-0.9238795325, -0.3826834323, 0.3826834323, 0.9238795325]
assert_almost_equal(ch.chebpts1(4), tgt)
def test_chebpts2(self):
#test exceptions
assert_raises(ValueError, ch.chebpts2, 1.5)
assert_raises(ValueError, ch.chebpts2, 1)
#test points
tgt = [-1, 1]
assert_almost_equal(ch.chebpts2(2), tgt)
tgt = [-1, 0, 1]
assert_almost_equal(ch.chebpts2(3), tgt)
tgt = [-1, -0.5, .5, 1]
assert_almost_equal(ch.chebpts2(4), tgt)
tgt = [-1.0, -0.707106781187, 0, 0.707106781187, 1.0]
assert_almost_equal(ch.chebpts2(5), tgt)
class TestChebyshevClass(TestCase) :
p1 = ch.Chebyshev([1,2,3])
p2 = ch.Chebyshev([1,2,3], [0,1])
p3 = ch.Chebyshev([1,2])
p4 = ch.Chebyshev([2,2,3])
p5 = ch.Chebyshev([3,2,3])
def test_equal(self) :
assert_(self.p1 == self.p1)
assert_(self.p2 == self.p2)
assert_(not self.p1 == self.p2)
assert_(not self.p1 == self.p3)
assert_(not self.p1 == [1,2,3])
def test_not_equal(self) :
assert_(not self.p1 != self.p1)
assert_(not self.p2 != self.p2)
assert_(self.p1 != self.p2)
assert_(self.p1 != self.p3)
assert_(self.p1 != [1,2,3])
def test_add(self) :
tgt = ch.Chebyshev([2,4,6])
assert_(self.p1 + self.p1 == tgt)
assert_(self.p1 + [1,2,3] == tgt)
assert_([1,2,3] + self.p1 == tgt)
def test_sub(self) :
tgt = ch.Chebyshev([1])
assert_(self.p4 - self.p1 == tgt)
assert_(self.p4 - [1,2,3] == tgt)
assert_([2,2,3] - self.p1 == tgt)
def test_mul(self) :
tgt = ch.Chebyshev([7.5, 10., 8., 6., 4.5])
assert_(self.p1 * self.p1 == tgt)
assert_(self.p1 * [1,2,3] == tgt)
assert_([1,2,3] * self.p1 == tgt)
def test_floordiv(self) :
tgt = ch.Chebyshev([1])
assert_(self.p4 // self.p1 == tgt)
assert_(self.p4 // [1,2,3] == tgt)
assert_([2,2,3] // self.p1 == tgt)
def test_mod(self) :
tgt = ch.Chebyshev([1])
assert_((self.p4 % self.p1) == tgt)
assert_((self.p4 % [1,2,3]) == tgt)
assert_(([2,2,3] % self.p1) == tgt)
def test_divmod(self) :
tquo = ch.Chebyshev([1])
trem = ch.Chebyshev([2])
quo, rem = divmod(self.p5, self.p1)
assert_(quo == tquo and rem == trem)
quo, rem = divmod(self.p5, [1,2,3])
assert_(quo == tquo and rem == trem)
quo, rem = divmod([3,2,3], self.p1)
assert_(quo == tquo and rem == trem)
def test_pow(self) :
tgt = ch.Chebyshev([1])
for i in range(5) :
res = self.p1**i
assert_(res == tgt)
tgt *= self.p1
def test_call(self) :
# domain = [-1, 1]
x = np.linspace(-1, 1)
tgt = 3*(2*x**2 - 1) + 2*x + 1
assert_almost_equal(self.p1(x), tgt)
# domain = [0, 1]
x = np.linspace(0, 1)
xx = 2*x - 1
assert_almost_equal(self.p2(x), self.p1(xx))
def test_degree(self) :
assert_equal(self.p1.degree(), 2)
def test_cutdeg(self) :
assert_raises(ValueError, self.p1.cutdeg, .5)
assert_raises(ValueError, self.p1.cutdeg, -1)
assert_equal(len(self.p1.cutdeg(3)), 3)
assert_equal(len(self.p1.cutdeg(2)), 3)
assert_equal(len(self.p1.cutdeg(1)), 2)
assert_equal(len(self.p1.cutdeg(0)), 1)
def test_convert(self) :
x = np.linspace(-1,1)
p = self.p1.convert(domain=[0,1])
assert_almost_equal(p(x), self.p1(x))
def test_mapparms(self) :
parms = self.p2.mapparms()
assert_almost_equal(parms, [-1, 2])
def test_trim(self) :
coef = [1, 1e-6, 1e-12, 0]
p = ch.Chebyshev(coef)
assert_equal(p.trim().coef, coef[:3])
assert_equal(p.trim(1e-10).coef, coef[:2])
assert_equal(p.trim(1e-5).coef, coef[:1])
def test_truncate(self) :
assert_raises(ValueError, self.p1.truncate, .5)
assert_raises(ValueError, self.p1.truncate, 0)
assert_equal(len(self.p1.truncate(4)), 3)
assert_equal(len(self.p1.truncate(3)), 3)
assert_equal(len(self.p1.truncate(2)), 2)
assert_equal(len(self.p1.truncate(1)), 1)
def test_copy(self) :
p = self.p1.copy()
assert_(self.p1 == p)
def test_integ(self) :
p = self.p2.integ()
assert_almost_equal(p.coef, ch.chebint([1,2,3], 1, 0, scl=.5))
p = self.p2.integ(lbnd=0)
assert_almost_equal(p(0), 0)
p = self.p2.integ(1, 1)
assert_almost_equal(p.coef, ch.chebint([1,2,3], 1, 1, scl=.5))
p = self.p2.integ(2, [1, 2])
assert_almost_equal(p.coef, ch.chebint([1,2,3], 2, [1,2], scl=.5))
def test_deriv(self) :
p = self.p2.integ(2, [1, 2])
assert_almost_equal(p.deriv(1).coef, self.p2.integ(1, [1]).coef)
assert_almost_equal(p.deriv(2).coef, self.p2.coef)
def test_roots(self) :
p = ch.Chebyshev(ch.poly2cheb([0, -1, 0, 1]), [0, 1])
res = p.roots()
tgt = [0, .5, 1]
assert_almost_equal(res, tgt)
def test_linspace(self):
xdes = np.linspace(0, 1, 20)
ydes = self.p2(xdes)
xres, yres = self.p2.linspace(20)
assert_almost_equal(xres, xdes)
assert_almost_equal(yres, ydes)
def test_fromroots(self) :
roots = [0, .5, 1]
p = ch.Chebyshev.fromroots(roots, domain=[0, 1])
res = p.coef
tgt = ch.poly2cheb([0, -1, 0, 1])
assert_almost_equal(res, tgt)
def test_fit(self) :
def f(x) :
return x*(x - 1)*(x - 2)
x = np.linspace(0,3)
y = f(x)
# test default value of domain
p = ch.Chebyshev.fit(x, y, 3)
assert_almost_equal(p.domain, [0,3])
# test that fit works in given domains
p = ch.Chebyshev.fit(x, y, 3, None)
assert_almost_equal(p(x), y)
assert_almost_equal(p.domain, [0,3])
p = ch.Chebyshev.fit(x, y, 3, [])
assert_almost_equal(p(x), y)
assert_almost_equal(p.domain, [-1, 1])
# test that fit accepts weights.
w = np.zeros_like(x)
yw = y.copy()
w[1::2] = 1
yw[0::2] = 0
p = ch.Chebyshev.fit(x, yw, 3, w=w)
assert_almost_equal(p(x), y)
def test_identity(self) :
x = np.linspace(0,3)
p = ch.Chebyshev.identity()
assert_almost_equal(p(x), x)
p = ch.Chebyshev.identity([1,3])
assert_almost_equal(p(x), x)
#
if __name__ == "__main__":
run_module_suite()
| 32.85289 | 74 | 0.504824 |
import numpy as np
import numpy.polynomial.chebyshev as ch
from numpy.testing import *
def trim(x) :
    """Strip trailing Chebyshev coefficients with magnitude below 1e-6."""
    tolerance = 1e-6
    return ch.chebtrim(x, tol=tolerance)
# Power-basis coefficients (ordered low degree to high) of the first ten
# Chebyshev polynomials T_0 .. T_9; used as reference values when testing
# the cheb2poly / poly2cheb conversions below.
T0 = [ 1]
T1 = [ 0, 1]
T2 = [-1, 0, 2]
T3 = [ 0, -3, 0, 4]
T4 = [ 1, 0, -8, 0, 8]
T5 = [ 0, 5, 0, -20, 0, 16]
T6 = [-1, 0, 18, 0, -48, 0, 32]
T7 = [ 0, -7, 0, 56, 0, -112, 0, 64]
T8 = [ 1, 0, -32, 0, 160, 0, -256, 0, 128]
T9 = [ 0, 9, 0, -120, 0, 432, 0, -576, 0, 256]
# Indexable collection so tests can look up T_i by degree.
Tlist = [T0, T1, T2, T3, T4, T5, T6, T7, T8, T9]
class TestPrivate(TestCase) :
    """Tests for the module-private Chebyshev <-> z-series conversions."""
    def test__cseries_to_zseries(self) :
        # Chebyshev series [2, 1, ..., 1] must map to the symmetric
        # z-series [.5, ..., .5, 2, .5, ..., .5].
        for i in range(5) :
            inp = np.array([2] + [1]*i, np.double)
            tgt = np.array([.5]*i + [2] + [.5]*i, np.double)
            res = ch._cseries_to_zseries(inp)
            assert_equal(res, tgt)
    def test__zseries_to_cseries(self) :
        # Inverse direction: the symmetric z-series collapses back to the
        # one-sided Chebyshev coefficient series [2, 1, ..., 1].
        for i in range(5) :
            inp = np.array([.5]*i + [2] + [.5]*i, np.double)
            tgt = np.array([2] + [1]*i, np.double)
            res = ch._zseries_to_cseries(inp)
            assert_equal(res, tgt)
class TestConstants(TestCase) :
    """Check the module-level constants exported by the chebyshev package."""
    def test_chebdomain(self) :
        # Natural Chebyshev domain is [-1, 1].
        assert_equal(ch.chebdomain, [-1, 1])
    def test_chebzero(self) :
        # Coefficient series of the zero polynomial.
        assert_equal(ch.chebzero, [0])
    def test_chebone(self) :
        # Coefficient series of the constant 1 (== T_0).
        assert_equal(ch.chebone, [1])
    def test_chebx(self) :
        # Coefficient series of the identity x (== T_1).
        assert_equal(ch.chebx, [0, 1])
class TestArithmetic(TestCase) :
def test_chebadd(self) :
for i in range(5) :
for j in range(5) :
msg = "At i=%d, j=%d" % (i,j)
tgt = np.zeros(max(i,j) + 1)
tgt[i] += 1
tgt[j] += 1
res = ch.chebadd([0]*i + [1], [0]*j + [1])
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_chebsub(self) :
for i in range(5) :
for j in range(5) :
msg = "At i=%d, j=%d" % (i,j)
tgt = np.zeros(max(i,j) + 1)
tgt[i] += 1
tgt[j] -= 1
res = ch.chebsub([0]*i + [1], [0]*j + [1])
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_chebmulx(self):
assert_equal(ch.chebmulx([0]), [0])
assert_equal(ch.chebmulx([1]), [0,1])
for i in range(1, 5):
ser = [0]*i + [1]
tgt = [0]*(i - 1) + [.5, 0, .5]
assert_equal(ch.chebmulx(ser), tgt)
def test_chebmul(self) :
for i in range(5) :
for j in range(5) :
msg = "At i=%d, j=%d" % (i,j)
tgt = np.zeros(i + j + 1)
tgt[i + j] += .5
tgt[abs(i - j)] += .5
res = ch.chebmul([0]*i + [1], [0]*j + [1])
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_chebdiv(self) :
for i in range(5) :
for j in range(5) :
msg = "At i=%d, j=%d" % (i,j)
ci = [0]*i + [1]
cj = [0]*j + [1]
tgt = ch.chebadd(ci, cj)
quo, rem = ch.chebdiv(tgt, ci)
res = ch.chebadd(ch.chebmul(quo, ci), rem)
assert_equal(trim(res), trim(tgt), err_msg=msg)
def test_chebval(self) :
def f(x) :
return x*(x**2 - 1)
assert_equal(ch.chebval([], [1]).size, 0)
for i in range(5) :
tgt = 1
res = ch.chebval(1, [0]*i + [1])
assert_almost_equal(res, tgt)
tgt = (-1)**i
res = ch.chebval(-1, [0]*i + [1])
assert_almost_equal(res, tgt)
zeros = np.cos(np.linspace(-np.pi, 0, 2*i + 1)[1::2])
tgt = 0
res = ch.chebval(zeros, [0]*i + [1])
assert_almost_equal(res, tgt)
x = np.linspace(-1,1)
tgt = f(x)
res = ch.chebval(x, [0, -.25, 0, .25])
assert_almost_equal(res, tgt)
for i in range(3) :
dims = [2]*i
x = np.zeros(dims)
assert_equal(ch.chebval(x, [1]).shape, dims)
assert_equal(ch.chebval(x, [1,0]).shape, dims)
assert_equal(ch.chebval(x, [1,0,0]).shape, dims)
class TestCalculus(TestCase) :
def test_chebint(self) :
assert_raises(ValueError, ch.chebint, [0], .5)
assert_raises(ValueError, ch.chebint, [0], -1)
assert_raises(ValueError, ch.chebint, [0], 1, [0,0])
for i in range(2, 5):
k = [0]*(i - 2) + [1]
res = ch.chebint([0], m=i, k=k)
assert_almost_equal(res, [0, 1])
for i in range(5) :
scl = i + 1
pol = [0]*i + [1]
tgt = [i] + [0]*i + [1/scl]
chebpol = ch.poly2cheb(pol)
chebint = ch.chebint(chebpol, m=1, k=[i])
res = ch.cheb2poly(chebint)
assert_almost_equal(trim(res), trim(tgt))
for i in range(5) :
scl = i + 1
pol = [0]*i + [1]
chebpol = ch.poly2cheb(pol)
chebint = ch.chebint(chebpol, m=1, k=[i], lbnd=-1)
assert_almost_equal(ch.chebval(-1, chebint), i)
for i in range(5) :
scl = i + 1
pol = [0]*i + [1]
tgt = [i] + [0]*i + [2/scl]
chebpol = ch.poly2cheb(pol)
chebint = ch.chebint(chebpol, m=1, k=[i], scl=2)
res = ch.cheb2poly(chebint)
assert_almost_equal(trim(res), trim(tgt))
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = ch.chebint(tgt, m=1)
res = ch.chebint(pol, m=j)
assert_almost_equal(trim(res), trim(tgt))
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = ch.chebint(tgt, m=1, k=[k])
res = ch.chebint(pol, m=j, k=list(range(j)))
assert_almost_equal(trim(res), trim(tgt))
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = ch.chebint(tgt, m=1, k=[k], lbnd=-1)
res = ch.chebint(pol, m=j, k=list(range(j)), lbnd=-1)
assert_almost_equal(trim(res), trim(tgt))
for i in range(5) :
for j in range(2,5) :
pol = [0]*i + [1]
tgt = pol[:]
for k in range(j) :
tgt = ch.chebint(tgt, m=1, k=[k], scl=2)
res = ch.chebint(pol, m=j, k=list(range(j)), scl=2)
assert_almost_equal(trim(res), trim(tgt))
def test_chebder(self) :
assert_raises(ValueError, ch.chebder, [0], .5)
assert_raises(ValueError, ch.chebder, [0], -1)
for i in range(5) :
tgt = [1] + [0]*i
res = ch.chebder(tgt, m=0)
assert_equal(trim(res), trim(tgt))
for i in range(5) :
for j in range(2,5) :
tgt = [1] + [0]*i
res = ch.chebder(ch.chebint(tgt, m=j), m=j)
assert_almost_equal(trim(res), trim(tgt))
for i in range(5) :
for j in range(2,5) :
tgt = [1] + [0]*i
res = ch.chebder(ch.chebint(tgt, m=j, scl=2), m=j, scl=.5)
assert_almost_equal(trim(res), trim(tgt))
class TestMisc(TestCase) :
def test_chebfromroots(self) :
res = ch.chebfromroots([])
assert_almost_equal(trim(res), [1])
for i in range(1,5) :
roots = np.cos(np.linspace(-np.pi, 0, 2*i + 1)[1::2])
tgt = [0]*i + [1]
res = ch.chebfromroots(roots)*2**(i-1)
assert_almost_equal(trim(res),trim(tgt))
def test_chebroots(self) :
assert_almost_equal(ch.chebroots([1]), [])
assert_almost_equal(ch.chebroots([1, 2]), [-.5])
for i in range(2,5) :
tgt = np.linspace(-1, 1, i)
res = ch.chebroots(ch.chebfromroots(tgt))
assert_almost_equal(trim(res), trim(tgt))
def test_chebvander(self) :
x = np.arange(3)
v = ch.chebvander(x, 3)
assert_(v.shape == (3,4))
for i in range(4) :
coef = [0]*i + [1]
assert_almost_equal(v[...,i], ch.chebval(x, coef))
x = np.array([[1,2],[3,4],[5,6]])
v = ch.chebvander(x, 3)
assert_(v.shape == (3,2,4))
for i in range(4) :
coef = [0]*i + [1]
assert_almost_equal(v[...,i], ch.chebval(x, coef))
def test_chebfit(self) :
def f(x) :
return x*(x - 1)*(x - 2)
assert_raises(ValueError, ch.chebfit, [1], [1], -1)
assert_raises(TypeError, ch.chebfit, [[1]], [1], 0)
assert_raises(TypeError, ch.chebfit, [], [1], 0)
assert_raises(TypeError, ch.chebfit, [1], [[[1]]], 0)
assert_raises(TypeError, ch.chebfit, [1, 2], [1], 0)
assert_raises(TypeError, ch.chebfit, [1], [1, 2], 0)
assert_raises(TypeError, ch.chebfit, [1], [1], 0, w=[[1]])
assert_raises(TypeError, ch.chebfit, [1], [1], 0, w=[1,1])
x = np.linspace(0,2)
y = f(x)
coef3 = ch.chebfit(x, y, 3)
assert_equal(len(coef3), 4)
assert_almost_equal(ch.chebval(x, coef3), y)
coef4 = ch.chebfit(x, y, 4)
assert_equal(len(coef4), 5)
assert_almost_equal(ch.chebval(x, coef4), y)
coef2d = ch.chebfit(x, np.array([y,y]).T, 3)
assert_almost_equal(coef2d, np.array([coef3,coef3]).T)
w = np.zeros_like(x)
yw = y.copy()
w[1::2] = 1
y[0::2] = 0
wcoef3 = ch.chebfit(x, yw, 3, w=w)
assert_almost_equal(wcoef3, coef3)
wcoef2d = ch.chebfit(x, np.array([yw,yw]).T, 3, w=w)
assert_almost_equal(wcoef2d, np.array([coef3,coef3]).T)
def test_chebtrim(self) :
coef = [2, -1, 1, 0]
assert_raises(ValueError, ch.chebtrim, coef, -1)
assert_equal(ch.chebtrim(coef), coef[:-1])
assert_equal(ch.chebtrim(coef, 1), coef[:-3])
assert_equal(ch.chebtrim(coef, 2), [0])
def test_chebline(self) :
assert_equal(ch.chebline(3,4), [3, 4])
def test_cheb2poly(self) :
for i in range(10) :
assert_almost_equal(ch.cheb2poly([0]*i + [1]), Tlist[i])
def test_poly2cheb(self) :
for i in range(10) :
assert_almost_equal(ch.poly2cheb(Tlist[i]), [0]*i + [1])
def test_chebpts1(self):
assert_raises(ValueError, ch.chebpts1, 1.5)
assert_raises(ValueError, ch.chebpts1, 0)
tgt = [0]
assert_almost_equal(ch.chebpts1(1), tgt)
tgt = [-0.70710678118654746, 0.70710678118654746]
assert_almost_equal(ch.chebpts1(2), tgt)
tgt = [-0.86602540378443871, 0, 0.86602540378443871]
assert_almost_equal(ch.chebpts1(3), tgt)
tgt = [-0.9238795325, -0.3826834323, 0.3826834323, 0.9238795325]
assert_almost_equal(ch.chebpts1(4), tgt)
def test_chebpts2(self):
assert_raises(ValueError, ch.chebpts2, 1.5)
assert_raises(ValueError, ch.chebpts2, 1)
tgt = [-1, 1]
assert_almost_equal(ch.chebpts2(2), tgt)
tgt = [-1, 0, 1]
assert_almost_equal(ch.chebpts2(3), tgt)
tgt = [-1, -0.5, .5, 1]
assert_almost_equal(ch.chebpts2(4), tgt)
tgt = [-1.0, -0.707106781187, 0, 0.707106781187, 1.0]
assert_almost_equal(ch.chebpts2(5), tgt)
class TestChebyshevClass(TestCase) :
p1 = ch.Chebyshev([1,2,3])
p2 = ch.Chebyshev([1,2,3], [0,1])
p3 = ch.Chebyshev([1,2])
p4 = ch.Chebyshev([2,2,3])
p5 = ch.Chebyshev([3,2,3])
def test_equal(self) :
assert_(self.p1 == self.p1)
assert_(self.p2 == self.p2)
assert_(not self.p1 == self.p2)
assert_(not self.p1 == self.p3)
assert_(not self.p1 == [1,2,3])
def test_not_equal(self) :
assert_(not self.p1 != self.p1)
assert_(not self.p2 != self.p2)
assert_(self.p1 != self.p2)
assert_(self.p1 != self.p3)
assert_(self.p1 != [1,2,3])
def test_add(self) :
tgt = ch.Chebyshev([2,4,6])
assert_(self.p1 + self.p1 == tgt)
assert_(self.p1 + [1,2,3] == tgt)
assert_([1,2,3] + self.p1 == tgt)
def test_sub(self) :
tgt = ch.Chebyshev([1])
assert_(self.p4 - self.p1 == tgt)
assert_(self.p4 - [1,2,3] == tgt)
assert_([2,2,3] - self.p1 == tgt)
def test_mul(self) :
tgt = ch.Chebyshev([7.5, 10., 8., 6., 4.5])
assert_(self.p1 * self.p1 == tgt)
assert_(self.p1 * [1,2,3] == tgt)
assert_([1,2,3] * self.p1 == tgt)
def test_floordiv(self) :
tgt = ch.Chebyshev([1])
assert_(self.p4 // self.p1 == tgt)
assert_(self.p4 // [1,2,3] == tgt)
assert_([2,2,3] // self.p1 == tgt)
def test_mod(self) :
tgt = ch.Chebyshev([1])
assert_((self.p4 % self.p1) == tgt)
assert_((self.p4 % [1,2,3]) == tgt)
assert_(([2,2,3] % self.p1) == tgt)
def test_divmod(self) :
tquo = ch.Chebyshev([1])
trem = ch.Chebyshev([2])
quo, rem = divmod(self.p5, self.p1)
assert_(quo == tquo and rem == trem)
quo, rem = divmod(self.p5, [1,2,3])
assert_(quo == tquo and rem == trem)
quo, rem = divmod([3,2,3], self.p1)
assert_(quo == tquo and rem == trem)
def test_pow(self) :
tgt = ch.Chebyshev([1])
for i in range(5) :
res = self.p1**i
assert_(res == tgt)
tgt *= self.p1
def test_call(self) :
x = np.linspace(-1, 1)
tgt = 3*(2*x**2 - 1) + 2*x + 1
assert_almost_equal(self.p1(x), tgt)
x = np.linspace(0, 1)
xx = 2*x - 1
assert_almost_equal(self.p2(x), self.p1(xx))
def test_degree(self) :
assert_equal(self.p1.degree(), 2)
def test_cutdeg(self) :
assert_raises(ValueError, self.p1.cutdeg, .5)
assert_raises(ValueError, self.p1.cutdeg, -1)
assert_equal(len(self.p1.cutdeg(3)), 3)
assert_equal(len(self.p1.cutdeg(2)), 3)
assert_equal(len(self.p1.cutdeg(1)), 2)
assert_equal(len(self.p1.cutdeg(0)), 1)
def test_convert(self) :
x = np.linspace(-1,1)
p = self.p1.convert(domain=[0,1])
assert_almost_equal(p(x), self.p1(x))
def test_mapparms(self) :
parms = self.p2.mapparms()
assert_almost_equal(parms, [-1, 2])
def test_trim(self) :
coef = [1, 1e-6, 1e-12, 0]
p = ch.Chebyshev(coef)
assert_equal(p.trim().coef, coef[:3])
assert_equal(p.trim(1e-10).coef, coef[:2])
assert_equal(p.trim(1e-5).coef, coef[:1])
def test_truncate(self) :
assert_raises(ValueError, self.p1.truncate, .5)
assert_raises(ValueError, self.p1.truncate, 0)
assert_equal(len(self.p1.truncate(4)), 3)
assert_equal(len(self.p1.truncate(3)), 3)
assert_equal(len(self.p1.truncate(2)), 2)
assert_equal(len(self.p1.truncate(1)), 1)
def test_copy(self) :
p = self.p1.copy()
assert_(self.p1 == p)
def test_integ(self) :
p = self.p2.integ()
assert_almost_equal(p.coef, ch.chebint([1,2,3], 1, 0, scl=.5))
p = self.p2.integ(lbnd=0)
assert_almost_equal(p(0), 0)
p = self.p2.integ(1, 1)
assert_almost_equal(p.coef, ch.chebint([1,2,3], 1, 1, scl=.5))
p = self.p2.integ(2, [1, 2])
assert_almost_equal(p.coef, ch.chebint([1,2,3], 2, [1,2], scl=.5))
def test_deriv(self) :
p = self.p2.integ(2, [1, 2])
assert_almost_equal(p.deriv(1).coef, self.p2.integ(1, [1]).coef)
assert_almost_equal(p.deriv(2).coef, self.p2.coef)
def test_roots(self) :
p = ch.Chebyshev(ch.poly2cheb([0, -1, 0, 1]), [0, 1])
res = p.roots()
tgt = [0, .5, 1]
assert_almost_equal(res, tgt)
def test_linspace(self):
xdes = np.linspace(0, 1, 20)
ydes = self.p2(xdes)
xres, yres = self.p2.linspace(20)
assert_almost_equal(xres, xdes)
assert_almost_equal(yres, ydes)
def test_fromroots(self) :
roots = [0, .5, 1]
p = ch.Chebyshev.fromroots(roots, domain=[0, 1])
res = p.coef
tgt = ch.poly2cheb([0, -1, 0, 1])
assert_almost_equal(res, tgt)
def test_fit(self) :
def f(x) :
return x*(x - 1)*(x - 2)
x = np.linspace(0,3)
y = f(x)
p = ch.Chebyshev.fit(x, y, 3)
assert_almost_equal(p.domain, [0,3])
p = ch.Chebyshev.fit(x, y, 3, None)
assert_almost_equal(p(x), y)
assert_almost_equal(p.domain, [0,3])
p = ch.Chebyshev.fit(x, y, 3, [])
assert_almost_equal(p(x), y)
assert_almost_equal(p.domain, [-1, 1])
w = np.zeros_like(x)
yw = y.copy()
w[1::2] = 1
yw[0::2] = 0
p = ch.Chebyshev.fit(x, yw, 3, w=w)
assert_almost_equal(p(x), y)
def test_identity(self) :
x = np.linspace(0,3)
p = ch.Chebyshev.identity()
assert_almost_equal(p(x), x)
p = ch.Chebyshev.identity([1,3])
assert_almost_equal(p(x), x)
if __name__ == "__main__":
run_module_suite()
| true | true |
f724b89350fc66e26a9bf5bea0145e663b49d42f | 379 | py | Python | sandbox/team_members/pudumula/ros/gazebo_ws_1/build/rrbot_gazebo/catkin_generated/pkg.installspace.context.pc.py | Project-Heisenberg/quantum | f3ad8f4693007e45e80a88f928273adcfdc8529d | [
"Apache-2.0"
] | 1 | 2017-04-23T14:23:54.000Z | 2017-04-23T14:23:54.000Z | sandbox/team_members/pudumula/ros/gazebo_ws_1/build/rrbot_gazebo/catkin_generated/pkg.installspace.context.pc.py | Project-Heisenberg/quantum | f3ad8f4693007e45e80a88f928273adcfdc8529d | [
"Apache-2.0"
] | 13 | 2016-03-25T05:15:17.000Z | 2018-05-30T15:53:12.000Z | sandbox/team_members/pudumula/ros/gazebo_ws_1/build/rrbot_gazebo/catkin_generated/pkg.installspace.context.pc.py | Project-Heisenberg/quantum | f3ad8f4693007e45e80a88f928273adcfdc8529d | [
"Apache-2.0"
] | null | null | null | # generated from catkin/cmake/template/pkg.context.pc.in
# Auto-generated catkin package-context constants — do not edit by hand;
# regenerate via the catkin build instead.
CATKIN_PACKAGE_PREFIX = ""
# Exported include directories (empty string => no entries).
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
# Catkin run dependencies as a space-separated string (none here).
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
# Libraries exported via pkg-config (none here).
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "rrbot_gazebo"
# Install-space prefix this context was generated for.
PROJECT_SPACE_DIR = "/home/neo/ros/gazebo_ws_1/install"
PROJECT_VERSION = "0.1.0"
| 42.111111 | 68 | 0.707124 |
# Auto-generated catkin package-context constants — do not edit by hand;
# regenerate via the catkin build instead.
CATKIN_PACKAGE_PREFIX = ""
# Exported include directories (empty string => no entries).
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
# Catkin run dependencies as a space-separated string (none here).
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
# Libraries exported via pkg-config (none here).
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "rrbot_gazebo"
# Install-space prefix this context was generated for.
PROJECT_SPACE_DIR = "/home/neo/ros/gazebo_ws_1/install"
PROJECT_VERSION = "0.1.0"
| true | true |
f724b89596ee0599327fb7ccded8ba2067f2a7b5 | 597 | py | Python | cashflow/utils.py | wg-git/cashflow-contract | e08b175113e7bdcd0089dd7a0d0432a73a83a724 | [
"MIT"
] | 1 | 2018-04-05T13:14:26.000Z | 2018-04-05T13:14:26.000Z | cashflow/utils.py | wg-git/cashflow-contract | e08b175113e7bdcd0089dd7a0d0432a73a83a724 | [
"MIT"
] | null | null | null | cashflow/utils.py | wg-git/cashflow-contract | e08b175113e7bdcd0089dd7a0d0432a73a83a724 | [
"MIT"
] | 1 | 2018-07-23T15:05:05.000Z | 2018-07-23T15:05:05.000Z | from boa.blockchain.vm.Neo.Blockchain import GetHeight, GetHeader
from boa.blockchain.vm.Neo.Header import GetTimestamp, GetConsensusData
from boa.blockchain.vm.Neo.Runtime import Log
from boa.code.builtins import concat, list, range, take, substr
def blockTimeStamp():
    """Return the timestamp of the current (latest) block on the chain.

    Kept as simple sequential calls because the NEO boa compiler only
    accepts a restricted Python subset.
    """
    height = GetHeight()
    header = GetHeader(height)
    timestamp = GetTimestamp(header)
    return timestamp
def isInByteArray(haystack, needle):
    """Return True if ``needle`` occurs as an element of ``haystack``.

    Works on any iterable of comparable items (bytearray bytes, list
    entries). Written as an explicit loop rather than the ``in`` operator
    because the NEO boa compiler only supports a restricted Python subset.
    Fix: removed the dead ``else: n = 0`` branch, which assigned an unused
    local on every non-matching element.
    """
    if not len(haystack):
        # Empty input: nothing can match.
        return False
    for item in haystack:
        if item == needle:
            return True
    return False
| 22.111111 | 75 | 0.757119 | from boa.blockchain.vm.Neo.Blockchain import GetHeight, GetHeader
from boa.blockchain.vm.Neo.Header import GetTimestamp, GetConsensusData
from boa.blockchain.vm.Neo.Runtime import Log
from boa.code.builtins import concat, list, range, take, substr
def blockTimeStamp():
    """Return the timestamp of the current (latest) block on the chain.

    Kept as simple sequential calls because the NEO boa compiler only
    accepts a restricted Python subset.
    """
    height = GetHeight()
    header = GetHeader(height)
    timestamp = GetTimestamp(header)
    return timestamp
def isInByteArray(haystack, needle):
    """Return True if ``needle`` occurs as an element of ``haystack``.

    Works on any iterable of comparable items (bytearray bytes, list
    entries). Written as an explicit loop rather than the ``in`` operator
    because the NEO boa compiler only supports a restricted Python subset.
    Fix: removed the dead ``else: n = 0`` branch, which assigned an unused
    local on every non-matching element.
    """
    if not len(haystack):
        # Empty input: nothing can match.
        return False
    for item in haystack:
        if item == needle:
            return True
    return False
| true | true |
f724b8e8834118693a545cb670d7b52bbf2cbbeb | 209 | py | Python | PyCharm/3.py | LazarevaDarya/work5 | 77c29c3453e81995f36c176bb74a610313b882e0 | [
"MIT"
] | null | null | null | PyCharm/3.py | LazarevaDarya/work5 | 77c29c3453e81995f36c176bb74a610313b882e0 | [
"MIT"
] | null | null | null | PyCharm/3.py | LazarevaDarya/work5 | 77c29c3453e81995f36c176bb74a610313b882e0 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
if __name__ == '__main__':
    # Read an integer and print every proper divisor from 1 up to n/2.
    # Fixes: removed the dead `m = int` binding (shadowed the builtin and
    # was never used) and replaced `int(n / 2)` with `n // 2` — floor
    # division stays exact for large n, avoiding the float round-trip.
    n = int(input("Value of n? "))
    for i in range(1, n // 2 + 1):
        if n % i == 0:
            print(i)
| 14.928571 | 38 | 0.449761 |
if __name__ == '__main__':
    # Read an integer and print every proper divisor from 1 up to n/2.
    # Fixes: removed the dead `m = int` binding (shadowed the builtin and
    # was never used) and replaced `int(n / 2)` with `n // 2` — floor
    # division stays exact for large n, avoiding the float round-trip.
    n = int(input("Value of n? "))
    for i in range(1, n // 2 + 1):
        if n % i == 0:
            print(i)
| true | true |
f724b9955f815b8ade788558ec0d789cd6cf0ddc | 15,387 | py | Python | tests/automation_framework/src/utilities/submit_request_utility.py | anjalirx-intel/avalon | 5efd20612948a324b8a393bfe22872aeb8527097 | [
"Apache-2.0"
] | null | null | null | tests/automation_framework/src/utilities/submit_request_utility.py | anjalirx-intel/avalon | 5efd20612948a324b8a393bfe22872aeb8527097 | [
"Apache-2.0"
] | null | null | null | tests/automation_framework/src/utilities/submit_request_utility.py | anjalirx-intel/avalon | 5efd20612948a324b8a393bfe22872aeb8527097 | [
"Apache-2.0"
] | null | null | null | import logging
import json
import time
import os
import config.config as pconfig
import env
from avalon_sdk.connector.direct.jrpc.jrpc_worker_registry import \
JRPCWorkerRegistryImpl
from avalon_sdk.connector.direct.jrpc.jrpc_work_order import \
JRPCWorkOrderImpl
from avalon_sdk.worker.worker_details import \
WorkerType, WorkerStatus
from avalon_sdk.connector.direct.jrpc.jrpc_work_order_receipt \
import JRPCWorkOrderReceiptImpl
from avalon_sdk.connector.blockchains.fabric.fabric_worker_registry \
import FabricWorkerRegistryImpl
from avalon_sdk.connector.blockchains.fabric.fabric_work_order \
import FabricWorkOrderImpl
from avalon_sdk.connector.blockchains.ethereum.ethereum_worker_registry \
import EthereumWorkerRegistryImpl
from avalon_sdk.connector.blockchains.ethereum.ethereum_work_order \
import EthereumWorkOrderProxyImpl
import avalon_sdk.worker.worker_details as worker_details
logger = logging.getLogger(__name__)
TCFHOME = os.environ.get("TCF_HOME", "../../")
def config_file_read():
    """Load the TCF configuration and override the JSON-RPC endpoint.

    Parses every configuration file listed in ``env.conffiles`` from the
    search paths in ``env.confpaths``, then replaces the configured
    ``json_rpc_uri`` with the SDK endpoint supplied by the test
    environment so all requests target the listener under test.

    Returns:
        dict: Parsed configuration with ``config["tcf"]["json_rpc_uri"]``
        set to ``env.uri_client_sdk``.
    """
    config = pconfig.parse_configuration_files(
        env.conffiles, env.confpaths)
    # Fix: override before logging — the original logged the stale value
    # from the config file and then immediately overwrote it, so the log
    # misreported the URI actually used.
    config["tcf"]["json_rpc_uri"] = env.uri_client_sdk
    logger.info(" URI client %s \n", config["tcf"]["json_rpc_uri"])
    return config
def _create_worker_registry_instance(blockchain_type, config):
    """Return a worker-registry connector for the configured model.

    Proxy mode selects the blockchain-specific implementation for
    ``'fabric'`` or ``'ethereum'``; every other combination (including
    direct mode) falls back to the JSON-RPC implementation.
    """
    if env.proxy_mode:
        if blockchain_type == 'fabric':
            return FabricWorkerRegistryImpl(config)
        if blockchain_type == 'ethereum':
            return EthereumWorkerRegistryImpl(config)
    logger.info("Direct SDK code path\n")
    return JRPCWorkerRegistryImpl(config)
def _create_work_order_instance(blockchain_type, config):
    """Return a work-order connector for the configured model.

    Proxy mode selects a blockchain-specific implementation; every other
    combination falls back to the direct JSON-RPC connector.
    """
    if env.proxy_mode:
        if blockchain_type == 'fabric':
            return FabricWorkOrderImpl(config)
        if blockchain_type == 'ethereum':
            return EthereumWorkOrderProxyImpl(config)
    logger.info("Direct SDK code path\n")
    return JRPCWorkOrderImpl(config)
def _create_work_order_receipt_instance(blockchain_type, config):
    """Return a work-order-receipt connector, or None where unsupported.

    Receipts are not implemented for either proxy-mode blockchain, so
    both proxy branches deliberately return None; only the direct model
    gets a real JSON-RPC connector.
    """
    if env.proxy_mode:
        if blockchain_type == 'fabric':
            return None
        if blockchain_type == 'ethereum':
            # TODO need to implement
            return None
    logger.info("Direct SDK code path\n")
    return JRPCWorkOrderReceiptImpl(config)
def submit_request_listener(
        uri_client, input_json_str, output_json_file_name):
    """Submit a JSON-RPC request through the listener (raw HTTP) path.

    The request and the final response are dumped to timestamped JSON
    files under ./results/ for post-run inspection. For
    "WorkOrderGetResult" requests the endpoint is polled until a result
    arrives, a terminal error is returned, or a ~23s budget is exhausted.

    Parameters:
        uri_client: HTTP client exposing _postmsg(payload).
        input_json_str: request as a dict; serialized to a JSON string
            before being posted.
        output_json_file_name: base name for the request/response dumps.

    Returns the last JSON-RPC response received.
    """
    logger.info("Listener code path\n")
    req_time = time.strftime("%Y%m%d_%H%M%S")
    request_method = input_json_str["method"]
    input_json_str = json.dumps(input_json_str)
    # write request to file
    signed_input_file = ('./results/' + output_json_file_name + '_' + req_time
                         + '_request.json')
    with open(signed_input_file, 'w') as req_file:
        # NOTE(review): input_json_str is already a JSON string at this
        # point, so the file contains a doubly-encoded (quoted/escaped)
        # copy of the request.
        req_file.write(json.dumps(input_json_str, ensure_ascii=False))
    logger.info("in submit listener %s", input_json_str)
    if request_method == "WorkOrderGetResult":
        logger.info("- Validating WorkOrderGetResult Response-")
        response = {}
        response_timeout_start = time.time()
        # (6000/3600 + 6) * 3 == 23 seconds of polling budget.
        response_timeout_multiplier = ((6000 / 3600) + 6) * 3
        # Poll until "result" appears, a non-retriable error shows up,
        # or the time budget runs out.
        while "result" not in response:
            if "error" in response:
                if response["error"]["code"] != 5:
                    # Error code 5 means "result not yet available";
                    # any other code is terminal.
                    logger.info('WorkOrderGetResult - '
                                'Response received with error code. ')
                    err_cd = 1  # NOTE(review): assigned but never used
                    break
            response_timeout_end = time.time()
            if ((response_timeout_end - response_timeout_start) >
                    response_timeout_multiplier):
                logger.info('ERROR: WorkOrderGetResult response is not \
                received within expected time.')
                break
            response = uri_client._postmsg(input_json_str)
    else:
        logger.info('**********Received Request*********\n%s\n', input_json_str)
        response = uri_client._postmsg(input_json_str)
    logger.info('**********Received Response*********\n%s\n', response)
    # write response to file
    response_output_file = ('./results/' + output_json_file_name + '_'
                            + req_time + '_response.json')
    with open(response_output_file, 'w') as resp_file:
        resp_file.write(json.dumps(response, ensure_ascii=False))
    return response
def workorder_submit_sdk(wo_params, input_json_obj=None):
    """Submit a work order through the work-order SDK connector.

    Parameters:
        wo_params: work-order parameter object exposing
            get_work_order_id() / get_worker_id() / get_requester_id() /
            to_string() / to_jrpc_string().
        input_json_obj: optional JSON-RPC request dict; its "id" is
            reused as the request id, otherwise 3 is used.

    Returns the JSON-RPC response. In proxy mode a non-dict (enum)
    connector result is normalized into
    {"error": {"code": ...}, "workOrderId": ...}, where enum value 0
    maps to the "result pending" JSON-RPC code 5.
    """
    logger.info("WorkOrderSubmit SDK code path\n")
    if input_json_obj is None:
        req_id = 3
    else:
        req_id = input_json_obj["id"]
    config = config_file_read()
    work_order = _create_work_order_instance(env.blockchain_type, config)
    logger.info(" work order id %s \n", wo_params.get_work_order_id())
    # Fix: the worker id was previously logged twice; log it once.
    logger.info(" worker id %s \n", wo_params.get_worker_id())
    logger.info(" Requester ID %s \n", wo_params.get_requester_id())
    logger.info(" To string %s \n", wo_params.to_string())
    logger.info("Work order submit request : %s, \n \n ",
                wo_params.to_jrpc_string(req_id))
    response = work_order.work_order_submit(
        wo_params.get_work_order_id(),
        wo_params.get_worker_id(),
        wo_params.get_requester_id(),
        wo_params.to_string(),
        id=req_id
    )
    if env.proxy_mode and (type(response) != dict):
        # Proxy connectors return an enum status instead of a JSON-RPC
        # dict; rewrap it so callers can treat both modes uniformly.
        if response.value == 0:
            response = {"error": {"code": 5}}
        else:
            response = {"error": {"code": response.value}}
        response["workOrderId"] = wo_params.get_work_order_id()
    logger.info('**********Received Response*********\n%s\n', response)
    return response
def worker_lookup_sdk(worker_type, input_json=None):
    """Look up registered workers of a given type via the SDK connector.

    Parameters:
        worker_type: 'SGX', 'MPC' or 'ZK' (mapped through worker_dict to
            a WorkerType); any other value is passed through unchanged.
        input_json: optional JSON-RPC request dict; its "id" is reused as
            the request id, otherwise 3 is used.

    Returns the worker-lookup response from the registry connector.
    """
    logger.info("WorkerLookUp SDK code path\n")
    if input_json is None:
        jrpc_req_id = 3
    else:
        jrpc_req_id = input_json["id"]
    config = config_file_read()
    worker_dict = {'SGX': WorkerType.TEE_SGX,
                   'MPC': WorkerType.MPC, 'ZK': WorkerType.ZK}
    worker_registry = _create_worker_registry_instance(env.blockchain_type, config)
    if env.blockchain_type == "ethereum":
        # NOTE(review): on ethereum every recognized type string is mapped
        # to TEE_SGX rather than its worker_dict entry — confirm this is
        # intentional (MPC/ZK lookups would silently become SGX lookups).
        if worker_type in worker_dict.keys():
            worker = WorkerType.TEE_SGX
        else:
            worker = worker_type
        worker_lookup_response = worker_registry.worker_lookup(
            worker,
            config["WorkerConfig"]["OrganizationId"],
            config["WorkerConfig"]["ApplicationTypeId"],
            jrpc_req_id
        )
    else:
        worker_lookup_response = worker_registry.worker_lookup(
            worker_type=worker_dict.get(worker_type, worker_type), id=jrpc_req_id)
    logger.info("\n Worker lookup response: {}\n".format(
        json.dumps(worker_lookup_response, indent=4)
    ))
    return worker_lookup_response
def worker_register_sdk(register_params, input_json):
    """Register a worker through the worker-registry SDK connector.

    Bug fix: the original read ``input_json["id"]`` unconditionally
    before checking ``input_json is None``, so passing None raised
    TypeError and the documented default request id (3) was unreachable.

    Parameters:
        register_params: dict with "worker_id", "workerType" ('SGX'/'MPC'/
            'ZK'), "organization_id", "application_type_id" and "details".
        input_json: optional JSON-RPC request dict supplying the request
            id; defaults to 3 when None.

    Returns the worker-register response from the registry connector.
    """
    logger.info("WorkerRegister SDK code path\n")
    # Derive the request id only after the None check; reading
    # input_json["id"] first crashed on a None argument.
    if input_json is None:
        jrpc_req_id = 3
    else:
        jrpc_req_id = input_json["id"]
    worker_dict = {'SGX': WorkerType.TEE_SGX,
                   'MPC': WorkerType.MPC, 'ZK': WorkerType.ZK}
    config = config_file_read()
    worker_registry = _create_worker_registry_instance(env.blockchain_type, config)
    if env.proxy_mode and (env.blockchain_type == "ethereum"):
        # The ethereum proxy connector does not accept a request id.
        worker_register_result = worker_registry.worker_register(
            register_params["worker_id"],
            worker_dict[register_params["workerType"]],
            register_params["organization_id"],
            register_params["application_type_id"],
            json.dumps(register_params["details"]))
    else:
        worker_register_result = worker_registry.worker_register(
            register_params["worker_id"],
            worker_dict[register_params["workerType"]],
            register_params["organization_id"],
            register_params["application_type_id"],
            json.dumps(register_params["details"]), jrpc_req_id)
    logger.info("\n Worker register response: {}\n".format(
        json.dumps(worker_register_result, indent=4)))
    return worker_register_result
def worker_setstatus_sdk(set_status_params, input_json):
    """Set a worker's status through the worker-registry SDK connector.

    Parameters:
        set_status_params: dict with "worker_id" and a numeric "status"
            (1=ACTIVE, 2=OFF_LINE, 3=DECOMMISSIONED, 4=COMPROMISED).
        input_json: optional JSON-RPC request dict supplying the request
            id; defaults to 3 when None.

    Returns the set-status response; in proxy mode the connector's enum
    result is wrapped into a JSON-RPC style {"error": {...}} dict.
    """
    logger.info("WorkerSetStatus SDK code path\n")
    logger.info("Worker status params %s \n", set_status_params)
    if input_json is None:
        jrpc_req_id = 3
    else:
        jrpc_req_id = input_json["id"]
    status_dict = {1: WorkerStatus.ACTIVE, 2: WorkerStatus.OFF_LINE,
                   3: WorkerStatus.DECOMMISSIONED,
                   4: WorkerStatus.COMPROMISED}
    config = config_file_read()
    worker_registry = _create_worker_registry_instance(env.blockchain_type, config)
    if env.proxy_mode and (env.blockchain_type == "ethereum"):
        # The ethereum proxy connector does not accept a request id.
        worker_setstatus_result = worker_registry.worker_set_status(
            set_status_params["worker_id"],
            status_dict[set_status_params["status"]])
    else:
        worker_setstatus_result = worker_registry.worker_set_status(
            set_status_params["worker_id"],
            status_dict[set_status_params["status"]], jrpc_req_id)
    if env.proxy_mode:
        # Normalize the proxy enum result into a JSON-RPC error envelope.
        result = worker_setstatus_result
        worker_setstatus_result = {}
        worker_setstatus_result["error"] = {"code" : result.value, "message" : ""}
    logger.info("\n Worker setstatus response: {}\n".format(worker_setstatus_result))
    return worker_setstatus_result
def worker_retrieve_sdk(worker_id, input_json=None):
    """Retrieve a worker's registration details through the SDK.

    Parameters:
        worker_id: identifier of the worker to look up.
        input_json: optional JSON-RPC request dict supplying the request
            id; defaults to 11 when None.

    Returns a JSON-RPC style dict: "result" plus "workerId" on success,
    or an "error" entry when the worker cannot be retrieved.
    """
    logger.info("WorkerRetrieve SDK code path\n")
    worker_obj = worker_details.SGXWorkerDetails()
    if input_json is None:
        jrpc_req_id = 11
    else:
        jrpc_req_id = input_json["id"]
    config = config_file_read()
    worker_registry = _create_worker_registry_instance(env.blockchain_type, config)
    worker_retrieve_result = worker_registry.worker_retrieve(worker_id, jrpc_req_id)
    if env.proxy_mode:
        # Proxy connectors return a positional sequence; indices 1-4 are
        # reshaped here into the JSON-RPC "result" layout used by direct
        # mode (index 4 holds the JSON-encoded worker details).
        if worker_retrieve_result is None:
            worker_retrieve_result = {"error": {"code": '', "message": "Worker Id not found"}}
        else:
            response = worker_retrieve_result
            worker_obj.load_worker(json.loads(response[4]))
            worker_retrieve_result = {}
            result = {"workerType": response[1],
                      "organizationId": response[2],
                      "applicationTypeId": response[3],
                      "details": json.loads(response[4])}
            worker_retrieve_result["result"] = result
    if "error" in worker_retrieve_result:
        logger.error("Unable to retrieve worker details\n")
        return worker_retrieve_result
    logger.info("\n Worker retrieve response: {}\n".format(worker_retrieve_result))
    worker_obj.worker_id = worker_id
    worker_retrieve_result["workerId"] = worker_id
    logger.info("\n Worker ID\n%s\n", worker_id)
    return worker_retrieve_result
def worker_update_sdk(update_params, input_json=None):
    """Update a worker's details through the worker-registry SDK.

    Cleanup: removed an unused ``SGXWorkerDetails`` instantiation and a
    commented-out parsing line left over from earlier revisions.

    Parameters:
        update_params: dict with "worker_id" and a JSON-serializable
            "details" payload.
        input_json: optional JSON-RPC request dict supplying the request
            id; defaults to 11 when None.

    Returns the update response; in proxy mode a non-dict (enum) result
    is wrapped into a JSON-RPC style {"error": {...}} dict.
    """
    logger.info("WorkerUpdate SDK code path\n")
    logger.info("Worker update params %s \n", update_params)
    if input_json is None:
        jrpc_req_id = 11
    else:
        jrpc_req_id = input_json["id"]
    config = config_file_read()
    worker_registry = _create_worker_registry_instance(env.blockchain_type, config)
    if env.proxy_mode and (env.blockchain_type == "ethereum"):
        # The ethereum proxy connector does not accept a request id.
        worker_update_result = worker_registry.worker_update(
            update_params["worker_id"],
            json.dumps(update_params["details"]))
    else:
        worker_update_result = worker_registry.worker_update(
            update_params["worker_id"],
            json.dumps(update_params["details"]), jrpc_req_id)
    if env.proxy_mode and (type(worker_update_result) != dict):
        # Normalize the proxy enum result into a JSON-RPC error envelope.
        response = worker_update_result.value
        worker_update_result = {"error": {"code": response, "message" : ""}}
    logger.info("\n Worker update response: {}\n".format(worker_update_result))
    return worker_update_result
def workorder_receiptcreate_sdk(wo_create_receipt, input_json):
    """Create a work-order receipt through the receipt SDK connector.

    Parameters:
        wo_create_receipt: dict carrying the receipt fields (workOrderId,
            workerServiceId, workerId, requesterId, receiptCreateStatus,
            workOrderRequestHash, requesterGeneratedNonce,
            requesterSignature, signatureRules, receiptVerificationKey).
        input_json: JSON-RPC request dict supplying the request id.

    Returns the receipt-create response from the connector.
    """
    logger.info("WorkerReceiptCreate SDK code path\n")
    jrpc_req_id = input_json["id"]
    config = config_file_read()
    # Create receipt
    # NOTE(review): _create_work_order_receipt_instance returns None in
    # proxy mode, which would make the call below raise AttributeError —
    # presumably this helper is only exercised in direct mode; confirm.
    wo_receipt = _create_work_order_receipt_instance(env.blockchain_type, config)
    # Submit work order create receipt jrpc request
    wo_receipt_resp = wo_receipt.work_order_receipt_create(
        wo_create_receipt["workOrderId"],
        wo_create_receipt["workerServiceId"],
        wo_create_receipt["workerId"],
        wo_create_receipt["requesterId"],
        wo_create_receipt["receiptCreateStatus"],
        wo_create_receipt["workOrderRequestHash"],
        wo_create_receipt["requesterGeneratedNonce"],
        wo_create_receipt["requesterSignature"],
        wo_create_receipt["signatureRules"],
        wo_create_receipt["receiptVerificationKey"],
        jrpc_req_id
    )
    logger.info("Work order create receipt response : {} \n \n ".format(
        wo_receipt_resp
    ))
    return wo_receipt_resp
def workorder_receiptretrieve_sdk(workorderId, input_json):
    """Retrieve a work-order receipt and then its latest update via the SDK.

    Parameters:
        workorderId: work-order identifier whose receipt is fetched.
        input_json: JSON-RPC request dict supplying the request id.

    Returns the receipt-update-retrieve response (the last update), not
    the initial receipt-retrieve response, which is only logged.
    """
    logger.info("ReceiptRetrieve SDK code path\n")
    jrpc_req_id = input_json["id"]
    config = config_file_read()
    # Create receipt
    wo_receipt = _create_work_order_receipt_instance(env.blockchain_type, config)
    wo_receipt_resp = wo_receipt.work_order_receipt_retrieve(
        workorderId, jrpc_req_id)
    logger.info("Work order retrieve receipt response : {} \n \n ".format(
        wo_receipt_resp
    ))
    # Retrieve last update to receipt by passing 0xFFFFFFFF
    # NOTE(review): 1 << 32 equals 0x100000000, one more than the
    # 0xFFFFFFFF mentioned above — confirm which sentinel the API expects.
    jrpc_req_id += 1
    receipt_update_retrieve = \
        wo_receipt.work_order_receipt_update_retrieve(
            workorderId,
            None,
            1 << 32,
            id=jrpc_req_id)
    logger.info("\n Last update to receipt receipt is:\n {}".format(
        json.dumps(receipt_update_retrieve, indent=4)
    ))
    return receipt_update_retrieve
def workorder_getresult_sdk(workorderId, input_json):
    """Fetch the result of a submitted work order through the SDK.

    Returns the JSON-RPC response; in proxy mode a None connector result
    is normalized to {"error": {"code": -1}}.
    """
    logger.info("WorkOderGetResult SDK code path\n")
    req_id = input_json["id"]
    wo_impl = _create_work_order_instance(env.blockchain_type, config_file_read())
    logger.info("----- Validating WorkOrderGetResult Response ------")
    result = wo_impl.work_order_get_result(workorderId, req_id)
    logger.info("****** WorkOrderGetResult Received Response*****\n%s\n", result)
    if env.proxy_mode and result is None:
        # Proxy connectors signal "no result" with None; normalize it.
        result = {"error": {"code": -1}}
    return result
def workorder_receiptlookup_sdk(requesterId, input_json):
    """Look up work-order receipts for a requester through the SDK."""
    logger.info("ReceiptRetrieve SDK code path\n")
    req_id = input_json["id"]
    receipt_impl = _create_work_order_receipt_instance(
        env.blockchain_type, config_file_read())
    lookup_resp = receipt_impl.work_order_receipt_lookup(
        requester_id=requesterId, id=req_id)
    logger.info("Work order receipt lookup response : {} \n \n ".format(
        lookup_resp
    ))
    return lookup_resp
| 39.966234 | 94 | 0.682394 | import logging
import json
import time
import os
import config.config as pconfig
import env
from avalon_sdk.connector.direct.jrpc.jrpc_worker_registry import \
JRPCWorkerRegistryImpl
from avalon_sdk.connector.direct.jrpc.jrpc_work_order import \
JRPCWorkOrderImpl
from avalon_sdk.worker.worker_details import \
WorkerType, WorkerStatus
from avalon_sdk.connector.direct.jrpc.jrpc_work_order_receipt \
import JRPCWorkOrderReceiptImpl
from avalon_sdk.connector.blockchains.fabric.fabric_worker_registry \
import FabricWorkerRegistryImpl
from avalon_sdk.connector.blockchains.fabric.fabric_work_order \
import FabricWorkOrderImpl
from avalon_sdk.connector.blockchains.ethereum.ethereum_worker_registry \
import EthereumWorkerRegistryImpl
from avalon_sdk.connector.blockchains.ethereum.ethereum_work_order \
import EthereumWorkOrderProxyImpl
import avalon_sdk.worker.worker_details as worker_details
logger = logging.getLogger(__name__)
TCFHOME = os.environ.get("TCF_HOME", "../../")
def config_file_read():
config = pconfig.parse_configuration_files(
env.conffiles, env.confpaths)
logger.info(" URI client %s \n", config["tcf"]["json_rpc_uri"])
config["tcf"]["json_rpc_uri"] = env.uri_client_sdk
return config
def _create_worker_registry_instance(blockchain_type, config):
if env.proxy_mode and blockchain_type == 'fabric':
return FabricWorkerRegistryImpl(config)
elif env.proxy_mode and blockchain_type == 'ethereum':
return EthereumWorkerRegistryImpl(config)
else:
logger.info("Direct SDK code path\n")
return JRPCWorkerRegistryImpl(config)
def _create_work_order_instance(blockchain_type, config):
if env.proxy_mode and blockchain_type == 'fabric':
return FabricWorkOrderImpl(config)
elif env.proxy_mode and blockchain_type == 'ethereum':
return EthereumWorkOrderProxyImpl(config)
else:
logger.info("Direct SDK code path\n")
return JRPCWorkOrderImpl(config)
def _create_work_order_receipt_instance(blockchain_type, config):
if env.proxy_mode and blockchain_type == 'fabric':
return None
elif env.proxy_mode and blockchain_type == 'ethereum':
return None
else:
logger.info("Direct SDK code path\n")
return JRPCWorkOrderReceiptImpl(config)
def submit_request_listener(
uri_client, input_json_str, output_json_file_name):
logger.info("Listener code path\n")
req_time = time.strftime("%Y%m%d_%H%M%S")
request_method = input_json_str["method"]
input_json_str = json.dumps(input_json_str)
signed_input_file = ('./results/' + output_json_file_name + '_' + req_time
+ '_request.json')
with open(signed_input_file, 'w') as req_file:
req_file.write(json.dumps(input_json_str, ensure_ascii=False))
logger.info("in submit listener %s", input_json_str)
if request_method == "WorkOrderGetResult":
logger.info("- Validating WorkOrderGetResult Response-")
response = {}
response_timeout_start = time.time()
response_timeout_multiplier = ((6000 / 3600) + 6) * 3
while "result" not in response:
if "error" in response:
if response["error"]["code"] != 5:
logger.info('WorkOrderGetResult - '
'Response received with error code. ')
err_cd = 1
break
response_timeout_end = time.time()
if ((response_timeout_end - response_timeout_start) >
response_timeout_multiplier):
logger.info('ERROR: WorkOrderGetResult response is not \
received within expected time.')
break
response = uri_client._postmsg(input_json_str)
else:
logger.info('**********Received Request*********\n%s\n', input_json_str)
response = uri_client._postmsg(input_json_str)
logger.info('**********Received Response*********\n%s\n', response)
response_output_file = ('./results/' + output_json_file_name + '_'
+ req_time + '_response.json')
with open(response_output_file, 'w') as resp_file:
resp_file.write(json.dumps(response, ensure_ascii=False))
return response
def workorder_submit_sdk(wo_params, input_json_obj=None):
logger.info("WorkOrderSubmit SDK code path\n")
if input_json_obj is None:
req_id = 3
else:
req_id = input_json_obj["id"]
config = config_file_read()
work_order = _create_work_order_instance(env.blockchain_type, config)
logger.info(" work order id %s \n", wo_params.get_work_order_id())
logger.info(" worker id %s \n", wo_params.get_worker_id())
logger.info(" Requester ID %s \n", wo_params.get_requester_id())
logger.info(" To string %s \n", wo_params.to_string())
logger.info(" worker id %s \n", wo_params.get_worker_id())
logger.info("Work order submit request : %s, \n \n ",
wo_params.to_jrpc_string(req_id))
response = work_order.work_order_submit(
wo_params.get_work_order_id(),
wo_params.get_worker_id(),
wo_params.get_requester_id(),
wo_params.to_string(),
id=req_id
)
if env.proxy_mode and (type(response) != dict):
if response.value == 0:
response = {"error": {"code": 5}}
else:
response = {"error": {"code": response.value}}
response["workOrderId"] = wo_params.get_work_order_id()
logger.info('**********Received Response*********\n%s\n', response)
return response
def worker_lookup_sdk(worker_type, input_json=None):
logger.info("WorkerLookUp SDK code path\n")
if input_json is None:
jrpc_req_id = 3
else:
jrpc_req_id = input_json["id"]
config = config_file_read()
worker_dict = {'SGX': WorkerType.TEE_SGX,
'MPC': WorkerType.MPC, 'ZK': WorkerType.ZK}
worker_registry = _create_worker_registry_instance(env.blockchain_type, config)
if env.blockchain_type == "ethereum":
if worker_type in worker_dict.keys():
worker = WorkerType.TEE_SGX
else:
worker = worker_type
worker_lookup_response = worker_registry.worker_lookup(
worker,
config["WorkerConfig"]["OrganizationId"],
config["WorkerConfig"]["ApplicationTypeId"],
jrpc_req_id
)
else:
worker_lookup_response = worker_registry.worker_lookup(
worker_type=worker_dict.get(worker_type, worker_type), id=jrpc_req_id)
logger.info("\n Worker lookup response: {}\n".format(
json.dumps(worker_lookup_response, indent=4)
))
return worker_lookup_response
def worker_register_sdk(register_params, input_json):
    """Register a worker through the worker-registry SDK connector.

    Bug fix: the original read ``input_json["id"]`` unconditionally
    before checking ``input_json is None``, so passing None raised
    TypeError and the default request id (3) was unreachable.
    """
    logger.info("WorkerRegister SDK code path\n")
    # Derive the request id only after the None check; reading
    # input_json["id"] first crashed on a None argument.
    if input_json is None:
        jrpc_req_id = 3
    else:
        jrpc_req_id = input_json["id"]
    worker_dict = {'SGX': WorkerType.TEE_SGX,
                   'MPC': WorkerType.MPC, 'ZK': WorkerType.ZK}
    config = config_file_read()
    worker_registry = _create_worker_registry_instance(env.blockchain_type, config)
    if env.proxy_mode and (env.blockchain_type == "ethereum"):
        worker_register_result = worker_registry.worker_register(
            register_params["worker_id"],
            worker_dict[register_params["workerType"]],
            register_params["organization_id"],
            register_params["application_type_id"],
            json.dumps(register_params["details"]))
    else:
        worker_register_result = worker_registry.worker_register(
            register_params["worker_id"],
            worker_dict[register_params["workerType"]],
            register_params["organization_id"],
            register_params["application_type_id"],
            json.dumps(register_params["details"]), jrpc_req_id)
    logger.info("\n Worker register response: {}\n".format(
        json.dumps(worker_register_result, indent=4)))
    return worker_register_result
def worker_setstatus_sdk(set_status_params, input_json):
logger.info("WorkerSetStatus SDK code path\n")
logger.info("Worker status params %s \n", set_status_params)
if input_json is None:
jrpc_req_id = 3
else:
jrpc_req_id = input_json["id"]
status_dict = {1: WorkerStatus.ACTIVE, 2: WorkerStatus.OFF_LINE,
3: WorkerStatus.DECOMMISSIONED,
4: WorkerStatus.COMPROMISED}
config = config_file_read()
worker_registry = _create_worker_registry_instance(env.blockchain_type, config)
if env.proxy_mode and (env.blockchain_type == "ethereum"):
worker_setstatus_result = worker_registry.worker_set_status(
set_status_params["worker_id"],
status_dict[set_status_params["status"]])
else:
worker_setstatus_result = worker_registry.worker_set_status(
set_status_params["worker_id"],
status_dict[set_status_params["status"]], jrpc_req_id)
if env.proxy_mode:
result = worker_setstatus_result
worker_setstatus_result = {}
worker_setstatus_result["error"] = {"code" : result.value, "message" : ""}
logger.info("\n Worker setstatus response: {}\n".format(worker_setstatus_result))
return worker_setstatus_result
def worker_retrieve_sdk(worker_id, input_json=None):
logger.info("WorkerRetrieve SDK code path\n")
worker_obj = worker_details.SGXWorkerDetails()
if input_json is None:
jrpc_req_id = 11
else:
jrpc_req_id = input_json["id"]
config = config_file_read()
worker_registry = _create_worker_registry_instance(env.blockchain_type, config)
worker_retrieve_result = worker_registry.worker_retrieve(worker_id, jrpc_req_id)
if env.proxy_mode:
if worker_retrieve_result is None:
worker_retrieve_result = {"error": {"code": '', "message": "Worker Id not found"}}
else:
response = worker_retrieve_result
worker_obj.load_worker(json.loads(response[4]))
worker_retrieve_result = {}
result = {"workerType": response[1],
"organizationId": response[2],
"applicationTypeId": response[3],
"details": json.loads(response[4])}
worker_retrieve_result["result"] = result
if "error" in worker_retrieve_result:
logger.error("Unable to retrieve worker details\n")
return worker_retrieve_result
logger.info("\n Worker retrieve response: {}\n".format(worker_retrieve_result))
worker_obj.worker_id = worker_id
worker_retrieve_result["workerId"] = worker_id
logger.info("\n Worker ID\n%s\n", worker_id)
return worker_retrieve_result
def worker_update_sdk(update_params, input_json=None):
logger.info("WorkerUpdate SDK code path\n")
logger.info("Worker update params %s \n", update_params)
worker_obj = worker_details.SGXWorkerDetails()
if input_json is None:
jrpc_req_id = 11
else:
jrpc_req_id = input_json["id"]
config = config_file_read()
worker_registry = _create_worker_registry_instance(env.blockchain_type, config)
if env.proxy_mode and (env.blockchain_type == "ethereum"):
worker_update_result = worker_registry.worker_update(
update_params["worker_id"],
json.dumps(update_params["details"]))
else:
worker_update_result = worker_registry.worker_update(
update_params["worker_id"],
json.dumps(update_params["details"]), jrpc_req_id)
if env.proxy_mode and (type(worker_update_result) != dict):
response = worker_update_result.value
worker_update_result = {"error": {"code": response, "message" : ""}}
logger.info("\n Worker update response: {}\n".format(worker_update_result))
return worker_update_result
def workorder_receiptcreate_sdk(wo_create_receipt, input_json):
logger.info("WorkerReceiptCreate SDK code path\n")
jrpc_req_id = input_json["id"]
config = config_file_read()
wo_receipt = _create_work_order_receipt_instance(env.blockchain_type, config)
wo_receipt_resp = wo_receipt.work_order_receipt_create(
wo_create_receipt["workOrderId"],
wo_create_receipt["workerServiceId"],
wo_create_receipt["workerId"],
wo_create_receipt["requesterId"],
wo_create_receipt["receiptCreateStatus"],
wo_create_receipt["workOrderRequestHash"],
wo_create_receipt["requesterGeneratedNonce"],
wo_create_receipt["requesterSignature"],
wo_create_receipt["signatureRules"],
wo_create_receipt["receiptVerificationKey"],
jrpc_req_id
)
logger.info("Work order create receipt response : {} \n \n ".format(
wo_receipt_resp
))
return wo_receipt_resp
def workorder_receiptretrieve_sdk(workorderId, input_json):
logger.info("ReceiptRetrieve SDK code path\n")
jrpc_req_id = input_json["id"]
config = config_file_read()
wo_receipt = _create_work_order_receipt_instance(env.blockchain_type, config)
wo_receipt_resp = wo_receipt.work_order_receipt_retrieve(
workorderId, jrpc_req_id)
logger.info("Work order retrieve receipt response : {} \n \n ".format(
wo_receipt_resp
))
jrpc_req_id += 1
receipt_update_retrieve = \
wo_receipt.work_order_receipt_update_retrieve(
workorderId,
None,
1 << 32,
id=jrpc_req_id)
logger.info("\n Last update to receipt receipt is:\n {}".format(
json.dumps(receipt_update_retrieve, indent=4)
))
return receipt_update_retrieve
def workorder_getresult_sdk(workorderId, input_json):
logger.info("WorkOderGetResult SDK code path\n")
jrpc_req_id = input_json["id"]
config = config_file_read()
work_order = _create_work_order_instance(env.blockchain_type, config)
logger.info("----- Validating WorkOrderGetResult Response ------")
get_result_res = work_order.work_order_get_result(
workorderId, jrpc_req_id)
logger.info("****** WorkOrderGetResult Received Response*****\n%s\n", get_result_res)
if env.proxy_mode and (get_result_res is None):
get_result_res = {"error": {"code": -1}}
return get_result_res
def workorder_receiptlookup_sdk(requesterId, input_json):
logger.info("ReceiptRetrieve SDK code path\n")
jrpc_req_id = input_json["id"]
config = config_file_read()
wo_receipt = _create_work_order_receipt_instance(env.blockchain_type, config)
wo_receipt_resp = wo_receipt.work_order_receipt_lookup(
requester_id=requesterId, id=jrpc_req_id)
logger.info("Work order receipt lookup response : {} \n \n ".format(
wo_receipt_resp
))
return wo_receipt_resp
| true | true |
f724badd51da8585ae02ba905411f8a5bc0b42c6 | 7,728 | py | Python | pandas/tests/indexes/timedeltas/test_timedelta.py | hkennyv/pandas | 31875eb3d8a56f359c2f529f86b867572d5dfeb1 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause"
] | 1 | 2020-04-26T22:11:21.000Z | 2020-04-26T22:11:21.000Z | pandas/tests/indexes/timedeltas/test_timedelta.py | hkennyv/pandas | 31875eb3d8a56f359c2f529f86b867572d5dfeb1 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null | pandas/tests/indexes/timedeltas/test_timedelta.py | hkennyv/pandas | 31875eb3d8a56f359c2f529f86b867572d5dfeb1 | [
"PSF-2.0",
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"MIT",
"MIT-0",
"ECL-2.0",
"BSD-3-Clause"
] | 1 | 2020-06-19T11:52:05.000Z | 2020-06-19T11:52:05.000Z | from datetime import timedelta
import numpy as np
import pytest
import pandas as pd
from pandas import (
DataFrame,
Index,
Int64Index,
Series,
Timedelta,
TimedeltaIndex,
array,
date_range,
timedelta_range,
)
import pandas._testing as tm
from ..datetimelike import DatetimeLike
randn = np.random.randn
class TestTimedeltaIndex(DatetimeLike):
    """Index-level tests for TimedeltaIndex: construction hooks for the
    shared DatetimeLike suite, plus sorting, factorization, searchsorted,
    field accessors and timedelta64 frequency conversion."""

    _holder = TimedeltaIndex

    @pytest.fixture
    def indices(self):
        # Fixture consumed by the shared DatetimeLike base-class tests.
        return tm.makeTimedeltaIndex(10)

    def create_index(self) -> TimedeltaIndex:
        # Base-class hook: a small daily-frequency index shifted by 1 hour.
        index = pd.to_timedelta(range(5), unit="d")._with_freq("infer")
        assert index.freq == "D"
        return index + pd.offsets.Hour(1)

    def test_numeric_compat(self):
        # Dummy method to override super's version; this test is now done
        # in test_arithmetic.py
        pass

    def test_shift(self):
        pass  # this is handled in test_arithmetic.py

    def test_pickle_compat_construction(self):
        # TimedeltaIndex can be constructed without arguments, so the
        # base-class pickle-compat check does not apply.
        pass

    def test_isin(self):
        index = tm.makeTimedeltaIndex(4)
        result = index.isin(index)
        assert result.all()

        result = index.isin(list(index))
        assert result.all()

        tm.assert_almost_equal(
            index.isin([index[2], 5]), np.array([False, False, True, False])
        )

    def test_factorize(self):
        idx1 = TimedeltaIndex(["1 day", "1 day", "2 day", "2 day", "3 day", "3 day"])

        exp_arr = np.array([0, 0, 1, 1, 2, 2], dtype=np.intp)
        exp_idx = TimedeltaIndex(["1 day", "2 day", "3 day"])

        arr, idx = idx1.factorize()
        tm.assert_numpy_array_equal(arr, exp_arr)
        tm.assert_index_equal(idx, exp_idx)

        arr, idx = idx1.factorize(sort=True)
        tm.assert_numpy_array_equal(arr, exp_arr)
        tm.assert_index_equal(idx, exp_idx)

        # freq must be preserved
        idx3 = timedelta_range("1 day", periods=4, freq="s")
        exp_arr = np.array([0, 1, 2, 3], dtype=np.intp)
        arr, idx = idx3.factorize()
        tm.assert_numpy_array_equal(arr, exp_arr)
        tm.assert_index_equal(idx, idx3)

    def test_sort_values(self):
        idx = TimedeltaIndex(["4d", "1d", "2d"])

        ordered = idx.sort_values()
        assert ordered.is_monotonic

        ordered = idx.sort_values(ascending=False)
        assert ordered[::-1].is_monotonic

        # return_indexer=True also yields the argsort-style indexer.
        ordered, dexer = idx.sort_values(return_indexer=True)
        assert ordered.is_monotonic

        tm.assert_numpy_array_equal(dexer, np.array([1, 2, 0]), check_dtype=False)

        ordered, dexer = idx.sort_values(return_indexer=True, ascending=False)
        assert ordered[::-1].is_monotonic

        tm.assert_numpy_array_equal(dexer, np.array([0, 2, 1]), check_dtype=False)

    @pytest.mark.parametrize("klass", [list, np.array, array, Series])
    def test_searchsorted_different_argument_classes(self, klass):
        # searchsorted must accept list/ndarray/ExtensionArray/Series input
        # on both the Index and its backing array.
        idx = TimedeltaIndex(["1 day", "2 days", "3 days"])
        result = idx.searchsorted(klass(idx))
        expected = np.arange(len(idx), dtype=result.dtype)
        tm.assert_numpy_array_equal(result, expected)

        result = idx._data.searchsorted(klass(idx))
        tm.assert_numpy_array_equal(result, expected)

    @pytest.mark.parametrize(
        "arg",
        [[1, 2], ["a", "b"], [pd.Timestamp("2020-01-01", tz="Europe/London")] * 2],
    )
    def test_searchsorted_invalid_argument_dtype(self, arg):
        # Incompatible dtypes (ints, strings, tz-aware timestamps) raise.
        idx = TimedeltaIndex(["1 day", "2 days", "3 days"])
        msg = "searchsorted requires compatible dtype"
        with pytest.raises(TypeError, match=msg):
            idx.searchsorted(arg)

    def test_argmin_argmax(self):
        idx = TimedeltaIndex(["1 day 00:00:05", "1 day 00:00:01", "1 day 00:00:02"])
        assert idx.argmin() == 1
        assert idx.argmax() == 0

    def test_misc_coverage(self):
        rng = timedelta_range("1 day", periods=5)
        result = rng.groupby(rng.days)
        assert isinstance(list(result.values())[0][0], Timedelta)

        # equals() is strict: a plain list never equals a TimedeltaIndex.
        idx = TimedeltaIndex(["3d", "1d", "2d"])
        assert not idx.equals(list(idx))

        non_td = Index(list("abc"))
        assert not idx.equals(list(non_td))

    def test_map(self):
        # test_map_dictlike generally tests
        rng = timedelta_range("1 day", periods=10)

        f = lambda x: x.days
        result = rng.map(f)
        exp = Int64Index([f(x) for x in rng])
        tm.assert_index_equal(result, exp)

    def test_pass_TimedeltaIndex_to_index(self):
        # Index(..., dtype=object) boxes values as datetime.timedelta.
        rng = timedelta_range("1 days", "10 days")
        idx = Index(rng, dtype=object)

        expected = Index(rng.to_pytimedelta(), dtype=object)

        tm.assert_numpy_array_equal(idx.values, expected.values)

    def test_append_numpy_bug_1681(self):
        # Regression test: appending to an empty frame with a timedelta
        # index/column must round-trip the values (GH 1681).
        td = timedelta_range("1 days", "10 days", freq="2D")
        a = DataFrame()
        c = DataFrame({"A": "foo", "B": td}, index=td)
        str(c)

        result = a.append(c)
        assert (result["B"] == td).all()

    def test_fields(self):
        rng = timedelta_range("1 days, 10:11:12.100123456", periods=2, freq="s")
        tm.assert_index_equal(rng.days, Index([1, 1], dtype="int64"))
        tm.assert_index_equal(
            rng.seconds,
            Index([10 * 3600 + 11 * 60 + 12, 10 * 3600 + 11 * 60 + 13], dtype="int64"),
        )
        tm.assert_index_equal(
            rng.microseconds, Index([100 * 1000 + 123, 100 * 1000 + 123], dtype="int64")
        )
        tm.assert_index_equal(rng.nanoseconds, Index([456, 456], dtype="int64"))

        # Only days/seconds/microseconds/nanoseconds exist as fields.
        msg = "'TimedeltaIndex' object has no attribute '{}'"
        with pytest.raises(AttributeError, match=msg.format("hours")):
            rng.hours
        with pytest.raises(AttributeError, match=msg.format("minutes")):
            rng.minutes
        with pytest.raises(AttributeError, match=msg.format("milliseconds")):
            rng.milliseconds

        # with nat
        s = Series(rng)
        s[1] = np.nan

        tm.assert_series_equal(s.dt.days, Series([1, np.nan], index=[0, 1]))
        tm.assert_series_equal(
            s.dt.seconds, Series([10 * 3600 + 11 * 60 + 12, np.nan], index=[0, 1])
        )

        # preserve name (GH15589)
        rng.name = "name"
        assert rng.days.name == "name"

    def test_freq_conversion(self):
        # doc example

        # series
        td = Series(date_range("20130101", periods=4)) - Series(
            date_range("20121201", periods=4)
        )
        td[2] += timedelta(minutes=5, seconds=3)
        td[3] = np.nan

        result = td / np.timedelta64(1, "D")
        expected = Series([31, 31, (31 * 86400 + 5 * 60 + 3) / 86400.0, np.nan])
        tm.assert_series_equal(result, expected)

        # astype("timedelta64[D]") truncates toward whole days.
        result = td.astype("timedelta64[D]")
        expected = Series([31, 31, 31, np.nan])
        tm.assert_series_equal(result, expected)

        result = td / np.timedelta64(1, "s")
        expected = Series([31 * 86400, 31 * 86400, 31 * 86400 + 5 * 60 + 3, np.nan])
        tm.assert_series_equal(result, expected)

        result = td.astype("timedelta64[s]")
        tm.assert_series_equal(result, expected)

        # tdi
        td = TimedeltaIndex(td)

        result = td / np.timedelta64(1, "D")
        expected = Index([31, 31, (31 * 86400 + 5 * 60 + 3) / 86400.0, np.nan])
        tm.assert_index_equal(result, expected)

        result = td.astype("timedelta64[D]")
        expected = Index([31, 31, 31, np.nan])
        tm.assert_index_equal(result, expected)

        result = td / np.timedelta64(1, "s")
        expected = Index([31 * 86400, 31 * 86400, 31 * 86400 + 5 * 60 + 3, np.nan])
        tm.assert_index_equal(result, expected)

        result = td.astype("timedelta64[s]")
        tm.assert_index_equal(result, expected)
| 31.672131 | 88 | 0.601708 | from datetime import timedelta
import numpy as np
import pytest
import pandas as pd
from pandas import (
DataFrame,
Index,
Int64Index,
Series,
Timedelta,
TimedeltaIndex,
array,
date_range,
timedelta_range,
)
import pandas._testing as tm
from ..datetimelike import DatetimeLike
randn = np.random.randn
class TestTimedeltaIndex(DatetimeLike):
_holder = TimedeltaIndex
@pytest.fixture
def indices(self):
return tm.makeTimedeltaIndex(10)
def create_index(self) -> TimedeltaIndex:
index = pd.to_timedelta(range(5), unit="d")._with_freq("infer")
assert index.freq == "D"
return index + pd.offsets.Hour(1)
def test_numeric_compat(self):
# in test_arithmetic.py
pass
def test_shift(self):
pass # this is handled in test_arithmetic.py
def test_pickle_compat_construction(self):
pass
def test_isin(self):
index = tm.makeTimedeltaIndex(4)
result = index.isin(index)
assert result.all()
result = index.isin(list(index))
assert result.all()
tm.assert_almost_equal(
index.isin([index[2], 5]), np.array([False, False, True, False])
)
def test_factorize(self):
idx1 = TimedeltaIndex(["1 day", "1 day", "2 day", "2 day", "3 day", "3 day"])
exp_arr = np.array([0, 0, 1, 1, 2, 2], dtype=np.intp)
exp_idx = TimedeltaIndex(["1 day", "2 day", "3 day"])
arr, idx = idx1.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
arr, idx = idx1.factorize(sort=True)
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, exp_idx)
# freq must be preserved
idx3 = timedelta_range("1 day", periods=4, freq="s")
exp_arr = np.array([0, 1, 2, 3], dtype=np.intp)
arr, idx = idx3.factorize()
tm.assert_numpy_array_equal(arr, exp_arr)
tm.assert_index_equal(idx, idx3)
def test_sort_values(self):
idx = TimedeltaIndex(["4d", "1d", "2d"])
ordered = idx.sort_values()
assert ordered.is_monotonic
ordered = idx.sort_values(ascending=False)
assert ordered[::-1].is_monotonic
ordered, dexer = idx.sort_values(return_indexer=True)
assert ordered.is_monotonic
tm.assert_numpy_array_equal(dexer, np.array([1, 2, 0]), check_dtype=False)
ordered, dexer = idx.sort_values(return_indexer=True, ascending=False)
assert ordered[::-1].is_monotonic
tm.assert_numpy_array_equal(dexer, np.array([0, 2, 1]), check_dtype=False)
@pytest.mark.parametrize("klass", [list, np.array, array, Series])
def test_searchsorted_different_argument_classes(self, klass):
idx = TimedeltaIndex(["1 day", "2 days", "3 days"])
result = idx.searchsorted(klass(idx))
expected = np.arange(len(idx), dtype=result.dtype)
tm.assert_numpy_array_equal(result, expected)
result = idx._data.searchsorted(klass(idx))
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize(
"arg",
[[1, 2], ["a", "b"], [pd.Timestamp("2020-01-01", tz="Europe/London")] * 2],
)
def test_searchsorted_invalid_argument_dtype(self, arg):
idx = TimedeltaIndex(["1 day", "2 days", "3 days"])
msg = "searchsorted requires compatible dtype"
with pytest.raises(TypeError, match=msg):
idx.searchsorted(arg)
def test_argmin_argmax(self):
idx = TimedeltaIndex(["1 day 00:00:05", "1 day 00:00:01", "1 day 00:00:02"])
assert idx.argmin() == 1
assert idx.argmax() == 0
def test_misc_coverage(self):
rng = timedelta_range("1 day", periods=5)
result = rng.groupby(rng.days)
assert isinstance(list(result.values())[0][0], Timedelta)
idx = TimedeltaIndex(["3d", "1d", "2d"])
assert not idx.equals(list(idx))
non_td = Index(list("abc"))
assert not idx.equals(list(non_td))
def test_map(self):
# test_map_dictlike generally tests
rng = timedelta_range("1 day", periods=10)
f = lambda x: x.days
result = rng.map(f)
exp = Int64Index([f(x) for x in rng])
tm.assert_index_equal(result, exp)
def test_pass_TimedeltaIndex_to_index(self):
rng = timedelta_range("1 days", "10 days")
idx = Index(rng, dtype=object)
expected = Index(rng.to_pytimedelta(), dtype=object)
tm.assert_numpy_array_equal(idx.values, expected.values)
def test_append_numpy_bug_1681(self):
td = timedelta_range("1 days", "10 days", freq="2D")
a = DataFrame()
c = DataFrame({"A": "foo", "B": td}, index=td)
str(c)
result = a.append(c)
assert (result["B"] == td).all()
def test_fields(self):
rng = timedelta_range("1 days, 10:11:12.100123456", periods=2, freq="s")
tm.assert_index_equal(rng.days, Index([1, 1], dtype="int64"))
tm.assert_index_equal(
rng.seconds,
Index([10 * 3600 + 11 * 60 + 12, 10 * 3600 + 11 * 60 + 13], dtype="int64"),
)
tm.assert_index_equal(
rng.microseconds, Index([100 * 1000 + 123, 100 * 1000 + 123], dtype="int64")
)
tm.assert_index_equal(rng.nanoseconds, Index([456, 456], dtype="int64"))
msg = "'TimedeltaIndex' object has no attribute '{}'"
with pytest.raises(AttributeError, match=msg.format("hours")):
rng.hours
with pytest.raises(AttributeError, match=msg.format("minutes")):
rng.minutes
with pytest.raises(AttributeError, match=msg.format("milliseconds")):
rng.milliseconds
# with nat
s = Series(rng)
s[1] = np.nan
tm.assert_series_equal(s.dt.days, Series([1, np.nan], index=[0, 1]))
tm.assert_series_equal(
s.dt.seconds, Series([10 * 3600 + 11 * 60 + 12, np.nan], index=[0, 1])
)
# preserve name (GH15589)
rng.name = "name"
assert rng.days.name == "name"
def test_freq_conversion(self):
# doc example
# series
td = Series(date_range("20130101", periods=4)) - Series(
date_range("20121201", periods=4)
)
td[2] += timedelta(minutes=5, seconds=3)
td[3] = np.nan
result = td / np.timedelta64(1, "D")
expected = Series([31, 31, (31 * 86400 + 5 * 60 + 3) / 86400.0, np.nan])
tm.assert_series_equal(result, expected)
result = td.astype("timedelta64[D]")
expected = Series([31, 31, 31, np.nan])
tm.assert_series_equal(result, expected)
result = td / np.timedelta64(1, "s")
expected = Series([31 * 86400, 31 * 86400, 31 * 86400 + 5 * 60 + 3, np.nan])
tm.assert_series_equal(result, expected)
result = td.astype("timedelta64[s]")
tm.assert_series_equal(result, expected)
# tdi
td = TimedeltaIndex(td)
result = td / np.timedelta64(1, "D")
expected = Index([31, 31, (31 * 86400 + 5 * 60 + 3) / 86400.0, np.nan])
tm.assert_index_equal(result, expected)
result = td.astype("timedelta64[D]")
expected = Index([31, 31, 31, np.nan])
tm.assert_index_equal(result, expected)
result = td / np.timedelta64(1, "s")
expected = Index([31 * 86400, 31 * 86400, 31 * 86400 + 5 * 60 + 3, np.nan])
tm.assert_index_equal(result, expected)
result = td.astype("timedelta64[s]")
tm.assert_index_equal(result, expected)
| true | true |
f724bb27bccc8713b9577f3921f30f709db72e74 | 476 | py | Python | week05/server/week06lab2.py | tullowhurler/datarep | 38b145d9a49c1b29c02597381de93bbac4a7edae | [
"Apache-2.0"
] | null | null | null | week05/server/week06lab2.py | tullowhurler/datarep | 38b145d9a49c1b29c02597381de93bbac4a7edae | [
"Apache-2.0"
] | null | null | null | week05/server/week06lab2.py | tullowhurler/datarep | 38b145d9a49c1b29c02597381de93bbac4a7edae | [
"Apache-2.0"
] | null | null | null | import requests
import json
#html = '<h1>hello world</h1>This is html'
f = open("../../week05/carviewer.html", "r")
html = f.read()
#print (html)
apiKey = '46ceed910c24ff7cce8240e89ec7b71912f6f40f2ec55fd217ce150a d6d4f1c4'
url = 'https://api.html2pdf.app/v1/generate'
data = {'html': html,'apiKey': apiKey}
response = requests.post(url, json=data)
print (response.status_code)
newFile = open("lab06.02.01.htmlaspdf.pdf", "wb")
newFile.write(response.content) | 28 | 77 | 0.707983 | import requests
import json
f = open("../../week05/carviewer.html", "r")
html = f.read()
apiKey = '46ceed910c24ff7cce8240e89ec7b71912f6f40f2ec55fd217ce150a d6d4f1c4'
url = 'https://api.html2pdf.app/v1/generate'
data = {'html': html,'apiKey': apiKey}
response = requests.post(url, json=data)
print (response.status_code)
newFile = open("lab06.02.01.htmlaspdf.pdf", "wb")
newFile.write(response.content) | true | true |
f724bb485ff4b58748536534ebf2f5897e7eed1f | 2,258 | py | Python | azext_iot/__init__.py | YingXue/azure-iot-cli-extension | efe7897b1ae1d2a9953f501abe7654b84d69372d | [
"MIT"
] | 79 | 2017-09-25T19:29:17.000Z | 2022-03-30T20:55:57.000Z | azext_iot/__init__.py | YingXue/azure-iot-cli-extension | efe7897b1ae1d2a9953f501abe7654b84d69372d | [
"MIT"
] | 305 | 2018-01-17T01:12:10.000Z | 2022-03-23T22:38:11.000Z | azext_iot/__init__.py | YingXue/azure-iot-cli-extension | efe7897b1ae1d2a9953f501abe7654b84d69372d | [
"MIT"
] | 69 | 2017-11-14T00:30:46.000Z | 2022-03-01T17:11:45.000Z | # coding=utf-8
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
from azure.cli.core import AzCommandsLoader
from azure.cli.core.commands import CliCommandType
from azext_iot._factory import iot_service_provisioning_factory
from azext_iot.constants import VERSION
import azext_iot._help # noqa: F401
from azext_iot.product.command_map import load_product_commands
iothub_ops = CliCommandType(operations_tmpl="azext_iot.operations.hub#{}")
iotdps_ops = CliCommandType(
operations_tmpl="azext_iot.operations.dps#{}",
client_factory=iot_service_provisioning_factory,
)
class IoTExtCommandsLoader(AzCommandsLoader):
def __init__(self, cli_ctx=None):
super(IoTExtCommandsLoader, self).__init__(cli_ctx=cli_ctx)
def load_command_table(self, args):
from azext_iot.commands import load_command_table
from azext_iot.iothub.command_map import load_iothub_commands
from azext_iot.central.command_map import load_central_commands
from azext_iot.digitaltwins.command_map import load_digitaltwins_commands
load_command_table(self, args)
load_iothub_commands(self, args)
load_central_commands(self, args)
load_digitaltwins_commands(self, args)
load_product_commands(self, args)
return self.command_table
def load_arguments(self, command):
from azext_iot._params import load_arguments
from azext_iot.iothub.params import load_iothub_arguments
from azext_iot.central.params import load_central_arguments
from azext_iot.digitaltwins.params import load_digitaltwins_arguments
from azext_iot.product.params import load_product_params
load_arguments(self, command)
load_iothub_arguments(self, command)
load_central_arguments(self, command)
load_digitaltwins_arguments(self, command)
load_product_params(self, command)
COMMAND_LOADER_CLS = IoTExtCommandsLoader
__version__ = VERSION
| 39.614035 | 94 | 0.712578 |
from azure.cli.core import AzCommandsLoader
from azure.cli.core.commands import CliCommandType
from azext_iot._factory import iot_service_provisioning_factory
from azext_iot.constants import VERSION
import azext_iot._help
from azext_iot.product.command_map import load_product_commands
iothub_ops = CliCommandType(operations_tmpl="azext_iot.operations.hub#{}")
iotdps_ops = CliCommandType(
operations_tmpl="azext_iot.operations.dps#{}",
client_factory=iot_service_provisioning_factory,
)
class IoTExtCommandsLoader(AzCommandsLoader):
def __init__(self, cli_ctx=None):
super(IoTExtCommandsLoader, self).__init__(cli_ctx=cli_ctx)
def load_command_table(self, args):
from azext_iot.commands import load_command_table
from azext_iot.iothub.command_map import load_iothub_commands
from azext_iot.central.command_map import load_central_commands
from azext_iot.digitaltwins.command_map import load_digitaltwins_commands
load_command_table(self, args)
load_iothub_commands(self, args)
load_central_commands(self, args)
load_digitaltwins_commands(self, args)
load_product_commands(self, args)
return self.command_table
def load_arguments(self, command):
from azext_iot._params import load_arguments
from azext_iot.iothub.params import load_iothub_arguments
from azext_iot.central.params import load_central_arguments
from azext_iot.digitaltwins.params import load_digitaltwins_arguments
from azext_iot.product.params import load_product_params
load_arguments(self, command)
load_iothub_arguments(self, command)
load_central_arguments(self, command)
load_digitaltwins_arguments(self, command)
load_product_params(self, command)
COMMAND_LOADER_CLS = IoTExtCommandsLoader
__version__ = VERSION
| true | true |
f724bc1d7c934473af2dfd10b1805761e9c49da5 | 97 | py | Python | Coursera_HSE_Py_Ass-5.4-Stairs.py | YuriRevin/Coursera_HSE_Py_Ass-5.4-Stairs | a6327a94c4b65fc75e3d99475e27464c85df0185 | [
"MIT"
] | null | null | null | Coursera_HSE_Py_Ass-5.4-Stairs.py | YuriRevin/Coursera_HSE_Py_Ass-5.4-Stairs | a6327a94c4b65fc75e3d99475e27464c85df0185 | [
"MIT"
] | null | null | null | Coursera_HSE_Py_Ass-5.4-Stairs.py | YuriRevin/Coursera_HSE_Py_Ass-5.4-Stairs | a6327a94c4b65fc75e3d99475e27464c85df0185 | [
"MIT"
] | null | null | null | for i in range(int(input())):
for a in range(1, i + 2):
print(a, end='')
print()
| 19.4 | 29 | 0.484536 | for i in range(int(input())):
for a in range(1, i + 2):
print(a, end='')
print()
| true | true |
f724bcbe159f2f3f6bd41ab81d7cfc844b8fbfab | 10,961 | bzl | Python | internal/pkg_npm/pkg_npm.bzl | mr-tim/rules_nodejs | 7648412d96828875343b0d9c74ddf4d7531eed72 | [
"Apache-2.0"
] | 1 | 2020-10-25T10:29:06.000Z | 2020-10-25T10:29:06.000Z | internal/pkg_npm/pkg_npm.bzl | samschlegel/rules_nodejs | 21836475c2294476a5a792d5fb0ee3e713f7c6fe | [
"Apache-2.0"
] | 8 | 2021-03-11T00:12:31.000Z | 2022-02-27T07:35:43.000Z | internal/pkg_npm/pkg_npm.bzl | samschlegel/rules_nodejs | 21836475c2294476a5a792d5fb0ee3e713f7c6fe | [
"Apache-2.0"
] | null | null | null | """npm packaging
Note, this is intended for sharing library code with non-Bazel consumers.
If all users of your library code use Bazel, they should just add your library
to the `deps` of one of their targets.
"""
load("//:providers.bzl", "DeclarationInfo", "JSNamedModuleInfo", "LinkablePackageInfo", "NodeContextInfo")
_DOC = """The pkg_npm rule creates a directory containing a publishable npm artifact.
Example:
```python
load("@build_bazel_rules_nodejs//:index.bzl", "pkg_npm")
pkg_npm(
name = "my_package",
srcs = ["package.json"],
deps = [":my_typescript_lib"],
substitutions = {"//internal/": "//"},
)
```
You can use a pair of `// BEGIN-INTERNAL ... // END-INTERNAL` comments to mark regions of files that should be elided during publishing.
For example:
```javascript
function doThing() {
// BEGIN-INTERNAL
// This is a secret internal-only comment
doInternalOnlyThing();
// END-INTERNAL
}
```
With the Bazel stamping feature, pkg_npm will replace any placeholder version in your package with the actual version control tag.
See the [stamping documentation](https://github.com/bazelbuild/rules_nodejs/blob/master/docs/index.md#stamping)
Usage:
`pkg_npm` yields three labels. Build the package directory using the default label:
```sh
$ bazel build :my_package
Target //:my_package up-to-date:
bazel-out/fastbuild/bin/my_package
$ ls -R bazel-out/fastbuild/bin/my_package
```
Dry-run of publishing to npm, calling `npm pack` (it builds the package first if needed):
```sh
$ bazel run :my_package.pack
INFO: Running command line: bazel-out/fastbuild/bin/my_package.pack
my-package-name-1.2.3.tgz
$ tar -tzf my-package-name-1.2.3.tgz
```
Actually publish the package with `npm publish` (also builds first):
```sh
# Check login credentials
$ bazel run @nodejs//:npm_node_repositories who
# Publishes the package
$ bazel run :my_package.publish
```
You can pass arguments to npm by escaping them from Bazel using a double-hyphen, for example:
`bazel run my_package.publish -- --tag=next`
"""
# Used in angular/angular /packages/bazel/src/ng_package/ng_package.bzl
PKG_NPM_ATTRS = {
"package_name": attr.string(
doc = """Optional package_name that this npm package may be imported as.""",
),
"srcs": attr.label_list(
doc = """Files inside this directory which are simply copied into the package.""",
allow_files = True,
),
"hide_build_files": attr.bool(
doc = """If set BUILD and BUILD.bazel files are prefixed with `_` in the npm package.
The default is True since npm packages that contain BUILD files don't work with
`yarn_install` and `npm_install` without a post-install step that deletes or renames them.
NB: Bazel has a change in https://github.com/bazelbuild/bazel/pull/10261
(expected in version 2.1) that adds .bazelignore
support for external repositories, which will make this attribute obsolete.""",
default = True,
),
"nested_packages": attr.label_list(
doc = """Other pkg_npm rules whose content is copied into this package.""",
allow_files = True,
),
"node_context_data": attr.label(
default = "@build_bazel_rules_nodejs//internal:node_context_data",
providers = [NodeContextInfo],
doc = "Internal use only",
),
"replace_with_version": attr.string(
doc = """If set this value is replaced with the version stamp data.
See the section on stamping in the README.""",
default = "0.0.0-PLACEHOLDER",
),
"substitutions": attr.string_dict(
doc = """Key-value pairs which are replaced in all the files while building the package.""",
),
"vendor_external": attr.string_list(
doc = """External workspaces whose contents should be vendored into this workspace.
Avoids 'external/foo' path segments in the resulting package.""",
),
"deps": attr.label_list(
doc = """Other targets which produce files that should be included in the package, such as `rollup_bundle`""",
allow_files = True,
),
"_npm_script_generator": attr.label(
default = Label("//internal/pkg_npm:npm_script_generator"),
cfg = "host",
executable = True,
),
"_packager": attr.label(
default = Label("//internal/pkg_npm:packager"),
cfg = "host",
executable = True,
),
"_run_npm_template": attr.label(
default = Label("@nodejs//:run_npm.sh.template"),
allow_single_file = True,
),
}
# Used in angular/angular /packages/bazel/src/ng_package/ng_package.bzl
PKG_NPM_OUTPUTS = {
"pack": "%{name}.pack",
"publish": "%{name}.publish",
}
# Takes a depset of files and returns a corresponding list of file paths without any files
# that aren't part of the specified package path. Also include files from external repositories
# that explicitly specified in the vendor_external list.
def _filter_out_external_files(ctx, files, package_path):
result = []
for file in files:
# NB: package_path may be an empty string
if file.short_path.startswith(package_path) and not file.short_path.startswith("../"):
result.append(file.path)
else:
for v in ctx.attr.vendor_external:
if file.short_path.startswith("../%s/" % v):
result.append(file.path)
return result
# Used in angular/angular /packages/bazel/src/ng_package/ng_package.bzl
def create_package(ctx, deps_files, nested_packages):
"""Creates an action that produces the npm package.
It copies srcs and deps into the artifact and produces the .pack and .publish
scripts.
Args:
ctx: the skylark rule context
deps_files: list of files to include in the package which have been
specified as dependencies
nested_packages: list of TreeArtifact outputs from other actions which are
to be nested inside this package
Returns:
The tree artifact which is the publishable directory.
"""
stamp = ctx.attr.node_context_data[NodeContextInfo].stamp
all_files = deps_files + ctx.files.srcs
if not stamp and len(all_files) == 1 and all_files[0].is_directory and len(ctx.files.nested_packages) == 0:
# Special case where these is a single dep that is a directory artifact and there are no
# source files or nested_packages; in that case we assume the package is contained within
# that single directory and there is no work to do
package_dir = all_files[0]
_create_npm_scripts(ctx, package_dir)
return package_dir
package_dir = ctx.actions.declare_directory(ctx.label.name)
package_path = ctx.label.package
# List of dependency sources which are local to the package that defines the current
# target. Also include files from external repositories that explicitly specified in
# the vendor_external list. We only want to package deps files which are inside of the
# current package unless explicitely specified.
filtered_deps_sources = _filter_out_external_files(ctx, deps_files, package_path)
args = ctx.actions.args()
args.use_param_file("%s", use_always = True)
args.add(package_dir.path)
args.add(package_path)
args.add_joined([s.path for s in ctx.files.srcs], join_with = ",", omit_if_empty = False)
args.add(ctx.bin_dir.path)
args.add(ctx.genfiles_dir.path)
args.add_joined(filtered_deps_sources, join_with = ",", omit_if_empty = False)
args.add_joined([p.path for p in nested_packages], join_with = ",", omit_if_empty = False)
args.add(ctx.attr.substitutions)
args.add(ctx.attr.replace_with_version)
args.add(ctx.version_file.path if stamp else "")
args.add_joined(ctx.attr.vendor_external, join_with = ",", omit_if_empty = False)
args.add("1" if ctx.attr.hide_build_files else "0")
inputs = ctx.files.srcs + deps_files + nested_packages
# The version_file is an undocumented attribute of the ctx that lets us read the volatile-status.txt file
# produced by the --workspace_status_command. That command will be executed whenever
# this action runs, so we get the latest version info on each execution.
# See https://github.com/bazelbuild/bazel/issues/1054
if stamp:
inputs.append(ctx.version_file)
ctx.actions.run(
progress_message = "Assembling npm package %s" % package_dir.short_path,
mnemonic = "AssembleNpmPackage",
executable = ctx.executable._packager,
inputs = inputs,
outputs = [package_dir],
arguments = [args],
)
_create_npm_scripts(ctx, package_dir)
return package_dir
def _create_npm_scripts(ctx, package_dir):
args = ctx.actions.args()
args.add_all([
package_dir.path,
ctx.outputs.pack.path,
ctx.outputs.publish.path,
ctx.file._run_npm_template.path,
])
ctx.actions.run(
progress_message = "Generating npm pack & publish scripts",
mnemonic = "GenerateNpmScripts",
executable = ctx.executable._npm_script_generator,
inputs = [ctx.file._run_npm_template, package_dir],
outputs = [ctx.outputs.pack, ctx.outputs.publish],
arguments = [args],
# Must be run local (no sandbox) so that the pwd is the actual execroot
# in the script which is used to generate the path in the pack & publish
# scripts.
execution_requirements = {"local": "1"},
)
def _pkg_npm(ctx):
deps_files_depsets = []
for dep in ctx.attr.deps:
# Collect whatever is in the "data"
deps_files_depsets.append(dep.data_runfiles.files)
# Only collect DefaultInfo files (not transitive)
deps_files_depsets.append(dep.files)
# All direct & transitive JavaScript-producing deps
# TODO: switch to JSModuleInfo when it is available
if JSNamedModuleInfo in dep:
deps_files_depsets.append(dep[JSNamedModuleInfo].sources)
# Include all transitive declerations
if DeclarationInfo in dep:
deps_files_depsets.append(dep[DeclarationInfo].transitive_declarations)
# Note: to_list() should be called once per rule!
deps_files = depset(transitive = deps_files_depsets).to_list()
package_dir = create_package(ctx, deps_files, ctx.files.nested_packages)
package_dir_depset = depset([package_dir])
result = [
DefaultInfo(
files = package_dir_depset,
runfiles = ctx.runfiles([package_dir]),
),
]
if ctx.attr.package_name:
result.append(LinkablePackageInfo(
package_name = ctx.attr.package_name,
path = package_dir.path,
files = package_dir_depset,
))
return result
pkg_npm = rule(
implementation = _pkg_npm,
attrs = PKG_NPM_ATTRS,
doc = _DOC,
outputs = PKG_NPM_OUTPUTS,
)
| 36.055921 | 136 | 0.684609 |
load("//:providers.bzl", "DeclarationInfo", "JSNamedModuleInfo", "LinkablePackageInfo", "NodeContextInfo")
_DOC = """The pkg_npm rule creates a directory containing a publishable npm artifact.
Example:
```python
load("@build_bazel_rules_nodejs//:index.bzl", "pkg_npm")
pkg_npm(
name = "my_package",
srcs = ["package.json"],
deps = [":my_typescript_lib"],
substitutions = {"//internal/": "//"},
)
```
You can use a pair of `// BEGIN-INTERNAL ... // END-INTERNAL` comments to mark regions of files that should be elided during publishing.
For example:
```javascript
function doThing() {
// BEGIN-INTERNAL
// This is a secret internal-only comment
doInternalOnlyThing();
// END-INTERNAL
}
```
With the Bazel stamping feature, pkg_npm will replace any placeholder version in your package with the actual version control tag.
See the [stamping documentation](https://github.com/bazelbuild/rules_nodejs/blob/master/docs/index.md#stamping)
Usage:
`pkg_npm` yields three labels. Build the package directory using the default label:
```sh
$ bazel build :my_package
Target //:my_package up-to-date:
bazel-out/fastbuild/bin/my_package
$ ls -R bazel-out/fastbuild/bin/my_package
```
Dry-run of publishing to npm, calling `npm pack` (it builds the package first if needed):
```sh
$ bazel run :my_package.pack
INFO: Running command line: bazel-out/fastbuild/bin/my_package.pack
my-package-name-1.2.3.tgz
$ tar -tzf my-package-name-1.2.3.tgz
```
Actually publish the package with `npm publish` (also builds first):
```sh
# Check login credentials
$ bazel run @nodejs//:npm_node_repositories who
# Publishes the package
$ bazel run :my_package.publish
```
You can pass arguments to npm by escaping them from Bazel using a double-hyphen, for example:
`bazel run my_package.publish -- --tag=next`
"""
PKG_NPM_ATTRS = {
"package_name": attr.string(
doc = """Optional package_name that this npm package may be imported as.""",
),
"srcs": attr.label_list(
doc = """Files inside this directory which are simply copied into the package.""",
allow_files = True,
),
"hide_build_files": attr.bool(
doc = """If set BUILD and BUILD.bazel files are prefixed with `_` in the npm package.
The default is True since npm packages that contain BUILD files don't work with
`yarn_install` and `npm_install` without a post-install step that deletes or renames them.
NB: Bazel has a change in https://github.com/bazelbuild/bazel/pull/10261
(expected in version 2.1) that adds .bazelignore
support for external repositories, which will make this attribute obsolete.""",
default = True,
),
"nested_packages": attr.label_list(
doc = """Other pkg_npm rules whose content is copied into this package.""",
allow_files = True,
),
"node_context_data": attr.label(
default = "@build_bazel_rules_nodejs//internal:node_context_data",
providers = [NodeContextInfo],
doc = "Internal use only",
),
"replace_with_version": attr.string(
doc = """If set this value is replaced with the version stamp data.
See the section on stamping in the README.""",
default = "0.0.0-PLACEHOLDER",
),
"substitutions": attr.string_dict(
doc = """Key-value pairs which are replaced in all the files while building the package.""",
),
"vendor_external": attr.string_list(
doc = """External workspaces whose contents should be vendored into this workspace.
Avoids 'external/foo' path segments in the resulting package.""",
),
"deps": attr.label_list(
doc = """Other targets which produce files that should be included in the package, such as `rollup_bundle`""",
allow_files = True,
),
"_npm_script_generator": attr.label(
default = Label("//internal/pkg_npm:npm_script_generator"),
cfg = "host",
executable = True,
),
"_packager": attr.label(
default = Label("//internal/pkg_npm:packager"),
cfg = "host",
executable = True,
),
"_run_npm_template": attr.label(
default = Label("@nodejs//:run_npm.sh.template"),
allow_single_file = True,
),
}
# Used in angular/angular /packages/bazel/src/ng_package/ng_package.bzl
PKG_NPM_OUTPUTS = {
"pack": "%{name}.pack",
"publish": "%{name}.publish",
}
# Takes a depset of files and returns a corresponding list of file paths without any files
# that aren't part of the specified package path. Also include files from external repositories
def _filter_out_external_files(ctx, files, package_path):
result = []
for file in files:
if file.short_path.startswith(package_path) and not file.short_path.startswith("../"):
result.append(file.path)
else:
for v in ctx.attr.vendor_external:
if file.short_path.startswith("../%s/" % v):
result.append(file.path)
return result
def create_package(ctx, deps_files, nested_packages):
stamp = ctx.attr.node_context_data[NodeContextInfo].stamp
all_files = deps_files + ctx.files.srcs
if not stamp and len(all_files) == 1 and all_files[0].is_directory and len(ctx.files.nested_packages) == 0:
package_dir = all_files[0]
_create_npm_scripts(ctx, package_dir)
return package_dir
package_dir = ctx.actions.declare_directory(ctx.label.name)
package_path = ctx.label.package
filtered_deps_sources = _filter_out_external_files(ctx, deps_files, package_path)
args = ctx.actions.args()
args.use_param_file("%s", use_always = True)
args.add(package_dir.path)
args.add(package_path)
args.add_joined([s.path for s in ctx.files.srcs], join_with = ",", omit_if_empty = False)
args.add(ctx.bin_dir.path)
args.add(ctx.genfiles_dir.path)
args.add_joined(filtered_deps_sources, join_with = ",", omit_if_empty = False)
args.add_joined([p.path for p in nested_packages], join_with = ",", omit_if_empty = False)
args.add(ctx.attr.substitutions)
args.add(ctx.attr.replace_with_version)
args.add(ctx.version_file.path if stamp else "")
args.add_joined(ctx.attr.vendor_external, join_with = ",", omit_if_empty = False)
args.add("1" if ctx.attr.hide_build_files else "0")
inputs = ctx.files.srcs + deps_files + nested_packages
if stamp:
inputs.append(ctx.version_file)
ctx.actions.run(
progress_message = "Assembling npm package %s" % package_dir.short_path,
mnemonic = "AssembleNpmPackage",
executable = ctx.executable._packager,
inputs = inputs,
outputs = [package_dir],
arguments = [args],
)
_create_npm_scripts(ctx, package_dir)
return package_dir
def _create_npm_scripts(ctx, package_dir):
args = ctx.actions.args()
args.add_all([
package_dir.path,
ctx.outputs.pack.path,
ctx.outputs.publish.path,
ctx.file._run_npm_template.path,
])
ctx.actions.run(
progress_message = "Generating npm pack & publish scripts",
mnemonic = "GenerateNpmScripts",
executable = ctx.executable._npm_script_generator,
inputs = [ctx.file._run_npm_template, package_dir],
outputs = [ctx.outputs.pack, ctx.outputs.publish],
arguments = [args],
execution_requirements = {"local": "1"},
)
def _pkg_npm(ctx):
deps_files_depsets = []
for dep in ctx.attr.deps:
deps_files_depsets.append(dep.data_runfiles.files)
deps_files_depsets.append(dep.files)
if JSNamedModuleInfo in dep:
deps_files_depsets.append(dep[JSNamedModuleInfo].sources)
if DeclarationInfo in dep:
deps_files_depsets.append(dep[DeclarationInfo].transitive_declarations)
deps_files = depset(transitive = deps_files_depsets).to_list()
package_dir = create_package(ctx, deps_files, ctx.files.nested_packages)
package_dir_depset = depset([package_dir])
result = [
DefaultInfo(
files = package_dir_depset,
runfiles = ctx.runfiles([package_dir]),
),
]
if ctx.attr.package_name:
result.append(LinkablePackageInfo(
package_name = ctx.attr.package_name,
path = package_dir.path,
files = package_dir_depset,
))
return result
pkg_npm = rule(
implementation = _pkg_npm,
attrs = PKG_NPM_ATTRS,
doc = _DOC,
outputs = PKG_NPM_OUTPUTS,
)
| true | true |
f724bd849c4133d14569f85a315c436a9c3794b3 | 3,021 | py | Python | cosmo_tester/test_suites/cluster/cluster_to_aio_agents_migration_test.py | Ilanad/cloudify-system-tests | acb31f28fade27f118c6a6c528a080376f24ca46 | [
"Apache-2.0"
] | 10 | 2016-06-26T11:05:57.000Z | 2021-11-04T11:51:50.000Z | cosmo_tester/test_suites/cluster/cluster_to_aio_agents_migration_test.py | Ilanad/cloudify-system-tests | acb31f28fade27f118c6a6c528a080376f24ca46 | [
"Apache-2.0"
] | 89 | 2015-03-19T06:20:26.000Z | 2022-01-31T09:23:35.000Z | cosmo_tester/test_suites/cluster/cluster_to_aio_agents_migration_test.py | Ilanad/cloudify-system-tests | acb31f28fade27f118c6a6c528a080376f24ca46 | [
"Apache-2.0"
] | 19 | 2015-01-21T17:13:07.000Z | 2021-06-07T08:09:51.000Z | from os.path import join
import pytest
from cosmo_tester.test_suites.agent import validate_agent
from cosmo_tester.framework.examples import get_example_deployment
from cosmo_tester.test_suites.snapshots import (
create_copy_and_restore_snapshot,
)
@pytest.mark.four_vms
def test_migrate_agents_cluster_to_aio(
three_node_cluster_with_extra_manager, module_tmpdir,
ssh_key, logger, test_config):
node1, node2, node3, aio_mgr = three_node_cluster_with_extra_manager
aio_mgr.bootstrap()
logger.info('Installing example deployment on cluster')
example = get_example_deployment(node1, ssh_key, logger,
'cluster_to_aio_agents', test_config)
example.inputs['server_ip'] = node1.ip_address
example.upload_and_verify_install()
validate_agent(node2, example, test_config)
logger.info('Creating snapshot on cluster')
snapshot_id = 'cluster_to_aio_agents'
snapshot_path = join(str(module_tmpdir), snapshot_id) + '.zip'
create_copy_and_restore_snapshot(
node1, aio_mgr, snapshot_id, snapshot_path, logger,
cert_path=aio_mgr.api_ca_path)
logger.info('Migrating to new agents, stopping old agents')
aio_mgr.run_command(
'cfy agents install --stop-old-agent --tenant-name {}'.format(
example.tenant,
)
)
logger.info('Verifying agent connectivity on AIO manager')
example.manager = aio_mgr
validate_agent(aio_mgr, example, test_config, upgrade=True)
example.uninstall()
@pytest.mark.four_vms
def test_migrate_agents_aio_to_cluster(
        three_node_cluster_with_extra_manager, module_tmpdir,
        ssh_key, logger, test_config):
    """Install on an AIO manager, restore a snapshot of it onto a
    three-node cluster, migrate the agents there, and verify connectivity."""
    mgr1, mgr2, mgr3, aio = three_node_cluster_with_extra_manager
    aio.bootstrap()
    logger.info('Installing example deployment on AIO manager')
    deployment = get_example_deployment(
        aio, ssh_key, logger, 'aio_to_cluster_agents', test_config)
    deployment.inputs['server_ip'] = aio.ip_address
    deployment.upload_and_verify_install()
    validate_agent(aio, deployment, test_config)
    logger.info('Creating snapshot on AIO manager')
    snap_id = 'aio_to_cluster_agents'
    snap_path = join(str(module_tmpdir), snap_id) + '.zip'
    create_copy_and_restore_snapshot(
        aio, mgr1, snap_id, snap_path, logger,
        cert_path=aio.api_ca_path)
    for cluster_node in (mgr1, mgr2, mgr3):
        # Restart restservice to use correct rest secret
        cluster_node.run_command(
            'sudo supervisorctl restart cloudify-restservice')
        cluster_node.wait_for_manager()
    logger.info('Migrating to new agents, stopping old agents')
    mgr1.run_command(
        f'cfy agents install --stop-old-agent '
        f'--tenant-name {deployment.tenant}'
    )
    logger.info('Verifying agent connectivity on cluster')
    deployment.manager = mgr1
    validate_agent(mgr3, deployment, test_config, upgrade=True)
    deployment.uninstall()
| 35.541176 | 74 | 0.71996 | from os.path import join
import pytest
from cosmo_tester.test_suites.agent import validate_agent
from cosmo_tester.framework.examples import get_example_deployment
from cosmo_tester.test_suites.snapshots import (
create_copy_and_restore_snapshot,
)
@pytest.mark.four_vms
def test_migrate_agents_cluster_to_aio(
        three_node_cluster_with_extra_manager, module_tmpdir,
        ssh_key, logger, test_config):
    """Install an example deployment on a three-node cluster, restore a
    snapshot of it onto an AIO manager, then migrate the agents to the
    AIO manager and verify they are reachable there.
    """
    node1, node2, node3, aio_mgr = three_node_cluster_with_extra_manager
    aio_mgr.bootstrap()
    logger.info('Installing example deployment on cluster')
    example = get_example_deployment(node1, ssh_key, logger,
                                     'cluster_to_aio_agents', test_config)
    example.inputs['server_ip'] = node1.ip_address
    example.upload_and_verify_install()
    # Validate from a different cluster node than the one we installed on.
    validate_agent(node2, example, test_config)
    logger.info('Creating snapshot on cluster')
    snapshot_id = 'cluster_to_aio_agents'
    snapshot_path = join(str(module_tmpdir), snapshot_id) + '.zip'
    create_copy_and_restore_snapshot(
        node1, aio_mgr, snapshot_id, snapshot_path, logger,
        cert_path=aio_mgr.api_ca_path)
    logger.info('Migrating to new agents, stopping old agents')
    aio_mgr.run_command(
        'cfy agents install --stop-old-agent --tenant-name {}'.format(
            example.tenant,
        )
    )
    logger.info('Verifying agent connectivity on AIO manager')
    # Point the example at the AIO manager so validation/uninstall go there.
    example.manager = aio_mgr
    validate_agent(aio_mgr, example, test_config, upgrade=True)
    example.uninstall()
@pytest.mark.four_vms
def test_migrate_agents_aio_to_cluster(
        three_node_cluster_with_extra_manager, module_tmpdir,
        ssh_key, logger, test_config):
    """Install an example deployment on an AIO manager, restore a snapshot
    of it onto a three-node cluster, then migrate the agents to the
    cluster and verify they are reachable there.
    """
    node1, node2, node3, aio_mgr = three_node_cluster_with_extra_manager
    aio_mgr.bootstrap()
    logger.info('Installing example deployment on AIO manager')
    example = get_example_deployment(aio_mgr, ssh_key, logger,
                                     'aio_to_cluster_agents', test_config)
    example.inputs['server_ip'] = aio_mgr.ip_address
    example.upload_and_verify_install()
    validate_agent(aio_mgr, example, test_config)
    logger.info('Creating snapshot on AIO manager')
    snapshot_id = 'aio_to_cluster_agents'
    snapshot_path = join(str(module_tmpdir), snapshot_id) + '.zip'
    create_copy_and_restore_snapshot(
        aio_mgr, node1, snapshot_id, snapshot_path, logger,
        cert_path=aio_mgr.api_ca_path)
    for mgr in node1, node2, node3:
        # Restart restservice to use correct rest secret
        mgr.run_command('sudo supervisorctl restart cloudify-restservice')
        mgr.wait_for_manager()
    logger.info('Migrating to new agents, stopping old agents')
    node1.run_command(
        'cfy agents install --stop-old-agent --tenant-name {}'.format(
            example.tenant,
        )
    )
    logger.info('Verifying agent connectivity on cluster')
    # Point the example at the cluster so validation/uninstall go there.
    example.manager = node1
    validate_agent(node3, example, test_config, upgrade=True)
    example.uninstall()
f724be57c123ae69e82f64587350fa4ee8ad8e02 | 44,331 | py | Python | python/test/feature_extractor_test.py | da2x/vmaf | 8ba4a4b84beb40f97fa01d902e45dde69b18b517 | [
"BSD-2-Clause-Patent"
] | null | null | null | python/test/feature_extractor_test.py | da2x/vmaf | 8ba4a4b84beb40f97fa01d902e45dde69b18b517 | [
"BSD-2-Clause-Patent"
] | null | null | null | python/test/feature_extractor_test.py | da2x/vmaf | 8ba4a4b84beb40f97fa01d902e45dde69b18b517 | [
"BSD-2-Clause-Patent"
] | null | null | null | from __future__ import absolute_import
import os
import unittest
import re
from vmaf.config import VmafConfig
from vmaf.core.feature_extractor import VmafFeatureExtractor, \
MomentFeatureExtractor, \
PsnrFeatureExtractor, SsimFeatureExtractor, MsSsimFeatureExtractor, \
VifFrameDifferenceFeatureExtractor, \
AnsnrFeatureExtractor, PypsnrFeatureExtractor, VmafIntegerFeatureExtractor
from vmaf.core.asset import Asset
from vmaf.core.result_store import FileSystemResultStore
from test.testutil import set_default_576_324_videos_for_testing, set_default_flat_1920_1080_videos_for_testing, \
set_default_576_324_10bit_videos_for_testing, set_default_576_324_12bit_videos_for_testing, \
set_default_576_324_16bit_videos_for_testing, set_default_576_324_10bit_videos_for_testing_b
__copyright__ = "Copyright 2016-2020, Netflix, Inc."
__license__ = "BSD+Patent"
class FeatureExtractorTest(unittest.TestCase):
def setUp(self) -> None:
self.verificationErrors = []
self.maxDiff = None
def tearDown(self):
if hasattr(self, 'fextractor'):
self.fextractor.remove_results()
pass
self.assertEqual([], self.verificationErrors)
def test_executor_id(self):
asset = Asset(dataset="test", content_id=0, asset_id=1,
ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
asset_dict={'width': 720, 'height': 480})
fextractor = VmafFeatureExtractor([asset], None)
self.assertEqual(fextractor.executor_id, "VMAF_feature_V0.2.7")
def test_get_log_file_path(self):
import hashlib
asset = Asset(dataset="test", content_id=0, asset_id=1,
ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
asset_dict={'width':720, 'height':480,},
workdir_root="my_workdir_root")
fextractor = VmafFeatureExtractor([asset], None)
log_file_path = fextractor._get_log_file_path(asset)
h = hashlib.sha1("test_0_1_refvideo_720x480_vs_disvideo_720x480_q_720x480".encode("utf-8")).hexdigest()
self.assertTrue(re.match(r"^my_workdir_root/[a-zA-Z0-9-]+/VMAF_feature_V0.2.7_{}$".format(h), log_file_path))
def test_run_vmaf_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = VmafFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['VMAF_feature_vif_score'], 0.4460930625, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_motion_score'], 4.04982535417, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_motion2_score'], 3.8953518541666665, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_motion0_score'], 0.0, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_adm_score'], 0.9345148541666667, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_adm2_score'], 0.9345148541666667, places=4) # at version 0.2.4b (ioannis adm fix), adm and adm2 are now identical
self.assertAlmostEqual(results[0]['VMAF_feature_ansnr_score'], 23.5095715208, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_vif_num_score'], 712650.023478, places=0)
self.assertAlmostEqual(results[0]['VMAF_feature_vif_den_score'], 1597314.95249, places=0)
self.assertAlmostEqual(results[0]['VMAF_feature_adm_num_score'], 371.80645372916666, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_adm_den_score'], 397.83378972916671, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_anpsnr_score'], 34.164776875, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_vif_scale0_score'], 0.363420489439, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_vif_scale1_score'], 0.766647542135, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_vif_scale2_score'], 0.862854666902, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_vif_scale3_score'], 0.915971778036, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_adm_scale0_score'], 0.90791933424090698, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_adm_scale1_score'], 0.8938705209242691, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_adm_scale2_score'], 0.9300123587874962, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_adm_scale3_score'], 0.9649663148179196, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_vif2_score'], 0.72722361912801026, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_adm3_score'], 0.9241841443734412, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_vif_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_motion_score'], 4.04982535417, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_motion2_score'], 3.8953518541666665, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_adm_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_adm2_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_ansnr_score'], 31.2714392708, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_vif_num_score'], 1597314.86733, places=0)
self.assertAlmostEqual(results[1]['VMAF_feature_vif_den_score'], 1597314.95249, places=0)
self.assertAlmostEqual(results[1]['VMAF_feature_adm_num_score'], 397.83378972916671, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_adm_den_score'], 397.83378972916671, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_anpsnr_score'], 41.9266444375, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_vif_scale0_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_vif_scale1_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_vif_scale2_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_vif_scale3_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_adm_scale0_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_adm_scale1_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_adm_scale2_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_adm_scale3_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_vif2_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_adm3_score'], 1.0, places=4)
def test_run_vmaf_integer_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = VmafIntegerFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_vif_score'], 0.44642331250000006, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_motion_score'], 4.04982535417, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_motion2_score'], 3.8953518541666665, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_score'], 0.9345148541666667, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm2_score'], 0.9345148541666667, places=4) # at version 0.2.4b (ioannis adm fix), adm and adm2 are now identical
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_ansnr_score'], 23.5095715208, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_vif_num_score'], 713111.410502125, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_vif_den_score'], 1597165.5464884583, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_num_score'], 371.8243668541666, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_den_score'], 397.8567857291667, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_anpsnr_score'], 34.164776875, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_vif_scale0_score'], 0.3636620710647402, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_vif_scale1_score'], 0.7674952820232231, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_vif_scale2_score'], 0.8631077727416296, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_vif_scale3_score'], 0.9157200890843669, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale0_score'], 0.90791933424090698, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale1_score'], 0.8938705209242691, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale2_score'], 0.9300123587874962, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale3_score'], 0.9649663148179196, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_vif2_score'], 0.72749630372849, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm3_score'], 0.9241841443734412, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_vif_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_motion_score'], 4.04982535417, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_motion2_score'], 3.8953518541666665, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm2_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_ansnr_score'], 31.2714392708, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_vif_num_score'], 1597165.34910075, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_vif_den_score'], 1597165.5464884583, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_num_score'], 397.8576817708333, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_den_score'], 397.8567857291667, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_anpsnr_score'], 41.9266444375, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_vif_scale0_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_vif_scale1_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_vif_scale2_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_vif_scale3_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale0_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale1_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale2_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale3_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_vif2_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm3_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
@unittest.skip("vifdiff alternative needed, vmaf_feature executable deprecated")
def test_run_vif_frame_difference_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = VifFrameDifferenceFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['VifDiff_feature_vifdiff_score'], 0.26745858333333333, places=4)
self.assertAlmostEqual(results[0]['VifDiff_feature_vifdiff_num_score'], 305412.7661844375, places=0)
self.assertAlmostEqual(results[0]['VifDiff_feature_vifdiff_den_score'], 1113927.6002349583, places=0)
self.assertAlmostEqual(results[1]['VifDiff_feature_vifdiff_score'], 0.9791655833333334, places=4)
self.assertAlmostEqual(results[1]['VifDiff_feature_vifdiff_num_score'], 1113926.2941030415, places=0)
self.assertAlmostEqual(results[1]['VifDiff_feature_vifdiff_den_score'], 1113927.6002349583, places=0)
def test_run_moment_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = MomentFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['Moment_feature_ref1st_score'], 59.788567297525134, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_ref2nd_score'], 4696.668388042269, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_refvar_score'], 1121.519917231203, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_dis1st_score'], 61.332006624999984, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_dis2nd_score'], 4798.659574041666, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_disvar_score'], 1036.837184348847, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_ref1st_score'], 59.788567297525134, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_ref2nd_score'], 4696.668388042269, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_refvar_score'], 1121.519917231203, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_dis1st_score'], 59.788567297525134, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_dis2nd_score'], 4696.668388042269, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_disvar_score'], 1121.519917231203, places=4)
    def test_run_moment_fextractor_10bit(self):
        """Golden-value test for the moment extractor on 10-bit input.

        Relative to the 8-bit golden values, first moments scale by 4
        (two extra bits) and second moments/variances by 16 (4**2).
        """
        ref_path, dis_path, asset, asset_original = set_default_576_324_10bit_videos_for_testing()
        self.fextractor = MomentFeatureExtractor(
            [asset, asset_original],
            None, fifo_mode=True,
            result_store=None
        )
        self.fextractor.run(parallelize=True)
        results = self.fextractor.results
        self.assertAlmostEqual(results[0]['Moment_feature_ref1st_score'], 59.788567297525134 * 4, places=4)
        self.assertAlmostEqual(results[0]['Moment_feature_ref2nd_score'], 4696.668388042269 * 16, places=4)
        self.assertAlmostEqual(results[0]['Moment_feature_refvar_score'], 1121.519917231203 * 16, places=4)
        self.assertAlmostEqual(results[0]['Moment_feature_dis1st_score'], 61.332006624999984 * 4, places=4)
        self.assertAlmostEqual(results[0]['Moment_feature_dis2nd_score'], 4798.659574041666 * 16, places=4)
        self.assertAlmostEqual(results[0]['Moment_feature_disvar_score'], 1036.837184348847 * 16, places=4)
        # Second asset: dis moments equal ref moments.
        self.assertAlmostEqual(results[1]['Moment_feature_ref1st_score'], 59.788567297525134 * 4, places=4)
        self.assertAlmostEqual(results[1]['Moment_feature_ref2nd_score'], 4696.668388042269 * 16, places=4)
        self.assertAlmostEqual(results[1]['Moment_feature_refvar_score'], 1121.519917231203 * 16, places=4)
        self.assertAlmostEqual(results[1]['Moment_feature_dis1st_score'], 59.788567297525134 * 4, places=4)
        self.assertAlmostEqual(results[1]['Moment_feature_dis2nd_score'], 4696.668388042269 * 16, places=4)
        self.assertAlmostEqual(results[1]['Moment_feature_disvar_score'], 1121.519917231203 * 16, places=4)
    def test_run_moment_fextractor_12bit(self):
        """Golden-value test for the moment extractor on 12-bit input
        (a different content than the 8/10-bit tests, so values are
        independent golden constants)."""
        ref_path, dis_path, asset, asset_original = set_default_576_324_12bit_videos_for_testing()
        self.fextractor = MomentFeatureExtractor(
            [asset, asset_original],
            None, fifo_mode=True,
            result_store=None
        )
        self.fextractor.run(parallelize=True)
        results = self.fextractor.results
        self.assertAlmostEqual(results[0]['Moment_feature_ref1st_score'], 979.6711819844536, places=4)
        self.assertAlmostEqual(results[0]['Moment_feature_ref2nd_score'], 1238135.8363054413, places=4)
        self.assertAlmostEqual(results[0]['Moment_feature_refvar_score'], 278292.25886465114, places=4)
        self.assertAlmostEqual(results[0]['Moment_feature_dis1st_score'], 996.2818072702333, places=4)
        self.assertAlmostEqual(results[0]['Moment_feature_dis2nd_score'], 1255533.4389574758, places=4)
        self.assertAlmostEqual(results[0]['Moment_feature_disvar_score'], 262952.8893540034, places=4)
        # Second asset: dis moments equal ref moments.
        self.assertAlmostEqual(results[1]['Moment_feature_ref1st_score'], 979.6711819844536, places=4)
        self.assertAlmostEqual(results[1]['Moment_feature_ref2nd_score'], 1238135.8363054413, places=4)
        self.assertAlmostEqual(results[1]['Moment_feature_refvar_score'], 278292.25886465114, places=4)
        self.assertAlmostEqual(results[1]['Moment_feature_dis1st_score'], 979.6711819844536, places=4)
        self.assertAlmostEqual(results[1]['Moment_feature_dis2nd_score'], 1238135.8363054413, places=4)
        self.assertAlmostEqual(results[1]['Moment_feature_disvar_score'], 278292.25886465114, places=4)
    def test_run_moment_fextractor_16bit(self):
        """Golden-value test for the moment extractor on 16-bit input.

        Relative to the 12-bit golden values, first moments scale by 16.0
        (four extra bits) and second moments/variances by 256.0 (16**2).
        """
        ref_path, dis_path, asset, asset_original = set_default_576_324_16bit_videos_for_testing()
        self.fextractor = MomentFeatureExtractor(
            [asset, asset_original],
            None, fifo_mode=True,
            result_store=None
        )
        self.fextractor.run(parallelize=True)
        results = self.fextractor.results
        self.assertAlmostEqual(results[0]['Moment_feature_ref1st_score'], 979.6711819844536 * 16.0, places=4)
        self.assertAlmostEqual(results[0]['Moment_feature_ref2nd_score'], 1238135.8363054413 * 256.0, places=4)
        self.assertAlmostEqual(results[0]['Moment_feature_refvar_score'], 278292.25886465114 * 256.0, places=4)
        self.assertAlmostEqual(results[0]['Moment_feature_dis1st_score'], 996.2818072702333 * 16.0, places=4)
        self.assertAlmostEqual(results[0]['Moment_feature_dis2nd_score'], 1255533.4389574758 * 256.0, places=4)
        self.assertAlmostEqual(results[0]['Moment_feature_disvar_score'], 262952.8893540034 * 256.0, places=4)
        # Second asset: dis moments equal ref moments.
        self.assertAlmostEqual(results[1]['Moment_feature_ref1st_score'], 979.6711819844536 * 16.0, places=4)
        self.assertAlmostEqual(results[1]['Moment_feature_ref2nd_score'], 1238135.8363054413 * 256.0, places=4)
        self.assertAlmostEqual(results[1]['Moment_feature_refvar_score'], 278292.25886465114 * 256.0, places=4)
        self.assertAlmostEqual(results[1]['Moment_feature_dis1st_score'], 979.6711819844536 * 16.0, places=4)
        self.assertAlmostEqual(results[1]['Moment_feature_dis2nd_score'], 1238135.8363054413 * 256.0, places=4)
        self.assertAlmostEqual(results[1]['Moment_feature_disvar_score'], 278292.25886465114 * 256.0, places=4)
def test_run_psnr_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = PsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['PSNR_feature_psnr_score'], 30.755063979166664, places=4)
self.assertAlmostEqual(results[1]['PSNR_feature_psnr_score'], 60.0, places=4)
def test_run_ansnr_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = AnsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['ANSNR_feature_ansnr_score'], 23.509571520833333, places=4)
self.assertAlmostEqual(results[0]['ANSNR_feature_anpsnr_score'], 34.16477641666666, places=4)
self.assertAlmostEqual(results[1]['ANSNR_feature_ansnr_score'], 31.271439270833337, places=4)
self.assertAlmostEqual(results[1]['ANSNR_feature_anpsnr_score'], 41.926644187499996, places=4)
def test_run_ssim_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = SsimFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['SSIM_feature_ssim_score'], 0.86322654166666657, places=4)
self.assertAlmostEqual(results[0]['SSIM_feature_ssim_l_score'], 0.9981474583333334, places=4)
self.assertAlmostEqual(results[0]['SSIM_feature_ssim_c_score'], 0.96126793750000006, places=4)
self.assertAlmostEqual(results[0]['SSIM_feature_ssim_s_score'], 0.89773633333333336, places=4)
self.assertAlmostEqual(results[1]['SSIM_feature_ssim_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['SSIM_feature_ssim_l_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['SSIM_feature_ssim_c_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['SSIM_feature_ssim_s_score'], 1.0, places=4)
def test_run_ssim_fextractor_flat(self):
ref_path, dis_path, asset, asset_original = set_default_flat_1920_1080_videos_for_testing()
self.fextractor = SsimFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['SSIM_feature_ssim_score'], 0.9087330000000001, places=8)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['SSIM_feature_ssim_l_score'], 0.9087330000000001, places=8)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['SSIM_feature_ssim_c_score'], 1.0, places=8)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['SSIM_feature_ssim_s_score'], 1.0, places=8)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['SSIM_feature_ssim_score'], 1.0, places=8)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['SSIM_feature_ssim_l_score'], 1.0, places=8)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['SSIM_feature_ssim_c_score'], 1.0, places=8)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['SSIM_feature_ssim_s_score'], 1.0, places=8)
except AssertionError as e: self.verificationErrors.append(str(e))
def test_run_ms_ssim_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = MsSsimFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_score'], 0.9632498125, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_l_scale0_score'], 0.9981474583333334, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_c_scale0_score'], 0.96126793750000006, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_s_scale0_score'], 0.89773633333333336, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_l_scale1_score'], 0.99899612500000001, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_c_scale1_score'], 0.9857694375, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_s_scale1_score'], 0.941185875, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_l_scale2_score'], 0.99923564583333324, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_c_scale2_score'], 0.997034020833, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_s_scale2_score'], 0.977992145833, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_l_scale3_score'], 0.99929210416666658, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_c_scale3_score'], 0.999588104167, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_s_scale3_score'], 0.99387125, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_l_scale4_score'], 0.99940356249999995, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_c_scale4_score'], 0.999907625, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_s_scale4_score'], 0.998222583333, places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_l_scale0_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_c_scale0_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_s_scale0_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_l_scale1_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_c_scale1_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_s_scale1_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_l_scale2_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_c_scale2_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_s_scale2_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_l_scale3_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_c_scale3_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_s_scale3_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_l_scale4_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_c_scale4_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_s_scale4_score'], 1., places=4)
def test_run_vmaf_integer_fextractor_checkerboard(self):
ref_path = VmafConfig.test_resource_path("yuv", "checkerboard_1920_1080_10_3_0_0.yuv")
dis_path = VmafConfig.test_resource_path("yuv", "checkerboard_1920_1080_10_3_10_0.yuv")
dis_path2 = VmafConfig.test_resource_path("yuv", "checkerboard_1920_1080_10_3_1_0.yuv")
asset = Asset(dataset="test", content_id=0, asset_id=0,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path,
asset_dict={'width': 1920, 'height': 1080})
asset_original = Asset(dataset="test", content_id=0, asset_id=1,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=ref_path,
asset_dict={'width': 1920, 'height': 1080})
asset2 = Asset(dataset="test", content_id=0, asset_id=2,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path2,
asset_dict={'width': 1920, 'height': 1080})
self.fextractor = VmafIntegerFeatureExtractor(
[asset, asset_original, asset2],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_score'], 0.053996333333333334, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm2_score'], 0.053996333333333334, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale0_score'], 0.23738393128710478, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale1_score'], 0.08524788663335138, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale2_score'], 0.024058909404945077, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale3_score'], 0.018034879735107798, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_motion_score'], 12.554711666666668, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_motion2_score'], 12.554711666666668, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm2_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale0_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale1_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale2_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale3_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_motion_score'], 12.554711666666668, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_motion2_score'], 12.554711666666668, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[2]['VMAF_integer_feature_adm_score'], 0.78533833333333336, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[2]['VMAF_integer_feature_adm2_score'], 0.7853384465157921, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[2]['VMAF_integer_feature_adm_scale0_score'], 0.72132189911792899, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[2]['VMAF_integer_feature_adm_scale1_score'], 0.69259738857522501, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[2]['VMAF_integer_feature_adm_scale2_score'], 0.80415911639244586, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[2]['VMAF_integer_feature_adm_scale3_score'], 0.82791889676239039, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[2]['VMAF_integer_feature_motion_score'], 12.554711666666668, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[2]['VMAF_integer_feature_motion2_score'], 12.554711666666668, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
def test_run_vmaf_integer_fextractor_flat(self):
ref_path, dis_path, asset, asset_original = set_default_flat_1920_1080_videos_for_testing()
self.fextractor = VmafIntegerFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_score'], 1.0, places=4)
self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm2_score'], 1.0, places=4)
self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale0_score'], 1.0, places=4)
self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale1_score'], 1.0, places=4)
self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale2_score'], 1.0, places=4)
self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale3_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm2_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale0_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale1_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale2_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale3_score'], 1.0, places=4)
def test_run_psnr_fextractor_proc(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
callback_dict = {
'ref_proc_callback': 'identity',
'dis_proc_callback': 'multiply',
}
asset.asset_dict.update(callback_dict)
asset_original.asset_dict.update(callback_dict)
self.fextractor = PsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None,
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['PSNR_feature_psnr_score'], 27.645446604166665, places=8)
self.assertAlmostEqual(results[1]['PSNR_feature_psnr_score'], 31.87683660416667, places=8)
def test_run_pypsnr_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = PypsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnry_score'], 30.755063979166664, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnru_score'], 38.449441057158786, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnrv_score'], 40.9919102486235, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnry_score'], 60.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnru_score'], 60.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnrv_score'], 60.0, places=4)
def test_run_pypsnr_fextractor_10bit(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_10bit_videos_for_testing()
self.fextractor = PypsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnry_score'], 30.780573260053277, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnru_score'], 38.769832063651364, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnrv_score'], 41.28418847734209, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnry_score'], 72.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnru_score'], 72.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnrv_score'], 72.0, places=4)
def test_run_pypsnr_fextractor_10bit_b(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_10bit_videos_for_testing_b()
self.fextractor = PypsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnry_score'], 32.57145231892744, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnru_score'], 39.03859552689696, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnrv_score'], 41.28060001337217, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnry_score'], 72.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnru_score'], 72.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnrv_score'], 72.0, places=4)
def test_run_pypsnr_fextractor_12bit(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_12bit_videos_for_testing()
self.fextractor = PypsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnry_score'], 32.577817940053734, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnru_score'], 39.044961148023255, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnrv_score'], 41.28696563449846, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnry_score'], 84.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnru_score'], 84.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnrv_score'], 84.0, places=4)
def test_run_pypsnr_fextractor_16bit(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_16bit_videos_for_testing()
self.fextractor = PypsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnry_score'], 32.579806240311484, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnru_score'], 39.046949448281005, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnrv_score'], 41.288953934756215, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnry_score'], 108.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnru_score'], 108.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnrv_score'], 108.0, places=4)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main(verbosity=2)
| 60.561475 | 182 | 0.730121 | from __future__ import absolute_import
import os
import unittest
import re
from vmaf.config import VmafConfig
from vmaf.core.feature_extractor import VmafFeatureExtractor, \
MomentFeatureExtractor, \
PsnrFeatureExtractor, SsimFeatureExtractor, MsSsimFeatureExtractor, \
VifFrameDifferenceFeatureExtractor, \
AnsnrFeatureExtractor, PypsnrFeatureExtractor, VmafIntegerFeatureExtractor
from vmaf.core.asset import Asset
from vmaf.core.result_store import FileSystemResultStore
from test.testutil import set_default_576_324_videos_for_testing, set_default_flat_1920_1080_videos_for_testing, \
set_default_576_324_10bit_videos_for_testing, set_default_576_324_12bit_videos_for_testing, \
set_default_576_324_16bit_videos_for_testing, set_default_576_324_10bit_videos_for_testing_b
__copyright__ = "Copyright 2016-2020, Netflix, Inc."
__license__ = "BSD+Patent"
class FeatureExtractorTest(unittest.TestCase):
def setUp(self) -> None:
self.verificationErrors = []
self.maxDiff = None
def tearDown(self):
if hasattr(self, 'fextractor'):
self.fextractor.remove_results()
pass
self.assertEqual([], self.verificationErrors)
def test_executor_id(self):
asset = Asset(dataset="test", content_id=0, asset_id=1,
ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
asset_dict={'width': 720, 'height': 480})
fextractor = VmafFeatureExtractor([asset], None)
self.assertEqual(fextractor.executor_id, "VMAF_feature_V0.2.7")
def test_get_log_file_path(self):
import hashlib
asset = Asset(dataset="test", content_id=0, asset_id=1,
ref_path="dir/refvideo.yuv", dis_path="dir/disvideo.yuv",
asset_dict={'width':720, 'height':480,},
workdir_root="my_workdir_root")
fextractor = VmafFeatureExtractor([asset], None)
log_file_path = fextractor._get_log_file_path(asset)
h = hashlib.sha1("test_0_1_refvideo_720x480_vs_disvideo_720x480_q_720x480".encode("utf-8")).hexdigest()
self.assertTrue(re.match(r"^my_workdir_root/[a-zA-Z0-9-]+/VMAF_feature_V0.2.7_{}$".format(h), log_file_path))
def test_run_vmaf_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = VmafFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['VMAF_feature_vif_score'], 0.4460930625, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_motion_score'], 4.04982535417, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_motion2_score'], 3.8953518541666665, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_motion0_score'], 0.0, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_adm_score'], 0.9345148541666667, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_adm2_score'], 0.9345148541666667, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_ansnr_score'], 23.5095715208, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_vif_num_score'], 712650.023478, places=0)
self.assertAlmostEqual(results[0]['VMAF_feature_vif_den_score'], 1597314.95249, places=0)
self.assertAlmostEqual(results[0]['VMAF_feature_adm_num_score'], 371.80645372916666, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_adm_den_score'], 397.83378972916671, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_anpsnr_score'], 34.164776875, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_vif_scale0_score'], 0.363420489439, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_vif_scale1_score'], 0.766647542135, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_vif_scale2_score'], 0.862854666902, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_vif_scale3_score'], 0.915971778036, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_adm_scale0_score'], 0.90791933424090698, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_adm_scale1_score'], 0.8938705209242691, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_adm_scale2_score'], 0.9300123587874962, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_adm_scale3_score'], 0.9649663148179196, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_vif2_score'], 0.72722361912801026, places=4)
self.assertAlmostEqual(results[0]['VMAF_feature_adm3_score'], 0.9241841443734412, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_vif_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_motion_score'], 4.04982535417, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_motion2_score'], 3.8953518541666665, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_adm_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_adm2_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_ansnr_score'], 31.2714392708, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_vif_num_score'], 1597314.86733, places=0)
self.assertAlmostEqual(results[1]['VMAF_feature_vif_den_score'], 1597314.95249, places=0)
self.assertAlmostEqual(results[1]['VMAF_feature_adm_num_score'], 397.83378972916671, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_adm_den_score'], 397.83378972916671, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_anpsnr_score'], 41.9266444375, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_vif_scale0_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_vif_scale1_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_vif_scale2_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_vif_scale3_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_adm_scale0_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_adm_scale1_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_adm_scale2_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_adm_scale3_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_vif2_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_feature_adm3_score'], 1.0, places=4)
def test_run_vmaf_integer_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = VmafIntegerFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_vif_score'], 0.44642331250000006, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_motion_score'], 4.04982535417, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_motion2_score'], 3.8953518541666665, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_score'], 0.9345148541666667, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm2_score'], 0.9345148541666667, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_ansnr_score'], 23.5095715208, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_vif_num_score'], 713111.410502125, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_vif_den_score'], 1597165.5464884583, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_num_score'], 371.8243668541666, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_den_score'], 397.8567857291667, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_anpsnr_score'], 34.164776875, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_vif_scale0_score'], 0.3636620710647402, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_vif_scale1_score'], 0.7674952820232231, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_vif_scale2_score'], 0.8631077727416296, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_vif_scale3_score'], 0.9157200890843669, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale0_score'], 0.90791933424090698, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale1_score'], 0.8938705209242691, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale2_score'], 0.9300123587874962, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale3_score'], 0.9649663148179196, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_vif2_score'], 0.72749630372849, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm3_score'], 0.9241841443734412, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_vif_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_motion_score'], 4.04982535417, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_motion2_score'], 3.8953518541666665, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm2_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_ansnr_score'], 31.2714392708, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_vif_num_score'], 1597165.34910075, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_vif_den_score'], 1597165.5464884583, places=0)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_num_score'], 397.8576817708333, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_den_score'], 397.8567857291667, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_anpsnr_score'], 41.9266444375, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_vif_scale0_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_vif_scale1_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_vif_scale2_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_vif_scale3_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale0_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale1_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale2_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale3_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_vif2_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm3_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
@unittest.skip("vifdiff alternative needed, vmaf_feature executable deprecated")
def test_run_vif_frame_difference_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = VifFrameDifferenceFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['VifDiff_feature_vifdiff_score'], 0.26745858333333333, places=4)
self.assertAlmostEqual(results[0]['VifDiff_feature_vifdiff_num_score'], 305412.7661844375, places=0)
self.assertAlmostEqual(results[0]['VifDiff_feature_vifdiff_den_score'], 1113927.6002349583, places=0)
self.assertAlmostEqual(results[1]['VifDiff_feature_vifdiff_score'], 0.9791655833333334, places=4)
self.assertAlmostEqual(results[1]['VifDiff_feature_vifdiff_num_score'], 1113926.2941030415, places=0)
self.assertAlmostEqual(results[1]['VifDiff_feature_vifdiff_den_score'], 1113927.6002349583, places=0)
def test_run_moment_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = MomentFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['Moment_feature_ref1st_score'], 59.788567297525134, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_ref2nd_score'], 4696.668388042269, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_refvar_score'], 1121.519917231203, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_dis1st_score'], 61.332006624999984, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_dis2nd_score'], 4798.659574041666, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_disvar_score'], 1036.837184348847, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_ref1st_score'], 59.788567297525134, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_ref2nd_score'], 4696.668388042269, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_refvar_score'], 1121.519917231203, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_dis1st_score'], 59.788567297525134, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_dis2nd_score'], 4696.668388042269, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_disvar_score'], 1121.519917231203, places=4)
def test_run_moment_fextractor_10bit(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_10bit_videos_for_testing()
self.fextractor = MomentFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['Moment_feature_ref1st_score'], 59.788567297525134 * 4, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_ref2nd_score'], 4696.668388042269 * 16, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_refvar_score'], 1121.519917231203 * 16, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_dis1st_score'], 61.332006624999984 * 4, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_dis2nd_score'], 4798.659574041666 * 16, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_disvar_score'], 1036.837184348847 * 16, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_ref1st_score'], 59.788567297525134 * 4, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_ref2nd_score'], 4696.668388042269 * 16, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_refvar_score'], 1121.519917231203 * 16, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_dis1st_score'], 59.788567297525134 * 4, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_dis2nd_score'], 4696.668388042269 * 16, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_disvar_score'], 1121.519917231203 * 16, places=4)
def test_run_moment_fextractor_12bit(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_12bit_videos_for_testing()
self.fextractor = MomentFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['Moment_feature_ref1st_score'], 979.6711819844536, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_ref2nd_score'], 1238135.8363054413, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_refvar_score'], 278292.25886465114, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_dis1st_score'], 996.2818072702333, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_dis2nd_score'], 1255533.4389574758, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_disvar_score'], 262952.8893540034, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_ref1st_score'], 979.6711819844536, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_ref2nd_score'], 1238135.8363054413, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_refvar_score'], 278292.25886465114, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_dis1st_score'], 979.6711819844536, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_dis2nd_score'], 1238135.8363054413, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_disvar_score'], 278292.25886465114, places=4)
def test_run_moment_fextractor_16bit(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_16bit_videos_for_testing()
self.fextractor = MomentFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['Moment_feature_ref1st_score'], 979.6711819844536 * 16.0, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_ref2nd_score'], 1238135.8363054413 * 256.0, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_refvar_score'], 278292.25886465114 * 256.0, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_dis1st_score'], 996.2818072702333 * 16.0, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_dis2nd_score'], 1255533.4389574758 * 256.0, places=4)
self.assertAlmostEqual(results[0]['Moment_feature_disvar_score'], 262952.8893540034 * 256.0, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_ref1st_score'], 979.6711819844536 * 16.0, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_ref2nd_score'], 1238135.8363054413 * 256.0, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_refvar_score'], 278292.25886465114 * 256.0, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_dis1st_score'], 979.6711819844536 * 16.0, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_dis2nd_score'], 1238135.8363054413 * 256.0, places=4)
self.assertAlmostEqual(results[1]['Moment_feature_disvar_score'], 278292.25886465114 * 256.0, places=4)
def test_run_psnr_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = PsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['PSNR_feature_psnr_score'], 30.755063979166664, places=4)
self.assertAlmostEqual(results[1]['PSNR_feature_psnr_score'], 60.0, places=4)
def test_run_ansnr_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = AnsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['ANSNR_feature_ansnr_score'], 23.509571520833333, places=4)
self.assertAlmostEqual(results[0]['ANSNR_feature_anpsnr_score'], 34.16477641666666, places=4)
self.assertAlmostEqual(results[1]['ANSNR_feature_ansnr_score'], 31.271439270833337, places=4)
self.assertAlmostEqual(results[1]['ANSNR_feature_anpsnr_score'], 41.926644187499996, places=4)
def test_run_ssim_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = SsimFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['SSIM_feature_ssim_score'], 0.86322654166666657, places=4)
self.assertAlmostEqual(results[0]['SSIM_feature_ssim_l_score'], 0.9981474583333334, places=4)
self.assertAlmostEqual(results[0]['SSIM_feature_ssim_c_score'], 0.96126793750000006, places=4)
self.assertAlmostEqual(results[0]['SSIM_feature_ssim_s_score'], 0.89773633333333336, places=4)
self.assertAlmostEqual(results[1]['SSIM_feature_ssim_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['SSIM_feature_ssim_l_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['SSIM_feature_ssim_c_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['SSIM_feature_ssim_s_score'], 1.0, places=4)
def test_run_ssim_fextractor_flat(self):
ref_path, dis_path, asset, asset_original = set_default_flat_1920_1080_videos_for_testing()
self.fextractor = SsimFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['SSIM_feature_ssim_score'], 0.9087330000000001, places=8)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['SSIM_feature_ssim_l_score'], 0.9087330000000001, places=8)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['SSIM_feature_ssim_c_score'], 1.0, places=8)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['SSIM_feature_ssim_s_score'], 1.0, places=8)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['SSIM_feature_ssim_score'], 1.0, places=8)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['SSIM_feature_ssim_l_score'], 1.0, places=8)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['SSIM_feature_ssim_c_score'], 1.0, places=8)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['SSIM_feature_ssim_s_score'], 1.0, places=8)
except AssertionError as e: self.verificationErrors.append(str(e))
def test_run_ms_ssim_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = MsSsimFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_score'], 0.9632498125, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_l_scale0_score'], 0.9981474583333334, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_c_scale0_score'], 0.96126793750000006, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_s_scale0_score'], 0.89773633333333336, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_l_scale1_score'], 0.99899612500000001, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_c_scale1_score'], 0.9857694375, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_s_scale1_score'], 0.941185875, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_l_scale2_score'], 0.99923564583333324, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_c_scale2_score'], 0.997034020833, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_s_scale2_score'], 0.977992145833, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_l_scale3_score'], 0.99929210416666658, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_c_scale3_score'], 0.999588104167, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_s_scale3_score'], 0.99387125, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_l_scale4_score'], 0.99940356249999995, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_c_scale4_score'], 0.999907625, places=4)
self.assertAlmostEqual(results[0]['MS_SSIM_feature_ms_ssim_s_scale4_score'], 0.998222583333, places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_l_scale0_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_c_scale0_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_s_scale0_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_l_scale1_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_c_scale1_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_s_scale1_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_l_scale2_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_c_scale2_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_s_scale2_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_l_scale3_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_c_scale3_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_s_scale3_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_l_scale4_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_c_scale4_score'], 1., places=4)
self.assertAlmostEqual(results[1]['MS_SSIM_feature_ms_ssim_s_scale4_score'], 1., places=4)
def test_run_vmaf_integer_fextractor_checkerboard(self):
ref_path = VmafConfig.test_resource_path("yuv", "checkerboard_1920_1080_10_3_0_0.yuv")
dis_path = VmafConfig.test_resource_path("yuv", "checkerboard_1920_1080_10_3_10_0.yuv")
dis_path2 = VmafConfig.test_resource_path("yuv", "checkerboard_1920_1080_10_3_1_0.yuv")
asset = Asset(dataset="test", content_id=0, asset_id=0,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path,
asset_dict={'width': 1920, 'height': 1080})
asset_original = Asset(dataset="test", content_id=0, asset_id=1,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=ref_path,
asset_dict={'width': 1920, 'height': 1080})
asset2 = Asset(dataset="test", content_id=0, asset_id=2,
workdir_root=VmafConfig.workdir_path(),
ref_path=ref_path,
dis_path=dis_path2,
asset_dict={'width': 1920, 'height': 1080})
self.fextractor = VmafIntegerFeatureExtractor(
[asset, asset_original, asset2],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_score'], 0.053996333333333334, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm2_score'], 0.053996333333333334, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale0_score'], 0.23738393128710478, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale1_score'], 0.08524788663335138, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale2_score'], 0.024058909404945077, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale3_score'], 0.018034879735107798, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_motion_score'], 12.554711666666668, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[0]['VMAF_integer_feature_motion2_score'], 12.554711666666668, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm2_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale0_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale1_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale2_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale3_score'], 1.0, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_motion_score'], 12.554711666666668, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[1]['VMAF_integer_feature_motion2_score'], 12.554711666666668, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[2]['VMAF_integer_feature_adm_score'], 0.78533833333333336, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[2]['VMAF_integer_feature_adm2_score'], 0.7853384465157921, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[2]['VMAF_integer_feature_adm_scale0_score'], 0.72132189911792899, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[2]['VMAF_integer_feature_adm_scale1_score'], 0.69259738857522501, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[2]['VMAF_integer_feature_adm_scale2_score'], 0.80415911639244586, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[2]['VMAF_integer_feature_adm_scale3_score'], 0.82791889676239039, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[2]['VMAF_integer_feature_motion_score'], 12.554711666666668, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
try: self.assertAlmostEqual(results[2]['VMAF_integer_feature_motion2_score'], 12.554711666666668, places=4)
except AssertionError as e: self.verificationErrors.append(str(e))
def test_run_vmaf_integer_fextractor_flat(self):
ref_path, dis_path, asset, asset_original = set_default_flat_1920_1080_videos_for_testing()
self.fextractor = VmafIntegerFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_score'], 1.0, places=4)
self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm2_score'], 1.0, places=4)
self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale0_score'], 1.0, places=4)
self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale1_score'], 1.0, places=4)
self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale2_score'], 1.0, places=4)
self.assertAlmostEqual(results[0]['VMAF_integer_feature_adm_scale3_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm2_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale0_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale1_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale2_score'], 1.0, places=4)
self.assertAlmostEqual(results[1]['VMAF_integer_feature_adm_scale3_score'], 1.0, places=4)
def test_run_psnr_fextractor_proc(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
callback_dict = {
'ref_proc_callback': 'identity',
'dis_proc_callback': 'multiply',
}
asset.asset_dict.update(callback_dict)
asset_original.asset_dict.update(callback_dict)
self.fextractor = PsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=False,
result_store=None,
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['PSNR_feature_psnr_score'], 27.645446604166665, places=8)
self.assertAlmostEqual(results[1]['PSNR_feature_psnr_score'], 31.87683660416667, places=8)
def test_run_pypsnr_fextractor(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_videos_for_testing()
self.fextractor = PypsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnry_score'], 30.755063979166664, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnru_score'], 38.449441057158786, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnrv_score'], 40.9919102486235, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnry_score'], 60.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnru_score'], 60.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnrv_score'], 60.0, places=4)
def test_run_pypsnr_fextractor_10bit(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_10bit_videos_for_testing()
self.fextractor = PypsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnry_score'], 30.780573260053277, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnru_score'], 38.769832063651364, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnrv_score'], 41.28418847734209, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnry_score'], 72.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnru_score'], 72.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnrv_score'], 72.0, places=4)
def test_run_pypsnr_fextractor_10bit_b(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_10bit_videos_for_testing_b()
self.fextractor = PypsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnry_score'], 32.57145231892744, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnru_score'], 39.03859552689696, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnrv_score'], 41.28060001337217, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnry_score'], 72.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnru_score'], 72.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnrv_score'], 72.0, places=4)
def test_run_pypsnr_fextractor_12bit(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_12bit_videos_for_testing()
self.fextractor = PypsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnry_score'], 32.577817940053734, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnru_score'], 39.044961148023255, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnrv_score'], 41.28696563449846, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnry_score'], 84.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnru_score'], 84.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnrv_score'], 84.0, places=4)
def test_run_pypsnr_fextractor_16bit(self):
ref_path, dis_path, asset, asset_original = set_default_576_324_16bit_videos_for_testing()
self.fextractor = PypsnrFeatureExtractor(
[asset, asset_original],
None, fifo_mode=True,
result_store=None
)
self.fextractor.run(parallelize=True)
results = self.fextractor.results
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnry_score'], 32.579806240311484, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnru_score'], 39.046949448281005, places=4)
self.assertAlmostEqual(results[0]['Pypsnr_feature_psnrv_score'], 41.288953934756215, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnry_score'], 108.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnru_score'], 108.0, places=4)
self.assertAlmostEqual(results[1]['Pypsnr_feature_psnrv_score'], 108.0, places=4)
if __name__ == '__main__':
    # Run the test suite directly; verbosity=2 prints each test name as it runs.
    unittest.main(verbosity=2)
| true | true |
f724bec965759ccd317b2b385268f2ab47cb4ab2 | 1,838 | py | Python | scripts/convert_protocols_to_exams.py | timptner/farafmb.de | 2b154278d8b44ea3adecafcb8554c1b0b0055e01 | [
"MIT"
] | null | null | null | scripts/convert_protocols_to_exams.py | timptner/farafmb.de | 2b154278d8b44ea3adecafcb8554c1b0b0055e01 | [
"MIT"
] | 1 | 2022-02-17T20:28:19.000Z | 2022-02-17T20:28:19.000Z | scripts/convert_protocols_to_exams.py | timptner/farafmb.de | 2b154278d8b44ea3adecafcb8554c1b0b0055e01 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import json
from pathlib import Path
def get_valid_file_path(file_path: str) -> Path:
    """Resolve ``file_path`` and return it as an absolute :class:`Path`.

    Raises:
        FileNotFoundError: If the resolved path does not point to an
            existing regular file. (Previously a bare ``Exception`` was
            raised; ``FileNotFoundError`` is more specific while remaining
            an ``Exception`` subclass for existing callers.)
    """
    path = Path(file_path).resolve()
    if not path.is_file():
        raise FileNotFoundError("No file found! Please check your path and try again.")
    return path
def convert_data(data: list) -> list:
    """Rewrite each fixture entry in place to the new 'exams.exam' schema.

    Renames the ``author``/``file``/``submitted`` fields and adds the
    ``is_archived`` flag. Returns the same (mutated) list.
    """
    print(f"Found {len(data)} entries, updating ... ", end='')
    field_renames = (
        ('author', 'minute_author'),
        ('file', 'minute_file'),
        ('submitted', 'submitted_on'),
    )
    for entry in data:
        entry['model'] = 'exams.exam'
        fields = entry['fields']
        for old_key, new_key in field_renames:
            fields[new_key] = fields.pop(old_key)
        fields['is_archived'] = False
    print('Done!')
    return data
def get_valid_folder_path(folder_path: str) -> Path:
    """Resolve ``folder_path`` and return it as an absolute :class:`Path`.

    Raises:
        NotADirectoryError: If the resolved path is not an existing
            directory.

    Bug fix: the previous check validated ``path.parent.is_dir()``, so a
    destination folder that does not exist itself passed validation and the
    later ``write_text`` in :func:`main` failed with a confusing error.
    """
    path = Path(folder_path).resolve()
    if not path.is_dir():
        raise NotADirectoryError("No folder found! Please check your path and try again.")
    return path
def main():
    """Interactively convert a protocols dump into an ``exams.json`` fixture.

    Reads a JSON dump from a user-supplied path, converts it via
    :func:`convert_data`, and writes ``exams.json`` into a user-supplied
    destination folder. Refuses to overwrite an existing ``exams.json``.
    """
    source = input("Please specify a file path where the dump file can be found.\n> ")
    path = get_valid_file_path(source)

    # Read/write explicitly as UTF-8: ensure_ascii=False below may emit
    # non-ASCII characters, which would break on a non-UTF-8 locale default.
    data: list = json.loads(path.read_text(encoding='utf-8'))
    data = convert_data(data)

    destination = input("Please specify a folder path where the new dump file should be stored.\n> ")
    folder = get_valid_folder_path(destination)

    file = folder / 'exams.json'
    if file.exists():
        raise Exception("File 'exams.json' already exists! Please move or delete the existing file first.")
    # Reuse the already-built `file` path instead of rebuilding it inline.
    file.write_text(json.dumps(data, ensure_ascii=False), encoding='utf-8')
    print("New file 'exams.json' created!")
if __name__ == '__main__':
    # Run the interactive conversion when invoked as a standalone script.
    main()
| 31.689655 | 107 | 0.654516 |
import json
from pathlib import Path
def get_valid_file_path(file_path: str) -> Path:
path = Path(file_path).resolve()
if not path.is_file():
raise Exception("No file found! Please check your path and try again.")
return path
def convert_data(data: list) -> list:
print(f"Found {len(data)} entries, updating ... ", end='')
for item in data:
item['model'] = 'exams.exam'
fields: dict = item['fields']
fields['minute_author'] = fields.pop('author')
fields['minute_file'] = fields.pop('file')
fields['submitted_on'] = fields.pop('submitted')
fields['is_archived'] = False
print('Done!')
return data
def get_valid_folder_path(folder_path: str) -> Path:
path = Path(folder_path).resolve()
if not path.parent.is_dir():
raise Exception("No folder found! Please check your path and try again.")
return path
def main():
source = input("Please specify a file path where the dump file can be found.\n> ")
path = get_valid_file_path(source)
data: list = json.loads(path.read_text())
data = convert_data(data)
destination = input("Please specify a folder path where the new dump file should be stored.\n> ")
path = get_valid_folder_path(destination)
file = path / 'exams.json'
if file.exists():
raise Exception("File 'exams.json' already exists! Please move or delete the existing file first.")
else:
(path / 'exams.json').write_text(json.dumps(data, ensure_ascii=False))
print("New file 'exams.json' created!")
if __name__ == '__main__':
main()
| true | true |
f724bf1ecded89830c078e1879e035935da3e2ed | 6,858 | py | Python | doc/user-manual/conf.py | phadej/agda | 2fa8ede09451d43647f918dbfb24ff7b27c52edc | [
"BSD-3-Clause"
] | null | null | null | doc/user-manual/conf.py | phadej/agda | 2fa8ede09451d43647f918dbfb24ff7b27c52edc | [
"BSD-3-Clause"
] | null | null | null | doc/user-manual/conf.py | phadej/agda | 2fa8ede09451d43647f918dbfb24ff7b27c52edc | [
"BSD-3-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
#
# Configuration file for the Sphinx documentation builder.
#
# This file does only contain a selection of the most common options. For a
# full list see the documentation:
# http://www.sphinx-doc.org/en/master/config
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'Agda'
copyright = u'''2005-2018 remains with the authors.
Agda 2 was originally written by Ulf Norell,
partially based on code from Agda 1 by Catarina Coquand and Makoto Takeyama,
and from Agdalight by Ulf Norell and Andreas Abel.
Agda 2 is currently actively developed mainly by Andreas Abel,
Guillaume Allais, Jesper Cockx, Nils Anders Danielsson, Philipp
Hausmann, Fredrik Nordvall Forsberg, Ulf Norell, Víctor López Juan,
Andrés Sicard-Ramírez, and Andrea Vezzosi.
Further, Agda 2 has received contributions by, amongst others, Stevan
Andjelkovic, Marcin Benke, Jean-Philippe Bernardy, Guillaume Brunerie,
James Chapman, Dominique Devriese, Péter Diviánszki, Olle Fredriksson,
Adam Gundry, Daniel Gustafsson, Kuen-Bang Hou (favonia), Patrik
Jansson, Alan Jeffrey, Wolfram Kahl, Wen Kokke, John Leo, Fredrik Lindblad,
Francesco Mazzoli, Stefan Monnier, Darin Morrison, Guilhem Moulin,
Nicolas Pouillard, Benjamin Price, Nobuo Yamashita, Christian Sattler,
Makoto Takeyama and Tesla Ice Zhang. The full list of contributors is
available at https://github.com/agda/agda/graphs/contributors'''
author = u'The Agda Team'
# The short X.Y version
version = '2.6.0'
# The full version, including alpha/beta/rc tags.
# Kept identical to `version` on purpose.
release = version
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#
# If you change the version here, also change it in the
# `requirements.txt` file [Issue #1936].
needs_sphinx = '1.8.3'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.imgmath',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
#
# source_suffix = ['.rst', '.md']
# Literate Agda (.lagda.rst) files are treated as reStructuredText sources.
source_suffix = ['.lagda.rst','.rst']
# The master toctree document.
master_doc = 'index'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# Default language used for code blocks with no explicit language.
highlight_language = 'Agda'
# Use the Read the Docs theme for HTML output.
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
# html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself.  Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Agdadoc'
# -- Options for LaTeX output ------------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    'preamble': r'''
% Customised setup for certain characters.
\usepackage{amsmath}
\usepackage{bbm}
\usepackage{mathtools}
\usepackage{stmaryrd}
\usepackage{pifont}
\usepackage{keystroke}
\input{unicode-symbols-sphinx.tex.txt}
''',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Extra files copied into the LaTeX build directory.
# NOTE(review): an earlier assignment listing "mystyle.sty" was immediately
# overwritten by this one, so "mystyle.sty" was never shipped; the dead
# assignment has been removed. Re-add the file here if it is actually needed.
latex_additional_files = ["unicode-symbols-sphinx.tex.txt"]

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'Agda.tex', u'Agda User Manual', u'The Agda Team', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'agda', 'Agda Documentation',
     [author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
# NOTE(review): the description below is still the Sphinx quickstart
# placeholder text; consider replacing it with a real one-line summary.
texinfo_documents = [
    (master_doc, 'Agda', 'Agda Documentation',
     author, 'Agda', 'One line description of project.',
     'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
| 31.897674 | 79 | 0.681248 |
project = 'Agda'
copyright = u'''2005-2018 remains with the authors.
Agda 2 was originally written by Ulf Norell,
partially based on code from Agda 1 by Catarina Coquand and Makoto Takeyama,
and from Agdalight by Ulf Norell and Andreas Abel.
Agda 2 is currently actively developed mainly by Andreas Abel,
Guillaume Allais, Jesper Cockx, Nils Anders Danielsson, Philipp
Hausmann, Fredrik Nordvall Forsberg, Ulf Norell, Víctor López Juan,
Andrés Sicard-Ramírez, and Andrea Vezzosi.
Further, Agda 2 has received contributions by, amongst others, Stevan
Andjelkovic, Marcin Benke, Jean-Philippe Bernardy, Guillaume Brunerie,
James Chapman, Dominique Devriese, Péter Diviánszki, Olle Fredriksson,
Adam Gundry, Daniel Gustafsson, Kuen-Bang Hou (favonia), Patrik
Jansson, Alan Jeffrey, Wolfram Kahl, Wen Kokke, John Leo, Fredrik Lindblad,
Francesco Mazzoli, Stefan Monnier, Darin Morrison, Guilhem Moulin,
Nicolas Pouillard, Benjamin Price, Nobuo Yamashita, Christian Sattler,
Makoto Takeyama and Tesla Ice Zhang. The full list of contributors is
available at https://github.com/agda/agda/graphs/contributors'''
author = u'The Agda Team'
version = '2.6.0'
release = version
sphinx = '1.8.3'
extensions = [
'sphinx.ext.imgmath',
]
templates_path = ['_templates']
source_suffix = ['.lagda.rst','.rst']
master_doc = 'index'
language = None
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
pygments_style = 'sphinx'
highlight_language = 'Agda'
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
# html_sidebars = {}
# -- Options for HTMLHelp output ---------------------------------------------
# Output file base name for HTML help builder.
htmlhelp_basename = 'Agdadoc'
# -- Options for LaTeX output ------------------------------------------------
latex_additional_files = ["mystyle.sty"]
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#
'preamble': r'''
% Customised setup for certain characters.
\usepackage{amsmath}
\usepackage{bbm}
\usepackage{mathtools}
\usepackage{stmaryrd}
\usepackage{pifont}
\usepackage{keystroke}
\input{unicode-symbols-sphinx.tex.txt}
''',
# Latex figure (float) alignment
#
# 'figure_align': 'htbp',
}
# NOTE(review): this rebinding silently overwrites the ["mystyle.sty"]
# assignment above, so mystyle.sty is never copied into the LaTeX build.
# Confirm whether the two lists should be merged into one assignment or the
# first assignment deleted.
latex_additional_files = ["unicode-symbols-sphinx.tex.txt"]
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Agda.tex', u'Agda User Manual', u'The Agda Team', 'manual'),
]
# -- Options for manual page output ------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'agda', 'Agda Documentation',
[author], 1)
]
# -- Options for Texinfo output ----------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Agda', 'Agda Documentation',
author, 'Agda', 'One line description of project.',
'Miscellaneous'),
]
# -- Options for Epub output -------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = project
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''
# A unique identification for the text.
#
# epub_uid = ''
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# -- Extension configuration -------------------------------------------------
| true | true |
f724bf3443b15e4d6d7686f56cce4260cbc558a0 | 460 | py | Python | fma/analyze.py | adipasquale/frontmatter-analysis | 068b8870ee35569a81600f637569ad589087e2a8 | [
"MIT"
] | null | null | null | fma/analyze.py | adipasquale/frontmatter-analysis | 068b8870ee35569a81600f637569ad589087e2a8 | [
"MIT"
] | null | null | null | fma/analyze.py | adipasquale/frontmatter-analysis | 068b8870ee35569a81600f637569ad589087e2a8 | [
"MIT"
] | null | null | null | import pandas as pd
import os
from pathlib import Path
import frontmatter
import argparse
if __name__ == "__main__":
    # Collect the YAML front matter of every Markdown file in the given
    # directory and print per-field summary statistics.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument("path", help="path containing .md files")
    cli_args = arg_parser.parse_args()
    records = [
        frontmatter.load(md_file).metadata
        for md_file in Path(cli_args.path).glob('*.md')
    ]
    frame = pd.DataFrame(records)
    # Temporarily widen pandas' display so the transposed table is not wrapped.
    with pd.option_context('display.width', 100):
        print(frame.describe().transpose())
| 28.75 | 85 | 0.702174 | import pandas as pd
import os
from pathlib import Path
import frontmatter
import argparse
if __name__ == "__main__":
    # Summarise the YAML front matter of all Markdown files in a directory.
    cli = argparse.ArgumentParser()
    cli.add_argument("path", help="path containing .md files")
    opts = cli.parse_args()
    metadata_rows = [
        frontmatter.load(md_path).metadata
        for md_path in Path(opts.path).glob('*.md')
    ]
    summary = pd.DataFrame(metadata_rows)
    # Widen the pandas display width so the summary table prints unwrapped.
    with pd.option_context('display.width', 100):
        print(summary.describe().transpose())
| true | true |
f724bf5a11fa8d7fe68f54b4735fe3d897c56f22 | 499 | py | Python | python2/main.py | miptleha/bubble-sort | bae2212e80333de343e85c72c0ceef17136e88d5 | [
"MIT"
] | null | null | null | python2/main.py | miptleha/bubble-sort | bae2212e80333de343e85c72c0ceef17136e88d5 | [
"MIT"
] | null | null | null | python2/main.py | miptleha/bubble-sort | bae2212e80333de343e85c72c0ceef17136e88d5 | [
"MIT"
] | null | null | null | import time
def getMaxSubSum(a):
    """Return the maximum sum of any contiguous (possibly empty) subarray of ``a``.

    Kadane's algorithm: O(len(a)) time, O(1) extra space.  The empty
    subarray is allowed, so the result is never negative (an all-negative
    input yields 0).

    :param a: sequence of numbers
    :return: largest contiguous-subarray sum (0 for an empty sequence)
    """
    # Bug fix: the original iterated ``range(0, n)`` over the *global* n,
    # which is wrong (or raises IndexError) for inputs of any other length.
    best = 0
    running = 0
    for value in a:
        running += value
        best = max(best, running)
        if running < 0:
            # A negative prefix can never help a later subarray; restart.
            running = 0
    return best
# Benchmark driver: time `n` calls of getMaxSubSum on an n-element array.
n = 10000
# Build the test input with alternating signs: 0, -1, 2, -3, 4, ...
a = []
for i in range(0, n):
    a.append(pow(-1, i) * i)
# Debug dump of the generated array (left disabled while benchmarking):
#for i in range(0, n):
#    print(a[i], " ")
#print();
start = time.perf_counter()
res = 0;
for i in range(0, n):
    # Perturb the first element each round so every call does real work.
    a[0] += 1
    res += getMaxSubSum(a)
#    print(res, " ")
end = time.perf_counter()
print("{:.5f}".format(end - start), "seconds") | 17.206897 | 46 | 0.478958 | import time
def getMaxSubSum(a):
    """Return the maximum sum of any contiguous (possibly empty) subarray of ``a``.

    Kadane's algorithm: O(len(a)) time, O(1) extra space.  The empty
    subarray is allowed, so the result is never negative (an all-negative
    input yields 0).

    :param a: sequence of numbers
    :return: largest contiguous-subarray sum (0 for an empty sequence)
    """
    # Bug fix: the original iterated ``range(0, n)`` over the *global* n,
    # which is wrong (or raises IndexError) for inputs of any other length.
    best = 0
    running = 0
    for value in a:
        running += value
        best = max(best, running)
        if running < 0:
            # A negative prefix can never help a later subarray; restart.
            running = 0
    return best
# Benchmark driver: time `n` calls of getMaxSubSum on an n-element array.
n = 10000
# Build the test input with alternating signs: 0, -1, 2, -3, 4, ...
a = []
for i in range(0, n):
    a.append(pow(-1, i) * i)
start = time.perf_counter()
res = 0;
for i in range(0, n):
    # Perturb the first element each round so every call does real work.
    a[0] += 1
    res += getMaxSubSum(a)
end = time.perf_counter()
print("{:.5f}".format(end - start), "seconds") | true | true |
f724bfa6d406a2deee8665a8e2f1df9aceed69c7 | 15,613 | py | Python | fhirclient/r4models/chargeitemdefinition.py | cspears-mitre/CapStatement | 2390566ed75d420e0615e3a0aacb77e8c030fdcc | [
"Apache-2.0"
] | 1 | 2021-12-24T11:14:38.000Z | 2021-12-24T11:14:38.000Z | fhirclient/r4models/chargeitemdefinition.py | cspears-mitre/CapStatement | 2390566ed75d420e0615e3a0aacb77e8c030fdcc | [
"Apache-2.0"
] | null | null | null | fhirclient/r4models/chargeitemdefinition.py | cspears-mitre/CapStatement | 2390566ed75d420e0615e3a0aacb77e8c030fdcc | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 3.6.0-bd605d07 (http://hl7.org/fhir/StructureDefinition/ChargeItemDefinition) on 2018-12-20.
# 2018, SMART Health IT.
from . import domainresource
class ChargeItemDefinition(domainresource.DomainResource):
    """ Definition of properties and rules about how the price and the
    applicability of a ChargeItem can be determined.

    The ChargeItemDefinition resource provides the properties that apply to
    the (billing) codes necessary to calculate costs and prices.  The
    properties may differ largely depending on type and realm, therefore
    this resource gives only a rough structure and requires profiling for
    each type of billing code system.
    """

    resource_type = "ChargeItemDefinition"

    # All element slots of this resource, in declaration order.  A name
    # starting with "_" is the primitive-extension twin of the element it
    # shadows (FHIR "_property" convention).
    _SLOTS = (
        "applicability", "approvalDate", "code", "contact",
        "copyright", "_copyright", "date",
        "derivedFromUri", "_derivedFromUri",
        "description", "_description", "effectivePeriod",
        "experimental", "_experimental", "identifier", "instance",
        "jurisdiction", "lastReviewDate", "partOf", "_partOf",
        "propertyGroup", "publisher", "_publisher",
        "replaces", "_replaces", "status", "_status",
        "title", "_title", "url", "_url",
        "useContext", "version", "_version",
    )

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Every slot starts out as None; the superclass then populates the
        # instance from ``jsondict`` according to elementProperties().
        for slot in self._SLOTS:
            setattr(self, slot, None)
        super(ChargeItemDefinition, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Return (json name, attribute, type, is_list, of_many, required)
        tuples describing every element of this resource."""
        props = super(ChargeItemDefinition, self).elementProperties()
        props.extend([
            ("applicability", "applicability", ChargeItemDefinitionApplicability, True, None, False),
            ("approvalDate", "approvalDate", fhirdate.FHIRDate, False, None, False),
            ("code", "code", codeableconcept.CodeableConcept, False, None, False),
            ("contact", "contact", contactdetail.ContactDetail, True, None, False),
            ("copyright", "copyright", str, False, None, False),
            ("_copyright", "_copyright", fhirprimitive.FHIRPrimitive, False, None, False),
            ("date", "date", fhirdate.FHIRDate, False, None, False),
            ("derivedFromUri", "derivedFromUri", str, True, None, False),
            ("_derivedFromUri", "_derivedFromUri", fhirprimitive.FHIRPrimitive, False, None, False),
            ("description", "description", str, False, None, False),
            ("_description", "_description", fhirprimitive.FHIRPrimitive, False, None, False),
            ("effectivePeriod", "effectivePeriod", period.Period, False, None, False),
            ("experimental", "experimental", bool, False, None, False),
            ("_experimental", "_experimental", fhirprimitive.FHIRPrimitive, False, None, False),
            ("identifier", "identifier", identifier.Identifier, True, None, False),
            ("instance", "instance", fhirreference.FHIRReference, True, None, False),
            ("jurisdiction", "jurisdiction", codeableconcept.CodeableConcept, True, None, False),
            ("lastReviewDate", "lastReviewDate", fhirdate.FHIRDate, False, None, False),
            ("partOf", "partOf", str, True, None, False),
            ("_partOf", "_partOf", fhirprimitive.FHIRPrimitive, False, None, False),
            ("propertyGroup", "propertyGroup", ChargeItemDefinitionPropertyGroup, True, None, False),
            ("publisher", "publisher", str, False, None, False),
            ("_publisher", "_publisher", fhirprimitive.FHIRPrimitive, False, None, False),
            ("replaces", "replaces", str, True, None, False),
            ("_replaces", "_replaces", fhirprimitive.FHIRPrimitive, False, None, False),
            ("status", "status", str, False, None, True),
            ("_status", "_status", fhirprimitive.FHIRPrimitive, False, None, False),
            ("title", "title", str, False, None, False),
            ("_title", "_title", fhirprimitive.FHIRPrimitive, False, None, False),
            ("url", "url", str, False, None, True),
            ("_url", "_url", fhirprimitive.FHIRPrimitive, False, None, False),
            ("useContext", "useContext", usagecontext.UsageContext, True, None, False),
            ("version", "version", str, False, None, False),
            ("_version", "_version", fhirprimitive.FHIRPrimitive, False, None, False),
        ])
        return props
from . import backboneelement
class ChargeItemDefinitionApplicability(backboneelement.BackboneElement):
    """ Whether or not the billing code is applicable.

    Expressions that describe applicability criteria for the billing code.
    """

    resource_type = "ChargeItemDefinitionApplicability"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # description/expression/language plus their "_" extension twins all
        # start empty; the superclass fills them in from ``jsondict``.
        for slot in ("description", "_description", "expression",
                     "_expression", "language", "_language"):
            setattr(self, slot, None)
        super(ChargeItemDefinitionApplicability, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Return (json name, attribute, type, is_list, of_many, required)
        tuples describing every element of this backbone element."""
        props = super(ChargeItemDefinitionApplicability, self).elementProperties()
        props.extend([
            ("description", "description", str, False, None, False),
            ("_description", "_description", fhirprimitive.FHIRPrimitive, False, None, False),
            ("expression", "expression", str, False, None, False),
            ("_expression", "_expression", fhirprimitive.FHIRPrimitive, False, None, False),
            ("language", "language", str, False, None, False),
            ("_language", "_language", fhirprimitive.FHIRPrimitive, False, None, False),
        ])
        return props
class ChargeItemDefinitionPropertyGroup(backboneelement.BackboneElement):
    """ Group of properties which are applicable under the same conditions.

    If no applicability rules are established for the group, then all
    properties always apply.
    """

    resource_type = "ChargeItemDefinitionPropertyGroup"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Both list-valued slots start empty; the superclass fills them in
        # from ``jsondict``.
        for slot in ("applicability", "priceComponent"):
            setattr(self, slot, None)
        super(ChargeItemDefinitionPropertyGroup, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Return (json name, attribute, type, is_list, of_many, required)
        tuples describing every element of this backbone element."""
        props = super(ChargeItemDefinitionPropertyGroup, self).elementProperties()
        props.extend([
            ("applicability", "applicability", ChargeItemDefinitionApplicability, True, None, False),
            ("priceComponent", "priceComponent", ChargeItemDefinitionPropertyGroupPriceComponent, True, None, False),
        ])
        return props
class ChargeItemDefinitionPropertyGroupPriceComponent(backboneelement.BackboneElement):
    """ Components of total line item price.

    The price for a ChargeItem may be calculated as a base price with
    surcharges/deductions that apply in certain conditions.  The
    priceComponent element offers transparency to the recipient of the
    Invoice about how the prices have been calculated.
    """

    resource_type = "ChargeItemDefinitionPropertyGroupPriceComponent"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # amount/code/factor/type plus the "_" extension twins of the two
        # primitive elements all start empty; the superclass fills them in.
        for slot in ("amount", "code", "factor", "_factor", "type", "_type"):
            setattr(self, slot, None)
        super(ChargeItemDefinitionPropertyGroupPriceComponent, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Return (json name, attribute, type, is_list, of_many, required)
        tuples describing every element of this backbone element."""
        props = super(ChargeItemDefinitionPropertyGroupPriceComponent, self).elementProperties()
        props.extend([
            ("amount", "amount", money.Money, False, None, False),
            ("code", "code", codeableconcept.CodeableConcept, False, None, False),
            ("factor", "factor", float, False, None, False),
            ("_factor", "_factor", fhirprimitive.FHIRPrimitive, False, None, False),
            ("type", "type", str, False, None, True),
            ("_type", "_type", fhirprimitive.FHIRPrimitive, False, None, False),
        ])
        return props
from . import codeableconcept
from . import contactdetail
from . import fhirdate
from . import fhirreference
from . import identifier
from . import money
from . import period
from . import usagecontext
from . import fhirprimitive
| 38.173594 | 117 | 0.609172 |
from . import domainresource
class ChargeItemDefinition(domainresource.DomainResource):
    """ Definition of properties and rules about how the price and the
    applicability of a ChargeItem can be determined.

    The ChargeItemDefinition resource provides the properties that apply to
    the (billing) codes necessary to calculate costs and prices.  The
    properties may differ largely depending on type and realm, therefore
    this resource gives only a rough structure and requires profiling for
    each type of billing code system.
    """

    resource_type = "ChargeItemDefinition"

    # All element slots of this resource, in declaration order.  A name
    # starting with "_" is the primitive-extension twin of the element it
    # shadows (FHIR "_property" convention).
    _SLOTS = (
        "applicability", "approvalDate", "code", "contact",
        "copyright", "_copyright", "date",
        "derivedFromUri", "_derivedFromUri",
        "description", "_description", "effectivePeriod",
        "experimental", "_experimental", "identifier", "instance",
        "jurisdiction", "lastReviewDate", "partOf", "_partOf",
        "propertyGroup", "publisher", "_publisher",
        "replaces", "_replaces", "status", "_status",
        "title", "_title", "url", "_url",
        "useContext", "version", "_version",
    )

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Every slot starts out as None; the superclass then populates the
        # instance from ``jsondict`` according to elementProperties().
        for slot in self._SLOTS:
            setattr(self, slot, None)
        super(ChargeItemDefinition, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Return (json name, attribute, type, is_list, of_many, required)
        tuples describing every element of this resource."""
        props = super(ChargeItemDefinition, self).elementProperties()
        props.extend([
            ("applicability", "applicability", ChargeItemDefinitionApplicability, True, None, False),
            ("approvalDate", "approvalDate", fhirdate.FHIRDate, False, None, False),
            ("code", "code", codeableconcept.CodeableConcept, False, None, False),
            ("contact", "contact", contactdetail.ContactDetail, True, None, False),
            ("copyright", "copyright", str, False, None, False),
            ("_copyright", "_copyright", fhirprimitive.FHIRPrimitive, False, None, False),
            ("date", "date", fhirdate.FHIRDate, False, None, False),
            ("derivedFromUri", "derivedFromUri", str, True, None, False),
            ("_derivedFromUri", "_derivedFromUri", fhirprimitive.FHIRPrimitive, False, None, False),
            ("description", "description", str, False, None, False),
            ("_description", "_description", fhirprimitive.FHIRPrimitive, False, None, False),
            ("effectivePeriod", "effectivePeriod", period.Period, False, None, False),
            ("experimental", "experimental", bool, False, None, False),
            ("_experimental", "_experimental", fhirprimitive.FHIRPrimitive, False, None, False),
            ("identifier", "identifier", identifier.Identifier, True, None, False),
            ("instance", "instance", fhirreference.FHIRReference, True, None, False),
            ("jurisdiction", "jurisdiction", codeableconcept.CodeableConcept, True, None, False),
            ("lastReviewDate", "lastReviewDate", fhirdate.FHIRDate, False, None, False),
            ("partOf", "partOf", str, True, None, False),
            ("_partOf", "_partOf", fhirprimitive.FHIRPrimitive, False, None, False),
            ("propertyGroup", "propertyGroup", ChargeItemDefinitionPropertyGroup, True, None, False),
            ("publisher", "publisher", str, False, None, False),
            ("_publisher", "_publisher", fhirprimitive.FHIRPrimitive, False, None, False),
            ("replaces", "replaces", str, True, None, False),
            ("_replaces", "_replaces", fhirprimitive.FHIRPrimitive, False, None, False),
            ("status", "status", str, False, None, True),
            ("_status", "_status", fhirprimitive.FHIRPrimitive, False, None, False),
            ("title", "title", str, False, None, False),
            ("_title", "_title", fhirprimitive.FHIRPrimitive, False, None, False),
            ("url", "url", str, False, None, True),
            ("_url", "_url", fhirprimitive.FHIRPrimitive, False, None, False),
            ("useContext", "useContext", usagecontext.UsageContext, True, None, False),
            ("version", "version", str, False, None, False),
            ("_version", "_version", fhirprimitive.FHIRPrimitive, False, None, False),
        ])
        return props
from . import backboneelement
class ChargeItemDefinitionApplicability(backboneelement.BackboneElement):
    """ Whether or not the billing code is applicable.

    Expressions that describe applicability criteria for the billing code.
    """

    resource_type = "ChargeItemDefinitionApplicability"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # description/expression/language plus their "_" extension twins all
        # start empty; the superclass fills them in from ``jsondict``.
        for slot in ("description", "_description", "expression",
                     "_expression", "language", "_language"):
            setattr(self, slot, None)
        super(ChargeItemDefinitionApplicability, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Return (json name, attribute, type, is_list, of_many, required)
        tuples describing every element of this backbone element."""
        props = super(ChargeItemDefinitionApplicability, self).elementProperties()
        props.extend([
            ("description", "description", str, False, None, False),
            ("_description", "_description", fhirprimitive.FHIRPrimitive, False, None, False),
            ("expression", "expression", str, False, None, False),
            ("_expression", "_expression", fhirprimitive.FHIRPrimitive, False, None, False),
            ("language", "language", str, False, None, False),
            ("_language", "_language", fhirprimitive.FHIRPrimitive, False, None, False),
        ])
        return props
class ChargeItemDefinitionPropertyGroup(backboneelement.BackboneElement):
    """ Group of properties which are applicable under the same conditions.

    If no applicability rules are established for the group, then all
    properties always apply.
    """

    resource_type = "ChargeItemDefinitionPropertyGroup"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # Both list-valued slots start empty; the superclass fills them in
        # from ``jsondict``.
        for slot in ("applicability", "priceComponent"):
            setattr(self, slot, None)
        super(ChargeItemDefinitionPropertyGroup, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Return (json name, attribute, type, is_list, of_many, required)
        tuples describing every element of this backbone element."""
        props = super(ChargeItemDefinitionPropertyGroup, self).elementProperties()
        props.extend([
            ("applicability", "applicability", ChargeItemDefinitionApplicability, True, None, False),
            ("priceComponent", "priceComponent", ChargeItemDefinitionPropertyGroupPriceComponent, True, None, False),
        ])
        return props
class ChargeItemDefinitionPropertyGroupPriceComponent(backboneelement.BackboneElement):
    """ Components of total line item price.

    The price for a ChargeItem may be calculated as a base price with
    surcharges/deductions that apply in certain conditions.  The
    priceComponent element offers transparency to the recipient of the
    Invoice about how the prices have been calculated.
    """

    resource_type = "ChargeItemDefinitionPropertyGroupPriceComponent"

    def __init__(self, jsondict=None, strict=True):
        """ Initialize all valid properties.

        :raises: FHIRValidationError on validation errors, unless strict is False
        :param dict jsondict: A JSON dictionary to use for initialization
        :param bool strict: If True (the default), invalid variables will raise a TypeError
        """
        # amount/code/factor/type plus the "_" extension twins of the two
        # primitive elements all start empty; the superclass fills them in.
        for slot in ("amount", "code", "factor", "_factor", "type", "_type"):
            setattr(self, slot, None)
        super(ChargeItemDefinitionPropertyGroupPriceComponent, self).__init__(jsondict=jsondict, strict=strict)

    def elementProperties(self):
        """Return (json name, attribute, type, is_list, of_many, required)
        tuples describing every element of this backbone element."""
        props = super(ChargeItemDefinitionPropertyGroupPriceComponent, self).elementProperties()
        props.extend([
            ("amount", "amount", money.Money, False, None, False),
            ("code", "code", codeableconcept.CodeableConcept, False, None, False),
            ("factor", "factor", float, False, None, False),
            ("_factor", "_factor", fhirprimitive.FHIRPrimitive, False, None, False),
            ("type", "type", str, False, None, True),
            ("_type", "_type", fhirprimitive.FHIRPrimitive, False, None, False),
        ])
        return props
from . import codeableconcept
from . import contactdetail
from . import fhirdate
from . import fhirreference
from . import identifier
from . import money
from . import period
from . import usagecontext
from . import fhirprimitive
| true | true |
f724bfd76f144d7fb329df62913b3c4cd7da8450 | 1,114 | py | Python | app.py | GonzalezGise/CaC-Python-Grupo-10-2167 | e6e822ba17f9d2110ff41c2520f3b06a764ac0ed | [
"MIT"
] | 1 | 2021-12-03T16:10:27.000Z | 2021-12-03T16:10:27.000Z | app.py | GonzalezGise/CaC-Python-Grupo-10-2167 | e6e822ba17f9d2110ff41c2520f3b06a764ac0ed | [
"MIT"
] | null | null | null | app.py | GonzalezGise/CaC-Python-Grupo-10-2167 | e6e822ba17f9d2110ff41c2520f3b06a764ac0ed | [
"MIT"
] | 5 | 2021-11-15T23:30:05.000Z | 2021-11-30T13:10:59.000Z | # Crear una funcion que permita ingresar al usuario
# Numero enteros... y strings...
# 1- print -> imprime la lista que su fue cargando hasta el momento...
# 2- append a -> siendo a numero entero
# 3- remove b -> siendo b numero entero
# 4- sort
# 5- reverse
# 6- insert c d -> siendo ambos numeros enteros c le indice y d el valor
# 7- exit -> termina el programa
isRunning = True
myList = []
# Interactive REPL over a list of integers.  Recognised commands (per the
# header spec): print | append a | remove b | sort | reverse |
# insert idx val | exit.
while isRunning:
    userInput = input("Ingrese comando: ")
    command = userInput.split()
    if not command:
        # Bug fix: a blank input line used to crash with IndexError on
        # command[0]; just prompt again.
        continue
    if command[0] == "exit":
        isRunning = False
    elif command[0] == "append":
        # Only non-negative integer literals pass isdigit(); anything else
        # is silently ignored (original behaviour preserved).
        if len(command) > 1 and command[1].isdigit():
            myList.append(int(command[1]))
    elif command[0] == "remove":
        # Listed in the header spec but previously unimplemented: remove the
        # first occurrence of the value; bad/missing/absent values are ignored.
        try:
            myList.remove(int(command[1]))
        except (IndexError, ValueError):
            pass
    elif command[0] == "print":
        print(myList)
    elif command[0] == "sort":
        myList.sort()
    elif command[0] == "reverse":
        # Listed in the header spec but previously unimplemented.
        myList.reverse()
    elif command[0] == "insert":
        # Bug fix: missing or non-integer arguments used to crash the loop
        # with IndexError/ValueError; negative integers are still accepted.
        try:
            myList.insert(int(command[1]), int(command[2]))
        except (IndexError, ValueError):
            pass
# In JavaScript we had arrow functions, which were anonymous, e.g.:
#myFuncion = (x) => x**2
myFuncion = lambda x: x**2 | 30.108108 | 72 | 0.633752 |
isRunning = True
myList = []
# Interactive REPL over a list of integers.  Implemented commands:
#   print            -> show the current list
#   append <int>     -> push a non-negative integer (others silently ignored)
#   sort             -> sort ascending in place
#   insert <i> <v>   -> insert value v at index i
#   exit             -> leave the loop
while isRunning:
    userInput = input("Ingrese comando: ")
    command = userInput.split()
    # NOTE(review): a blank input line makes command[0] raise IndexError and
    # crash the loop -- confirm whether a guard is intended.
    if command[0] == "exit":
        isRunning = False
    elif command[0] == "append":
        argumentos = command[1]
        # isdigit() rejects signs, so "-3" is silently ignored here.
        if argumentos.isdigit():
            myList.append(int(argumentos))
    elif command[0] == "print":
        print(myList)
    elif command[0] == "sort":
        myList.sort()
    elif command[0] == "insert":
        # NOTE(review): missing or non-integer arguments raise here
        # (IndexError/ValueError) and terminate the loop -- confirm intent.
        myList.insert(int(command[1]),int(command[2]))
myFuncion = lambda x: x**2 | true | true |
f724c14a74f53741ea1f5af11f5d2c8219bed97c | 2,073 | py | Python | contrib/devtools/check-doc.py | deyoonoo/bendos | 5e161bda7006ccc78233415ac3881fde523a3fe6 | [
"MIT"
] | null | null | null | contrib/devtools/check-doc.py | deyoonoo/bendos | 5e161bda7006ccc78233415ac3881fde523a3fe6 | [
"MIT"
] | null | null | null | contrib/devtools/check-doc.py | deyoonoo/bendos | 5e161bda7006ccc78233415ac3881fde523a3fe6 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2015-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
This checks if all command line args are documented.
Return value is 0 to indicate no error.
Author: @MarcoFalke
'''
from subprocess import check_output
import re
import sys
# Folder (relative to the repo top level) scanned for argument usage.
FOLDER_GREP = 'src'
# Test sources are excluded: tests may use undocumented arguments.
FOLDER_TEST = 'src/test/'
CMD_ROOT_DIR = '`git rev-parse --show-toplevel`/{}'.format(FOLDER_GREP)
# Shell pipeline listing every source line that *uses* a "-foo" argument
# (mapArgs/mapMultiArgs lookups and GetArg/GetBoolArg calls)...
CMD_GREP_ARGS = r"egrep -r -I '(map(Multi)?Args(\.count\(|\[)|Get(Bool)?Arg\()\"\-[^\"]+?\"' {} | grep -v '{}'".format(CMD_ROOT_DIR, FOLDER_TEST)
# ...and every line that *documents* one via HelpMessageOpt("-foo...").
CMD_GREP_DOCS = r"egrep -r -I 'HelpMessageOpt\(\"\-[^\"=]+?(=|\")' {}".format(CMD_ROOT_DIR)
# Compiled patterns extracting just the argument name (e.g. "-datadir")
# from the matching grep output lines.
REGEX_ARG = re.compile(r'(?:map(?:Multi)?Args(?:\.count\(|\[)|Get(?:Bool)?Arg\()\"(\-[^\"]+?)\"')
REGEX_DOC = re.compile(r'HelpMessageOpt\(\"(\-[^\"=]+?)(?:=|\")')
# list unsupported, deprecated and duplicate args as they need no documentation
SET_DOC_OPTIONAL = set(['-rpcssl', '-benchmark', '-h', '-help', '-socks', '-tor', '-debugnet', '-whitelistalwaysrelay', '-prematurewitness', '-walletprematurewitness', '-promiscuousmempoolflags', '-blockminsize', '-sendfreetransactions', '-checklevel', '-liquidityprovider', '-anonymizebdsamount'])
def main():
    """Cross-check command-line arguments used in the sources against the
    documented ones and exit with the count of undocumented arguments.

    A non-zero exit status therefore means documentation is missing, which
    makes the script directly usable as a CI lint step.
    """
    # Grep the tree for argument *usages* and for HelpMessageOpt *docs*.
    used = check_output(CMD_GREP_ARGS, shell=True, universal_newlines=True)
    docd = check_output(CMD_GREP_DOCS, shell=True, universal_newlines=True)
    # REGEX_ARG / REGEX_DOC are already compiled patterns; the original
    # wrapped them in a redundant re.compile(...) before each findall.
    args_used = set(REGEX_ARG.findall(used))
    args_docd = set(REGEX_DOC.findall(docd)).union(SET_DOC_OPTIONAL)
    args_need_doc = args_used.difference(args_docd)
    args_unknown = args_docd.difference(args_used)
    print("Args used : {}".format(len(args_used)))
    print("Args documented : {}".format(len(args_docd)))
    print("Args undocumented: {}".format(len(args_need_doc)))
    print(args_need_doc)
    print("Args unknown : {}".format(len(args_unknown)))
    print(args_unknown)
    # The exit code doubles as the number of args still needing docs.
    sys.exit(len(args_need_doc))
if __name__ == "__main__":
    main()
| 43.1875 | 298 | 0.687892 |
from subprocess import check_output
import re
import sys
FOLDER_GREP = 'src'
FOLDER_TEST = 'src/test/'
CMD_ROOT_DIR = '`git rev-parse --show-toplevel`/{}'.format(FOLDER_GREP)
CMD_GREP_ARGS = r"egrep -r -I '(map(Multi)?Args(\.count\(|\[)|Get(Bool)?Arg\()\"\-[^\"]+?\"' {} | grep -v '{}'".format(CMD_ROOT_DIR, FOLDER_TEST)
CMD_GREP_DOCS = r"egrep -r -I 'HelpMessageOpt\(\"\-[^\"=]+?(=|\")' {}".format(CMD_ROOT_DIR)
REGEX_ARG = re.compile(r'(?:map(?:Multi)?Args(?:\.count\(|\[)|Get(?:Bool)?Arg\()\"(\-[^\"]+?)\"')
REGEX_DOC = re.compile(r'HelpMessageOpt\(\"(\-[^\"=]+?)(?:=|\")')
SET_DOC_OPTIONAL = set(['-rpcssl', '-benchmark', '-h', '-help', '-socks', '-tor', '-debugnet', '-whitelistalwaysrelay', '-prematurewitness', '-walletprematurewitness', '-promiscuousmempoolflags', '-blockminsize', '-sendfreetransactions', '-checklevel', '-liquidityprovider', '-anonymizebdsamount'])
def main():
    """Cross-check command-line arguments used in the sources against the
    documented ones and exit with the count of undocumented arguments.

    A non-zero exit status therefore means documentation is missing, which
    makes the script directly usable as a CI lint step.
    """
    # Grep the tree for argument *usages* and for HelpMessageOpt *docs*.
    used = check_output(CMD_GREP_ARGS, shell=True, universal_newlines=True)
    docd = check_output(CMD_GREP_DOCS, shell=True, universal_newlines=True)
    # REGEX_ARG / REGEX_DOC are already compiled patterns; the original
    # wrapped them in a redundant re.compile(...) before each findall.
    args_used = set(REGEX_ARG.findall(used))
    args_docd = set(REGEX_DOC.findall(docd)).union(SET_DOC_OPTIONAL)
    args_need_doc = args_used.difference(args_docd)
    args_unknown = args_docd.difference(args_used)
    print("Args used : {}".format(len(args_used)))
    print("Args documented : {}".format(len(args_docd)))
    print("Args undocumented: {}".format(len(args_need_doc)))
    print(args_need_doc)
    print("Args unknown : {}".format(len(args_unknown)))
    print(args_unknown)
    # The exit code doubles as the number of args still needing docs.
    sys.exit(len(args_need_doc))
if __name__ == "__main__":
    main()
| true | true |
f724c1f56a74845c221aa7a44ad661b8138463aa | 3,255 | py | Python | pkg/core/core.py | GarrettHaley/ida-cfp | b1fdef053c71b4cb6508291990fc01d95ad4895e | [
"MIT"
] | null | null | null | pkg/core/core.py | GarrettHaley/ida-cfp | b1fdef053c71b4cb6508291990fc01d95ad4895e | [
"MIT"
] | null | null | null | pkg/core/core.py | GarrettHaley/ida-cfp | b1fdef053c71b4cb6508291990fc01d95ad4895e | [
"MIT"
] | 1 | 2020-05-14T20:04:31.000Z | 2020-05-14T20:04:31.000Z | """
Defines `Core`.
Instantiates the module-level logger with the appropriate naming
convention.
"""
import logging
from abc import ABC
from interface.interface import Interface
from astparser.astparser import AstParser
from record.record import Record
from exception.exception import NoFilesSpecifiedError
LOGGER = logging.getLogger(__name__)
class Core(ABC):
    """
    Facade over the project's three main features.

    `Core` owns and coordinates the `Interface` and `AstParser`
    instances, offering a small, moderated view into IDA-CFP's complex
    internals.  Both helpers can also be reached directly by third-party
    code, but that is discouraged because neither keeps strictly
    immutable state.
    """

    def __init__(self) -> None:
        """
        Create the long-lived helper objects.

        `self._intr` (an `Interface`) provides high-level file I/O;
        `self._astp` (an `AstParser`) processes the abstract syntax
        trees that PycParser generates.  Unlike `AstParser.__init__`,
        the state held here persists across the files processed.

        :return: returns nothing
        """
        self._intr = Interface()
        self._astp = AstParser()

    def process_files(self, files: list) -> None:
        """
        Load and process the AST of every file in `files`.

        Each entry's AST is loaded through the interface, handed to the
        parser, and finally the module-level `Record` is condensed once
        for the whole batch.

        :param files: list of argparser I/O wrappers
        :raises NoFilesSpecifiedError: if `files` is empty or falsy
        :return: returns nothing
        """
        if not files:
            # An empty or improperly populated file list is a caller error.
            raise NoFilesSpecifiedError()
        for handle in files:
            self._astp.process_ast(self._intr.load_new_ast(handle.name))
        # Integrating once per run (instead of once per file) avoids a huge
        # amount of repeated work on the list/dict merge.
        Record.integrate_list_to_dict()

    def generate_bundle(self) -> None:
        """
        Convert the master `Record` dictionary of string: function pairs
        into a `json` string dump via the interface.

        :return: returns nothing
        """
        self._intr.convert_dict_to_json(Record.str_func_dict)

    def export(self) -> None:
        """
        Write the converted `json` bundle to out/bundle.json through the
        interface's file-I/O system.

        :return: returns nothing
        """
        self._intr.drop_bundle_to_disk(self._intr.json_data)
| 32.878788 | 73 | 0.651306 |
import logging
from abc import ABC
from interface.interface import Interface
from astparser.astparser import AstParser
from record.record import Record
from exception.exception import NoFilesSpecifiedError
LOGGER = logging.getLogger(__name__)
class Core(ABC):
def __init__(self) -> None:
self._intr = Interface()
self._astp = AstParser()
def process_files(self, files: list) -> None:
if not files:
raise NoFilesSpecifiedError()
for f_str in files:
ast = self._intr.load_new_ast(f_str.name)
self._astp.process_ast(ast)
Record.integrate_list_to_dict()
def generate_bundle(self) -> None:
self._intr.convert_dict_to_json(Record.str_func_dict)
def export(self) -> None:
self._intr.drop_bundle_to_disk(self._intr.json_data)
| true | true |
f724c21e1e6a61b8d8f476e230c0c8957dd47917 | 919 | py | Python | utillity/apicheck.py | dominik-air/lol-afk-buddy | b9e76336803922bd5f60dac33ec34f471eea3422 | [
"MIT"
] | 1 | 2021-10-11T23:02:19.000Z | 2021-10-11T23:02:19.000Z | utillity/apicheck.py | dominik-air/lol-afk-buddy | b9e76336803922bd5f60dac33ec34f471eea3422 | [
"MIT"
] | 2 | 2022-02-04T20:32:18.000Z | 2022-02-04T20:38:49.000Z | utillity/apicheck.py | dominik-air/lol-afk-buddy | b9e76336803922bd5f60dac33ec34f471eea3422 | [
"MIT"
] | 1 | 2022-02-05T15:12:15.000Z | 2022-02-05T15:12:15.000Z | from lcu_driver import Connector
import json
connector = Connector()


@connector.ready
async def connect(connection):
    """Once the LCU API is up, dump the summoner's rune pages to temp.json.

    :param connection: lcu_driver connection used to issue LCU REST requests
    """
    print("LCU API is ready to be used.")

    # check if the user is already logged into his account
    summoner = await connection.request("get", "/lol-summoner/v1/current-summoner")

    if summoner.status != 200:
        print(
            "Please login into your account to change your icon and restart the script..."
        )
    else:
        # Other endpoints of interest: /lol-perks/v1/perks, /lol-perks/v1/currentpage
        request = "/lol-perks/v1/pages"
        request_type = "get"

        summoner_spells = await connection.request(request_type, request)
        save = await summoner_spells.json()
        # "w+" truncates any existing temp.json (and creates it if missing).
        with open("temp.json", "w+") as f:
            json.dump(save, f, indent=4)


connector.start()
| 31.689655 | 90 | 0.638738 | from lcu_driver import Connector
import json
connector = Connector()
@connector.ready
async def connect(connection):
print("LCU API is ready to be used.")
summoner = await connection.request("get", "/lol-summoner/v1/current-summoner")
if summoner.status != 200:
print(
"Please login into your account to change your icon and restart the script..."
)
else:
data = await summoner.json()
summonerId = data['summonerId']
request = "/lol-perks/v1/pages"
request_type = "get"
summoner_spells = await connection.request(request_type, request)
save = await summoner_spells.json()
with open("temp.json", "w+") as f:
json.dump(save, f, indent=4)
connector.start()
| true | true |
f724c2d13ac7970d0010056bcfbce749495e3f07 | 4,414 | py | Python | pytorchvideo/models/memory_bank.py | kevinmtian/pytorchvideo | 168e16859a6029ef8ebeb476f9163bebb6c6b87d | [
"Apache-2.0"
] | 2,391 | 2021-04-13T18:10:18.000Z | 2022-03-31T15:07:09.000Z | pytorchvideo/models/memory_bank.py | kevinmtian/pytorchvideo | 168e16859a6029ef8ebeb476f9163bebb6c6b87d | [
"Apache-2.0"
] | 156 | 2021-04-13T18:51:49.000Z | 2022-03-31T08:05:50.000Z | pytorchvideo/models/memory_bank.py | kevinmtian/pytorchvideo | 168e16859a6029ef8ebeb476f9163bebb6c6b87d | [
"Apache-2.0"
] | 231 | 2021-04-14T05:04:55.000Z | 2022-03-22T09:35:46.000Z | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
import math
from typing import Optional
import torch
import torch.nn as nn
import torch.nn.functional as F
from pytorchvideo.layers.utils import set_attributes
class MemoryBank(nn.Module):
"""
Performs Non-Parametric Instance Discrimination for self supervised learning on
video. A memory bank is built to keep and update the historical feature embedding
and use them for contrastive learning.
The original paper is:
Unsupervised Feature Learning via Non-Parametric Instance Discrimination
https://arxiv.org/pdf/1805.01978.pdf
More details can be found from the memory bank part in the following paper:
Momentum Contrast for Unsupervised Visual Representation Learning
https://arxiv.org/pdf/1911.05722.pdf
"""
def __init__(
self,
backbone: nn.Module,
mlp: Optional[nn.Module] = None,
neg_size: int = 4096,
temperature: float = 0.07,
bank_size: int = 1280000,
dim: int = 2048,
mmt: float = 0.999,
) -> None:
"""
Args:
backbone (nn.Module): backbone used to forward the input.
mlp (nn.Module): multi-layer perception used in memory bank instance
discrimination model.
neg_size (int): size of negative samples per instance.
temperature (float): temperature to use for contrastive learning.
bank_size (int): size of the memory bank, expected to be the same size as
the training set.
dim (int): dimension of the channel.
mmt (float): momentum to use.
"""
super().__init__()
set_attributes(self, locals())
self._init_mem_bank(bank_size, dim)
def _init_mem_bank(self, bank_size: int, dim: int) -> None:
"""
Given the memory bank size and the channel dimension, initialize the memory
bank.
Args:
bank_size (int): size of the memory bank, expected to be the same size as
the training set.
dim (int): dimension of the channel.
"""
stdv = 1.0 / math.sqrt(dim / 3)
self.register_buffer(
"memory",
torch.rand(
bank_size,
dim,
)
.mul_(2 * stdv)
.add_(-stdv)
.to(next(self.backbone.parameters()).device),
)
def forward(self, x: torch.Tensor, x_ind: torch.Tensor) -> torch.Tensor:
"""
Perform contrastive learning with random sampled negative instance from the
memory bank. During training, update the memory bank with latest feature
embedding.
Args:
x (torch.tensor): a batch of image with augmentation. The input tensor
shape should able to be feed into the backbone.
x_ind (torch.tensor): the index of the image x from the dataset. Expected
shape is B.
"""
batch_size = x.shape[0]
x = self.backbone(x)
if self.mlp is not None:
x = self.mlp(x)
# Normalize the output embedding before multiplication.
x = F.normalize(x, p=2, dim=1)
# Random sample negative instances from the memory bank.
idx = torch.randint(0, self.bank_size, size=(batch_size, self.neg_size + 1)).to(
x.device
)
# Fill the first with positive instances.
idx.select(1, 0).copy_(x_ind.data)
weight = torch.index_select(self.memory, 0, idx.view(-1)).detach()
weight = weight.view(batch_size, self.neg_size + 1, self.dim)
# Multiplication for contrastive learning.
out = torch.einsum("bkc,bc->bk", weight, x)
out = torch.div(out, self.temperature)
gt = torch.zeros((batch_size,), device=x.device, dtype=torch.long)
loss = torch.nn.functional.cross_entropy(out, gt)
# Update memory during training.
if self.training:
with torch.no_grad():
pos = torch.index_select(self.memory, 0, x_ind.view(-1))
pos.mul_(self.mmt)
pos.add_(torch.mul(x, 1 - self.mmt))
norm = pos.pow(2).sum(1, keepdim=True).pow(0.5)
updated = pos.div(norm)
self.memory.index_copy_(0, x_ind, updated)
return loss
| 38.719298 | 88 | 0.599909 |
import math
from typing import Optional
import torch
import torch.nn as nn
import torch.nn.functional as F
from pytorchvideo.layers.utils import set_attributes
class MemoryBank(nn.Module):
def __init__(
self,
backbone: nn.Module,
mlp: Optional[nn.Module] = None,
neg_size: int = 4096,
temperature: float = 0.07,
bank_size: int = 1280000,
dim: int = 2048,
mmt: float = 0.999,
) -> None:
super().__init__()
set_attributes(self, locals())
self._init_mem_bank(bank_size, dim)
def _init_mem_bank(self, bank_size: int, dim: int) -> None:
stdv = 1.0 / math.sqrt(dim / 3)
self.register_buffer(
"memory",
torch.rand(
bank_size,
dim,
)
.mul_(2 * stdv)
.add_(-stdv)
.to(next(self.backbone.parameters()).device),
)
def forward(self, x: torch.Tensor, x_ind: torch.Tensor) -> torch.Tensor:
batch_size = x.shape[0]
x = self.backbone(x)
if self.mlp is not None:
x = self.mlp(x)
x = F.normalize(x, p=2, dim=1)
idx = torch.randint(0, self.bank_size, size=(batch_size, self.neg_size + 1)).to(
x.device
)
idx.select(1, 0).copy_(x_ind.data)
weight = torch.index_select(self.memory, 0, idx.view(-1)).detach()
weight = weight.view(batch_size, self.neg_size + 1, self.dim)
out = torch.einsum("bkc,bc->bk", weight, x)
out = torch.div(out, self.temperature)
gt = torch.zeros((batch_size,), device=x.device, dtype=torch.long)
loss = torch.nn.functional.cross_entropy(out, gt)
if self.training:
with torch.no_grad():
pos = torch.index_select(self.memory, 0, x_ind.view(-1))
pos.mul_(self.mmt)
pos.add_(torch.mul(x, 1 - self.mmt))
norm = pos.pow(2).sum(1, keepdim=True).pow(0.5)
updated = pos.div(norm)
self.memory.index_copy_(0, x_ind, updated)
return loss
| true | true |
f724c36991439fd46c3b0dcb954ba15f7db2cfd6 | 2,255 | py | Python | workbook/convert.py | hantongliu/BETTER-hacktron | 37a919dd225970649cd9e7a58c74e2d8f0cca88c | [
"Apache-2.0"
] | null | null | null | workbook/convert.py | hantongliu/BETTER-hacktron | 37a919dd225970649cd9e7a58c74e2d8f0cca88c | [
"Apache-2.0"
] | null | null | null | workbook/convert.py | hantongliu/BETTER-hacktron | 37a919dd225970649cd9e7a58c74e2d8f0cca88c | [
"Apache-2.0"
] | 1 | 2020-10-15T13:57:13.000Z | 2020-10-15T13:57:13.000Z | import json
# Vocabulary constants for the generated N-Triples output: SLIPO POI
# ontology terms plus GeoSPARQL geometry terms.
prefix = "http://slipo.eu/id/poi"
rdftype = "<http://www.w3.org/1999/02/22-rdf-syntax-ns#type>"
poi = "<http://slipo.eu/def#POI>"
category = "<http://slipo.eu/def#category>"
termPrefix = "http://slipo.eu/id/term"
termValue = "<http://slipo.eu/def#termValue>"
rdf = ""  # accumulated N-Triples serialization, written out at the end
i = 0  # mints unique term URIs; shared across all tweets
hasGeometry = "<http://www.opengis.net/ont/geosparql#hasGeometry>"
geometryID = "<http://slipo.eu/id/poi/__id__/geometry>"  # template; __id__ replaced per POI
wkt = "<http://www.opengis.net/ont/geosparql#asWKT>"
epsg = "<http://www.opengis.net/def/crs/EPSG/0/4326>"
wktLiteral = "<http://www.opengis.net/ont/geosparql#wktLiteral>"
# Input: EOPEN flood-tweet export with a top-level 'tweets' array.
# Other inputs used during development: dummy_tweet.json,
# EnglishFloodTweets_10000.json.
with open('EnglishFloodTweets__.json') as json_file:
    data = json.load(json_file)
    temp = 0  # count of tweets seen (diagnostic only; never read)
    for p in data['tweets']:
        print(p['id'])
        temp = temp+1
        point_id = p['id']
        concepts = p['image_concepts']
        # Tweets without image concepts ("n/a") are skipped entirely.
        if concepts != "n/a":
            # One POI per tweet, typed as slipo:POI.
            subject = "<" + prefix + "/" + point_id + ">"
            triple = subject + " " + rdftype + " " + poi + " . "
            rdf += triple + "\n"
            conceptsArray = concepts.split()
            for cat in conceptsArray:
                # Mint a fresh term URI per concept occurrence.
                term = ("<" + termPrefix + "/%s>" % i)
                triple2category = subject + " " + category + " " + term + " .\n"
                # NOTE(review): the termValue triple hangs off `subject`, not
                # the freshly-minted `term`; confirm against the intended
                # SLIPO schema (it may have been meant to be `term + ...`).
                categoryTerm = subject + " " + termValue + " \"" + cat + "\" .\n"
                rdf += triple2category + categoryTerm
                i = i+1
            # Serialize the first estimated location as a GeoSPARQL WKT point.
            locations = p['estimated_locations']
            geometry = locations[0]['geometry']
            coordinates = geometry['coordinates']
            # NOTE(review): standard GeoJSON orders coordinates as
            # [longitude, latitude]; this assumes [latitude, longitude].
            # Verify against the actual input format.
            lat = coordinates[0]
            long = coordinates[1]
            geometryObject = geometryID.replace("__id__", point_id)
            geo = subject + " " + hasGeometry + " " + geometryObject + " ."
            geoPoint = ((geometryObject + " " + wkt + " \"" + epsg + " POINT(%f %f)\"^^" + wktLiteral + " .") % (lat, long))
            rdf += geo + "\n" + geoPoint + "\n"
# Write all accumulated triples; 'w+' truncates any existing output file.
output_file = open('EOPEN_POIs_100.nt', 'w+')
output_file.write(rdf)
output_file.close()
| 35.793651 | 124 | 0.531264 | import json
prefix = "http://slipo.eu/id/poi"
rdftype = "<http://www.w3.org/1999/02/22-rdf-syntax-ns#type>"
poi = "<http://slipo.eu/def#POI>"
category = "<http://slipo.eu/def#category>"
termPrefix = "http://slipo.eu/id/term"
termValue = "<http://slipo.eu/def#termValue>"
rdf = ""
i = 0
hasGeometry = "<http://www.opengis.net/ont/geosparql#hasGeometry>"
geometryID = "<http://slipo.eu/id/poi/__id__/geometry>"
wkt = "<http://www.opengis.net/ont/geosparql#asWKT>"
epsg = "<http://www.opengis.net/def/crs/EPSG/0/4326>"
wktLiteral = "<http://www.opengis.net/ont/geosparql#wktLiteral>"
with open('EnglishFloodTweets__.json') as json_file:
data = json.load(json_file)
temp = 0
for p in data['tweets']:
print(p['id'])
temp = temp+1
point_id = p['id']
concepts = p['image_concepts']
if concepts != "n/a":
subject = "<" + prefix + "/" + point_id + ">"
triple = subject + " " + rdftype + " " + poi + " . "
rdf += triple + "\n"
conceptsArray = concepts.split()
for cat in conceptsArray:
term = ("<" + termPrefix + "/%s>" % i)
triple2category = subject + " " + category + " " + term + " .\n"
categoryTerm = subject + " " + termValue + " \"" + cat + "\" .\n"
rdf += triple2category + categoryTerm
i = i+1
locations = p['estimated_locations']
geometry = locations[0]['geometry']
coordinates = geometry['coordinates']
lat = coordinates[0]
long = coordinates[1]
geometryObject = geometryID.replace("__id__", point_id)
geo = subject + " " + hasGeometry + " " + geometryObject + " ."
geoPoint = ((geometryObject + " " + wkt + " \"" + epsg + " POINT(%f %f)\"^^" + wktLiteral + " .") % (lat, long))
rdf += geo + "\n" + geoPoint + "\n"
output_file = open('EOPEN_POIs_100.nt', 'w+')
output_file.write(rdf)
output_file.close()
| true | true |
f724c39610fa99418467816bbb09a2a24a283c11 | 776 | py | Python | alpyro_msgs/tf2_msgs/tf2error.py | rho2/alpyro_msgs | b5a680976c40c83df70d61bb2db1de32a1cde8d3 | [
"MIT"
] | 1 | 2020-12-13T13:07:10.000Z | 2020-12-13T13:07:10.000Z | alpyro_msgs/tf2_msgs/tf2error.py | rho2/alpyro_msgs | b5a680976c40c83df70d61bb2db1de32a1cde8d3 | [
"MIT"
] | null | null | null | alpyro_msgs/tf2_msgs/tf2error.py | rho2/alpyro_msgs | b5a680976c40c83df70d61bb2db1de32a1cde8d3 | [
"MIT"
] | null | null | null | from typing import Final
from alpyro_msgs import RosMessage, string, uint8
class TF2Error(RosMessage):
    """ROS message ``tf2_msgs/TF2Error``: a tf2 error code plus a human-readable description."""

    # Message metadata: ROS type name, base64-encoded .msg definition text,
    # and the definition's MD5 checksum.
    __msg_typ__ = "tf2_msgs/TF2Error"
    __msg_def__ = "dWludDggTk9fRVJST1I9MAp1aW50OCBMT09LVVBfRVJST1I9MQp1aW50OCBDT05ORUNUSVZJVFlfRVJST1I9Mgp1aW50OCBFWFRSQVBPTEFUSU9OX0VSUk9SPTMKdWludDggSU5WQUxJRF9BUkdVTUVOVF9FUlJPUj00CnVpbnQ4IFRJTUVPVVRfRVJST1I9NQp1aW50OCBUUkFOU0ZPUk1fRVJST1I9Ngp1aW50OCBlcnJvcgpzdHJpbmcgZXJyb3Jfc3RyaW5nCgo="
    __md5_sum__ = "bc6848fd6fd750c92e38575618a4917d"

    # Error-code constants mirroring the uint8 constants in the .msg definition.
    NO_ERROR: Final[uint8] = 0
    LOOKUP_ERROR: Final[uint8] = 1
    CONNECTIVITY_ERROR: Final[uint8] = 2
    EXTRAPOLATION_ERROR: Final[uint8] = 3
    INVALID_ARGUMENT_ERROR: Final[uint8] = 4
    TIMEOUT_ERROR: Final[uint8] = 5
    TRANSFORM_ERROR: Final[uint8] = 6

    # Message fields: the error code (one of the constants above) and its
    # free-form description.
    error: uint8
    error_string: string
| 40.842105 | 290 | 0.837629 | from typing import Final
from alpyro_msgs import RosMessage, string, uint8
class TF2Error(RosMessage):
__msg_typ__ = "tf2_msgs/TF2Error"
__msg_def__ = "dWludDggTk9fRVJST1I9MAp1aW50OCBMT09LVVBfRVJST1I9MQp1aW50OCBDT05ORUNUSVZJVFlfRVJST1I9Mgp1aW50OCBFWFRSQVBPTEFUSU9OX0VSUk9SPTMKdWludDggSU5WQUxJRF9BUkdVTUVOVF9FUlJPUj00CnVpbnQ4IFRJTUVPVVRfRVJST1I9NQp1aW50OCBUUkFOU0ZPUk1fRVJST1I9Ngp1aW50OCBlcnJvcgpzdHJpbmcgZXJyb3Jfc3RyaW5nCgo="
__md5_sum__ = "bc6848fd6fd750c92e38575618a4917d"
NO_ERROR: Final[uint8] = 0
LOOKUP_ERROR: Final[uint8] = 1
CONNECTIVITY_ERROR: Final[uint8] = 2
EXTRAPOLATION_ERROR: Final[uint8] = 3
INVALID_ARGUMENT_ERROR: Final[uint8] = 4
TIMEOUT_ERROR: Final[uint8] = 5
TRANSFORM_ERROR: Final[uint8] = 6
error: uint8
error_string: string
| true | true |
f724c467f82e4312c2a73d80e60f4b58409440e2 | 4,977 | py | Python | tests/contrib/operators/test_hive_to_dynamodb_operator.py | fxdmhtt/airflow | cf88f7bc7bbd3e9bf110e98f025759a96c130235 | [
"Apache-2.0"
] | 3 | 2019-03-28T05:59:39.000Z | 2019-10-03T22:05:25.000Z | tests/contrib/operators/test_hive_to_dynamodb_operator.py | fxdmhtt/airflow | cf88f7bc7bbd3e9bf110e98f025759a96c130235 | [
"Apache-2.0"
] | 7 | 2019-03-27T07:58:14.000Z | 2020-02-12T17:42:33.000Z | tests/contrib/operators/test_hive_to_dynamodb_operator.py | fxdmhtt/airflow | cf88f7bc7bbd3e9bf110e98f025759a96c130235 | [
"Apache-2.0"
] | 5 | 2017-06-19T19:55:47.000Z | 2020-10-10T00:49:20.000Z | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import json
import unittest
from unittest import mock
import datetime
import pandas as pd
from airflow import DAG
from airflow.contrib.hooks.aws_dynamodb_hook import AwsDynamoDBHook
import airflow.contrib.operators.hive_to_dynamodb
DEFAULT_DATE = datetime.datetime(2015, 1, 1)
DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]
try:
from moto import mock_dynamodb2
except ImportError:
mock_dynamodb2 = None
class HiveToDynamoDBTransferOperatorTest(unittest.TestCase):
    """Tests for ``HiveToDynamoDBTransferOperator`` against a moto-mocked DynamoDB."""

    def setUp(self):
        args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
        dag = DAG('test_dag_id', default_args=args)
        self.dag = dag
        self.sql = 'SELECT 1'
        self.hook = AwsDynamoDBHook(
            aws_conn_id='aws_default', region_name='us-east-1')

    @staticmethod
    def process_data(data, *args, **kwargs):
        """Pre-process hook: convert a pandas DataFrame into a list of records."""
        return json.loads(data.to_json(orient='records'))

    def _create_airflow_table(self):
        """Create the 'test_airflow' table (in production this table pre-exists)."""
        self.hook.get_conn().create_table(
            TableName='test_airflow',
            KeySchema=[
                {
                    'AttributeName': 'id',
                    'KeyType': 'HASH'
                },
            ],
            AttributeDefinitions=[
                {
                    'AttributeName': 'name',
                    'AttributeType': 'S'
                }
            ],
            ProvisionedThroughput={
                'ReadCapacityUnits': 10,
                'WriteCapacityUnits': 10
            }
        )

    @unittest.skipIf(mock_dynamodb2 is None, 'mock_dynamodb2 package not present')
    @mock_dynamodb2
    def test_get_conn_returns_a_boto3_connection(self):
        hook = AwsDynamoDBHook(aws_conn_id='aws_default')
        self.assertIsNotNone(hook.get_conn())

    @mock.patch('airflow.hooks.hive_hooks.HiveServer2Hook.get_pandas_df',
                return_value=pd.DataFrame(data=[('1', 'sid')], columns=['id', 'name']))
    @unittest.skipIf(mock_dynamodb2 is None, 'mock_dynamodb2 package not present')
    @mock_dynamodb2
    def test_get_records_with_schema(self, get_results_mock):
        # `get_results_mock` is injected by @mock.patch; the Hive query result
        # is stubbed, so only the DynamoDB write path is exercised here.
        self._create_airflow_table()

        operator = airflow.contrib.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator(
            sql=self.sql,
            table_name="test_airflow",
            task_id='hive_to_dynamodb_check',
            table_keys=['id'],
            dag=self.dag)

        operator.execute(None)

        table = self.hook.get_conn().Table('test_airflow')
        table.meta.client.get_waiter(
            'table_exists').wait(TableName='test_airflow')
        self.assertEqual(table.item_count, 1)

    @mock.patch('airflow.hooks.hive_hooks.HiveServer2Hook.get_pandas_df',
                return_value=pd.DataFrame(data=[('1', 'sid'), ('1', 'gupta')], columns=['id', 'name']))
    @unittest.skipIf(mock_dynamodb2 is None, 'mock_dynamodb2 package not present')
    @mock_dynamodb2
    def test_pre_process_records_with_schema(self, get_results_mock):
        # Both stubbed rows share the key '1', so after pre-processing and the
        # keyed write only a single item must remain in the table.
        self._create_airflow_table()

        operator = airflow.contrib.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator(
            sql=self.sql,
            table_name='test_airflow',
            task_id='hive_to_dynamodb_check',
            table_keys=['id'],
            pre_process=self.process_data,
            dag=self.dag)

        operator.execute(None)

        table = self.hook.get_conn().Table('test_airflow')
        table.meta.client.get_waiter('table_exists').wait(TableName='test_airflow')
        self.assertEqual(table.item_count, 1)
if __name__ == '__main__':
unittest.main()
| 34.089041 | 103 | 0.625477 |
import json
import unittest
from unittest import mock
import datetime
import pandas as pd
from airflow import DAG
from airflow.contrib.hooks.aws_dynamodb_hook import AwsDynamoDBHook
import airflow.contrib.operators.hive_to_dynamodb
DEFAULT_DATE = datetime.datetime(2015, 1, 1)
DEFAULT_DATE_ISO = DEFAULT_DATE.isoformat()
DEFAULT_DATE_DS = DEFAULT_DATE_ISO[:10]
try:
from moto import mock_dynamodb2
except ImportError:
mock_dynamodb2 = None
class HiveToDynamoDBTransferOperatorTest(unittest.TestCase):
def setUp(self):
args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
dag = DAG('test_dag_id', default_args=args)
self.dag = dag
self.sql = 'SELECT 1'
self.hook = AwsDynamoDBHook(
aws_conn_id='aws_default', region_name='us-east-1')
@staticmethod
def process_data(data, *args, **kwargs):
return json.loads(data.to_json(orient='records'))
@unittest.skipIf(mock_dynamodb2 is None, 'mock_dynamodb2 package not present')
@mock_dynamodb2
def test_get_conn_returns_a_boto3_connection(self):
hook = AwsDynamoDBHook(aws_conn_id='aws_default')
self.assertIsNotNone(hook.get_conn())
@mock.patch('airflow.hooks.hive_hooks.HiveServer2Hook.get_pandas_df',
return_value=pd.DataFrame(data=[('1', 'sid')], columns=['id', 'name']))
@unittest.skipIf(mock_dynamodb2 is None, 'mock_dynamodb2 package not present')
@mock_dynamodb2
def test_get_records_with_schema(self, get_results_mock):
self.hook.get_conn().create_table(
TableName='test_airflow',
KeySchema=[
{
'AttributeName': 'id',
'KeyType': 'HASH'
},
],
AttributeDefinitions=[
{
'AttributeName': 'name',
'AttributeType': 'S'
}
],
ProvisionedThroughput={
'ReadCapacityUnits': 10,
'WriteCapacityUnits': 10
}
)
operator = airflow.contrib.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator(
sql=self.sql,
table_name="test_airflow",
task_id='hive_to_dynamodb_check',
table_keys=['id'],
dag=self.dag)
operator.execute(None)
table = self.hook.get_conn().Table('test_airflow')
table.meta.client.get_waiter(
'table_exists').wait(TableName='test_airflow')
self.assertEqual(table.item_count, 1)
@mock.patch('airflow.hooks.hive_hooks.HiveServer2Hook.get_pandas_df',
return_value=pd.DataFrame(data=[('1', 'sid'), ('1', 'gupta')], columns=['id', 'name']))
@unittest.skipIf(mock_dynamodb2 is None, 'mock_dynamodb2 package not present')
@mock_dynamodb2
def test_pre_process_records_with_schema(self, get_results_mock):
self.hook.get_conn().create_table(
TableName='test_airflow',
KeySchema=[
{
'AttributeName': 'id',
'KeyType': 'HASH'
},
],
AttributeDefinitions=[
{
'AttributeName': 'name',
'AttributeType': 'S'
}
],
ProvisionedThroughput={
'ReadCapacityUnits': 10,
'WriteCapacityUnits': 10
}
)
operator = airflow.contrib.operators.hive_to_dynamodb.HiveToDynamoDBTransferOperator(
sql=self.sql,
table_name='test_airflow',
task_id='hive_to_dynamodb_check',
table_keys=['id'],
pre_process=self.process_data,
dag=self.dag)
operator.execute(None)
table = self.hook.get_conn().Table('test_airflow')
table.meta.client.get_waiter('table_exists').wait(TableName='test_airflow')
self.assertEqual(table.item_count, 1)
if __name__ == '__main__':
unittest.main()
| true | true |
f724c73e2a3a025bfe20a3a8f316a5cc999fcf47 | 1,013 | py | Python | dashboard/models.py | farahaulita/pbp-tk | fabf8e07ed0e1270d3e98a3d1bdd46267a1a4d6c | [
"Unlicense"
] | null | null | null | dashboard/models.py | farahaulita/pbp-tk | fabf8e07ed0e1270d3e98a3d1bdd46267a1a4d6c | [
"Unlicense"
] | null | null | null | dashboard/models.py | farahaulita/pbp-tk | fabf8e07ed0e1270d3e98a3d1bdd46267a1a4d6c | [
"Unlicense"
] | null | null | null | from django.db import models
from login.models import User #add this
from django.dispatch import receiver #add this
from django.db.models.signals import post_save
from datetime import datetime
# SOURCE: https://www.ordinarycoders.com/django-custom-user-profile
class Profile(models.Model):
    """Per-user profile data, kept in sync with ``User`` via post_save signals."""

    # One profile per user; deleting the user cascades to the profile.
    user = models.OneToOneField(User, on_delete=models.CASCADE)
    image = models.ImageField(default = 'undraw_profile.svg', upload_to='profile_pics')
    name = models.CharField(max_length=256, default="Enter Name")
    birth_date = models.DateField(default=datetime.now)
    address = models.CharField(max_length=256, default="Enter Address")

    # NOTE(review): these handlers are declared inside the class body without
    # `self`; @receiver registers the plain functions when the class body
    # executes, so they act like module-level signal handlers.
    @receiver(post_save, sender=User)  # create the profile when a user is created
    def create_user_profile(sender, instance, created, **kwargs):
        if created:
            Profile.objects.create(user=instance)

    @receiver(post_save, sender=User)  # persist the profile whenever the user is saved
    def save_user_profile(sender, instance, **kwargs):
        instance.profile.save()
| 38.961538 | 87 | 0.740375 | from django.db import models
from login.models import User
from django.dispatch import receiver
from django.db.models.signals import post_save
from datetime import datetime
class Profile(models.Model):
user = models.OneToOneField(User, on_delete=models.CASCADE)
image = models.ImageField(default = 'undraw_profile.svg', upload_to='profile_pics')
name = models.CharField(max_length=256, default="Enter Name")
birth_date = models.DateField(default=datetime.now)
address = models.CharField(max_length=256, default="Enter Address")
@receiver(post_save, sender=User)
def create_user_profile(sender, instance, created, **kwargs):
if created:
Profile.objects.create(user=instance)
@receiver(post_save, sender=User)
def save_user_profile(sender, instance, **kwargs):
instance.profile.save()
| true | true |
f724c7566d86613c348af51cc3a34cdf7fc5d540 | 97,191 | py | Python | snmp/tests/test_profiles.py | onurdialpad/integrations-core | e718b52d5878b20ff161a3ee6f24e5e845102d91 | [
"BSD-3-Clause"
] | null | null | null | snmp/tests/test_profiles.py | onurdialpad/integrations-core | e718b52d5878b20ff161a3ee6f24e5e845102d91 | [
"BSD-3-Clause"
] | null | null | null | snmp/tests/test_profiles.py | onurdialpad/integrations-core | e718b52d5878b20ff161a3ee6f24e5e845102d91 | [
"BSD-3-Clause"
] | null | null | null | # (C) Datadog, Inc. 2020-present
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
import logging
import pytest
from datadog_checks.base import ConfigurationError
from datadog_checks.dev.utils import get_metadata_metrics
from datadog_checks.snmp import SnmpCheck
from datadog_checks.snmp.utils import (
_get_profile_name,
_is_abstract_profile,
_iter_default_profile_file_paths,
get_profile_definition,
recursively_expand_base_profiles,
)
from . import common
from .metrics import (
ADAPTER_IF_COUNTS,
CCCA_ROUTER_GAUGES,
CIE_METRICS,
COS_COUNTS,
COS_RATES,
CPU_METRICS,
DCU_COUNTS,
DISK_GAUGES,
DRS_GAUGES,
FIREWALL_COUNTS,
FRU_METRICS,
IF_BANDWIDTH_USAGE,
IF_COUNTS,
IF_GAUGES,
IF_RATES,
IP_COUNTS,
IP_IF_COUNTS,
IPX_COUNTS,
LTM_GAUGES,
LTM_NODES_COUNTS,
LTM_NODES_GAUGES,
LTM_NODES_RATES,
LTM_POOL_COUNTS,
LTM_POOL_GAUGES,
LTM_POOL_MEMBER_COUNTS,
LTM_POOL_MEMBER_GAUGES,
LTM_POOL_MEMBER_RATES,
LTM_POOL_RATES,
LTM_VIRTUAL_SERVER_COUNTS,
LTM_VIRTUAL_SERVER_GAUGES,
LTM_VIRTUAL_SERVER_RATES,
MEMORY_METRICS,
PEER_GAUGES,
PEER_RATES,
PROBE_GAUGES,
SCU_COUNTS,
SYSTEM_STATUS_GAUGES,
TCP_COUNTS,
TCP_GAUGES,
UDP_COUNTS,
USER_FIREWALL,
VIRTUAL_CHASSIS_COUNTS,
VIRTUAL_CHASSIS_RATES,
VOLTAGE_GAUGES,
)
pytestmark = common.python_autodiscovery_only
def test_load_profiles(caplog):
    """Every shipped profile must load into a check config without errors or warnings."""
    check = SnmpCheck('snmp', {}, [common.generate_instance_config([])])

    caplog.at_level(logging.WARNING)
    for profile_name, profile_definition in check.profiles.items():
        try:
            check._config.refresh_with_profile(profile_definition)
        except ConfigurationError as e:
            pytest.fail("Profile `{}` is not configured correctly: {}".format(profile_name, e))

        assert "table doesn't have a 'metric_tags' section" not in caplog.text
        caplog.clear()
def test_profile_hierarchy():
    """
    * Only concrete profiles MUST inherit from '_base.yaml'.
    * Only concrete profiles MUST define a `sysobjectid` field.
    """
    # Legacy base profiles kept extending '_base.yaml' for backward compatibility.
    compat_base_profiles = ['_base_cisco', '_base_cisco_voice']
    errors = []

    for path in _iter_default_profile_file_paths():
        name = _get_profile_name(path)
        definition = get_profile_definition({'definition_file': path})
        extends = definition.get('extends', [])
        sysobjectid = definition.get('sysobjectid')

        if not _is_abstract_profile(name):
            # Concrete profile: must extend the base profile and identify devices.
            if '_base.yaml' not in extends:
                errors.append("'{}': concrete profile must directly extend '_base.yaml'".format(name))
            if sysobjectid is None:
                errors.append("'{}': concrete profile must define a `sysobjectid`".format(name))
            continue

        # Mixin profile: must neither extend the base profile nor claim a sysobjectid.
        if '_base.yaml' in extends and name not in compat_base_profiles:
            errors.append("'{}': mixin wrongly extends '_base.yaml'".format(name))
        if sysobjectid is not None:
            errors.append("'{}': mixin wrongly defines a `sysobjectid`".format(name))

    if errors:
        pytest.fail('\n'.join(sorted(errors)))
def run_profile_check(recording_name, profile_name=None):
    """
    Run a single check with the provided `recording_name` used as
    `community_string` by the docker SNMP endpoint.
    """
    instance = common.generate_instance_config([])
    instance['community_string'] = recording_name
    instance['enforce_mib_constraints'] = False
    check = SnmpCheck('snmp', {}, [instance])

    # Resolve the profile to validate: an explicit `profile_name` wins;
    # otherwise the recording name itself is tried as a profile name.
    lookup_name = profile_name if profile_name is not None else recording_name
    profile = check.profiles.get(lookup_name)
    if profile:
        try:
            test_check = SnmpCheck('snmp', {}, [common.generate_instance_config([])])
            test_check._config.refresh_with_profile(profile)
        except ConfigurationError as e:
            pytest.fail("Profile `{}` is not configured correctly: {}".format(recording_name, e))

    check.check(instance)
@pytest.mark.unit
@pytest.mark.parametrize(
    'definition_file, equivalent_definition',
    [
        pytest.param('_base_cisco.yaml', {'extends': ['_base.yaml', '_cisco-generic.yaml']}, id='generic'),
        pytest.param(
            '_base_cisco_voice.yaml',
            {'extends': ['_base.yaml', '_cisco-generic.yaml', '_cisco-voice.yaml']},
            id='voice',
        ),
    ],
)
def test_compat_cisco_base_profiles(definition_file, equivalent_definition):
    # type: (str, dict) -> None
    """
    Cisco and Cisco Voice base profiles were replaced by mixins (see Pull #6792).
    But their definition files should still be present and contain equivalent metrics to ensure backward compatibility.
    """
    definition = get_profile_definition({'definition_file': definition_file})

    # After fully expanding `extends` chains on both sides, the legacy base
    # profile and its mixin-based equivalent must be identical.
    recursively_expand_base_profiles(definition)
    recursively_expand_base_profiles(equivalent_definition)

    assert definition == equivalent_definition
@pytest.mark.usefixtures("dd_environment")
def test_cisco_voice(aggregator):
    """Validate the metrics emitted by the `cisco_icm` profile against its recording.

    Covers HOST-RESOURCES process metrics, CVP SIP/licensing gauges, CCM phone
    counters, dial-peer call volumes and CCCA router gauges.
    """
    run_profile_check('cisco_icm')
    tags = [
        'snmp_profile:cisco_icm',
        'snmp_host:test',
        'device_vendor:cisco',
    ] + common.CHECK_TAGS
    # HOST-RESOURCES-MIB: per-process perf metrics plus per-run-index status.
    resources = ["hrSWRunPerfMem", "hrSWRunPerfCPU"]
    common.assert_common_metrics(aggregator, tags)
    for resource in resources:
        aggregator.assert_metric('snmp.{}'.format(resource), metric_type=aggregator.GAUGE, tags=tags)
    run_indices = [4, 7, 8, 9, 10, 18, 24, 29, 30]
    for index in run_indices:
        status_tags = tags + ['run_index:{}'.format(index)]
        aggregator.assert_metric('snmp.hrSWRunStatus', metric_type=aggregator.GAUGE, tags=status_tags)
    # Cisco Voice Portal (CVP) SIP and licensing gauges.
    cvp_gauges = [
        "ccvpSipIntAvgLatency1",
        "ccvpSipIntAvgLatency2",
        "ccvpSipIntConnectsRcv",
        "ccvpSipIntNewCalls",
        "ccvpSipRtActiveCalls",
        "ccvpSipRtTotalCallLegs",
        "ccvpLicRtPortsInUse",
        "ccvpLicAggMaxPortsInUse",
    ]
    for cvp in cvp_gauges:
        aggregator.assert_metric('snmp.{}'.format(cvp), metric_type=aggregator.GAUGE, tags=tags)
    # Cisco CallManager (CCM) phone/gateway registration metrics.
    ccms_counts = ["ccmRejectedPhones", "ccmUnregisteredPhones"]
    ccms_gauges = ["ccmRegisteredGateways", "ccmRegisteredPhones"]
    for ccm in ccms_counts:
        aggregator.assert_metric('snmp.{}'.format(ccm), metric_type=aggregator.RATE, tags=tags)
    for ccm in ccms_gauges:
        aggregator.assert_metric('snmp.{}'.format(ccm), metric_type=aggregator.GAUGE, tags=tags)
    # Per-peer call volume metrics, one series per peer index in the recording.
    calls = [
        "cvCallVolPeerIncomingCalls",
        "cvCallVolPeerOutgoingCalls",
    ]
    peers = [4, 13, 14, 17, 18, 22, 25, 30, 31]
    for call in calls:
        for peer in peers:
            peer_tags = tags + ["peer_index:{}".format(peer)]
            aggregator.assert_metric('snmp.{}'.format(call), metric_type=aggregator.GAUGE, tags=peer_tags)
    calls = [
        "cvCallVolMediaIncomingCalls",
        "cvCallVolMediaOutgoingCalls",
    ]
    for call in calls:
        aggregator.assert_metric('snmp.{}'.format(call), metric_type=aggregator.GAUGE, tags=tags)
    # DIAL-CONTROL-MIB per-peer counters (single peer index 7 in this recording).
    dial_controls = [
        "dialCtlPeerStatsAcceptCalls",
        "dialCtlPeerStatsFailCalls",
        "dialCtlPeerStatsRefuseCalls",
        "dialCtlPeerStatsSuccessCalls",
    ]
    for ctl in dial_controls:
        aggregator.assert_metric(
            'snmp.{}'.format(ctl), metric_type=aggregator.MONOTONIC_COUNT, tags=["peer_index:7"] + tags
        )
    pim_tags = tags + ['pim_host:test', 'pim_name:name', 'pim_num:2']
    aggregator.assert_metric('snmp.{}'.format("cccaPimStatus"), metric_type=aggregator.GAUGE, tags=pim_tags)
    aggregator.assert_metric('snmp.{}'.format("sysUpTimeInstance"), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # CCCA router gauges, one series per router instance number.
    instance_numbers = ['4446', '5179', '12093', '19363', '25033', '37738', '42562', '51845', '62906', '63361']
    for metric in CCCA_ROUTER_GAUGES:
        for instance_number in instance_numbers:
            instance_tags = tags + ['instance_number:{}'.format(instance_number)]
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=instance_tags)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_f5(aggregator):
    """Validate the metrics emitted by the `f5-big-ip` profile against its recording.

    Covers system memory/TCP/UDP/SSL gauges and counters, per-CPU rates,
    IF-MIB interface metrics (bandwidth usage only for interfaces with a
    reported speed), IP counters, and LTM virtual server / node / pool /
    pool-member metrics.
    """
    profile = 'f5-big-ip'
    run_profile_check(profile)
    gauges = [
        'sysStatMemoryTotal',
        'sysStatMemoryUsed',
        'sysGlobalTmmStatMemoryTotal',
        'sysGlobalTmmStatMemoryUsed',
        'sysGlobalHostOtherMemoryTotal',
        'sysGlobalHostOtherMemoryUsed',
        'sysGlobalHostSwapTotal',
        'sysGlobalHostSwapUsed',
        'sysTcpStatOpen',
        'sysTcpStatCloseWait',
        'sysTcpStatFinWait',
        'sysTcpStatTimeWait',
        'sysUdpStatOpen',
        'sysClientsslStatCurConns',
    ]
    counts = [
        'sysTcpStatAccepts',
        'sysTcpStatAcceptfails',
        'sysTcpStatConnects',
        'sysTcpStatConnfails',
        'sysUdpStatAccepts',
        'sysUdpStatAcceptfails',
        'sysUdpStatConnects',
        'sysUdpStatConnfails',
        'sysClientsslStatEncryptedBytesIn',
        'sysClientsslStatEncryptedBytesOut',
        'sysClientsslStatDecryptedBytesIn',
        'sysClientsslStatDecryptedBytesOut',
        'sysClientsslStatHandshakeFailures',
    ]
    cpu_rates = [
        'sysMultiHostCpuUser',
        'sysMultiHostCpuNice',
        'sysMultiHostCpuSystem',
        'sysMultiHostCpuIdle',
        'sysMultiHostCpuIrq',
        'sysMultiHostCpuSoftirq',
        'sysMultiHostCpuIowait',
    ]
    # (interface name, alias) pairs present in the recording.
    interfaces = [
        ('1.0', 'desc2'),
        ('mgmt', 'desc1'),
        ('/Common/internal', 'desc5'),
        ('/Common/http-tunnel', 'desc3'),
        ('/Common/socks-tunnel', 'desc4'),
    ]
    # Bandwidth usage can only be computed for interfaces that report a speed.
    interfaces_with_bandwidth_usage = {
        '1.0',
        'mgmt',
        '/Common/internal',
    }
    tags = [
        'snmp_profile:' + profile,
        'snmp_host:f5-big-ip-adc-good-byol-1-vm.c.datadog-integrations-lab.internal',
        'device_vendor:f5',
    ]
    tags += common.CHECK_TAGS
    common.assert_common_metrics(aggregator, tags)
    for metric in gauges:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    for metric in counts:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1)
    # CPU rates are reported per CPU core; this recording exposes cores 0 and 1.
    for metric in cpu_rates:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=['cpu:0'] + tags, count=1)
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=['cpu:1'] + tags, count=1)
    for interface, desc in interfaces:
        interface_tags = ['interface:{}'.format(interface), 'interface_alias:{}'.format(desc)] + tags
        for metric in IF_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=interface_tags, count=1
            )
        for metric in IF_RATES:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=interface_tags, count=1
            )
        if interface in interfaces_with_bandwidth_usage:
            for metric in IF_BANDWIDTH_USAGE:
                aggregator.assert_metric(
                    'snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=interface_tags, count=1
                )
        for metric in IF_GAUGES:
            aggregator.assert_metric(
                'snmp.{}'.format(metric),
                metric_type=aggregator.GAUGE,
                tags=interface_tags,
                count=1,
            )
    for version in ['ipv4', 'ipv6']:
        ip_tags = ['ipversion:{}'.format(version)] + tags
        for metric in IP_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=ip_tags, count=1
            )
    for metric in LTM_GAUGES:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # LTM virtual servers, nodes, pools and pool members each get their own tag.
    servers = ['server1', 'server2', 'server3']
    for server in servers:
        server_tags = tags + ['server:{}'.format(server)]
        for metric in LTM_VIRTUAL_SERVER_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=server_tags, count=1)
        for metric in LTM_VIRTUAL_SERVER_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=server_tags, count=1
            )
        for metric in LTM_VIRTUAL_SERVER_RATES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=server_tags, count=1)
    nodes = ['node1', 'node2', 'node3']
    for node in nodes:
        node_tags = tags + ['node:{}'.format(node)]
        for metric in LTM_NODES_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=node_tags, count=1)
        for metric in LTM_NODES_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=node_tags, count=1
            )
        for metric in LTM_NODES_RATES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=node_tags, count=1)
    pools = ['pool1', 'pool2']
    for pool in pools:
        pool_tags = tags + ['pool:{}'.format(pool)]
        for metric in LTM_POOL_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=pool_tags, count=1)
        for metric in LTM_POOL_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=pool_tags, count=1
            )
        for metric in LTM_POOL_RATES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=pool_tags, count=1)
    pool_members = [('pool1', 'node1'), ('pool1', 'node2'), ('pool2', 'node3')]
    for pool, node in pool_members:
        pool_member_tags = tags + ['pool:{}'.format(pool), 'node:{}'.format(node)]
        for metric in LTM_POOL_MEMBER_GAUGES:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=pool_member_tags, count=1
            )
        for metric in LTM_POOL_MEMBER_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=pool_member_tags, count=1
            )
        for metric in LTM_POOL_MEMBER_RATES:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=pool_member_tags, count=1
            )
    aggregator.assert_metric('snmp.sysUpTimeInstance', count=1)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_router(aggregator):
    """Validate the metrics emitted by the `generic-router` profile against its recording."""
    profile = "generic-router"
    run_profile_check(profile)
    common_tags = common.CHECK_TAGS + ['snmp_profile:' + profile]
    common.assert_common_metrics(aggregator, common_tags)

    def expect(name, metric_type, tags):
        # Every series in this recording is expected exactly once.
        aggregator.assert_metric('snmp.{}'.format(name), metric_type=metric_type, tags=tags, count=1)

    # IF-MIB metrics, tagged with the interface name and its alias.
    for iface, alias in [('eth0', 'kept'), ('eth1', 'their forward oxen')]:
        iface_tags = ['interface:{}'.format(iface), 'interface_alias:{}'.format(alias)] + common_tags
        for name in IF_COUNTS:
            expect(name, aggregator.MONOTONIC_COUNT, iface_tags)
        for name in IF_RATES + IF_BANDWIDTH_USAGE:
            expect(name, aggregator.RATE, iface_tags)
        for name in IF_GAUGES:
            expect(name, aggregator.GAUGE, iface_tags)
    # TCP-MIB / UDP-MIB metrics carry only the common tags.
    for name in TCP_COUNTS + UDP_COUNTS:
        expect(name, aggregator.MONOTONIC_COUNT, common_tags)
    for name in TCP_GAUGES:
        expect(name, aggregator.GAUGE, common_tags)
    # IP-MIB counters are reported per IP version, and the per-interface
    # variants additionally carry the interface index.
    for version in ('ipv4', 'ipv6'):
        version_tag = 'ipversion:{}'.format(version)
        for name in IP_COUNTS + IPX_COUNTS:
            expect(name, aggregator.MONOTONIC_COUNT, [version_tag] + common_tags)
        for name in IP_IF_COUNTS:
            for iface_index in ('17', '21'):
                expect(
                    name,
                    aggregator.MONOTONIC_COUNT,
                    [version_tag, 'interface:{}'.format(iface_index)] + common_tags,
                )
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_f5_router(aggregator):
    """Run the generic router profile against the F5 device recording and validate the emitted metrics."""
    # Point the check at the f5 recording while forcing the generic-router profile.
    instance = common.generate_instance_config([])
    instance['community_string'] = 'f5-big-ip'
    instance['enforce_mib_constraints'] = False
    init_config = {'profiles': {'router': {'definition_file': 'generic-router.yaml'}}}
    check = SnmpCheck('snmp', init_config, [instance])
    check.check(instance)

    common_tags = [
        'snmp_profile:router',
        'snmp_host:f5-big-ip-adc-good-byol-1-vm.c.datadog-integrations-lab.internal',
    ] + common.CHECK_TAGS
    common.assert_common_metrics(aggregator, common_tags)

    def expect(name, metric_type, tags):
        # Every series in this recording is expected exactly once.
        aggregator.assert_metric('snmp.{}'.format(name), metric_type=metric_type, tags=tags, count=1)

    # Bandwidth usage is only computable for interfaces that report a speed.
    with_bandwidth = {'1.0', 'mgmt', '/Common/internal'}
    interface_aliases = [
        ('1.0', 'desc2'),
        ('mgmt', 'desc1'),
        ('/Common/internal', 'desc5'),
        ('/Common/http-tunnel', 'desc3'),
        ('/Common/socks-tunnel', 'desc4'),
    ]
    for iface, alias in interface_aliases:
        iface_tags = ['interface:{}'.format(iface), 'interface_alias:{}'.format(alias)] + common_tags
        for name in IF_COUNTS:
            expect(name, aggregator.MONOTONIC_COUNT, iface_tags)
        for name in IF_RATES:
            expect(name, aggregator.RATE, iface_tags)
        for name in IF_GAUGES:
            expect(name, aggregator.GAUGE, iface_tags)
        if iface in with_bandwidth:
            for name in IF_BANDWIDTH_USAGE:
                expect(name, aggregator.RATE, iface_tags)
    for version in ('ipv4', 'ipv6'):
        ip_tags = ['ipversion:{}'.format(version)] + common_tags
        for name in IP_COUNTS:
            expect(name, aggregator.MONOTONIC_COUNT, ip_tags)
    aggregator.assert_metric('snmp.sysUpTimeInstance', count=1)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_cisco_3850(aggregator):
    """Validate the metrics emitted by the `cisco-3850` profile against its recording.

    Covers IF-MIB interface metrics, IPv6 counters, TCP/UDP metrics, entity
    sensors, FRU power, CPU, CIE interface metrics, environment monitors,
    stack/switch state, fan trays, memory pools and OSPF neighbor/interface
    metrics.
    """
    profile = "cisco-3850"
    run_profile_check(profile)
    # We're not covering all interfaces
    interfaces = ["Gi1/0/{}".format(i) for i in range(1, 48)]
    common_tags = common.CHECK_TAGS + [
        'snmp_host:Cat-3850-4th-Floor.companyname.local',
        'snmp_profile:' + profile,
        'device_vendor:cisco',
    ]
    common.assert_common_metrics(aggregator, common_tags)
    # Interface aliases present in the recording; interfaces missing from this
    # mapping are expected to report an empty alias.
    aliases = {
        'Gi1/0/24': 'LWAP-example',
        'Gi1/0/33': 'switchboard console',
        'Gi1/0/38': 'Mitel Console',
        'Gi1/1/3': 'Link to Switch',
        'Gi2/0/13': 'AP01',
        'Gi2/0/14': 'AP02',
        'Gi2/0/15': 'AP03',
        'Gi2/0/16': 'AP04',
        'Gi2/0/17': 'AP05',
        'Gi2/0/18': 'AP06',
        'Gi2/1/4': 'Link to Switch',
    }
    for interface in interfaces:
        alias = aliases.get(interface, '')
        tags = ['interface:{}'.format(interface), 'interface_alias:{}'.format(alias)] + common_tags
        for metric in IF_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
            )
        for metric in IF_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
        for metric in IF_BANDWIDTH_USAGE:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=tags, count=1)
        for metric in IF_RATES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=tags, count=1)
    # This recording only exposes the ipv6 rows of the IP counters.
    for metric in IP_COUNTS + IPX_COUNTS:
        tags = common_tags + ['ipversion:ipv6']
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1)
    for metric in TCP_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    for metric in TCP_GAUGES:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    for metric in UDP_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    sensors = [1006, 1007, 1008, 2006, 2007, 2008]
    for sensor in sensors:
        tags = ['sensor_id:{}'.format(sensor), 'sensor_type:8'] + common_tags
        aggregator.assert_metric('snmp.entSensorValue', metric_type=aggregator.GAUGE, tags=tags, count=1)
    frus = [1001, 1010, 2001, 2010]
    for fru in frus:
        tags = ['fru:{}'.format(fru)] + common_tags
        for metric in FRU_METRICS:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    cpus = [1000, 2000]
    for cpu in cpus:
        tags = ['cpu:{}'.format(cpu)] + common_tags
        for metric in CPU_METRICS:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    for interface in interfaces:
        tags = ['interface:{}'.format(interface)] + common_tags
        for metric in CIE_METRICS:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
        aggregator.assert_metric('snmp.cieIfResetCount', metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1)
    power_supplies = [
        (1, 'Switch 1 - Power Supply B, NotExist'),
        (1, 'Switch 2 - Power Supply B, NotExist'),
        (2, 'Switch 1 - Power Supply A, Normal'),
        (2, 'Switch 2 - Power Supply A, Normal'),
    ]
    for source, descr in power_supplies:
        env_tags = ['power_source:{}'.format(source), 'power_status_descr:{}'.format(descr)]
        aggregator.assert_metric(
            'snmp.ciscoEnvMonSupplyState', metric_type=aggregator.GAUGE, tags=env_tags + common_tags
        )
    aggregator.assert_metric('snmp.ciscoEnvMonFanState', metric_type=aggregator.GAUGE, tags=common_tags)
    aggregator.assert_metric('snmp.cswStackPortOperStatus', metric_type=aggregator.GAUGE)
    for switch, mac_addr in [(1, '0x046c9d42b080'), (2, '0xdccec1430680')]:
        tags = ['entity_name:Switch {}'.format(switch), 'mac_addr:{}'.format(mac_addr)] + common_tags
        aggregator.assert_metric('snmp.cswSwitchState', metric_type=aggregator.GAUGE, tags=tags)
    frus = [1011, 1012, 1013, 2011, 2012, 2013]
    for fru in frus:
        # Fix: pass the precomputed tag list instead of rebuilding it inline
        # (the previous `tags` assignment was dead code).
        tags = ['fru:{}'.format(fru)] + common_tags
        aggregator.assert_metric('snmp.cefcFanTrayOperStatus', metric_type=aggregator.GAUGE, tags=tags)
    # Fix: loop variable renamed `metrics` -> `metric` (each item is a single name).
    for metric in MEMORY_METRICS:
        for pool in ['Processor', 'IOS Process stack']:
            tags = ['mem_pool_name:{}'.format(pool)] + common_tags
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags)
    neighbor_metrics = [
        ('ospfNbrEvents', aggregator.RATE),
        ('ospfNbrState', aggregator.GAUGE),
        ('ospfNbrLsRetransQLen', aggregator.GAUGE),
    ]
    for metric, metric_type in neighbor_metrics:
        tags = ['neighbor_ip:192.29.116.26', 'neighbor_id:192.29.66.79'] + common_tags
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=metric_type, tags=tags, count=1)
    lls_metrics = ['ospfIfRetransInterval', 'ospfIfState']
    for metric in lls_metrics:
        tags = ['ospf_ip_addr:192.29.116.25'] + common_tags
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    for temp_index in [1006, 1007, 1008, 2006, 2007, 2008]:
        env_tag = ['temp_index:{}'.format(temp_index), 'temp_state:1']
        aggregator.assert_metric(
            'snmp.ciscoEnvMonTemperatureStatusValue', metric_type=aggregator.GAUGE, tags=env_tag + common_tags
        )
    aggregator.assert_metric('snmp.sysUpTimeInstance', count=1)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_meraki_cloud_controller(aggregator):
    """Validate the metrics emitted by the `meraki-cloud-controller` profile against its recording."""
    run_profile_check('meraki-cloud-controller')
    common_tags = common.CHECK_TAGS + [
        'snmp_profile:meraki-cloud-controller',
        'snmp_host:dashboard.meraki.com',
        'device_vendor:meraki',
    ]
    common.assert_common_metrics(aggregator, common_tags)

    def expect(name, metric_type, tags):
        # Every series in this recording is expected exactly once.
        aggregator.assert_metric('snmp.{}'.format(name), metric_type=metric_type, tags=tags, count=1)

    # Meraki device-level gauges, tagged with device/product/network.
    device_tags = ['device:Gymnasium', 'product:MR16-HW', 'network:L_NETWORK'] + common_tags
    for name in ('devStatus', 'devClientCount'):
        expect(name, aggregator.GAUGE, device_tags)
    # Meraki per-interface gauges (wifi0, index 4 in the recording).
    wifi_tags = ['interface:wifi0', 'index:4'] + common_tags
    for name in ('devInterfaceSentPkts', 'devInterfaceRecvPkts', 'devInterfaceSentBytes', 'devInterfaceRecvBytes'):
        expect(name, aggregator.GAUGE, wifi_tags)
    # IF-MIB
    eth_tags = ['interface:eth0'] + common_tags
    for name in IF_COUNTS:
        expect(name, aggregator.MONOTONIC_COUNT, eth_tags)
    for name in IF_GAUGES:
        expect(name, aggregator.GAUGE, eth_tags)
    for name in IF_RATES + IF_BANDWIDTH_USAGE:
        expect(name, aggregator.RATE, eth_tags)
    aggregator.assert_metric('snmp.sysUpTimeInstance', count=1)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_idrac(aggregator):
    """Validate the metrics emitted by the `idrac` profile against its recording.

    Covers adapter counters, chassis/system status gauges, power supplies,
    disks, batteries, controllers, PCI devices, slots, network devices, BIOS,
    temperature/voltage probes, memory devices and DRS gauges.
    """
    run_profile_check('idrac')
    interfaces = ['eth0', 'en1']
    common_tags = common.CHECK_TAGS + ['snmp_profile:idrac', 'device_vendor:dell']
    common.assert_common_metrics(aggregator, common_tags)
    for interface in interfaces:
        tags = ['adapter:{}'.format(interface)] + common_tags
        for count in ADAPTER_IF_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(count), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
            )
    indexes = ['26', '29']
    for index in indexes:
        tags = ['chassis_index:{}'.format(index)] + common_tags
        for gauge in SYSTEM_STATUS_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)
    powers = ['supply1', 'supply2']
    for power in powers:
        tags = ['supply_name:{}'.format(power)] + common_tags
        aggregator.assert_metric('snmp.enclosurePowerSupplyState', metric_type=aggregator.GAUGE, tags=tags, count=1)
    disks = ['disk1', 'disk2']
    for disk in disks:
        tags = ['disk_name:{}'.format(disk)] + common_tags
        for gauge in DISK_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)
    batteries = ['battery1', 'battery2']
    for battery_name in batteries:
        tags = ['battery_name:{}'.format(battery_name)] + common_tags
        aggregator.assert_metric('snmp.{}'.format("batteryState"), metric_type=aggregator.GAUGE, tags=tags, count=1)
    controllers = ['controller1', 'controller2']
    for controller in controllers:
        tags = ['controller_name:{}'.format(controller)] + common_tags
        aggregator.assert_metric(
            'snmp.{}'.format("controllerRollUpStatus"), metric_type=aggregator.GAUGE, tags=tags, count=1
        )
    # Devices/slots are paired with their chassis index positionally via zip().
    devices = ['device1', 'device2']
    indexes = ['10', '20']
    for device, index in zip(devices, indexes):
        tags = ['device_descr_name:{}'.format(device), 'chassis_index:{}'.format(index)] + common_tags
        aggregator.assert_metric('snmp.{}'.format("pCIDeviceStatus"), metric_type=aggregator.GAUGE, tags=tags, count=1)
    slots = ['slot1', 'slot2']
    indexes = ['19', '21']
    for slot, index in zip(slots, indexes):
        tags = ['slot_name:{}'.format(slot), 'chassis_index:{}'.format(index)] + common_tags
        aggregator.assert_metric('snmp.{}'.format("systemSlotStatus"), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # (chassis_index, device_fqdd, mac_addr) combinations present in the recording.
    tag_mappings = [('29', 'device2', '0x9e00e0291401'), ('3', 'device1', '0x9e00e0291401')]
    for index, device, mac in tag_mappings:
        tags = [
            'chassis_index:{}'.format(index),
            'device_fqdd:{}'.format(device),
            'mac_addr:{}'.format(mac),
        ] + common_tags
        aggregator.assert_metric(
            'snmp.{}'.format("networkDeviceStatus"), metric_type=aggregator.GAUGE, tags=tags, count=1
        )
    indexes = ['3', '31']
    for index in indexes:
        tags = ['chassis_index:{}'.format(index)] + common_tags
        aggregator.assert_metric('snmp.{}'.format("systemBIOSStatus"), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Temperature probes (probe_type 26) and voltage probes (types 6 and 3).
    indexes = ['9', '18']
    probe_types = ['26', '26']
    for index, probe_type in zip(indexes, probe_types):
        tags = ['chassis_index:{}'.format(index), 'probe_type:{}'.format(probe_type)] + common_tags
        for gauge in PROBE_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)
    indexes = ['12', '22']
    probe_types = ['6', '3']
    for index, probe_type in zip(indexes, probe_types):
        tags = ['chassis_index:{}'.format(index), 'probe_type:{}'.format(probe_type)] + common_tags
        for gauge in VOLTAGE_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)
    indexes = ['29', '22']
    device_types = ['26', '4']
    device_indexes = ['4', '21']
    for index, device_type, device_index in zip(indexes, device_types, device_indexes):
        tags = [
            'chassis_index:{}'.format(index),
            'device_type:{}'.format(device_type),
            'device_index:{}'.format(device_index),
        ] + common_tags
        aggregator.assert_metric(
            'snmp.{}'.format("memoryDeviceStatus"), metric_type=aggregator.GAUGE, tags=tags, count=1
        )
    for gauge in DRS_GAUGES:
        aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_cisco_nexus(aggregator):
    """Validate the metrics emitted by the `cisco-nexus` profile against its recording.

    Covers CIE reset counters, IF-MIB interface metrics, TCP/UDP metrics,
    entity sensors, FRU power, CPU, environment monitors, stack/switch state
    and fan trays.
    """
    profile = "cisco-nexus"
    run_profile_check(profile)
    interfaces = ["GigabitEthernet1/0/{}".format(i) for i in range(1, 9)]
    common_tags = common.CHECK_TAGS + [
        'snmp_host:Nexus-eu1.companyname.managed',
        'snmp_profile:' + profile,
        'device_vendor:cisco',
    ]
    common.assert_common_metrics(aggregator, common_tags)
    for interface in interfaces:
        tags = ['interface:{}'.format(interface)] + common_tags
        aggregator.assert_metric('snmp.cieIfResetCount', metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1)
    # IF-MIB rows carry an (empty) alias tag in this recording.
    for interface in interfaces:
        tags = ['interface:{}'.format(interface), 'interface_alias:'] + common_tags
        for metric in IF_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
            )
        for metric in IF_RATES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=tags, count=1)
        for metric in IF_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
        for metric in IF_BANDWIDTH_USAGE:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=tags, count=1)
    for metric in TCP_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    for metric in TCP_GAUGES:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    for metric in UDP_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    # Sensor id 12 appears twice in the recording; asserting it twice is harmless.
    sensors = [1, 9, 11, 12, 12, 14, 17, 26, 29, 31]
    for sensor in sensors:
        tags = ['sensor_id:{}'.format(sensor), 'sensor_type:8'] + common_tags
        aggregator.assert_metric('snmp.entSensorValue', metric_type=aggregator.GAUGE, tags=tags, count=1)
    frus = [6, 7, 15, 16, 19, 27, 30, 31]
    for fru in frus:
        tags = ['fru:{}'.format(fru)] + common_tags
        for metric in FRU_METRICS:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    cpus = [3173, 6692, 11571, 19529, 30674, 38253, 52063, 54474, 55946, 63960]
    for cpu in cpus:
        tags = ['cpu:{}'.format(cpu)] + common_tags
        for metric in CPU_METRICS:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    for (index, state) in [(3, 3), (6, 6), (8, 6), (11, 6), (13, 3), (14, 6), (20, 6), (21, 4), (31, 5)]:
        aggregator.assert_metric(
            'snmp.ciscoEnvMonTemperatureStatusValue',
            metric_type=aggregator.GAUGE,
            tags=['temp_state:{}'.format(state), 'temp_index:{}'.format(index)] + common_tags,
        )
    power_supply_tags = ['power_source:1', 'power_status_descr:Jaded driving their their their'] + common_tags
    aggregator.assert_metric('snmp.ciscoEnvMonSupplyState', metric_type=aggregator.GAUGE, tags=power_supply_tags)
    fan_indices = [4, 6, 7, 16, 21, 22, 25, 27]
    for index in fan_indices:
        tags = ['fan_status_index:{}'.format(index)] + common_tags
        aggregator.assert_metric('snmp.ciscoEnvMonFanState', metric_type=aggregator.GAUGE, tags=tags)
    aggregator.assert_metric(
        'snmp.cswStackPortOperStatus',
        metric_type=aggregator.GAUGE,
        tags=common_tags + ['interface:GigabitEthernet1/0/1'],
    )
    aggregator.assert_metric(
        'snmp.cswSwitchState', metric_type=aggregator.GAUGE, tags=['mac_addr:0xffffffffffff'] + common_tags
    )
    frus = [2, 7, 8, 21, 26, 27, 30, 31]
    for fru in frus:
        # Fix: pass the precomputed tag list instead of rebuilding it inline
        # (the previous `tags` assignment was dead code).
        tags = ['fru:{}'.format(fru)] + common_tags
        aggregator.assert_metric('snmp.cefcFanTrayOperStatus', metric_type=aggregator.GAUGE, tags=tags)
    aggregator.assert_metric('snmp.sysUpTimeInstance', count=1)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_dell_poweredge(aggregator):
    """Validate the metrics emitted by the `dell-poweredge` profile against its recording.

    The profile aggregates three metric families: PowerEdge system/hardware
    gauges, Intel adapter counters, and iDRAC status gauges.
    """
    run_profile_check('dell-poweredge')
    # Poweredge
    sys_mem_gauges = [
        'operatingSystemMemoryAvailablePhysicalSize',
        'operatingSystemMemoryTotalPageFileSize',
        'operatingSystemMemoryAvailablePageFileSize',
        'operatingSystemMemoryTotalVirtualSize',
        'operatingSystemMemoryAvailableVirtualSize',
    ]
    power_supply_gauges = [
        'powerSupplyStatus',
        'powerSupplyOutputWatts',
        'powerSupplyMaximumInputVoltage',
        'powerSupplyCurrentInputVoltage',
    ]
    temperature_probe_gauges = ['temperatureProbeStatus', 'temperatureProbeReading']
    processor_device_gauges = ['processorDeviceStatus', 'processorDeviceThreadCount']
    cache_device_gauges = ['cacheDeviceStatus', 'cacheDeviceMaximumSize', 'cacheDeviceCurrentSize']
    memory_device_gauges = ['memoryDeviceStatus', 'memoryDeviceFailureModes']
    idrac_gauges = (
        ['batteryState', 'controllerRollUpStatus', 'pCIDeviceStatus', 'systemSlotStatus', 'systemBIOSStatus']
        + VOLTAGE_GAUGES
        + PROBE_GAUGES
    )
    common_tags = common.CHECK_TAGS + [
        'snmp_profile:dell-poweredge',
        'device_vendor:dell',
    ]
    common.assert_common_metrics(aggregator, common_tags)
    # `at_least=1` is used below because several series are reported for more
    # than one row of the recording.
    chassis_indexes = [29, 31]
    for chassis_index in chassis_indexes:
        tags = ['chassis_index:{}'.format(chassis_index)] + common_tags
        for metric in sys_mem_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, at_least=1)
    indexes = [5, 17]
    for index in indexes:
        tags = ['chassis_index:4', 'index:{}'.format(index)] + common_tags
        for metric in power_supply_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, at_least=1)
    indexes = [13]
    for index in indexes:
        tags = ['chassis_index:18', 'index:{}'.format(index)] + common_tags
        for metric in temperature_probe_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, at_least=1)
    indexes = [17, 28]
    for index in indexes:
        tags = ['chassis_index:5', 'index:{}'.format(index)] + common_tags
        for metric in processor_device_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, at_least=1)
    indexes = [15, 27]
    for index in indexes:
        tags = ['chassis_index:11', 'index:{}'.format(index)] + common_tags
        for metric in cache_device_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, at_least=1)
    serial_numbers = ['forward zombies acted Jaded', 'kept oxen their their oxen oxen']
    for serial_number in serial_numbers:
        tags = ['serial_number_name:{}'.format(serial_number), 'chassis_index:1'] + common_tags
        for metric in memory_device_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, at_least=1)
    ip_addresses = ['66.97.1.103', '62.148.76.32', '45.3.243.155']
    for ip_address in ip_addresses:
        tags = ['ip_address:{}'.format(ip_address)] + common_tags
        aggregator.assert_metric('snmp.networkDeviceStatus', metric_type=aggregator.GAUGE, tags=tags, at_least=1)
    # Intel Adapter
    interfaces = ['eth0', 'en1']
    for interface in interfaces:
        tags = ['adapter:{}'.format(interface)] + common_tags
        for count in ADAPTER_IF_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(count), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
            )
    # IDRAC
    indexes = ['26', '29']
    for index in indexes:
        tags = ['chassis_index:{}'.format(index)] + common_tags
        for gauge in SYSTEM_STATUS_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)
    powers = ['supply1', 'supply2']
    for power in powers:
        tags = ['supply_name:{}'.format(power)] + common_tags
        aggregator.assert_metric('snmp.enclosurePowerSupplyState', metric_type=aggregator.GAUGE, tags=tags, count=1)
    disks = ['disk1', 'disk2']
    for disk in disks:
        tags = ['disk_name:{}'.format(disk)] + common_tags
        for gauge in DISK_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # iDRAC gauges are asserted without pinning tags, since their tag sets vary per row.
    for gauge in idrac_gauges:
        aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_hp_ilo4(aggregator):
    """Walk the hp-ilo4 profile fixture and assert every submitted metric.

    Covers the scalar health/status gauges, the CPQHLTH and CPQSM2 MIB
    metrics, per-location network-card counters, per-interface physical
    adapter metrics, temperature/battery sensors, and the physical drive
    table, then checks that no unexpected metrics were emitted.
    """
    profile = "hp-ilo4"
    run_profile_check(profile)
    # Scalar status gauges submitted once with only the common tags.
    status_gauges = [
        'cpqHeCritLogCondition',
        'cpqHeCorrMemLogStatus',
        'cpqHeCorrMemLogCondition',
        'cpqHeAsrStatus',
        'cpqHeAsrPost',
        'cpqHeAsrCondition',
        'cpqHeAsrNetworkAccessStatus',
        'cpqHeThermalCondition',
        'cpqHeThermalTempStatus',
        'cpqHeThermalSystemFanStatus',
        'cpqHeThermalCpuFanStatus',
        'cpqNicVtVirusActivity',
        'cpqSm2CntlrServerPowerState',
        'cpqSm2CntlrBatteryStatus',
        'cpqSm2CntlrRemoteSessionStatus',
        'cpqSm2CntlrInterfaceStatus',
    ]
    cpqhlth_counts = ['cpqHeAsrRebootCount', 'cpqHeCorrMemTotalErrs']
    cpqhlth_gauges = ['cpqHeSysUtilEisaBusMin', 'cpqHePowerMeterCurrReading', 'cpqHeSysUtilLifeTime']
    cpqsm2_gauges = [
        'cpqSm2CntlrBatteryPercentCharged',
        'cpqSm2CntlrSelfTestErrors',
        'cpqSm2EventTotalEntries',
    ]
    # NIC statistics location enum values from the CPQSM2 MIB.
    EMBEDDED = 2
    PCMCIA = 3
    card_locations = [EMBEDDED, PCMCIA]
    network_card_counts = [
        'cpqSm2NicXmitBytes',
        'cpqSm2NicXmitTotalPackets',
        'cpqSm2NicXmitDiscardPackets',
        'cpqSm2NicXmitErrorPackets',
        'cpqSm2NicXmitQueueLength',
        'cpqSm2NicRecvBytes',
        'cpqSm2NicRecvTotalPackets',
        'cpqSm2NicRecvDiscardPackets',
        'cpqSm2NicRecvErrorPackets',
        'cpqSm2NicRecvUnknownPackets',
    ]
    interfaces = ['eth0', 'en1']
    phys_adapter_counts = [
        'cpqNicIfPhysAdapterGoodTransmits',
        'cpqNicIfPhysAdapterGoodReceives',
        'cpqNicIfPhysAdapterBadTransmits',
        'cpqNicIfPhysAdapterBadReceives',
        'cpqNicIfPhysAdapterInOctets',
        'cpqNicIfPhysAdapterOutOctets',
    ]
    phys_adapter_gauges = ['cpqNicIfPhysAdapterSpeed', 'cpqNicIfPhysAdapterSpeedMbps']
    # Row indices expected in the fixture's sensor/battery tables.
    temperature_sensors = [1, 13, 28]
    batteries = [1, 3, 4, 5]
    common_tags = common.CHECK_TAGS + ['snmp_profile:' + profile, 'device_vendor:hp']
    common.assert_common_metrics(aggregator, common_tags)
    for metric in status_gauges:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    for metric in cpqhlth_counts:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    for metric in cpqhlth_gauges:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    for metric in cpqsm2_gauges:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    # Table metrics below add a row-identifying tag on top of the common tags.
    for index in temperature_sensors:
        tags = ['temperature_index:{}'.format(index)] + common_tags
        aggregator.assert_metric('snmp.cpqHeTemperatureCelsius', metric_type=aggregator.GAUGE, tags=tags, count=1)
        aggregator.assert_metric('snmp.cpqHeTemperatureCondition', metric_type=aggregator.GAUGE, tags=tags, count=1)
    for index in batteries:
        tags = ['battery_index:{}'.format(index)] + common_tags
        aggregator.assert_metric('snmp.cpqHeSysBatteryCondition', metric_type=aggregator.GAUGE, tags=tags, count=1)
        aggregator.assert_metric('snmp.cpqHeSysBatteryStatus', metric_type=aggregator.GAUGE, tags=tags, count=1)
    for location in card_locations:
        tags = ['nic_stats_location:{}'.format(location)] + common_tags
        for metric in network_card_counts:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
            )
    for interface in interfaces:
        tags = ['interface:{}'.format(interface)] + common_tags
        for metric in phys_adapter_counts:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
            )
        for metric in phys_adapter_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Physical drive table: counters and gauges keyed by (controller, drive).
    drive_counts = [
        "cpqDaPhyDrvUsedReallocs",
        "cpqDaPhyDrvRefHours",
        "cpqDaPhyDrvHardReadErrs",
        "cpqDaPhyDrvRecvReadErrs",
        "cpqDaPhyDrvHardWriteErrs",
        "cpqDaPhyDrvRecvWriteErrs",
        "cpqDaPhyDrvHSeekErrs",
        "cpqDaPhyDrvSeekErrs",
    ]
    drive_gauges = [
        "cpqDaPhyDrvStatus",
        "cpqDaPhyDrvFactReallocs",
        "cpqDaPhyDrvSpinupTime",
        "cpqDaPhyDrvSize",
        "cpqDaPhyDrvSmartStatus",
        "cpqDaPhyDrvCurrentTemperature",
    ]
    drive_idx = [(0, 2), (0, 28), (8, 31), (9, 24), (9, 28), (10, 17), (11, 4), (12, 20), (18, 22), (23, 2)]
    for drive_cntrl_idx, drive_index in drive_idx:
        tags = ['drive_cntrl_idx:{}'.format(drive_cntrl_idx), "drive_index:{}".format(drive_index)] + common_tags
        for metric in drive_counts:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
            )
        for metric in drive_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True)
@pytest.mark.usefixtures("dd_environment")
def test_proliant(aggregator):
    """Walk the hpe-proliant profile fixture and assert every submitted metric.

    Covers TCP/UDP scalars, per-CPU and per-filesystem gauges, memory module
    and physical drive tables, interface traffic metrics, and assorted
    chassis/power/thermal gauges, then checks full metric coverage.
    """
    run_profile_check('hpe-proliant')
    common_tags = common.CHECK_TAGS + ['snmp_profile:hpe-proliant', 'device_vendor:hp']
    common.assert_common_metrics(aggregator, common_tags)
    for metric in TCP_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    for metric in TCP_GAUGES:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    for metric in UDP_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    # Per-CPU hardware description gauges, tagged by cpu_index.
    cpu_gauges = [
        "cpqSeCpuSlot",
        "cpqSeCpuSpeed",
        "cpqSeCpuStatus",
        "cpqSeCpuExtSpeed",
        "cpqSeCpuCore",
        "cpqSeCPUCoreMaxThreads",
        "cpqSeCpuPrimary",
    ]
    cpu_indexes = [0, 4, 6, 8, 13, 15, 26, 27]
    for idx in cpu_indexes:
        tags = ['cpu_index:{}'.format(idx)] + common_tags
        for metric in cpu_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # CPU utilization windows, tagged by cpu_unit_index (a distinct index set).
    cpu_util_gauges = ["cpqHoCpuUtilMin", "cpqHoCpuUtilFiveMin", "cpqHoCpuUtilThirtyMin", "cpqHoCpuUtilHour"]
    cpu_unit_idx = [4, 7, 13, 20, 22, 23, 29]
    for idx in cpu_unit_idx:
        tags = ['cpu_unit_index:{}'.format(idx)] + common_tags
        for metric in cpu_util_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Filesystem usage table, tagged by file_sys_index.
    file_sys_gauges = [
        "cpqHoFileSysSpaceTotal",
        "cpqHoFileSysSpaceUsed",
        "cpqHoFileSysPercentSpaceUsed",
        "cpqHoFileSysAllocUnitsTotal",
        "cpqHoFileSysAllocUnitsUsed",
        "cpqHoFileSysStatus",
    ]
    file_sys_idx = [5, 8, 11, 15, 19, 21, 28, 30]
    for idx in file_sys_idx:
        tags = ['file_sys_index:{}'.format(idx)] + common_tags
        for metric in file_sys_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Memory module table keyed by (board index, module index).
    memory_gauges = [
        "cpqSiMemModuleSize",
        "cpqSiMemModuleType",
        "cpqSiMemModuleSpeed",
        "cpqSiMemModuleTechnology",
        "cpqSiMemModuleECCStatus",
        "cpqSiMemModuleFrequency",
        "cpqSiMemModuleCellStatus",
    ]
    memory_idx = [(6, 16), (7, 17), (7, 30), (8, 20), (10, 4), (15, 27), (20, 14), (21, 14), (23, 0), (28, 20)]
    for board_idx, mem_module_index in memory_idx:
        tags = ['mem_board_index:{}'.format(board_idx), "mem_module_index:{}".format(mem_module_index)] + common_tags
        for metric in memory_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Physical drive table keyed by (controller, drive).
    drive_counts = [
        "cpqDaPhyDrvUsedReallocs",
        "cpqDaPhyDrvRefHours",
        "cpqDaPhyDrvHardReadErrs",
        "cpqDaPhyDrvRecvReadErrs",
        "cpqDaPhyDrvHardWriteErrs",
        "cpqDaPhyDrvRecvWriteErrs",
        "cpqDaPhyDrvHSeekErrs",
        "cpqDaPhyDrvSeekErrs",
    ]
    drive_gauges = [
        "cpqDaPhyDrvStatus",
        "cpqDaPhyDrvFactReallocs",
        "cpqDaPhyDrvSpinupTime",
        "cpqDaPhyDrvSize",
        "cpqDaPhyDrvSmartStatus",
        "cpqDaPhyDrvCurrentTemperature",
    ]
    drive_idx = [(0, 2), (0, 28), (8, 31), (9, 24), (9, 28), (10, 17), (11, 4), (12, 20), (18, 22), (23, 2)]
    for drive_cntrl_idx, drive_index in drive_idx:
        tags = ['drive_cntrl_idx:{}'.format(drive_cntrl_idx), "drive_index:{}".format(drive_index)] + common_tags
        for metric in drive_counts:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
            )
        for metric in drive_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Interface traffic metrics, tagged by interface name and alias.
    interfaces = [
        ('eth0', 'quaintly zombies quaintly forward'),
        ('eth1', 'quaintly but quaintly quaintly'),
    ]
    for interface, desc in interfaces:
        if_tags = ['interface:{}'.format(interface), 'interface_alias:{}'.format(desc)] + common_tags
        for metric in IF_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=if_tags, count=1
            )
        for metric in IF_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=if_tags, count=1)
        for metric in IF_RATES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=if_tags, count=1)
        for metric in IF_BANDWIDTH_USAGE:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=if_tags, count=1)
    mem_boards = ['11', '12']
    for board in mem_boards:
        tags = ['mem_board_index:{}'.format(board)] + common_tags
        aggregator.assert_metric('snmp.cpqHeResMem2ModuleCondition', metric_type=aggregator.GAUGE, tags=tags, count=1)
    adapter_gauges = ['cpqNicIfPhysAdapterStatus', 'cpqNicIfPhysAdapterState']
    for gauge in adapter_gauges:
        tags = ['adapter_name:adapter', 'adapter_mac_addr:mac'] + common_tags
        aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)
    power_metrics = [
        'cpqHeFltTolPowerSupplyStatus',
        'cpqHeFltTolPowerSupplyCapacityUsed',
        'cpqHeFltTolPowerSupplyCapacityMaximum',
    ]
    for gauge in power_metrics:
        tags = ['chassis_num:30'] + common_tags
        aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)
    controller_index = ['controller_index:3'] + common_tags
    aggregator.assert_metric(
        'snmp.{}'.format("cpqDaCntlrCondition"), metric_type=aggregator.GAUGE, tags=controller_index, count=1
    )
    thermal_metrics = ['cpqHeThermalCondition', 'cpqHeSysUtilLifeTime', 'cpqHeFltTolPwrSupplyStatus']
    for metric in thermal_metrics:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True)
@pytest.mark.usefixtures("dd_environment")
def test_generic_host_resources(aggregator):
    """Run the check against the generic host-resources profile and assert
    the HOST-RESOURCES-MIB system and storage metrics it submits.
    """
    instance = common.generate_instance_config([])
    instance['community_string'] = 'generic_host'
    instance['enforce_mib_constraints'] = False
    instance['profile'] = 'generic'
    init_config = {'profiles': {'generic': {'definition_file': '_generic-host-resources.yaml'}}}
    check = SnmpCheck('snmp', init_config, [instance])
    check.check(instance)

    expected_tags = common.CHECK_TAGS + ['snmp_profile:generic']
    common.assert_common_metrics(aggregator, expected_tags)

    # hrSystem scalar gauges: one point each, carrying the profile tags.
    for name in (
        'snmp.hrSystemUptime',
        'snmp.hrSystemNumUsers',
        'snmp.hrSystemProcesses',
        'snmp.hrSystemMaxProcesses',
    ):
        aggregator.assert_metric(name, metric_type=aggregator.GAUGE, tags=expected_tags, count=1)

    # Table-backed metrics: the fixture has two rows, so two points each.
    for name in (
        'snmp.hrStorageAllocationUnits',
        'snmp.hrStorageSize',
        'snmp.hrStorageUsed',
        'snmp.hrStorageAllocationFailures',
        'snmp.hrProcessorLoad',
    ):
        aggregator.assert_metric(name, count=2)

    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_palo_alto(aggregator):
    """Run the palo-alto profile and assert its session, GlobalProtect,
    entity power and FRU module metrics.
    """
    profile = "palo-alto"
    run_profile_check(profile)
    common_tags = common.CHECK_TAGS + [
        'snmp_profile:' + profile,
        'device_vendor:paloaltonetworks',
    ]
    common.assert_common_metrics(aggregator, common_tags)

    # All of these are plain scalar gauges submitted exactly once with the
    # common tags: session stats, GlobalProtect gateway utilization, and
    # entity power totals.
    scalar_gauges = [
        'panSessionUtilization',
        'panSessionMax',
        'panSessionActive',
        'panSessionActiveTcp',
        'panSessionActiveUdp',
        'panSessionActiveICMP',
        'panSessionActiveSslProxy',
        'panSessionSslProxyUtilization',
        'panGPGWUtilizationPct',
        'panGPGWUtilizationMaxTunnels',
        'panGPGWUtilizationActiveTunnels',
        'panEntityTotalPowerAvail',
        'panEntityTotalPowerUsed',
    ]
    for metric in scalar_gauges:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)

    # Needs cross table entPhysicalIsFRU tag, so no exact count is asserted.
    for metric in ('panEntryFRUModulePowerUsed', 'panEntryFRUModuleNumPorts'):
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags)

    # Needs cross table entLogicalDescr tag
    aggregator.assert_metric('snmp.panEntryFanTrayPowerUsed', metric_type=aggregator.GAUGE, tags=common_tags)

    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_cisco_asa_all(aggregator):
    """Exercise the generic cisco-asa profile via the shared assertion helper."""
    assert_cisco_asa(aggregator, "cisco-asa")
@pytest.mark.usefixtures("dd_environment")
def test_cisco_asa_5525(aggregator):
    """Exercise the model-specific cisco-asa-5525 profile via the shared helper."""
    assert_cisco_asa(aggregator, "cisco-asa-5525")
def assert_cisco_asa(aggregator, profile):
    """Shared assertions for the Cisco ASA profiles.

    Used by both the generic ``cisco-asa`` profile test and model-specific
    ones (e.g. ``cisco-asa-5525``): runs the check for *profile* and asserts
    TCP/UDP scalars, interface metrics, FRU/CPU/sensor tables, firewall and
    remote-access stats, environmental monitoring, and RTT (IP SLA) probes.
    """
    run_profile_check(profile)
    common_tags = common.CHECK_TAGS + [
        'snmp_profile:' + profile,
        'snmp_host:kept',
        'device_vendor:cisco',
    ]
    common.assert_common_metrics(aggregator, common_tags)
    for metric in TCP_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    for metric in TCP_GAUGES:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    for metric in UDP_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    # Interface traffic metrics, tagged by interface name.
    if_tags = ['interface:eth0'] + common_tags
    for metric in IF_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=if_tags, count=1
        )
    for metric in IF_GAUGES:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=if_tags, count=1)
    for metric in IF_RATES:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=if_tags, count=1)
    for metric in IF_BANDWIDTH_USAGE:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=if_tags, count=1)
    aggregator.assert_metric('snmp.cieIfResetCount', metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1)
    # FRU (field-replaceable unit) metrics, one set per FRU index.
    frus = [3, 4, 5, 7, 16, 17, 24, 25]
    for fru in frus:
        tags = ['fru:{}'.format(fru)] + common_tags
        for metric in FRU_METRICS:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    cpus = [7746]
    for cpu in cpus:
        tags = ['cpu:{}'.format(cpu)] + common_tags
        for metric in CPU_METRICS:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    sensor_tags = ['sensor_id:31', 'sensor_type:9'] + common_tags
    aggregator.assert_metric('snmp.entPhySensorValue', metric_type=aggregator.GAUGE, tags=sensor_tags, count=1)
    # Firewall connection statistics, tagged by (service, stat) type pair.
    stat_tags = [(20, 2), (5, 5)]
    for (svc, stat) in stat_tags:
        aggregator.assert_metric(
            'snmp.cfwConnectionStatValue',
            metric_type=aggregator.GAUGE,
            tags=['stat_type:{}'.format(stat), 'service_type:{}'.format(svc)] + common_tags,
        )
    # Remote-access (RAS) session metrics.
    aggregator.assert_metric('snmp.crasNumDeclinedSessions', metric_type=aggregator.GAUGE, tags=common_tags)
    aggregator.assert_metric('snmp.crasNumSessions', metric_type=aggregator.GAUGE, tags=common_tags)
    aggregator.assert_metric('snmp.crasNumUsers', metric_type=aggregator.GAUGE, tags=common_tags)
    aggregator.assert_metric(
        'snmp.crasNumSetupFailInsufResources', metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags
    )
    # IPsec global tunnel metrics.
    aggregator.assert_metric('snmp.cipSecGlobalActiveTunnels', metric_type=aggregator.GAUGE, tags=common_tags)
    aggregator.assert_metric('snmp.cipSecGlobalHcInOctets', metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags)
    aggregator.assert_metric('snmp.cipSecGlobalHcOutOctets', metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags)
    # Environmental monitoring: temperature, power supply and fan state.
    for (index, state) in [(3, 3), (6, 6), (8, 6), (11, 6), (13, 3), (14, 6), (20, 6), (21, 4), (31, 5)]:
        aggregator.assert_metric(
            'snmp.ciscoEnvMonTemperatureStatusValue',
            metric_type=aggregator.GAUGE,
            tags=['temp_state:{}'.format(state), 'temp_index:{}'.format(index)] + common_tags,
        )
    power_supply_tags = ['power_source:1', 'power_status_descr:Jaded driving their their their'] + common_tags
    aggregator.assert_metric('snmp.ciscoEnvMonSupplyState', metric_type=aggregator.GAUGE, tags=power_supply_tags)
    fan_indices = [4, 6, 7, 16, 21, 22, 25, 27]
    for index in fan_indices:
        tags = ['fan_status_index:{}'.format(index)] + common_tags
        aggregator.assert_metric('snmp.ciscoEnvMonFanState', metric_type=aggregator.GAUGE, tags=tags)
    aggregator.assert_metric('snmp.cswStackPortOperStatus', metric_type=aggregator.GAUGE)
    aggregator.assert_metric(
        'snmp.cswSwitchState', metric_type=aggregator.GAUGE, tags=['mac_addr:0xffffffffffff'] + common_tags
    )
    # Fan tray status per FRU (note: a different FRU index set from above).
    frus = [2, 7, 8, 21, 26, 27, 30, 31]
    for fru in frus:
        aggregator.assert_metric(
            'snmp.cefcFanTrayOperStatus', metric_type=aggregator.GAUGE, tags=['fru:{}'.format(fru)] + common_tags
        )
    for metric in MEMORY_METRICS:
        tags = ['mem_pool_name:test_pool'] + common_tags
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags)
    for conn in [1, 2, 5]:
        conn_tags = ['connection_type:{}'.format(conn)] + common_tags
        aggregator.assert_metric('snmp.cfwConnectionStatCount', metric_type=aggregator.RATE, tags=conn_tags)
    hardware_tags = [(3, 'Secondary unit'), (5, 'Primary unit'), (6, 'Failover LAN Interface')]
    for (htype, hdesc) in hardware_tags:
        aggregator.assert_metric(
            'snmp.cfwHardwareStatusValue',
            metric_type=aggregator.GAUGE,
            tags=['hardware_type:{}'.format(htype), 'hardware_desc:{}'.format(hdesc)] + common_tags,
        )
    for switch in [4684, 4850, 8851, 9997, 15228, 16580, 24389, 30813, 36264]:
        aggregator.assert_metric(
            'snmp.cvsChassisUpTime',
            metric_type=aggregator.GAUGE,
            tags=['chassis_switch_id:{}'.format(switch)] + common_tags,
        )
    aggregator.assert_metric('snmp.sysUpTimeInstance', count=1)
    # RTT (IP SLA) probes: index/type/state lists are aligned element-wise.
    rtt_indexes = [1, 7, 10, 13, 15, 18, 20]
    rtt_types = [22, 21, 17, 6, 20, 8, 16]
    rtt_states = [3, 1, 6, 4, 6, 1, 6]
    rtt_gauges = ['rttMonLatestRttOperCompletionTime', 'rttMonLatestRttOperSense', 'rttMonCtrlOperTimeoutOccurred']
    for rtt_index, rtt_type, rtt_state in zip(rtt_indexes, rtt_types, rtt_states):
        tags = [
            "rtt_index:{}".format(rtt_index),
            "rtt_type:{}".format(rtt_type),
            "rtt_state:{}".format(rtt_state),
        ] + common_tags
        for rtt in rtt_gauges:
            aggregator.assert_metric('snmp.{}'.format(rtt), metric_type=aggregator.GAUGE, tags=tags)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_cisco_csr(aggregator):
    """Run the cisco-csr1000v profile and assert its BGP peer metrics."""
    run_profile_check('cisco-csr1000v')
    base_tags = common.CHECK_TAGS + [
        'snmp_profile:cisco-csr1000v',
        'device_vendor:cisco',
    ]
    common.assert_common_metrics(aggregator, base_tags)

    # Every peer metric carries the neighbor IP tag; gauges and rates only
    # differ in submission type.
    peer_tags = ['neighbor:244.12.239.177'] + base_tags
    for submission_type, names in ((aggregator.GAUGE, PEER_GAUGES), (aggregator.RATE, PEER_RATES)):
        for name in names:
            aggregator.assert_metric('snmp.{}'.format(name), metric_type=submission_type, tags=peer_tags)

    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics())
@pytest.mark.usefixtures("dd_environment")
def test_checkpoint_firewall(aggregator):
    """Run the checkpoint-firewall profile and assert CPU, memory, disk,
    appliance sensor and firewall metrics.
    """
    run_profile_check('checkpoint-firewall')
    common_tags = common.CHECK_TAGS + [
        'snmp_profile:checkpoint-firewall',
        'device_vendor:checkpoint',
    ]
    common.assert_common_metrics(aggregator, common_tags)

    # Per-core CPU time/usage gauges.
    for core in (7097, 13039, 13761, 28994, 29751, 33826, 40053, 48847, 61593, 65044):
        core_tags = ['cpu_core:{}'.format(core)] + common_tags
        for metric in ('multiProcUserTime', 'multiProcSystemTime', 'multiProcIdleTime', 'multiProcUsage'):
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=core_tags)
    aggregator.assert_metric('snmp.procNum', metric_type=aggregator.GAUGE, tags=common_tags)

    # Memory gauges carry only the common tags.
    for metric in ('memTotalReal64', 'memActiveReal64', 'memFreeReal64', 'memTotalVirtual64', 'memActiveVirtual64'):
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags)

    disk_metrics = [
        'multiDiskSize',
        'multiDiskUsed',
        'multiDiskFreeTotalBytes',
        'multiDiskFreeAvailableBytes',
        'multiDiskFreeTotalPercent',
        'multiDiskFreeAvailablePercent',
    ]
    appliance_metrics = [
        # 'temperture' (sic): the misspelling comes from the vendor MIB.
        'fanSpeedSensorValue',
        'fanSpeedSensorStatus',
        'tempertureSensorValue',
        'tempertureSensorStatus',
    ]
    # The disk and appliance sensor tables share the same ten row names,
    # indexed 0..9.
    row_names = ['first', 'second', 'third', 'fourth', 'fifth', 'sixth', 'seventh', 'eighth', 'ninth', 'tenth']
    for idx, row_name in enumerate(row_names):
        disk_tags = ['disk_index:{}'.format(idx), 'disk_name:{}'.format(row_name)] + common_tags
        for metric in disk_metrics:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=disk_tags)
        sensor_tags = ['sensor_index:{}'.format(idx), 'sensor_name:{}'.format(row_name)] + common_tags
        for metric in appliance_metrics:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=sensor_tags)

    # Firewall packet counters and connection gauges.
    for metric in ('fwAccepted', 'fwDropped', 'fwRejected'):
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags)
    for metric in ('fwNumConn', 'fwPeakNumConn'):
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags)

    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_arista(aggregator):
    """Run the arista profile and assert queue-drop counters and physical
    sensor metrics.
    """
    run_profile_check('arista')
    common_tags = common.CHECK_TAGS + ['snmp_profile:arista', 'device_vendor:arista']
    common.assert_common_metrics(aggregator, common_tags)

    # Egress/ingress queue drop counters, tagged by interface and queue index.
    queue_drop_cases = [
        ('snmp.aristaEgressQueuePktsDropped', 13, 10),
        ('snmp.aristaEgressQueuePktsDropped', 28, 22),
        ('snmp.aristaIngressQueuePktsDropped', 7, 25),
        ('snmp.aristaIngressQueuePktsDropped', 8, 24),
    ]
    for metric, interface_index, queue_index in queue_drop_cases:
        aggregator.assert_metric(
            metric,
            metric_type=aggregator.MONOTONIC_COUNT,
            tags=common_tags + ['interface_index:{}'.format(interface_index), 'queue_index:{}'.format(queue_index)],
            count=1,
        )

    # Physical sensor value and status per (sensor id, sensor type) pair.
    for sensor_id, sensor_type in ((1, 11), (7, 8)):
        sensor_tags = ['sensor_id:{}'.format(sensor_id), 'sensor_type:{}'.format(sensor_type)] + common_tags
        aggregator.assert_metric('snmp.entPhySensorValue', metric_type=aggregator.GAUGE, tags=sensor_tags, count=1)
        aggregator.assert_metric('snmp.entPhySensorOperStatus', metric_type=aggregator.GAUGE, tags=sensor_tags, count=1)

    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_aruba(aggregator):
    """Run the aruba profile and assert chassis component, memory and OSPF
    metrics.
    """
    run_profile_check('aruba')
    common_tags = common.CHECK_TAGS + ['snmp_profile:aruba', 'device_vendor:aruba']
    common.assert_common_metrics(aggregator, common_tags)

    # Chassis component status gauges: one metric per component kind, each
    # tagged with its own index tag key.
    component_cases = [
        ('snmp.sysExtFanStatus', 'fan_index', (18, 28)),
        ('snmp.sysExtPowerSupplyStatus', 'powersupply_index', (1, 17)),
        ('snmp.sysExtProcessorLoad', 'processor_index', (11, 26)),
    ]
    for metric, tag_key, indices in component_cases:
        for index in indices:
            tags = common_tags + ['{}:{}'.format(tag_key, index)]
            aggregator.assert_metric(metric, metric_type=aggregator.GAUGE, tags=tags, count=1)

    # Memory size/used/free per memory bank.
    for mem in (3, 20):
        mem_tags = common_tags + ['memory_index:{}'.format(mem)]
        for metric in ('snmp.sysExtMemorySize', 'snmp.sysExtMemoryUsed', 'snmp.sysExtMemoryFree'):
            aggregator.assert_metric(metric, metric_type=aggregator.GAUGE, tags=mem_tags, count=1)
    aggregator.assert_metric(
        'snmp.wlsxSysExtPacketLossPercent', metric_type=aggregator.GAUGE, tags=common_tags, count=1
    )

    # OSPF metrics
    nbr_tags = ['neighbor_ip:192.29.116.26', 'neighbor_id:192.29.66.79'] + common_tags
    for metric, metric_type in (
        ('ospfNbrEvents', aggregator.RATE),
        ('ospfNbrState', aggregator.GAUGE),
        ('ospfNbrLsRetransQLen', aggregator.GAUGE),
    ):
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=metric_type, tags=nbr_tags, count=1)

    for metric, metric_type in (
        ('ospfVirtNbrState', aggregator.GAUGE),
        ('ospfVirtNbrEvents', aggregator.RATE),
        ('ospfVirtNbrLsRetransQLen', aggregator.GAUGE),
    ):
        for ip, nbr in (('74.210.82.1', '194.154.66.112'), ('122.226.86.1', '184.201.101.140')):
            tags = ['neighbor_ip:{}'.format(ip), 'neighbor_id:{}'.format(nbr)] + common_tags
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=metric_type, tags=tags, count=1)

    # Link-local signaling metrics per OSPF interface address.
    for metric in ('ospfIfRetransInterval', 'ospfIfState', 'ospfIfLsaCount'):
        for ip, nbr in (('58.115.169.188', '192.29.66.79'), ('18.2.8.29', '118.246.193.247')):
            tags = ['ospf_ip_addr:{}'.format(ip), 'neighbor_id:{}'.format(nbr)] + common_tags
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)

    for metric in ('ospfVirtIfRetransInterval', 'ospfVirtIfState', 'ospfVirtIfLsaCount'):
        for nbr in ('194.154.66.112', '184.201.101.140'):
            tags = ['neighbor_id:{}'.format(nbr)] + common_tags
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)

    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_chatsworth(aggregator):
    """Walk the chatsworth_pdu profile fixture and assert every submitted
    metric: the legacy (pre-eConnect) PDU metrics and the newer per-PDU,
    per-lock, per-sensor, per-line, per-branch and per-outlet metrics.
    """
    profile = "chatsworth_pdu"
    run_profile_check(profile)
    # Legacy global tags are applied to all metrics
    legacy_global_tags = [
        'legacy_pdu_macaddress:00:0E:D3:AA:CC:EE',
        'legacy_pdu_model:P10-1234-ABC',
        'legacy_pdu_name:legacy-name1',
        'legacy_pdu_version:1.2.3',
    ]
    common_tags = common.CHECK_TAGS + legacy_global_tags + ['snmp_profile:' + profile, 'device_vendor:chatsworth']
    common.assert_common_metrics(aggregator, common_tags)
    # Legacy metrics
    legacy_pdu_tags = common_tags
    legacy_pdu_gauge_metrics = [
        'snmp.pduRole',
        'snmp.outOfService',
    ]
    legacy_pdu_monotonic_count_metrics = []
    # Metric names are generated: 3 lines, 2 probes x 3 phase pairs (xy/yz/zx),
    # and 24 outlets -- mirroring the legacy MIB's flattened naming scheme.
    for line in range(1, 4):
        legacy_pdu_gauge_metrics.append('snmp.line{}curr'.format(line))
    for branch in range(1, 3):
        legacy_pdu_gauge_metrics.append('snmp.temperatureProbe{}'.format(branch))
        legacy_pdu_gauge_metrics.append('snmp.humidityProbe{}'.format(branch))
        for xyz in ['xy', 'yz', 'zx']:
            legacy_pdu_monotonic_count_metrics.append('snmp.energy{}{}s'.format(xyz, branch))
            legacy_pdu_gauge_metrics.append('snmp.voltage{}{}'.format(xyz, branch))
            legacy_pdu_gauge_metrics.append('snmp.power{}{}'.format(xyz, branch))
            legacy_pdu_gauge_metrics.append('snmp.powerFact{}{}'.format(xyz, branch))
            legacy_pdu_gauge_metrics.append('snmp.current{}{}'.format(xyz, branch))
    for branch in range(1, 25):
        legacy_pdu_monotonic_count_metrics.append('snmp.receptacleEnergyoutlet{}s'.format(branch))
        legacy_pdu_gauge_metrics.append('snmp.outlet{}Current'.format(branch))
    for metric in legacy_pdu_gauge_metrics:
        aggregator.assert_metric(metric, metric_type=aggregator.GAUGE, tags=legacy_pdu_tags, count=1)
    for metric in legacy_pdu_monotonic_count_metrics:
        aggregator.assert_metric(metric, metric_type=aggregator.MONOTONIC_COUNT, tags=legacy_pdu_tags, count=1)
    # New metrics
    pdu_tags = common_tags + [
        'pdu_cabinetid:cab1',
        'pdu_ipaddress:42.2.210.224',
        'pdu_macaddress:0x00249b3503f6',
        'pdu_model:model1',
        'pdu_name:name1',
        'pdu_version:v1.1',
    ]
    aggregator.assert_metric('snmp.cpiPduNumberBranches', metric_type=aggregator.GAUGE, tags=pdu_tags, count=1)
    aggregator.assert_metric('snmp.cpiPduNumberOutlets', metric_type=aggregator.GAUGE, tags=pdu_tags, count=1)
    aggregator.assert_metric('snmp.cpiPduOutOfService', metric_type=aggregator.GAUGE, tags=pdu_tags, count=1)
    aggregator.assert_metric('snmp.cpiPduUpgrade', metric_type=aggregator.GAUGE, tags=pdu_tags, count=1)
    aggregator.assert_metric('snmp.cpiPduChainRole', metric_type=aggregator.GAUGE, tags=pdu_tags, count=1)
    aggregator.assert_metric('snmp.cpiPduTotalPower', metric_type=aggregator.GAUGE, tags=pdu_tags, count=1)
    # Electronic access (door/lock) status per lock.
    for lock in [1, 2]:
        lock_tags = common_tags + ['lock_id:{}'.format(lock)]
        aggregator.assert_metric('snmp.cpiPduEasStatus', metric_type=aggregator.GAUGE, tags=lock_tags, count=1)
        aggregator.assert_metric('snmp.cpiPduDoorStatus', metric_type=aggregator.GAUGE, tags=lock_tags, count=1)
        aggregator.assert_metric('snmp.cpiPduLockStatus', metric_type=aggregator.GAUGE, tags=lock_tags, count=1)
    for (sensor_name, sensor_index) in [('sensor1', 4), ('sensor2', 6)]:
        sensor_tags = common_tags + [
            'sensor_index:{}'.format(sensor_index),
            'sensor_name:{}'.format(sensor_name),
            'sensor_type:1',
        ]
        aggregator.assert_metric('snmp.cpiPduSensorValue', metric_type=aggregator.GAUGE, tags=sensor_tags, count=1)
    for line in [6, 18]:
        line_tags = common_tags + ['line_id:{}'.format(line)]
        aggregator.assert_metric('snmp.cpiPduLineCurrent', metric_type=aggregator.GAUGE, tags=line_tags, count=1)
    # Branch metrics: the full set is submitted for PDU "name1"; PDU "name2"
    # only reports power factor and energy.
    for branch in [1, 17]:
        branch_tags = common_tags + ['branch_id:{}'.format(branch), 'pdu_name:name1']
        aggregator.assert_metric('snmp.cpiPduBranchCurrent', metric_type=aggregator.GAUGE, tags=branch_tags, count=1)
        aggregator.assert_metric('snmp.cpiPduBranchMaxCurrent', metric_type=aggregator.GAUGE, tags=branch_tags, count=1)
        aggregator.assert_metric('snmp.cpiPduBranchVoltage', metric_type=aggregator.GAUGE, tags=branch_tags, count=1)
        aggregator.assert_metric('snmp.cpiPduBranchPower', metric_type=aggregator.GAUGE, tags=branch_tags, count=1)
        aggregator.assert_metric(
            'snmp.cpiPduBranchPowerFactor', metric_type=aggregator.GAUGE, tags=branch_tags, count=1
        )
        aggregator.assert_metric('snmp.cpiPduBranchStatus', metric_type=aggregator.GAUGE, tags=branch_tags, count=1)
        aggregator.assert_metric(
            'snmp.cpiPduBranchEnergy', metric_type=aggregator.MONOTONIC_COUNT, tags=branch_tags, count=1
        )
    for branch in [1]:
        branch_tags = common_tags + ['branch_id:{}'.format(branch), 'pdu_name:name2']
        aggregator.assert_metric(
            'snmp.cpiPduBranchPowerFactor', metric_type=aggregator.GAUGE, tags=branch_tags, count=1
        )
        aggregator.assert_metric(
            'snmp.cpiPduBranchEnergy', metric_type=aggregator.MONOTONIC_COUNT, tags=branch_tags, count=1
        )
    for (outlet_id, outlet_branch, outlet_name) in [(7, 29, 'outlet1'), (16, 23, 'outlet2')]:
        outlet_tags = common_tags + [
            'outlet_id:{}'.format(outlet_id),
            'outlet_branchid:{}'.format(outlet_branch),
            'outlet_name:{}'.format(outlet_name),
        ]
        aggregator.assert_metric('snmp.cpiPduOutletCurrent', metric_type=aggregator.GAUGE, tags=outlet_tags, count=1)
        aggregator.assert_metric('snmp.cpiPduOutletVoltage', metric_type=aggregator.GAUGE, tags=outlet_tags, count=1)
        aggregator.assert_metric('snmp.cpiPduOutletPower', metric_type=aggregator.GAUGE, tags=outlet_tags, count=1)
        aggregator.assert_metric('snmp.cpiPduOutletStatus', metric_type=aggregator.GAUGE, tags=outlet_tags, count=1)
        aggregator.assert_metric(
            'snmp.cpiPduOutletEnergy', metric_type=aggregator.MONOTONIC_COUNT, tags=outlet_tags, count=1
        )
    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True)
@pytest.mark.usefixtures("dd_environment")
def test_isilon(aggregator):
    """Check that the Isilon profile submits the expected metrics with the expected tags."""
    run_profile_check('isilon')

    common_tags = common.CHECK_TAGS + [
        'snmp_profile:isilon',
        'cluster_name:testcluster1',
        'node_name:node1',
        'node_type:1',
        'device_vendor:dell',
    ]

    common.assert_common_metrics(aggregator, common_tags)

    # Per-quota rates, tagged with the quota id and type from the fixture data.
    quota_ids_types = [
        (422978632, 1),
        (153533730, 5),
        (3299369987, 4),
        (2149993012, 3),
        (1424325378, 1),
        (4245321451, 0),
        (2328145711, 1),
        (1198032230, 4),
        (1232918362, 1),
        (1383990869, 1),
    ]
    for quota_metric in ['quotaHardThreshold', 'quotaSoftThreshold', 'quotaUsage', 'quotaAdvisoryThreshold']:
        for quota_id, quota_type in quota_ids_types:
            quota_tags = ['quota_id:{}'.format(quota_id), 'quota_type:{}'.format(quota_type)] + common_tags
            aggregator.assert_metric(
                'snmp.{}'.format(quota_metric), metric_type=aggregator.RATE, tags=quota_tags, count=1
            )

    # Per-protocol gauges for the two test protocols in the recording.
    for protocol_metric in ['protocolOpsPerSecond', 'latencyMin', 'latencyMax', 'latencyAverage']:
        for proto_num in (1, 2):
            proto_tags = ['protocol_name:testprotocol{}'.format(proto_num)] + common_tags
            aggregator.assert_metric(
                'snmp.{}'.format(protocol_metric), metric_type=aggregator.GAUGE, tags=proto_tags, count=1
            )

    # Cluster-level health and throughput.
    aggregator.assert_metric('snmp.clusterHealth', metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    for cluster_rate in ['clusterIfsInBytes', 'clusterIfsOutBytes']:
        aggregator.assert_metric(
            'snmp.{}'.format(cluster_rate), metric_type=aggregator.RATE, tags=common_tags, count=1
        )

    # Node-level health and throughput.
    aggregator.assert_metric('snmp.nodeHealth', metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    for node_rate in ['nodeIfsOutBytes', 'nodeIfsInBytes']:
        aggregator.assert_metric('snmp.{}'.format(node_rate), metric_type=aggregator.RATE, tags=common_tags, count=1)

    # One fan-speed gauge per fan number.
    for fan_number in (4, 6, 10, 11, 14, 21, 22, 23, 25, 30):
        fan_tags = ['fan_name:testfan', 'fan_number:{}'.format(fan_number)] + common_tags
        aggregator.assert_metric('snmp.fanSpeed', metric_type=aggregator.GAUGE, tags=fan_tags, count=1)

    # Disk size, tagged by disk status and bay (no explicit count in the original assertion).
    for disk_status, disk_bay in (('SMARTFAIL', 1), ('HEALTHY', 2), ('DEAD', 3)):
        disk_tags = common_tags + ['disk_status:{}'.format(disk_status), 'disk_bay:{}'.format(disk_bay)]
        aggregator.assert_metric('snmp.diskSizeBytes', metric_type=aggregator.RATE, tags=disk_tags)

    # Filesystem capacity and usage.
    aggregator.assert_metric('snmp.ifsUsedBytes', metric_type=aggregator.RATE, tags=common_tags, count=1)
    aggregator.assert_metric('snmp.ifsTotalBytes', metric_type=aggregator.RATE, tags=common_tags, count=1)
@pytest.mark.usefixtures("dd_environment")
def test_apc_ups(aggregator):
    """Check that the APC UPS profile submits the expected metrics with the expected tags."""
    run_profile_check('apc_ups')

    tags = common.CHECK_TAGS + [
        'snmp_profile:apc_ups',
        'model:APC Smart-UPS 600',
        'firmware_version:2.0.3-test',
        'serial_num:test_serial',
        'ups_name:testIdentName',
        'device_vendor:apc',
    ]

    common.assert_common_metrics(aggregator, tags)

    # Device-level gauges carrying only the profile tags.
    for gauge in [
        'upsAdvBatteryNumOfBadBattPacks',
        'upsAdvBatteryReplaceIndicator',
        'upsAdvBatteryRunTimeRemaining',
        'upsAdvBatteryTemperature',
        'upsAdvBatteryCapacity',
        'upsHighPrecInputFrequency',
        'upsHighPrecInputLineVoltage',
        'upsHighPrecOutputCurrent',
        'upsAdvInputLineFailCause',
        'upsAdvOutputLoad',
        'upsBasicBatteryTimeOnBattery',
        'upsAdvTestDiagnosticsResults',
    ]:
        aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)

    # Outlet-group state has an additional outlet_group_name tag.
    aggregator.assert_metric(
        'snmp.upsOutletGroupStatusGroupState',
        metric_type=aggregator.GAUGE,
        tags=['outlet_group_name:test_outlet'] + tags,
    )

    # Each output-state flag is exposed as its own 0/1 gauge; expected values come from the recording.
    output_state_flags = [
        ('AVRTrimActive', 1),
        ('BatteriesDischarged', 1),
        ('LowBatteryOnBattery', 1),
        ('NoBatteriesAttached', 1),
        ('OnLine', 0),
        ('ReplaceBattery', 1),
        ('On', 1),
    ]
    for flag, expected_value in output_state_flags:
        aggregator.assert_metric(
            'snmp.upsBasicStateOutputState.{}'.format(flag),
            expected_value,
            metric_type=aggregator.GAUGE,
            tags=tags,
            count=1,
        )

    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_fortinet_fortigate(aggregator):
    """Check that the Fortinet FortiGate profile submits the expected metrics with the expected tags."""
    run_profile_check('fortinet-fortigate')

    common_tags = common.CHECK_TAGS + [
        'snmp_profile:fortinet-fortigate',
        'device_vendor:fortinet',
    ]

    common.assert_common_metrics(aggregator, common_tags)

    # System-wide gauges carrying only the profile tags.
    for sys_gauge in [
        'fgSysCpuUsage',
        'fgSysMemUsage',
        'fgSysMemCapacity',
        'fgSysLowMemUsage',
        'fgSysLowMemCapacity',
        'fgSysDiskUsage',
        'fgSysDiskCapacity',
        'fgSysSesCount',
        'fgSysSesRate1',
        'fgSysSes6Count',
        'fgSysSes6Rate1',
        'fgApHTTPConnections',
        'fgApHTTPMaxConnections',
        'fgVdNumber',
        'fgVdMaxVdoms',
    ]:
        aggregator.assert_metric('snmp.{}'.format(sys_gauge), metric_type=aggregator.GAUGE, tags=common_tags, count=1)

    # Per-processor metrics: usage gauges plus packet counters with derived rates.
    processor_tags = common_tags + ['processor_index:12']
    for proc_gauge in ['fgProcessorUsage', 'fgProcessorSysUsage']:
        aggregator.assert_metric(
            'snmp.{}'.format(proc_gauge), metric_type=aggregator.GAUGE, tags=processor_tags, count=1
        )
    for proc_count in ['fgProcessorPktRxCount', 'fgProcessorPktTxCount', 'fgProcessorPktDroppedCount']:
        aggregator.assert_metric(
            'snmp.{}'.format(proc_count), metric_type=aggregator.MONOTONIC_COUNT, tags=processor_tags, count=1
        )
        aggregator.assert_metric(
            'snmp.{}.rate'.format(proc_count), metric_type=aggregator.RATE, tags=processor_tags, count=1
        )

    # Per-virtual-domain gauges.
    vd_tags = common_tags + ['virtualdomain_index:4', 'virtualdomain_name:their oxen quaintly']
    for vd_metric in [
        'fgVdEntOpMode',
        'fgVdEntHaState',
        'fgVdEntCpuUsage',
        'fgVdEntMemUsage',
        'fgVdEntSesCount',
        'fgVdEntSesRate',
    ]:
        aggregator.assert_metric('snmp.{}'.format(vd_metric), metric_type=aggregator.GAUGE, tags=vd_tags, count=1)

    # Interface
    aggregator.assert_metric('snmp.fgIntfEntVdom', metric_type=aggregator.GAUGE, count=1)

    # IPv4 then IPv6 firewall policies: each counter is submitted as both a
    # monotonic count and a derived .rate metric.
    firewall_cases = [
        (common_tags + ['policy_index:22'], ['fgFwPolPktCount', 'fgFwPolByteCount']),
        (common_tags + ['policy6_index:29'], ['fgFwPol6PktCount', 'fgFwPol6ByteCount']),
    ]
    for policy_tags, fw_metrics in firewall_cases:
        for fw_metric in fw_metrics:
            aggregator.assert_metric(
                'snmp.{}'.format(fw_metric), metric_type=aggregator.MONOTONIC_COUNT, tags=policy_tags, count=1
            )
            aggregator.assert_metric(
                'snmp.{}.rate'.format(fw_metric), metric_type=aggregator.RATE, tags=policy_tags, count=1
            )

    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True)
@pytest.mark.usefixtures("dd_environment")
def test_netapp(aggregator):
    """Check that the NetApp profile submits the expected metrics with the expected tags."""
    run_profile_check('netapp')

    common_tags = common.CHECK_TAGS + [
        'snmp_profile:netapp',
        'snmp_host:example-datacenter.company',
        'device_vendor:netapp',
    ]

    common.assert_common_metrics(aggregator, common_tags)

    # Scalar metrics tagged only with the device-level tags.
    for scalar_gauge in ['cfInterconnectStatus', 'miscCacheAge', 'ncHttpActiveCliConns']:
        aggregator.assert_metric(
            'snmp.{}'.format(scalar_gauge), metric_type=aggregator.GAUGE, tags=common_tags, count=1
        )
    aggregator.assert_metric(
        'snmp.extcache64Hits', metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
    )

    # SnapVault failure counters, one row per destination.
    snapvaults = [('5', '/vol/dir1', '5'), ('6', '/vol/dir3', '2'), ('18', '/vol/dir9', '4')]
    for sv_index, sv_destination, sv_state in snapvaults:
        sv_tags = [
            'index:{}'.format(sv_index),
            'destination:{}'.format(sv_destination),
            'state:{}'.format(sv_state),
        ] + common_tags
        aggregator.assert_metric(
            'snmp.svTotalFailures', metric_type=aggregator.MONOTONIC_COUNT, tags=sv_tags, count=1
        )

    # SnapMirror lag gauge and failure counter per relationship.
    for sm_index, sm_state in (('6', '1'), ('9', '5'), ('29', '1')):
        sm_tags = ['index:{}'.format(sm_index), 'state:{}'.format(sm_state)] + common_tags
        aggregator.assert_metric('snmp.snapmirrorLag', metric_type=aggregator.GAUGE, tags=sm_tags, count=1)
        aggregator.assert_metric(
            'snmp.snapmirrorTotalFailures', metric_type=aggregator.MONOTONIC_COUNT, tags=sm_tags, count=1
        )

    # Filesystem gauges: the Nth index pairs with the /vol/dirN filesystem name.
    filesystem_indexes = [
        '1022',
        '1023',
        '1024',
        '1025',
        '1026',
        '1027',
        '1028',
        '1029',
        '1032',
        '1033',
    ]
    for fs_gauge in ['dfHighTotalKBytes', 'dfHighAvailKBytes', 'dfInodesUsed', 'dfInodesFree']:
        for position, fs_index in enumerate(filesystem_indexes, 1):
            fs_tags = ['index:{}'.format(fs_index), 'filesystem:{}'.format('/vol/dir{}'.format(position))] + common_tags
            aggregator.assert_metric('snmp.{}'.format(fs_gauge), metric_type=aggregator.GAUGE, tags=fs_tags, count=1)

    # Interface throughput: raw monotonic count plus derived rate.
    # Interface descriptions will be normalized in the backend, but we receive the raw DisplayString values here.
    interfaces = [
        ('6', 'netgear ifX300 v1'),
        ('7', 'junyper proto12 12.3'),
        ('23', 'malabar yz42 10.2020'),
    ]
    for if_index, if_descr in interfaces:
        if_tags = ['index:{}'.format(if_index), 'interface:{}'.format(if_descr)] + common_tags
        aggregator.assert_metric(
            'snmp.ifHighInOctets', metric_type=aggregator.MONOTONIC_COUNT, tags=if_tags, count=1
        )
        aggregator.assert_metric('snmp.ifHighInOctets.rate', metric_type=aggregator.RATE, tags=if_tags, count=1)

    aggregator.assert_metric('snmp.sysUpTimeInstance', metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_cisco_catalyst(aggregator):
    """Check that the Cisco Catalyst profile submits the expected metrics with the expected tags."""
    run_profile_check('cisco-catalyst')

    common_tags = common.CHECK_TAGS + [
        'snmp_host:catalyst-6000.example',
        'snmp_profile:cisco-catalyst',
        'device_vendor:cisco',
    ]

    # Environmental sensor values (sensor_type 10 in the recording).
    for sensor_id in (5, 9):
        sensor_tags = ['sensor_id:{}'.format(sensor_id), 'sensor_type:10'] + common_tags
        aggregator.assert_metric('snmp.entSensorValue', metric_type=aggregator.GAUGE, tags=sensor_tags, count=1)

    # Cisco Interface Extension gauges for a subset of GigabitEthernet ports.
    for port in (6, 10, 12, 18, 22, 25, 27):
        if_tags = ['interface:{}'.format('Gi1/0/{}'.format(port))] + common_tags
        for cie_metric in CIE_METRICS:
            aggregator.assert_metric(
                'snmp.{}'.format(cie_metric), metric_type=aggregator.GAUGE, tags=if_tags, count=1
            )

    # Field-replaceable unit (FRU) gauges.
    for fru in (1001, 1010, 2001, 2010):
        fru_tags = ['fru:{}'.format(fru)] + common_tags
        for fru_metric in FRU_METRICS:
            aggregator.assert_metric(
                'snmp.{}'.format(fru_metric), metric_type=aggregator.GAUGE, tags=fru_tags, count=1
            )

    aggregator.assert_metric('snmp.sysUpTimeInstance', count=1)
    aggregator.assert_metric('snmp.devices_monitored', count=1)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_juniper_ex(aggregator):
    """Profile test for Juniper EX switches: virtual chassis, DCU, COS and firewall metrics."""
    run_profile_check('juniper-ex')

    expected_tags = common.CHECK_TAGS + [
        'snmp_profile:juniper-ex',
        'device_vendor:juniper-networks',
    ]
    _check_juniper_virtual_chassis(aggregator, expected_tags)
    _check_juniper_dcu(aggregator, expected_tags)
    _check_juniper_cos(aggregator, expected_tags)
    _check_juniper_firewall(aggregator, expected_tags)

    aggregator.assert_metric('snmp.devices_monitored', count=1)
    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True)
@pytest.mark.usefixtures("dd_environment")
def test_juniper_mx(aggregator):
    """Profile test for Juniper MX routers: virtual chassis and firewall metrics."""
    run_profile_check('juniper-mx')

    expected_tags = common.CHECK_TAGS + [
        'snmp_profile:juniper-mx',
        'device_vendor:juniper-networks',
    ]
    _check_juniper_virtual_chassis(aggregator, expected_tags)
    _check_juniper_firewall(aggregator, expected_tags)

    aggregator.assert_metric('snmp.devices_monitored', count=1)
    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True)
@pytest.mark.usefixtures("dd_environment")
def test_juniper_srx(aggregator):
    """Profile test for Juniper SRX firewalls: user firewall, DCU and SCU metrics."""
    run_profile_check('juniper-srx')

    expected_tags = common.CHECK_TAGS + [
        'snmp_profile:juniper-srx',
        'device_vendor:juniper-networks',
    ]
    _check_juniper_userfirewall(aggregator, expected_tags)
    _check_juniper_dcu(aggregator, expected_tags)
    _check_juniper_scu(aggregator, expected_tags)

    aggregator.assert_metric('snmp.devices_monitored', count=1)
    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True)
def _check_juniper_scu(aggregator, common_tags):
    """
    Shared testing function for Juniper profiles supporting scu
    """
    interface_tag_sets = [
        ['address_family:1', 'interface:kept but'],
        ['address_family:1', 'interface:quaintly driving oxen their zombies oxen acted acted'],
        ['address_family:1', 'interface:but forward kept but their driving oxen quaintly acted'],
    ]
    # Every SCU counter must be submitted once per (address family, interface) row.
    for scu_metric in SCU_COUNTS:
        for extra_tags in interface_tag_sets:
            aggregator.assert_metric(
                'snmp.{}'.format(scu_metric),
                metric_type=aggregator.MONOTONIC_COUNT,
                tags=common_tags + extra_tags,
                count=1,
            )
def _check_juniper_userfirewall(aggregator, common_tags):
    """
    Shared testing function for Juniper profiles supporting userfirewall (user auth)
    """
    ldap_tag_sets = [
        ['ldap_domain_name:Mycroft Holmes', 'ldap_host:brother'],
        ['ldap_domain_name:Jim Moriarty', 'ldap_host:enemy'],
    ]
    # Every user-firewall counter must be submitted once per LDAP domain/host pair.
    for fw_metric in USER_FIREWALL:
        for extra_tags in ldap_tag_sets:
            aggregator.assert_metric(
                'snmp.{}'.format(fw_metric),
                metric_type=aggregator.MONOTONIC_COUNT,
                tags=common_tags + extra_tags,
                count=1,
            )
def _check_juniper_dcu(aggregator, common_tags):
    """
    Shared testing function for Juniper profiles supporting DCU
    """
    destination_tag_sets = [
        [
            'address_family:1',
            'destination_class_name:their',
            'interface:quaintly driving oxen their zombies oxen acted acted',
        ],
        [
            'address_family:1',
            'destination_class_name:acted but forward acted zombies forward',
            'interface:but forward kept but their driving oxen quaintly acted',
        ],
        [
            'address_family:2',
            'destination_class_name:oxen Jaded oxen Jaded forward kept quaintly',
            'interface:kept but',
        ],
    ]
    # Every DCU counter must be submitted once per destination-class row.
    for dcu_metric in DCU_COUNTS:
        for extra_tags in destination_tag_sets:
            aggregator.assert_metric(
                'snmp.{}'.format(dcu_metric),
                metric_type=aggregator.MONOTONIC_COUNT,
                tags=common_tags + extra_tags,
                count=1,
            )
def _check_juniper_firewall(aggregator, common_tags):
    """
    Shared testing function for Juniper profiles supporting firewall metrics
    """
    counter_tag_sets = [
        [
            'counter_name:Jaded oxen kept their driving but kept',
            'counter_type:4',
            'firewall_filter_name:their driving quaintly but Jaded oxen',
        ],
        [
            'counter_name:but but but their their their kept kept forward',
            'counter_type:4',
            'firewall_filter_name:driving kept acted Jaded zombies kept acted',
        ],
    ]
    # Every firewall counter must be submitted once per filter/counter row.
    for fw_metric in FIREWALL_COUNTS:
        for extra_tags in counter_tag_sets:
            aggregator.assert_metric(
                'snmp.{}'.format(fw_metric),
                metric_type=aggregator.MONOTONIC_COUNT,
                tags=common_tags + extra_tags,
                count=1,
            )
def _check_juniper_virtual_chassis(aggregator, common_tags):
    """
    Shared testing function for Juniper profiles supporting virtual chassis metrics
    """
    port_tag_sets = [
        ['port_name:but driving but'],
        ['port_name:Jaded forward but oxen quaintly their their'],
        ['port_name:forward forward driving driving Jaded Jaded'],
    ]
    # Counters are submitted as monotonic counts, "rates" as gauges; both once per port.
    for extra_tags in port_tag_sets:
        port_tags = common_tags + extra_tags
        for count_metric in VIRTUAL_CHASSIS_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(count_metric),
                metric_type=aggregator.MONOTONIC_COUNT,
                tags=port_tags,
                count=1,
            )
        for rate_metric in VIRTUAL_CHASSIS_RATES:
            aggregator.assert_metric(
                'snmp.{}'.format(rate_metric), metric_type=aggregator.GAUGE, tags=port_tags, count=1
            )
def _check_juniper_cos(aggregator, common_tags):
    """
    Shared testing function for Juniper profiles supporting COS metrics
    """
    queue_tag_sets = [
        ['interface:acted oxen oxen forward quaintly kept zombies but oxen', 'queue_number:25'],
        ['interface:acted kept quaintly acted oxen kept', 'queue_number:50'],
        ['interface:their', 'queue_number:15'],
    ]
    # COS counters are submitted as monotonic counts, COS "rates" as gauges; once per queue.
    for count_metric in COS_COUNTS:
        for extra_tags in queue_tag_sets:
            aggregator.assert_metric(
                'snmp.{}'.format(count_metric),
                metric_type=aggregator.MONOTONIC_COUNT,
                tags=common_tags + extra_tags,
                count=1,
            )
    for rate_metric in COS_RATES:
        for extra_tags in queue_tag_sets:
            aggregator.assert_metric(
                'snmp.{}'.format(rate_metric), metric_type=aggregator.GAUGE, tags=common_tags + extra_tags, count=1
            )
| 40.546934 | 120 | 0.668066 |
import logging
import pytest
from datadog_checks.base import ConfigurationError
from datadog_checks.dev.utils import get_metadata_metrics
from datadog_checks.snmp import SnmpCheck
from datadog_checks.snmp.utils import (
_get_profile_name,
_is_abstract_profile,
_iter_default_profile_file_paths,
get_profile_definition,
recursively_expand_base_profiles,
)
from . import common
from .metrics import (
ADAPTER_IF_COUNTS,
CCCA_ROUTER_GAUGES,
CIE_METRICS,
COS_COUNTS,
COS_RATES,
CPU_METRICS,
DCU_COUNTS,
DISK_GAUGES,
DRS_GAUGES,
FIREWALL_COUNTS,
FRU_METRICS,
IF_BANDWIDTH_USAGE,
IF_COUNTS,
IF_GAUGES,
IF_RATES,
IP_COUNTS,
IP_IF_COUNTS,
IPX_COUNTS,
LTM_GAUGES,
LTM_NODES_COUNTS,
LTM_NODES_GAUGES,
LTM_NODES_RATES,
LTM_POOL_COUNTS,
LTM_POOL_GAUGES,
LTM_POOL_MEMBER_COUNTS,
LTM_POOL_MEMBER_GAUGES,
LTM_POOL_MEMBER_RATES,
LTM_POOL_RATES,
LTM_VIRTUAL_SERVER_COUNTS,
LTM_VIRTUAL_SERVER_GAUGES,
LTM_VIRTUAL_SERVER_RATES,
MEMORY_METRICS,
PEER_GAUGES,
PEER_RATES,
PROBE_GAUGES,
SCU_COUNTS,
SYSTEM_STATUS_GAUGES,
TCP_COUNTS,
TCP_GAUGES,
UDP_COUNTS,
USER_FIREWALL,
VIRTUAL_CHASSIS_COUNTS,
VIRTUAL_CHASSIS_RATES,
VOLTAGE_GAUGES,
)
pytestmark = common.python_autodiscovery_only
def test_load_profiles(caplog):
instance = common.generate_instance_config([])
check = SnmpCheck('snmp', {}, [instance])
caplog.at_level(logging.WARNING)
for name, profile in check.profiles.items():
try:
check._config.refresh_with_profile(profile)
except ConfigurationError as e:
pytest.fail("Profile `{}` is not configured correctly: {}".format(name, e))
assert "table doesn't have a 'metric_tags' section" not in caplog.text
caplog.clear()
def test_profile_hierarchy():
errors = []
compat_base_profiles = ['_base_cisco', '_base_cisco_voice']
for path in _iter_default_profile_file_paths():
name = _get_profile_name(path)
definition = get_profile_definition({'definition_file': path})
extends = definition.get('extends', [])
sysobjectid = definition.get('sysobjectid')
if _is_abstract_profile(name):
if '_base.yaml' in extends and name not in compat_base_profiles:
errors.append("'{}': mixin wrongly extends '_base.yaml'".format(name))
if sysobjectid is not None:
errors.append("'{}': mixin wrongly defines a `sysobjectid`".format(name))
else:
if '_base.yaml' not in extends:
errors.append("'{}': concrete profile must directly extend '_base.yaml'".format(name))
if sysobjectid is None:
errors.append("'{}': concrete profile must define a `sysobjectid`".format(name))
if errors:
pytest.fail('\n'.join(sorted(errors)))
def run_profile_check(recording_name, profile_name=None):
instance = common.generate_instance_config([])
instance['community_string'] = recording_name
instance['enforce_mib_constraints'] = False
check = SnmpCheck('snmp', {}, [instance])
# First, see if recording name is a profile, then use profile as definition.
if profile_name is not None:
profile = check.profiles.get(profile_name)
else:
profile = check.profiles.get(recording_name)
if profile:
try:
test_check = SnmpCheck('snmp', {}, [common.generate_instance_config([])])
test_check._config.refresh_with_profile(profile)
except ConfigurationError as e:
pytest.fail("Profile `{}` is not configured correctly: {}".format(recording_name, e))
check.check(instance)
@pytest.mark.unit
@pytest.mark.parametrize(
'definition_file, equivalent_definition',
[
pytest.param('_base_cisco.yaml', {'extends': ['_base.yaml', '_cisco-generic.yaml']}, id='generic'),
pytest.param(
'_base_cisco_voice.yaml',
{'extends': ['_base.yaml', '_cisco-generic.yaml', '_cisco-voice.yaml']},
id='voice',
),
],
)
def test_compat_cisco_base_profiles(definition_file, equivalent_definition):
# type: (str, dict) -> None
definition = get_profile_definition({'definition_file': definition_file})
recursively_expand_base_profiles(definition)
recursively_expand_base_profiles(equivalent_definition)
assert definition == equivalent_definition
@pytest.mark.usefixtures("dd_environment")
def test_cisco_voice(aggregator):
run_profile_check('cisco_icm')
tags = [
'snmp_profile:cisco_icm',
'snmp_host:test',
'device_vendor:cisco',
] + common.CHECK_TAGS
resources = ["hrSWRunPerfMem", "hrSWRunPerfCPU"]
common.assert_common_metrics(aggregator, tags)
for resource in resources:
aggregator.assert_metric('snmp.{}'.format(resource), metric_type=aggregator.GAUGE, tags=tags)
run_indices = [4, 7, 8, 9, 10, 18, 24, 29, 30]
for index in run_indices:
status_tags = tags + ['run_index:{}'.format(index)]
aggregator.assert_metric('snmp.hrSWRunStatus', metric_type=aggregator.GAUGE, tags=status_tags)
cvp_gauges = [
"ccvpSipIntAvgLatency1",
"ccvpSipIntAvgLatency2",
"ccvpSipIntConnectsRcv",
"ccvpSipIntNewCalls",
"ccvpSipRtActiveCalls",
"ccvpSipRtTotalCallLegs",
"ccvpLicRtPortsInUse",
"ccvpLicAggMaxPortsInUse",
]
for cvp in cvp_gauges:
aggregator.assert_metric('snmp.{}'.format(cvp), metric_type=aggregator.GAUGE, tags=tags)
ccms_counts = ["ccmRejectedPhones", "ccmUnregisteredPhones"]
ccms_gauges = ["ccmRegisteredGateways", "ccmRegisteredPhones"]
for ccm in ccms_counts:
aggregator.assert_metric('snmp.{}'.format(ccm), metric_type=aggregator.RATE, tags=tags)
for ccm in ccms_gauges:
aggregator.assert_metric('snmp.{}'.format(ccm), metric_type=aggregator.GAUGE, tags=tags)
calls = [
"cvCallVolPeerIncomingCalls",
"cvCallVolPeerOutgoingCalls",
]
peers = [4, 13, 14, 17, 18, 22, 25, 30, 31]
for call in calls:
for peer in peers:
peer_tags = tags + ["peer_index:{}".format(peer)]
aggregator.assert_metric('snmp.{}'.format(call), metric_type=aggregator.GAUGE, tags=peer_tags)
calls = [
"cvCallVolMediaIncomingCalls",
"cvCallVolMediaOutgoingCalls",
]
for call in calls:
aggregator.assert_metric('snmp.{}'.format(call), metric_type=aggregator.GAUGE, tags=tags)
dial_controls = [
"dialCtlPeerStatsAcceptCalls",
"dialCtlPeerStatsFailCalls",
"dialCtlPeerStatsRefuseCalls",
"dialCtlPeerStatsSuccessCalls",
]
for ctl in dial_controls:
aggregator.assert_metric(
'snmp.{}'.format(ctl), metric_type=aggregator.MONOTONIC_COUNT, tags=["peer_index:7"] + tags
)
pim_tags = tags + ['pim_host:test', 'pim_name:name', 'pim_num:2']
aggregator.assert_metric('snmp.{}'.format("cccaPimStatus"), metric_type=aggregator.GAUGE, tags=pim_tags)
aggregator.assert_metric('snmp.{}'.format("sysUpTimeInstance"), metric_type=aggregator.GAUGE, tags=tags, count=1)
instance_numbers = ['4446', '5179', '12093', '19363', '25033', '37738', '42562', '51845', '62906', '63361']
for metric in CCCA_ROUTER_GAUGES:
for instance_number in instance_numbers:
instance_tags = tags + ['instance_number:{}'.format(instance_number)]
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=instance_tags)
aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_f5(aggregator):
profile = 'f5-big-ip'
run_profile_check(profile)
gauges = [
'sysStatMemoryTotal',
'sysStatMemoryUsed',
'sysGlobalTmmStatMemoryTotal',
'sysGlobalTmmStatMemoryUsed',
'sysGlobalHostOtherMemoryTotal',
'sysGlobalHostOtherMemoryUsed',
'sysGlobalHostSwapTotal',
'sysGlobalHostSwapUsed',
'sysTcpStatOpen',
'sysTcpStatCloseWait',
'sysTcpStatFinWait',
'sysTcpStatTimeWait',
'sysUdpStatOpen',
'sysClientsslStatCurConns',
]
counts = [
'sysTcpStatAccepts',
'sysTcpStatAcceptfails',
'sysTcpStatConnects',
'sysTcpStatConnfails',
'sysUdpStatAccepts',
'sysUdpStatAcceptfails',
'sysUdpStatConnects',
'sysUdpStatConnfails',
'sysClientsslStatEncryptedBytesIn',
'sysClientsslStatEncryptedBytesOut',
'sysClientsslStatDecryptedBytesIn',
'sysClientsslStatDecryptedBytesOut',
'sysClientsslStatHandshakeFailures',
]
cpu_rates = [
'sysMultiHostCpuUser',
'sysMultiHostCpuNice',
'sysMultiHostCpuSystem',
'sysMultiHostCpuIdle',
'sysMultiHostCpuIrq',
'sysMultiHostCpuSoftirq',
'sysMultiHostCpuIowait',
]
interfaces = [
('1.0', 'desc2'),
('mgmt', 'desc1'),
('/Common/internal', 'desc5'),
('/Common/http-tunnel', 'desc3'),
('/Common/socks-tunnel', 'desc4'),
]
interfaces_with_bandwidth_usage = {
'1.0',
'mgmt',
'/Common/internal',
}
tags = [
'snmp_profile:' + profile,
'snmp_host:f5-big-ip-adc-good-byol-1-vm.c.datadog-integrations-lab.internal',
'device_vendor:f5',
]
tags += common.CHECK_TAGS
common.assert_common_metrics(aggregator, tags)
for metric in gauges:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
for metric in counts:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1)
for metric in cpu_rates:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=['cpu:0'] + tags, count=1)
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=['cpu:1'] + tags, count=1)
for interface, desc in interfaces:
interface_tags = ['interface:{}'.format(interface), 'interface_alias:{}'.format(desc)] + tags
for metric in IF_COUNTS:
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=interface_tags, count=1
)
for metric in IF_RATES:
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=interface_tags, count=1
)
if interface in interfaces_with_bandwidth_usage:
for metric in IF_BANDWIDTH_USAGE:
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=interface_tags, count=1
)
for metric in IF_GAUGES:
aggregator.assert_metric(
'snmp.{}'.format(metric),
metric_type=aggregator.GAUGE,
tags=interface_tags,
count=1,
)
for version in ['ipv4', 'ipv6']:
ip_tags = ['ipversion:{}'.format(version)] + tags
for metric in IP_COUNTS:
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=ip_tags, count=1
)
for metric in LTM_GAUGES:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
servers = ['server1', 'server2', 'server3']
for server in servers:
server_tags = tags + ['server:{}'.format(server)]
for metric in LTM_VIRTUAL_SERVER_GAUGES:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=server_tags, count=1)
for metric in LTM_VIRTUAL_SERVER_COUNTS:
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=server_tags, count=1
)
for metric in LTM_VIRTUAL_SERVER_RATES:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=server_tags, count=1)
nodes = ['node1', 'node2', 'node3']
for node in nodes:
node_tags = tags + ['node:{}'.format(node)]
for metric in LTM_NODES_GAUGES:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=node_tags, count=1)
for metric in LTM_NODES_COUNTS:
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=node_tags, count=1
)
for metric in LTM_NODES_RATES:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=node_tags, count=1)
pools = ['pool1', 'pool2']
for pool in pools:
pool_tags = tags + ['pool:{}'.format(pool)]
for metric in LTM_POOL_GAUGES:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=pool_tags, count=1)
for metric in LTM_POOL_COUNTS:
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=pool_tags, count=1
)
for metric in LTM_POOL_RATES:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=pool_tags, count=1)
pool_members = [('pool1', 'node1'), ('pool1', 'node2'), ('pool2', 'node3')]
for pool, node in pool_members:
pool_member_tags = tags + ['pool:{}'.format(pool), 'node:{}'.format(node)]
for metric in LTM_POOL_MEMBER_GAUGES:
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=pool_member_tags, count=1
)
for metric in LTM_POOL_MEMBER_COUNTS:
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=pool_member_tags, count=1
)
for metric in LTM_POOL_MEMBER_RATES:
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=pool_member_tags, count=1
)
aggregator.assert_metric('snmp.sysUpTimeInstance', count=1)
aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_router(aggregator):
profile = "generic-router"
run_profile_check(profile)
common_tags = common.CHECK_TAGS + ['snmp_profile:' + profile]
common.assert_common_metrics(aggregator, common_tags)
interfaces = [
('eth0', 'kept'),
('eth1', 'their forward oxen'),
]
for interface, if_desc in interfaces:
tags = ['interface:{}'.format(interface), 'interface_alias:{}'.format(if_desc)] + common_tags
for metric in IF_COUNTS:
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
)
for metric in IF_RATES:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=tags, count=1)
for metric in IF_GAUGES:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
for metric in IF_BANDWIDTH_USAGE:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=tags, count=1)
for metric in TCP_COUNTS:
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
)
for metric in TCP_GAUGES:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
for metric in UDP_COUNTS:
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
)
for version in ['ipv4', 'ipv6']:
tags = ['ipversion:{}'.format(version)] + common_tags
for metric in IP_COUNTS + IPX_COUNTS:
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
)
for metric in IP_IF_COUNTS:
for interface in ['17', '21']:
tags = ['ipversion:{}'.format(version), 'interface:{}'.format(interface)] + common_tags
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
)
aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_f5_router(aggregator):
# Use the generic profile against the f5 device
instance = common.generate_instance_config([])
instance['community_string'] = 'f5-big-ip'
instance['enforce_mib_constraints'] = False
init_config = {'profiles': {'router': {'definition_file': 'generic-router.yaml'}}}
check = SnmpCheck('snmp', init_config, [instance])
check.check(instance)
interfaces = [
('1.0', 'desc2'),
('mgmt', 'desc1'),
('/Common/internal', 'desc5'),
('/Common/http-tunnel', 'desc3'),
('/Common/socks-tunnel', 'desc4'),
]
interfaces_with_bandwidth_usage = {
'1.0',
'mgmt',
'/Common/internal',
}
common_tags = [
'snmp_profile:router',
'snmp_host:f5-big-ip-adc-good-byol-1-vm.c.datadog-integrations-lab.internal',
]
common_tags.extend(common.CHECK_TAGS)
common.assert_common_metrics(aggregator, common_tags)
for interface, desc in interfaces:
tags = ['interface:{}'.format(interface), 'interface_alias:{}'.format(desc)] + common_tags
for metric in IF_COUNTS:
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
)
for metric in IF_RATES:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=tags, count=1)
for metric in IF_GAUGES:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
if interface in interfaces_with_bandwidth_usage:
for metric in IF_BANDWIDTH_USAGE:
aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=tags, count=1)
for version in ['ipv4', 'ipv6']:
tags = ['ipversion:{}'.format(version)] + common_tags
for metric in IP_COUNTS:
aggregator.assert_metric(
'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
)
aggregator.assert_metric('snmp.sysUpTimeInstance', count=1)
aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_cisco_3850(aggregator):
    """Exercise the 'cisco-3850' profile against the simulated Catalyst 3850.

    Covers IF-MIB interface metrics, IP/TCP/UDP counters, entity sensors,
    FRUs, CPUs, CIE interface metrics, environment monitors, stack/switch
    state, fan trays, memory pools, OSPF neighbor/interface metrics and
    temperature probes.
    """
    profile = "cisco-3850"
    run_profile_check(profile)
    # We're not covering all interfaces
    interfaces = ["Gi1/0/{}".format(i) for i in range(1, 48)]
    common_tags = common.CHECK_TAGS + [
        'snmp_host:Cat-3850-4th-Floor.companyname.local',
        'snmp_profile:' + profile,
        'device_vendor:cisco',
    ]
    common.assert_common_metrics(aggregator, common_tags)
    aliases = {
        'Gi1/0/24': 'LWAP-example',
        'Gi1/0/33': 'switchboard console',
        'Gi1/0/38': 'Mitel Console',
        'Gi1/1/3': 'Link to Switch',
        'Gi2/0/13': 'AP01',
        'Gi2/0/14': 'AP02',
        'Gi2/0/15': 'AP03',
        'Gi2/0/16': 'AP04',
        'Gi2/0/17': 'AP05',
        'Gi2/0/18': 'AP06',
        'Gi2/1/4': 'Link to Switch',
    }
    # IF-MIB metrics, tagged with the interface name and its alias (empty
    # string when no alias is configured for the interface).
    for interface in interfaces:
        alias = aliases.get(interface, '')
        tags = ['interface:{}'.format(interface), 'interface_alias:{}'.format(alias)] + common_tags
        for metric in IF_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
            )
        for metric in IF_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
        for metric in IF_BANDWIDTH_USAGE:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=tags, count=1)
        for metric in IF_RATES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=tags, count=1)
    # IP system counters are only submitted for ipv6 by this device.
    for metric in IP_COUNTS + IPX_COUNTS:
        tags = common_tags + ['ipversion:ipv6']
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1)
    for metric in TCP_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    for metric in TCP_GAUGES:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    for metric in UDP_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    # Entity sensors present in the fixture (sensor_type 8).
    sensors = [1006, 1007, 1008, 2006, 2007, 2008]
    for sensor in sensors:
        tags = ['sensor_id:{}'.format(sensor), 'sensor_type:8'] + common_tags
        aggregator.assert_metric('snmp.entSensorValue', metric_type=aggregator.GAUGE, tags=tags, count=1)
    frus = [1001, 1010, 2001, 2010]
    for fru in frus:
        tags = ['fru:{}'.format(fru)] + common_tags
        for metric in FRU_METRICS:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    cpus = [1000, 2000]
    for cpu in cpus:
        tags = ['cpu:{}'.format(cpu)] + common_tags
        for metric in CPU_METRICS:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # CIE metrics carry the interface tag only (no alias).
    for interface in interfaces:
        tags = ['interface:{}'.format(interface)] + common_tags
        for metric in CIE_METRICS:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
        aggregator.assert_metric('snmp.cieIfResetCount', metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1)
    power_supplies = [
        (1, 'Switch 1 - Power Supply B, NotExist'),
        (1, 'Switch 2 - Power Supply B, NotExist'),
        (2, 'Switch 1 - Power Supply A, Normal'),
        (2, 'Switch 2 - Power Supply A, Normal'),
    ]
    for source, descr in power_supplies:
        env_tags = ['power_source:{}'.format(source), 'power_status_descr:{}'.format(descr)]
        aggregator.assert_metric(
            'snmp.ciscoEnvMonSupplyState', metric_type=aggregator.GAUGE, tags=env_tags + common_tags
        )
    aggregator.assert_metric('snmp.ciscoEnvMonFanState', metric_type=aggregator.GAUGE, tags=common_tags)
    aggregator.assert_metric('snmp.cswStackPortOperStatus', metric_type=aggregator.GAUGE)
    for switch, mac_addr in [(1, '0x046c9d42b080'), (2, '0xdccec1430680')]:
        tags = ['entity_name:Switch {}'.format(switch), 'mac_addr:{}'.format(mac_addr)] + common_tags
        aggregator.assert_metric('snmp.cswSwitchState', metric_type=aggregator.GAUGE, tags=tags)
    # Fan tray FRUs: reuse the computed tags instead of rebuilding the list inline.
    frus = [1011, 1012, 1013, 2011, 2012, 2013]
    for fru in frus:
        tags = ['fru:{}'.format(fru)] + common_tags
        aggregator.assert_metric('snmp.cefcFanTrayOperStatus', metric_type=aggregator.GAUGE, tags=tags)
    for metric in MEMORY_METRICS:
        for pool in ['Processor', 'IOS Process stack']:
            tags = ['mem_pool_name:{}'.format(pool)] + common_tags
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags)
    neighbor_metrics = [
        ('ospfNbrEvents', aggregator.RATE),
        ('ospfNbrState', aggregator.GAUGE),
        ('ospfNbrLsRetransQLen', aggregator.GAUGE),
    ]
    for metric, metric_type in neighbor_metrics:
        tags = ['neighbor_ip:192.29.116.26', 'neighbor_id:192.29.66.79'] + common_tags
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=metric_type, tags=tags, count=1)
    lls_metrics = ['ospfIfRetransInterval', 'ospfIfState']
    for metric in lls_metrics:
        tags = ['ospf_ip_addr:192.29.116.25'] + common_tags
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    for temp_index in [1006, 1007, 1008, 2006, 2007, 2008]:
        env_tag = ['temp_index:{}'.format(temp_index), 'temp_state:1']
        aggregator.assert_metric(
            'snmp.ciscoEnvMonTemperatureStatusValue', metric_type=aggregator.GAUGE, tags=env_tag + common_tags
        )
    aggregator.assert_metric('snmp.sysUpTimeInstance', count=1)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_meraki_cloud_controller(aggregator):
    """Exercise the 'meraki-cloud-controller' profile and verify every metric it emits."""
    run_profile_check('meraki-cloud-controller')

    common_tags = common.CHECK_TAGS + [
        'snmp_profile:meraki-cloud-controller',
        'snmp_host:dashboard.meraki.com',
        'device_vendor:meraki',
    ]
    common.assert_common_metrics(aggregator, common_tags)

    # Per-device gauges, tagged with device/product/network.
    device_tags = ['device:Gymnasium', 'product:MR16-HW', 'network:L_NETWORK'] + common_tags
    for metric in ('devStatus', 'devClientCount'):
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=device_tags, count=1)

    # Per-interface gauges from the Meraki device table (wifi0).
    wifi_tags = ['interface:wifi0', 'index:4'] + common_tags
    for metric in (
        'devInterfaceSentPkts',
        'devInterfaceRecvPkts',
        'devInterfaceSentBytes',
        'devInterfaceRecvBytes',
    ):
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=wifi_tags, count=1)

    # Generic IF-MIB metrics on eth0.
    eth0_tags = ['interface:eth0'] + common_tags
    for metric in IF_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=eth0_tags, count=1
        )
    for metric in IF_GAUGES:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=eth0_tags, count=1)
    for metric in list(IF_RATES) + list(IF_BANDWIDTH_USAGE):
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=eth0_tags, count=1)

    aggregator.assert_metric('snmp.sysUpTimeInstance', count=1)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_idrac(aggregator):
    """Exercise the 'idrac' profile against the simulated Dell iDRAC device.

    Each section below asserts one metric family from the profile, with the
    tag values present in the recorded fixture; every series is expected to
    be submitted exactly once.
    """
    run_profile_check('idrac')
    interfaces = ['eth0', 'en1']
    common_tags = common.CHECK_TAGS + ['snmp_profile:idrac', 'device_vendor:dell']
    common.assert_common_metrics(aggregator, common_tags)
    # Network adapter counters, tagged by adapter name.
    for interface in interfaces:
        tags = ['adapter:{}'.format(interface)] + common_tags
        for count in ADAPTER_IF_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(count), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
            )
    # System status gauges, tagged by chassis index.
    indexes = ['26', '29']
    for index in indexes:
        tags = ['chassis_index:{}'.format(index)] + common_tags
        for gauge in SYSTEM_STATUS_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Enclosure power supply state, tagged by supply name.
    powers = ['supply1', 'supply2']
    for power in powers:
        tags = ['supply_name:{}'.format(power)] + common_tags
        aggregator.assert_metric('snmp.enclosurePowerSupplyState', metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Disk gauges, tagged by disk name.
    disks = ['disk1', 'disk2']
    for disk in disks:
        tags = ['disk_name:{}'.format(disk)] + common_tags
        for gauge in DISK_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Battery state, tagged by battery name.
    batteries = ['battery1', 'battery2']
    for battery_name in batteries:
        tags = ['battery_name:{}'.format(battery_name)] + common_tags
        aggregator.assert_metric('snmp.{}'.format("batteryState"), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Controller roll-up status, tagged by controller name.
    controllers = ['controller1', 'controller2']
    for controller in controllers:
        tags = ['controller_name:{}'.format(controller)] + common_tags
        aggregator.assert_metric(
            'snmp.{}'.format("controllerRollUpStatus"), metric_type=aggregator.GAUGE, tags=tags, count=1
        )
    # PCI device status, tagged by device description and chassis index.
    devices = ['device1', 'device2']
    indexes = ['10', '20']
    for device, index in zip(devices, indexes):
        tags = ['device_descr_name:{}'.format(device), 'chassis_index:{}'.format(index)] + common_tags
        aggregator.assert_metric('snmp.{}'.format("pCIDeviceStatus"), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # System slot status, tagged by slot name and chassis index.
    slots = ['slot1', 'slot2']
    indexes = ['19', '21']
    for slot, index in zip(slots, indexes):
        tags = ['slot_name:{}'.format(slot), 'chassis_index:{}'.format(index)] + common_tags
        aggregator.assert_metric('snmp.{}'.format("systemSlotStatus"), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Network device status, tagged by chassis index, device FQDD and MAC address.
    tag_mappings = [('29', 'device2', '0x9e00e0291401'), ('3', 'device1', '0x9e00e0291401')]
    for index, device, mac in tag_mappings:
        tags = [
            'chassis_index:{}'.format(index),
            'device_fqdd:{}'.format(device),
            'mac_addr:{}'.format(mac),
        ] + common_tags
        aggregator.assert_metric(
            'snmp.{}'.format("networkDeviceStatus"), metric_type=aggregator.GAUGE, tags=tags, count=1
        )
    # BIOS status, tagged by chassis index.
    indexes = ['3', '31']
    for index in indexes:
        tags = ['chassis_index:{}'.format(index)] + common_tags
        aggregator.assert_metric('snmp.{}'.format("systemBIOSStatus"), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Probe gauges, tagged by chassis index and probe type.
    indexes = ['9', '18']
    probe_types = ['26', '26']
    for index, probe_type in zip(indexes, probe_types):
        tags = ['chassis_index:{}'.format(index), 'probe_type:{}'.format(probe_type)] + common_tags
        for gauge in PROBE_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Voltage probe gauges, tagged by chassis index and probe type.
    indexes = ['12', '22']
    probe_types = ['6', '3']
    for index, probe_type in zip(indexes, probe_types):
        tags = ['chassis_index:{}'.format(index), 'probe_type:{}'.format(probe_type)] + common_tags
        for gauge in VOLTAGE_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Memory device status, tagged by chassis index, device type and device index.
    indexes = ['29', '22']
    device_types = ['26', '4']
    device_indexes = ['4', '21']
    for index, device_type, device_index in zip(indexes, device_types, device_indexes):
        tags = [
            'chassis_index:{}'.format(index),
            'device_type:{}'.format(device_type),
            'device_index:{}'.format(device_index),
        ] + common_tags
        aggregator.assert_metric(
            'snmp.{}'.format("memoryDeviceStatus"), metric_type=aggregator.GAUGE, tags=tags, count=1
        )
    # DRS gauges carry only the common tags.
    for gauge in DRS_GAUGES:
        aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_cisco_nexus(aggregator):
    """Exercise the 'cisco-nexus' profile against the simulated Nexus device.

    Covers CIE reset counts, IF-MIB interface metrics, TCP/UDP counters,
    entity sensors, FRUs, CPUs, environment monitors, stack/switch state
    and fan trays.
    """
    profile = "cisco-nexus"
    run_profile_check(profile)
    interfaces = ["GigabitEthernet1/0/{}".format(i) for i in range(1, 9)]
    common_tags = common.CHECK_TAGS + [
        'snmp_host:Nexus-eu1.companyname.managed',
        'snmp_profile:' + profile,
        'device_vendor:cisco',
    ]
    common.assert_common_metrics(aggregator, common_tags)
    # CIE reset counts carry the interface tag only.
    for interface in interfaces:
        tags = ['interface:{}'.format(interface)] + common_tags
        aggregator.assert_metric('snmp.cieIfResetCount', metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1)
    # IF-MIB metrics: these interfaces have no alias, so interface_alias is empty.
    for interface in interfaces:
        tags = ['interface:{}'.format(interface), 'interface_alias:'] + common_tags
        for metric in IF_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
            )
        for metric in IF_RATES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=tags, count=1)
        for metric in IF_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
        for metric in IF_BANDWIDTH_USAGE:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=tags, count=1)
    for metric in TCP_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    for metric in TCP_GAUGES:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    for metric in UDP_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    # Entity sensors present in the fixture (sensor_type 8).
    sensors = [1, 9, 11, 12, 12, 14, 17, 26, 29, 31]
    for sensor in sensors:
        tags = ['sensor_id:{}'.format(sensor), 'sensor_type:8'] + common_tags
        aggregator.assert_metric('snmp.entSensorValue', metric_type=aggregator.GAUGE, tags=tags, count=1)
    frus = [6, 7, 15, 16, 19, 27, 30, 31]
    for fru in frus:
        tags = ['fru:{}'.format(fru)] + common_tags
        for metric in FRU_METRICS:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    cpus = [3173, 6692, 11571, 19529, 30674, 38253, 52063, 54474, 55946, 63960]
    for cpu in cpus:
        tags = ['cpu:{}'.format(cpu)] + common_tags
        for metric in CPU_METRICS:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    for (index, state) in [(3, 3), (6, 6), (8, 6), (11, 6), (13, 3), (14, 6), (20, 6), (21, 4), (31, 5)]:
        aggregator.assert_metric(
            'snmp.ciscoEnvMonTemperatureStatusValue',
            metric_type=aggregator.GAUGE,
            tags=['temp_state:{}'.format(state), 'temp_index:{}'.format(index)] + common_tags,
        )
    power_supply_tags = ['power_source:1', 'power_status_descr:Jaded driving their their their'] + common_tags
    aggregator.assert_metric('snmp.ciscoEnvMonSupplyState', metric_type=aggregator.GAUGE, tags=power_supply_tags)
    fan_indices = [4, 6, 7, 16, 21, 22, 25, 27]
    for index in fan_indices:
        tags = ['fan_status_index:{}'.format(index)] + common_tags
        aggregator.assert_metric('snmp.ciscoEnvMonFanState', metric_type=aggregator.GAUGE, tags=tags)
    aggregator.assert_metric(
        'snmp.cswStackPortOperStatus',
        metric_type=aggregator.GAUGE,
        tags=common_tags + ['interface:GigabitEthernet1/0/1'],
    )
    aggregator.assert_metric(
        'snmp.cswSwitchState', metric_type=aggregator.GAUGE, tags=['mac_addr:0xffffffffffff'] + common_tags
    )
    # Fan tray FRUs: reuse the computed tags instead of rebuilding the list inline.
    frus = [2, 7, 8, 21, 26, 27, 30, 31]
    for fru in frus:
        tags = ['fru:{}'.format(fru)] + common_tags
        aggregator.assert_metric('snmp.cefcFanTrayOperStatus', metric_type=aggregator.GAUGE, tags=tags)
    aggregator.assert_metric('snmp.sysUpTimeInstance', count=1)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_dell_poweredge(aggregator):
    """Exercise the 'dell-poweredge' profile against the simulated PowerEdge device.

    Asserts OS memory, power supply, temperature probe, processor, cache and
    memory device gauges, plus the metric families shared with the 'idrac'
    profile. Several sections use ``at_least=1`` rather than ``count=1`` —
    presumably because the fixture repeats those rows; confirm before
    tightening.
    """
    run_profile_check('dell-poweredge')
    # Metric name groups asserted below.
    sys_mem_gauges = [
        'operatingSystemMemoryAvailablePhysicalSize',
        'operatingSystemMemoryTotalPageFileSize',
        'operatingSystemMemoryAvailablePageFileSize',
        'operatingSystemMemoryTotalVirtualSize',
        'operatingSystemMemoryAvailableVirtualSize',
    ]
    power_supply_gauges = [
        'powerSupplyStatus',
        'powerSupplyOutputWatts',
        'powerSupplyMaximumInputVoltage',
        'powerSupplyCurrentInputVoltage',
    ]
    temperature_probe_gauges = ['temperatureProbeStatus', 'temperatureProbeReading']
    processor_device_gauges = ['processorDeviceStatus', 'processorDeviceThreadCount']
    cache_device_gauges = ['cacheDeviceStatus', 'cacheDeviceMaximumSize', 'cacheDeviceCurrentSize']
    memory_device_gauges = ['memoryDeviceStatus', 'memoryDeviceFailureModes']
    # Gauges shared with the 'idrac' profile; asserted without tags at the end.
    idrac_gauges = (
        ['batteryState', 'controllerRollUpStatus', 'pCIDeviceStatus', 'systemSlotStatus', 'systemBIOSStatus']
        + VOLTAGE_GAUGES
        + PROBE_GAUGES
    )
    common_tags = common.CHECK_TAGS + [
        'snmp_profile:dell-poweredge',
        'device_vendor:dell',
    ]
    common.assert_common_metrics(aggregator, common_tags)
    # OS memory gauges, tagged by chassis index.
    chassis_indexes = [29, 31]
    for chassis_index in chassis_indexes:
        tags = ['chassis_index:{}'.format(chassis_index)] + common_tags
        for metric in sys_mem_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, at_least=1)
    # Power supply gauges, tagged by chassis index and supply index.
    indexes = [5, 17]
    for index in indexes:
        tags = ['chassis_index:4', 'index:{}'.format(index)] + common_tags
        for metric in power_supply_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, at_least=1)
    # Temperature probe gauges.
    indexes = [13]
    for index in indexes:
        tags = ['chassis_index:18', 'index:{}'.format(index)] + common_tags
        for metric in temperature_probe_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, at_least=1)
    # Processor device gauges.
    indexes = [17, 28]
    for index in indexes:
        tags = ['chassis_index:5', 'index:{}'.format(index)] + common_tags
        for metric in processor_device_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, at_least=1)
    # Cache device gauges.
    indexes = [15, 27]
    for index in indexes:
        tags = ['chassis_index:11', 'index:{}'.format(index)] + common_tags
        for metric in cache_device_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, at_least=1)
    # Memory device gauges, tagged by serial number (fixture uses lorem-style strings).
    serial_numbers = ['forward zombies acted Jaded', 'kept oxen their their oxen oxen']
    for serial_number in serial_numbers:
        tags = ['serial_number_name:{}'.format(serial_number), 'chassis_index:1'] + common_tags
        for metric in memory_device_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, at_least=1)
    # Network device status, tagged by IP address.
    ip_addresses = ['66.97.1.103', '62.148.76.32', '45.3.243.155']
    for ip_address in ip_addresses:
        tags = ['ip_address:{}'.format(ip_address)] + common_tags
        aggregator.assert_metric('snmp.networkDeviceStatus', metric_type=aggregator.GAUGE, tags=tags, at_least=1)
    # Adapter counters, tagged by adapter name (same family as the idrac profile).
    interfaces = ['eth0', 'en1']
    for interface in interfaces:
        tags = ['adapter:{}'.format(interface)] + common_tags
        for count in ADAPTER_IF_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(count), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
            )
    # System status gauges, tagged by chassis index.
    indexes = ['26', '29']
    for index in indexes:
        tags = ['chassis_index:{}'.format(index)] + common_tags
        for gauge in SYSTEM_STATUS_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Enclosure power supply state, tagged by supply name.
    powers = ['supply1', 'supply2']
    for power in powers:
        tags = ['supply_name:{}'.format(power)] + common_tags
        aggregator.assert_metric('snmp.enclosurePowerSupplyState', metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Disk gauges, tagged by disk name.
    disks = ['disk1', 'disk2']
    for disk in disks:
        tags = ['disk_name:{}'.format(disk)] + common_tags
        for gauge in DISK_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Tags for these vary per row, so only metric presence is asserted.
    for gauge in idrac_gauges:
        aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_hp_ilo4(aggregator):
    """Exercise the 'hp-ilo4' profile against the simulated HP iLO4 device.

    Asserts health/status gauges, health counters, remote-management (SM2)
    gauges, temperature sensors, batteries, network card counters per card
    location, physical adapter metrics per interface, and physical drive
    metrics per controller/drive pair. Finishes with a metadata
    cross-check of submission types.
    """
    profile = "hp-ilo4"
    run_profile_check(profile)
    # Status/condition gauges submitted once with only the common tags.
    status_gauges = [
        'cpqHeCritLogCondition',
        'cpqHeCorrMemLogStatus',
        'cpqHeCorrMemLogCondition',
        'cpqHeAsrStatus',
        'cpqHeAsrPost',
        'cpqHeAsrCondition',
        'cpqHeAsrNetworkAccessStatus',
        'cpqHeThermalCondition',
        'cpqHeThermalTempStatus',
        'cpqHeThermalSystemFanStatus',
        'cpqHeThermalCpuFanStatus',
        'cpqNicVtVirusActivity',
        'cpqSm2CntlrServerPowerState',
        'cpqSm2CntlrBatteryStatus',
        'cpqSm2CntlrRemoteSessionStatus',
        'cpqSm2CntlrInterfaceStatus',
    ]
    cpqhlth_counts = ['cpqHeAsrRebootCount', 'cpqHeCorrMemTotalErrs']
    cpqhlth_gauges = ['cpqHeSysUtilEisaBusMin', 'cpqHePowerMeterCurrReading', 'cpqHeSysUtilLifeTime']
    cpqsm2_gauges = [
        'cpqSm2CntlrBatteryPercentCharged',
        'cpqSm2CntlrSelfTestErrors',
        'cpqSm2EventTotalEntries',
    ]
    # Values of the nic_stats_location tag (card location enum in the MIB).
    EMBEDDED = 2
    PCMCIA = 3
    card_locations = [EMBEDDED, PCMCIA]
    network_card_counts = [
        'cpqSm2NicXmitBytes',
        'cpqSm2NicXmitTotalPackets',
        'cpqSm2NicXmitDiscardPackets',
        'cpqSm2NicXmitErrorPackets',
        'cpqSm2NicXmitQueueLength',
        'cpqSm2NicRecvBytes',
        'cpqSm2NicRecvTotalPackets',
        'cpqSm2NicRecvDiscardPackets',
        'cpqSm2NicRecvErrorPackets',
        'cpqSm2NicRecvUnknownPackets',
    ]
    interfaces = ['eth0', 'en1']
    phys_adapter_counts = [
        'cpqNicIfPhysAdapterGoodTransmits',
        'cpqNicIfPhysAdapterGoodReceives',
        'cpqNicIfPhysAdapterBadTransmits',
        'cpqNicIfPhysAdapterBadReceives',
        'cpqNicIfPhysAdapterInOctets',
        'cpqNicIfPhysAdapterOutOctets',
    ]
    phys_adapter_gauges = ['cpqNicIfPhysAdapterSpeed', 'cpqNicIfPhysAdapterSpeedMbps']
    temperature_sensors = [1, 13, 28]
    batteries = [1, 3, 4, 5]
    common_tags = common.CHECK_TAGS + ['snmp_profile:' + profile, 'device_vendor:hp']
    common.assert_common_metrics(aggregator, common_tags)
    for metric in status_gauges:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    for metric in cpqhlth_counts:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    for metric in cpqhlth_gauges:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    for metric in cpqsm2_gauges:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    # Temperature sensors, tagged by temperature index.
    for index in temperature_sensors:
        tags = ['temperature_index:{}'.format(index)] + common_tags
        aggregator.assert_metric('snmp.cpqHeTemperatureCelsius', metric_type=aggregator.GAUGE, tags=tags, count=1)
        aggregator.assert_metric('snmp.cpqHeTemperatureCondition', metric_type=aggregator.GAUGE, tags=tags, count=1)
    # System batteries, tagged by battery index.
    for index in batteries:
        tags = ['battery_index:{}'.format(index)] + common_tags
        aggregator.assert_metric('snmp.cpqHeSysBatteryCondition', metric_type=aggregator.GAUGE, tags=tags, count=1)
        aggregator.assert_metric('snmp.cpqHeSysBatteryStatus', metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Remote-management NIC counters, tagged by card location.
    for location in card_locations:
        tags = ['nic_stats_location:{}'.format(location)] + common_tags
        for metric in network_card_counts:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
            )
    # Physical adapter metrics, tagged by interface name.
    for interface in interfaces:
        tags = ['interface:{}'.format(interface)] + common_tags
        for metric in phys_adapter_counts:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
            )
        for metric in phys_adapter_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Physical drive metrics, tagged by (controller index, drive index).
    drive_counts = [
        "cpqDaPhyDrvUsedReallocs",
        "cpqDaPhyDrvRefHours",
        "cpqDaPhyDrvHardReadErrs",
        "cpqDaPhyDrvRecvReadErrs",
        "cpqDaPhyDrvHardWriteErrs",
        "cpqDaPhyDrvRecvWriteErrs",
        "cpqDaPhyDrvHSeekErrs",
        "cpqDaPhyDrvSeekErrs",
    ]
    drive_gauges = [
        "cpqDaPhyDrvStatus",
        "cpqDaPhyDrvFactReallocs",
        "cpqDaPhyDrvSpinupTime",
        "cpqDaPhyDrvSize",
        "cpqDaPhyDrvSmartStatus",
        "cpqDaPhyDrvCurrentTemperature",
    ]
    drive_idx = [(0, 2), (0, 28), (8, 31), (9, 24), (9, 28), (10, 17), (11, 4), (12, 20), (18, 22), (23, 2)]
    for drive_cntrl_idx, drive_index in drive_idx:
        tags = ['drive_cntrl_idx:{}'.format(drive_cntrl_idx), "drive_index:{}".format(drive_index)] + common_tags
        for metric in drive_counts:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
            )
        for metric in drive_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True)
@pytest.mark.usefixtures("dd_environment")
def test_proliant(aggregator):
    """Exercise the 'hpe-proliant' profile against the simulated ProLiant device.

    Asserts TCP/UDP counters, CPU and CPU-utilization gauges, file-system
    gauges, memory module gauges, physical drive metrics, IF-MIB interface
    metrics, memory board condition, adapter status, power supply and
    thermal gauges. Finishes with a metadata cross-check of submission
    types.
    """
    run_profile_check('hpe-proliant')
    common_tags = common.CHECK_TAGS + ['snmp_profile:hpe-proliant', 'device_vendor:hp']
    common.assert_common_metrics(aggregator, common_tags)
    for metric in TCP_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    for metric in TCP_GAUGES:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    for metric in UDP_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    # Per-CPU gauges, tagged by cpu_index.
    cpu_gauges = [
        "cpqSeCpuSlot",
        "cpqSeCpuSpeed",
        "cpqSeCpuStatus",
        "cpqSeCpuExtSpeed",
        "cpqSeCpuCore",
        "cpqSeCPUCoreMaxThreads",
        "cpqSeCpuPrimary",
    ]
    cpu_indexes = [0, 4, 6, 8, 13, 15, 26, 27]
    for idx in cpu_indexes:
        tags = ['cpu_index:{}'.format(idx)] + common_tags
        for metric in cpu_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # CPU utilization gauges, tagged by cpu_unit_index.
    cpu_util_gauges = ["cpqHoCpuUtilMin", "cpqHoCpuUtilFiveMin", "cpqHoCpuUtilThirtyMin", "cpqHoCpuUtilHour"]
    cpu_unit_idx = [4, 7, 13, 20, 22, 23, 29]
    for idx in cpu_unit_idx:
        tags = ['cpu_unit_index:{}'.format(idx)] + common_tags
        for metric in cpu_util_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # File-system gauges, tagged by file_sys_index.
    file_sys_gauges = [
        "cpqHoFileSysSpaceTotal",
        "cpqHoFileSysSpaceUsed",
        "cpqHoFileSysPercentSpaceUsed",
        "cpqHoFileSysAllocUnitsTotal",
        "cpqHoFileSysAllocUnitsUsed",
        "cpqHoFileSysStatus",
    ]
    file_sys_idx = [5, 8, 11, 15, 19, 21, 28, 30]
    for idx in file_sys_idx:
        tags = ['file_sys_index:{}'.format(idx)] + common_tags
        for metric in file_sys_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Memory module gauges, tagged by (board index, module index).
    memory_gauges = [
        "cpqSiMemModuleSize",
        "cpqSiMemModuleType",
        "cpqSiMemModuleSpeed",
        "cpqSiMemModuleTechnology",
        "cpqSiMemModuleECCStatus",
        "cpqSiMemModuleFrequency",
        "cpqSiMemModuleCellStatus",
    ]
    memory_idx = [(6, 16), (7, 17), (7, 30), (8, 20), (10, 4), (15, 27), (20, 14), (21, 14), (23, 0), (28, 20)]
    for board_idx, mem_module_index in memory_idx:
        tags = ['mem_board_index:{}'.format(board_idx), "mem_module_index:{}".format(mem_module_index)] + common_tags
        for metric in memory_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Physical drive metrics, tagged by (controller index, drive index).
    drive_counts = [
        "cpqDaPhyDrvUsedReallocs",
        "cpqDaPhyDrvRefHours",
        "cpqDaPhyDrvHardReadErrs",
        "cpqDaPhyDrvRecvReadErrs",
        "cpqDaPhyDrvHardWriteErrs",
        "cpqDaPhyDrvRecvWriteErrs",
        "cpqDaPhyDrvHSeekErrs",
        "cpqDaPhyDrvSeekErrs",
    ]
    drive_gauges = [
        "cpqDaPhyDrvStatus",
        "cpqDaPhyDrvFactReallocs",
        "cpqDaPhyDrvSpinupTime",
        "cpqDaPhyDrvSize",
        "cpqDaPhyDrvSmartStatus",
        "cpqDaPhyDrvCurrentTemperature",
    ]
    drive_idx = [(0, 2), (0, 28), (8, 31), (9, 24), (9, 28), (10, 17), (11, 4), (12, 20), (18, 22), (23, 2)]
    for drive_cntrl_idx, drive_index in drive_idx:
        tags = ['drive_cntrl_idx:{}'.format(drive_cntrl_idx), "drive_index:{}".format(drive_index)] + common_tags
        for metric in drive_counts:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1
            )
        for metric in drive_gauges:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # IF-MIB metrics, tagged by interface name and alias (fixture uses lorem-style aliases).
    interfaces = [
        ('eth0', 'quaintly zombies quaintly forward'),
        ('eth1', 'quaintly but quaintly quaintly'),
    ]
    for interface, desc in interfaces:
        if_tags = ['interface:{}'.format(interface), 'interface_alias:{}'.format(desc)] + common_tags
        for metric in IF_COUNTS:
            aggregator.assert_metric(
                'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=if_tags, count=1
            )
        for metric in IF_GAUGES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=if_tags, count=1)
        for metric in IF_RATES:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=if_tags, count=1)
        for metric in IF_BANDWIDTH_USAGE:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=if_tags, count=1)
    # Memory board condition, tagged by board index.
    mem_boards = ['11', '12']
    for board in mem_boards:
        tags = ['mem_board_index:{}'.format(board)] + common_tags
        aggregator.assert_metric('snmp.cpqHeResMem2ModuleCondition', metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Physical adapter status/state, tagged by adapter name and MAC address.
    adapter_gauges = ['cpqNicIfPhysAdapterStatus', 'cpqNicIfPhysAdapterState']
    for gauge in adapter_gauges:
        tags = ['adapter_name:adapter', 'adapter_mac_addr:mac'] + common_tags
        aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Fault-tolerant power supply gauges, tagged by chassis number.
    power_metrics = [
        'cpqHeFltTolPowerSupplyStatus',
        'cpqHeFltTolPowerSupplyCapacityUsed',
        'cpqHeFltTolPowerSupplyCapacityMaximum',
    ]
    for gauge in power_metrics:
        tags = ['chassis_num:30'] + common_tags
        aggregator.assert_metric('snmp.{}'.format(gauge), metric_type=aggregator.GAUGE, tags=tags, count=1)
    # Drive array controller condition, tagged by controller index.
    controller_index = ['controller_index:3'] + common_tags
    aggregator.assert_metric(
        'snmp.{}'.format("cpqDaCntlrCondition"), metric_type=aggregator.GAUGE, tags=controller_index, count=1
    )
    # Thermal/health gauges carrying only the common tags.
    thermal_metrics = ['cpqHeThermalCondition', 'cpqHeSysUtilLifeTime', 'cpqHeFltTolPwrSupplyStatus']
    for metric in thermal_metrics:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True)
@pytest.mark.usefixtures("dd_environment")
def test_generic_host_resources(aggregator):
    """Run the generic host-resources profile and verify HOST-RESOURCES-MIB metrics."""
    conf = common.generate_instance_config([])
    conf['community_string'] = 'generic_host'
    conf['enforce_mib_constraints'] = False
    conf['profile'] = 'generic'
    profiles = {'generic': {'definition_file': '_generic-host-resources.yaml'}}
    check = SnmpCheck('snmp', {'profiles': profiles}, [conf])
    check.check(conf)

    common_tags = common.CHECK_TAGS + ['snmp_profile:generic']
    common.assert_common_metrics(aggregator, common_tags)

    # System-wide gauges, submitted once with the common tags.
    for metric in (
        'snmp.hrSystemUptime',
        'snmp.hrSystemNumUsers',
        'snmp.hrSystemProcesses',
        'snmp.hrSystemMaxProcesses',
    ):
        aggregator.assert_metric(metric, metric_type=aggregator.GAUGE, tags=common_tags, count=1)

    # Storage and processor tables each contribute two rows in the fixture.
    for metric in (
        'snmp.hrStorageAllocationUnits',
        'snmp.hrStorageSize',
        'snmp.hrStorageUsed',
        'snmp.hrStorageAllocationFailures',
        'snmp.hrProcessorLoad',
    ):
        aggregator.assert_metric(metric, count=2)

    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_palo_alto(aggregator):
    """Exercise the 'palo-alto' profile and verify session, GlobalProtect and entity metrics."""
    profile = "palo-alto"
    run_profile_check(profile)

    common_tags = common.CHECK_TAGS + [
        'snmp_profile:' + profile,
        'device_vendor:paloaltonetworks',
    ]
    common.assert_common_metrics(aggregator, common_tags)

    # Session, GlobalProtect gateway and entity power gauges: each must be
    # submitted exactly once with the common tags.
    exact_gauges = [
        'panSessionUtilization',
        'panSessionMax',
        'panSessionActive',
        'panSessionActiveTcp',
        'panSessionActiveUdp',
        'panSessionActiveICMP',
        'panSessionActiveSslProxy',
        'panSessionSslProxyUtilization',
        'panGPGWUtilizationPct',
        'panGPGWUtilizationMaxTunnels',
        'panGPGWUtilizationActiveTunnels',
        'panEntityTotalPowerAvail',
        'panEntityTotalPowerUsed',
    ]
    for metric in exact_gauges:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)

    # Per-FRU entry gauges: presence only, no exact count.
    for metric in ('panEntryFRUModulePowerUsed', 'panEntryFRUModuleNumPorts', 'panEntryFanTrayPowerUsed'):
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags)

    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_cisco_asa_all(aggregator):
    """Run the shared ASA assertions against the generic 'cisco-asa' profile."""
    assert_cisco_asa(aggregator, "cisco-asa")
@pytest.mark.usefixtures("dd_environment")
def test_cisco_asa_5525(aggregator):
    """Run the shared ASA assertions against the model-specific 'cisco-asa-5525' profile."""
    assert_cisco_asa(aggregator, "cisco-asa-5525")
def assert_cisco_asa(aggregator, profile):
    """Shared assertion suite for the cisco-asa family of profiles.

    :param aggregator: the aggregator stub fixture to assert against.
    :param profile: profile name to load (e.g. 'cisco-asa' or 'cisco-asa-5525');
        the expected metrics and tags are identical across the family.
    """
    run_profile_check(profile)
    common_tags = common.CHECK_TAGS + [
        'snmp_profile:' + profile,
        'snmp_host:kept',
        'device_vendor:cisco',
    ]
    common.assert_common_metrics(aggregator, common_tags)
    # Global TCP/UDP connection stats.
    for metric in TCP_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    for metric in TCP_GAUGES:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    for metric in UDP_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1
        )
    # Interface metrics, tagged with the interface name.
    if_tags = ['interface:eth0'] + common_tags
    for metric in IF_COUNTS:
        aggregator.assert_metric(
            'snmp.{}'.format(metric), metric_type=aggregator.MONOTONIC_COUNT, tags=if_tags, count=1
        )
    for metric in IF_GAUGES:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=if_tags, count=1)
    for metric in IF_RATES:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=if_tags, count=1)
    for metric in IF_BANDWIDTH_USAGE:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.RATE, tags=if_tags, count=1)
    aggregator.assert_metric('snmp.cieIfResetCount', metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1)
    # Field-replaceable units.
    frus = [3, 4, 5, 7, 16, 17, 24, 25]
    for fru in frus:
        tags = ['fru:{}'.format(fru)] + common_tags
        for metric in FRU_METRICS:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    cpus = [7746]
    for cpu in cpus:
        tags = ['cpu:{}'.format(cpu)] + common_tags
        for metric in CPU_METRICS:
            aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=tags, count=1)
    sensor_tags = ['sensor_id:31', 'sensor_type:9'] + common_tags
    aggregator.assert_metric('snmp.entPhySensorValue', metric_type=aggregator.GAUGE, tags=sensor_tags, count=1)
    stat_tags = [(20, 2), (5, 5)]
    for (svc, stat) in stat_tags:
        aggregator.assert_metric(
            'snmp.cfwConnectionStatValue',
            metric_type=aggregator.GAUGE,
            tags=['stat_type:{}'.format(stat), 'service_type:{}'.format(svc)] + common_tags,
        )
    # Remote-access session and IPsec tunnel stats.
    aggregator.assert_metric('snmp.crasNumDeclinedSessions', metric_type=aggregator.GAUGE, tags=common_tags)
    aggregator.assert_metric('snmp.crasNumSessions', metric_type=aggregator.GAUGE, tags=common_tags)
    aggregator.assert_metric('snmp.crasNumUsers', metric_type=aggregator.GAUGE, tags=common_tags)
    aggregator.assert_metric(
        'snmp.crasNumSetupFailInsufResources', metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags
    )
    aggregator.assert_metric('snmp.cipSecGlobalActiveTunnels', metric_type=aggregator.GAUGE, tags=common_tags)
    aggregator.assert_metric('snmp.cipSecGlobalHcInOctets', metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags)
    aggregator.assert_metric('snmp.cipSecGlobalHcOutOctets', metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags)
    # Environmental monitoring: temperatures, power supply, fans.
    for (index, state) in [(3, 3), (6, 6), (8, 6), (11, 6), (13, 3), (14, 6), (20, 6), (21, 4), (31, 5)]:
        aggregator.assert_metric(
            'snmp.ciscoEnvMonTemperatureStatusValue',
            metric_type=aggregator.GAUGE,
            tags=['temp_state:{}'.format(state), 'temp_index:{}'.format(index)] + common_tags,
        )
    power_supply_tags = ['power_source:1', 'power_status_descr:Jaded driving their their their'] + common_tags
    aggregator.assert_metric('snmp.ciscoEnvMonSupplyState', metric_type=aggregator.GAUGE, tags=power_supply_tags)
    fan_indices = [4, 6, 7, 16, 21, 22, 25, 27]
    for index in fan_indices:
        tags = ['fan_status_index:{}'.format(index)] + common_tags
        aggregator.assert_metric('snmp.ciscoEnvMonFanState', metric_type=aggregator.GAUGE, tags=tags)
    aggregator.assert_metric('snmp.cswStackPortOperStatus', metric_type=aggregator.GAUGE)
    aggregator.assert_metric(
        'snmp.cswSwitchState', metric_type=aggregator.GAUGE, tags=['mac_addr:0xffffffffffff'] + common_tags
    )
    frus = [2, 7, 8, 21, 26, 27, 30, 31]
    for fru in frus:
        # Build the tags once per FRU (the original rebuilt them inline in the
        # assert and left an unused local `tags` behind).
        tags = ['fru:{}'.format(fru)] + common_tags
        aggregator.assert_metric('snmp.cefcFanTrayOperStatus', metric_type=aggregator.GAUGE, tags=tags)
    # Memory pool gauges share a single pool tag; hoist it out of the loop.
    mem_tags = ['mem_pool_name:test_pool'] + common_tags
    for metric in MEMORY_METRICS:
        aggregator.assert_metric('snmp.{}'.format(metric), metric_type=aggregator.GAUGE, tags=mem_tags)
    for conn in [1, 2, 5]:
        conn_tags = ['connection_type:{}'.format(conn)] + common_tags
        aggregator.assert_metric('snmp.cfwConnectionStatCount', metric_type=aggregator.RATE, tags=conn_tags)
    hardware_tags = [(3, 'Secondary unit'), (5, 'Primary unit'), (6, 'Failover LAN Interface')]
    for (htype, hdesc) in hardware_tags:
        aggregator.assert_metric(
            'snmp.cfwHardwareStatusValue',
            metric_type=aggregator.GAUGE,
            tags=['hardware_type:{}'.format(htype), 'hardware_desc:{}'.format(hdesc)] + common_tags,
        )
    for switch in [4684, 4850, 8851, 9997, 15228, 16580, 24389, 30813, 36264]:
        aggregator.assert_metric(
            'snmp.cvsChassisUpTime',
            metric_type=aggregator.GAUGE,
            tags=['chassis_switch_id:{}'.format(switch)] + common_tags,
        )
    aggregator.assert_metric('snmp.sysUpTimeInstance', count=1)
    # IP SLA (RTT) monitor rows, as (index, type, state) triples iterated in
    # lockstep instead of indexing parallel lists with range(len(...)).
    rtt_entries = [(1, 22, 3), (7, 21, 1), (10, 17, 6), (13, 6, 4), (15, 20, 6), (18, 8, 1), (20, 16, 6)]
    rtt_gauges = ['rttMonLatestRttOperCompletionTime', 'rttMonLatestRttOperSense', 'rttMonCtrlOperTimeoutOccurred']
    for (rtt_index, rtt_type, rtt_state) in rtt_entries:
        tags = [
            "rtt_index:{}".format(rtt_index),
            "rtt_type:{}".format(rtt_type),
            "rtt_state:{}".format(rtt_state),
        ] + common_tags
        for rtt in rtt_gauges:
            aggregator.assert_metric('snmp.{}'.format(rtt), metric_type=aggregator.GAUGE, tags=tags)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_cisco_csr(aggregator):
    """Check the cisco-csr1000v profile: BGP peer gauges and rates."""
    run_profile_check('cisco-csr1000v')
    common_tags = common.CHECK_TAGS + [
        'snmp_profile:cisco-csr1000v',
        'device_vendor:cisco',
    ]
    common.assert_common_metrics(aggregator, common_tags)
    # All peer metrics carry the neighbor tag.
    peer_tags = ['neighbor:244.12.239.177'] + common_tags
    for gauge in PEER_GAUGES:
        aggregator.assert_metric('snmp.' + gauge, metric_type=aggregator.GAUGE, tags=peer_tags)
    for rate in PEER_RATES:
        aggregator.assert_metric('snmp.' + rate, metric_type=aggregator.RATE, tags=peer_tags)
    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics())
@pytest.mark.usefixtures("dd_environment")
def test_checkpoint_firewall(aggregator):
    """Check the checkpoint-firewall profile: CPU, memory, disks, sensors and firewall counters."""
    run_profile_check('checkpoint-firewall')
    common_tags = common.CHECK_TAGS + [
        'snmp_profile:checkpoint-firewall',
        'device_vendor:checkpoint',
    ]
    common.assert_common_metrics(aggregator, common_tags)
    # Per-core CPU time and usage gauges.
    cpu_metrics = [
        'multiProcUserTime',
        'multiProcSystemTime',
        'multiProcIdleTime',
        'multiProcUsage',
    ]
    for core in [7097, 13039, 13761, 28994, 29751, 33826, 40053, 48847, 61593, 65044]:
        core_tags = ['cpu_core:{}'.format(core)] + common_tags
        for metric in cpu_metrics:
            aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.GAUGE, tags=core_tags)
    aggregator.assert_metric('snmp.procNum', metric_type=aggregator.GAUGE, tags=common_tags)
    # Global memory gauges.
    for metric in ['memTotalReal64', 'memActiveReal64', 'memFreeReal64', 'memTotalVirtual64', 'memActiveVirtual64']:
        aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.GAUGE, tags=common_tags)
    disk_metrics = [
        'multiDiskSize',
        'multiDiskUsed',
        'multiDiskFreeTotalBytes',
        'multiDiskFreeAvailableBytes',
        'multiDiskFreeTotalPercent',
        'multiDiskFreeAvailablePercent',
    ]
    # (sic) 'temperture' matches the metric names the profile actually emits.
    appliance_metrics = [
        'fanSpeedSensorValue',
        'fanSpeedSensorStatus',
        'tempertureSensorValue',
        'tempertureSensorStatus',
    ]
    common_names = ['first', 'second', 'third', 'fourth', 'fifth', 'sixth', 'seventh', 'eighth', 'ninth', 'tenth']
    # Disks and sensors share the same index/name pairing in the fixture data.
    for idx, name in enumerate(common_names):
        disk_tags = ['disk_index:{}'.format(idx), 'disk_name:{}'.format(name)] + common_tags
        for metric in disk_metrics:
            aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.GAUGE, tags=disk_tags)
        sensor_tags = ['sensor_index:{}'.format(idx), 'sensor_name:{}'.format(name)] + common_tags
        for metric in appliance_metrics:
            aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.GAUGE, tags=sensor_tags)
    # Firewall verdict counters and connection gauges.
    for metric in ['fwAccepted', 'fwDropped', 'fwRejected']:
        aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags)
    for metric in ['fwNumConn', 'fwPeakNumConn']:
        aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.GAUGE, tags=common_tags)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_arista(aggregator):
    """Check the arista profile: queue drop counters and entity sensors."""
    run_profile_check('arista')
    common_tags = common.CHECK_TAGS + ['snmp_profile:arista', 'device_vendor:arista']
    common.assert_common_metrics(aggregator, common_tags)
    # Per-queue packet drop counters, tagged by interface and queue index.
    queue_drops = [
        ('snmp.aristaEgressQueuePktsDropped', 13, 10),
        ('snmp.aristaEgressQueuePktsDropped', 28, 22),
        ('snmp.aristaIngressQueuePktsDropped', 7, 25),
        ('snmp.aristaIngressQueuePktsDropped', 8, 24),
    ]
    for metric, interface_index, queue_index in queue_drops:
        aggregator.assert_metric(
            metric,
            metric_type=aggregator.MONOTONIC_COUNT,
            tags=common_tags + ['interface_index:{}'.format(interface_index), 'queue_index:{}'.format(queue_index)],
            count=1,
        )
    # Physical entity sensors: value and operational status per sensor row.
    for (sensor_id, sensor_type) in [(1, 11), (7, 8)]:
        sensor_tags = ['sensor_id:{}'.format(sensor_id), 'sensor_type:{}'.format(sensor_type)] + common_tags
        aggregator.assert_metric('snmp.entPhySensorValue', metric_type=aggregator.GAUGE, tags=sensor_tags, count=1)
        aggregator.assert_metric('snmp.entPhySensorOperStatus', metric_type=aggregator.GAUGE, tags=sensor_tags, count=1)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_aruba(aggregator):
    """Check the aruba profile: chassis health plus OSPF neighbor/interface state."""
    run_profile_check('aruba')
    common_tags = common.CHECK_TAGS + ['snmp_profile:aruba', 'device_vendor:aruba']
    common.assert_common_metrics(aggregator, common_tags)
    # Chassis components: fans, PSUs, processors, memory banks.
    for fan_index in [18, 28]:
        fan_tags = common_tags + ['fan_index:{}'.format(fan_index)]
        aggregator.assert_metric('snmp.sysExtFanStatus', metric_type=aggregator.GAUGE, tags=fan_tags, count=1)
    for psu_index in [1, 17]:
        psu_tags = common_tags + ['powersupply_index:{}'.format(psu_index)]
        aggregator.assert_metric('snmp.sysExtPowerSupplyStatus', metric_type=aggregator.GAUGE, tags=psu_tags, count=1)
    for proc_index in [11, 26]:
        proc_tags = common_tags + ['processor_index:{}'.format(proc_index)]
        aggregator.assert_metric('snmp.sysExtProcessorLoad', metric_type=aggregator.GAUGE, tags=proc_tags, count=1)
    for mem_index in [3, 20]:
        mem_tags = common_tags + ['memory_index:{}'.format(mem_index)]
        for memory_metric in ['sysExtMemorySize', 'sysExtMemoryUsed', 'sysExtMemoryFree']:
            aggregator.assert_metric('snmp.' + memory_metric, metric_type=aggregator.GAUGE, tags=mem_tags, count=1)
    aggregator.assert_metric(
        'snmp.wlsxSysExtPacketLossPercent', metric_type=aggregator.GAUGE, tags=common_tags, count=1
    )
    # OSPF neighbor metrics (single physical neighbor in the fixture data).
    neighbor_metrics = [
        ('ospfNbrEvents', aggregator.RATE),
        ('ospfNbrState', aggregator.GAUGE),
        ('ospfNbrLsRetransQLen', aggregator.GAUGE),
    ]
    for metric, metric_type in neighbor_metrics:
        tags = ['neighbor_ip:192.29.116.26', 'neighbor_id:192.29.66.79'] + common_tags
        aggregator.assert_metric('snmp.' + metric, metric_type=metric_type, tags=tags, count=1)
    # OSPF virtual neighbor metrics (two virtual neighbors).
    virtual_neighbor_metrics = [
        ('ospfVirtNbrState', aggregator.GAUGE),
        ('ospfVirtNbrEvents', aggregator.RATE),
        ('ospfVirtNbrLsRetransQLen', aggregator.GAUGE),
    ]
    for metric, metric_type in virtual_neighbor_metrics:
        for ip, nbr in [('74.210.82.1', '194.154.66.112'), ('122.226.86.1', '184.201.101.140')]:
            tags = ['neighbor_ip:{}'.format(ip), 'neighbor_id:{}'.format(nbr)] + common_tags
            aggregator.assert_metric('snmp.' + metric, metric_type=metric_type, tags=tags, count=1)
    # OSPF interface metrics: physical links then virtual links.
    for metric in ['ospfIfRetransInterval', 'ospfIfState', 'ospfIfLsaCount']:
        for ip, nbr in [('58.115.169.188', '192.29.66.79'), ('18.2.8.29', '118.246.193.247')]:
            tags = ['ospf_ip_addr:{}'.format(ip), 'neighbor_id:{}'.format(nbr)] + common_tags
            aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.GAUGE, tags=tags, count=1)
    for metric in ['ospfVirtIfRetransInterval', 'ospfVirtIfState', 'ospfVirtIfLsaCount']:
        for nbr in ['194.154.66.112', '184.201.101.140']:
            tags = ['neighbor_id:{}'.format(nbr)] + common_tags
            aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.GAUGE, tags=tags, count=1)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_chatsworth(aggregator):
    """Check the chatsworth_pdu profile.

    Covers both the legacy PDU metrics (asserted with the legacy_pdu_* global
    tags) and the cpiPdu* table metrics (PDU scalars, locks, sensors, lines,
    branches and outlets).
    """
    profile = "chatsworth_pdu"
    run_profile_check(profile)
    # Legacy PDU identity tags; part of the common tags on every metric below.
    legacy_global_tags = [
        'legacy_pdu_macaddress:00:0E:D3:AA:CC:EE',
        'legacy_pdu_model:P10-1234-ABC',
        'legacy_pdu_name:legacy-name1',
        'legacy_pdu_version:1.2.3',
    ]
    common_tags = common.CHECK_TAGS + legacy_global_tags + ['snmp_profile:' + profile, 'device_vendor:chatsworth']
    common.assert_common_metrics(aggregator, common_tags)
    # --- Legacy PDU metrics ---
    legacy_pdu_tags = common_tags
    legacy_pdu_gauge_metrics = [
        'snmp.pduRole',
        'snmp.outOfService',
    ]
    legacy_pdu_monotonic_count_metrics = []
    # Three input lines: line1curr..line3curr.
    for line in range(1, 4):
        legacy_pdu_gauge_metrics.append('snmp.line{}curr'.format(line))
    # Two branches, each with temperature/humidity probes, plus xy/yz/zx phase
    # pairs for energy (count) and voltage/power/power-factor/current (gauges).
    for branch in range(1, 3):
        legacy_pdu_gauge_metrics.append('snmp.temperatureProbe{}'.format(branch))
        legacy_pdu_gauge_metrics.append('snmp.humidityProbe{}'.format(branch))
        for xyz in ['xy', 'yz', 'zx']:
            legacy_pdu_monotonic_count_metrics.append('snmp.energy{}{}s'.format(xyz, branch))
            legacy_pdu_gauge_metrics.append('snmp.voltage{}{}'.format(xyz, branch))
            legacy_pdu_gauge_metrics.append('snmp.power{}{}'.format(xyz, branch))
            legacy_pdu_gauge_metrics.append('snmp.powerFact{}{}'.format(xyz, branch))
            legacy_pdu_gauge_metrics.append('snmp.current{}{}'.format(xyz, branch))
    # 24 outlets: per-outlet energy (count) and current (gauge).
    for branch in range(1, 25):
        legacy_pdu_monotonic_count_metrics.append('snmp.receptacleEnergyoutlet{}s'.format(branch))
        legacy_pdu_gauge_metrics.append('snmp.outlet{}Current'.format(branch))
    for metric in legacy_pdu_gauge_metrics:
        aggregator.assert_metric(metric, metric_type=aggregator.GAUGE, tags=legacy_pdu_tags, count=1)
    for metric in legacy_pdu_monotonic_count_metrics:
        aggregator.assert_metric(metric, metric_type=aggregator.MONOTONIC_COUNT, tags=legacy_pdu_tags, count=1)
    # --- cpiPdu* table metrics ---
    # PDU-level scalar gauges, tagged with the PDU identity columns.
    pdu_tags = common_tags + [
        'pdu_cabinetid:cab1',
        'pdu_ipaddress:42.2.210.224',
        'pdu_macaddress:0x00249b3503f6',
        'pdu_model:model1',
        'pdu_name:name1',
        'pdu_version:v1.1',
    ]
    aggregator.assert_metric('snmp.cpiPduNumberBranches', metric_type=aggregator.GAUGE, tags=pdu_tags, count=1)
    aggregator.assert_metric('snmp.cpiPduNumberOutlets', metric_type=aggregator.GAUGE, tags=pdu_tags, count=1)
    aggregator.assert_metric('snmp.cpiPduOutOfService', metric_type=aggregator.GAUGE, tags=pdu_tags, count=1)
    aggregator.assert_metric('snmp.cpiPduUpgrade', metric_type=aggregator.GAUGE, tags=pdu_tags, count=1)
    aggregator.assert_metric('snmp.cpiPduChainRole', metric_type=aggregator.GAUGE, tags=pdu_tags, count=1)
    aggregator.assert_metric('snmp.cpiPduTotalPower', metric_type=aggregator.GAUGE, tags=pdu_tags, count=1)
    # One EAS/door/lock status row per lock id.
    for lock in [1, 2]:
        lock_tags = common_tags + ['lock_id:{}'.format(lock)]
        aggregator.assert_metric('snmp.cpiPduEasStatus', metric_type=aggregator.GAUGE, tags=lock_tags, count=1)
        aggregator.assert_metric('snmp.cpiPduDoorStatus', metric_type=aggregator.GAUGE, tags=lock_tags, count=1)
        aggregator.assert_metric('snmp.cpiPduLockStatus', metric_type=aggregator.GAUGE, tags=lock_tags, count=1)
    # Sensor rows (all sensor_type:1 in the fixture data).
    for (sensor_name, sensor_index) in [('sensor1', 4), ('sensor2', 6)]:
        sensor_tags = common_tags + [
            'sensor_index:{}'.format(sensor_index),
            'sensor_name:{}'.format(sensor_name),
            'sensor_type:1',
        ]
        aggregator.assert_metric('snmp.cpiPduSensorValue', metric_type=aggregator.GAUGE, tags=sensor_tags, count=1)
    # Input line currents.
    for line in [6, 18]:
        line_tags = common_tags + ['line_id:{}'.format(line)]
        aggregator.assert_metric('snmp.cpiPduLineCurrent', metric_type=aggregator.GAUGE, tags=line_tags, count=1)
    # Branch rows for pdu_name:name1 carry the full set of branch metrics.
    for branch in [1, 17]:
        branch_tags = common_tags + ['branch_id:{}'.format(branch), 'pdu_name:name1']
        aggregator.assert_metric('snmp.cpiPduBranchCurrent', metric_type=aggregator.GAUGE, tags=branch_tags, count=1)
        aggregator.assert_metric('snmp.cpiPduBranchMaxCurrent', metric_type=aggregator.GAUGE, tags=branch_tags, count=1)
        aggregator.assert_metric('snmp.cpiPduBranchVoltage', metric_type=aggregator.GAUGE, tags=branch_tags, count=1)
        aggregator.assert_metric('snmp.cpiPduBranchPower', metric_type=aggregator.GAUGE, tags=branch_tags, count=1)
        aggregator.assert_metric(
            'snmp.cpiPduBranchPowerFactor', metric_type=aggregator.GAUGE, tags=branch_tags, count=1
        )
        aggregator.assert_metric('snmp.cpiPduBranchStatus', metric_type=aggregator.GAUGE, tags=branch_tags, count=1)
        aggregator.assert_metric(
            'snmp.cpiPduBranchEnergy', metric_type=aggregator.MONOTONIC_COUNT, tags=branch_tags, count=1
        )
    # For pdu_name:name2, only branch 1's power factor and energy are asserted.
    for branch in [1]:
        branch_tags = common_tags + ['branch_id:{}'.format(branch), 'pdu_name:name2']
        aggregator.assert_metric(
            'snmp.cpiPduBranchPowerFactor', metric_type=aggregator.GAUGE, tags=branch_tags, count=1
        )
        aggregator.assert_metric(
            'snmp.cpiPduBranchEnergy', metric_type=aggregator.MONOTONIC_COUNT, tags=branch_tags, count=1
        )
    # Per-outlet metrics, tagged with outlet id, owning branch and name.
    for (outlet_id, outlet_branch, outlet_name) in [(7, 29, 'outlet1'), (16, 23, 'outlet2')]:
        outlet_tags = common_tags + [
            'outlet_id:{}'.format(outlet_id),
            'outlet_branchid:{}'.format(outlet_branch),
            'outlet_name:{}'.format(outlet_name),
        ]
        aggregator.assert_metric('snmp.cpiPduOutletCurrent', metric_type=aggregator.GAUGE, tags=outlet_tags, count=1)
        aggregator.assert_metric('snmp.cpiPduOutletVoltage', metric_type=aggregator.GAUGE, tags=outlet_tags, count=1)
        aggregator.assert_metric('snmp.cpiPduOutletPower', metric_type=aggregator.GAUGE, tags=outlet_tags, count=1)
        aggregator.assert_metric('snmp.cpiPduOutletStatus', metric_type=aggregator.GAUGE, tags=outlet_tags, count=1)
        aggregator.assert_metric(
            'snmp.cpiPduOutletEnergy', metric_type=aggregator.MONOTONIC_COUNT, tags=outlet_tags, count=1
        )
    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True)
@pytest.mark.usefixtures("dd_environment")
def test_isilon(aggregator):
    """Check the isilon profile: cluster/node health, quotas, protocols, fans and disks."""
    run_profile_check('isilon')
    common_tags = common.CHECK_TAGS + [
        'snmp_profile:isilon',
        'cluster_name:testcluster1',
        'node_name:node1',
        'node_type:1',
        'device_vendor:dell',
    ]
    common.assert_common_metrics(aggregator, common_tags)
    # Quota metrics, tagged by quota id and type.
    quota_ids_types = [
        (422978632, 1),
        (153533730, 5),
        (3299369987, 4),
        (2149993012, 3),
        (1424325378, 1),
        (4245321451, 0),
        (2328145711, 1),
        (1198032230, 4),
        (1232918362, 1),
        (1383990869, 1),
    ]
    for metric in ['quotaHardThreshold', 'quotaSoftThreshold', 'quotaUsage', 'quotaAdvisoryThreshold']:
        for qid, qtype in quota_ids_types:
            quota_tags = ['quota_id:{}'.format(qid), 'quota_type:{}'.format(qtype)] + common_tags
            aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.RATE, tags=quota_tags, count=1)
    # Per-protocol operation and latency stats.
    protocol_metrics = [
        'protocolOpsPerSecond',
        'latencyMin',
        'latencyMax',
        'latencyAverage',
    ]
    for metric in protocol_metrics:
        for num in range(1, 3):
            proto_tags = ['protocol_name:testprotocol{}'.format(num)] + common_tags
            aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.GAUGE, tags=proto_tags, count=1)
    # Cluster- and node-level health plus throughput rates.
    aggregator.assert_metric('snmp.clusterHealth', metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    for metric in ['clusterIfsInBytes', 'clusterIfsOutBytes']:
        aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.RATE, tags=common_tags, count=1)
    aggregator.assert_metric('snmp.nodeHealth', metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    for metric in ['nodeIfsOutBytes', 'nodeIfsInBytes']:
        aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.RATE, tags=common_tags, count=1)
    # Fan speeds, one row per fan number.
    for fan_number in [4, 6, 10, 11, 14, 21, 22, 23, 25, 30]:
        fan_tags = ['fan_name:testfan', 'fan_number:{}'.format(fan_number)] + common_tags
        aggregator.assert_metric('snmp.fanSpeed', metric_type=aggregator.GAUGE, tags=fan_tags, count=1)
    # Disk sizes, tagged by status and bay.
    for status, bay in [('SMARTFAIL', 1), ('HEALTHY', 2), ('DEAD', 3)]:
        disk_tags = common_tags + ['disk_status:{}'.format(status), 'disk_bay:{}'.format(bay)]
        aggregator.assert_metric('snmp.diskSizeBytes', metric_type=aggregator.RATE, tags=disk_tags)
    aggregator.assert_metric('snmp.ifsUsedBytes', metric_type=aggregator.RATE, tags=common_tags, count=1)
    aggregator.assert_metric('snmp.ifsTotalBytes', metric_type=aggregator.RATE, tags=common_tags, count=1)
@pytest.mark.usefixtures("dd_environment")
def test_apc_ups(aggregator):
    """Check the apc_ups profile: battery/input/output gauges and output-state flags."""
    run_profile_check('apc_ups')
    profile_tags = [
        'snmp_profile:apc_ups',
        'model:APC Smart-UPS 600',
        'firmware_version:2.0.3-test',
        'serial_num:test_serial',
        'ups_name:testIdentName',
        'device_vendor:apc',
    ]
    tags = common.CHECK_TAGS + profile_tags
    common.assert_common_metrics(aggregator, tags)
    # Scalar UPS gauges.
    for metric in [
        'upsAdvBatteryNumOfBadBattPacks',
        'upsAdvBatteryReplaceIndicator',
        'upsAdvBatteryRunTimeRemaining',
        'upsAdvBatteryTemperature',
        'upsAdvBatteryCapacity',
        'upsHighPrecInputFrequency',
        'upsHighPrecInputLineVoltage',
        'upsHighPrecOutputCurrent',
        'upsAdvInputLineFailCause',
        'upsAdvOutputLoad',
        'upsBasicBatteryTimeOnBattery',
        'upsAdvTestDiagnosticsResults',
    ]:
        aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.GAUGE, tags=tags, count=1)
    aggregator.assert_metric(
        'snmp.upsOutletGroupStatusGroupState',
        metric_type=aggregator.GAUGE,
        tags=['outlet_group_name:test_outlet'] + tags,
    )
    # One boolean gauge per decoded output-state flag, with its expected value.
    output_state_flags = [
        ('AVRTrimActive', 1),
        ('BatteriesDischarged', 1),
        ('LowBatteryOnBattery', 1),
        ('NoBatteriesAttached', 1),
        ('OnLine', 0),
        ('ReplaceBattery', 1),
        ('On', 1),
    ]
    for flag, value in output_state_flags:
        aggregator.assert_metric(
            'snmp.upsBasicStateOutputState.{}'.format(flag), value, metric_type=aggregator.GAUGE, tags=tags, count=1
        )
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_fortinet_fortigate(aggregator):
    """Check the fortinet-fortigate profile: system, processor, virtual-domain and firewall policy metrics."""
    run_profile_check('fortinet-fortigate')
    common_tags = common.CHECK_TAGS + [
        'snmp_profile:fortinet-fortigate',
        'device_vendor:fortinet',
    ]

    def assert_count_with_rate(metric, tags):
        # These metrics are submitted both as a monotonic count and a derived `.rate`.
        aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.MONOTONIC_COUNT, tags=tags, count=1)
        aggregator.assert_metric('snmp.' + metric + '.rate', metric_type=aggregator.RATE, tags=tags, count=1)

    common.assert_common_metrics(aggregator, common_tags)
    # Device-wide gauges.
    for metric in [
        'fgSysCpuUsage',
        'fgSysMemUsage',
        'fgSysMemCapacity',
        'fgSysLowMemUsage',
        'fgSysLowMemCapacity',
        'fgSysDiskUsage',
        'fgSysDiskCapacity',
        'fgSysSesCount',
        'fgSysSesRate1',
        'fgSysSes6Count',
        'fgSysSes6Rate1',
        'fgApHTTPConnections',
        'fgApHTTPMaxConnections',
        'fgVdNumber',
        'fgVdMaxVdoms',
    ]:
        aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    # Per-processor gauges and packet counters.
    processor_tags = common_tags + ['processor_index:12']
    for metric in ['fgProcessorUsage', 'fgProcessorSysUsage']:
        aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.GAUGE, tags=processor_tags, count=1)
    for metric in ['fgProcessorPktRxCount', 'fgProcessorPktTxCount', 'fgProcessorPktDroppedCount']:
        assert_count_with_rate(metric, processor_tags)
    # Per-virtual-domain gauges.
    vd_tags = common_tags + ['virtualdomain_index:4', 'virtualdomain_name:their oxen quaintly']
    for metric in [
        'fgVdEntOpMode',
        'fgVdEntHaState',
        'fgVdEntCpuUsage',
        'fgVdEntMemUsage',
        'fgVdEntSesCount',
        'fgVdEntSesRate',
    ]:
        aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.GAUGE, tags=vd_tags, count=1)
    aggregator.assert_metric('snmp.fgIntfEntVdom', metric_type=aggregator.GAUGE, count=1)
    # IPv4 and IPv6 firewall policy counters.
    for metric in ['fgFwPolPktCount', 'fgFwPolByteCount']:
        assert_count_with_rate(metric, common_tags + ['policy_index:22'])
    for metric in ['fgFwPol6PktCount', 'fgFwPol6ByteCount']:
        assert_count_with_rate(metric, common_tags + ['policy6_index:29'])
    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True)
@pytest.mark.usefixtures("dd_environment")
def test_netapp(aggregator):
    """Check the netapp profile: global stats, SnapVault/SnapMirror, filesystems and interfaces."""
    run_profile_check('netapp')
    common_tags = common.CHECK_TAGS + [
        'snmp_profile:netapp',
        'snmp_host:example-datacenter.company',
        'device_vendor:netapp',
    ]
    common.assert_common_metrics(aggregator, common_tags)
    # Global scalar metrics.
    for metric in ['cfInterconnectStatus', 'miscCacheAge', 'ncHttpActiveCliConns']:
        aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    aggregator.assert_metric('snmp.extcache64Hits', metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags, count=1)
    # SnapVault destinations, tagged by index/destination/state.
    snapvaults = [('5', '/vol/dir1', '5'), ('6', '/vol/dir3', '2'), ('18', '/vol/dir9', '4')]
    for index, destination, state in snapvaults:
        sv_tags = [
            'index:{}'.format(index),
            'destination:{}'.format(destination),
            'state:{}'.format(state),
        ] + common_tags
        aggregator.assert_metric('snmp.svTotalFailures', metric_type=aggregator.MONOTONIC_COUNT, tags=sv_tags, count=1)
    # SnapMirror relationships: lag gauge plus failure counter per row.
    for index, state in [('6', '1'), ('9', '5'), ('29', '1')]:
        sm_tags = ['index:{}'.format(index), 'state:{}'.format(state)] + common_tags
        aggregator.assert_metric('snmp.snapmirrorLag', metric_type=aggregator.GAUGE, tags=sm_tags, count=1)
        aggregator.assert_metric(
            'snmp.snapmirrorTotalFailures', metric_type=aggregator.MONOTONIC_COUNT, tags=sm_tags, count=1
        )
    # Filesystem capacity/inode gauges; index N in this list is paired with /vol/dir(N+1).
    filesystem_indexes = [
        '1022',
        '1023',
        '1024',
        '1025',
        '1026',
        '1027',
        '1028',
        '1029',
        '1032',
        '1033',
    ]
    filesystem_gauges = [
        'dfHighTotalKBytes',
        'dfHighAvailKBytes',
        'dfInodesUsed',
        'dfInodesFree',
    ]
    for metric in filesystem_gauges:
        for pos, index in enumerate(filesystem_indexes):
            fs_tags = ['index:{}'.format(index), 'filesystem:/vol/dir{}'.format(pos + 1)] + common_tags
            aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.GAUGE, tags=fs_tags, count=1)
    # Interface octet counters plus the derived rate.
    interfaces = [
        ('6', 'netgear ifX300 v1'),
        ('7', 'junyper proto12 12.3'),
        ('23', 'malabar yz42 10.2020'),
    ]
    for index, descr in interfaces:
        if_tags = ['index:{}'.format(index), 'interface:{}'.format(descr)] + common_tags
        aggregator.assert_metric(
            'snmp.ifHighInOctets', metric_type=aggregator.MONOTONIC_COUNT, tags=if_tags, count=1
        )
        aggregator.assert_metric('snmp.ifHighInOctets.rate', metric_type=aggregator.RATE, tags=if_tags, count=1)
    aggregator.assert_metric('snmp.sysUpTimeInstance', metric_type=aggregator.GAUGE, tags=common_tags, count=1)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_cisco_catalyst(aggregator):
    """Check the cisco-catalyst profile: sensors, per-interface CIE metrics and FRUs."""
    run_profile_check('cisco-catalyst')
    common_tags = common.CHECK_TAGS + [
        'snmp_host:catalyst-6000.example',
        'snmp_profile:cisco-catalyst',
        'device_vendor:cisco',
    ]
    # Entity sensors (all sensor_type:10 in the fixture data).
    for sensor_id in [5, 9]:
        sensor_tags = ['sensor_id:{}'.format(sensor_id), 'sensor_type:10'] + common_tags
        aggregator.assert_metric('snmp.entSensorValue', metric_type=aggregator.GAUGE, tags=sensor_tags, count=1)
    # CIE interface metrics on a set of Gi1/0/x ports.
    for port in [6, 10, 12, 18, 22, 25, 27]:
        interface_tags = ['interface:Gi1/0/{}'.format(port)] + common_tags
        for metric in CIE_METRICS:
            aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.GAUGE, tags=interface_tags, count=1)
    # Field-replaceable units.
    for fru in [1001, 1010, 2001, 2010]:
        fru_tags = ['fru:{}'.format(fru)] + common_tags
        for metric in FRU_METRICS:
            aggregator.assert_metric('snmp.' + metric, metric_type=aggregator.GAUGE, tags=fru_tags, count=1)
    aggregator.assert_metric('snmp.sysUpTimeInstance', count=1)
    aggregator.assert_metric('snmp.devices_monitored', count=1)
    aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures("dd_environment")
def test_juniper_ex(aggregator):
    """Profile assertions for Juniper EX switches."""
    run_profile_check('juniper-ex')
    common_tags = common.CHECK_TAGS + [
        'snmp_profile:juniper-ex',
        'device_vendor:juniper-networks',
    ]
    for check in (
        _check_juniper_virtual_chassis,
        _check_juniper_dcu,
        _check_juniper_cos,
        _check_juniper_firewall,
    ):
        check(aggregator, common_tags)
    aggregator.assert_metric('snmp.devices_monitored', count=1)
    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True)
@pytest.mark.usefixtures("dd_environment")
def test_juniper_mx(aggregator):
    """Profile assertions for Juniper MX routers."""
    run_profile_check('juniper-mx')
    common_tags = common.CHECK_TAGS + [
        'snmp_profile:juniper-mx',
        'device_vendor:juniper-networks',
    ]
    for check in (_check_juniper_virtual_chassis, _check_juniper_firewall):
        check(aggregator, common_tags)
    aggregator.assert_metric('snmp.devices_monitored', count=1)
    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True)
@pytest.mark.usefixtures("dd_environment")
def test_juniper_srx(aggregator):
    """Profile assertions for Juniper SRX firewalls."""
    run_profile_check('juniper-srx')
    common_tags = common.CHECK_TAGS + [
        'snmp_profile:juniper-srx',
        'device_vendor:juniper-networks',
    ]
    for check in (_check_juniper_userfirewall, _check_juniper_dcu, _check_juniper_scu):
        check(aggregator, common_tags)
    aggregator.assert_metric('snmp.devices_monitored', count=1)
    aggregator.assert_all_metrics_covered()
    aggregator.assert_metrics_using_metadata(get_metadata_metrics(), check_submission_type=True)
def _check_juniper_scu(aggregator, common_tags):
    """Assert the source-class-usage (SCU) counters shared by Juniper profiles."""
    scu_tag_sets = [
        ['address_family:1', 'interface:kept but'],
        ['address_family:1', 'interface:quaintly driving oxen their zombies oxen acted acted'],
        ['address_family:1', 'interface:but forward kept but their driving oxen quaintly acted'],
    ]
    for scu_metric in SCU_COUNTS:
        for extra_tags in scu_tag_sets:
            aggregator.assert_metric(
                'snmp.' + scu_metric, metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags + extra_tags, count=1
            )
def _check_juniper_userfirewall(aggregator, common_tags):
    """Assert the user-firewall (LDAP) counters shared by Juniper profiles."""
    ldap_tag_sets = [
        ['ldap_domain_name:Mycroft Holmes', 'ldap_host:brother'],
        ['ldap_domain_name:Jim Moriarty', 'ldap_host:enemy'],
    ]
    for uf_metric in USER_FIREWALL:
        for extra_tags in ldap_tag_sets:
            aggregator.assert_metric(
                'snmp.' + uf_metric, metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags + extra_tags, count=1
            )
def _check_juniper_dcu(aggregator, common_tags):
    """Assert the DCU count metrics for every expected destination-class/interface row."""
    destination_tag_sets = [
        [
            'address_family:1',
            'destination_class_name:their',
            'interface:quaintly driving oxen their zombies oxen acted acted',
        ],
        [
            'address_family:1',
            'destination_class_name:acted but forward acted zombies forward',
            'interface:but forward kept but their driving oxen quaintly acted',
        ],
        [
            'address_family:2',
            'destination_class_name:oxen Jaded oxen Jaded forward kept quaintly',
            'interface:kept but',
        ],
    ]
    for dcu_metric in DCU_COUNTS:
        metric_name = 'snmp.{}'.format(dcu_metric)
        for tag_set in destination_tag_sets:
            aggregator.assert_metric(
                metric_name, metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags + tag_set, count=1
            )
def _check_juniper_firewall(aggregator, common_tags):
    """Assert the firewall-filter counter metrics for every expected counter row."""
    counter_tag_sets = [
        [
            'counter_name:Jaded oxen kept their driving but kept',
            'counter_type:4',
            'firewall_filter_name:their driving quaintly but Jaded oxen',
        ],
        [
            'counter_name:but but but their their their kept kept forward',
            'counter_type:4',
            'firewall_filter_name:driving kept acted Jaded zombies kept acted',
        ],
    ]
    for firewall_metric in FIREWALL_COUNTS:
        metric_name = 'snmp.{}'.format(firewall_metric)
        for tag_set in counter_tag_sets:
            aggregator.assert_metric(
                metric_name, metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags + tag_set, count=1
            )
def _check_juniper_virtual_chassis(aggregator, common_tags):
    """Assert virtual-chassis port metrics: monotonic counts and gauge rates per port."""
    port_tag_sets = [
        ['port_name:but driving but'],
        ['port_name:Jaded forward but oxen quaintly their their'],
        ['port_name:forward forward driving driving Jaded Jaded'],
    ]
    # Count-style metrics are submitted as monotonic counts...
    for chassis_metric in VIRTUAL_CHASSIS_COUNTS:
        metric_name = 'snmp.{}'.format(chassis_metric)
        for tag_set in port_tag_sets:
            aggregator.assert_metric(
                metric_name, metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags + tag_set, count=1
            )
    # ...while rate-style metrics surface as gauges.
    for chassis_metric in VIRTUAL_CHASSIS_RATES:
        metric_name = 'snmp.{}'.format(chassis_metric)
        for tag_set in port_tag_sets:
            aggregator.assert_metric(
                metric_name, metric_type=aggregator.GAUGE, tags=common_tags + tag_set, count=1
            )
def _check_juniper_cos(aggregator, common_tags):
    """Assert the CoS queue metrics (counts and rates) per interface/queue pair."""
    queue_tag_sets = [
        ['interface:acted oxen oxen forward quaintly kept zombies but oxen', 'queue_number:25'],
        ['interface:acted kept quaintly acted oxen kept', 'queue_number:50'],
        ['interface:their', 'queue_number:15'],
    ]
    # Count-style CoS metrics are monotonic counts...
    for cos_metric in COS_COUNTS:
        metric_name = 'snmp.{}'.format(cos_metric)
        for tag_set in queue_tag_sets:
            aggregator.assert_metric(
                metric_name, metric_type=aggregator.MONOTONIC_COUNT, tags=common_tags + tag_set, count=1
            )
    # ...while rate-style CoS metrics surface as gauges.
    for cos_metric in COS_RATES:
        metric_name = 'snmp.{}'.format(cos_metric)
        for tag_set in queue_tag_sets:
            aggregator.assert_metric(
                metric_name, metric_type=aggregator.GAUGE, tags=common_tags + tag_set, count=1
            )
| true | true |
f724c766689f3310fcf1ff1658220beb097c094a | 30,199 | py | Python | Ingredient_Extractor/with_unknown_words_concideration/with_various_accuracies_on_first_layer/mean_values_1.py | ziko1305/Hidden-Markov-Based-Mathematical-Model | 0ad906e6c4f99ad91d4047aed78df49399447633 | [
"MIT"
] | null | null | null | Ingredient_Extractor/with_unknown_words_concideration/with_various_accuracies_on_first_layer/mean_values_1.py | ziko1305/Hidden-Markov-Based-Mathematical-Model | 0ad906e6c4f99ad91d4047aed78df49399447633 | [
"MIT"
] | null | null | null | Ingredient_Extractor/with_unknown_words_concideration/with_various_accuracies_on_first_layer/mean_values_1.py | ziko1305/Hidden-Markov-Based-Mathematical-Model | 0ad906e6c4f99ad91d4047aed78df49399447633 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Thu Aug 20 09:49:40 2020
@author: Mehdi
"""
import numpy as np
# Per-tag averages across the ten cross-validation runs (table_11 .. table_110).
# Each run table is a DataFrame indexed by tag with six columns, positionally:
#   0: accuracy, 1: f1_score, 2: accuracy for unknown words,
#   3: number of unknown words, 4: accuracy for known words, 5: number of known words
# NOTE(review): positional [0]/[1] is assumed equivalent to the original
# `.accuracy`/`.f1_score` attribute access, i.e. the columns are in the order
# shown above (as in the table_111 construction elsewhere in this file) — confirm.
# np.nanmean skips runs where a tag never occurred (NaN entries).
_run_tables = [table_11, table_12, table_13, table_14, table_15,
               table_16, table_17, table_18, table_19, table_110]
_tag_order = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', '.']

# Flat list of 14 tags x 6 columns = 84 means, in (tag-major, column-minor) order.
_flat_means = [
    np.nanmean([_tbl.loc[_tag][_col] for _tbl in _run_tables])
    for _tag in _tag_order
    for _col in range(6)
]

# Keep the historical a1 .. a84 names so downstream code keeps working.
(a1, a2, a3, a4, a5, a6,
 a7, a8, a9, a10, a11, a12,
 a13, a14, a15, a16, a17, a18,
 a19, a20, a21, a22, a23, a24,
 a25, a26, a27, a28, a29, a30,
 a31, a32, a33, a34, a35, a36,
 a37, a38, a39, a40, a41, a42,
 a43, a44, a45, a46, a47, a48,
 a49, a50, a51, a52, a53, a54,
 a55, a56, a57, a58, a59, a60,
 a61, a62, a63, a64, a65, a66,
 a67, a68, a69, a70, a71, a72,
 a73, a74, a75, a76, a77, a78,
 a79, a80, a81, a82, a83, a84) = _flat_means
# Assemble the summary matrix: one row of six values per tag, in the fixed
# order A..M then '.'.  The two word-count columns (indices 3 and 5) are
# rounded to integers; the accuracy/f1 columns stay as floats.
_all_means = [a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12,
              a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23, a24,
              a25, a26, a27, a28, a29, a30, a31, a32, a33, a34, a35, a36,
              a37, a38, a39, a40, a41, a42, a43, a44, a45, a46, a47, a48,
              a49, a50, a51, a52, a53, a54, a55, a56, a57, a58, a59, a60,
              a61, a62, a63, a64, a65, a66, a67, a68, a69, a70, a71, a72,
              a73, a74, a75, a76, a77, a78, a79, a80, a81, a82, a83, a84]
A = [
    [_row[0], _row[1], _row[2], round(_row[3]), _row[4], round(_row[5])]
    for _row in (_all_means[_i:_i + 6] for _i in range(0, len(_all_means), 6))
]
# Overall (tag-independent) averages of the six summary values across the
# ten runs; v1 .. v10 each hold one run's six-value summary.
_overall_rows = [v1, v2, v3, v4, v5, v6, v7, v8, v9, v10]
vv1, vv2, vv3, vv4, vv5, vv6 = (
    np.mean([_row[_j] for _row in _overall_rows]) for _j in range(6)
)
# Mean-over-runs summary table: one row per tag, six metric columns.
_summary_columns = ['accuracy', 'f1_score', 'accuracy for unknown words',
                    'number of unknown words', 'accuracy for known words',
                    'number of known words']
_summary_index = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', '.']
table_111 = pd.DataFrame(A, columns=_summary_columns, index=_summary_index)
#table_10= pd.DataFrame(A,
#columns=['accuracy', 'f1_score', 'accuracy for unknown words',
# 'number of unknown words','accuracy for known words','number of known words']
#,index=[list(tag2idx.keys())[0], list(tag2idx.keys())[1], list(tag2idx.keys())[2] , list(tag2idx.keys())[3]
#, list(tag2idx.keys())[4] , list(tag2idx.keys())[5],list(tag2idx.keys())[6],list(tag2idx.keys())[7]
#,list(tag2idx.keys())[8],list(tag2idx.keys())[9],list(tag2idx.keys())[10],list(tag2idx.keys())[11],
#list(tag2idx.keys())[12],list(tag2idx.keys())[13]])
# Flatten table_111 (plus the overall vv* averages) into the value list that
# the PythonTeX report consumes.  For every tag the emitted order is:
#   accuracy%, f1%, unknown-word accuracy%, known-word accuracy%,
#   unknown-word count, known-word count
# Percentages are scaled by 100 and truncated to two decimals via formatting;
# the word counts are rounded to integers.  NaN means survive as float('nan')
# and are mapped to 'NULL' further down.
str_pythontex = []
for _tag in table_111.index:
    _row = list(table_111.loc[_tag])
    str_pythontex += [
        float("{0:.2f}".format(_row[0] * 100)),
        float("{0:.2f}".format(_row[1] * 100)),
        float("{0:.2f}".format(_row[2] * 100)),
        float("{0:.2f}".format(_row[4] * 100)),
        round(_row[3]),
        round(_row[5]),
    ]
# Overall averages: vv1..vv4 and vv6 as 2-decimal floats, vv5 (a count) rounded.
str_pythontex += [float("{0:.2f}".format(_v)) for _v in (vv1, vv2, vv3, vv4)]
str_pythontex.append(round(vv5))
str_pythontex.append(float("{0:.2f}".format(vv6)))
# Render each value as a string for the report, substituting 'NULL' for tags
# whose mean is NaN (i.e. the tag never occurred in any run).
L = ['NULL' if math.isnan(x) else str(x) for x in str_pythontex]
# Append the LaTeX percent sign to every percentage entry.  Entries come in
# groups of six (see str_pythontex): positions 1-4 of each group are
# percentages, positions 5 and 6 are raw word counts and stay unit-less.
#
# Bug fix: the old loop reset its position counter only in the final `else`
# branch, so a 'NULL' landing on position 6 skipped the reset and shifted
# every later group — counts gained a stray " \%" and percentages lost
# theirs.  Deriving the position from the index keeps alignment unconditional.
L1 = []
for _idx, _entry in enumerate(L):
    _pos = _idx % 6 + 1  # 1-based position inside the current 6-value group
    if _entry != "NULL" and _pos not in (5, 6):
        L1.append(_entry + " \%")
    else:
        L1.append(_entry)
# The very last entry (vv6, an overall percentage) sits on position 6 but is
# still a percentage, so tag it explicitly — same as the original post-step.
L1[-1] = L1[-1] + " \%"
| 61.008081 | 130 | 0.594324 |
import numpy as np
# The ten per-fold result tables, each indexed by output letter with the
# column layout: 0=accuracy, 1=f1_score, 2=accuracy for unknown words,
# 3=number of unknown words, 4=accuracy for known words,
# 5=number of known words.
_fold_tables = [table_11, table_12, table_13, table_14, table_15,
                table_16, table_17, table_18, table_19, table_110]
_row_labels = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', '.']
# Cross-fold nan-mean of every (letter, column) cell in letter-major order.
# This replaces 84 copy-pasted np.nanmean statements; list(row)[col] reads
# the same positional cells the original accessed via .accuracy/.f1_score
# and row[2]..row[5].
_letter_col_means = [np.nanmean([list(t.loc[lbl])[col] for t in _fold_tables])
                     for lbl in _row_labels for col in range(6)]
# Keep the individual aNN names: the surrounding pythontex document may
# reference them directly.
(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12,
 a13, a14, a15, a16, a17, a18, a19, a20, a21, a22, a23, a24,
 a25, a26, a27, a28, a29, a30, a31, a32, a33, a34, a35, a36,
 a37, a38, a39, a40, a41, a42, a43, a44, a45, a46, a47, a48,
 a49, a50, a51, a52, a53, a54, a55, a56, a57, a58, a59, a60,
 a61, a62, a63, a64, a65, a66, a67, a68, a69, a70, a71, a72,
 a73, a74, a75, a76, a77, a78, a79, a80, a81, a82, a83, a84) = _letter_col_means
# One six-column row per letter: percentage columns stay floats, the two
# word-count columns (positions 3 and 5) are rounded to whole numbers.
_letter_stats = [
    (a1, a2, a3, a4, a5, a6), (a7, a8, a9, a10, a11, a12),
    (a13, a14, a15, a16, a17, a18), (a19, a20, a21, a22, a23, a24),
    (a25, a26, a27, a28, a29, a30), (a31, a32, a33, a34, a35, a36),
    (a37, a38, a39, a40, a41, a42), (a43, a44, a45, a46, a47, a48),
    (a49, a50, a51, a52, a53, a54), (a55, a56, a57, a58, a59, a60),
    (a61, a62, a63, a64, a65, a66), (a67, a68, a69, a70, a71, a72),
    (a73, a74, a75, a76, a77, a78), (a79, a80, a81, a82, a83, a84),
]
A = [[acc, f1, unk_acc, round(unk_cnt), known_acc, round(known_cnt)]
     for acc, f1, unk_acc, unk_cnt, known_acc, known_cnt in _letter_stats]
# Overall per-column averages across the ten fold summaries v1..v10.
# Note: plain np.mean here (no NaN filtering), matching the original.
_folds = (v1, v2, v3, v4, v5, v6, v7, v8, v9, v10)
vv1, vv2, vv3, vv4, vv5, vv6 = (np.mean([fold[col] for fold in _folds])
                                for col in range(6))
# Cross-fold summary table: one row per output letter (plus '.'), with the
# same column layout as the per-fold tables.
_summary_columns = ['accuracy', 'f1_score', 'accuracy for unknown words',
                    'number of unknown words', 'accuracy for known words',
                    'number of known words']
_summary_index = ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', '.']
table_111 = pd.DataFrame(A, columns=_summary_columns, index=_summary_index)
# Flatten table_111 into the flat value list consumed by the pythontex/
# LaTeX layer. Per letter, in order: accuracy%, f1%, unknown-word-acc%,
# known-word-acc% (two decimals each), then the two raw word counts.
# A trailing group carries the overall scores vv1..vv6 (already in
# percent, so not multiplied by 100). This replaces 14 copy-pasted
# 6-value groups with one loop.
str_pythontex = []
for _lbl in ['A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', '.']:
    _row = list(table_111.loc[_lbl])
    str_pythontex.extend([
        float("{0:.2f}".format(_row[0] * 100)),
        float("{0:.2f}".format(_row[1] * 100)),
        float("{0:.2f}".format(_row[2] * 100)),
        float("{0:.2f}".format(_row[4] * 100)),
        round(_row[3]),
        round(_row[5]),
    ])
str_pythontex.extend([
    float("{0:.2f}".format(vv1)),
    float("{0:.2f}".format(vv2)),
    float("{0:.2f}".format(vv3)),
    float("{0:.2f}".format(vv4)),
    round(vv5),
    float("{0:.2f}".format(vv6)),
])
# Stringify every value for the LaTeX layer; NaNs render as 'NULL'.
L = ['NULL' if math.isnan(x) else str(x) for x in str_pythontex]
# Append " \%" to the four percentage entries of every 6-value group.
# Entries 5 and 6 of each group are raw word counts and stay bare.
# BUG FIX: the original only reset the group counter when the 6th entry
# was non-NULL, so a NULL count desynchronised the percent formatting of
# every following group; the counter now resets after each full group.
L1 = []
i = 0
for x in L:
    i = i + 1
    if i != 5 and i != 6 and x != "NULL":
        L1.append(x + " \%")
    else:
        L1.append(x)
    if i == 6:
        i = 0
# The final entry (overall vv6, a percentage) still needs its unit.
L1[-1] = L1[-1] + " \%"
| true | true |
f724c79b6742776adde045c80e5e517302744145 | 2,786 | py | Python | scripts/dct/data/GpioData.py | ABM-Community-Ports/droidboot_device_planet-cosmocom | 4e157f7f3def69cc47e2c5c8fec5346feaea2a8c | [
"MIT"
] | 10 | 2020-07-17T14:51:36.000Z | 2022-03-12T03:35:42.000Z | scripts/dct/data/GpioData.py | ABM-Community-Ports/droidboot_device_planet-cosmocom | 4e157f7f3def69cc47e2c5c8fec5346feaea2a8c | [
"MIT"
] | 6 | 2020-07-23T19:33:25.000Z | 2021-02-23T18:21:59.000Z | scripts/dct/data/GpioData.py | ABM-Community-Ports/droidboot_device_planet-cosmocom | 4e157f7f3def69cc47e2c5c8fec5346feaea2a8c | [
"MIT"
] | 4 | 2020-11-12T03:07:39.000Z | 2022-03-23T19:30:20.000Z | #! /usr/bin/python
# -*- coding: utf-8 -*-
class GpioData:
    """Configuration record for one GPIO pin as parsed from a DCT file.

    Holds the pin's default function mode, direction, pull, drive
    current, Schmitt-trigger and input-enable settings, plus class-level
    lookup tables shared by all pins.
    """

    # Shared (class-level) bookkeeping and lookup tables.
    _count = 0      # number of pins processed so far
    _modNum = 8     # number of selectable function modes per pin
    _specMap = {}
    _freqMap = {}
    _mapList = []
    _modeMap = {}   # pin key -> list of mode names (see get_modeName)
    _smtMap = {}
    _map_table = {}  # EINT mapping table (see set_eint_map_table)

    def __init__(self):
        self.__defMode = 0          # default function-mode index
        self.__eintMode = False     # pin used as external interrupt?
        self.__modeVec = ['0'] * 8  # one flag per function mode
        self.__inPullEn = True      # input pull resistor enabled
        self.__inPullSelHigh = False  # pull select high (presumably pull-up) — TODO confirm
        self.__defDirInt = 0        # numeric mirror of __defDir; unused, kept for compatibility
        self.__defDir = 'IN'        # default direction: 'IN' or 'OUT'
        self.__inEn = True          # input path enabled
        self.__outEn = False        # output driver enabled
        self.__outHigh = False      # default output level when driving
        self.__varNames = []        # variable names associated with this pin (set externally)
        self.__smtNum = -1          # Schmitt-trigger group number (-1 = none assigned)
        self.__smtEn = False        # Schmitt trigger enabled
        self.__iesEn = True         # input-enable (IES) bit
        self.__drvCur = ""          # drive-current setting, as parsed (string)

    # --- default function mode ---------------------------------------
    def get_defMode(self):
        return self.__defMode

    def set_defMode(self, mode):
        self.__defMode = mode

    # --- external-interrupt mode -------------------------------------
    def get_eintMode(self):
        return self.__eintMode

    def set_eintMode(self, flag):
        self.__eintMode = flag

    # --- per-mode flag vector ----------------------------------------
    def get_modeVec(self):
        return self.__modeVec

    def set_modeVec(self, vec):
        self.__modeVec = vec

    # --- input pull ---------------------------------------------------
    def get_inPullEn(self):
        return self.__inPullEn

    def set_inpullEn(self, flag):
        self.__inPullEn = flag

    def get_inPullSelHigh(self):
        return self.__inPullSelHigh

    def set_inpullSelHigh(self, flag):
        self.__inPullSelHigh = flag

    # --- direction ----------------------------------------------------
    def get_defDir(self):
        return self.__defDir

    def set_defDir(self, dir):
        self.__defDir = dir

    # --- input/output enables and default level ----------------------
    def get_inEn(self):
        return self.__inEn

    def set_inEn(self, flag):
        self.__inEn = flag

    def get_outEn(self):
        return self.__outEn

    def set_outEn(self, flag):
        self.__outEn = flag

    def get_outHigh(self):
        return self.__outHigh

    def set_outHigh(self, outHigh):
        self.__outHigh = outHigh

    # --- associated variable names -----------------------------------
    def get_varNames(self):
        return self.__varNames

    def set_varNames(self, names):
        self.__varNames = names

    # --- Schmitt trigger / IES / drive current -----------------------
    def set_smtEn(self, flag):
        self.__smtEn = flag

    def get_smtEn(self):
        return self.__smtEn

    def get_iesEn(self):
        return self.__iesEn

    def set_iesEn(self, flag):
        self.__iesEn = flag

    def set_drvCur(self, val):
        self.__drvCur = val

    def get_drvCur(self):
        return self.__drvCur

    def set_smtNum(self, num):
        self.__smtNum = num

    def get_smtNum(self):
        return self.__smtNum

    def ge_defDirInt(self):
        """Return the direction as an int: 0 for 'IN', 1 otherwise.

        NOTE(review): method name looks like a typo for get_defDirInt;
        kept unchanged for backward compatibility with callers.
        """
        return 0 if self.__defDir == 'IN' else 1

    @staticmethod
    def set_eint_map_table(map_table):
        """Install the shared EINT mapping table for all pins."""
        GpioData._map_table = map_table

    @staticmethod
    def get_modeName(key, idx):
        """Return mode name *idx* for pin *key*, or None if key is unknown."""
        if key in GpioData._modeMap:  # idiom: membership test needs no .keys()
            return GpioData._modeMap[key][idx]
        return None
| 21.106061 | 65 | 0.585068 |
class GpioData:
    """One GPIO pin's settings (mode, direction, pull, Schmitt trigger,
    IES, drive strength) plus lookup tables shared across every pin."""

    # State shared by all instances.
    _count = 0
    _modNum = 8
    _specMap = {}
    _freqMap = {}
    _mapList = []
    _modeMap = {}
    _smtMap = {}
    _map_table = {}

    def __init__(self):
        # Defaults describe a plain input pin: pull enabled, driver off.
        self.__defMode = 0
        self.__eintMode = False
        self.__modeVec = list('00000000')
        self.__inPullEn = True
        self.__inPullSelHigh = False
        self.__defDirInt = 0
        self.__defDir = 'IN'
        self.__inEn = True
        self.__outEn = False
        self.__outHigh = False
        self.__varNames = []
        self.__smtNum = -1
        self.__smtEn = False
        self.__iesEn = True
        self.__drvCur = ""

    # Default function mode.
    def set_defMode(self, mode):
        self.__defMode = mode

    def get_defMode(self):
        return self.__defMode

    # External-interrupt usage flag.
    def set_eintMode(self, flag):
        self.__eintMode = flag

    def get_eintMode(self):
        return self.__eintMode

    # Per-mode flag vector.
    def set_modeVec(self, vec):
        self.__modeVec = vec

    def get_modeVec(self):
        return self.__modeVec

    # Input pull enable / select.
    def set_inpullEn(self, flag):
        self.__inPullEn = flag

    def get_inPullEn(self):
        return self.__inPullEn

    def set_inpullSelHigh(self, flag):
        self.__inPullSelHigh = flag

    def get_inPullSelHigh(self):
        return self.__inPullSelHigh

    # Default direction ('IN' / 'OUT').
    def set_defDir(self, dir):
        self.__defDir = dir

    def get_defDir(self):
        return self.__defDir

    # Input / output enables and default output level.
    def set_inEn(self, flag):
        self.__inEn = flag

    def get_inEn(self):
        return self.__inEn

    def set_outEn(self, flag):
        self.__outEn = flag

    def get_outEn(self):
        return self.__outEn

    def set_outHigh(self, outHigh):
        self.__outHigh = outHigh

    def get_outHigh(self):
        return self.__outHigh

    # Variable names bound to this pin.
    def set_varNames(self, names):
        self.__varNames = names

    def get_varNames(self):
        return self.__varNames

    # Schmitt trigger, IES and drive current.
    def set_smtEn(self, flag):
        self.__smtEn = flag

    def get_smtEn(self):
        return self.__smtEn

    def set_iesEn(self, flag):
        self.__iesEn = flag

    def get_iesEn(self):
        return self.__iesEn

    def set_drvCur(self, val):
        self.__drvCur = val

    def get_drvCur(self):
        return self.__drvCur

    def set_smtNum(self, num):
        self.__smtNum = num

    def get_smtNum(self):
        return self.__smtNum

    def ge_defDirInt(self):
        # 0 means input, anything else is treated as output.
        return 0 if self.__defDir == 'IN' else 1

    @staticmethod
    def set_eint_map_table(map_table):
        # Replace the shared EINT mapping table.
        GpioData._map_table = map_table

    @staticmethod
    def get_modeName(key, idx):
        # Missing keys yield None, mirroring the implicit fall-through.
        try:
            modes = GpioData._modeMap[key]
        except KeyError:
            return None
        return modes[idx]
| true | true |
f724c7bf893a319eb8f171129f7bf5a55e44dd61 | 837 | py | Python | test/fpga/spi_video_ram_test/spi_video_ram_test.py | mbalestrini/hack_soc | 157428ee6856a9e4cee5953b8b3c144b4f57f5ee | [
"Apache-2.0"
] | 1 | 2021-12-18T18:31:53.000Z | 2021-12-18T18:31:53.000Z | test/fpga/spi_video_ram_test/spi_video_ram_test.py | mbalestrini/hack_soc | 157428ee6856a9e4cee5953b8b3c144b4f57f5ee | [
"Apache-2.0"
] | null | null | null | test/fpga/spi_video_ram_test/spi_video_ram_test.py | mbalestrini/hack_soc | 157428ee6856a9e4cee5953b8b3c144b4f57f5ee | [
"Apache-2.0"
] | null | null | null | import cocotb
from cocotb.clock import Clock
from cocotb.triggers import RisingEdge, FallingEdge, ClockCycles
import random
async def reset(dut):
    """Pulse the active-low reset: hold RESET_N low for 20 external-clock
    cycles, release it, then wait one more cycle for it to take effect."""
    # `<=` is cocotb's (legacy) signal-assignment operator on handles.
    dut.RESET_N <= 0
    await ClockCycles(dut.EXTERNAL_CLK, 20)
    dut.RESET_N <= 1
    await ClockCycles(dut.EXTERNAL_CLK, 1)
@cocotb.test()
async def spi_video_ram_test(dut):
    """Smoke test for the SPI video RAM path.

    Starts the external clock, resets the DUT, waits for the start-up
    "writing to VRAM" phase to end, then for 10 iterations forces a fixed
    pattern on the SPI SRAM data lines while the display is active.
    """
    # 10 us period clock on EXTERNAL_CLK, driven in the background.
    clock = Clock(dut.EXTERNAL_CLK, 10, units="us")
    # NOTE(review): cocotb.fork is deprecated in cocotb >= 1.5
    # (cocotb.start_soon is the replacement) -- confirm target version.
    cocotb.fork(clock.start())
    random.seed(0)  # deterministic run (no randomness is used below, though)
    await reset(dut)
    # Poll one clock at a time until the DUT leaves its VRAM-write mode.
    while (dut.writing_to_vram_mode==1):
        await ClockCycles(dut.EXTERNAL_CLK, 1)
    await ClockCycles(dut.EXTERNAL_CLK, 1)
    for i in range(0, 10):
        # Wait for the visible part of a frame to begin.
        await RisingEdge(dut.display_active)
        # Force SIO3..SIO0 = 1,0,0,0 on the quad-SPI data lines.
        dut.SRAM_SIO0 = 0
        dut.SRAM_SIO1 = 0
        dut.SRAM_SIO2 = 0
        dut.SRAM_SIO3 = 1
        await ClockCycles(dut.EXTERNAL_CLK, 2000)
| 19.465116 | 64 | 0.659498 | import cocotb
from cocotb.clock import Clock
from cocotb.triggers import RisingEdge, FallingEdge, ClockCycles
import random
async def reset(dut):
dut.RESET_N <= 0
await ClockCycles(dut.EXTERNAL_CLK, 20)
dut.RESET_N <= 1
await ClockCycles(dut.EXTERNAL_CLK, 1)
@cocotb.test()
async def spi_video_ram_test(dut):
clock = Clock(dut.EXTERNAL_CLK, 10, units="us")
cocotb.fork(clock.start())
random.seed(0)
await reset(dut)
while (dut.writing_to_vram_mode==1):
await ClockCycles(dut.EXTERNAL_CLK, 1)
await ClockCycles(dut.EXTERNAL_CLK, 1)
for i in range(0, 10):
await RisingEdge(dut.display_active)
dut.SRAM_SIO0 = 0
dut.SRAM_SIO1 = 0
dut.SRAM_SIO2 = 0
dut.SRAM_SIO3 = 1
await ClockCycles(dut.EXTERNAL_CLK, 2000)
| true | true |
f724c85914703d848de5492a26e8b70312f96884 | 1,725 | py | Python | notest/ext/generator_mysql.py | GodQ/notest | 530d91782e8ed06493a1313facbed86e06662daf | [
"Apache-2.0"
] | 3 | 2019-05-10T09:36:07.000Z | 2021-04-16T23:40:46.000Z | notest/ext/generator_mysql.py | GodQ/notest | 530d91782e8ed06493a1313facbed86e06662daf | [
"Apache-2.0"
] | null | null | null | notest/ext/generator_mysql.py | GodQ/notest | 530d91782e8ed06493a1313facbed86e06662daf | [
"Apache-2.0"
] | 1 | 2019-05-10T09:43:48.000Z | 2019-05-10T09:43:48.000Z |
import sys
import logging
import json
logger = logging.Logger("mysql_generator")
from notest.lib.mysql_lib import MysqlClient
from notest.lib.utils import templated_var
from notest import generators
'''
- generators:
- task_id: {type: 'number_sequence', start: 10}
- task_name:
type: 'mysql'
query: 'select name from sites'
config: '$mysql_config'
'''
def parse_mysql_query_generator(config, variable_binds):
    """Parse the options of a 'mysql' generator and build the generator.

    Args:
        config: generator options --
            'config': MySQL connection settings (dict, or a JSON string /
                template variable resolving to one),
            'query': SQL statement whose result feeds the generator,
            'return_dict_list': if truthy ('true'/True), yield rows as
                dicts; otherwise unwrap single-column tuples to scalars
                (default False).
        variable_binds: current template variable bindings used to
            resolve templated option values.

    Returns:
        A fixed-sequence generator cycling over the query result.

    Raises:
        ValueError: when the query fails or returns no rows.
    """
    mysql_config = config.get('config')
    sql = config.get('query')
    return_dict_list = config.get('return_dict_list', False)
    mysql_config = templated_var(mysql_config, variable_binds)
    if isinstance(mysql_config, str):
        mysql_config = json.loads(mysql_config)
    # FIX: resolve template variables in the SQL text against the current
    # bindings as well -- previously variable_binds was not passed here,
    # unlike the mysql_config resolution above, so templated queries could
    # not see runtime variables.
    sql = templated_var(sql, variable_binds)
    if isinstance(return_dict_list, str):
        return_dict_list = True if return_dict_list.lower() == 'true' else False
    try:
        with MysqlClient(mysql_config) as cli:
            if return_dict_list is False:
                # Unwrap one-column result tuples so the generator
                # yields scalars instead of 1-tuples.
                res = cli.query(sql)
                r = list()
                for i in res:
                    if isinstance(i, tuple):
                        i = i[0]
                    r.append(i)
            else:
                r = cli.query(sql, return_dict_list=return_dict_list)
            if len(r) == 0:
                raise Exception("No data queried in MySQL by '{}'!".format(sql))
            return generators.factory_fixed_sequence(r)()
    except Exception as e:
        logger.error(str(e))
        raise ValueError("Invalid query: " + sql + " : " + str(e))
raise ValueError("Invalid query: " + sql + " : " + str(e))
GENERATORS = {'mysql': parse_mysql_query_generator}
| 31.363636 | 80 | 0.607536 |
import sys
import logging
import json
logger = logging.Logger("mysql_generator")
from notest.lib.mysql_lib import MysqlClient
from notest.lib.utils import templated_var
from notest import generators
def parse_mysql_query_generator(config, variable_binds):
mysql_config = config.get('config')
sql = config.get('query')
return_dict_list = config.get('return_dict_list', False)
mysql_config = templated_var(mysql_config, variable_binds)
if isinstance(mysql_config, str):
mysql_config = json.loads(mysql_config)
sql = templated_var(sql)
if isinstance(return_dict_list, str):
return_dict_list = True if return_dict_list.lower() == 'true' else False
try:
with MysqlClient(mysql_config) as cli:
r = None
if return_dict_list is False:
res = cli.query(sql)
r = list()
for i in res:
if isinstance(i, tuple):
i = i[0]
r.append(i)
else:
r = cli.query(sql, return_dict_list=return_dict_list)
if len(r) == 0:
raise Exception("No data queried in MySQL by '{}'!".format(sql))
return generators.factory_fixed_sequence(r)()
except Exception as e:
logger.error(str(e))
raise ValueError("Invalid query: " + sql + " : " + str(e))
GENERATORS = {'mysql': parse_mysql_query_generator}
| true | true |
f724c9711779d4f88a28880eec79b4a0e04ab006 | 907 | py | Python | src/coinc/exceptions.py | kimklai/Coinc | dbce0d257d90104bd012996c18884a68e01375a9 | [
"MIT"
] | 15 | 2020-07-11T23:30:23.000Z | 2022-03-25T08:10:26.000Z | src/coinc/exceptions.py | kimklai/Coinc | dbce0d257d90104bd012996c18884a68e01375a9 | [
"MIT"
] | 10 | 2020-06-26T18:20:22.000Z | 2022-03-31T02:55:29.000Z | src/coinc/exceptions.py | kimklai/Coinc | dbce0d257d90104bd012996c18884a68e01375a9 | [
"MIT"
] | 2 | 2020-09-10T10:51:01.000Z | 2021-04-11T09:08:48.000Z | # -*- coding: utf-8 -*-
"""Exceptions used in this module"""
class CoincError(Exception):
    """Common ancestor of every error raised by Coinc.

    Catching this type traps all Coinc-specific failures while letting
    unrelated exceptions propagate.
    """
class ConfigError(CoincError):
    """Signals an invalid value filled in the Configuration Sheet."""
class QueryError(CoincError):
    """Signals that an invalid query was given."""
class AppIDError(CoincError):
    """Signals that the configured App ID cannot be used."""
class ApiError(CoincError):
    """Signals that the API is unreachable or returned a bad response."""
class UnknownPythonError(CoincError):
    """Signals that the Python runtime version could not be detected."""
| 15.912281 | 72 | 0.63065 |
class CoincError(Exception):
pass
class ConfigError(CoincError):
pass
class QueryError(CoincError):
pass
class AppIDError(CoincError):
pass
class ApiError(CoincError):
pass
class UnknownPythonError(CoincError):
pass
| true | true |
f724c9cbe59430da0dd2210d4efb6ddff77348cb | 9,522 | py | Python | tessia/server/db/alembic/versions/4f32ee5b2d29_0_0_3_remove_os_from_template_add_.py | tessia-project/tessia | b9ded8dc7f0b9a7a0ea00d95b5ccc4af4d2e7540 | [
"Apache-2.0"
] | 5 | 2020-06-04T10:20:33.000Z | 2020-10-26T15:09:19.000Z | tessia/server/db/alembic/versions/4f32ee5b2d29_0_0_3_remove_os_from_template_add_.py | tessia-project/tessia | b9ded8dc7f0b9a7a0ea00d95b5ccc4af4d2e7540 | [
"Apache-2.0"
] | null | null | null | tessia/server/db/alembic/versions/4f32ee5b2d29_0_0_3_remove_os_from_template_add_.py | tessia-project/tessia | b9ded8dc7f0b9a7a0ea00d95b5ccc4af4d2e7540 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=all
"""0.0.3 remove os from template, add template to operating_systems
Revision ID: 4f32ee5b2d29
Revises: 14e7934c17c8
Create Date: 2018-03-15 13:39:57.863743
"""
# revision identifiers, used by Alembic.
revision = '4f32ee5b2d29'
down_revision = '14e7934c17c8'
branch_labels = None
depends_on = None
from alembic import op
from sqlalchemy import Column
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy.schema import ForeignKey
from sqlalchemy.types import Boolean, Integer, LargeBinary, String
import os
import sqlalchemy as sa
SESSION = sessionmaker()
BASE = declarative_base()
# declare the models used in this migration
class CommonMixin(object):
    """
    Helper mixin to set attributes common to most classes
    """
    # Surrogate integer primary key shared by every model declared in
    # this migration (intentionally shadows the builtin `id` -- it is a
    # column name, not a local variable).
    id = Column(Integer, primary_key=True)
# CommonMixin
def upgrade():
    """Apply schema and data changes for revision 4f32ee5b2d29.

    Schema: move the OS<->template association from `templates` to
    `operating_systems` (new template_id FK), drop the per-OS `cmdline`
    column and rename `desc` to `pretty_name`.
    Data: rename the existing default templates (reloading their content
    from disk), point each existing OS at its default template, and
    register the newly supported OS entries.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('operating_systems', sa.Column('template_id', sa.Integer(), nullable=True))
    op.create_foreign_key(op.f('fk_operating_systems_template_id_templates'), 'operating_systems', 'templates', ['template_id'], ['id'])
    op.drop_column('operating_systems', 'cmdline')
    op.alter_column('operating_systems', 'desc', new_column_name='pretty_name')
    op.alter_column('templates', 'content',
                    existing_type=sa.VARCHAR(),
                    nullable=False)
    op.drop_index('ix_templates_operating_system_id', table_name='templates')
    op.drop_constraint('fk_templates_operating_system_id_operating_systems', 'templates', type_='foreignkey')
    op.drop_column('templates', 'operating_system_id')
    # ### end Alembic commands ###

    # Models reflecting the *upgraded* schema, declared locally so the
    # migration never depends on the live application models.
    class OperatingSystem(CommonMixin, BASE):
        """A supported operating system"""
        __tablename__ = 'operating_systems'
        name = Column(String, unique=True, index=True)
        type = Column(String, nullable=False)
        major = Column(Integer, nullable=False)
        minor = Column(Integer, nullable=False)
        pretty_name = Column(String, nullable=False)
        # default auto install template
        template_id = Column(Integer, ForeignKey('templates.id'))
    # OperatingSystem

    class Template(CommonMixin, BASE):
        """A template for a InstallMachine"""
        __tablename__ = "templates"
        name = Column(String, unique=True, nullable=False)
        content = Column(String, nullable=False)
        desc = Column(String)
    # Template

    # data migration
    session = SESSION(bind=op.get_bind())

    # Rename existing templates, set descriptions, reload content from disk.
    templates_dir = os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../../templates/")
    update_templates = {
        'RHEL7.2': (
            'rhel7-default', 'Default template for RHEL7 installations'),
        'SLES12.1': (
            'sles12-default', 'Default template for SLES12 installations'),
        'UBUNTU16.04.1': (
            'ubuntu16-default', 'Default template for Ubuntu16 installations'),
    }
    for key, value in update_templates.items():
        temp_obj = session.query(Template).filter_by(name=key).one()
        temp_obj.name = value[0]
        temp_obj.desc = value[1]
        template_path = '{}.jinja'.format(value[0])
        with open(templates_dir + '/' + template_path, "r") as template_file:
            temp_obj.content = template_file.read()

    # Update existing OS rows: new type, pretty name and default template.
    update_oses = {
        'rhel7.2': ('redhat', 'Red Hat Enterprise Linux Server 7.2 (Maipo)', 'rhel7-default'),
        'sles12.1': ('suse', 'SUSE Linux Enterprise Server 12 SP1', 'sles12-default'),
        'ubuntu16.04.1': ('debian', 'Ubuntu 16.04.1 LTS', 'ubuntu16-default'),
    }
    for key, value in update_oses.items():
        temp_obj = session.query(Template).filter_by(name=value[2]).one()
        os_obj = session.query(OperatingSystem).filter_by(name=key).one()
        os_obj.type = value[0]
        if key == 'ubuntu16.04.1':
            # Ubuntu version encoding: major 1604 (16.04), minor 1 (point release).
            os_obj.major = 1604
            os_obj.minor = 1
        os_obj.pretty_name = value[1]
        os_obj.template_id = temp_obj.id

    # Insert newly supported OSes. Row format (comma separated):
    # name,type,major,minor,pretty_name,template_name[,extra]
    new_oses = [
        'cms,cms,0,0,z/VM Conversational Monitor System (CMS),,',
        'rhel7.3,redhat,7,3,Red Hat Enterprise Linux Server 7.3 (Maipo),rhel7-default',
        'rhel7.4,redhat,7,4,Red Hat Enterprise Linux Server 7.4 (Maipo),rhel7-default',
        'sles12.2,suse,12,2,SUSE Linux Enterprise Server 12 SP2,sles12-default',
        'sles12.3,suse,12,3,SUSE Linux Enterprise Server 12 SP3,sles12-default',
        'ubuntu16.04.2,debian,1604,2,Ubuntu 16.04.2 LTS,ubuntu16-default',
        'ubuntu16.04.3,debian,1604,3,Ubuntu 16.04.3 LTS,ubuntu16-default',
    ]
    for row in new_oses:
        row = row.split(',', 6)
        if row[0] == 'cms':
            # CMS has no auto-install template.
            template = None
        else:
            temp_obj = session.query(Template).filter_by(name=row[5]).one()
            template = temp_obj.id
        os_obj = session.query(OperatingSystem).filter_by(name=row[0]).one_or_none()
        if not os_obj:
            os_obj = OperatingSystem()
            # BUGFIX: the original read `os_obj.name = row[0],` -- the
            # trailing comma stored a one-element *tuple* as the OS name
            # for freshly inserted rows.
            os_obj.name = row[0]
        os_obj.type = row[1]
        os_obj.major = row[2]
        os_obj.minor = row[3]
        os_obj.pretty_name = row[4]
        os_obj.template_id = template
        session.add(os_obj)
    session.commit()
# upgrade
def downgrade():
    """Revert schema and data changes of revision 4f32ee5b2d29.

    Schema: restore templates.operating_system_id (FK + index), make
    templates.content nullable again, rename operating_systems.pretty_name
    back to 'desc', re-add the 'cmdline' column and drop the template_id FK.
    Data: restore the old template names/descriptions and the old OS
    type/desc values, reloading each OS's cmdline template from disk.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('templates', sa.Column('operating_system_id', sa.INTEGER(), autoincrement=False, nullable=True))
    op.create_foreign_key('fk_templates_operating_system_id_operating_systems', 'templates', 'operating_systems', ['operating_system_id'], ['id'])
    op.create_index('ix_templates_operating_system_id', 'templates', ['operating_system_id'], unique=False)
    op.alter_column('templates', 'content',
                    existing_type=sa.VARCHAR(),
                    nullable=True)
    op.alter_column('operating_systems', 'pretty_name', new_column_name='desc')
    op.add_column('operating_systems', sa.Column('cmdline', sa.VARCHAR(), autoincrement=False, nullable=True))
    op.drop_constraint(op.f('fk_operating_systems_template_id_templates'), 'operating_systems', type_='foreignkey')
    op.drop_column('operating_systems', 'template_id')
    # ### end Alembic commands ###
    # Models reflecting the *downgraded* schema, declared locally so the
    # migration never depends on the live application models.
    class OldOperatingSystem(CommonMixin, BASE):
        """Downgrade version of operating system"""
        __tablename__ = 'operating_systems'
        name = Column(String, unique=True, index=True)
        type = Column(String, nullable=False)
        major = Column(Integer, nullable=False)
        minor = Column(Integer, nullable=False)
        desc = Column(String, nullable=False)
        cmdline = Column(String)
    # OldOperatingSystem
    class OldTemplate(CommonMixin, BASE):
        """The downgrade version of template"""
        __tablename__ = "templates"
        name = Column(String, unique=True, nullable=False)
        content = Column(String, nullable=False)
        desc = Column(String)
        operating_system_id = Column(
            Integer, ForeignKey('operating_systems.id'), index=True)
    # OldTemplate
    # data revert
    session = SESSION(bind=op.get_bind())
    # Restore the old template names/descriptions and re-link each
    # template to its operating system via the restored FK column.
    update_templates = {
        'rhel7-default': ('RHEL7.2', 'Template for RHEL7.2', 'rhel7.2'),
        'sles12-default': ('SLES12.1', 'Template for SLES12.1', 'sles12.1'),
        'ubuntu16-default': ('UBUNTU16.04.1', 'Template for Ubuntu 16.04.1', 'ubuntu16.04.1'),
    }
    for key, value in update_templates.items():
        os_obj = session.query(OldOperatingSystem).filter_by(name=value[2]).one()
        temp_obj = session.query(OldTemplate).filter_by(name=key).one()
        temp_obj.name = value[0]
        temp_obj.desc = value[1]
        temp_obj.operating_system_id = os_obj.id
    # Restore old OS type/desc and reload the per-type cmdline template.
    templates_dir = os.path.abspath(
        os.path.dirname(os.path.abspath(__file__)) +
        "/../../../state_machines/autoinstall/templates")
    update_oses = {
        'rhel7.2': ('rhel', 'RHEL 7.2 GA', 'rhel7-default'),
        'sles12.1': ('sles', 'SLES 12.1', 'sles12-default'),
        'ubuntu16.04.1': ('ubuntu', 'Ubuntu 16.04.1', 'ubuntu16-default'),
    }
    for key, value in update_oses.items():
        os_obj = session.query(OldOperatingSystem).filter_by(name=key).one()
        # Keep the (new-style) type before overwriting it -- it names the
        # cmdline template file loaded below.
        new_type = os_obj.type
        os_obj.type = value[0]
        if key == 'ubuntu16.04.1':
            # Old Ubuntu version encoding: major 16, minor 4.
            os_obj.major = 16
            os_obj.minor = 4
        os_obj.desc = value[1]
        cmdline_template_path = '{}.cmdline.jinja'.format(new_type)
        with open(templates_dir + '/' + cmdline_template_path, "r") as cmdline_file:
            os_obj.cmdline = cmdline_file.read()
    session.commit()
# downgrade
| 40.347458 | 146 | 0.664251 |
revision = '4f32ee5b2d29'
down_revision = '14e7934c17c8'
branch_labels = None
depends_on = None
from alembic import op
from sqlalchemy import Column
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.ext.hybrid import hybrid_property
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy.schema import ForeignKey
from sqlalchemy.types import Boolean, Integer, LargeBinary, String
import os
import sqlalchemy as sa
SESSION = sessionmaker()
BASE = declarative_base()
class CommonMixin(object):
id = Column(Integer, primary_key=True)
def upgrade():
ing_systems', 'templates', ['template_id'], ['id'])
op.drop_column('operating_systems', 'cmdline')
op.alter_column('operating_systems', 'desc', new_column_name='pretty_name')
op.alter_column('templates', 'content',
existing_type=sa.VARCHAR(),
nullable=False)
op.drop_index('ix_templates_operating_system_id', table_name='templates')
op.drop_constraint('fk_templates_operating_system_id_operating_systems', 'templates', type_='foreignkey')
op.drop_column('templates', 'operating_system_id')
tems'
name = Column(String, unique=True, index=True)
type = Column(String, nullable=False)
major = Column(Integer, nullable=False)
minor = Column(Integer, nullable=False)
pretty_name = Column(String, nullable=False)
template_id = Column(Integer, ForeignKey('templates.id'))
class Template(CommonMixin, BASE):
__tablename__ = "templates"
name = Column(String, unique=True, nullable=False)
content = Column(String, nullable=False)
desc = Column(String)
session = SESSION(bind=op.get_bind())
templates_dir = os.path.abspath(os.path.dirname(os.path.abspath(__file__)) + "/../../templates/")
update_templates = {
'RHEL7.2': (
'rhel7-default', 'Default template for RHEL7 installations'),
'SLES12.1': (
'sles12-default', 'Default template for SLES12 installations'),
'UBUNTU16.04.1': (
'ubuntu16-default', 'Default template for Ubuntu16 installations'),
}
for key, value in update_templates.items():
temp_obj = session.query(Template).filter_by(name=key).one()
temp_obj.name = value[0]
temp_obj.desc = value[1]
template_path = '{}.jinja'.format(value[0])
with open(templates_dir + '/' + template_path, "r") as template_file:
temp_obj.content = template_file.read()
update_oses = {
'rhel7.2': ('redhat', 'Red Hat Enterprise Linux Server 7.2 (Maipo)', 'rhel7-default'),
'sles12.1': ('suse', 'SUSE Linux Enterprise Server 12 SP1', 'sles12-default'),
'ubuntu16.04.1': ('debian', 'Ubuntu 16.04.1 LTS', 'ubuntu16-default'),
}
for key, value in update_oses.items():
temp_obj = session.query(Template).filter_by(name=value[2]).one()
os_obj = session.query(OperatingSystem).filter_by(name=key).one()
os_obj.type = value[0]
if key == 'ubuntu16.04.1':
os_obj.major = 1604
os_obj.minor = 1
os_obj.pretty_name = value[1]
os_obj.template_id = temp_obj.id
new_oses = [
'cms,cms,0,0,z/VM Conversational Monitor System (CMS),,',
'rhel7.3,redhat,7,3,Red Hat Enterprise Linux Server 7.3 (Maipo),rhel7-default',
'rhel7.4,redhat,7,4,Red Hat Enterprise Linux Server 7.4 (Maipo),rhel7-default',
'sles12.2,suse,12,2,SUSE Linux Enterprise Server 12 SP2,sles12-default',
'sles12.3,suse,12,3,SUSE Linux Enterprise Server 12 SP3,sles12-default',
'ubuntu16.04.2,debian,1604,2,Ubuntu 16.04.2 LTS,ubuntu16-default',
'ubuntu16.04.3,debian,1604,3,Ubuntu 16.04.3 LTS,ubuntu16-default',
]
for row in new_oses:
row = row.split(',', 6)
if row[0] == 'cms':
template = None
else:
temp_obj = session.query(Template).filter_by(name=row[5]).one()
template = temp_obj.id
os_obj = session.query(OperatingSystem).filter_by(name=row[0]).one_or_none()
if not os_obj:
os_obj = OperatingSystem()
os_obj.name = row[0],
os_obj.type = row[1]
os_obj.major = row[2]
os_obj.minor = row[3]
os_obj.pretty_name = row[4]
os_obj.template_id = template
session.add(os_obj)
session.commit()
def downgrade():
ating_systems', 'templates', 'operating_systems', ['operating_system_id'], ['id'])
op.create_index('ix_templates_operating_system_id', 'templates', ['operating_system_id'], unique=False)
op.alter_column('templates', 'content',
existing_type=sa.VARCHAR(),
nullable=True)
op.alter_column('operating_systems', 'pretty_name', new_column_name='desc')
op.add_column('operating_systems', sa.Column('cmdline', sa.VARCHAR(), autoincrement=False, nullable=True))
op.drop_constraint(op.f('fk_operating_systems_template_id_templates'), 'operating_systems', type_='foreignkey')
op.drop_column('operating_systems', 'template_id')
systems'
name = Column(String, unique=True, index=True)
type = Column(String, nullable=False)
major = Column(Integer, nullable=False)
minor = Column(Integer, nullable=False)
desc = Column(String, nullable=False)
cmdline = Column(String)
class OldTemplate(CommonMixin, BASE):
__tablename__ = "templates"
name = Column(String, unique=True, nullable=False)
content = Column(String, nullable=False)
desc = Column(String)
operating_system_id = Column(
Integer, ForeignKey('operating_systems.id'), index=True)
session = SESSION(bind=op.get_bind())
update_templates = {
'rhel7-default': ('RHEL7.2', 'Template for RHEL7.2', 'rhel7.2'),
'sles12-default': ('SLES12.1', 'Template for SLES12.1', 'sles12.1'),
'ubuntu16-default': ('UBUNTU16.04.1', 'Template for Ubuntu 16.04.1', 'ubuntu16.04.1'),
}
for key, value in update_templates.items():
os_obj = session.query(OldOperatingSystem).filter_by(name=value[2]).one()
temp_obj = session.query(OldTemplate).filter_by(name=key).one()
temp_obj.name = value[0]
temp_obj.desc = value[1]
temp_obj.operating_system_id = os_obj.id
templates_dir = os.path.abspath(
os.path.dirname(os.path.abspath(__file__)) +
"/../../../state_machines/autoinstall/templates")
update_oses = {
'rhel7.2': ('rhel', 'RHEL 7.2 GA', 'rhel7-default'),
'sles12.1': ('sles', 'SLES 12.1', 'sles12-default'),
'ubuntu16.04.1': ('ubuntu', 'Ubuntu 16.04.1', 'ubuntu16-default'),
}
for key, value in update_oses.items():
os_obj = session.query(OldOperatingSystem).filter_by(name=key).one()
new_type = os_obj.type
os_obj.type = value[0]
if key == 'ubuntu16.04.1':
os_obj.major = 16
os_obj.minor = 4
os_obj.desc = value[1]
cmdline_template_path = '{}.cmdline.jinja'.format(new_type)
with open(templates_dir + '/' + cmdline_template_path, "r") as cmdline_file:
os_obj.cmdline = cmdline_file.read()
session.commit()
| true | true |
f724c9e936f9b464bc9ef938bd84202c5c01e1e8 | 6,935 | py | Python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_08_01/operations/__init__.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2022-02-01T18:50:12.000Z | 2022-02-01T18:50:12.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_08_01/operations/__init__.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | sdk/network/azure-mgmt-network/azure/mgmt/network/v2018_08_01/operations/__init__.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from ._operations import ApplicationGatewaysOperations
from ._operations import ApplicationSecurityGroupsOperations
from ._operations import AvailableDelegationsOperations
from ._operations import AvailableResourceGroupDelegationsOperations
from ._operations import AzureFirewallsOperations
from ._operations import AzureFirewallFqdnTagsOperations
from ._operations import NetworkManagementClientOperationsMixin
from ._operations import DdosProtectionPlansOperations
from ._operations import AvailableEndpointServicesOperations
from ._operations import ExpressRouteCircuitAuthorizationsOperations
from ._operations import ExpressRouteCircuitPeeringsOperations
from ._operations import ExpressRouteCircuitConnectionsOperations
from ._operations import ExpressRouteCircuitsOperations
from ._operations import ExpressRouteServiceProvidersOperations
from ._operations import ExpressRouteCrossConnectionsOperations
from ._operations import ExpressRouteCrossConnectionPeeringsOperations
from ._operations import ExpressRouteGatewaysOperations
from ._operations import ExpressRouteConnectionsOperations
from ._operations import ExpressRoutePortsLocationsOperations
from ._operations import ExpressRoutePortsOperations
from ._operations import ExpressRouteLinksOperations
from ._operations import InterfaceEndpointsOperations
from ._operations import LoadBalancersOperations
from ._operations import LoadBalancerBackendAddressPoolsOperations
from ._operations import LoadBalancerFrontendIPConfigurationsOperations
from ._operations import InboundNatRulesOperations
from ._operations import LoadBalancerLoadBalancingRulesOperations
from ._operations import LoadBalancerOutboundRulesOperations
from ._operations import LoadBalancerNetworkInterfacesOperations
from ._operations import LoadBalancerProbesOperations
from ._operations import NetworkInterfacesOperations
from ._operations import NetworkInterfaceIPConfigurationsOperations
from ._operations import NetworkInterfaceLoadBalancersOperations
from ._operations import NetworkInterfaceTapConfigurationsOperations
from ._operations import NetworkProfilesOperations
from ._operations import NetworkSecurityGroupsOperations
from ._operations import SecurityRulesOperations
from ._operations import DefaultSecurityRulesOperations
from ._operations import NetworkWatchersOperations
from ._operations import PacketCapturesOperations
from ._operations import ConnectionMonitorsOperations
from ._operations import Operations
from ._operations import PublicIPAddressesOperations
from ._operations import PublicIPPrefixesOperations
from ._operations import RouteFiltersOperations
from ._operations import RouteFilterRulesOperations
from ._operations import RouteTablesOperations
from ._operations import RoutesOperations
from ._operations import BgpServiceCommunitiesOperations
from ._operations import ServiceEndpointPoliciesOperations
from ._operations import ServiceEndpointPolicyDefinitionsOperations
from ._operations import UsagesOperations
from ._operations import VirtualNetworksOperations
from ._operations import SubnetsOperations
from ._operations import VirtualNetworkPeeringsOperations
from ._operations import VirtualNetworkTapsOperations
from ._operations import VirtualNetworkGatewaysOperations
from ._operations import VirtualNetworkGatewayConnectionsOperations
from ._operations import LocalNetworkGatewaysOperations
from ._operations import VirtualWansOperations
from ._operations import VpnSitesOperations
from ._operations import VpnSitesConfigurationOperations
from ._operations import VirtualHubsOperations
from ._operations import HubVirtualNetworkConnectionsOperations
from ._operations import VpnGatewaysOperations
from ._operations import VpnConnectionsOperations
from ._operations import P2SVpnServerConfigurationsOperations
from ._operations import P2SVpnGatewaysOperations
# Public API of this generated operations package -- must stay in sync
# with the `from ._operations import ...` statements above (file is
# produced by AutoRest; regenerate rather than hand-edit when possible).
__all__ = [
    'ApplicationGatewaysOperations',
    'ApplicationSecurityGroupsOperations',
    'AvailableDelegationsOperations',
    'AvailableResourceGroupDelegationsOperations',
    'AzureFirewallsOperations',
    'AzureFirewallFqdnTagsOperations',
    'NetworkManagementClientOperationsMixin',
    'DdosProtectionPlansOperations',
    'AvailableEndpointServicesOperations',
    'ExpressRouteCircuitAuthorizationsOperations',
    'ExpressRouteCircuitPeeringsOperations',
    'ExpressRouteCircuitConnectionsOperations',
    'ExpressRouteCircuitsOperations',
    'ExpressRouteServiceProvidersOperations',
    'ExpressRouteCrossConnectionsOperations',
    'ExpressRouteCrossConnectionPeeringsOperations',
    'ExpressRouteGatewaysOperations',
    'ExpressRouteConnectionsOperations',
    'ExpressRoutePortsLocationsOperations',
    'ExpressRoutePortsOperations',
    'ExpressRouteLinksOperations',
    'InterfaceEndpointsOperations',
    'LoadBalancersOperations',
    'LoadBalancerBackendAddressPoolsOperations',
    'LoadBalancerFrontendIPConfigurationsOperations',
    'InboundNatRulesOperations',
    'LoadBalancerLoadBalancingRulesOperations',
    'LoadBalancerOutboundRulesOperations',
    'LoadBalancerNetworkInterfacesOperations',
    'LoadBalancerProbesOperations',
    'NetworkInterfacesOperations',
    'NetworkInterfaceIPConfigurationsOperations',
    'NetworkInterfaceLoadBalancersOperations',
    'NetworkInterfaceTapConfigurationsOperations',
    'NetworkProfilesOperations',
    'NetworkSecurityGroupsOperations',
    'SecurityRulesOperations',
    'DefaultSecurityRulesOperations',
    'NetworkWatchersOperations',
    'PacketCapturesOperations',
    'ConnectionMonitorsOperations',
    'Operations',
    'PublicIPAddressesOperations',
    'PublicIPPrefixesOperations',
    'RouteFiltersOperations',
    'RouteFilterRulesOperations',
    'RouteTablesOperations',
    'RoutesOperations',
    'BgpServiceCommunitiesOperations',
    'ServiceEndpointPoliciesOperations',
    'ServiceEndpointPolicyDefinitionsOperations',
    'UsagesOperations',
    'VirtualNetworksOperations',
    'SubnetsOperations',
    'VirtualNetworkPeeringsOperations',
    'VirtualNetworkTapsOperations',
    'VirtualNetworkGatewaysOperations',
    'VirtualNetworkGatewayConnectionsOperations',
    'LocalNetworkGatewaysOperations',
    'VirtualWansOperations',
    'VpnSitesOperations',
    'VpnSitesConfigurationOperations',
    'VirtualHubsOperations',
    'HubVirtualNetworkConnectionsOperations',
    'VpnGatewaysOperations',
    'VpnConnectionsOperations',
    'P2SVpnServerConfigurationsOperations',
    'P2SVpnGatewaysOperations',
]
| 46.858108 | 94 | 0.839366 |
from ._operations import ApplicationGatewaysOperations
from ._operations import ApplicationSecurityGroupsOperations
from ._operations import AvailableDelegationsOperations
from ._operations import AvailableResourceGroupDelegationsOperations
from ._operations import AzureFirewallsOperations
from ._operations import AzureFirewallFqdnTagsOperations
from ._operations import NetworkManagementClientOperationsMixin
from ._operations import DdosProtectionPlansOperations
from ._operations import AvailableEndpointServicesOperations
from ._operations import ExpressRouteCircuitAuthorizationsOperations
from ._operations import ExpressRouteCircuitPeeringsOperations
from ._operations import ExpressRouteCircuitConnectionsOperations
from ._operations import ExpressRouteCircuitsOperations
from ._operations import ExpressRouteServiceProvidersOperations
from ._operations import ExpressRouteCrossConnectionsOperations
from ._operations import ExpressRouteCrossConnectionPeeringsOperations
from ._operations import ExpressRouteGatewaysOperations
from ._operations import ExpressRouteConnectionsOperations
from ._operations import ExpressRoutePortsLocationsOperations
from ._operations import ExpressRoutePortsOperations
from ._operations import ExpressRouteLinksOperations
from ._operations import InterfaceEndpointsOperations
from ._operations import LoadBalancersOperations
from ._operations import LoadBalancerBackendAddressPoolsOperations
from ._operations import LoadBalancerFrontendIPConfigurationsOperations
from ._operations import InboundNatRulesOperations
from ._operations import LoadBalancerLoadBalancingRulesOperations
from ._operations import LoadBalancerOutboundRulesOperations
from ._operations import LoadBalancerNetworkInterfacesOperations
from ._operations import LoadBalancerProbesOperations
from ._operations import NetworkInterfacesOperations
from ._operations import NetworkInterfaceIPConfigurationsOperations
from ._operations import NetworkInterfaceLoadBalancersOperations
from ._operations import NetworkInterfaceTapConfigurationsOperations
from ._operations import NetworkProfilesOperations
from ._operations import NetworkSecurityGroupsOperations
from ._operations import SecurityRulesOperations
from ._operations import DefaultSecurityRulesOperations
from ._operations import NetworkWatchersOperations
from ._operations import PacketCapturesOperations
from ._operations import ConnectionMonitorsOperations
from ._operations import Operations
from ._operations import PublicIPAddressesOperations
from ._operations import PublicIPPrefixesOperations
from ._operations import RouteFiltersOperations
from ._operations import RouteFilterRulesOperations
from ._operations import RouteTablesOperations
from ._operations import RoutesOperations
from ._operations import BgpServiceCommunitiesOperations
from ._operations import ServiceEndpointPoliciesOperations
from ._operations import ServiceEndpointPolicyDefinitionsOperations
from ._operations import UsagesOperations
from ._operations import VirtualNetworksOperations
from ._operations import SubnetsOperations
from ._operations import VirtualNetworkPeeringsOperations
from ._operations import VirtualNetworkTapsOperations
from ._operations import VirtualNetworkGatewaysOperations
from ._operations import VirtualNetworkGatewayConnectionsOperations
from ._operations import LocalNetworkGatewaysOperations
from ._operations import VirtualWansOperations
from ._operations import VpnSitesOperations
from ._operations import VpnSitesConfigurationOperations
from ._operations import VirtualHubsOperations
from ._operations import HubVirtualNetworkConnectionsOperations
from ._operations import VpnGatewaysOperations
from ._operations import VpnConnectionsOperations
from ._operations import P2SVpnServerConfigurationsOperations
from ._operations import P2SVpnGatewaysOperations
__all__ = [
'ApplicationGatewaysOperations',
'ApplicationSecurityGroupsOperations',
'AvailableDelegationsOperations',
'AvailableResourceGroupDelegationsOperations',
'AzureFirewallsOperations',
'AzureFirewallFqdnTagsOperations',
'NetworkManagementClientOperationsMixin',
'DdosProtectionPlansOperations',
'AvailableEndpointServicesOperations',
'ExpressRouteCircuitAuthorizationsOperations',
'ExpressRouteCircuitPeeringsOperations',
'ExpressRouteCircuitConnectionsOperations',
'ExpressRouteCircuitsOperations',
'ExpressRouteServiceProvidersOperations',
'ExpressRouteCrossConnectionsOperations',
'ExpressRouteCrossConnectionPeeringsOperations',
'ExpressRouteGatewaysOperations',
'ExpressRouteConnectionsOperations',
'ExpressRoutePortsLocationsOperations',
'ExpressRoutePortsOperations',
'ExpressRouteLinksOperations',
'InterfaceEndpointsOperations',
'LoadBalancersOperations',
'LoadBalancerBackendAddressPoolsOperations',
'LoadBalancerFrontendIPConfigurationsOperations',
'InboundNatRulesOperations',
'LoadBalancerLoadBalancingRulesOperations',
'LoadBalancerOutboundRulesOperations',
'LoadBalancerNetworkInterfacesOperations',
'LoadBalancerProbesOperations',
'NetworkInterfacesOperations',
'NetworkInterfaceIPConfigurationsOperations',
'NetworkInterfaceLoadBalancersOperations',
'NetworkInterfaceTapConfigurationsOperations',
'NetworkProfilesOperations',
'NetworkSecurityGroupsOperations',
'SecurityRulesOperations',
'DefaultSecurityRulesOperations',
'NetworkWatchersOperations',
'PacketCapturesOperations',
'ConnectionMonitorsOperations',
'Operations',
'PublicIPAddressesOperations',
'PublicIPPrefixesOperations',
'RouteFiltersOperations',
'RouteFilterRulesOperations',
'RouteTablesOperations',
'RoutesOperations',
'BgpServiceCommunitiesOperations',
'ServiceEndpointPoliciesOperations',
'ServiceEndpointPolicyDefinitionsOperations',
'UsagesOperations',
'VirtualNetworksOperations',
'SubnetsOperations',
'VirtualNetworkPeeringsOperations',
'VirtualNetworkTapsOperations',
'VirtualNetworkGatewaysOperations',
'VirtualNetworkGatewayConnectionsOperations',
'LocalNetworkGatewaysOperations',
'VirtualWansOperations',
'VpnSitesOperations',
'VpnSitesConfigurationOperations',
'VirtualHubsOperations',
'HubVirtualNetworkConnectionsOperations',
'VpnGatewaysOperations',
'VpnConnectionsOperations',
'P2SVpnServerConfigurationsOperations',
'P2SVpnGatewaysOperations',
]
| true | true |
f724cac7525d981babd6c466078597e54db40a89 | 1,080 | py | Python | step2_run_perl_RT_fdda_reformat_obsnud.py | M2LabOrg/WRF_little_r | 8f46e733387db4c62f39426a03b6a03b3b406b0e | [
"Apache-2.0"
] | 1 | 2021-09-14T06:41:02.000Z | 2021-09-14T06:41:02.000Z | step2_run_perl_RT_fdda_reformat_obsnud.py | M2LabOrg/WRF_little_r | 8f46e733387db4c62f39426a03b6a03b3b406b0e | [
"Apache-2.0"
] | null | null | null | step2_run_perl_RT_fdda_reformat_obsnud.py | M2LabOrg/WRF_little_r | 8f46e733387db4c62f39426a03b6a03b3b406b0e | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
"""
This script creates WRF ready files using the little_r formatted files.
They are part of the input files needed for observation nudging.
Note that:
- You need to step1_run_time_series_converter.py first
- Here we convert to the format needed by WRF
- You do that by running:
$ perl RT_fdda_reformat_obsnud.pl OUTPUT/obs:2021-04-14_02
$ perl RT_fdda_reformat_obsnud.pl OUTPUT/obs:2021-04-14_03
$ (and so on)
- This will produce files with extension .obsnud, which you will concatenate
(see example below)
- You will also need to change the file name to OBS_DOMAIN101 for domain 1,
and OBS_DOMAIN201 for domain 2, and so on, as described in the WRF Users' manual
$ cat *.obsnud >> OBS_DOMAIN101
Adapted here by: Michel Mesquita, Ph.D. (July 2021)
"""
import os
import glob

# Reformat each little_r time-series file produced by step 1 into the
# WRF obs-nudging format using the NCAR perl reformatter. Each perl run
# emits a sibling *.obsnud file next to its input.
# (Fixed: the previous version computed an unused `filename` local.)
for filepath in glob.iglob('OUTPUT_STEP1/*'):
    print(filepath)
    os.system(f"perl RT_fdda_reformat_obsnud.pl {filepath}")

# Collect the reformatted files into the step-2 output directory and
# drop the temporary artifacts left behind by the reformatter.
os.system("mv OUTPUT_STEP1/*.obsnud OUTPUT_STEP2/")
os.system("rm OUTPUT_STEP1/*.tmp")
| 28.421053 | 82 | 0.746296 |
import os
import glob
for filepath in glob.iglob('OUTPUT_STEP1/*'):
print(filepath)
filename = os.path.basename(filepath)
os.system(f"perl RT_fdda_reformat_obsnud.pl {filepath}")
os.system("mv OUTPUT_STEP1/*.obsnud OUTPUT_STEP2/")
os.system("rm OUTPUT_STEP1/*.tmp")
| true | true |
f724cad0080defcb0f50376906f3de9ab0cedd9e | 440 | py | Python | invenio_i18n/version.py | mvidalgarcia/invenio-i18n | 123b3db1538529ebb5eff165802d387d3337c7d1 | [
"MIT"
] | null | null | null | invenio_i18n/version.py | mvidalgarcia/invenio-i18n | 123b3db1538529ebb5eff165802d387d3337c7d1 | [
"MIT"
] | null | null | null | invenio_i18n/version.py | mvidalgarcia/invenio-i18n | 123b3db1538529ebb5eff165802d387d3337c7d1 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2020 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Version information for Invenio-I18N.
This file is imported by ``invenio_i18n.__init__``,
and parsed by ``setup.py``.
"""
from __future__ import absolute_import, print_function
__version__ = '1.2.0'
| 24.444444 | 72 | 0.722727 |
from __future__ import absolute_import, print_function
__version__ = '1.2.0'
| true | true |
f724cb2af35070c9347e3228b9791a0c3e03873f | 13,189 | py | Python | test/functional/test_framework/wallet.py | dathx/bitcoin | 57982f419e36d0023c83af2dd0d683ca3160dc2a | [
"MIT"
] | 459 | 2015-09-25T22:46:28.000Z | 2022-02-27T18:01:48.000Z | test/functional/test_framework/wallet.py | dathx/bitcoin | 57982f419e36d0023c83af2dd0d683ca3160dc2a | [
"MIT"
] | 472 | 2015-09-17T09:42:03.000Z | 2022-03-29T05:29:04.000Z | test/functional/test_framework/wallet.py | dathx/bitcoin | 57982f419e36d0023c83af2dd0d683ca3160dc2a | [
"MIT"
] | 209 | 2015-10-04T00:49:49.000Z | 2022-03-24T11:05:09.000Z | #!/usr/bin/env python3
# Copyright (c) 2020-2021 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""A limited-functionality wallet, which may replace a real wallet in tests"""
from copy import deepcopy
from decimal import Decimal
from enum import Enum
from random import choice
from typing import Optional
from test_framework.address import create_deterministic_address_bcrt1_p2tr_op_true
from test_framework.descriptors import descsum_create
from test_framework.key import ECKey
from test_framework.messages import (
COIN,
COutPoint,
CTransaction,
CTxIn,
CTxInWitness,
CTxOut,
tx_from_hex,
)
from test_framework.script import (
CScript,
LegacySignatureHash,
LEAF_VERSION_TAPSCRIPT,
OP_NOP,
OP_TRUE,
SIGHASH_ALL,
)
from test_framework.script_util import (
key_to_p2pk_script,
key_to_p2wpkh_script,
)
from test_framework.util import (
assert_equal,
assert_greater_than_or_equal,
)
DEFAULT_FEE = Decimal("0.0001")
class MiniWalletMode(Enum):
    """Determines the transaction type the MiniWallet is creating and spending.

    For most purposes, the default mode ADDRESS_OP_TRUE should be sufficient;
    it simply uses a fixed bech32m P2TR address whose coins are spent with a
    witness stack of OP_TRUE, i.e. following an anyone-can-spend policy.
    However, if the transactions need to be modified by the user (e.g. prepending
    scriptSig for testing opcodes that are activated by a soft-fork), or the txs
    should contain an actual signature, the raw modes RAW_OP_TRUE and RAW_P2PK
    can be useful. Summary of modes:

                    |      output      |         |  tx is   | can modify |  needs
        mode        |   description    | address | standard | scriptSig  | signing
    ----------------+------------------+---------+----------+------------+---------
    ADDRESS_OP_TRUE | anyone-can-spend | bech32m |   yes    |     no     |   no
    RAW_OP_TRUE     | anyone-can-spend | - (raw) |    no    |    yes     |   no
    RAW_P2PK        | pay-to-public-key| - (raw) |   yes    |    yes     |   yes
    """
    ADDRESS_OP_TRUE = 1  # default: fixed bech32m P2TR address, OP_TRUE witness
    RAW_OP_TRUE = 2      # raw anyone-can-spend script; scriptSig may be modified
    RAW_P2PK = 3         # raw pay-to-public-key; txs carry a real ECDSA signature
class MiniWallet:
    """Limited-functionality wallet driven entirely through a test node's RPC.

    Tracks its own UTXO set in ``self._utxos`` and builds/signs transactions
    itself, so no compiled wallet is required. Spending behavior depends on
    the chosen MiniWalletMode (see that enum's docstring).
    """

    def __init__(self, test_node, *, mode=MiniWalletMode.ADDRESS_OP_TRUE):
        self._test_node = test_node
        self._utxos = []  # entries are dicts: {'txid', 'vout', 'value', 'height'}
        self._priv_key = None
        self._address = None
        assert isinstance(mode, MiniWalletMode)
        if mode == MiniWalletMode.RAW_OP_TRUE:
            self._scriptPubKey = bytes(CScript([OP_TRUE]))
        elif mode == MiniWalletMode.RAW_P2PK:
            # use simple deterministic private key (k=1)
            self._priv_key = ECKey()
            self._priv_key.set((1).to_bytes(32, 'big'), True)
            pub_key = self._priv_key.get_pubkey()
            self._scriptPubKey = key_to_p2pk_script(pub_key.get_bytes())
        elif mode == MiniWalletMode.ADDRESS_OP_TRUE:
            self._address, self._internal_key = create_deterministic_address_bcrt1_p2tr_op_true()
            self._scriptPubKey = bytes.fromhex(self._test_node.validateaddress(self._address)['scriptPubKey'])

    def rescan_utxos(self):
        """Drop all utxos and rescan the utxo set"""
        self._utxos = []
        res = self._test_node.scantxoutset(action="start", scanobjects=[self.get_descriptor()])
        assert_equal(True, res['success'])
        for utxo in res['unspents']:
            self._utxos.append({'txid': utxo['txid'], 'vout': utxo['vout'], 'value': utxo['amount'], 'height': utxo['height']})

    def scan_tx(self, tx):
        """Scan the tx for self._scriptPubKey outputs and add them to self._utxos"""
        for out in tx['vout']:
            if out['scriptPubKey']['hex'] == self._scriptPubKey.hex():
                # height 0: not yet confirmed in a block
                self._utxos.append({'txid': tx['txid'], 'vout': out['n'], 'value': out['value'], 'height': 0})

    def sign_tx(self, tx, fixed_length=True):
        """Sign tx that has been created by MiniWallet in P2PK mode"""
        assert self._priv_key is not None
        (sighash, err) = LegacySignatureHash(CScript(self._scriptPubKey), tx, 0, SIGHASH_ALL)
        assert err is None
        # for exact fee calculation, create only signatures with fixed size by default (>49.89% probability):
        # 65 bytes: high-R val (33 bytes) + low-S val (32 bytes)
        # with the DER header/skeleton data of 6 bytes added, this leads to a target size of 71 bytes
        der_sig = b''
        while not len(der_sig) == 71:
            der_sig = self._priv_key.sign_ecdsa(sighash)
            if not fixed_length:
                break
        tx.vin[0].scriptSig = CScript([der_sig + bytes(bytearray([SIGHASH_ALL]))])

    def generate(self, num_blocks, **kwargs):
        """Generate blocks with coinbase outputs to the internal address, and append the outputs to the internal list"""
        blocks = self._test_node.generatetodescriptor(num_blocks, self.get_descriptor(), **kwargs)
        for b in blocks:
            block_info = self._test_node.getblock(blockhash=b, verbosity=2)
            cb_tx = block_info['tx'][0]  # first tx in a block is the coinbase
            self._utxos.append({'txid': cb_tx['txid'], 'vout': 0, 'value': cb_tx['vout'][0]['value'], 'height': block_info['height']})
        return blocks

    def get_descriptor(self):
        # raw() descriptor (with checksum) wrapping our scriptPubKey
        return descsum_create(f'raw({self._scriptPubKey.hex()})')

    def get_address(self):
        # None unless the wallet was created in ADDRESS_OP_TRUE mode
        return self._address

    def get_utxo(self, *, txid: Optional[str]='', mark_as_spent=True):
        """
        Returns a utxo and marks it as spent (pops it from the internal list)

        Args:
            txid: get the first utxo we find from a specific transaction
        """
        index = -1  # by default the last utxo
        self._utxos = sorted(self._utxos, key=lambda k: (k['value'], -k['height']))  # Put the largest utxo last
        if txid:
            utxo = next(filter(lambda utxo: txid == utxo['txid'], self._utxos))
            index = self._utxos.index(utxo)
        if mark_as_spent:
            return self._utxos.pop(index)
        else:
            return self._utxos[index]

    def send_self_transfer(self, **kwargs):
        """Create and send a tx with the specified fee_rate. Fee may be exact or at most one satoshi higher than needed."""
        tx = self.create_self_transfer(**kwargs)
        self.sendrawtransaction(from_node=kwargs['from_node'], tx_hex=tx['hex'])
        return tx

    def send_to(self, *, from_node, scriptPubKey, amount, fee=1000):
        """
        Create and send a tx with an output to a given scriptPubKey/amount,
        plus a change output to our internal address. To keep things simple, a
        fixed fee given in Satoshi is used.

        Note that this method fails if there is no single internal utxo
        available that can cover the cost for the amount and the fixed fee
        (the utxo with the largest value is taken).

        Returns a tuple (txid, n) referring to the created external utxo outpoint.
        """
        tx = self.create_self_transfer(from_node=from_node, fee_rate=0, mempool_valid=False)['tx']
        assert_greater_than_or_equal(tx.vout[0].nValue, amount + fee)
        tx.vout[0].nValue -= (amount + fee)  # change output -> MiniWallet
        tx.vout.append(CTxOut(amount, scriptPubKey))  # arbitrary output -> to be returned
        txid = self.sendrawtransaction(from_node=from_node, tx_hex=tx.serialize().hex())
        return txid, 1

    def create_self_transfer(self, *, fee_rate=Decimal("0.003"), from_node, utxo_to_spend=None, mempool_valid=True, locktime=0, sequence=0):
        """Create and return a tx with the specified fee_rate. Fee may be exact or at most one satoshi higher than needed."""
        utxo_to_spend = utxo_to_spend or self.get_utxo()
        if self._priv_key is None:
            vsize = Decimal(104)  # anyone-can-spend
        else:
            vsize = Decimal(168)  # P2PK (73 bytes scriptSig + 35 bytes scriptPubKey + 60 bytes other)
        send_value = int(COIN * (utxo_to_spend['value'] - fee_rate * (vsize / 1000)))
        assert send_value > 0
        tx = CTransaction()
        tx.vin = [CTxIn(COutPoint(int(utxo_to_spend['txid'], 16), utxo_to_spend['vout']), nSequence=sequence)]
        tx.vout = [CTxOut(send_value, self._scriptPubKey)]
        tx.nLockTime = locktime
        if not self._address:
            # raw script
            if self._priv_key is not None:
                # P2PK, need to sign
                self.sign_tx(tx)
            else:
                # anyone-can-spend
                tx.vin[0].scriptSig = CScript([OP_NOP] * 43)  # pad to identical size
        else:
            tx.wit.vtxinwit = [CTxInWitness()]
            tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE]), bytes([LEAF_VERSION_TAPSCRIPT]) + self._internal_key]
        tx_hex = tx.serialize().hex()
        tx_info = from_node.testmempoolaccept([tx_hex])[0]
        assert_equal(mempool_valid, tx_info['allowed'])
        if mempool_valid:
            # confirm the assumed vsize and the resulting fee are exact
            assert_equal(tx_info['vsize'], vsize)
            assert_equal(tx_info['fees']['base'], utxo_to_spend['value'] - Decimal(send_value) / COIN)
        return {'txid': tx_info['txid'], 'wtxid': tx_info['wtxid'], 'hex': tx_hex, 'tx': tx}

    def sendrawtransaction(self, *, from_node, tx_hex):
        """Broadcast the tx through the node and pick up any outputs we own."""
        txid = from_node.sendrawtransaction(tx_hex)
        self.scan_tx(from_node.decoderawtransaction(tx_hex))
        return txid
def random_p2wpkh():
    """Return a P2WPKH scriptPubKey for a freshly generated random key.

    Useful as a throwaway destination when no compiled wallet is available
    (e.g. as a stand-in for the getnewaddress RPC).
    """
    rand_key = ECKey()
    rand_key.generate()
    pubkey_bytes = rand_key.get_pubkey().get_bytes()
    return key_to_p2wpkh_script(pubkey_bytes)
def make_chain(node, address, privkeys, parent_txid, parent_value, n=0, parent_locking_script=None, fee=DEFAULT_FEE):
    """Spend parent_txid.vout[n] into a single output worth parent_value - fee.

    Return tuple (CTransaction object, raw hex, nValue, scriptPubKey hex of
    the newly created output).
    """
    child_value = parent_value - fee
    rawtx = node.createrawtransaction(
        [{"txid": parent_txid, "vout": n}],
        {address: child_value},
    )
    # Only supply prevtx metadata when the caller gave us the parent's script.
    if parent_locking_script:
        prevtxs = [{
            "txid": parent_txid,
            "vout": n,
            "scriptPubKey": parent_locking_script,
            "amount": parent_value,
        }]
    else:
        prevtxs = None
    signed = node.signrawtransactionwithkey(hexstring=rawtx, privkeys=privkeys, prevtxs=prevtxs)
    assert signed["complete"]
    tx = tx_from_hex(signed["hex"])
    return (tx, signed["hex"], child_value, tx.vout[0].scriptPubKey.hex())
def create_child_with_parents(node, address, privkeys, parents_tx, values, locking_scripts, fee=DEFAULT_FEE):
    """Build and sign a tx spending output 0 of every parent in parents_tx.

    The single output pays sum(values) - fee to `address`. Returns raw hex.
    """
    inputs = [{"txid": parent.rehash(), "vout": 0} for parent in parents_tx]
    outputs = {address: sum(values) - fee}
    rawtx_child = node.createrawtransaction(inputs, outputs)
    # parents_tx, locking_scripts and values are parallel lists.
    prevtxs = [
        {"txid": parent.rehash(), "vout": 0, "scriptPubKey": spk, "amount": amt}
        for parent, spk, amt in zip(parents_tx, locking_scripts, values)
    ]
    signed = node.signrawtransactionwithkey(hexstring=rawtx_child, privkeys=privkeys, prevtxs=prevtxs)
    assert signed["complete"]
    return signed["hex"]
def create_raw_chain(node, first_coin, address, privkeys, chain_length=25):
    """Build a linear chain of chain_length transactions.

    The nth transaction spends the (n-1)th one; the chain starts from
    first_coin. Returns (list of raw hex strings, list of CTransaction).
    """
    chain_hex = []
    chain_txns = []
    txid = first_coin["txid"]
    value = first_coin["amount"]
    locking_script = None  # unknown for the very first link
    for _ in range(chain_length):
        tx, txhex, value, locking_script = make_chain(node, address, privkeys, txid, value, 0, locking_script)
        txid = tx.rehash()
        chain_hex.append(txhex)
        chain_txns.append(tx)
    return (chain_hex, chain_txns)
def bulk_transaction(tx, node, target_weight, privkeys, prevtxs=None):
    """Pad a copy of tx with OP_RETURN outputs until its weight reaches target_weight (or higher).

    Returns a CTransaction object: re-signed when privkeys are given,
    otherwise carrying a bare OP_TRUE witness.
    """
    padded = deepcopy(tx)
    assert_greater_than_or_equal(target_weight, padded.get_weight())
    while padded.get_weight() < target_weight:
        # OP_RETURN OP_PUSH2 512 bytes, followed by 512 random payload bytes
        filler_spk = "6a4d0200" + "".join(choice("0123456789ABCDEF") for _ in range(512 * 2))
        padded.vout.append(CTxOut(0, bytes.fromhex(filler_spk)))
    if privkeys:
        # Re-sign, since the outputs changed after the original signature.
        signed = node.signrawtransactionwithkey(padded.serialize().hex(), privkeys, prevtxs)
        return tx_from_hex(signed["hex"])
    # Anyone-can-spend: the witness stack is just OP_TRUE.
    padded.wit.vtxinwit = [CTxInWitness()]
    padded.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE])]
    return padded
| 45.323024 | 140 | 0.656759 |
from copy import deepcopy
from decimal import Decimal
from enum import Enum
from random import choice
from typing import Optional
from test_framework.address import create_deterministic_address_bcrt1_p2tr_op_true
from test_framework.descriptors import descsum_create
from test_framework.key import ECKey
from test_framework.messages import (
COIN,
COutPoint,
CTransaction,
CTxIn,
CTxInWitness,
CTxOut,
tx_from_hex,
)
from test_framework.script import (
CScript,
LegacySignatureHash,
LEAF_VERSION_TAPSCRIPT,
OP_NOP,
OP_TRUE,
SIGHASH_ALL,
)
from test_framework.script_util import (
key_to_p2pk_script,
key_to_p2wpkh_script,
)
from test_framework.util import (
assert_equal,
assert_greater_than_or_equal,
)
DEFAULT_FEE = Decimal("0.0001")
class MiniWalletMode(Enum):
ADDRESS_OP_TRUE = 1
RAW_OP_TRUE = 2
RAW_P2PK = 3
class MiniWallet:
def __init__(self, test_node, *, mode=MiniWalletMode.ADDRESS_OP_TRUE):
self._test_node = test_node
self._utxos = []
self._priv_key = None
self._address = None
assert isinstance(mode, MiniWalletMode)
if mode == MiniWalletMode.RAW_OP_TRUE:
self._scriptPubKey = bytes(CScript([OP_TRUE]))
elif mode == MiniWalletMode.RAW_P2PK:
self._priv_key = ECKey()
self._priv_key.set((1).to_bytes(32, 'big'), True)
pub_key = self._priv_key.get_pubkey()
self._scriptPubKey = key_to_p2pk_script(pub_key.get_bytes())
elif mode == MiniWalletMode.ADDRESS_OP_TRUE:
self._address, self._internal_key = create_deterministic_address_bcrt1_p2tr_op_true()
self._scriptPubKey = bytes.fromhex(self._test_node.validateaddress(self._address)['scriptPubKey'])
def rescan_utxos(self):
self._utxos = []
res = self._test_node.scantxoutset(action="start", scanobjects=[self.get_descriptor()])
assert_equal(True, res['success'])
for utxo in res['unspents']:
self._utxos.append({'txid': utxo['txid'], 'vout': utxo['vout'], 'value': utxo['amount'], 'height': utxo['height']})
def scan_tx(self, tx):
for out in tx['vout']:
if out['scriptPubKey']['hex'] == self._scriptPubKey.hex():
self._utxos.append({'txid': tx['txid'], 'vout': out['n'], 'value': out['value'], 'height': 0})
def sign_tx(self, tx, fixed_length=True):
assert self._priv_key is not None
(sighash, err) = LegacySignatureHash(CScript(self._scriptPubKey), tx, 0, SIGHASH_ALL)
assert err is None
der_sig = b''
while not len(der_sig) == 71:
der_sig = self._priv_key.sign_ecdsa(sighash)
if not fixed_length:
break
tx.vin[0].scriptSig = CScript([der_sig + bytes(bytearray([SIGHASH_ALL]))])
def generate(self, num_blocks, **kwargs):
blocks = self._test_node.generatetodescriptor(num_blocks, self.get_descriptor(), **kwargs)
for b in blocks:
block_info = self._test_node.getblock(blockhash=b, verbosity=2)
cb_tx = block_info['tx'][0]
self._utxos.append({'txid': cb_tx['txid'], 'vout': 0, 'value': cb_tx['vout'][0]['value'], 'height': block_info['height']})
return blocks
def get_descriptor(self):
return descsum_create(f'raw({self._scriptPubKey.hex()})')
def get_address(self):
return self._address
def get_utxo(self, *, txid: Optional[str]='', mark_as_spent=True):
index = -1
self._utxos = sorted(self._utxos, key=lambda k: (k['value'], -k['height']))
if txid:
utxo = next(filter(lambda utxo: txid == utxo['txid'], self._utxos))
index = self._utxos.index(utxo)
if mark_as_spent:
return self._utxos.pop(index)
else:
return self._utxos[index]
def send_self_transfer(self, **kwargs):
tx = self.create_self_transfer(**kwargs)
self.sendrawtransaction(from_node=kwargs['from_node'], tx_hex=tx['hex'])
return tx
def send_to(self, *, from_node, scriptPubKey, amount, fee=1000):
tx = self.create_self_transfer(from_node=from_node, fee_rate=0, mempool_valid=False)['tx']
assert_greater_than_or_equal(tx.vout[0].nValue, amount + fee)
tx.vout[0].nValue -= (amount + fee)
tx.vout.append(CTxOut(amount, scriptPubKey))
txid = self.sendrawtransaction(from_node=from_node, tx_hex=tx.serialize().hex())
return txid, 1
def create_self_transfer(self, *, fee_rate=Decimal("0.003"), from_node, utxo_to_spend=None, mempool_valid=True, locktime=0, sequence=0):
utxo_to_spend = utxo_to_spend or self.get_utxo()
if self._priv_key is None:
vsize = Decimal(104)
else:
vsize = Decimal(168)
send_value = int(COIN * (utxo_to_spend['value'] - fee_rate * (vsize / 1000)))
assert send_value > 0
tx = CTransaction()
tx.vin = [CTxIn(COutPoint(int(utxo_to_spend['txid'], 16), utxo_to_spend['vout']), nSequence=sequence)]
tx.vout = [CTxOut(send_value, self._scriptPubKey)]
tx.nLockTime = locktime
if not self._address:
if self._priv_key is not None:
self.sign_tx(tx)
else:
tx.vin[0].scriptSig = CScript([OP_NOP] * 43)
else:
tx.wit.vtxinwit = [CTxInWitness()]
tx.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE]), bytes([LEAF_VERSION_TAPSCRIPT]) + self._internal_key]
tx_hex = tx.serialize().hex()
tx_info = from_node.testmempoolaccept([tx_hex])[0]
assert_equal(mempool_valid, tx_info['allowed'])
if mempool_valid:
assert_equal(tx_info['vsize'], vsize)
assert_equal(tx_info['fees']['base'], utxo_to_spend['value'] - Decimal(send_value) / COIN)
return {'txid': tx_info['txid'], 'wtxid': tx_info['wtxid'], 'hex': tx_hex, 'tx': tx}
def sendrawtransaction(self, *, from_node, tx_hex):
txid = from_node.sendrawtransaction(tx_hex)
self.scan_tx(from_node.decoderawtransaction(tx_hex))
return txid
def random_p2wpkh():
key = ECKey()
key.generate()
return key_to_p2wpkh_script(key.get_pubkey().get_bytes())
def make_chain(node, address, privkeys, parent_txid, parent_value, n=0, parent_locking_script=None, fee=DEFAULT_FEE):
inputs = [{"txid": parent_txid, "vout": n}]
my_value = parent_value - fee
outputs = {address : my_value}
rawtx = node.createrawtransaction(inputs, outputs)
prevtxs = [{
"txid": parent_txid,
"vout": n,
"scriptPubKey": parent_locking_script,
"amount": parent_value,
}] if parent_locking_script else None
signedtx = node.signrawtransactionwithkey(hexstring=rawtx, privkeys=privkeys, prevtxs=prevtxs)
assert signedtx["complete"]
tx = tx_from_hex(signedtx["hex"])
return (tx, signedtx["hex"], my_value, tx.vout[0].scriptPubKey.hex())
def create_child_with_parents(node, address, privkeys, parents_tx, values, locking_scripts, fee=DEFAULT_FEE):
num_parents = len(parents_tx)
total_value = sum(values)
inputs = [{"txid": tx.rehash(), "vout": 0} for tx in parents_tx]
outputs = {address : total_value - fee}
rawtx_child = node.createrawtransaction(inputs, outputs)
prevtxs = []
for i in range(num_parents):
prevtxs.append({"txid": parents_tx[i].rehash(), "vout": 0, "scriptPubKey": locking_scripts[i], "amount": values[i]})
signedtx_child = node.signrawtransactionwithkey(hexstring=rawtx_child, privkeys=privkeys, prevtxs=prevtxs)
assert signedtx_child["complete"]
return signedtx_child["hex"]
def create_raw_chain(node, first_coin, address, privkeys, chain_length=25):
parent_locking_script = None
txid = first_coin["txid"]
chain_hex = []
chain_txns = []
value = first_coin["amount"]
for _ in range(chain_length):
(tx, txhex, value, parent_locking_script) = make_chain(node, address, privkeys, txid, value, 0, parent_locking_script)
txid = tx.rehash()
chain_hex.append(txhex)
chain_txns.append(tx)
return (chain_hex, chain_txns)
def bulk_transaction(tx, node, target_weight, privkeys, prevtxs=None):
tx_heavy = deepcopy(tx)
assert_greater_than_or_equal(target_weight, tx_heavy.get_weight())
while tx_heavy.get_weight() < target_weight:
random_spk = "6a4d0200"
for _ in range(512*2):
random_spk += choice("0123456789ABCDEF")
tx_heavy.vout.append(CTxOut(0, bytes.fromhex(random_spk)))
if privkeys:
signed = node.signrawtransactionwithkey(tx_heavy.serialize().hex(), privkeys, prevtxs)
return tx_from_hex(signed["hex"])
tx_heavy.wit.vtxinwit = [CTxInWitness()]
tx_heavy.wit.vtxinwit[0].scriptWitness.stack = [CScript([OP_TRUE])]
return tx_heavy
| true | true |
f724cb595d2162e3e332ee66f877f324cfe44a48 | 16,524 | py | Python | modules/routes/user/forms.py | manu-p-1/meet | 5e6865a9b5035e324ab0b7cf5a9a71383dfcac9d | [
"MIT"
] | 3 | 2020-08-27T20:15:52.000Z | 2022-02-19T12:05:11.000Z | modules/routes/user/forms.py | manu-p-1/meet | 5e6865a9b5035e324ab0b7cf5a9a71383dfcac9d | [
"MIT"
] | null | null | null | modules/routes/user/forms.py | manu-p-1/meet | 5e6865a9b5035e324ab0b7cf5a9a71383dfcac9d | [
"MIT"
] | 2 | 2020-09-26T00:37:46.000Z | 2021-09-23T03:45:00.000Z | import sys
from flask_wtf import FlaskForm
from wtforms import StringField, IntegerField, TextAreaField, SelectField, BooleanField, DecimalField, HiddenField, \
RadioField, FieldList
from wtforms.validators import InputRequired, NumberRange, Length, AnyOf
from wtforms.widgets.html5 import NumberInput
from modules.routes.user.custom_fields import EmployeeInfoTextAreaField
from modules.routes.user.custom_validators import RequiredIf, EmployeeUnique, EndDateProper, \
StartDateProper, RequiredIfRadioField, VelocityUsageLimit, DeptBalance
from modules.routes.utils.custom_fields import InlineSubmitField
from server import client
# (code, label) choices for the department <select> fields; the entry with an
# empty code acts as the non-selectable placeholder.
DEPT_MAPPINGS = [
    ('', 'Please Choose a fund Destination'),
    ('IT', 'IT'), ('AC', 'ACCOUNTING'), ('MK', 'MARKETING'), ('HR', 'HUMAN RESOURCES'),
    ('PD', 'PRODUCTION'), ('RD', 'RESEARCH & DEVELOPMENT'), ('SC', 'SECURITY'), ('LG', 'LOGISTICS')
]
def create_plan_form(sn):
    """Build the plan-creation form for the current session.

    :param sn: Session Dictionary
    :return: A Create Plan Form
    """
    base = get_plan_base(sn)

    class CreatePlanForm(base):
        # Extends the shared plan base with a single submit button.
        create_plan_btn = InlineSubmitField(
            "Create Plan",
            btn_text="Create Plan",
            render_kw={"class": "btn btn-primary btn-block"},
        )

    return CreatePlanForm()
def get_plan_form(sn: dict):
    """Build the plan-management form for the current session.

    :param sn: Session Dictionary
    :return: A Manage Plan Form
    """
    base = get_plan_base(sn)

    class ManagePlanForm(base):
        # Extends the shared plan base with a single submit button.
        update_plan_btn = InlineSubmitField(
            "Update Plan",
            btn_text="Update Plan",
            render_kw={"class": "btn btn-primary btn-block"},
        )

    return ManagePlanForm()
def get_plan_base(sn: dict):
    """
    GET REFERENCE TO PLAN BASE FORM

    Builds the Plan form class shared by the create-plan and manage-plan
    views. Note that the class itself (not an instance) is returned, so the
    callers can subclass it with their own submit button.

    :param sn: Session Dictionary (must contain 'manager_dept')
    :return: A Plan Form class
    """
    # NOTE(review): MINIMUM_FUND_AMT and MINIMUM_CONTROL_AMT are referenced
    # below but are not defined in this part of the module -- presumably they
    # are module-level constants; confirm they are defined before first call.
    class Plan(FlaskForm):
        # Disbursement-type values used by the `disbursement_type` radio field.
        DISB_ALL = "DISB_ALL"
        DISB_INDIV = "DISB_INDIV"
        plan_name = StringField("Plan Name",
                                validators=[
                                    InputRequired(message="Enter a plan name."),
                                    Length(min=2, max=255, message="Plan name was too short or too long")
                                ],
                                render_kw={"placeholder": "Plan Name",
                                           "class": "form-control"})
        funding_amount = DecimalField('Per-Employee Funding Amount',
                                      validators=[
                                          InputRequired(message="Enter a funding amount."),
                                          NumberRange(min=MINIMUM_FUND_AMT,
                                                      message=f"The minimum funding amount must be at "
                                                              f"least ${MINIMUM_FUND_AMT}."),
                                          DeptBalance(client=client, sn=sn)
                                      ],
                                      render_kw={"placeholder": "Funding Amount",
                                                 "class": "form-control"},
                                      widget=NumberInput())
        plan_justification = StringField('Plan Justification (e.g. Travel, Equipment, Party)',
                                         validators=[
                                             InputRequired(message="A plan justification is required."),
                                             Length(min=3, max=50,
                                                    message="Plan justification was either too short or too long.")
                                         ],
                                         render_kw={"placeholder": "Plan Justification",
                                                    "class": "form-control"})
        memo = TextAreaField('Memo (min 10 chars, max 255 chars.)',
                             validators=[
                                 InputRequired("A memo is required."),
                                 Length(min=10, max=255, message="Memo was either too short or too long.")
                             ],
                             render_kw={"rows": 4,
                                        "maxlength": 255,
                                        "placeholder": "Memo Description",
                                        "class": "form-control"})
        start_date = StringField('Start Date/Times',
                                 validators=[
                                     InputRequired(message="A start date is required."),
                                     StartDateProper()
                                 ],
                                 render_kw={"placeholder": "Start Date/Times",
                                            "class": "form-control"})
        # Funds can only be sourced from the logged-in manager's department.
        source_fund = SelectField('Fund Source',
                                  validators=[InputRequired(message="A funding source department is required.")],
                                  choices=[
                                      (
                                          sn['manager_dept'],
                                          client.READABLE_DEPARTMENTS[sn['manager_dept']]
                                      )
                                  ],
                                  render_kw={"class": "form-control"})
        dest_fund = SelectField('Fund Destination',
                                validators=[InputRequired(message="A funding destination department is required.")],
                                choices=DEPT_MAPPINGS,
                                render_kw={"class": "form-control"})
        has_fund_individuals = BooleanField('Employee specific disbursement',
                                            render_kw={"class": "custom-control-input"})
        # Fix: the two implicitly-concatenated message literals previously
        # produced "...or disburseto all employees" (missing space).
        disbursement_type = RadioField('Employee Disbursement Type', choices=[
            (DISB_ALL, 'Disburse to all Employees'),
            (DISB_INDIV, 'Search for an Employee'),
        ], default=DISB_ALL, validators=[RequiredIf('has_fund_individuals',
                                                    message="To disburse funds, search for an employee or disburse "
                                                            "to all employees")])
        employees_list = FieldList(EmployeeInfoTextAreaField('employees_list',
                                                             validators=[
                                                                 RequiredIfRadioField(
                                                                     'disbursement_type',
                                                                     DISB_INDIV,
                                                                     message="Please specify at "
                                                                             "least 1 employee to "
                                                                             "disburse funds to.")
                                                             ]),
                                   validators=[EmployeeUnique(object_name="employee id's")],
                                   min_entries=1,
                                   max_entries=24)
        has_end_date = BooleanField('Add End Date',
                                    render_kw={"class": "custom-control-input"})
        # Fix: placeholder previously read "Date Date/Times" (copy-paste typo).
        end_date = StringField('End Date/Times',
                               validators=[
                                   RequiredIf("has_end_date", message="The end date is required."),
                                   EndDateProper(),
                               ],
                               render_kw={"placeholder": "End Date/Times",
                                          "class": "form-control"})
        has_velocity_controls = BooleanField('Add Velocity Controls',
                                             render_kw={"class": "custom-control-input"})
        # The velocity-control sub-fields below are only required when
        # has_velocity_controls is checked (enforced via RequiredIf).
        vel_control_name = StringField('Control Name',
                                       validators=[
                                           RequiredIf('has_velocity_controls',
                                                      message="The velocity control, control name is required."),
                                           Length(max=50)
                                       ],
                                       render_kw={"class": "form-control",
                                                  "placeholder": "Enter a Control Name"})
        vel_control_window = SelectField('Control Window',
                                         validators=[
                                             RequiredIf('has_velocity_controls',
                                                        message="The velocity control, control window is required."),
                                             Length(max=30)
                                         ],
                                         choices=[
                                             ('', 'Select a Control Time Period'),
                                             ('day', 'DAY'),
                                             ('week', 'WEEK'),
                                             ('month', 'MONTH'),
                                             ('lifetime', 'LIFETIME'),
                                             ('transaction', 'TRANSACTION')
                                         ],
                                         render_kw={"class": "form-control"})
        vel_amt_limit = DecimalField('Amount Limit',
                                     validators=[
                                         RequiredIf('has_velocity_controls',
                                                    message="The velocity control amount limit is required."),
                                         NumberRange(min=MINIMUM_CONTROL_AMT,
                                                     message=f"The minimum velocity control amount limit must be at "
                                                             f"least ${MINIMUM_CONTROL_AMT}."),
                                         VelocityUsageLimit()
                                     ],
                                     render_kw={"placeholder": "Amount Limit",
                                                "class": "form-control"},
                                     widget=NumberInput())
        vel_usage_limit = IntegerField('Usage Limit (0 - 100)',
                                       validators=[
                                           RequiredIf('has_velocity_controls',
                                                      message="The velocity control usage limit is required."),
                                           NumberRange(min=0, max=100,
                                                       message="The velocity control usage limit should be between "
                                                               "0 and 100, inclusive.")
                                       ],
                                       render_kw={"placeholder": "Usage Limit",
                                                  "class": "form-control"},
                                       widget=NumberInput())
        # Populated client-side; never rendered as visible inputs.
        time_zone = HiddenField(validators=[InputRequired(message="The timezone is a required field")])
        priority = HiddenField(validators=[
            InputRequired(message="Priority is a required field"),
            AnyOf(values=["Low", "Medium", "High", "Urgent"], message="Priority must be Low, Medium, High, or Urgent")
        ], default="Low")

    return Plan  # Return a reference to the class and not an object!
class Forminator:
    """Normalizing accessor over a submitted plan form.

    Copies the raw WTForms field data out of ``form`` at construction
    time, scrubs the free-text and date fields (whitespace collapsing,
    sentence-casing, trimming), and exposes every value through
    read-only properties.  Optional fields are reported as ``None``
    when they were left empty/unchecked.
    """

    def __init__(self, form):
        # These fields are always required by the form's validators.
        self._form = form
        self._plan_name: str = form.plan_name.data
        # NOTE: DecimalField data is a Decimal, not str.
        self._funding_amount = form.funding_amount.data
        self._plan_justification = form.plan_justification.data
        self._memo = form.memo.data
        self._start_date = form.start_date.data
        self._source_fund = form.source_fund.data
        self._dest_fund = form.dest_fund.data
        # Everything from here on is optional.
        self._has_fund_individuals = form.has_fund_individuals.data
        self._disbursement_type = form.disbursement_type.data
        # Keep the FieldList object itself (not .data): the getter and
        # setter below need append_entry()/pop_entry().
        self._employees_list = form.employees_list
        self._has_end_date = form.has_end_date.data
        self._end_date = form.end_date.data
        self._has_velocity_controls = form.has_velocity_controls.data
        self._vel_control_name = form.vel_control_name.data
        self._vel_control_window = form.vel_control_window.data
        self._vel_amt_limit = form.vel_amt_limit.data
        self._vel_usage_limit = form.vel_usage_limit.data
        self._time_zone = form.time_zone.data
        self._priority = form.priority.data
        self.clean()

    def clean(self):
        """Scrub all free-text fields in place; called from __init__."""
        self._scrub_plan_name()
        self._scrub_plan_justification()
        self._scrub_memo()
        self._scrub_dates()
        # Empty strings are falsy: only scrub a control name if one was given.
        if self._vel_control_name:
            self._scrub_vel_control_name()

    def _scrub_plan_name(self):
        self._plan_name = self.scrub_plan_name(self._plan_name)

    def _scrub_plan_justification(self):
        # Reuse the plan-name filter; the rules are identical.
        self._plan_justification = self.scrub_plan_name(self._plan_justification)

    def _scrub_memo(self):
        # Reuse the plan-name filter; the rules are identical.
        self._memo = self.scrub_plan_name(self._memo)

    def _scrub_dates(self):
        self._start_date = self.scrub_date(self._start_date)
        if self._end_date:
            self._end_date = self.scrub_date(self._end_date)

    def _scrub_vel_control_name(self):
        self._vel_control_name = self.scrub_plan_name(self._vel_control_name)

    @staticmethod
    def scrub_date(date):
        """Trim surrounding whitespace from a date string."""
        return date.strip()

    @staticmethod
    def scrub_plan_name(name):
        """Collapse internal whitespace and sentence-case the string."""
        return " ".join(name.split()).capitalize()

    def is_disbursed_all(self):
        """Return True when funds are disbursed to all employees.

        ``has_fund_individuals`` may be None, making the ``and`` yield
        None; bool() collapses that to False exactly as the previous
        explicit None-check did.
        """
        return bool(self.has_fund_individuals
                    and self.disbursement_type == self._form.DISB_ALL)

    def retrieve(self):
        """Fluent accessor; returns self."""
        return self

    @property
    def plan_name(self):
        return self._plan_name

    @property
    def funding_amount(self):
        return self._funding_amount

    @property
    def plan_justification(self):
        return self._plan_justification

    @property
    def memo(self):
        return self._memo

    @property
    def start_date(self):
        return self._start_date

    @property
    def source_fund(self):
        return self._source_fund

    @property
    def dest_fund(self):
        return self._dest_fund

    @property
    def has_fund_individuals(self):
        return self._has_fund_individuals

    @property
    def disbursement_type(self):
        return self._disbursement_type if self._disbursement_type else None

    @property
    def employees_list(self):
        """Yield each employee entry field.

        NOTE(review): when there are no usable entries this yields a
        single literal ``[]`` item (not an empty iterator); callers may
        depend on that, so the quirk is preserved.
        """
        e_list = self._employees_list.data
        if len(e_list) != 0 and e_list[0] != '':
            for employee_field in e_list:
                yield employee_field
        else:
            yield []

    @employees_list.setter
    def employees_list(self, e_list):
        self._employees_list.pop_entry()  # Remove the default entry
        # Plain loop (not a throwaway list comprehension): append_entry()
        # is called purely for its side effect.
        for entry in e_list:
            self._employees_list.append_entry(entry)

    @property
    def has_end_date(self):
        return self._has_end_date if self._has_end_date else None

    @property
    def end_date(self):
        return self._end_date if self._end_date else None

    @property
    def has_velocity_controls(self):
        return self._has_velocity_controls if self._has_velocity_controls else None

    @property
    def vel_control_name(self):
        return self._vel_control_name if self._vel_control_name else None

    @property
    def vel_control_window(self):
        return self._vel_control_window if self._vel_control_window else None

    @property
    def vel_amt_limit(self):
        return self._vel_amt_limit if self._vel_amt_limit else None

    @property
    def vel_usage_limit(self):
        return self._vel_usage_limit if self._vel_usage_limit else None

    @property
    def raw_form(self):
        return self._form

    @property
    def time_zone(self):
        return self._time_zone

    @property
    def priority(self):
        return self._priority
| 42.26087 | 118 | 0.489893 | import sys
from flask_wtf import FlaskForm
from wtforms import StringField, IntegerField, TextAreaField, SelectField, BooleanField, DecimalField, HiddenField, \
RadioField, FieldList
from wtforms.validators import InputRequired, NumberRange, Length, AnyOf
from wtforms.widgets.html5 import NumberInput
from modules.routes.user.custom_fields import EmployeeInfoTextAreaField
from modules.routes.user.custom_validators import RequiredIf, EmployeeUnique, EndDateProper, \
StartDateProper, RequiredIfRadioField, VelocityUsageLimit, DeptBalance
from modules.routes.utils.custom_fields import InlineSubmitField
from server import client
# (code, label) choices for the fund-destination <select>; the first
# entry is a placeholder with an empty value so InputRequired can
# force a real choice.
DEPT_MAPPINGS = [
    ('', 'Please Choose a fund Destination'),
    ('IT', 'IT'), ('AC', 'ACCOUNTING'), ('MK', 'MARKETING'), ('HR', 'HUMAN RESOURCES'),
    ('PD', 'PRODUCTION'), ('RD', 'RESEARCH & DEVELOPMENT'), ('SC', 'SECURITY'), ('LG', 'LOGISTICS')
]
def create_plan_form(sn):
    """Build and instantiate the plan-creation form for session ``sn``."""
    base_form = get_plan_base(sn)

    class CreatePlanForm(base_form):
        # Extends the shared plan form with a single submit button.
        create_plan_btn = InlineSubmitField(
            "Create Plan",
            btn_text="Create Plan",
            render_kw={"class": "btn btn-primary btn-block"},
        )

    return CreatePlanForm()
def get_plan_form(sn: dict):
    """Build and instantiate the plan-management form for session ``sn``."""
    base_form = get_plan_base(sn)

    class ManagePlanForm(base_form):
        # Extends the shared plan form with a single submit button.
        update_plan_btn = InlineSubmitField(
            "Update Plan",
            btn_text="Update Plan",
            render_kw={"class": "btn btn-primary btn-block"},
        )

    return ManagePlanForm()
def get_plan_base(sn: dict):
    """Build and return the shared plan form *class* (not an instance).

    ``sn['manager_dept']`` seeds the fund-source choices; the create and
    manage forms both subclass the returned ``Plan`` class.

    NOTE(review): ``MINIMUM_FUND_AMT`` and ``MINIMUM_CONTROL_AMT`` are
    referenced below but not defined in this view -- presumably
    module-level constants; confirm they exist at import time.
    """
    class Plan(FlaskForm):
        # Values for the ``disbursement_type`` radio field.
        DISB_ALL = "DISB_ALL"
        DISB_INDIV = "DISB_INDIV"
        plan_name = StringField("Plan Name",
                                validators=[
                                    InputRequired(message="Enter a plan name."),
                                    Length(min=2, max=255, message="Plan name was too short or too long")
                                ],
                                render_kw={"placeholder": "Plan Name",
                                           "class": "form-control"})
        funding_amount = DecimalField('Per-Employee Funding Amount',
                                      validators=[
                                          InputRequired(message="Enter a funding amount."),
                                          NumberRange(min=MINIMUM_FUND_AMT,
                                                      message=f"The minimum funding amount must be at "
                                                              f"least ${MINIMUM_FUND_AMT}."),
                                          # Checks the source department can cover the amount.
                                          DeptBalance(client=client, sn=sn)
                                      ],
                                      render_kw={"placeholder": "Funding Amount",
                                                 "class": "form-control"},
                                      widget=NumberInput())
        plan_justification = StringField('Plan Justification (e.g. Travel, Equipment, Party)',
                                         validators=[
                                             InputRequired(message="A plan justification is required."),
                                             Length(min=3, max=50,
                                                    message="Plan justification was either too short or too long.")
                                         ],
                                         render_kw={"placeholder": "Plan Justification",
                                                    "class": "form-control"})
        memo = TextAreaField('Memo (min 10 chars, max 255 chars.)',
                             validators=[
                                 InputRequired("A memo is required."),
                                 Length(min=10, max=255, message="Memo was either too short or too long.")
                             ],
                             render_kw={"rows": 4,
                                        "maxlength": 255,
                                        "placeholder": "Memo Description",
                                        "class": "form-control"})
        start_date = StringField('Start Date/Times',
                                 validators=[
                                     InputRequired(message="A start date is required."),
                                     StartDateProper()
                                 ],
                                 render_kw={"placeholder": "Start Date/Times",
                                            "class": "form-control"})
        # Only the manager's own department is offered as a source.
        source_fund = SelectField('Fund Source',
                                  validators=[InputRequired(message="A funding source department is required.")],
                                  choices=[
                                      (
                                          sn['manager_dept'],
                                          client.READABLE_DEPARTMENTS[sn['manager_dept']]
                                      )
                                  ],
                                  render_kw={"class": "form-control"})
        dest_fund = SelectField('Fund Destination',
                                validators=[InputRequired(message="A funding destination department is required.")],
                                choices=DEPT_MAPPINGS,
                                render_kw={"class": "form-control"})
        has_fund_individuals = BooleanField('Employee specific disbursement',
                                            render_kw={"class": "custom-control-input"})
        # NOTE(review): the adjacent string literals in the message below
        # concatenate with no separating space ("...disburseto all
        # employees") -- likely a missing trailing space.
        disbursement_type = RadioField('Employee Disbursement Type', choices=[
            (DISB_ALL, 'Disburse to all Employees'),
            (DISB_INDIV, 'Search for an Employee'),
        ], default=DISB_ALL, validators=[RequiredIf('has_fund_individuals',
                                                    message="To disburse funds, search for an employee or disburse"
                                                            "to all employees")])
        # Up to 24 per-employee entries; required only when the user chose
        # individual disbursement.
        employees_list = FieldList(EmployeeInfoTextAreaField('employees_list',
                                                             validators=[
                                                                 RequiredIfRadioField(
                                                                     'disbursement_type',
                                                                     DISB_INDIV,
                                                                     message="Please specify at "
                                                                             "least 1 employee to "
                                                                             "disburse funds to.")
                                                             ]),
                                   validators=[EmployeeUnique(object_name="employee id's")],
                                   min_entries=1,
                                   max_entries=24)
        has_end_date = BooleanField('Add End Date',
                                    render_kw={"class": "custom-control-input"})
        # NOTE(review): placeholder reads "Date Date/Times" -- likely a
        # typo for "End Date/Times".
        end_date = StringField('End Date/Times',
                               validators=[
                                   RequiredIf("has_end_date", message="The end date is required."),
                                   EndDateProper(),
                               ],
                               render_kw={"placeholder": "Date Date/Times",
                                          "class": "form-control"})
        # Velocity-control fields are required as a group when the
        # has_velocity_controls box is checked.
        has_velocity_controls = BooleanField('Add Velocity Controls',
                                             render_kw={"class": "custom-control-input"})
        vel_control_name = StringField('Control Name',
                                       validators=[
                                           RequiredIf('has_velocity_controls',
                                                      message="The velocity control, control name is required."),
                                           Length(max=50)
                                       ],
                                       render_kw={"class": "form-control",
                                                  "placeholder": "Enter a Control Name"})
        vel_control_window = SelectField('Control Window',
                                         validators=[
                                             RequiredIf('has_velocity_controls',
                                                        message="The velocity control, control window is required."),
                                             Length(max=30)
                                         ],
                                         choices=[
                                             ('', 'Select a Control Time Period'),
                                             ('day', 'DAY'),
                                             ('week', 'WEEK'),
                                             ('month', 'MONTH'),
                                             ('lifetime', 'LIFETIME'),
                                             ('transaction', 'TRANSACTION')
                                         ],
                                         render_kw={"class": "form-control"})
        vel_amt_limit = DecimalField('Amount Limit',
                                     validators=[
                                         RequiredIf('has_velocity_controls',
                                                    message="The velocity control amount limit is required."),
                                         NumberRange(min=MINIMUM_CONTROL_AMT,
                                                     message=f"The minimum velocity control amount limit must be at "
                                                             f"least ${MINIMUM_CONTROL_AMT}."),
                                         VelocityUsageLimit()
                                     ],
                                     render_kw={"placeholder": "Amount Limit",
                                                "class": "form-control"},
                                     widget=NumberInput())
        vel_usage_limit = IntegerField('Usage Limit (0 - 100)',
                                       validators=[
                                           RequiredIf('has_velocity_controls',
                                                      message="The velocity control usage limit is required."),
                                           NumberRange(min=0, max=100,
                                                       message="The velocity control usage limit should be between "
                                                               "0 and 100, inclusive.")
                                       ],
                                       render_kw={"placeholder": "Usage Limit",
                                                  "class": "form-control"},
                                       widget=NumberInput())
        # Populated client-side; hidden from the user.
        time_zone = HiddenField(validators=[InputRequired(message="The timezone is a required field")])
        priority = HiddenField(validators=[
            InputRequired(message="Priority is a required field"),
            AnyOf(values=["Low", "Medium", "High", "Urgent"], message="Priority must be Low, Medium, High, or Urgent")
        ], default="Low")
    return Plan  # Return a reference to the class and not an object!
class Forminator:
    """Normalizing accessor over a submitted plan form.

    Copies the raw WTForms field data out of ``form`` at construction
    time, scrubs the free-text and date fields (whitespace collapsing,
    sentence-casing, trimming), and exposes every value through
    read-only properties.  Optional fields are reported as ``None``
    when they were left empty/unchecked.
    """

    def __init__(self, form):
        # These fields are always required by the form's validators.
        self._form = form
        self._plan_name: str = form.plan_name.data
        # NOTE: DecimalField data is a Decimal, not str.
        self._funding_amount = form.funding_amount.data
        self._plan_justification = form.plan_justification.data
        self._memo = form.memo.data
        self._start_date = form.start_date.data
        self._source_fund = form.source_fund.data
        self._dest_fund = form.dest_fund.data
        # Everything from here on is optional.
        self._has_fund_individuals = form.has_fund_individuals.data
        self._disbursement_type = form.disbursement_type.data
        # Keep the FieldList object itself (not .data): the getter and
        # setter below need append_entry()/pop_entry().
        self._employees_list = form.employees_list
        self._has_end_date = form.has_end_date.data
        self._end_date = form.end_date.data
        self._has_velocity_controls = form.has_velocity_controls.data
        self._vel_control_name = form.vel_control_name.data
        self._vel_control_window = form.vel_control_window.data
        self._vel_amt_limit = form.vel_amt_limit.data
        self._vel_usage_limit = form.vel_usage_limit.data
        self._time_zone = form.time_zone.data
        self._priority = form.priority.data
        self.clean()

    def clean(self):
        """Scrub all free-text fields in place; called from __init__."""
        self._scrub_plan_name()
        self._scrub_plan_justification()
        self._scrub_memo()
        self._scrub_dates()
        # Empty strings are falsy: only scrub a control name if one was given.
        if self._vel_control_name:
            self._scrub_vel_control_name()

    def _scrub_plan_name(self):
        self._plan_name = self.scrub_plan_name(self._plan_name)

    def _scrub_plan_justification(self):
        # Reuse the plan-name filter; the rules are identical.
        self._plan_justification = self.scrub_plan_name(self._plan_justification)

    def _scrub_memo(self):
        # Reuse the plan-name filter; the rules are identical.
        self._memo = self.scrub_plan_name(self._memo)

    def _scrub_dates(self):
        self._start_date = self.scrub_date(self._start_date)
        if self._end_date:
            self._end_date = self.scrub_date(self._end_date)

    def _scrub_vel_control_name(self):
        self._vel_control_name = self.scrub_plan_name(self._vel_control_name)

    @staticmethod
    def scrub_date(date):
        """Trim surrounding whitespace from a date string."""
        return date.strip()

    @staticmethod
    def scrub_plan_name(name):
        """Collapse internal whitespace and sentence-case the string."""
        return " ".join(name.split()).capitalize()

    def is_disbursed_all(self):
        """Return True when funds are disbursed to all employees.

        ``has_fund_individuals`` may be None, making the ``and`` yield
        None; bool() collapses that to False exactly as the previous
        explicit None-check did.
        """
        return bool(self.has_fund_individuals
                    and self.disbursement_type == self._form.DISB_ALL)

    def retrieve(self):
        """Fluent accessor; returns self."""
        return self

    @property
    def plan_name(self):
        return self._plan_name

    @property
    def funding_amount(self):
        return self._funding_amount

    @property
    def plan_justification(self):
        return self._plan_justification

    @property
    def memo(self):
        return self._memo

    @property
    def start_date(self):
        return self._start_date

    @property
    def source_fund(self):
        return self._source_fund

    @property
    def dest_fund(self):
        return self._dest_fund

    @property
    def has_fund_individuals(self):
        return self._has_fund_individuals

    @property
    def disbursement_type(self):
        return self._disbursement_type if self._disbursement_type else None

    @property
    def employees_list(self):
        """Yield each employee entry field.

        NOTE(review): when there are no usable entries this yields a
        single literal ``[]`` item (not an empty iterator); callers may
        depend on that, so the quirk is preserved.
        """
        e_list = self._employees_list.data
        if len(e_list) != 0 and e_list[0] != '':
            for employee_field in e_list:
                yield employee_field
        else:
            yield []

    @employees_list.setter
    def employees_list(self, e_list):
        self._employees_list.pop_entry()  # Remove the default entry
        # Plain loop (not a throwaway list comprehension): append_entry()
        # is called purely for its side effect.
        for entry in e_list:
            self._employees_list.append_entry(entry)

    @property
    def has_end_date(self):
        return self._has_end_date if self._has_end_date else None

    @property
    def end_date(self):
        return self._end_date if self._end_date else None

    @property
    def has_velocity_controls(self):
        return self._has_velocity_controls if self._has_velocity_controls else None

    @property
    def vel_control_name(self):
        return self._vel_control_name if self._vel_control_name else None

    @property
    def vel_control_window(self):
        return self._vel_control_window if self._vel_control_window else None

    @property
    def vel_amt_limit(self):
        return self._vel_amt_limit if self._vel_amt_limit else None

    @property
    def vel_usage_limit(self):
        return self._vel_usage_limit if self._vel_usage_limit else None

    @property
    def raw_form(self):
        return self._form

    @property
    def time_zone(self):
        return self._time_zone

    @property
    def priority(self):
        return self._priority
| true | true |
f724cdce03e831ba60b697c99d9b9995f15edd45 | 18,132 | py | Python | cltk/corpus/utils/importer.py | Nada1996/cltk | 594f6aecff64c449a637ed05cd2c4655a606ba2d | [
"MIT"
] | null | null | null | cltk/corpus/utils/importer.py | Nada1996/cltk | 594f6aecff64c449a637ed05cd2c4655a606ba2d | [
"MIT"
] | null | null | null | cltk/corpus/utils/importer.py | Nada1996/cltk | 594f6aecff64c449a637ed05cd2c4655a606ba2d | [
"MIT"
] | null | null | null | """Import CLTK corpora.
TODO: Fix so ``import_corpora()`` can take relative path.
TODO: Add https://github.com/cltk/pos_latin
"""
from cltk.corpus.arabic.corpora import ARABIC_CORPORA
from cltk.corpus.chinese.corpora import CHINESE_CORPORA
from cltk.corpus.coptic.corpora import COPTIC_CORPORA
from cltk.corpus.greek.corpora import GREEK_CORPORA
from cltk.corpus.hebrew.corpora import HEBREW_CORPORA
from cltk.corpus.latin.corpora import LATIN_CORPORA
from cltk.corpus.sanskrit.corpora import SANSKRIT_CORPORA
from cltk.corpus.multilingual.corpora import MULTILINGUAL_CORPORA
from cltk.corpus.pali.corpora import PALI_CORPORA
from cltk.corpus.punjabi.corpora import PUNJABI_CORPORA
from cltk.corpus.tibetan.corpora import TIBETAN_CORPORA
from cltk.corpus.old_english.corpora import OLD_ENGLISH_CORPORA
from cltk.corpus.bengali.corpora import BENGALI_CORPORA
from cltk.corpus.old_church_slavonic.corpora import OCS_CORPORA
from cltk.corpus.prakrit.corpora import PRAKRIT_CORPORA
from cltk.corpus.hindi.corpora import HINDI_CORPORA
from cltk.corpus.javanese.corpora import JAVANESE_CORPORA
from cltk.corpus.malayalam.corpora import MALAYALAM_CORPORA
from cltk.corpus.old_norse.corpora import OLD_NORSE_CORPORA
from cltk.corpus.telugu.corpora import TELUGU_CORPORA
from cltk.corpus.classical_hindi.corpora import CLASSICAL_HINDI_CORPORA
from cltk.corpus.french.corpora import FRENCH_CORPORA
from cltk.corpus.marathi.corpora import MARATHI_CORPORA
from cltk.corpus.gujarati.corpora import GUJARATI_CORPORA
from cltk.corpus.medieval.corpora import MEDIEVAL_CORPORA
from cltk.utils.cltk_logger import logger
import errno
from git import RemoteProgress
from git import Repo
import os
import sys
import shutil
from urllib.parse import urljoin
import yaml
__author__ = ['Kyle P. Johnson <kyle@kyle-p-johnson.com>', 'Stephen Margheim <stephen.margheim@gmail.com>']
__license__ = 'MIT License. See LICENSE.'

# Languages for which corpora can be imported.
# NOTE(review): 'middle_high_german' is listed here but has no entry in
# LANGUAGE_CORPORA below, so importing its corpora would raise KeyError.
AVAILABLE_LANGUAGES = ['arabic', 'chinese', 'coptic', 'greek', 'hebrew', 'latin', 'multilingual',
                       'pali', 'punjabi', 'tibetan', 'sanskrit', 'old_english',
                       'bengali', 'prakrit', 'hindi', 'old_church_slavonic',
                       'malayalam', 'marathi', 'javanese','old_norse','telugu','classical_hindi',
                       'french', 'gujarati', 'middle_high_german','medieval',]

# Root data directory; expanded with os.path.expanduser() at point of use.
CLTK_DATA_DIR = '~/cltk_data'

# Language name -> list of corpus-definition dicts, as declared in each
# language's corpora.py module.
LANGUAGE_CORPORA = {'arabic': ARABIC_CORPORA,
                    'chinese': CHINESE_CORPORA,
                    'coptic': COPTIC_CORPORA,
                    'greek': GREEK_CORPORA,
                    'hebrew': HEBREW_CORPORA,
                    'latin': LATIN_CORPORA,
                    'multilingual': MULTILINGUAL_CORPORA,
                    'pali': PALI_CORPORA,
                    'punjabi': PUNJABI_CORPORA,
                    'tibetan': TIBETAN_CORPORA,
                    'sanskrit': SANSKRIT_CORPORA,
                    'old_english': OLD_ENGLISH_CORPORA,
                    'bengali': BENGALI_CORPORA,
                    'old_church_slavonic': OCS_CORPORA,
                    'prakrit': PRAKRIT_CORPORA,
                    'hindi': HINDI_CORPORA,
                    'malayalam': MALAYALAM_CORPORA,
                    'marathi': MARATHI_CORPORA,
                    'javanese': JAVANESE_CORPORA,
                    'old_norse':OLD_NORSE_CORPORA,
                    'telugu':TELUGU_CORPORA,
                    'classical_hindi':CLASSICAL_HINDI_CORPORA,
                    'french':FRENCH_CORPORA,
                    'gujarati': GUJARATI_CORPORA,
                    'medieval':MEDIEVAL_CORPORA,
                    }
class CorpusImportError(Exception):
    """Raised by the CLTK when something goes wrong importing corpora."""
class ProgressPrinter(RemoteProgress):
    """Reports git clone/fetch progress on stdout as a percentage."""

    def update(self, op_code, cur_count, max_count=None, message=''):
        # git only supplies a message on lines worth showing; skip the rest.
        if not message:
            return
        percentage = '%.0f' % (100 * cur_count / (max_count or 100.0))
        sys.stdout.write('Downloaded %s%% %s \r' % (percentage, message))
class CorpusImporter:
    """Import CLTK corpora (remote git repos or local directories)."""

    def __init__(self, language, testing=False):
        """Setup corpus importing.

        :param language: Language name; lowercased and validated against
            the core languages plus any user-defined corpora.
        :param testing: Hack to read ``test_distributed_corpora.yaml``
            instead of the user's real config, so tests don't clobber a
            local ``distributed_corpora.yaml``.
        """
        self.language = language.lower()
        assert isinstance(testing, bool), '`testing` parameter must be boolean type'
        self.testing = testing
        self.user_defined_corpora = self._setup_language_variables()
        # If user-defined corpora exist, merge them with any core corpora.
        if self.user_defined_corpora:
            logger.info('User-defined corpus found for "{}" language'.format(self.language))
            try:
                logger.debug('Core corpora also found for "{}" language'.format(self.language))
                logger.debug('Combining the user-defined and the core corpora')
                self.official_corpora = LANGUAGE_CORPORA[self.language]
                # Bug fix: copy the core list. Appending to it directly
                # mutated the shared module-level LANGUAGE_CORPORA entry,
                # leaking user corpora into every later CorpusImporter.
                self.all_corpora = list(self.official_corpora)
                for corpus in self.user_defined_corpora:
                    self.all_corpora.append(corpus)
            except KeyError:
                logger.debug('Nothing in the official repos '
                             'for "{}" language. Make the all_corpora solely '
                             'from the .yaml'.format(self.language))
                self.all_corpora = []
                for corpus in self.user_defined_corpora:
                    self.all_corpora.append(corpus)
        else:
            logger.info('No user-defined corpora found for "{}" language'.format(self.language))
            self.all_corpora = LANGUAGE_CORPORA[self.language]

    def __repr__(self):
        """Representation string for ipython
        :rtype : str
        """
        return 'CorpusImporter for: {}'.format(self.language)

    def _check_distributed_corpora_file(self):
        """Check '~/cltk_data/distributed_corpora.yaml' for any custom,
        distributed corpora that the user wants to load locally.

        Returns a list of corpus dicts (possibly empty); missing or
        unparsable config files are treated as "no custom corpora".

        TODO: write check or try if `cltk_data` dir is not present
        """
        if self.testing:
            distributed_corpora_fp = os.path.expanduser('~/cltk_data/test_distributed_corpora.yaml')
        else:
            distributed_corpora_fp = os.path.expanduser('~/cltk_data/distributed_corpora.yaml')
        try:
            with open(distributed_corpora_fp) as file_open:
                corpora_dict = yaml.safe_load(file_open)
        except FileNotFoundError:
            logger.info('`~/cltk_data/distributed_corpora.yaml` file not found.')
            return []
        except yaml.parser.ParserError as parse_err:
            logger.debug('Yaml parsing error: %s' % parse_err)
            return []
        user_defined_corpora = []
        for corpus_name in corpora_dict:
            about = corpora_dict[corpus_name]
            if about['language'].lower() == self.language:
                user_defined_corpora.append({'origin': about['origin'],
                                             'type': about['type'],
                                             'name': corpus_name})
        return user_defined_corpora

    def _setup_language_variables(self):
        """Check for availability of corpora for a language.

        TODO: Make the selection of available languages dynamic from dirs
        within ``corpora`` which contain a ``corpora.py`` file.
        """
        if self.language not in AVAILABLE_LANGUAGES:
            # No official repos; fall back to user-defined corpora only.
            user_defined_corpora = self._check_distributed_corpora_file()
            if user_defined_corpora:
                return user_defined_corpora
            msg = 'Corpora not available (either core or user-defined) for the "{}" language.'.format(self.language)
            logger.info(msg)
            raise CorpusImportError(msg)
        return self._check_distributed_corpora_file()

    @property
    def list_corpora(self):
        """Show corpora available for the CLTK to download."""
        try:
            return [corpus['name'] for corpus in self.all_corpora]
        except (NameError, KeyError) as error:
            msg = 'Corpus not available for language "{}": {}'.format(self.language, error)
            logger.error(msg)
            raise CorpusImportError(msg)

    @staticmethod
    def _copy_dir_recursive(src_rel, dst_rel):
        """Copy contents of one directory to another. `dst_rel` dir cannot
        exist. Source: http://stackoverflow.com/a/1994840

        TODO: Move this to file_operations.py module.

        :type src_rel: str
        :param src_rel: Directory to be copied.
        :type dst_rel: str
        :param dst_rel: Directory to be created with contents of ``src_rel``.
        """
        src = os.path.expanduser(src_rel)
        dst = os.path.expanduser(dst_rel)
        try:
            shutil.copytree(src, dst)
            logger.info('Files copied from %s to %s', src, dst)
        except OSError as exc:
            # ``src`` was a file, not a directory; fall back to a flat copy.
            if exc.errno == errno.ENOTDIR:
                shutil.copy(src, dst)
                logger.info('Files copied from %s to %s', src, dst)
            else:
                raise

    def _get_corpus_properties(self, corpus_name):
        """Return the definition dict of an available corpus.

        :type corpus_name: str
        :param corpus_name: Name of available corpus.
        :raises CorpusImportError: if the corpus is unknown for this language.
        :rtype : dict
        """
        try:
            corpora = self.all_corpora
        except NameError as name_error:
            msg = 'Corpus not available for language ' \
                  '"%s": %s' % (self.language, name_error)
            logger.error(msg)
            raise CorpusImportError(msg)
        for corpus_properties in corpora:
            if corpus_properties['name'] == corpus_name:
                return corpus_properties
        msg = 'Corpus "%s" not available for the ' \
              '"%s" language.' % (corpus_name, self.language)
        logger.error(msg)
        raise CorpusImportError(msg)

    def _clone_or_pull(self, corpus_name, uri, type_dir, target_dir, branch):
        """Clone ``uri`` into ``target_dir``, or pull if already cloned.

        Shared by ``import_corpus()`` and ``_git_user_defined_corpus()``,
        which previously duplicated this logic verbatim.
        """
        target_file = os.path.join(target_dir, 'README.md')
        # The README is used as the "already downloaded" marker.
        if not os.path.isfile(target_file):
            if not os.path.isdir(type_dir):
                os.makedirs(type_dir)
            try:
                msg = "Cloning '{}' from '{}'".format(corpus_name, uri)
                logger.info(msg)
                Repo.clone_from(uri, target_dir, branch=branch, depth=1,
                                progress=ProgressPrinter())
            except CorpusImportError as corpus_imp_err:
                # NOTE(review): git failures raise git.GitCommandError, not
                # CorpusImportError, so this handler likely never fires;
                # kept as-is to avoid changing caller-visible behavior.
                msg = "Git clone of '{}' failed: '{}'".format(uri, corpus_imp_err)
                logger.error(msg)
        else:
            try:
                repo = Repo(target_dir)
                assert not repo.bare  # or: assert repo.exists()
                git_origin = repo.remotes.origin
                msg = "Pulling latest '{}' from '{}'.".format(corpus_name, uri)
                logger.info(msg)
                git_origin.pull()
            except CorpusImportError as corpus_imp_err:
                msg = "Git pull of '{}' failed: '{}'".format(uri, corpus_imp_err)
                logger.error(msg)

    def _git_user_defined_corpus(self, corpus_name, corpus_type, uri: str, branch='master'):
        """Clone or update a git repo defined by the user's yaml config."""
        type_dir_rel = os.path.join(CLTK_DATA_DIR, self.language, corpus_type)
        type_dir = os.path.expanduser(type_dir_rel)
        repo_name = uri.split('/')[-1]  # eg, 'latin_corpus_newton_example.git'
        # Bug fix: str.rstrip('.git') strips any trailing '.', 'g', 'i' or
        # 't' characters (mangling names such as 'corpus_agit'); remove
        # only an actual '.git' suffix.
        if repo_name.endswith('.git'):
            repo_name = repo_name[:-len('.git')]
        target_dir = os.path.join(type_dir, repo_name)
        self._clone_or_pull(corpus_name, uri, type_dir, target_dir, branch)

    def import_corpus(self, corpus_name, local_path=None, branch='master'):  # pylint: disable=R0912
        """Download a remote or load local corpus into dir ``~/cltk_data``.

        :type corpus_name: str
        :param corpus_name: The name of an available corpus.
        :param local_path: str
        :param local_path: A filepath, required when importing local corpora.
        :param branch: What Git branch to clone.
        """
        corpus_properties = self._get_corpus_properties(corpus_name)
        try:
            location = corpus_properties['location']
        except KeyError:
            # User-defined corpora (from distributed_corpora.yaml) carry no
            # 'location' key; hand them to the user-defined downloader.
            # Bug fix: this used to read corpus_properties[''], which
            # raised KeyError for every user-defined corpus.
            git_name = corpus_properties['name']
            git_uri = corpus_properties['origin']
            git_type = corpus_properties['type']
            self._git_user_defined_corpus(git_name, git_type, git_uri)
            return
        corpus_type = corpus_properties['type']
        if location == 'remote':
            git_uri = corpus_properties['origin']
            type_dir_rel = os.path.join(CLTK_DATA_DIR, self.language, corpus_type)
            type_dir = os.path.expanduser(type_dir_rel)
            target_dir = os.path.join(type_dir, corpus_name)
            self._clone_or_pull(corpus_name, git_uri, type_dir, target_dir, branch)
        elif location == 'local':
            msg = "Importing from local path: '{}'".format(local_path)
            logger.info(msg)
            # PHI/TLG corpora cannot be redistributed; the user must supply
            # a local copy with the expected directory name.
            if corpus_name in ('phi5', 'phi7', 'tlg'):
                if corpus_name == 'phi5':
                    # normalize path for checking dir
                    if local_path.endswith('/'):
                        local_path = local_path[:-1]
                    # check for right corpus dir
                    if os.path.split(local_path)[1] != 'PHI5':
                        logger.info("Directory must be named 'PHI5'.")
                if corpus_name == 'phi7':
                    # normalize local_path for checking dir
                    if local_path.endswith('/'):
                        local_path = local_path[:-1]
                    # check for right corpus dir
                    if os.path.split(local_path)[1] != 'PHI7':
                        logger.info("Directory must be named 'PHI7'.")
                if corpus_name == 'tlg':
                    # normalize path for checking dir
                    if local_path.endswith('/'):
                        local_path = local_path[:-1]
                    # check for right corpus dir
                    if os.path.split(local_path)[1] != 'TLG_E':
                        logger.info("Directory must be named 'TLG_E'.")
                # TODO: move the dir-checking commands above into a function
                data_dir = os.path.expanduser(CLTK_DATA_DIR)
                originals_dir = os.path.join(data_dir, 'originals')
                # check for `originals` dir; if not present mkdir
                if not os.path.isdir(originals_dir):
                    os.makedirs(originals_dir)
                    msg = "Wrote directory at '{}'.".format(originals_dir)
                    logger.info(msg)
                tlg_originals_dir = os.path.join(data_dir,
                                                 'originals',
                                                 corpus_name)
                # check for `originals/<corpus_name>`; if present, delete
                if os.path.isdir(tlg_originals_dir):
                    shutil.rmtree(tlg_originals_dir)
                    msg = "Removed directory at '{}'.".format(tlg_originals_dir)
                    logger.info(msg)
                # copy_dir requires that the target not yet exist
                if not os.path.isdir(tlg_originals_dir):
                    self._copy_dir_recursive(local_path, tlg_originals_dir)
if __name__ == '__main__':
    # Ad-hoc smoke test: fetch a small Latin training corpus.
    importer = CorpusImporter('latin')
    importer.import_corpus('latin_training_set_sentence_cltk')
| 46.020305 | 120 | 0.596073 | from cltk.corpus.arabic.corpora import ARABIC_CORPORA
from cltk.corpus.chinese.corpora import CHINESE_CORPORA
from cltk.corpus.coptic.corpora import COPTIC_CORPORA
from cltk.corpus.greek.corpora import GREEK_CORPORA
from cltk.corpus.hebrew.corpora import HEBREW_CORPORA
from cltk.corpus.latin.corpora import LATIN_CORPORA
from cltk.corpus.sanskrit.corpora import SANSKRIT_CORPORA
from cltk.corpus.multilingual.corpora import MULTILINGUAL_CORPORA
from cltk.corpus.pali.corpora import PALI_CORPORA
from cltk.corpus.punjabi.corpora import PUNJABI_CORPORA
from cltk.corpus.tibetan.corpora import TIBETAN_CORPORA
from cltk.corpus.old_english.corpora import OLD_ENGLISH_CORPORA
from cltk.corpus.bengali.corpora import BENGALI_CORPORA
from cltk.corpus.old_church_slavonic.corpora import OCS_CORPORA
from cltk.corpus.prakrit.corpora import PRAKRIT_CORPORA
from cltk.corpus.hindi.corpora import HINDI_CORPORA
from cltk.corpus.javanese.corpora import JAVANESE_CORPORA
from cltk.corpus.malayalam.corpora import MALAYALAM_CORPORA
from cltk.corpus.old_norse.corpora import OLD_NORSE_CORPORA
from cltk.corpus.telugu.corpora import TELUGU_CORPORA
from cltk.corpus.classical_hindi.corpora import CLASSICAL_HINDI_CORPORA
from cltk.corpus.french.corpora import FRENCH_CORPORA
from cltk.corpus.marathi.corpora import MARATHI_CORPORA
from cltk.corpus.gujarati.corpora import GUJARATI_CORPORA
from cltk.corpus.medieval.corpora import MEDIEVAL_CORPORA
from cltk.utils.cltk_logger import logger
import errno
from git import RemoteProgress
from git import Repo
import os
import sys
import shutil
from urllib.parse import urljoin
import yaml
__author__ = ['Kyle P. Johnson <kyle@kyle-p-johnson.com>', 'Stephen Margheim <stephen.margheim@gmail.com>']
__license__ = 'MIT License. See LICENSE.'

# Languages for which corpora can be imported.
# NOTE(review): 'middle_high_german' is listed here but has no entry in
# LANGUAGE_CORPORA below, so importing its corpora would raise KeyError.
AVAILABLE_LANGUAGES = ['arabic', 'chinese', 'coptic', 'greek', 'hebrew', 'latin', 'multilingual',
                       'pali', 'punjabi', 'tibetan', 'sanskrit', 'old_english',
                       'bengali', 'prakrit', 'hindi', 'old_church_slavonic',
                       'malayalam', 'marathi', 'javanese','old_norse','telugu','classical_hindi',
                       'french', 'gujarati', 'middle_high_german','medieval',]

# Root data directory; expanded with os.path.expanduser() at point of use.
CLTK_DATA_DIR = '~/cltk_data'

# Language name -> list of corpus-definition dicts, as declared in each
# language's corpora.py module.
LANGUAGE_CORPORA = {'arabic': ARABIC_CORPORA,
                    'chinese': CHINESE_CORPORA,
                    'coptic': COPTIC_CORPORA,
                    'greek': GREEK_CORPORA,
                    'hebrew': HEBREW_CORPORA,
                    'latin': LATIN_CORPORA,
                    'multilingual': MULTILINGUAL_CORPORA,
                    'pali': PALI_CORPORA,
                    'punjabi': PUNJABI_CORPORA,
                    'tibetan': TIBETAN_CORPORA,
                    'sanskrit': SANSKRIT_CORPORA,
                    'old_english': OLD_ENGLISH_CORPORA,
                    'bengali': BENGALI_CORPORA,
                    'old_church_slavonic': OCS_CORPORA,
                    'prakrit': PRAKRIT_CORPORA,
                    'hindi': HINDI_CORPORA,
                    'malayalam': MALAYALAM_CORPORA,
                    'marathi': MARATHI_CORPORA,
                    'javanese': JAVANESE_CORPORA,
                    'old_norse':OLD_NORSE_CORPORA,
                    'telugu':TELUGU_CORPORA,
                    'classical_hindi':CLASSICAL_HINDI_CORPORA,
                    'french':FRENCH_CORPORA,
                    'gujarati': GUJARATI_CORPORA,
                    'medieval':MEDIEVAL_CORPORA,
                    }
class CorpusImportError(Exception):
    """Raised by the CLTK when something goes wrong importing corpora."""
    pass
class ProgressPrinter(RemoteProgress):
    """Reports git clone/fetch progress on stdout as a percentage."""

    def update(self, op_code, cur_count, max_count=None, message=''):
        # git only supplies a message on lines worth showing; skip the rest.
        if not message:
            return
        percentage = '%.0f' % (100 * cur_count / (max_count or 100.0))
        sys.stdout.write('Downloaded %s%% %s \r' % (percentage, message))
class CorpusImporter:
    """Download and manage CLTK corpora for a single language.

    Corpora come from two places: the core lists hard-coded in
    ``LANGUAGE_CORPORA`` and user-defined entries read from
    ``~/cltk_data/distributed_corpora.yaml``.  Remote corpora are fetched
    with git into ``~/cltk_data/<language>/<corpus type>/``.
    """

    def __init__(self, language, testing=False):
        """Build the combined corpus list for ``language``.

        :param language: language name, e.g. ``'latin'`` (case-insensitive)
        :param testing: read ``test_distributed_corpora.yaml`` instead of the
            production yaml file
        :raises CorpusImportError: language unknown and no user-defined corpora
        """
        self.language = language.lower()
        assert isinstance(testing, bool), '`testing` parameter must be boolean type'
        self.testing = testing
        self.user_defined_corpora = self._setup_language_variables()
        if self.user_defined_corpora:
            logger.info('User-defined corpus found for "{}" language'.format(self.language))
            try:
                logger.debug('Core corpora also found for "{}" language'.format(self.language))
                logger.debug('Combining the user-defined and the core corpora')
                self.official_corpora = LANGUAGE_CORPORA[self.language]
                # Work on a copy: appending directly to the looked-up list
                # (as the previous code did) mutated the module-level
                # LANGUAGE_CORPORA entry, corrupting it for every later
                # CorpusImporter instance of the same language.
                self.all_corpora = list(self.official_corpora)
                self.all_corpora.extend(self.user_defined_corpora)
            except KeyError:
                logger.debug('Nothing in the official repos '
                             'for "{}" language. Make the all_corpora solely '
                             'from the .yaml'.format(self.language))
                self.all_corpora = list(self.user_defined_corpora)
        else:
            logger.info('No user-defined corpora found for "{}" language'.format(self.language))
            self.all_corpora = LANGUAGE_CORPORA[self.language]

    def __repr__(self):
        return 'CorpusImporter for: {}'.format(self.language)

    def _check_distributed_corpora_file(self):
        """Parse ``distributed_corpora.yaml`` and return the entries whose
        ``language`` field matches ``self.language``.

        Returns an empty list when the yaml file is missing, unparsable,
        or empty.
        """
        if self.testing:
            distributed_corpora_fp = os.path.expanduser('~/cltk_data/test_distributed_corpora.yaml')
        else:
            distributed_corpora_fp = os.path.expanduser('~/cltk_data/distributed_corpora.yaml')
        try:
            with open(distributed_corpora_fp) as file_open:
                corpora_dict = yaml.safe_load(file_open)
        except FileNotFoundError:
            logger.info('`~/cltk_data/distributed_corpora.yaml` file not found.')
            return []
        except yaml.parser.ParserError as parse_err:
            logger.debug('Yaml parsing error: %s' % parse_err)
            return []
        if not corpora_dict:
            # yaml.safe_load returns None for an empty file; iterating None
            # would raise TypeError.
            return []
        user_defined_corpora = []
        for corpus_name in corpora_dict:
            about = corpora_dict[corpus_name]
            if about['language'].lower() == self.language:
                user_defined_corpora.append({'origin': about['origin'],
                                             'type': about['type'],
                                             'name': corpus_name})
        return user_defined_corpora

    def _setup_language_variables(self):
        """Return the user-defined corpora for this language.

        :raises CorpusImportError: the language is not in AVAILABLE_LANGUAGES
            and no user-defined corpora exist for it either
        """
        user_defined_corpora = self._check_distributed_corpora_file()
        if self.language not in AVAILABLE_LANGUAGES and not user_defined_corpora:
            msg = 'Corpora not available (either core or user-defined) for the "{}" language.'.format(self.language)
            logger.info(msg)
            raise CorpusImportError(msg)
        return user_defined_corpora

    @property
    def list_corpora(self):
        """Names of all corpora (core + user-defined) known for this language."""
        try:
            return [corpus['name'] for corpus in self.all_corpora]
        except (NameError, KeyError) as error:
            msg = 'Corpus not available for language "{}": {}'.format(self.language, error)
            logger.error(msg)
            raise CorpusImportError(msg)

    @staticmethod
    def _copy_dir_recursive(src_rel, dst_rel):
        """Copy a directory tree (or a single file) after expanding ``~``."""
        src = os.path.expanduser(src_rel)
        dst = os.path.expanduser(dst_rel)
        try:
            shutil.copytree(src, dst)
            logger.info('Files copied from %s to %s', src, dst)
        except OSError as exc:
            if exc.errno == errno.ENOTDIR:
                # ``src`` is a plain file, not a directory.
                shutil.copy(src, dst)
                logger.info('Files copied from %s to %s', src, dst)
            else:
                raise

    def _get_corpus_properties(self, corpus_name):
        """Return the properties dict for ``corpus_name``.

        :raises CorpusImportError: unknown corpus for this language
        """
        try:
            corpora = self.all_corpora
        except NameError as name_error:
            msg = 'Corpus not available for language ' \
                  '"%s": %s' % (self.language, name_error)
            logger.error(msg)
            raise CorpusImportError(msg)
        for corpus_properties in corpora:
            if corpus_properties['name'] == corpus_name:
                return corpus_properties
        msg = 'Corpus "%s" not available for the ' \
              '"%s" language.' % (corpus_name, self.language)
        logger.error(msg)
        raise CorpusImportError(msg)

    def _git_clone_or_pull(self, corpus_name, uri, type_dir, repo_dir_name, branch='master'):
        """Shallow-clone ``uri`` into ``type_dir/repo_dir_name``, or ``git pull``
        when a checkout (detected via its README.md) already exists.

        Shared by core and user-defined imports, which previously duplicated
        this logic verbatim.
        """
        target_dir = os.path.join(type_dir, repo_dir_name)
        target_file = os.path.join(type_dir, repo_dir_name, 'README.md')
        if not os.path.isfile(target_file):
            if not os.path.isdir(type_dir):
                os.makedirs(type_dir)
            try:
                msg = "Cloning '{}' from '{}'".format(corpus_name, uri)
                logger.info(msg)
                Repo.clone_from(uri, target_dir, branch=branch, depth=1,
                                progress=ProgressPrinter())
            # NOTE(review): GitPython raises GitError subclasses, not
            # CorpusImportError, so this handler is unlikely to ever fire;
            # kept as-is to preserve historical behavior — confirm intent.
            except CorpusImportError as corpus_imp_err:
                msg = "Git clone of '{}' failed: '{}'".format(uri, corpus_imp_err)
                logger.error(msg)
        else:
            try:
                repo = Repo(target_dir)
                assert not repo.bare  # sanity check: must be a working tree
                git_origin = repo.remotes.origin
                msg = "Pulling latest '{}' from '{}'.".format(corpus_name, uri)
                logger.info(msg)
                git_origin.pull()
            except CorpusImportError as corpus_imp_err:
                msg = "Git pull of '{}' failed: '{}'".format(uri, corpus_imp_err)
                logger.error(msg)

    def _git_user_defined_corpus(self, corpus_name, corpus_type, uri: str, branch='master'):
        """Clone or update a user-defined corpus hosted at ``uri``."""
        type_dir_rel = os.path.join(CLTK_DATA_DIR, self.language, corpus_type)
        type_dir = os.path.expanduser(type_dir_rel)
        repo_name = uri.split('/')[-1]
        # Strip a literal '.git' suffix.  The previous ``rstrip('.git')``
        # removed *any* trailing '.', 'g', 'i', 't' characters, mangling
        # names such as 'sanskrit.git' -> 'sanskr'.
        if repo_name.endswith('.git'):
            repo_name = repo_name[:-len('.git')]
        self._git_clone_or_pull(corpus_name, uri, type_dir, repo_name, branch=branch)

    def import_corpus(self, corpus_name, local_path=None, branch='master'):
        """Download a remote corpus or install one from a local path.

        :param corpus_name: name as listed by :attr:`list_corpora`
        :param local_path: filesystem path for corpora with 'local' location
            (phi5/phi7/tlg)
        :param branch: git branch to clone for remote corpora
        """
        corpus_properties = self._get_corpus_properties(corpus_name)
        try:
            location = corpus_properties['location']
        except KeyError:
            # A user-defined corpus (it carries no 'location' key): fetch via
            # git.  Bug fix: the name was previously looked up with key ''
            # instead of 'name', which always raised KeyError.
            git_name = corpus_properties['name']
            git_uri = corpus_properties['origin']
            git_type = corpus_properties['type']
            self._git_user_defined_corpus(git_name, git_type, git_uri)
            return
        corpus_type = corpus_properties['type']
        if location == 'remote':
            git_uri = corpus_properties['origin']
            type_dir_rel = os.path.join(CLTK_DATA_DIR, self.language, corpus_type)
            type_dir = os.path.expanduser(type_dir_rel)
            self._git_clone_or_pull(corpus_name, git_uri, type_dir, corpus_name, branch=branch)
        elif location == 'local':
            self._import_local_corpus(corpus_name, local_path)

    def _import_local_corpus(self, corpus_name, local_path):
        """Copy a locally stored corpus (phi5/phi7/tlg) into ``~/cltk_data/originals``."""
        msg = "Importing from local path: '{}'".format(local_path)
        logger.info(msg)
        if corpus_name in ('phi5', 'phi7', 'tlg'):
            # Each of these corpora must live in a conventionally named dir.
            expected = {'phi5': 'PHI5', 'phi7': 'PHI7', 'tlg': 'TLG_E'}[corpus_name]
            if local_path.endswith('/'):
                local_path = local_path[:-1]
            if os.path.split(local_path)[1] != expected:
                logger.info("Directory must be named '{}'.".format(expected))
        data_dir = os.path.expanduser(CLTK_DATA_DIR)
        originals_dir = os.path.join(data_dir, 'originals')
        if not os.path.isdir(originals_dir):
            os.makedirs(originals_dir)
            msg = "Wrote directory at '{}'.".format(originals_dir)
            logger.info(msg)
        tlg_originals_dir = os.path.join(data_dir,
                                         'originals',
                                         corpus_name)
        # Start from a clean slate: remove any stale copy before copying.
        if os.path.isdir(tlg_originals_dir):
            shutil.rmtree(tlg_originals_dir)
            msg = "Removed directory at '{}'.".format(tlg_originals_dir)
            logger.info(msg)
        if not os.path.isdir(tlg_originals_dir):
            self._copy_dir_recursive(local_path, tlg_originals_dir)
if __name__ == '__main__':
    # Smoke test: fetch a small Latin training corpus when run as a script.
    importer = CorpusImporter('latin')
    importer.import_corpus('latin_training_set_sentence_cltk')
| true | true |
f724cdeca3f91643abf9127ba1abde54edc87cec | 16,147 | py | Python | hikyuu/admin/HikyuuAdmin.py | dasuren/hikyuu | d1a1a43c10653d17ac91446e4499e6cfbfdbce12 | [
"MIT"
] | 1 | 2021-05-20T14:47:16.000Z | 2021-05-20T14:47:16.000Z | hikyuu/admin/HikyuuAdmin.py | dasuren/hikyuu | d1a1a43c10653d17ac91446e4499e6cfbfdbce12 | [
"MIT"
] | null | null | null | hikyuu/admin/HikyuuAdmin.py | dasuren/hikyuu | d1a1a43c10653d17ac91446e4499e6cfbfdbce12 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
#
# The MIT License (MIT)
#
# Copyright (c) 2010-2019 fasiondog/hikyuu
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import logging
import sys
import os
from PyQt5.QtWidgets import QVBoxLayout
cur_dir = os.path.dirname(__file__)
# 将当前目录加入 sys.path 以便其下子模块可以互相引用
sys.path.append(cur_dir)
# 将hikyuu目录加入 sys.path 以便直接引用 utils 包
sys.path.append(os.path.split(cur_dir)[0])
from PyQt5 import QtCore, QtGui, QtWidgets
import qdarkstyle
from UiConfig import UiConfig
from translate import _translate
from widget.HkuSessionViewWidget import HkuSessionViewWidget
from dialog import *
from widget import *
from data import (get_local_db, SessionModel)
from service import AssisService
class MyMainWindow(QtWidgets.QMainWindow):
    """Main window of the Hikyuu strategy server administration GUI.

    Builds the menu bar, tool bar, central tab area and the session dock
    widget, and persists window style/geometry through :class:`UiConfig`.
    """
    def __init__(self):
        super().__init__()
        appid = 'HikyuuAdmin'
        QtWidgets.QApplication.setApplicationName(appid)
        QtWidgets.QApplication.setOrganizationName("org.hikyuu")
        # Internationalization: install the Chinese translation when the
        # system locale is Chinese.
        loc = QtCore.QLocale()
        if loc.language() == QtCore.QLocale.Chinese:
            self.trans = QtCore.QTranslator()
            self.trans.load("{}/language/zh_CN.qm".format(os.path.dirname(__file__)))  # load the .qm language bundle
            _app = QtWidgets.QApplication.instance()  # running application instance
            _app.installTranslator(self.trans)  # install the translator on the app
        # Application icon, provided in multiple sizes.
        # If the icon does not display, check that the resource module is imported.
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(":/logo/logo_16.png"))
        icon.addPixmap(QtGui.QPixmap(":/logo/logo_32.png"))
        icon.addPixmap(QtGui.QPixmap(":/logo/logo_48.png"))
        icon.addPixmap(QtGui.QPixmap(":/logo/logo_64.png"))
        icon.addPixmap(QtGui.QPixmap(":/logo/logo_128.png"))
        icon.addPixmap(QtGui.QPixmap(":/logo/logo_256.png"))
        self.setWindowIcon(icon)
        if sys.platform == 'win32':
            # On Windows, register an explicit AppUserModelID so the taskbar
            # shows this application's own icon.
            import ctypes
            ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(appid)
        self.ui_config = UiConfig()
        self.setObjectName("HikyuuAdminMainWindow")
        self.setWindowTitle(_translate("MainWindow", "Hikyuu Strategy Server Manager"))
        # Bind the local database; prefer the Model helper methods over
        # using this handle directly.
        self.db = get_local_db()
        self.initAction()
        self.initMenuBar()
        self.initMenu()
        self.initToolBar()
        self.initActionConnect()
        self.initMainTabWidget()
        self.initDockWidgets()
        self.statusBar().showMessage(_translate('MainWindow', 'Running'))
        # After initialization, restore the window style and size from the
        # saved UI configuration.
        style = self.ui_config.get('main_window', 'style', fallback='normal_style')
        if style == 'dark_style':
            QtWidgets.qApp.setStyleSheet(qdarkstyle.load_stylesheet(qt_api='pyqt5'))
        if self.ui_config.getboolean('main_window', 'maximized', fallback=False):
            self.showMaximized()
        else:
            self.resize(
                self.ui_config.getint('main_window', 'width', fallback=800),
                self.ui_config.getint('main_window', 'height', fallback=500)
            )
        QtCore.QMetaObject.connectSlotsByName(self)
    @property
    def session(self):
        """SQLAlchemy session of the bound local database."""
        return self.db.session
    def closeEvent(self, event):
        """Persist window geometry/style before the window closes."""
        self.ui_config.save(self)
        event.accept()
    def initAction(self):
        """Create all QActions and set their status tips / initial state."""
        self.action_dict = dict(
            action_new_file_session=QtWidgets.QAction(
                QtGui.QIcon(":/icon/new_32.png"), _translate("MainWindow", "&New Session"), self
            ),
            action_edit_file_session=QtWidgets.QAction(
                QtGui.QIcon(":/icon/edit_32.png"), _translate("MainWindow", "&Edit Session"), self
            ),
            action_del_file_session=QtWidgets.QAction(
                QtGui.QIcon(":/icon/cancel_32.png"), _translate("MainWindow", "&Remove Session"), self
            ),
            action_file_connect=QtWidgets.QAction(
                QtGui.QIcon(":/icon/connect_32.png"), _translate('MainWindow', '&Connect Now')
            ),
            action_file_quit=QtWidgets.QAction(
                QtGui.QIcon(":/icon/quit_32.png"), _translate('MainWindow', '&Quit'), self
            ),
            action_view_normal_style=QtWidgets.QAction(_translate('MainWindow', 'Normal style'), self),
            action_view_dark_style=QtWidgets.QAction(_translate('MainWindow', 'Dark style'), self),
            action_about=QtWidgets.QAction(_translate('MainWindow', 'About'), self),
            action_about_qt=QtWidgets.QAction(_translate('MainWindow', 'About Qt'), self),
        )
        self.action_dict['action_new_file_session'].setStatusTip(_translate('MainWindow', 'New Session'))
        self.action_dict['action_file_connect'].setStatusTip(_translate('MainWindow', 'Connect Now'))
        self.action_dict['action_file_quit'].setStatusTip(_translate('MainWindow', 'Quit Application'))
        self.action_dict['action_about_qt'].setStatusTip(_translate('MainWindow', "Show the Qt library's About box"))
        # objectName doubles as the style key consumed by actionChangStyle().
        self.action_dict['action_view_normal_style'].setObjectName('normal_style')
        self.action_dict['action_view_normal_style'].setStatusTip(_translate('MainWindow', 'Switch to normal style'))
        self.action_dict['action_view_dark_style'].setObjectName('dark_style')
        self.action_dict['action_view_dark_style'].setStatusTip(_translate('MainWindow', 'Switch to dark style'))
        # Edit/remove stay disabled until a session is selected.
        self.action_dict['action_edit_file_session'].setEnabled(False)
        self.action_dict['action_del_file_session'].setEnabled(False)
    def initMenuBar(self):
        """Create the top-level File / View / Help menus."""
        self.menubar_dict = dict(
            menu_file=self.menuBar().addMenu(_translate('MainWindow', "&File(F)")),
            menu_view=self.menuBar().addMenu(_translate('MainWindow', "&View(V)")),
            menu_help=self.menuBar().addMenu(_translate('MainWindow', "&Help(H)"))
        )
    def initMenu(self):
        """Populate the menus with the actions created in initAction()."""
        file_session_menu = self.menubar_dict['menu_file'].addMenu(
            QtGui.QIcon(":/icon/server_16.png"), _translate('MainWindow', '&Session Manager')
        )
        style_menu = self.menubar_dict['menu_view'].addMenu(_translate('MainWindow', 'Skin style'))
        self.menu_dict = dict(
            menu_file_new_session=file_session_menu.addAction(self.action_dict['action_new_file_session']),
            menu_file_edit_session=file_session_menu.addAction(self.action_dict['action_edit_file_session']),
            menu_file_del_session=file_session_menu.addAction(self.action_dict['action_del_file_session']),
            menu_file_connect=self.menubar_dict['menu_file'].addAction(self.action_dict['action_file_connect']),
            menu_file_quit=self.menubar_dict['menu_file'].addAction(self.action_dict['action_file_quit']),
            menu_view_normal_style=style_menu.addAction(self.action_dict['action_view_normal_style']),
            menu_view_dark_style=style_menu.addAction(self.action_dict['action_view_dark_style']),
            menu_about=self.menubar_dict['menu_help'].addAction(self.action_dict['action_about']),
            menu_about_qt=self.menubar_dict['menu_help'].addAction(self.action_dict['action_about_qt']),
        )
    def initToolBar(self):
        """Create the file tool bar mirroring the session/quit actions."""
        self.setUnifiedTitleAndToolBarOnMac(True)
        file_toolbar = self.addToolBar('File')
        file_toolbar.addAction(self.action_dict['action_new_file_session'])
        file_toolbar.addAction(self.action_dict['action_edit_file_session'])
        file_toolbar.addAction(self.action_dict['action_del_file_session'])
        file_toolbar.addAction(self.action_dict['action_file_connect'])
        file_toolbar.addAction(self.action_dict['action_file_quit'])
    def initActionConnect(self):
        """Wire each action's triggered signal to its handler slot."""
        self.action_dict['action_new_file_session'].triggered.connect(self.actionNewSession)
        self.action_dict['action_edit_file_session'].triggered.connect(self.actionEditSession)
        self.action_dict['action_del_file_session'].triggered.connect(self.actionDeleteSession)
        self.action_dict['action_file_connect'].triggered.connect(self.actionConnect)
        self.action_dict['action_file_quit'].triggered.connect(self.close)
        self.action_dict['action_about'].triggered.connect(self.actionAbout)
        self.action_dict['action_about_qt'].triggered.connect(QtWidgets.QApplication.aboutQt)
        self.action_dict['action_view_normal_style'].triggered.connect(self.actionChangStyle)
        self.action_dict['action_view_dark_style'].triggered.connect(self.actionChangStyle)
    def initMainTabWidget(self):
        """Create the central tab widget that hosts management pages."""
        self.main_tab = QtWidgets.QTabWidget()
        self.setCentralWidget(self.main_tab)
        # Tabs are closable; route the close request to closeTab().
        self.main_tab.setTabsClosable(True)
        self.main_tab.tabCloseRequested.connect(self.closeTab)
        self.tab_title_user_manage = _translate("MainWindow", "User Manage")
        # Maps tab title -> widget (None when the tab has been closed).
        self.tabs = {}
    def initDockWidgets(self):
        """Create the left session-tree dock and populate it from the DB."""
        self.server_view_dock = HkuSessionViewWidget(self)
        self.server_view_dock.setFeatures(QtWidgets.QDockWidget.DockWidgetMovable)  # movable only; closing disabled
        self.server_view_dock.setMinimumWidth(200)
        # Remove the dock window's top title-bar buttons.
        title_bar = self.server_view_dock.titleBarWidget()
        self.server_view_dock.setTitleBarWidget(QtWidgets.QWidget())
        del title_bar
        self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, self.server_view_dock)
        servers = self.db.session.query(SessionModel).order_by(SessionModel.name.asc()).all()
        for server in servers:
            server.running = False  # objects from a SQLAlchemy query lack this non-column attribute; add it manually
            self.server_view_dock.addSession(server)
        self.server_view_dock.user_manage_trigger.connect(self.openUserManageTab)
    def actionAbout(self):
        """Show the application's About dialog."""
        msg = _translate(
            'MainWindow', "<p><b>Hikyuu Strategy Server Manager</b><p>"
            "<p>Hikyuu strategy server management is used to "
            "manage quant trading strategies based on hikyuu "
            "quant framework</p>"
            "<p><b>Hikyuu Quant Framework</b></p>"
            "It is a high performance open source quantitative "
            "trading research framework based on C++/Python, "
            "which is used for stratgy analysis and back testing."
            "Now it only used in Chinese stock market)</p>"
            '<p>see more: <a href="https://hikyuu.org">https://hikyuu.org<a></p>'
        )
        QtWidgets.QMessageBox.about(self, _translate('MainWindow', 'About Hikyuu Strategy Server Manager'), msg)
    def actionChangStyle(self):
        """Switch skin style based on the triggering action's objectName."""
        QtWidgets.qApp.setStyleSheet('')
        style_name = self.sender().objectName()
        if style_name == 'dark_style':
            QtWidgets.qApp.setStyleSheet(qdarkstyle.load_stylesheet(qt_api='pyqt5'))
        self.ui_config.set('main_window', 'style', style_name)
    def actionNewSession(self):
        """Open the session editor to create and persist a new session."""
        server_session = SessionModel()
        session_dialog = HkuEditSessionDialog(self)
        session_dialog.setWindowTitle(_translate("MainWindow", "New Session"))
        session_dialog.setData(server_session)
        if session_dialog.exec() >= 0:
            session_data = session_dialog.getData()
            session_data.save()
            self.server_view_dock.addSession(session_data)
        session_dialog.destroy()
    def actionEditSession(self):
        """Edit the currently selected session (looked up by name)."""
        item = self.server_view_dock.tree.currentItem()
        server_session = self.db.session.query(SessionModel).filter_by(name=item.text(0)).first() if item else None
        if server_session is None:
            QtWidgets.QMessageBox.about(
                self, _translate("MainWindow", "info"), _translate("MainWindow", "Please select a session to execute")
            )
            return
        edit_session_dialog = HkuEditSessionDialog(self)
        edit_session_dialog.setWindowTitle(_translate("MainWindow", "Edit Session"))
        edit_session_dialog.setData(server_session)
        if edit_session_dialog.exec() >= 0:
            session_data = edit_session_dialog.getData()
            session_data.save()
            self.server_view_dock.modifySession(item, session_data)
        edit_session_dialog.destroy()
    def actionDeleteSession(self):
        """Remove the selected session from the tree and the database, after confirmation."""
        item = self.server_view_dock.tree.currentItem()
        data = item.data(0, QtCore.Qt.UserRole) if item is not None else None
        if data is None:
            QtWidgets.QMessageBox.about(
                self, _translate("MainWindow", "info"), _translate("MainWindow", "Please select a session to execute")
            )
            return
        ret = QtWidgets.QMessageBox.question(
            self, _translate("MainWindow", "Confirm removal"),
            _translate("MainWindow", "Confirm to remove the session (%s)?") % item.text(0),
            QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No
        )
        if ret == QtWidgets.QMessageBox.Yes:
            root_index = self.server_view_dock.tree.indexOfTopLevelItem(item)
            self.server_view_dock.tree.takeTopLevelItem(root_index)
            data.delete()
    def actionConnect(self):
        """Probe the selected session's server and reflect its state in the tree."""
        item = self.server_view_dock.tree.currentItem()
        if item is None:
            logging.error("Can't get currentItem.")
            return
        session = item.data(0, QtCore.Qt.UserRole)
        # NOTE(review): status/msg are unused; getServerStatus presumably
        # updates session.running as a side effect — confirm.
        status, msg = AssisService.getServerStatus(session)
        if not session.running:
            self.server_view_dock.set_gray(item)
            QtWidgets.QMessageBox.warning(
                self, _translate("MainWindow", "info"), _translate("MainWindow", "connection failed")
            )
        else:
            self.server_view_dock.set_default(item)
        self.server_view_dock.tree.viewport().update()
    def closeTab(self, index):
        """Remove the tab at ``index``; its registry entry is kept as None so it can be recreated."""
        title = self.main_tab.tabText(index)
        self.main_tab.removeTab(index)
        self.tabs[title] = None
    def openUserManageTab(self, session):
        """Open (or refuse, if disconnected) the user-management tab for ``session``."""
        title = "{}({})".format(self.tab_title_user_manage, session.name)
        if title not in self.tabs or self.tabs[title] is None:
            if not session.running:
                QtWidgets.QMessageBox.warning(
                    self, _translate("MainWindow", "info"),
                    _translate("MainWindow", "The server is disconnected. Please connect first!")
                )
            else:
                tab = HkuUserManagerWidget(session, self.main_tab)
                self.main_tab.addTab(tab, title)
                self.tabs[title] = tab
def main_core():
    """Configure logging, launch the Qt application, and run the main window.

    An exit code of 888 (issued via ``qApp.exit(888)`` inside the app)
    requests a restart: the window and application objects are released
    and a fresh application is started.
    """
    log_format = '%(asctime)-15s [%(levelname)s]: %(message)s [%(name)s::%(funcName)s]'
    logging.basicConfig(format=log_format, level=logging.INFO, handlers=[
        logging.StreamHandler(),
    ])
    # High-DPI scaling so fonts/widgets render fully on high-resolution screens.
    QtWidgets.QApplication.setAttribute(QtCore.Qt.AA_UseHighDpiPixmaps)
    QtWidgets.QApplication.setAttribute(QtCore.Qt.AA_EnableHighDpiScaling)
    while True:
        app = QtWidgets.QApplication(sys.argv)
        window = MyMainWindow()
        window.show()
        if app.exec() != 888:
            sys.exit()
        # Restart requested: both objects must be released, otherwise the
        # old application instance prevents a clean relaunch.
        del window
        del app
if __name__ == "__main__":
main_core()
| 45.872159 | 118 | 0.675543 |
import logging
import sys
import os
from PyQt5.QtWidgets import QVBoxLayout
cur_dir = os.path.dirname(__file__)
sys.path.append(cur_dir)
sys.path.append(os.path.split(cur_dir)[0])
from PyQt5 import QtCore, QtGui, QtWidgets
import qdarkstyle
from UiConfig import UiConfig
from translate import _translate
from widget.HkuSessionViewWidget import HkuSessionViewWidget
from dialog import *
from widget import *
from data import (get_local_db, SessionModel)
from service import AssisService
class MyMainWindow(QtWidgets.QMainWindow):
def __init__(self):
super().__init__()
appid = 'HikyuuAdmin'
QtWidgets.QApplication.setApplicationName(appid)
QtWidgets.QApplication.setOrganizationName("org.hikyuu")
loc = QtCore.QLocale()
if loc.language() == QtCore.QLocale.Chinese:
self.trans = QtCore.QTranslator()
self.trans.load("{}/language/zh_CN.qm".format(os.path.dirname(__file__)))
_app = QtWidgets.QApplication.instance()
_app.installTranslator(self.trans)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(":/logo/logo_16.png"))
icon.addPixmap(QtGui.QPixmap(":/logo/logo_32.png"))
icon.addPixmap(QtGui.QPixmap(":/logo/logo_48.png"))
icon.addPixmap(QtGui.QPixmap(":/logo/logo_64.png"))
icon.addPixmap(QtGui.QPixmap(":/logo/logo_128.png"))
icon.addPixmap(QtGui.QPixmap(":/logo/logo_256.png"))
self.setWindowIcon(icon)
if sys.platform == 'win32':
import ctypes
ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(appid)
self.ui_config = UiConfig()
self.setObjectName("HikyuuAdminMainWindow")
self.setWindowTitle(_translate("MainWindow", "Hikyuu Strategy Server Manager"))
self.db = get_local_db()
self.initAction()
self.initMenuBar()
self.initMenu()
self.initToolBar()
self.initActionConnect()
self.initMainTabWidget()
self.initDockWidgets()
self.statusBar().showMessage(_translate('MainWindow', 'Running'))
style = self.ui_config.get('main_window', 'style', fallback='normal_style')
if style == 'dark_style':
QtWidgets.qApp.setStyleSheet(qdarkstyle.load_stylesheet(qt_api='pyqt5'))
if self.ui_config.getboolean('main_window', 'maximized', fallback=False):
self.showMaximized()
else:
self.resize(
self.ui_config.getint('main_window', 'width', fallback=800),
self.ui_config.getint('main_window', 'height', fallback=500)
)
QtCore.QMetaObject.connectSlotsByName(self)
@property
def session(self):
return self.db.session
def closeEvent(self, event):
self.ui_config.save(self)
event.accept()
def initAction(self):
self.action_dict = dict(
action_new_file_session=QtWidgets.QAction(
QtGui.QIcon(":/icon/new_32.png"), _translate("MainWindow", "&New Session"), self
),
action_edit_file_session=QtWidgets.QAction(
QtGui.QIcon(":/icon/edit_32.png"), _translate("MainWindow", "&Edit Session"), self
),
action_del_file_session=QtWidgets.QAction(
QtGui.QIcon(":/icon/cancel_32.png"), _translate("MainWindow", "&Remove Session"), self
),
action_file_connect=QtWidgets.QAction(
QtGui.QIcon(":/icon/connect_32.png"), _translate('MainWindow', '&Connect Now')
),
action_file_quit=QtWidgets.QAction(
QtGui.QIcon(":/icon/quit_32.png"), _translate('MainWindow', '&Quit'), self
),
action_view_normal_style=QtWidgets.QAction(_translate('MainWindow', 'Normal style'), self),
action_view_dark_style=QtWidgets.QAction(_translate('MainWindow', 'Dark style'), self),
action_about=QtWidgets.QAction(_translate('MainWindow', 'About'), self),
action_about_qt=QtWidgets.QAction(_translate('MainWindow', 'About Qt'), self),
)
self.action_dict['action_new_file_session'].setStatusTip(_translate('MainWindow', 'New Session'))
self.action_dict['action_file_connect'].setStatusTip(_translate('MainWindow', 'Connect Now'))
self.action_dict['action_file_quit'].setStatusTip(_translate('MainWindow', 'Quit Application'))
self.action_dict['action_about_qt'].setStatusTip(_translate('MainWindow', "Show the Qt library's About box"))
self.action_dict['action_view_normal_style'].setObjectName('normal_style')
self.action_dict['action_view_normal_style'].setStatusTip(_translate('MainWindow', 'Switch to normal style'))
self.action_dict['action_view_dark_style'].setObjectName('dark_style')
self.action_dict['action_view_dark_style'].setStatusTip(_translate('MainWindow', 'Switch to dark style'))
self.action_dict['action_edit_file_session'].setEnabled(False)
self.action_dict['action_del_file_session'].setEnabled(False)
def initMenuBar(self):
self.menubar_dict = dict(
menu_file=self.menuBar().addMenu(_translate('MainWindow', "&File(F)")),
menu_view=self.menuBar().addMenu(_translate('MainWindow', "&View(V)")),
menu_help=self.menuBar().addMenu(_translate('MainWindow', "&Help(H)"))
)
def initMenu(self):
file_session_menu = self.menubar_dict['menu_file'].addMenu(
QtGui.QIcon(":/icon/server_16.png"), _translate('MainWindow', '&Session Manager')
)
style_menu = self.menubar_dict['menu_view'].addMenu(_translate('MainWindow', 'Skin style'))
self.menu_dict = dict(
menu_file_new_session=file_session_menu.addAction(self.action_dict['action_new_file_session']),
menu_file_edit_session=file_session_menu.addAction(self.action_dict['action_edit_file_session']),
menu_file_del_session=file_session_menu.addAction(self.action_dict['action_del_file_session']),
menu_file_connect=self.menubar_dict['menu_file'].addAction(self.action_dict['action_file_connect']),
menu_file_quit=self.menubar_dict['menu_file'].addAction(self.action_dict['action_file_quit']),
menu_view_normal_style=style_menu.addAction(self.action_dict['action_view_normal_style']),
menu_view_dark_style=style_menu.addAction(self.action_dict['action_view_dark_style']),
menu_about=self.menubar_dict['menu_help'].addAction(self.action_dict['action_about']),
menu_about_qt=self.menubar_dict['menu_help'].addAction(self.action_dict['action_about_qt']),
)
def initToolBar(self):
self.setUnifiedTitleAndToolBarOnMac(True)
file_toolbar = self.addToolBar('File')
file_toolbar.addAction(self.action_dict['action_new_file_session'])
file_toolbar.addAction(self.action_dict['action_edit_file_session'])
file_toolbar.addAction(self.action_dict['action_del_file_session'])
file_toolbar.addAction(self.action_dict['action_file_connect'])
file_toolbar.addAction(self.action_dict['action_file_quit'])
def initActionConnect(self):
self.action_dict['action_new_file_session'].triggered.connect(self.actionNewSession)
self.action_dict['action_edit_file_session'].triggered.connect(self.actionEditSession)
self.action_dict['action_del_file_session'].triggered.connect(self.actionDeleteSession)
self.action_dict['action_file_connect'].triggered.connect(self.actionConnect)
self.action_dict['action_file_quit'].triggered.connect(self.close)
self.action_dict['action_about'].triggered.connect(self.actionAbout)
self.action_dict['action_about_qt'].triggered.connect(QtWidgets.QApplication.aboutQt)
self.action_dict['action_view_normal_style'].triggered.connect(self.actionChangStyle)
self.action_dict['action_view_dark_style'].triggered.connect(self.actionChangStyle)
def initMainTabWidget(self):
self.main_tab = QtWidgets.QTabWidget()
self.setCentralWidget(self.main_tab)
# 设置为可关闭,并连接信号
self.main_tab.setTabsClosable(True)
self.main_tab.tabCloseRequested.connect(self.closeTab)
self.tab_title_user_manage = _translate("MainWindow", "User Manage")
self.tabs = {}
def initDockWidgets(self):
self.server_view_dock = HkuSessionViewWidget(self)
self.server_view_dock.setFeatures(QtWidgets.QDockWidget.DockWidgetMovable) # 禁止关闭
self.server_view_dock.setMinimumWidth(200)
# 消除 docker window 的顶部按钮
title_bar = self.server_view_dock.titleBarWidget()
self.server_view_dock.setTitleBarWidget(QtWidgets.QWidget())
del title_bar
self.addDockWidget(QtCore.Qt.LeftDockWidgetArea, self.server_view_dock)
servers = self.db.session.query(SessionModel).order_by(SessionModel.name.asc()).all()
for server in servers:
server.running = False # SQLalchemy query 出来的对象并没有添加非数据库外的属性,此处手工添加保护
self.server_view_dock.addSession(server)
self.server_view_dock.user_manage_trigger.connect(self.openUserManageTab)
def actionAbout(self):
msg = _translate(
'MainWindow', "<p><b>Hikyuu Strategy Server Manager</b><p>"
"<p>Hikyuu strategy server management is used to "
"manage quant trading strategies based on hikyuu "
"quant framework</p>"
"<p><b>Hikyuu Quant Framework</b></p>"
"It is a high performance open source quantitative "
"trading research framework based on C++/Python, "
"which is used for stratgy analysis and back testing."
"Now it only used in Chinese stock market)</p>"
'<p>see more: <a href="https://hikyuu.org">https://hikyuu.org<a></p>'
)
QtWidgets.QMessageBox.about(self, _translate('MainWindow', 'About Hikyuu Strategy Server Manager'), msg)
def actionChangStyle(self):
QtWidgets.qApp.setStyleSheet('')
style_name = self.sender().objectName()
if style_name == 'dark_style':
QtWidgets.qApp.setStyleSheet(qdarkstyle.load_stylesheet(qt_api='pyqt5'))
self.ui_config.set('main_window', 'style', style_name)
def actionNewSession(self):
server_session = SessionModel()
session_dialog = HkuEditSessionDialog(self)
session_dialog.setWindowTitle(_translate("MainWindow", "New Session"))
session_dialog.setData(server_session)
if session_dialog.exec() >= 0:
session_data = session_dialog.getData()
session_data.save()
self.server_view_dock.addSession(session_data)
session_dialog.destroy()
def actionEditSession(self):
item = self.server_view_dock.tree.currentItem()
server_session = self.db.session.query(SessionModel).filter_by(name=item.text(0)).first() if item else None
if server_session is None:
QtWidgets.QMessageBox.about(
self, _translate("MainWindow", "info"), _translate("MainWindow", "Please select a session to execute")
)
return
edit_session_dialog = HkuEditSessionDialog(self)
edit_session_dialog.setWindowTitle(_translate("MainWindow", "Edit Session"))
edit_session_dialog.setData(server_session)
if edit_session_dialog.exec() >= 0:
session_data = edit_session_dialog.getData()
session_data.save()
self.server_view_dock.modifySession(item, session_data)
edit_session_dialog.destroy()
def actionDeleteSession(self):
item = self.server_view_dock.tree.currentItem()
data = item.data(0, QtCore.Qt.UserRole) if item is not None else None
if data is None:
QtWidgets.QMessageBox.about(
self, _translate("MainWindow", "info"), _translate("MainWindow", "Please select a session to execute")
)
return
ret = QtWidgets.QMessageBox.question(
self, _translate("MainWindow", "Confirm removal"),
_translate("MainWindow", "Confirm to remove the session (%s)?") % item.text(0),
QtWidgets.QMessageBox.Yes | QtWidgets.QMessageBox.No
)
if ret == QtWidgets.QMessageBox.Yes:
root_index = self.server_view_dock.tree.indexOfTopLevelItem(item)
self.server_view_dock.tree.takeTopLevelItem(root_index)
data.delete()
def actionConnect(self):
    """Probe the selected session's server and color its tree item accordingly."""
    item = self.server_view_dock.tree.currentItem()
    if item is None:
        logging.error("Can't get currentItem.")
        return
    session = item.data(0, QtCore.Qt.UserRole)
    # NOTE(review): the returned (status, msg) pair is never used; the branch
    # below relies on getServerStatus updating session.running as a side
    # effect — confirm against AssisService before simplifying this call.
    status, msg = AssisService.getServerStatus(session)
    if not session.running:
        # Server unreachable: gray the item out and warn the user.
        self.server_view_dock.set_gray(item)
        QtWidgets.QMessageBox.warning(
            self, _translate("MainWindow", "info"), _translate("MainWindow", "connection failed")
        )
    else:
        self.server_view_dock.set_default(item)
    # Force a repaint so the new item color shows immediately.
    self.server_view_dock.tree.viewport().update()
def closeTab(self, index):
    """Remove the tab at *index* and mark its registry slot as closed."""
    tab_title = self.main_tab.tabText(index)
    self.main_tab.removeTab(index)
    # Keep the key but null the widget so openUserManageTab recreates it.
    self.tabs[tab_title] = None
def openUserManageTab(self, session):
    """Open the user-management tab for *session* (no-op if already open)."""
    title = "{}({})".format(self.tab_title_user_manage, session.name)
    if self.tabs.get(title) is not None:
        # Tab already exists and is still open; nothing to do.
        return
    if not session.running:
        QtWidgets.QMessageBox.warning(
            self, _translate("MainWindow", "info"),
            _translate("MainWindow", "The server is disconnected. Please connect first!")
        )
        return
    tab = HkuUserManagerWidget(session, self.main_tab)
    self.main_tab.addTab(tab, title)
    self.tabs[title] = tab
def main_core():
    """Application entry point.

    Configures logging and Qt high-DPI support, then runs the Qt event
    loop.  Exit code 888 is the application's "restart" signal: the old
    window/application objects are dropped and a fresh pair is created.
    The original implementation restarted by calling itself recursively,
    which grew the call stack on every restart; a loop keeps it flat.
    Any other exit code terminates the process.
    """
    FORMAT = '%(asctime)-15s [%(levelname)s]: %(message)s [%(name)s::%(funcName)s]'
    logging.basicConfig(format=FORMAT, level=logging.INFO, handlers=[
        logging.StreamHandler(),
    ])
    # High-DPI attributes must be set before the QApplication is created.
    QtWidgets.QApplication.setAttribute(QtCore.Qt.AA_UseHighDpiPixmaps)
    QtWidgets.QApplication.setAttribute(QtCore.Qt.AA_EnableHighDpiScaling)
    while True:
        app = QtWidgets.QApplication(sys.argv)
        main_win = MyMainWindow()
        main_win.show()
        exit_code = app.exec()
        if exit_code != 888:
            sys.exit()
        # Restart requested: release the old Qt objects before looping.
        del main_win
        del app


if __name__ == "__main__":
    main_core()
| true | true |
f724ce0ba695747e01f48c7c55ced6477e36a6ed | 823 | py | Python | python/recursion/fibonacci.py | suddi/coding-challenges | f31b53790084dce1ad0be65ec1d61bf177cddb39 | [
"MIT"
] | null | null | null | python/recursion/fibonacci.py | suddi/coding-challenges | f31b53790084dce1ad0be65ec1d61bf177cddb39 | [
"MIT"
] | 11 | 2020-01-09T06:53:45.000Z | 2022-02-11T01:34:44.000Z | python/recursion/fibonacci.py | suddi/coding-challenges | f31b53790084dce1ad0be65ec1d61bf177cddb39 | [
"MIT"
def solution(number):  # O(N) time, O(1) space
    """
    Write a function to compute the fibonacci sequence value to the requested iteration.
    >>> solution(3)
    2
    >>> solution(10)
    55
    >>> solution(20)
    6765
    """
    # Iterative pairwise walk: same O(N) time as the original memoized
    # recursion, but constant space and no risk of hitting Python's
    # recursion limit for large inputs.
    previous, current = 0, 1  # fib(0), fib(1)
    for _ in range(number):
        previous, current = current, previous + current
    return previous


if __name__ == '__main__':
    import doctest
    doctest.testmod()
| 30.481481 | 88 | 0.351154 | def solution(number):
m = {
0: 0,
1: 1
}
def run_sequence(n):
if not isinstance(m.get(n), int):
m[n] = run_sequence(n - 1) + run_sequence(n - 2)
return m[n]
return run_sequence(number)
if __name__ == '__main__':
import doctest
doctest.testmod()
| true | true |
f724cf25e6669a9f5102947f3cef81489c325e8c | 24,857 | py | Python | pikciosdk/PikcioChain.py | Pikciochain/PikcioChainSDK | 2a89b655268516060044ec51672c0cc46f44bd9b | [
"MIT"
] | 1 | 2019-04-11T06:24:40.000Z | 2019-04-11T06:24:40.000Z | pikciosdk/PikcioChain.py | Pikciochain/PythonSDK | 2a89b655268516060044ec51672c0cc46f44bd9b | [
"MIT"
] | 3 | 2018-10-26T08:52:10.000Z | 2018-10-26T08:55:38.000Z | pikciosdk/PikcioChain.py | Pikciochain/PythonSDK | 2a89b655268516060044ec51672c0cc46f44bd9b | [
"MIT"
] | null | null | null | import base64
import json
import os
import requests
import time
from flask import Flask, jsonify, abort, make_response, redirect, request, \
url_for
from flask_oauthlib.client import OAuth
from selenium import webdriver
from config import get_config
from log import Logger
access_token = ''
def init_api_client():
    """
    Initialize Flask API Client.
    This is necessary for the grant code method: it registers the
    /api/authorized callback that exchanges the OAuth authorization code
    for an access token.

    Returns the Flask app, or a JSON error string when the config file
    cannot be read.
    """
    log = Logger()
    config = get_config()
    app_name = config.get('application', 'name')
    app = Flask('{0}_api_client'.format(app_name), template_folder='templates')
    os.environ['DEBUG'] = 'true'
    try:
        client_id = config.get('api_client', 'client_id')
        client_secret = config.get('api_client', 'client_secret')
        public_ip_server = config.get('server', 'public_ip')
        public_port_server = config.get('server', 'public_port')
        # BUG FIX: these two previously read the *public* keys again, so
        # the private-address fallback below retried the same endpoint.
        private_ip_server = config.get('server', 'private_ip')
        private_port_server = config.get('server', 'private_port')
        # BUG FIX: the accessors were swapped — 'tls' is a flag (a plain
        # get() made any non-empty string truthy) and 'redirect_uri' is a
        # string (getboolean() on a URI raises ValueError).
        https = config.getboolean('server', 'tls')
        redirect_uri = config.get('server', 'redirect_uri')
    except Exception as e:
        log.error('init_api_client Exception : {0}'.format(e))
        return json.dumps("Invalid config file")

    @app.route('/api/authorized')
    def grant_code():
        try:
            global access_token
            # Exchange the authorization code for an access token.  The
            # token is kept in a module-level global so other helpers can
            # reuse it.
            code = request.args.get('code')
            data = {
                'grant_type': 'authorization_code',
                'client_id': client_id,
                'client_secret': client_secret,
                'code': code,
                'redirect_uri': redirect_uri
            }
            scheme = 'https' if https else 'http'
            p = requests.post(
                url='{0}://{1}:{2}/oauth/token'.format(
                    scheme, public_ip_server, public_port_server),
                data=data, verify=False)
            access_token = p.json().get('access_token')
            if not access_token:
                # Public endpoint failed: retry against the private address.
                p = requests.post(
                    url='{0}://{1}:{2}/oauth/token'.format(
                        scheme, private_ip_server, private_port_server),
                    data=data, verify=False)
                access_token = p.json().get('access_token')
            return access_token
        except Exception as ex:
            log.error('init_api_client Exception : {0}'.format(ex))
            return json.dumps("Invalid config file")

    return app
class ClientAPI:
"""
Class access for python Client API
"""
def __init__(self, username=None, password=None):
    """Read server/client settings from the config file and prepare the
    OAuth remote application used for every API call."""
    config = get_config()
    self.api_public_ip = config.get('server', 'public_ip')
    self.api_public_port = config.get('server', 'public_port')
    self.api_private_ip = config.get('server', 'private_ip')
    self.api_private_port = config.get('server', 'private_port')
    self.client_id = config.get('api_client', 'client_id')
    self.client_secret = config.get('api_client', 'client_secret')
    self.scope = config.get('api_client', 'scope')
    self.method = config.get('api_client', 'auth_type')
    self.https = config.getboolean('server', 'tls')
    self.username = username
    self.password = password
    self.log = Logger(system=self)
    self.app_name = config.get('application', 'name')
    self.app = Flask('{0}_api_client'.format(self.app_name))
    self.oauth = OAuth(self.app)
    os.environ['DEBUG'] = 'true'
    # Build all endpoint URLs once; only the scheme depends on TLS.
    scheme = 'https' if self.https else 'http'
    root = '{0}://{1}:{2}/'.format(scheme, self.api_public_ip, self.api_public_port)
    self.api_base_url = root + 'api/'
    self.access_token_url = root + 'oauth/token'
    self.authorize_url = root + 'oauth/authorize'
    self.remote = self.oauth.remote_app(
        'remote',
        consumer_key=self.client_id,
        consumer_secret=self.client_secret,
        request_token_params={'scope': self.scope},
        base_url=self.api_base_url,
        request_token_url=None,
        access_token_url=self.access_token_url,
        authorize_url=self.authorize_url
    )
    # Token caches and bookkeeping for the retry/redirect machinery.
    self.remote_oauth = ''
    self.access_token = ''
    self.refresh_token = ''
    self.retries = 0
    self.req_initiator_url = ''
    self.web_server = ''
"""
Everything related to API connection
"""
def get_oauth_token(self):
return self.remote_oauth
def refresh_tok(self):
    """Exchange the stored refresh token for a new access token.

    Falls back to the interactive authorize flow when no refresh token is
    cached or when the server rejects the credentials (HTTP 401).

    Returns:
        True on success, False on an unexpected HTTP status, or the
        result of ``self.authorize()`` when re-authorization is needed.
    """
    token = self.get_oauth_token()
    if token == '' or token[1] == '':
        return self.authorize()
    data = {
        'grant_type': 'refresh_token',
        'client_id': self.client_id,
        'refresh_token': token[1],
        'scope': self.scope,
        'client_secret': self.client_secret,
        'username': self.username,
        'password': self.password
    }
    # BUG FIX: base64.b64encode() requires bytes; passing the formatted
    # str raised TypeError under Python 3.  Encode first and decode the
    # result so the header value is a plain string (also fine on py2).
    credentials = '{0}:{1}'.format(self.client_id, self.client_secret)
    auth_code = base64.b64encode(credentials.encode('utf-8')).decode('ascii')
    res = requests.post(self.access_token_url, data=data, headers={
        'Authorization': 'Basic {0}'.format(auth_code)},
        verify=False)
    if res.status_code == 401:
        # Refresh token no longer valid: forget it and start over.
        self.remote_oauth = ''
        return self.authorize()
    if res.status_code in (200, 201):
        payload = res.json()
        self.access_token = payload.get('access_token')
        self.refresh_token = payload.get('refresh_token')
        self.remote_oauth = (self.access_token, self.refresh_token)
        return True
    return False
def require_authorize(self, f):
    """
    Decorator used to validate client authorization; In case the client
    is not authorized, redirect to the Authorize Page, otherwise check
    if the access token expired and request new one using the refresh
    token.
    :return:
    """
    def wrap(*args, **kwargs):
        token = self.get_oauth_token()
        if not token:
            # No token at all: remember where the request started and
            # bounce the client through the authorize flow.
            self.req_initiator_url = '/api'
            return redirect('/authorize')
        resp = f(*args, **kwargs)
        if not resp.status or resp.status in (401,):
            # The wrapped call was rejected; try a silent refresh when a
            # refresh token is available, otherwise re-authorize.
            token = self.get_oauth_token()
            if token and token[1]:
                self.refresh_tok()
            else:
                return redirect('/authorize')
            # Retry once with the (possibly) refreshed credentials.
            resp = f(*args, **kwargs)
        return make_response(jsonify(resp.data), resp.status)
    return wrap
def authorize(self):
    """Kick off the OAuth authorize flow, or go straight to the API index
    when a token pair is already cached."""
    if self.remote_oauth != '':
        return redirect(url_for('api_index'))
    # Preserve where the client wanted to go so we can return there.
    came_from = request.args.get('next') or request.referrer or None
    return self.remote.authorize(
        callback=url_for('authorized', next=came_from, _external=True)
    )
def authorized(self):
resp = self.remote.authorized_response()
# print resp
if not resp:
return jsonify(
error=request.args.get('error'),
message=request.args.get('error_description') or ''
)
elif hasattr(resp, 'data') and resp.data.get('error'):
return jsonify(
error=resp.data['error'],
message=resp.message or ''
)
if not resp.get('access_token') or not resp.get('refresh_token'):
abort(401)
self.refresh_token = resp['refresh_token']
self.access_token = resp['access_token']
if self.req_initiator_url != '':
req_initiator = self.req_initiator_url
return redirect(req_initiator)
return redirect('/api')
def deauthorize(self):
if self.remote_oauth != '':
self.remote_oauth = ''
self.refresh_token = ''
self.access_token = ''
return redirect(url_for('authorize'))
def api_index(self):
resp = self.remote.get('home')
return resp
def generic_request(self, url, method, params=None):
global access_token
try:
# if we used grant_code method, the access token variable of the
# class won't be initialised yet
if self.access_token == '':
# if the access token hasn't been got yet, we wait 5s and call
# the function again until the global variable isn't null
# anymore
if access_token != '':
self.access_token = access_token
else:
self.retries += 1
if self.retries == 3:
self.retries = 0
p = jsonify({
'error': 'Too many failed attempts to retrieve '
'access token, please try the password '
'method.'})
return p
time.sleep(5)
return self.generic_request(url, method, params)
if method.lower() == 'get':
p = requests.get(
url=url + '?access_token=' + self.access_token,
verify=False)
elif method.lower() == 'post':
p = requests.post(
url=url + '?access_token=' + self.access_token,
data=params,
headers={'Content-Type': 'application/json'}, verify=False)
elif method.lower() == 'delete':
p = requests.delete(
url=url + '?access_token=' + self.access_token,
data=params, verify=False)
else:
p = json.dumps('Bad request')
if p.status_code == 401 and self.retries < 1:
if self.refresh_tok():
self.retries += 1
if method.lower() == 'get':
p = requests.get(
url=url + '?access_token=' + self.access_token,
verify=False)
elif method.lower() == 'post':
p = requests.post(
url=url + '?access_token=' + self.access_token,
data=params,
headers={'Content-Type': 'application/json'},
verify=False)
elif method.lower() == 'delete':
p = requests.delete(
url=url + '?access_token=' + self.access_token,
data=params, verify=False)
else:
p = json.dumps('API connexion lost')
elif p.status_code == 500:
self.log.error('Server connexion error : {0}'.format(p))
return json.dumps('Server failure, please report the bug')
else:
self.retries = 0
except Exception as e:
self.log.error('generic_request Exception : {0}'.format(e))
return json.dumps('Bad request')
return p
def get_access_token(self):
try:
if self.method.lower() == 'password_header':
data = {
'grant_type': 'password',
'username': self.username,
'password': self.password,
'scope': self.scope
}
auth_code = base64.b64encode(
bytes(self.client_id + ':' + self.client_secret))
try:
p = requests.post(url=self.access_token_url, data=data,
headers={
'Authorization': 'Basic {0}'.format(
auth_code)}, verify=False,
timeout=10)
except (requests.Timeout, requests.ConnectionError):
# try with private IP
self.log.error('Failed to connect public IP, try to '
'connect private IP')
base_url = 'http{0}://{1}:{2}/'.format(
's' if self.https else '',
self.api_private_ip,
self.api_private_port
)
self.api_base_url = base_url + 'api/'
self.access_token_url = base_url + 'oauth/token'
self.authorize_url = base_url + 'oauth/authorize'
p = requests.post(url=self.access_token_url, data=data,
headers={
'Authorization': 'Basic {0}'.format(
auth_code)}, verify=False,
timeout=10)
if p and p.status_code == 401:
return json.dumps(
{'status': False, 'msg': 'API authentication failed'})
else:
self.access_token = p.json().get('access_token')
self.refresh_token = p.json().get('refresh_token')
self.remote_oauth = (self.access_token, self.refresh_token)
return json.dumps(
{'status': True, 'msg': 'API access granted'})
elif self.method.lower() == 'password_data':
data = {
'grant_type': 'password',
'client_id': self.client_id,
'client_secret': self.client_secret,
'username': self.username,
'password': self.password,
'scope': self.scope
}
try:
p = requests.post(url=self.access_token_url, data=data,
verify=False, timeout=10)
except (requests.Timeout, requests.ConnectionError):
# try with private IP
self.log.error(
'Failed to connect public IP, try to connect private '
'IP')
base_url = 'http{0}://{1}:{2}/'.format(
's' if self.https else '',
self.api_private_ip,
self.api_private_port
)
self.api_base_url = base_url + 'api/'
self.access_token_url = base_url + 'oauth/token'
self.authorize_url = base_url + 'oauth/authorize'
p = requests.post(url=self.access_token_url, data=data,
verify=False, timeout=10)
if p.status_code == 401:
return json.dumps(
{'status': False, 'msg': 'API authentication failed'})
else:
self.access_token = p.json().get('access_token')
self.refresh_token = p.json().get('refresh_token')
self.remote_oauth = (self.access_token, self.refresh_token)
return json.dumps(
{'status': True, 'msg': 'API access granted'})
# todo : to be tested + manage https and private/public IP address
elif self.method.lower() == "grant_code":
url = self.authorize_url + '?client_id=' + self.client_id + \
"&response_type=code"
driver = webdriver.Firefox()
return driver.get(url)
else:
return json.dumps(
{'status': False, 'msg': 'Invalid grant type'})
except Exception as e:
self.log.error('get_access_token Exception : {0}'.format(e))
return json.dumps(
{'status': False, 'msg': 'API authentication failed'})
"""
Everything related to the user
"""
def get_user_profile(self):
    """Fetch the authenticated user's profile (GET api/user/profile)."""
    try:
        return self.generic_request(
            url=self.api_base_url + 'user/profile', method='GET')
    except Exception as e:
        self.log.error('get_user_profile Exception : {0}'.format(e))
        return json.dumps('Get user profile : Bad request')
def update_user_profile(self, data):
try:
p = self.generic_request(url=self.api_base_url + 'user/profile',
method='POST', params=json.dumps(data))
except Exception as e:
self.log.error('update_user_profile Exception : {0}'.format(e))
return json.dumps('Update user profile : Bad request')
return p
def delete_custom_profile_item(self, data):
try:
p = self.generic_request(
url=self.api_base_url + 'user/profile/delete_item',
method='POST', params=json.dumps(data))
except Exception as e:
self.log.error(
'delete_custom_profile_item Exception : {0}'.format(e))
return json.dumps('Delete custom profile item : Bad request')
return p
def get_user_avatar(self):
try:
p = self.generic_request(url=self.api_base_url + 'user/avatar',
method='GET')
except Exception as e:
self.log.error('get_user_avatar Exception : {0}'.format(e))
return json.dumps('Get user avatar : Bad request')
return p
def set_user_avatar(self, data):
try:
p = self.generic_request(url=self.api_base_url + 'user/avatar',
method='POST', params=json.dumps(data))
except Exception as e:
self.log.error('set_user_avatar Exception : {0}'.format(e))
return json.dumps('Update user avatar : Bad request')
return p
def update_password(self, data):
try:
p = self.generic_request(
url=self.api_base_url + 'profile/change_password',
method='POST',
params=json.dumps(data))
except Exception as e:
self.log.error('update_password Exception : {0}'.format(e))
return json.dumps('Update password : Bad request')
return p
"""
Everything related to chat messages
"""
def send_chat_message(self, data):
try:
p = self.generic_request(url=self.api_base_url + 'chat/send',
method="POST", params=json.dumps(data))
except Exception as e:
self.log.error('send_chat_message Exception : {0}'.format(e))
return json.dumps('Send chat message : Bad request')
return p
def delete_chat_message(self, msg_id):
try:
p = self.generic_request(url=self.api_base_url + 'chat/' + msg_id,
method="DELETE")
except Exception as e:
self.log.error('delete_chat_message Exception : {0}'.format(e))
return json.dumps('Delete chat message : Bad request')
return p
def get_chat_conversation(self, data):
try:
p = self.generic_request(url=self.api_base_url + 'chat',
method='POST', params=json.dumps(data))
except Exception as e:
self.log.error('get_chat_conversation Exception : {0}'.format(e))
return json.dumps('Get chat conversation : Bad request')
return p
"""
Everything related to file messages
"""
def get_file_messages(self, data):
try:
p = self.generic_request(url=self.api_base_url + 'file_message',
method='POST', params=json.dumps(data))
except Exception as e:
self.log.error('get_file_messages Exception : {0}'.format(e))
return json.dumps('Get file messages : Bad request')
return p
def send_file_message(self, data):
try:
p = self.generic_request(
url=self.api_base_url + 'file_message/send',
method="POST",
params=json.dumps(data))
except Exception as e:
self.log.error('send_file_message Exception : {0}'.format(e))
return json.dumps('Send file message : Bad request')
return p
def delete_file_message(self, msg_id):
try:
p = self.generic_request(
url=self.api_base_url + 'file_message/' + msg_id,
method='DELETE')
except Exception as e:
self.log.error('delete_file_message Exception : {0}'.format(e))
return json.dumps('Set file message as read : Bad request')
return p
"""
Everything related to contacts
"""
def get_contacts(self):
try:
p = self.generic_request(url=self.api_base_url + 'contacts',
method='GET')
except Exception as e:
self.log.error('get_contacts Exception : {0}'.format(e))
return json.dumps(
'Get contacts list : Bad request : {0}'.format(e))
return p
def find_user(self, query):
try:
p = self.generic_request(
url=self.api_base_url + 'contacts/find_user' + query,
method='GET')
except Exception as e:
self.log.error('find_user Exception : {0}'.format(e))
return json.dumps('Find user : Bad request')
return p
def add_contact(self, data):
try:
p = self.generic_request(url=self.api_base_url + 'contacts/add',
method='POST', params=json.dumps(data))
except Exception as e:
self.log.error('add_contact Exception : {0}'.format(e))
return json.dumps('Add contact : Bad request')
return p
def remove_contact(self, data):
try:
p = self.generic_request(url=self.api_base_url + 'contacts/remove',
method='POST', params=json.dumps(data))
except Exception as e:
self.log.error('remove_contact Exception : {0}'.format(e))
return json.dumps('Remove contact : Bad request')
return p
def accept_contact_request(self, matr_id):
try:
p = self.generic_request(
url=self.api_base_url + 'contacts/accept/' + matr_id,
method='GET')
except Exception as e:
self.log.error('accept_contact_request Exception : {0}'.format(e))
return json.dumps('Accept contact request : Bad request')
return p
def reject_contact_request(self, matr_id):
try:
p = self.generic_request(
url=self.api_base_url + 'contacts/reject/' + matr_id,
method='GET')
except Exception as e:
self.log.error('reject_contact_request Exception : {0}'.format(e))
return json.dumps('Reject contact request : Bad request')
return p
def get_contact_profile(self, matr_id):
try:
p = self.generic_request(
url=self.api_base_url + 'contacts/' + matr_id, method='GET')
except Exception as e:
self.log.error('get_contact_profile Exception : {0}'.format(e))
return json.dumps('Get contact profile : Bad request')
return p
| 39.083333 | 79 | 0.518968 | import base64
import json
import os
import requests
import time
from flask import Flask, jsonify, abort, make_response, redirect, request, \
url_for
from flask_oauthlib.client import OAuth
from selenium import webdriver
from config import get_config
from log import Logger
access_token = ''
def init_api_client():
log = Logger()
config = get_config()
app_name = config.get('application', 'name')
app = Flask('{0}_api_client'.format(app_name), template_folder='templates')
os.environ['DEBUG'] = 'true'
try:
client_id = config.get('api_client', 'client_id')
client_secret = config.get('api_client', 'client_secret')
public_ip_server = config.get('server', 'public_ip')
public_port_server = config.get('server', 'public_port')
private_ip_server = config.get('server', 'public_ip')
private_port_server = config.get('server', 'public_port')
https = config.get('server', 'tls')
redirect_uri = config.getboolean('server', 'redirect_uri')
except Exception as e:
log.error('init_api_client Exception : {0}'.format(e))
return json.dumps("Invalid config file")
@app.route('/api/authorized')
def grant_code():
try:
global access_token
code = request.args.get('code')
data = {
'grant_type': 'authorization_code',
'client_id': client_id,
'client_secret': client_secret,
'code': code,
'redirect_uri': redirect_uri
}
if https:
p = requests.post(
url='https://' + public_ip_server + ':' +
public_port_server + '/oauth/token',
data=data, verify=False)
else:
p = requests.post(
url='http://' + public_ip_server + ':' +
public_port_server + '/oauth/token',
data=data, verify=False)
access_token = p.json().get('access_token')
if not access_token:
if https:
p = requests.post(
url='https://' + private_ip_server + ':' +
private_port_server + '/oauth/token',
data=data, verify=False)
else:
p = requests.post(
url='http://' + private_ip_server + ':' +
private_port_server + '/oauth/token',
data=data, verify=False)
access_token = p.json().get('access_token')
return access_token
except Exception as ex:
log.error('init_api_client Exception : {0}'.format(ex))
return json.dumps("Invalid config file")
return app
class ClientAPI:
def __init__(self, username=None, password=None):
config = get_config()
self.api_public_ip = config.get('server', 'public_ip')
self.api_public_port = config.get('server', 'public_port')
self.api_private_ip = config.get('server', 'private_ip')
self.api_private_port = config.get('server', 'private_port')
self.client_id = config.get('api_client', 'client_id')
self.client_secret = config.get('api_client', 'client_secret')
self.scope = config.get('api_client', 'scope')
self.method = config.get('api_client', 'auth_type')
self.https = config.getboolean('server', 'tls')
self.username = username
self.password = password
self.log = Logger(system=self)
self.app_name = config.get('application', 'name')
self.app = Flask('{0}_api_client'.format(self.app_name))
self.oauth = OAuth(self.app)
os.environ['DEBUG'] = 'true'
if self.https:
self.api_base_url = 'https://{0}:{1}/api/'.format(
self.api_public_ip, self.api_public_port)
self.access_token_url = 'https://{0}:{1}/oauth/token'.format(
self.api_public_ip, self.api_public_port)
self.authorize_url = 'https://{0}:{1}/oauth/authorize'.format(
self.api_public_ip, self.api_public_port)
else:
self.api_base_url = 'http://{0}:{1}/api/'.format(
self.api_public_ip, self.api_public_port)
self.access_token_url = 'http://{0}:{1}/oauth/token'.format(
self.api_public_ip, self.api_public_port)
self.authorize_url = 'http://{0}:{1}/oauth/authorize'.format(
self.api_public_ip, self.api_public_port)
self.remote = self.oauth.remote_app(
'remote',
consumer_key=self.client_id,
consumer_secret=self.client_secret,
request_token_params={'scope': self.scope},
base_url=self.api_base_url,
request_token_url=None,
access_token_url=self.access_token_url,
authorize_url=self.authorize_url
)
self.remote_oauth = ''
self.access_token = ''
self.refresh_token = ''
self.retries = 0
self.req_initiator_url = ''
self.web_server = ''
def get_oauth_token(self):
return self.remote_oauth
def refresh_tok(self):
token = self.get_oauth_token()
if token == '' or token[1] == '':
return self.authorize()
data = {
'grant_type': 'refresh_token',
'client_id': self.client_id,
'refresh_token': token[1],
'scope': self.scope,
'client_secret': self.client_secret,
'username': self.username,
'password': self.password
}
auth_code = base64.b64encode(
'{0}:{1}'.format(self.client_id, self.client_secret))
res = requests.post(self.access_token_url, data=data, headers={
'Authorization': 'Basic {0}'.format(auth_code)},
verify=False)
if res.status_code == 401:
self.remote_oauth = ''
return self.authorize()
if res.status_code in (200, 201):
self.remote_oauth = (
res.json().get('access_token'),
res.json().get('refresh_token'))
self.access_token = res.json().get('access_token')
self.refresh_token = res.json().get('refresh_token')
return True
return False
def require_authorize(self, f):
def wrap(*args, **kwargs):
token = self.get_oauth_token()
if not token:
self.req_initiator_url = '/api'
return redirect('/authorize')
resp = f(*args, **kwargs)
if not resp.status or resp.status in (401,):
token = self.get_oauth_token()
if token and token[1]:
self.refresh_tok()
else:
return redirect('/authorize')
resp = f(*args, **kwargs)
return make_response(jsonify(resp.data), resp.status)
return wrap
def authorize(self):
if self.remote_oauth != '':
return redirect(url_for('api_index'))
next_url = request.args.get('next') or request.referrer or None
return self.remote.authorize(
callback=url_for('authorized', next=next_url, _external=True)
)
def authorized(self):
resp = self.remote.authorized_response()
if not resp:
return jsonify(
error=request.args.get('error'),
message=request.args.get('error_description') or ''
)
elif hasattr(resp, 'data') and resp.data.get('error'):
return jsonify(
error=resp.data['error'],
message=resp.message or ''
)
if not resp.get('access_token') or not resp.get('refresh_token'):
abort(401)
self.refresh_token = resp['refresh_token']
self.access_token = resp['access_token']
if self.req_initiator_url != '':
req_initiator = self.req_initiator_url
return redirect(req_initiator)
return redirect('/api')
def deauthorize(self):
if self.remote_oauth != '':
self.remote_oauth = ''
self.refresh_token = ''
self.access_token = ''
return redirect(url_for('authorize'))
def api_index(self):
resp = self.remote.get('home')
return resp
def generic_request(self, url, method, params=None):
global access_token
try:
if self.access_token == '':
# if the access token hasn't been got yet, we wait 5s and call
# anymore
if access_token != '':
self.access_token = access_token
else:
self.retries += 1
if self.retries == 3:
self.retries = 0
p = jsonify({
'error': 'Too many failed attempts to retrieve '
'access token, please try the password '
'method.'})
return p
time.sleep(5)
return self.generic_request(url, method, params)
if method.lower() == 'get':
p = requests.get(
url=url + '?access_token=' + self.access_token,
verify=False)
elif method.lower() == 'post':
p = requests.post(
url=url + '?access_token=' + self.access_token,
data=params,
headers={'Content-Type': 'application/json'}, verify=False)
elif method.lower() == 'delete':
p = requests.delete(
url=url + '?access_token=' + self.access_token,
data=params, verify=False)
else:
p = json.dumps('Bad request')
if p.status_code == 401 and self.retries < 1:
if self.refresh_tok():
self.retries += 1
if method.lower() == 'get':
p = requests.get(
url=url + '?access_token=' + self.access_token,
verify=False)
elif method.lower() == 'post':
p = requests.post(
url=url + '?access_token=' + self.access_token,
data=params,
headers={'Content-Type': 'application/json'},
verify=False)
elif method.lower() == 'delete':
p = requests.delete(
url=url + '?access_token=' + self.access_token,
data=params, verify=False)
else:
p = json.dumps('API connexion lost')
elif p.status_code == 500:
self.log.error('Server connexion error : {0}'.format(p))
return json.dumps('Server failure, please report the bug')
else:
self.retries = 0
except Exception as e:
self.log.error('generic_request Exception : {0}'.format(e))
return json.dumps('Bad request')
return p
def get_access_token(self):
try:
if self.method.lower() == 'password_header':
data = {
'grant_type': 'password',
'username': self.username,
'password': self.password,
'scope': self.scope
}
auth_code = base64.b64encode(
bytes(self.client_id + ':' + self.client_secret))
try:
p = requests.post(url=self.access_token_url, data=data,
headers={
'Authorization': 'Basic {0}'.format(
auth_code)}, verify=False,
timeout=10)
except (requests.Timeout, requests.ConnectionError):
# try with private IP
self.log.error('Failed to connect public IP, try to '
'connect private IP')
base_url = 'http{0}://{1}:{2}/'.format(
's' if self.https else '',
self.api_private_ip,
self.api_private_port
)
self.api_base_url = base_url + 'api/'
self.access_token_url = base_url + 'oauth/token'
self.authorize_url = base_url + 'oauth/authorize'
p = requests.post(url=self.access_token_url, data=data,
headers={
'Authorization': 'Basic {0}'.format(
auth_code)}, verify=False,
timeout=10)
if p and p.status_code == 401:
return json.dumps(
{'status': False, 'msg': 'API authentication failed'})
else:
self.access_token = p.json().get('access_token')
self.refresh_token = p.json().get('refresh_token')
self.remote_oauth = (self.access_token, self.refresh_token)
return json.dumps(
{'status': True, 'msg': 'API access granted'})
elif self.method.lower() == 'password_data':
data = {
'grant_type': 'password',
'client_id': self.client_id,
'client_secret': self.client_secret,
'username': self.username,
'password': self.password,
'scope': self.scope
}
try:
p = requests.post(url=self.access_token_url, data=data,
verify=False, timeout=10)
except (requests.Timeout, requests.ConnectionError):
# try with private IP
self.log.error(
'Failed to connect public IP, try to connect private '
'IP')
base_url = 'http{0}://{1}:{2}/'.format(
's' if self.https else '',
self.api_private_ip,
self.api_private_port
)
self.api_base_url = base_url + 'api/'
self.access_token_url = base_url + 'oauth/token'
self.authorize_url = base_url + 'oauth/authorize'
p = requests.post(url=self.access_token_url, data=data,
verify=False, timeout=10)
if p.status_code == 401:
return json.dumps(
{'status': False, 'msg': 'API authentication failed'})
else:
self.access_token = p.json().get('access_token')
self.refresh_token = p.json().get('refresh_token')
self.remote_oauth = (self.access_token, self.refresh_token)
return json.dumps(
{'status': True, 'msg': 'API access granted'})
# todo : to be tested + manage https and private/public IP address
elif self.method.lower() == "grant_code":
url = self.authorize_url + '?client_id=' + self.client_id + \
"&response_type=code"
driver = webdriver.Firefox()
return driver.get(url)
else:
return json.dumps(
{'status': False, 'msg': 'Invalid grant type'})
except Exception as e:
self.log.error('get_access_token Exception : {0}'.format(e))
return json.dumps(
{'status': False, 'msg': 'API authentication failed'})
def get_user_profile(self):
try:
p = self.generic_request(url=self.api_base_url + 'user/profile',
method='GET')
except Exception as e:
self.log.error('get_user_profile Exception : {0}'.format(e))
return json.dumps('Get user profile : Bad request')
return p
def update_user_profile(self, data):
    """POST *data* (a dict) to update the user's profile.

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'user/profile'
    try:
        resp = self.generic_request(url=endpoint, method='POST',
                                    params=json.dumps(data))
    except Exception as err:
        self.log.error('update_user_profile Exception : {0}'.format(err))
        return json.dumps('Update user profile : Bad request')
    return resp
def delete_custom_profile_item(self, data):
    """POST *data* describing the custom profile item to remove.

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'user/profile/delete_item'
    try:
        resp = self.generic_request(url=endpoint, method='POST',
                                    params=json.dumps(data))
    except Exception as err:
        self.log.error(
            'delete_custom_profile_item Exception : {0}'.format(err))
        return json.dumps('Delete custom profile item : Bad request')
    return resp
def get_user_avatar(self):
    """Fetch the user's avatar via the REST API.

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'user/avatar'
    try:
        resp = self.generic_request(url=endpoint, method='GET')
    except Exception as err:
        self.log.error('get_user_avatar Exception : {0}'.format(err))
        return json.dumps('Get user avatar : Bad request')
    return resp
def set_user_avatar(self, data):
    """POST *data* to replace the user's avatar.

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'user/avatar'
    try:
        resp = self.generic_request(url=endpoint, method='POST',
                                    params=json.dumps(data))
    except Exception as err:
        self.log.error('set_user_avatar Exception : {0}'.format(err))
        return json.dumps('Update user avatar : Bad request')
    return resp
def update_password(self, data):
    """POST *data* to change the account password.

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'profile/change_password'
    try:
        resp = self.generic_request(url=endpoint, method='POST',
                                    params=json.dumps(data))
    except Exception as err:
        self.log.error('update_password Exception : {0}'.format(err))
        return json.dumps('Update password : Bad request')
    return resp
def send_chat_message(self, data):
    """POST a chat message described by *data*.

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'chat/send'
    try:
        resp = self.generic_request(url=endpoint, method="POST",
                                    params=json.dumps(data))
    except Exception as err:
        self.log.error('send_chat_message Exception : {0}'.format(err))
        return json.dumps('Send chat message : Bad request')
    return resp
def delete_chat_message(self, msg_id):
    """DELETE the chat message identified by *msg_id* (string).

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'chat/' + msg_id
    try:
        resp = self.generic_request(url=endpoint, method="DELETE")
    except Exception as err:
        self.log.error('delete_chat_message Exception : {0}'.format(err))
        return json.dumps('Delete chat message : Bad request')
    return resp
def get_chat_conversation(self, data):
    """POST *data* (conversation filters) and return the matching chat.

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'chat'
    try:
        resp = self.generic_request(url=endpoint, method='POST',
                                    params=json.dumps(data))
    except Exception as err:
        self.log.error('get_chat_conversation Exception : {0}'.format(err))
        return json.dumps('Get chat conversation : Bad request')
    return resp
def get_file_messages(self, data):
    """POST *data* (filters) and return the matching file messages.

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'file_message'
    try:
        resp = self.generic_request(url=endpoint, method='POST',
                                    params=json.dumps(data))
    except Exception as err:
        self.log.error('get_file_messages Exception : {0}'.format(err))
        return json.dumps('Get file messages : Bad request')
    return resp
def send_file_message(self, data):
    """POST a file message described by *data*.

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'file_message/send'
    try:
        resp = self.generic_request(url=endpoint, method="POST",
                                    params=json.dumps(data))
    except Exception as err:
        self.log.error('send_file_message Exception : {0}'.format(err))
        return json.dumps('Send file message : Bad request')
    return resp
def delete_file_message(self, msg_id):
    """DELETE the file message identified by *msg_id* (string).

    Args:
        msg_id: identifier of the file message to delete.

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'file_message/' + msg_id
    try:
        resp = self.generic_request(url=endpoint, method='DELETE')
    except Exception as err:
        self.log.error('delete_file_message Exception : {0}'.format(err))
        # Bug fix: the previous message said 'Set file message as read',
        # copied from another endpoint; report the actual failed operation.
        return json.dumps('Delete file message : Bad request')
    return resp
def get_contacts(self):
    """Fetch the user's contact list via the REST API.

    Returns:
        The API response on success, or a JSON-encoded error string
        containing the exception text.
    """
    endpoint = self.api_base_url + 'contacts'
    try:
        resp = self.generic_request(url=endpoint, method='GET')
    except Exception as err:
        self.log.error('get_contacts Exception : {0}'.format(err))
        return json.dumps(
            'Get contacts list : Bad request : {0}'.format(err))
    return resp
def find_user(self, query):
    """Search for a user; *query* is the raw query-string suffix
    (e.g. '?name=bob') appended to the find_user endpoint.

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'contacts/find_user' + query
    try:
        resp = self.generic_request(url=endpoint, method='GET')
    except Exception as err:
        self.log.error('find_user Exception : {0}'.format(err))
        return json.dumps('Find user : Bad request')
    return resp
def add_contact(self, data):
    """POST *data* to send a contact request.

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'contacts/add'
    try:
        resp = self.generic_request(url=endpoint, method='POST',
                                    params=json.dumps(data))
    except Exception as err:
        self.log.error('add_contact Exception : {0}'.format(err))
        return json.dumps('Add contact : Bad request')
    return resp
def remove_contact(self, data):
    """POST *data* to remove a contact.

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'contacts/remove'
    try:
        resp = self.generic_request(url=endpoint, method='POST',
                                    params=json.dumps(data))
    except Exception as err:
        self.log.error('remove_contact Exception : {0}'.format(err))
        return json.dumps('Remove contact : Bad request')
    return resp
def accept_contact_request(self, matr_id):
    """Accept the pending contact request from user *matr_id* (string).

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'contacts/accept/' + matr_id
    try:
        resp = self.generic_request(url=endpoint, method='GET')
    except Exception as err:
        self.log.error('accept_contact_request Exception : {0}'.format(err))
        return json.dumps('Accept contact request : Bad request')
    return resp
def reject_contact_request(self, matr_id):
    """Reject the pending contact request from user *matr_id* (string).

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'contacts/reject/' + matr_id
    try:
        resp = self.generic_request(url=endpoint, method='GET')
    except Exception as err:
        self.log.error('reject_contact_request Exception : {0}'.format(err))
        return json.dumps('Reject contact request : Bad request')
    return resp
def get_contact_profile(self, matr_id):
    """Fetch the profile of contact *matr_id* (string).

    Returns:
        The API response on success, or a JSON-encoded error string.
    """
    endpoint = self.api_base_url + 'contacts/' + matr_id
    try:
        resp = self.generic_request(url=endpoint, method='GET')
    except Exception as err:
        self.log.error('get_contact_profile Exception : {0}'.format(err))
        return json.dumps('Get contact profile : Bad request')
    return resp
| true | true |
f724cfc965d385156ea1686e76199775451ff589 | 4,083 | py | Python | test/functional/feature_includeconf.py | minblock/arcticoin | fc0ee011cc8a27cc22dd9841d563b37a8fa12255 | [
"MIT"
] | null | null | null | test/functional/feature_includeconf.py | minblock/arcticoin | fc0ee011cc8a27cc22dd9841d563b37a8fa12255 | [
"MIT"
] | null | null | null | test/functional/feature_includeconf.py | minblock/arcticoin | fc0ee011cc8a27cc22dd9841d563b37a8fa12255 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests the includeconf argument
Verify that:
1. adding includeconf to the configuration file causes the includeconf
file to be loaded in the correct order.
2. includeconf cannot be used as a command line argument.
3. includeconf cannot be used recursively (ie includeconf can only
be used from the base config file).
4. multiple includeconf arguments can be specified in the main config
file.
"""
import os
from test_framework.test_framework import BitcoinTestFramework
class IncludeConfTest(BitcoinTestFramework):
    """Functional test for -includeconf handling (see module docstring)."""

    def set_test_params(self):
        # Reuse the cached chain; one node suffices to probe config parsing.
        self.setup_clean_chain = False
        self.num_nodes = 1

    def setup_chain(self):
        super().setup_chain()
        node_dir = os.path.join(self.options.tmpdir, "node0")
        # Extra config files pulled in through includeconf directives.
        with open(os.path.join(node_dir, "relative.conf"), "w", encoding="utf8") as conf:
            conf.write("uacomment=relative\n")
        with open(os.path.join(node_dir, "relative2.conf"), "w", encoding="utf8") as conf:
            conf.write("uacomment=relative2\n")
        with open(os.path.join(node_dir, "arcticoin.conf"), "a", encoding='utf8') as conf:
            conf.write("uacomment=main\nincludeconf=relative.conf\n")

    def run_test(self):
        node_dir = os.path.join(self.options.tmpdir, "node0")

        self.log.info("-includeconf works from config file. subversion should end with 'main; relative)/'")
        assert self.nodes[0].getnetworkinfo()["subversion"].endswith("main; relative)/")

        self.log.info("-includeconf cannot be used as command-line arg")
        self.stop_node(0)
        self.nodes[0].assert_start_raises_init_error(
            extra_args=["-includeconf=relative2.conf"],
            expected_msg="Error parsing command line arguments: -includeconf cannot be used from commandline; -includeconf=relative2.conf")

        self.log.info("-includeconf cannot be used recursively. subversion should end with 'main; relative)/'")
        with open(os.path.join(node_dir, "relative.conf"), "a", encoding="utf8") as conf:
            conf.write("includeconf=relative2.conf\n")
        self.start_node(0)
        assert self.nodes[0].getnetworkinfo()["subversion"].endswith("main; relative)/")
        self.stop_node(0, expected_stderr="warning: -includeconf cannot be used from included files; ignoring -includeconf=relative2.conf")

        self.log.info("-includeconf cannot contain invalid arg")
        # Commented out as long as we ignore invalid arguments in configuration files
        #with open(os.path.join(node_dir, "relative.conf"), "w", encoding="utf8") as conf:
        #    conf.write("foo=bar\n")
        #self.nodes[0].assert_start_raises_init_error(expected_msg="Error reading configuration file: Invalid configuration value foo")

        self.log.info("-includeconf cannot be invalid path")
        os.remove(os.path.join(node_dir, "relative.conf"))
        self.nodes[0].assert_start_raises_init_error(expected_msg="Error reading configuration file: Failed to include configuration file relative.conf")

        self.log.info("multiple -includeconf args can be used from the base config file. subversion should end with 'main; relative; relative2)/'")
        with open(os.path.join(node_dir, "relative.conf"), "w", encoding="utf8") as conf:
            # Restore initial file contents
            conf.write("uacomment=relative\n")
        with open(os.path.join(node_dir, "arcticoin.conf"), "a", encoding='utf8') as conf:
            conf.write("includeconf=relative2.conf\n")
        self.start_node(0)
        assert self.nodes[0].getnetworkinfo()["subversion"].endswith("main; relative; relative2)/")
# Standard test-script entry point: run the test when executed directly.
if __name__ == '__main__':
    IncludeConfTest().main()
| 49.192771 | 224 | 0.694832 |
import os
from test_framework.test_framework import BitcoinTestFramework
class IncludeConfTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = False
self.num_nodes = 1
def setup_chain(self):
super().setup_chain()
with open(os.path.join(self.options.tmpdir, "node0", "relative.conf"), "w", encoding="utf8") as f:
f.write("uacomment=relative\n")
with open(os.path.join(self.options.tmpdir, "node0", "relative2.conf"), "w", encoding="utf8") as f:
f.write("uacomment=relative2\n")
with open(os.path.join(self.options.tmpdir, "node0", "arcticoin.conf"), "a", encoding='utf8') as f:
f.write("uacomment=main\nincludeconf=relative.conf\n")
def run_test(self):
self.log.info("-includeconf works from config file. subversion should end with 'main; relative)/'")
subversion = self.nodes[0].getnetworkinfo()["subversion"]
assert subversion.endswith("main; relative)/")
self.log.info("-includeconf cannot be used as command-line arg")
self.stop_node(0)
self.nodes[0].assert_start_raises_init_error(extra_args=["-includeconf=relative2.conf"], expected_msg="Error parsing command line arguments: -includeconf cannot be used from commandline; -includeconf=relative2.conf")
self.log.info("-includeconf cannot be used recursively. subversion should end with 'main; relative)/'")
with open(os.path.join(self.options.tmpdir, "node0", "relative.conf"), "a", encoding="utf8") as f:
f.write("includeconf=relative2.conf\n")
self.start_node(0)
subversion = self.nodes[0].getnetworkinfo()["subversion"]
assert subversion.endswith("main; relative)/")
self.stop_node(0, expected_stderr="warning: -includeconf cannot be used from included files; ignoring -includeconf=relative2.conf")
self.log.info("-includeconf cannot contain invalid arg")
self.log.info("-includeconf cannot be invalid path")
os.remove(os.path.join(self.options.tmpdir, "node0", "relative.conf"))
self.nodes[0].assert_start_raises_init_error(expected_msg="Error reading configuration file: Failed to include configuration file relative.conf")
self.log.info("multiple -includeconf args can be used from the base config file. subversion should end with 'main; relative; relative2)/'")
with open(os.path.join(self.options.tmpdir, "node0", "relative.conf"), "w", encoding="utf8") as f:
f.write("uacomment=relative\n")
with open(os.path.join(self.options.tmpdir, "node0", "arcticoin.conf"), "a", encoding='utf8') as f:
f.write("includeconf=relative2.conf\n")
self.start_node(0)
subversion = self.nodes[0].getnetworkinfo()["subversion"]
assert subversion.endswith("main; relative; relative2)/")
if __name__ == '__main__':
IncludeConfTest().main()
| true | true |
f724cfe100b6d8d018d01d3fec03c2b0c5e1f781 | 6,287 | py | Python | DuelingDQN/agent.py | emarche/Value-based-DeepRL | 8b6458d4b82f293b401fc9e9c81cc482e0948830 | [
"MIT"
] | 1 | 2021-06-21T06:25:43.000Z | 2021-06-21T06:25:43.000Z | DuelingDQN/agent.py | emarche/Value-based-DeepRL | 8b6458d4b82f293b401fc9e9c81cc482e0948830 | [
"MIT"
] | null | null | null | DuelingDQN/agent.py | emarche/Value-based-DeepRL | 8b6458d4b82f293b401fc9e9c81cc482e0948830 | [
"MIT"
] | null | null | null | """DuelingDQN agent script
This manages the training phase of the off-policy DuelingDQN.
"""
import random
from collections import deque
import yaml
import numpy as np
with open('config.yml', 'r') as ymlfile:
cfg = yaml.load(ymlfile, Loader=yaml.FullLoader)
seed = cfg['setup']['seed']
ymlfile.close()
random.seed(seed)
np.random.seed(seed)
import tensorflow as tf
from tensorflow.keras.optimizers import Adam
tf.random.set_seed(seed)
from utils.deepnetwork import DeepNetwork
from utils.memorybuffer import Buffer
class DuelingDQN:
    """
    Off-policy Dueling DQN agent: epsilon-greedy acting, replay-buffer
    sampling, and TD-error minimisation on a single (non-target) network.
    """

    def __init__(self, env, params):
        """Initialize the agent, its network, optimizer and buffer

        Args:
            env (gym): gym environment
            params (dict): agent parameters (e.g., dnn structure,
                buffer size under params['buffer']['size'])

        Returns:
            None
        """
        self.env = env
        # DeepNetwork.build is expected to create the dueling (value +
        # advantage stream) architecture from params['dnn'].
        self.model = DeepNetwork.build(env, params['dnn'])
        self.model_opt = Adam()
        self.buffer = Buffer(params['buffer']['size'])

    def get_action(self, state, eps):
        """Epsilon-greedy action selection.

        Args:
            state (list): agent current state
            eps (float): random action probability in [0, 1]

        Returns:
            action (int): index of the chosen discrete action
        """
        if np.random.uniform() <= eps:
            # Explore: uniform random action over the discrete action space.
            return np.random.randint(0, self.env.action_space.n)
        # Exploit: greedy action w.r.t. the current Q-estimates.
        q_values = self.model(np.array([state])).numpy()[0]
        return np.argmax(q_values)

    def update(self, gamma, batch_size):
        """Sample a minibatch from the replay buffer and fit the network.

        Args:
            gamma (float): discount factor
            batch_size (int): requested minibatch size (capped by the
                number of transitions currently stored)

        Returns:
            None
        """
        batch_size = min(self.buffer.size, batch_size)
        states, actions, rewards, obs_states, dones = self.buffer.sample(batch_size)
        # The updates require shape (n° samples, 1) for broadcasting
        # against the per-sample Q-value columns below.
        rewards = rewards.reshape(-1, 1)
        dones = dones.reshape(-1, 1)
        self.fit(gamma, states, actions, rewards, obs_states, dones)

    def fit(self, gamma, states, actions, rewards, obs_states, dones):
        """Minimize the MSE of the temporal-difference error between
        Q(s,a|θ) and the target y = r + γ max_a' Q(s', a'|θ).

        NOTE: like vanilla DQN this uses non-stationary targets (the same
        network estimates both the values and the targets — there is no
        separate target network). The "dueling" aspect lives in the network
        architecture (separate V(s) and A(s,a) streams recombined into
        Q(s,a)), built by DeepNetwork, not in this update rule.

        Args:
            gamma (float): discount factor
            states: batch of states
            actions: batch of action indices taken in `states`
            rewards: batch of rewards, shape (batch, 1)
            obs_states: batch of successor states
            dones: batch of continuation masks (1 - done), shape (batch, 1)

        Returns:
            None
        """
        with tf.GradientTape() as tape:
            # Compute the target y = r + γ max_a' Q(s', a'|θ).
            obs_qvalues = self.model(obs_states)
            obs_action = tf.math.argmax(obs_qvalues, axis=-1).numpy()
            idxs = np.array([[int(i), int(action)] for i, action in enumerate(obs_action)])
            max_obs_qvalues = tf.expand_dims(tf.gather_nd(obs_qvalues, idxs), axis=-1)
            # `dones` already holds 1 - done, so terminal transitions
            # drop the bootstrap term.
            y = rewards + gamma * max_obs_qvalues * dones
            # Compute values Q(s,a|θ) for the actions actually taken.
            qvalues = self.model(states)
            idxs = np.array([[int(i), int(action)] for i, action in enumerate(actions)])
            qvalues = tf.expand_dims(tf.gather_nd(qvalues, idxs), axis=-1)
            # Loss: mean of 0.5 * (Q(s,a) - y)^2.
            td_errors = tf.math.subtract(qvalues, y)
            td_errors = 0.5 * tf.math.square(td_errors)
            loss = tf.math.reduce_mean(td_errors)
        # Compute the model gradient and apply the optimizer step.
        grad = tape.gradient(loss, self.model.trainable_variables)
        self.model_opt.apply_gradients(zip(grad, self.model.trainable_variables))

    def train(self, tracker, n_episodes, verbose, params, hyperp):
        """Main loop for the agent's training phase.

        Args:
            tracker (object): used to store and save the training stats
            n_episodes (int): n° of episodes to perform
            verbose (int): how frequently (in episodes) stats are saved
            params (dict): agent parameters (gamma, eps, eps_min,
                update_start, buffer batch size)
            hyperp (dict): algorithmic specific values (eps decay 'eps_d')

        Returns:
            None
        """
        mean_reward = deque(maxlen=100)
        eps, eps_min = params['eps'], params['eps_min']
        eps_decay = hyperp['eps_d']

        for e in range(n_episodes):
            ep_reward, steps = 0, 0
            state = self.env.reset()

            while True:
                action = self.get_action(state, eps)
                obs_state, obs_reward, done, _ = self.env.step(action)
                # Continuation mask stored as 1 - done (0 at terminal states).
                self.buffer.store(state,
                                  action,
                                  obs_reward,
                                  obs_state,
                                  1 - int(done)
                                  )
                ep_reward += obs_reward
                steps += 1
                state = obs_state

                # Warm-up: no gradient updates until episode `update_start`
                # has passed, so the buffer holds enough transitions.
                if e > params['update_start']:
                    self.update(
                        params['gamma'],
                        params['buffer']['batch']
                    )

                if done: break

            # Epsilon decays once per episode, floored at eps_min.
            eps = max(eps_min, eps * eps_decay)
            mean_reward.append(ep_reward)
            tracker.update([e, ep_reward])
            if e % verbose == 0: tracker.save_metrics()
            print(f'Ep: {e}, Ep_Rew: {ep_reward}, Mean_Rew: {np.mean(mean_reward)}')
| 33.801075 | 436 | 0.576905 |
import random
from collections import deque
import yaml
import numpy as np
with open('config.yml', 'r') as ymlfile:
cfg = yaml.load(ymlfile, Loader=yaml.FullLoader)
seed = cfg['setup']['seed']
ymlfile.close()
random.seed(seed)
np.random.seed(seed)
import tensorflow as tf
from tensorflow.keras.optimizers import Adam
tf.random.set_seed(seed)
from utils.deepnetwork import DeepNetwork
from utils.memorybuffer import Buffer
class DuelingDQN:
def __init__(self, env, params):
self.env = env
self.model = DeepNetwork.build(env, params['dnn'])
self.model_opt = Adam()
self.buffer = Buffer(params['buffer']['size'])
def get_action(self, state, eps):
if np.random.uniform() <= eps:
return np.random.randint(0, self.env.action_space.n)
q_values = self.model(np.array([state])).numpy()[0]
return np.argmax(q_values)
def update(self, gamma, batch_size):
batch_size = min(self.buffer.size, batch_size)
states, actions, rewards, obs_states, dones = self.buffer.sample(batch_size)
rewards = rewards.reshape(-1, 1)
dones = dones.reshape(-1, 1)
self.fit(gamma, states, actions, rewards, obs_states, dones)
def fit(self, gamma, states, actions, rewards, obs_states, dones):
with tf.GradientTape() as tape:
obs_qvalues = self.model(obs_states)
obs_action = tf.math.argmax(obs_qvalues, axis=-1).numpy()
idxs = np.array([[int(i), int(action)] for i, action in enumerate(obs_action)])
max_obs_qvalues = tf.expand_dims(tf.gather_nd(obs_qvalues, idxs), axis=-1)
y = rewards + gamma * max_obs_qvalues * dones
# Compute values Q(s,a|θ)
qvalues = self.model(states)
idxs = np.array([[int(i), int(action)] for i, action in enumerate(actions)])
qvalues = tf.expand_dims(tf.gather_nd(qvalues, idxs), axis=-1)
# Compute the loss as mse of Q(s, a) - y
td_errors = tf.math.subtract(qvalues, y)
td_errors = 0.5 * tf.math.square(td_errors)
loss = tf.math.reduce_mean(td_errors)
# Compute the model gradient and update the network
grad = tape.gradient(loss, self.model.trainable_variables)
self.model_opt.apply_gradients(zip(grad, self.model.trainable_variables))
def train(self, tracker, n_episodes, verbose, params, hyperp):
mean_reward = deque(maxlen=100)
eps, eps_min = params['eps'], params['eps_min']
eps_decay = hyperp['eps_d']
for e in range(n_episodes):
ep_reward, steps = 0, 0
state = self.env.reset()
while True:
action = self.get_action(state, eps)
obs_state, obs_reward, done, _ = self.env.step(action)
self.buffer.store(state,
action,
obs_reward,
obs_state,
1 - int(done)
)
ep_reward += obs_reward
steps += 1
state = obs_state
if e > params['update_start']:
self.update(
params['gamma'],
params['buffer']['batch']
)
if done: break
eps = max(eps_min, eps * eps_decay)
mean_reward.append(ep_reward)
tracker.update([e, ep_reward])
if e % verbose == 0: tracker.save_metrics()
print(f'Ep: {e}, Ep_Rew: {ep_reward}, Mean_Rew: {np.mean(mean_reward)}')
| true | true |
f724d00e6329a8b543e74c51d75c61d1be59e773 | 165 | py | Python | Oops/Python.py | SharkDeveloper/Client-Server-app | 071092f7f0a5ecc0c7e05eb8a5abeda759216709 | [
"Apache-2.0"
] | null | null | null | Oops/Python.py | SharkDeveloper/Client-Server-app | 071092f7f0a5ecc0c7e05eb8a5abeda759216709 | [
"Apache-2.0"
] | null | null | null | Oops/Python.py | SharkDeveloper/Client-Server-app | 071092f7f0a5ecc0c7e05eb8a5abeda759216709 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
try:
from PySide import QtWidgets
except:
from PyQt5 import QtWidgets
class Python:
def __init__(self):
print("Hi")
| 11.785714 | 32 | 0.612121 |
try:
from PySide import QtWidgets
except:
from PyQt5 import QtWidgets
class Python:
def __init__(self):
print("Hi")
| true | true |
f724d0189448a885ec38db8eea6a6121e6ff2796 | 11,397 | py | Python | django/db/backends/base/features.py | thomazzo/django | b0d716cbffdd66dd9108895d0524bef2530fc732 | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | django/db/backends/base/features.py | thomazzo/django | b0d716cbffdd66dd9108895d0524bef2530fc732 | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | django/db/backends/base/features.py | thomazzo/django | b0d716cbffdd66dd9108895d0524bef2530fc732 | [
"PSF-2.0",
"BSD-3-Clause"
] | null | null | null | from django.db.models.aggregates import StdDev
from django.db.utils import NotSupportedError, ProgrammingError
from django.utils.functional import cached_property
class BaseDatabaseFeatures:
gis_enabled = False
allows_group_by_pk = False
allows_group_by_selected_pks = False
empty_fetchmany_value = []
update_can_self_select = True
# Does the backend distinguish between '' and None?
interprets_empty_strings_as_nulls = False
# Does the backend allow inserting duplicate NULL rows in a nullable
# unique field? All core backends implement this correctly, but other
# databases such as SQL Server do not.
supports_nullable_unique_constraints = True
# Does the backend allow inserting duplicate rows when a unique_together
# constraint exists and some fields are nullable but not all of them?
supports_partially_nullable_unique_constraints = True
can_use_chunked_reads = True
can_return_id_from_insert = False
can_return_ids_from_bulk_insert = False
has_bulk_insert = True
uses_savepoints = False
can_release_savepoints = False
# If True, don't use integer foreign keys referring to, e.g., positive
# integer primary keys.
related_fields_match_type = False
allow_sliced_subqueries_with_in = True
has_select_for_update = False
has_select_for_update_nowait = False
has_select_for_update_skip_locked = False
has_select_for_update_of = False
# Does the database's SELECT FOR UPDATE OF syntax require a column rather
# than a table?
select_for_update_of_column = False
# Does the default test database allow multiple connections?
# Usually an indication that the test database is in-memory
test_db_allows_multiple_connections = True
# Can an object be saved without an explicit primary key?
supports_unspecified_pk = False
# Can a fixture contain forward references? i.e., are
# FK constraints checked at the end of transaction, or
# at the end of each save operation?
supports_forward_references = True
# Does the backend truncate names properly when they are too long?
truncates_names = False
# Is there a REAL datatype in addition to floats/doubles?
has_real_datatype = False
supports_subqueries_in_group_by = True
# Is there a true datatype for uuid?
has_native_uuid_field = False
# Is there a true datatype for timedeltas?
has_native_duration_field = False
# Does the database driver supports same type temporal data subtraction
# by returning the type used to store duration field?
supports_temporal_subtraction = False
# Does the __regex lookup support backreferencing and grouping?
supports_regex_backreferencing = True
# Can date/datetime lookups be performed using a string?
supports_date_lookup_using_string = True
# Can datetimes with timezones be used?
supports_timezones = True
# Does the database have a copy of the zoneinfo database?
has_zoneinfo_database = True
# When performing a GROUP BY, is an ORDER BY NULL required
# to remove any ordering?
requires_explicit_null_ordering_when_grouping = False
# Does the backend order NULL values as largest or smallest?
nulls_order_largest = False
# The database's limit on the number of query parameters.
max_query_params = None
# Can an object have an autoincrement primary key of 0? MySQL says No.
allows_auto_pk_0 = True
# Do we need to NULL a ForeignKey out, or can the constraint check be
# deferred
can_defer_constraint_checks = False
# date_interval_sql can properly handle mixed Date/DateTime fields and timedeltas
supports_mixed_date_datetime_comparisons = True
# Does the backend support tablespaces? Default to False because it isn't
# in the SQL standard.
supports_tablespaces = False
# Does the backend reset sequences between tests?
supports_sequence_reset = True
# Can the backend introspect the default value of a column?
can_introspect_default = True
# Confirm support for introspected foreign keys
# Every database can do this reliably, except MySQL,
# which can't do it for MyISAM tables
can_introspect_foreign_keys = True
# Can the backend introspect an AutoField, instead of an IntegerField?
can_introspect_autofield = False
# Can the backend introspect a BigIntegerField, instead of an IntegerField?
can_introspect_big_integer_field = True
# Can the backend introspect an BinaryField, instead of an TextField?
can_introspect_binary_field = True
# Can the backend introspect an DecimalField, instead of an FloatField?
can_introspect_decimal_field = True
# Can the backend introspect a DurationField, instead of a BigIntegerField?
can_introspect_duration_field = True
# Can the backend introspect an IPAddressField, instead of an CharField?
can_introspect_ip_address_field = False
# Can the backend introspect a PositiveIntegerField, instead of an IntegerField?
can_introspect_positive_integer_field = False
# Can the backend introspect a SmallIntegerField, instead of an IntegerField?
can_introspect_small_integer_field = False
# Can the backend introspect a TimeField, instead of a DateTimeField?
can_introspect_time_field = True
# Some backends may not be able to differentiate BooleanField from other
# fields such as IntegerField.
introspected_boolean_field_type = 'BooleanField'
# Can the backend introspect the column order (ASC/DESC) for indexes?
supports_index_column_ordering = True
# Support for the DISTINCT ON clause
can_distinct_on_fields = False
# Does the backend decide to commit before SAVEPOINT statements
# when autocommit is disabled? https://bugs.python.org/issue8145#msg109965
autocommits_when_autocommit_is_off = False
# Does the backend prevent running SQL queries in broken transactions?
atomic_transactions = True
# Can we roll back DDL in a transaction?
can_rollback_ddl = False
# Does it support operations requiring references rename in a transaction?
supports_atomic_references_rename = True
# Can we issue more than one ALTER COLUMN clause in an ALTER TABLE?
supports_combined_alters = False
# Does it support foreign keys?
supports_foreign_keys = True
# Does it support CHECK constraints?
supports_column_check_constraints = True
supports_table_check_constraints = True
# Does the backend support 'pyformat' style ("... %(name)s ...", {'name': value})
# parameter passing? Note this can be provided by the backend even if not
# supported by the Python driver
supports_paramstyle_pyformat = True
# Does the backend require literal defaults, rather than parameterized ones?
requires_literal_defaults = False
# Does the backend require a connection reset after each material schema change?
connection_persists_old_columns = False
# What kind of error does the backend throw when accessing closed cursor?
closed_cursor_error_class = ProgrammingError
# Does 'a' LIKE 'A' match?
has_case_insensitive_like = True
# Does the backend require the sqlparse library for splitting multi-line
# statements before executing them?
requires_sqlparse_for_splitting = True
# Suffix for backends that don't support "SELECT xxx;" queries.
bare_select_suffix = ''
# If NULL is implied on columns without needing to be explicitly specified
implied_column_null = False
uppercases_column_names = False
# Does the backend support "select for update" queries with limit (and offset)?
supports_select_for_update_with_limit = True
# Does the backend ignore null expressions in GREATEST and LEAST queries unless
# every expression is null?
greatest_least_ignores_nulls = False
# Can the backend clone databases for parallel test execution?
# Defaults to False to allow third-party backends to opt-in.
can_clone_databases = False
# Does the backend consider table names with different casing to
# be equal?
ignores_table_name_case = False
# Place FOR UPDATE right after FROM clause. Used on MSSQL.
for_update_after_from = False
# Combinatorial flags
supports_select_union = True
supports_select_intersection = True
supports_select_difference = True
supports_slicing_ordering_in_compound = False
# Does the database support SQL 2003 FILTER (WHERE ...) in aggregate
# expressions?
supports_aggregate_filter_clause = False
# Does the backend support indexing a TextField?
supports_index_on_text_field = True
# Does the backed support window expressions (expression OVER (...))?
supports_over_clause = False
# Does the backend support CAST with precision?
supports_cast_with_precision = True
# How many second decimals does the database return when casting a value to
# a type with time?
time_cast_precision = 6
# SQL to create a procedure for use by the Django test suite. The
# functionality of the procedure isn't important.
create_test_procedure_without_params_sql = None
create_test_procedure_with_int_param_sql = None
# Does the backend support keyword parameters for cursor.callproc()?
supports_callproc_kwargs = False
# Convert CharField results from bytes to str in database functions.
db_functions_convert_bytes_to_str = False
# What formats does the backend EXPLAIN syntax support?
supported_explain_formats = set()
# Does DatabaseOperations.explain_query_prefix() raise ValueError if
# unknown kwargs are passed to QuerySet.explain()?
validates_explain_options = True
# Does the backend support the default parameter in lead() and lag()?
supports_default_in_lead_lag = True
# Does the backend support ignoring constraint or uniqueness errors during
# INSERT?
supports_ignore_conflicts = True
# Does this backend require casting the results of CASE expressions used
# in UPDATE statements to ensure the expression has the correct type?
requires_casted_case_in_updates = False
    def __init__(self, connection):
        # Keep a reference to the live connection: the cached properties
        # below run real queries against it on first access.
        self.connection = connection
@cached_property
def supports_explaining_query_execution(self):
"""Does this backend support explaining query execution?"""
return self.connection.ops.explain_prefix is not None
    @cached_property
    def supports_transactions(self):
        """Confirm support for transactions.

        Probes the live database: inserts a row with autocommit off,
        rolls back, and checks that the row is gone. The result is
        cached for the lifetime of this features instance.
        """
        with self.connection.cursor() as cursor:
            cursor.execute('CREATE TABLE ROLLBACK_TEST (X INT)')
            self.connection.set_autocommit(False)
            cursor.execute('INSERT INTO ROLLBACK_TEST (X) VALUES (8)')
            self.connection.rollback()
            self.connection.set_autocommit(True)
            cursor.execute('SELECT COUNT(X) FROM ROLLBACK_TEST')
            count, = cursor.fetchone()
            cursor.execute('DROP TABLE ROLLBACK_TEST')
        # If the rollback was honoured, the table is empty again.
        return count == 0
@cached_property
def supports_stddev(self):
"""Confirm support for STDDEV and related stats functions."""
try:
self.connection.ops.check_expression_support(StdDev(1))
except NotSupportedError:
return False
return True
| 36.883495 | 85 | 0.742476 | from django.db.models.aggregates import StdDev
from django.db.utils import NotSupportedError, ProgrammingError
from django.utils.functional import cached_property
class BaseDatabaseFeatures:
    """
    Feature flags describing what a database backend supports.

    Backends override individual class attributes; the defaults here
    describe the most common behavior. An instance is bound to a single
    database connection, which the lazy (cached_property) probes use to
    detect support empirically.
    """
    gis_enabled = False
    allows_group_by_pk = False
    allows_group_by_selected_pks = False
    empty_fetchmany_value = []
    update_can_self_select = True
    interprets_empty_strings_as_nulls = False
    supports_nullable_unique_constraints = True
    supports_partially_nullable_unique_constraints = True
    can_use_chunked_reads = True
    can_return_id_from_insert = False
    can_return_ids_from_bulk_insert = False
    has_bulk_insert = True
    uses_savepoints = False
    can_release_savepoints = False
    # If True, don't use integer foreign keys referring to, e.g., positive
    # integer primary keys.
    related_fields_match_type = False
    allow_sliced_subqueries_with_in = True
    has_select_for_update = False
    has_select_for_update_nowait = False
    has_select_for_update_skip_locked = False
    has_select_for_update_of = False
    # Does the database's SELECT FOR UPDATE OF syntax require a column rather
    # than a table?
    select_for_update_of_column = False
    test_db_allows_multiple_connections = True
    supports_unspecified_pk = False
    supports_forward_references = True
    truncates_names = False
    has_real_datatype = False
    supports_subqueries_in_group_by = True
    has_native_uuid_field = False
    has_native_duration_field = False
    supports_temporal_subtraction = False
    supports_regex_backreferencing = True
    supports_date_lookup_using_string = True
    supports_timezones = True
    has_zoneinfo_database = True
    requires_explicit_null_ordering_when_grouping = False
    nulls_order_largest = False
    max_query_params = None
    # Can an object have an autoincrement primary key of 0? MySQL says No.
    allows_auto_pk_0 = True
    # Do we need to NULL a ForeignKey out, or can the constraint check be
    # deferred until commit?
    can_defer_constraint_checks = False
    # date_interval_sql can properly handle mixed Date/DateTime fields and timedeltas
    supports_mixed_date_datetime_comparisons = True
    # Does the backend support tablespaces? Default to False because it isn't
    # shown to be uniformly supported by all backends.
    supports_tablespaces = False
    supports_sequence_reset = True
    can_introspect_default = True
    can_introspect_foreign_keys = True
    # Can the backend introspect an AutoField, instead of an IntegerField?
    can_introspect_autofield = False
    # Can the backend introspect a BigIntegerField, instead of an IntegerField?
    can_introspect_big_integer_field = True
    # Can the backend introspect an BinaryField, instead of an TextField?
    can_introspect_binary_field = True
    # Can the backend introspect an DecimalField, instead of an FloatField?
    can_introspect_decimal_field = True
    # Can the backend introspect a DurationField, instead of a BigIntegerField?
    can_introspect_duration_field = True
    # Can the backend introspect an IPAddressField, instead of an CharField?
    can_introspect_ip_address_field = False
    # Can the backend introspect a PositiveIntegerField, instead of an IntegerField?
    can_introspect_positive_integer_field = False
    # Can the backend introspect a SmallIntegerField, instead of an IntegerField?
    can_introspect_small_integer_field = False
    # Can the backend introspect a TimeField, instead of a DateTimeField?
    can_introspect_time_field = True
    # Some backends may not be able to differentiate BooleanField from other
    # fields such as IntegerField.
    introspected_boolean_field_type = 'BooleanField'
    # Can the backend introspect the column order (ASC/DESC) for indexes?
    supports_index_column_ordering = True
    # Support for the DISTINCT ON clause
    can_distinct_on_fields = False
    # Does the backend decide to commit before SAVEPOINT statements
    # when autocommit is disabled? https://bugs.python.org/issue8145#msg109965
    autocommits_when_autocommit_is_off = False
    # Does the backend prevent running SQL queries in broken transactions?
    atomic_transactions = True
    # Can we roll back DDL in a transaction?
    can_rollback_ddl = False
    # Does it support operations requiring references rename in a transaction?
    supports_atomic_references_rename = True
    # Can we issue more than one ALTER COLUMN clause in an ALTER TABLE?
    supports_combined_alters = False
    # Does it support foreign keys?
    supports_foreign_keys = True
    # Does it support CHECK constraints?
    supports_column_check_constraints = True
    supports_table_check_constraints = True
    # Does the backend support 'pyformat' style ("... %(name)s ...", {'name': value})
    # parameter passing? Note this can be provided by the backend even if not
    # supported by the Python driver
    supports_paramstyle_pyformat = True
    # Does the backend require literal defaults, rather than parameterized ones?
    requires_literal_defaults = False
    # Does the backend require a connection reset after each material schema change?
    connection_persists_old_columns = False
    # What kind of error does the backend throw when accessing closed cursor?
    closed_cursor_error_class = ProgrammingError
    # Does 'a' LIKE 'A' match?
    has_case_insensitive_like = True
    # Does the backend require the sqlparse library for splitting multi-line
    # statements before executing them?
    requires_sqlparse_for_splitting = True
    # Suffix for backends that don't support "SELECT xxx;" queries.
    bare_select_suffix = ''
    implied_column_null = False
    uppercases_column_names = False
    supports_select_for_update_with_limit = True
    greatest_least_ignores_nulls = False
    can_clone_databases = False
    ignores_table_name_case = False
    for_update_after_from = False
    supports_select_union = True
    supports_select_intersection = True
    supports_select_difference = True
    supports_slicing_ordering_in_compound = False
    supports_aggregate_filter_clause = False
    supports_index_on_text_field = True
    supports_over_clause = False
    supports_cast_with_precision = True
    time_cast_precision = 6
    create_test_procedure_without_params_sql = None
    create_test_procedure_with_int_param_sql = None
    # Does the backend support keyword parameters for cursor.callproc()?
    supports_callproc_kwargs = False
    # Convert CharField results from bytes to str in database functions.
    db_functions_convert_bytes_to_str = False
    # What formats does the backend EXPLAIN syntax support?
    supported_explain_formats = set()
    # Does DatabaseOperations.explain_query_prefix() raise ValueError if
    # unknown kwargs are passed to QuerySet.explain()?
    validates_explain_options = True
    # Does the backend support the default parameter in lead() and lag()?
    supports_default_in_lead_lag = True
    # Does the backend support ignoring constraint or uniqueness errors during
    # INSERT?
    supports_ignore_conflicts = True
    # Does this backend require casting the results of CASE expressions used
    # in UPDATE statements to ensure the expression has the correct type?
    requires_casted_case_in_updates = False
    def __init__(self, connection):
        """Bind this set of feature flags to one database connection."""
        self.connection = connection
    @cached_property
    def supports_explaining_query_execution(self):
        """Does this backend support explaining query execution?"""
        return self.connection.ops.explain_prefix is not None
    @cached_property
    def supports_transactions(self):
        """Confirm support for transactions."""
        with self.connection.cursor() as cursor:
            # Probe empirically: insert a row inside an explicit transaction,
            # roll it back, and check whether the row disappeared.
            cursor.execute('CREATE TABLE ROLLBACK_TEST (X INT)')
            self.connection.set_autocommit(False)
            cursor.execute('INSERT INTO ROLLBACK_TEST (X) VALUES (8)')
            self.connection.rollback()
            self.connection.set_autocommit(True)
            cursor.execute('SELECT COUNT(X) FROM ROLLBACK_TEST')
            count, = cursor.fetchone()
            cursor.execute('DROP TABLE ROLLBACK_TEST')
        return count == 0
    @cached_property
    def supports_stddev(self):
        """Confirm support for STDDEV and related stats functions."""
        try:
            self.connection.ops.check_expression_support(StdDev(1))
        except NotSupportedError:
            return False
        return True
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.