code stringlengths 22 1.05M | apis listlengths 1 3.31k | extract_api stringlengths 75 3.25M |
|---|---|---|
from direct.directnotify import DirectNotifyGlobal
from direct.distributed.DistributedObjectAI import DistributedObjectAI
class DistributedPlantBaseAI(DistributedObjectAI):
notify = DirectNotifyGlobal.directNotify.newCategory('DistributedPlantBaseAI')
| [
"direct.directnotify.DirectNotifyGlobal.directNotify.newCategory"
] | [((187, 256), 'direct.directnotify.DirectNotifyGlobal.directNotify.newCategory', 'DirectNotifyGlobal.directNotify.newCategory', (['"""DistributedPlantBaseAI"""'], {}), "('DistributedPlantBaseAI')\n", (230, 256), False, 'from direct.directnotify import DirectNotifyGlobal\n')] |
from setuptools import setup
from pathlib import Path
from typing import Dict
HERE = Path(__file__).parent
version: Dict[str, str] = {}
version_file = HERE / "src" / "thermostate" / "_version.py"
exec(version_file.read_text(), version)
setup(version=version["__version__"], package_dir={"": "src"})
| [
"setuptools.setup",
"pathlib.Path"
] | [((239, 301), 'setuptools.setup', 'setup', ([], {'version': "version['__version__']", 'package_dir': "{'': 'src'}"}), "(version=version['__version__'], package_dir={'': 'src'})\n", (244, 301), False, 'from setuptools import setup\n'), ((86, 100), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (90, 100), False, 'from pathlib import Path\n')] |
# (c) 2018, Ansible by Red Hat, inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import collections
from ansible.module_utils.six import iteritems, string_types
from ansible.errors import AnsibleUndefinedVariable
class TemplateBase(object):
def __init__(self, templar):
self._templar = templar
def __call__(self, data, variables, convert_bare=False):
return self.template(data, variables, convert_bare)
def run(self, template, variables):
pass
def template(self, data, variables, convert_bare=False):
if isinstance(data, collections.Mapping):
templated_data = {}
for key, value in iteritems(data):
templated_key = self.template(key, variables, convert_bare=convert_bare)
templated_value = self.template(value, variables, convert_bare=convert_bare)
templated_data[templated_key] = templated_value
return templated_data
elif isinstance(data, collections.Iterable) and not isinstance(data, string_types):
return [self.template(i, variables, convert_bare=convert_bare) for i in data]
else:
data = data or {}
tmp_avail_vars = self._templar._available_variables
self._templar.set_available_variables(variables)
try:
resp = self._templar.template(data, convert_bare=convert_bare)
resp = self._coerce_to_native(resp)
except AnsibleUndefinedVariable:
resp = None
pass
finally:
self._templar.set_available_variables(tmp_avail_vars)
return resp
def _coerce_to_native(self, value):
if not isinstance(value, bool):
try:
value = int(value)
except Exception:
if value is None or len(value) == 0:
return None
pass
return value
def _update(self, d, u):
for k, v in iteritems(u):
if isinstance(v, collections.Mapping):
d[k] = self._update(d.get(k, {}), v)
else:
d[k] = v
return d
| [
"ansible.module_utils.six.iteritems"
] | [((2284, 2296), 'ansible.module_utils.six.iteritems', 'iteritems', (['u'], {}), '(u)\n', (2293, 2296), False, 'from ansible.module_utils.six import iteritems, string_types\n'), ((937, 952), 'ansible.module_utils.six.iteritems', 'iteritems', (['data'], {}), '(data)\n', (946, 952), False, 'from ansible.module_utils.six import iteritems, string_types\n')] |
from Redy.Opt import feature, constexpr
import timeit
class Closure(tuple):
def __call__(self, a):
c, f = self
return f(c, a)
def f1(x):
def g(y):
return x + y
return g
def fc(c, y):
return c + y
@feature(constexpr)
def f2(x):
return constexpr[Closure]((x, constexpr[fc]))
print(f1(1)(2))
print(f2(1)(2))
# 3
# 3
# mk closure
print(timeit.timeit("f(1)", globals=dict(f=f1)))
print(timeit.timeit("f(1)", globals=dict(f=f2)))
# 0.15244655999958923
# 0.16590227899905585
f1_ = f1(2)
f2_ = f2(2)
print(timeit.timeit("f(1)", globals=dict(f=f1_)))
print(timeit.timeit("f(1)", globals=dict(f=f2_)))
# 0.08070355000018026
# 0.20936105600048904
# So, use builtin closures instead of making our own
| [
"Redy.Opt.feature"
] | [((246, 264), 'Redy.Opt.feature', 'feature', (['constexpr'], {}), '(constexpr)\n', (253, 264), False, 'from Redy.Opt import feature, constexpr\n')] |
"""AI is creating summary for
"""
from frontend import main_window
from PyQt5 import QtWidgets
from frontend import input_system
from PyQt5.QtWidgets import QInputDialog, qApp
from qt_material import apply_stylesheet
style_sheets = ['dark_amber.xml',
'dark_blue.xml',
'dark_cyan.xml',
'dark_lightgreen.xml',
'dark_pink.xml',
'dark_purple.xml',
'dark_red.xml',
'dark_teal.xml',
'dark_yellow.xml',
'light_amber.xml',
'light_blue.xml',
'light_cyan.xml',
'light_cyan_500.xml',
'light_lightgreen.xml',
'light_pink.xml',
'light_purple.xml',
'light_red.xml',
'light_teal.xml',
'light_yellow.xml']
class Two_Dim_system(QtWidgets.QMainWindow, main_window.Ui_MainWindow, input_system.Ui_input_system):
"""AI is creating summary for App
Args:
QtWidgets ([type]): [description]
main_window ([type]): [description]
"""
def __init__(self):
"""AI is creating summary for __init__
"""
super().__init__()
self.ui = main_window.Ui_MainWindow()
self.ui.setupUi(self)
self.InitUI()
def InitUI(self):
"""AI is creating summary for setupUi
"""
self.setupUi(self)
# self.statusBar = QStatusBar()
# self.setStatusBar(self.statusBar)
# self.menuFile.setStatusTip()
# self.menuFile.setStatusTip("test")
self.actionExit.triggered.connect(qApp.quit)
self.darkamber.triggered.connect(lambda: self.__change_theme(style_sheets.index('dark_amber.xml')))
self.lightamber.triggered.connect(lambda: self.__change_theme(style_sheets.index('light_amber.xml')))
self.darkblue.triggered.connect(lambda: self.__change_theme(style_sheets.index('dark_blue.xml')))
self.lightblue.triggered.connect(lambda: self.__change_theme(style_sheets.index('light_blue.xml')))
self.darkcyan.triggered.connect(lambda: self.__change_theme(style_sheets.index('dark_cyan.xml')))
self.lightcyan.triggered.connect(lambda: self.__change_theme(style_sheets.index('light_cyan.xml')))
self.darklightgreen.triggered.connect(lambda: self.__change_theme(style_sheets.index('dark_lightgreen.xml')))
self.lightlightgreen.triggered.connect(lambda: self.__change_theme(style_sheets.index('light_lightgreen.xml')))
self.darkpink.triggered.connect(lambda: self.__change_theme(style_sheets.index('dark_pink.xml')))
self.lightping.triggered.connect(lambda: self.__change_theme(style_sheets.index('light_pink.xml')))
self.darkpurple.triggered.connect(lambda: self.__change_theme(style_sheets.index('dark_purple.xml')))
self.lightpurple.triggered.connect(lambda: self.__change_theme(style_sheets.index('light_purple.xml')))
self.darkred.triggered.connect(lambda: self.__change_theme(style_sheets.index('dark_red.xml')))
self.lightred.triggered.connect(lambda: self.__change_theme(style_sheets.index('light_red.xml')))
self.darkteal.triggered.connect(lambda: self.__change_theme(style_sheets.index('dark_teal.xml')))
self.lightteal.triggered.connect(lambda: self.__change_theme(style_sheets.index('light_teal.xml')))
self.darkyellow.triggered.connect(lambda: self.__change_theme(style_sheets.index('dark_yellow.xml')))
self.lightyellow.triggered.connect(lambda: self.__change_theme(style_sheets.index('light_yellow.xml')))
self.actionInput_system.triggered.connect(self.__input_system)
def __input_system(self):
self.window = QtWidgets.QMainWindow()
self.ui = input_system.Ui_input_system()
self.ui.setupUi(self.window)
self.window.show()
def __change_theme(self, number: int):
"""AI is creating summary for change_theme
Args:
number (int): [description]
"""
with open('config_theme', 'w') as file:
file.write(str(number))
apply_stylesheet(self, theme=style_sheets[number])
print('TEST')
| [
"qt_material.apply_stylesheet",
"PyQt5.QtWidgets.QMainWindow",
"frontend.main_window.Ui_MainWindow",
"frontend.input_system.Ui_input_system"
] | [((1257, 1284), 'frontend.main_window.Ui_MainWindow', 'main_window.Ui_MainWindow', ([], {}), '()\n', (1282, 1284), False, 'from frontend import main_window\n'), ((3756, 3779), 'PyQt5.QtWidgets.QMainWindow', 'QtWidgets.QMainWindow', ([], {}), '()\n', (3777, 3779), False, 'from PyQt5 import QtWidgets\n'), ((3798, 3828), 'frontend.input_system.Ui_input_system', 'input_system.Ui_input_system', ([], {}), '()\n', (3826, 3828), False, 'from frontend import input_system\n'), ((4147, 4197), 'qt_material.apply_stylesheet', 'apply_stylesheet', (['self'], {'theme': 'style_sheets[number]'}), '(self, theme=style_sheets[number])\n', (4163, 4197), False, 'from qt_material import apply_stylesheet\n')] |
import mss
import numpy as np
from PIL import Image
from config import BOARD_HEIGHT, BOARD_WIDTH
CELL_SIZE = 22
BOARD_X = 14
BOARD_Y = 111
COLOR_CODES = {
(0, 0, 255): 1,
(0, 123, 0): 2,
(255, 0, 0): 3,
(0, 0, 123): 4,
(123, 0, 0): 5,
(0, 123, 123): 6,
(0, 0, 0): 7,
(123, 123, 123): 8,
(189, 189, 189): 0 #unopened/opened blank
}
def get_cell_type(cell) -> int:
cell_type = COLOR_CODES[cell.getpixel((15, 16))]
#cell_type=COLOR_CODES[cell.getpixel((13,14))]
if cell_type == 0 and cell.getpixel((1, 16)) != (255, 255, 255):
cell_type = -1
return cell_type
def get_board_array() -> np.ndarray:
with mss.mss() as sct:
screenshot = sct.grab(sct.monitors[0])
img = Image.frombytes('RGB', screenshot.size, screenshot.bgra, 'raw', 'BGRX')
#board=img.crop((384,111,1044,463))
board = img.crop((BOARD_X, BOARD_Y, BOARD_X + CELL_SIZE * BOARD_WIDTH, BOARD_Y + CELL_SIZE * BOARD_HEIGHT))
width, height = board.size
cell_imgs = [
board.crop((i, j, i + CELL_SIZE, j + CELL_SIZE)) for j in range(0, height, CELL_SIZE) for i in range(0, width, CELL_SIZE)
]
cells = np.fromiter((get_cell_type(cell) for cell in cell_imgs), dtype=np.int8)
grid = np.reshape(cells, (BOARD_HEIGHT, BOARD_WIDTH))
#surrond grid with -1(so you can make cell_surrondings with no errors)
return np.concatenate(
(
np.full((1, BOARD_WIDTH + 2), -1, dtype=np.int8), #top row of -1
np.insert(grid, (0, BOARD_WIDTH), -1, axis=1), #fill sides with -1
np.full((1, BOARD_WIDTH + 2), -1, dtype=np.int8) #bottom row of -1
)
)
| [
"numpy.insert",
"numpy.reshape",
"mss.mss",
"numpy.full",
"PIL.Image.frombytes"
] | [((1163, 1209), 'numpy.reshape', 'np.reshape', (['cells', '(BOARD_HEIGHT, BOARD_WIDTH)'], {}), '(cells, (BOARD_HEIGHT, BOARD_WIDTH))\n', (1173, 1209), True, 'import numpy as np\n'), ((617, 626), 'mss.mss', 'mss.mss', ([], {}), '()\n', (624, 626), False, 'import mss\n'), ((684, 755), 'PIL.Image.frombytes', 'Image.frombytes', (['"""RGB"""', 'screenshot.size', 'screenshot.bgra', '"""raw"""', '"""BGRX"""'], {}), "('RGB', screenshot.size, screenshot.bgra, 'raw', 'BGRX')\n", (699, 755), False, 'from PIL import Image\n'), ((1312, 1360), 'numpy.full', 'np.full', (['(1, BOARD_WIDTH + 2)', '(-1)'], {'dtype': 'np.int8'}), '((1, BOARD_WIDTH + 2), -1, dtype=np.int8)\n', (1319, 1360), True, 'import numpy as np\n'), ((1380, 1425), 'numpy.insert', 'np.insert', (['grid', '(0, BOARD_WIDTH)', '(-1)'], {'axis': '(1)'}), '(grid, (0, BOARD_WIDTH), -1, axis=1)\n', (1389, 1425), True, 'import numpy as np\n'), ((1450, 1498), 'numpy.full', 'np.full', (['(1, BOARD_WIDTH + 2)', '(-1)'], {'dtype': 'np.int8'}), '((1, BOARD_WIDTH + 2), -1, dtype=np.int8)\n', (1457, 1498), True, 'import numpy as np\n')] |
from django.apps import AppConfig
from django.db.models.signals import post_migrate
from django.utils.translation import gettext_lazy as _
class SitesConfig(AppConfig):
name = 'src.base'
verbose_name = _("Modulo de Frontend")
| [
"django.utils.translation.gettext_lazy"
] | [((212, 235), 'django.utils.translation.gettext_lazy', '_', (['"""Modulo de Frontend"""'], {}), "('Modulo de Frontend')\n", (213, 235), True, 'from django.utils.translation import gettext_lazy as _\n')] |
from sqlalchemy import MetaData, Table, Index, Column, Integer
meta = MetaData()
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
upload = Table("upload", meta, autoload=True)
uploader_id = Column("uploader_id", Integer)
uploader_id.create(upload)
idx_upload_uploader_id = Index("idx_upload_uploader_id", upload.c.uploader_id)
idx_upload_uploader_id.create(migrate_engine)
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
upload = Table("upload", meta, autoload=True)
idx_upload_uploader_id = Index("idx_upload_uploader_id", upload.c.uploader_id)
idx_upload_uploader_id.drop(migrate_engine)
upload.c.uploader_id.drop()
| [
"sqlalchemy.MetaData",
"sqlalchemy.Column",
"sqlalchemy.Table",
"sqlalchemy.Index"
] | [((71, 81), 'sqlalchemy.MetaData', 'MetaData', ([], {}), '()\n', (79, 81), False, 'from sqlalchemy import MetaData, Table, Index, Column, Integer\n'), ((124, 153), 'sqlalchemy.MetaData', 'MetaData', ([], {'bind': 'migrate_engine'}), '(bind=migrate_engine)\n', (132, 153), False, 'from sqlalchemy import MetaData, Table, Index, Column, Integer\n'), ((167, 203), 'sqlalchemy.Table', 'Table', (['"""upload"""', 'meta'], {'autoload': '(True)'}), "('upload', meta, autoload=True)\n", (172, 203), False, 'from sqlalchemy import MetaData, Table, Index, Column, Integer\n'), ((223, 253), 'sqlalchemy.Column', 'Column', (['"""uploader_id"""', 'Integer'], {}), "('uploader_id', Integer)\n", (229, 253), False, 'from sqlalchemy import MetaData, Table, Index, Column, Integer\n'), ((315, 368), 'sqlalchemy.Index', 'Index', (['"""idx_upload_uploader_id"""', 'upload.c.uploader_id'], {}), "('idx_upload_uploader_id', upload.c.uploader_id)\n", (320, 368), False, 'from sqlalchemy import MetaData, Table, Index, Column, Integer\n'), ((463, 492), 'sqlalchemy.MetaData', 'MetaData', ([], {'bind': 'migrate_engine'}), '(bind=migrate_engine)\n', (471, 492), False, 'from sqlalchemy import MetaData, Table, Index, Column, Integer\n'), ((506, 542), 'sqlalchemy.Table', 'Table', (['"""upload"""', 'meta'], {'autoload': '(True)'}), "('upload', meta, autoload=True)\n", (511, 542), False, 'from sqlalchemy import MetaData, Table, Index, Column, Integer\n'), ((573, 626), 'sqlalchemy.Index', 'Index', (['"""idx_upload_uploader_id"""', 'upload.c.uploader_id'], {}), "('idx_upload_uploader_id', upload.c.uploader_id)\n", (578, 626), False, 'from sqlalchemy import MetaData, Table, Index, Column, Integer\n')] |
#!/usr/bin/env python
"""
Subrequests to do things like range requests, content negotiation checks,
and validation.
This is the base class for all subrequests.
"""
from abc import ABCMeta, abstractmethod
from configparser import SectionProxy
from typing import List, Tuple, Type, Union, TYPE_CHECKING
from redbot.resource.fetch import RedFetcher
from redbot.speak import Note, levels, categories
from redbot.type import StrHeaderListType
if TYPE_CHECKING:
from redbot.resource import (
HttpResource,
) # pylint: disable=cyclic-import,unused-import
class SubRequest(RedFetcher, metaclass=ABCMeta):
"""
Base class for a subrequest of a "main" HttpResource, made to perform
additional behavioural tests on the resource.
"""
check_name = "undefined"
response_phrase = "undefined"
def __init__(self, config: SectionProxy, base_resource: "HttpResource") -> None:
self.config = config
self.base = base_resource # type: HttpResource
RedFetcher.__init__(self, config)
self.check_done = False
self.on("fetch_done", self._check_done)
@abstractmethod
def done(self) -> None:
"""The subrequest is done, process it. Must be overridden."""
raise NotImplementedError
def _check_done(self) -> None:
if self.preflight():
self.done()
self.check_done = True
self.emit("check_done")
def check(self) -> None:
modified_headers = self.modify_request_headers(list(self.base.request.headers))
RedFetcher.set_request(
self,
self.base.request.uri,
self.base.request.method,
modified_headers,
self.base.request.payload,
)
RedFetcher.check(self)
@abstractmethod
def modify_request_headers(
self, base_request_headers: StrHeaderListType
) -> StrHeaderListType:
"""Usually overridden; modifies the request headers."""
return base_request_headers
def add_base_note(
self, subject: str, note: Type[Note], **kw: Union[str, int]
) -> None:
"Add a Note to the base resource."
kw["response"] = self.response_phrase
self.base.add_note(subject, note, **kw)
def check_missing_hdrs(self, hdrs: List[str], note: Type[Note]) -> None:
"""
See if the listed headers are missing in the subrequest; if so,
set the specified note.
"""
missing_hdrs = []
for hdr in hdrs:
if (
hdr in self.base.response.parsed_headers
and hdr not in self.response.parsed_headers
):
missing_hdrs.append(hdr)
if missing_hdrs:
self.add_base_note("headers", note, missing_hdrs=", ".join(missing_hdrs))
self.add_note("headers", note, missing_hdrs=", ".join(missing_hdrs))
class MISSING_HDRS_304(Note):
category = categories.VALIDATION
level = levels.WARN
summary = "%(response)s is missing required headers."
text = """\
HTTP requires `304 Not Modified` responses to have certain headers, if they are also present in a
normal (e.g., `200 OK` response).
%(response)s is missing the following headers: `%(missing_hdrs)s`.
This can affect cache operation; because the headers are missing, caches might remove them from
their cached copies."""
| [
"redbot.resource.fetch.RedFetcher.check",
"redbot.resource.fetch.RedFetcher.__init__",
"redbot.resource.fetch.RedFetcher.set_request"
] | [((1004, 1037), 'redbot.resource.fetch.RedFetcher.__init__', 'RedFetcher.__init__', (['self', 'config'], {}), '(self, config)\n', (1023, 1037), False, 'from redbot.resource.fetch import RedFetcher\n'), ((1549, 1676), 'redbot.resource.fetch.RedFetcher.set_request', 'RedFetcher.set_request', (['self', 'self.base.request.uri', 'self.base.request.method', 'modified_headers', 'self.base.request.payload'], {}), '(self, self.base.request.uri, self.base.request.\n method, modified_headers, self.base.request.payload)\n', (1571, 1676), False, 'from redbot.resource.fetch import RedFetcher\n'), ((1751, 1773), 'redbot.resource.fetch.RedFetcher.check', 'RedFetcher.check', (['self'], {}), '(self)\n', (1767, 1773), False, 'from redbot.resource.fetch import RedFetcher\n')] |
import numpy
N,M,P = map(int,input().split())
p_cols1 =numpy.array([input().split() for _ in range(N)],int)
p_cols1.shape = (N,P)
p_cols2 =numpy.array([input().split() for _ in range(M)],int)
p_cols2.shape = (M,P)
concatenated = numpy.concatenate((p_cols1, p_cols2), axis = 0)
print(concatenated)
| [
"numpy.concatenate"
] | [((232, 277), 'numpy.concatenate', 'numpy.concatenate', (['(p_cols1, p_cols2)'], {'axis': '(0)'}), '((p_cols1, p_cols2), axis=0)\n', (249, 277), False, 'import numpy\n')] |
# GCT634 (2018) HW1
#
# Mar-18-2018: initial version
#
# <NAME>
#
import sys
import os
import numpy as np
import matplotlib.pyplot as plt
data_path = './dataset/'
mfcc_path = './mfcc/'
MFCC_DIM = 20
def mean_mfcc(dataset='train'):
f = open(data_path + dataset + '_list.txt','r')
if dataset == 'train':
mfcc_mat = np.zeros(shape=(MFCC_DIM, 1100))
else:
mfcc_mat = np.zeros(shape=(MFCC_DIM, 300))
i = 0
for file_name in f:
# load mfcc file
file_name = file_name.rstrip('\n')
file_name = file_name.replace('.wav','.npy')
mfcc_file = mfcc_path + file_name
mfcc = np.load(mfcc_file)
# mean pooling
temp = np.mean(mfcc, axis=1)
mfcc_mat[:,i]= np.mean(mfcc, axis=1)
i = i + 1
f.close()
return mfcc_mat
if __name__ == '__main__':
train_data = mean_mfcc('train')
valid_data = mean_mfcc('valid')
plt.figure(1)
plt.subplot(2,1,1)
plt.imshow(train_data, interpolation='nearest', origin='lower', aspect='auto')
plt.colorbar(format='%+2.0f dB')
plt.subplot(2,1,2)
plt.imshow(valid_data, interpolation='nearest', origin='lower', aspect='auto')
plt.colorbar(format='%+2.0f dB')
plt.show()
| [
"matplotlib.pyplot.imshow",
"numpy.mean",
"matplotlib.pyplot.colorbar",
"matplotlib.pyplot.figure",
"numpy.zeros",
"numpy.load",
"matplotlib.pyplot.subplot",
"matplotlib.pyplot.show"
] | [((941, 954), 'matplotlib.pyplot.figure', 'plt.figure', (['(1)'], {}), '(1)\n', (951, 954), True, 'import matplotlib.pyplot as plt\n'), ((959, 979), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(1)'], {}), '(2, 1, 1)\n', (970, 979), True, 'import matplotlib.pyplot as plt\n'), ((982, 1060), 'matplotlib.pyplot.imshow', 'plt.imshow', (['train_data'], {'interpolation': '"""nearest"""', 'origin': '"""lower"""', 'aspect': '"""auto"""'}), "(train_data, interpolation='nearest', origin='lower', aspect='auto')\n", (992, 1060), True, 'import matplotlib.pyplot as plt\n'), ((1065, 1097), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {'format': '"""%+2.0f dB"""'}), "(format='%+2.0f dB')\n", (1077, 1097), True, 'import matplotlib.pyplot as plt\n'), ((1103, 1123), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(2)'], {}), '(2, 1, 2)\n', (1114, 1123), True, 'import matplotlib.pyplot as plt\n'), ((1126, 1204), 'matplotlib.pyplot.imshow', 'plt.imshow', (['valid_data'], {'interpolation': '"""nearest"""', 'origin': '"""lower"""', 'aspect': '"""auto"""'}), "(valid_data, interpolation='nearest', origin='lower', aspect='auto')\n", (1136, 1204), True, 'import matplotlib.pyplot as plt\n'), ((1209, 1241), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {'format': '"""%+2.0f dB"""'}), "(format='%+2.0f dB')\n", (1221, 1241), True, 'import matplotlib.pyplot as plt\n'), ((1247, 1257), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (1255, 1257), True, 'import matplotlib.pyplot as plt\n'), ((341, 373), 'numpy.zeros', 'np.zeros', ([], {'shape': '(MFCC_DIM, 1100)'}), '(shape=(MFCC_DIM, 1100))\n', (349, 373), True, 'import numpy as np\n'), ((403, 434), 'numpy.zeros', 'np.zeros', ([], {'shape': '(MFCC_DIM, 300)'}), '(shape=(MFCC_DIM, 300))\n', (411, 434), True, 'import numpy as np\n'), ((649, 667), 'numpy.load', 'np.load', (['mfcc_file'], {}), '(mfcc_file)\n', (656, 667), True, 'import numpy as np\n'), ((707, 728), 'numpy.mean', 'np.mean', (['mfcc'], 
{'axis': '(1)'}), '(mfcc, axis=1)\n', (714, 728), True, 'import numpy as np\n'), ((752, 773), 'numpy.mean', 'np.mean', (['mfcc'], {'axis': '(1)'}), '(mfcc, axis=1)\n', (759, 773), True, 'import numpy as np\n')] |
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Ambari Agent
"""
import sys
from resource_management import *
from yarn import yarn
from service import service
class Resourcemanager(Script):
def install(self, env):
self.install_packages(env)
self.configure(env)
def configure(self, env):
import params
env.set_params(params)
yarn(name='resourcemanager')
def start(self, env):
import params
env.set_params(params)
self.configure(env) # FOR SECURITY
service('resourcemanager',
action='start'
)
def stop(self, env):
import params
env.set_params(params)
service('resourcemanager',
action='stop'
)
def status(self, env):
import status_params
env.set_params(status_params)
check_process_status(status_params.resourcemanager_pid_file)
pass
def refreshqueues(self, env):
import params
self.configure(env)
env.set_params(params)
service('resourcemanager',
action='refreshQueues'
)
def decommission(self, env):
import params
env.set_params(params)
rm_kinit_cmd = params.rm_kinit_cmd
yarn_user = params.yarn_user
conf_dir = params.hadoop_conf_dir
user_group = params.user_group
yarn_refresh_cmd = format("{rm_kinit_cmd} yarn --config {conf_dir} rmadmin -refreshNodes")
File(params.exclude_file_path,
content=Template("exclude_hosts_list.j2"),
owner=yarn_user,
group=user_group
)
if params.update_exclude_file_only == False:
Execute(yarn_refresh_cmd,
environment= {'PATH' : params.execute_path },
user=yarn_user)
pass
pass
if __name__ == "__main__":
Resourcemanager().execute()
| [
"yarn.yarn",
"service.service"
] | [((1069, 1097), 'yarn.yarn', 'yarn', ([], {'name': '"""resourcemanager"""'}), "(name='resourcemanager')\n", (1073, 1097), False, 'from yarn import yarn\n'), ((1212, 1254), 'service.service', 'service', (['"""resourcemanager"""'], {'action': '"""start"""'}), "('resourcemanager', action='start')\n", (1219, 1254), False, 'from service import service\n'), ((1347, 1388), 'service.service', 'service', (['"""resourcemanager"""'], {'action': '"""stop"""'}), "('resourcemanager', action='stop')\n", (1354, 1388), False, 'from service import service\n'), ((1674, 1724), 'service.service', 'service', (['"""resourcemanager"""'], {'action': '"""refreshQueues"""'}), "('resourcemanager', action='refreshQueues')\n", (1681, 1724), False, 'from service import service\n')] |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import requests
import manilaclient
from manilaclient.common import httpclient
from manilaclient import exceptions
from manilaclient.tests.unit import utils
fake_user_agent = "fake"
fake_response = utils.TestResponse({
"status_code": 200,
"text": '{"hi": "there"}',
})
mock_request = mock.Mock(return_value=(fake_response))
bad_400_response = utils.TestResponse({
"status_code": 400,
"text": '{"error": {"message": "n/a", "details": "Terrible!"}}',
})
bad_400_request = mock.Mock(return_value=(bad_400_response))
bad_401_response = utils.TestResponse({
"status_code": 401,
"text": '{"error": {"message": "FAILED!", "details": "DETAILS!"}}',
})
bad_401_request = mock.Mock(return_value=(bad_401_response))
bad_500_response = utils.TestResponse({
"status_code": 500,
"text": '{"error": {"message": "FAILED!", "details": "DETAILS!"}}',
})
bad_500_request = mock.Mock(return_value=(bad_500_response))
retry_after_response = utils.TestResponse({
"status_code": 413,
"text": '',
"headers": {
"retry-after": "5"
},
})
retry_after_mock_request = mock.Mock(return_value=retry_after_response)
retry_after_no_headers_response = utils.TestResponse({
"status_code": 413,
"text": '',
})
retry_after_no_headers_mock_request = mock.Mock(
return_value=retry_after_no_headers_response)
retry_after_non_supporting_response = utils.TestResponse({
"status_code": 403,
"text": '',
"headers": {
"retry-after": "5"
},
})
retry_after_non_supporting_mock_request = mock.Mock(
return_value=retry_after_non_supporting_response)
def get_authed_client(retries=0):
cl = httpclient.HTTPClient("http://example.com", "token", fake_user_agent,
retries=retries, http_log_debug=True,
api_version=manilaclient.API_MAX_VERSION)
return cl
class ClientTest(utils.TestCase):
def setUp(self):
super(ClientTest, self).setUp()
self.max_version = manilaclient.API_MAX_VERSION
self.max_version_str = self.max_version.get_string()
def test_get(self):
cl = get_authed_client()
@mock.patch.object(requests, "request", mock_request)
@mock.patch('time.time', mock.Mock(return_value=1234))
def test_get_call():
resp, body = cl.get("/hi")
headers = {
"X-Auth-Token": "token",
"User-Agent": fake_user_agent,
cl.API_VERSION_HEADER: self.max_version_str,
'Accept': 'application/json',
}
mock_request.assert_called_with(
"GET",
"http://example.com/hi",
headers=headers,
**self.TEST_REQUEST_BASE)
# Automatic JSON parsing
self.assertEqual(body, {"hi": "there"})
test_get_call()
def test_get_retry_500(self):
cl = get_authed_client(retries=1)
self.requests = [bad_500_request, mock_request]
def request(*args, **kwargs):
next_request = self.requests.pop(0)
return next_request(*args, **kwargs)
@mock.patch.object(requests, "request", request)
@mock.patch('time.time', mock.Mock(return_value=1234))
def test_get_call():
resp, body = cl.get("/hi")
test_get_call()
self.assertEqual(self.requests, [])
def test_retry_limit(self):
cl = get_authed_client(retries=1)
self.requests = [bad_500_request, bad_500_request, mock_request]
def request(*args, **kwargs):
next_request = self.requests.pop(0)
return next_request(*args, **kwargs)
@mock.patch.object(requests, "request", request)
@mock.patch('time.time', mock.Mock(return_value=1234))
def test_get_call():
resp, body = cl.get("/hi")
self.assertRaises(exceptions.ClientException, test_get_call)
self.assertEqual(self.requests, [mock_request])
def test_get_no_retry_400(self):
cl = get_authed_client(retries=0)
self.requests = [bad_400_request, mock_request]
def request(*args, **kwargs):
next_request = self.requests.pop(0)
return next_request(*args, **kwargs)
@mock.patch.object(requests, "request", request)
@mock.patch('time.time', mock.Mock(return_value=1234))
def test_get_call():
resp, body = cl.get("/hi")
self.assertRaises(exceptions.BadRequest, test_get_call)
self.assertEqual(self.requests, [mock_request])
def test_get_retry_400_socket(self):
cl = get_authed_client(retries=1)
self.requests = [bad_400_request, mock_request]
def request(*args, **kwargs):
next_request = self.requests.pop(0)
return next_request(*args, **kwargs)
@mock.patch.object(requests, "request", request)
@mock.patch('time.time', mock.Mock(return_value=1234))
def test_get_call():
resp, body = cl.get("/hi")
test_get_call()
self.assertEqual(self.requests, [])
def test_get_with_retries_none(self):
cl = get_authed_client(retries=None)
@mock.patch.object(requests, "request", bad_401_request)
def test_get_call():
resp, body = cl.get("/hi")
self.assertRaises(exceptions.Unauthorized, test_get_call)
def test_post(self):
cl = get_authed_client()
@mock.patch.object(requests, "request", mock_request)
def test_post_call():
cl.post("/hi", body=[1, 2, 3])
headers = {
"X-Auth-Token": "token",
"Content-Type": "application/json",
'Accept': 'application/json',
"X-Openstack-Manila-Api-Version": self.max_version_str,
"User-Agent": fake_user_agent
}
mock_request.assert_called_with(
"POST",
"http://example.com/hi",
headers=headers,
data='[1, 2, 3]',
**self.TEST_REQUEST_BASE)
test_post_call()
| [
"mock.Mock",
"manilaclient.common.httpclient.HTTPClient",
"manilaclient.tests.unit.utils.TestResponse",
"mock.patch.object"
] | [((754, 821), 'manilaclient.tests.unit.utils.TestResponse', 'utils.TestResponse', (['{\'status_code\': 200, \'text\': \'{"hi": "there"}\'}'], {}), '({\'status_code\': 200, \'text\': \'{"hi": "there"}\'})\n', (772, 821), False, 'from manilaclient.tests.unit import utils\n'), ((848, 885), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'fake_response'}), '(return_value=fake_response)\n', (857, 885), False, 'import mock\n'), ((908, 1017), 'manilaclient.tests.unit.utils.TestResponse', 'utils.TestResponse', (['{\'status_code\': 400, \'text\':\n \'{"error": {"message": "n/a", "details": "Terrible!"}}\'}'], {}), '({\'status_code\': 400, \'text\':\n \'{"error": {"message": "n/a", "details": "Terrible!"}}\'})\n', (926, 1017), False, 'from manilaclient.tests.unit import utils\n'), ((1043, 1083), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'bad_400_response'}), '(return_value=bad_400_response)\n', (1052, 1083), False, 'import mock\n'), ((1106, 1218), 'manilaclient.tests.unit.utils.TestResponse', 'utils.TestResponse', (['{\'status_code\': 401, \'text\':\n \'{"error": {"message": "FAILED!", "details": "DETAILS!"}}\'}'], {}), '({\'status_code\': 401, \'text\':\n \'{"error": {"message": "FAILED!", "details": "DETAILS!"}}\'})\n', (1124, 1218), False, 'from manilaclient.tests.unit import utils\n'), ((1244, 1284), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'bad_401_response'}), '(return_value=bad_401_response)\n', (1253, 1284), False, 'import mock\n'), ((1307, 1419), 'manilaclient.tests.unit.utils.TestResponse', 'utils.TestResponse', (['{\'status_code\': 500, \'text\':\n \'{"error": {"message": "FAILED!", "details": "DETAILS!"}}\'}'], {}), '({\'status_code\': 500, \'text\':\n \'{"error": {"message": "FAILED!", "details": "DETAILS!"}}\'})\n', (1325, 1419), False, 'from manilaclient.tests.unit import utils\n'), ((1445, 1485), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'bad_500_response'}), '(return_value=bad_500_response)\n', (1454, 1485), False, 'import mock\n'), 
((1512, 1602), 'manilaclient.tests.unit.utils.TestResponse', 'utils.TestResponse', (["{'status_code': 413, 'text': '', 'headers': {'retry-after': '5'}}"], {}), "({'status_code': 413, 'text': '', 'headers': {\n 'retry-after': '5'}})\n", (1530, 1602), False, 'from manilaclient.tests.unit import utils\n'), ((1654, 1698), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'retry_after_response'}), '(return_value=retry_after_response)\n', (1663, 1698), False, 'import mock\n'), ((1734, 1786), 'manilaclient.tests.unit.utils.TestResponse', 'utils.TestResponse', (["{'status_code': 413, 'text': ''}"], {}), "({'status_code': 413, 'text': ''})\n", (1752, 1786), False, 'from manilaclient.tests.unit import utils\n'), ((1836, 1891), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'retry_after_no_headers_response'}), '(return_value=retry_after_no_headers_response)\n', (1845, 1891), False, 'import mock\n'), ((1936, 2026), 'manilaclient.tests.unit.utils.TestResponse', 'utils.TestResponse', (["{'status_code': 403, 'text': '', 'headers': {'retry-after': '5'}}"], {}), "({'status_code': 403, 'text': '', 'headers': {\n 'retry-after': '5'}})\n", (1954, 2026), False, 'from manilaclient.tests.unit import utils\n'), ((2093, 2152), 'mock.Mock', 'mock.Mock', ([], {'return_value': 'retry_after_non_supporting_response'}), '(return_value=retry_after_non_supporting_response)\n', (2102, 2152), False, 'import mock\n'), ((2203, 2361), 'manilaclient.common.httpclient.HTTPClient', 'httpclient.HTTPClient', (['"""http://example.com"""', '"""token"""', 'fake_user_agent'], {'retries': 'retries', 'http_log_debug': '(True)', 'api_version': 'manilaclient.API_MAX_VERSION'}), "('http://example.com', 'token', fake_user_agent,\n retries=retries, http_log_debug=True, api_version=manilaclient.\n API_MAX_VERSION)\n", (2224, 2361), False, 'from manilaclient.common import httpclient\n'), ((2712, 2764), 'mock.patch.object', 'mock.patch.object', (['requests', '"""request"""', 'mock_request'], {}), "(requests, 'request', 
mock_request)\n", (2729, 2764), False, 'import mock\n'), ((3707, 3754), 'mock.patch.object', 'mock.patch.object', (['requests', '"""request"""', 'request'], {}), "(requests, 'request', request)\n", (3724, 3754), False, 'import mock\n'), ((4250, 4297), 'mock.patch.object', 'mock.patch.object', (['requests', '"""request"""', 'request'], {}), "(requests, 'request', request)\n", (4267, 4297), False, 'import mock\n'), ((4838, 4885), 'mock.patch.object', 'mock.patch.object', (['requests', '"""request"""', 'request'], {}), "(requests, 'request', request)\n", (4855, 4885), False, 'import mock\n'), ((5425, 5472), 'mock.patch.object', 'mock.patch.object', (['requests', '"""request"""', 'request'], {}), "(requests, 'request', request)\n", (5442, 5472), False, 'import mock\n'), ((5771, 5826), 'mock.patch.object', 'mock.patch.object', (['requests', '"""request"""', 'bad_401_request'], {}), "(requests, 'request', bad_401_request)\n", (5788, 5826), False, 'import mock\n'), ((6031, 6083), 'mock.patch.object', 'mock.patch.object', (['requests', '"""request"""', 'mock_request'], {}), "(requests, 'request', mock_request)\n", (6048, 6083), False, 'import mock\n'), ((2798, 2826), 'mock.Mock', 'mock.Mock', ([], {'return_value': '(1234)'}), '(return_value=1234)\n', (2807, 2826), False, 'import mock\n'), ((3788, 3816), 'mock.Mock', 'mock.Mock', ([], {'return_value': '(1234)'}), '(return_value=1234)\n', (3797, 3816), False, 'import mock\n'), ((4331, 4359), 'mock.Mock', 'mock.Mock', ([], {'return_value': '(1234)'}), '(return_value=1234)\n', (4340, 4359), False, 'import mock\n'), ((4919, 4947), 'mock.Mock', 'mock.Mock', ([], {'return_value': '(1234)'}), '(return_value=1234)\n', (4928, 4947), False, 'import mock\n'), ((5506, 5534), 'mock.Mock', 'mock.Mock', ([], {'return_value': '(1234)'}), '(return_value=1234)\n', (5515, 5534), False, 'import mock\n')] |
import picamera
from time import sleep
IMG_WIDTH = 800
IMG_HEIGHT = 600
IMAGE_DIR = "/home/pi/Desktop/"
IMG = "snap.jpg"
def vid():
camera = picamera.PiCamera()
camera.vflip = True
camera.hflip = True
camera.brightness = 60
#camera.resolution = (IMG_WIDTH, IMG_HEIGHT)
camera.start_preview()
camera.annotate_text = "Doorbell pressed!"
camera.annotate_text_size = 50
#display video for 5 seconds
sleep(5)
camera.stop_preview()
camera.close()
# https://www.raspberrypi.org/learning/tweeting-babbage/worksheet/
######################################################
# picamera default values:
######################################################
# camera.sharpness = 0
# camera.contrast = 0
# camera.brightness = 50
# camera.saturation = 0
# camera.ISO = 0
# camera.video_stabilization = False
# camera.exposure_compensation = 0
# camera.exposure_mode = 'auto'
# camera.meter_mode = 'average'
# camera.awb_mode = 'auto'
# camera.image_effect = 'none'
# camera.color_effects = None
# camera.rotation = 180
# camera.hflip = False
# camera.vflip = False
# camera.crop = (0.0, 0.0, 1.0, 1.0)
######################################################
# video will record 5 seconds
######################################################
# camera.start_recording('video.h264')
# sleep(5)
# camera.stop_recording()
######################################################
# add text to video:
######################################################
#camera.start_preview()
#camera.annotate_text = "Doorbell pressed!"
#camera.annotate_text_size = 50
#sleep(5)
#camera.capture('/home/pi/Desktop/text.jpg')
#camera.stop_preview()
######################################################
# loop over camera effects:
######################################################
#camera = picamera.PiCamera()
#camera.vflip = True
#camera.hflip = True
#camera.start_preview()
#for effect in camera.IMAGE_EFFECTS:
# camera.image_effect = effect
# camera.annotate_text = "Effect: %s" % effect
# sleep(1)
#camera.stop_preview()
| [
"picamera.PiCamera",
"time.sleep"
] | [((150, 169), 'picamera.PiCamera', 'picamera.PiCamera', ([], {}), '()\n', (167, 169), False, 'import picamera\n'), ((445, 453), 'time.sleep', 'sleep', (['(5)'], {}), '(5)\n', (450, 453), False, 'from time import sleep\n')] |
#!/usr/bin/env python
# Copyright 2017 ARC Centre of Excellence for Climate Systems Science
# author: <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
from flask_sqlalchemy import SQLAlchemy
import os
from datetime import datetime
db = SQLAlchemy()
class Path(db.Model):
"""
A path in the filesystem
"""
id = db.Column(db.Integer, primary_key=True)
path = db.Column(db.Text, unique=True, index=True)
basename = db.Column(db.Text, index=True)
extension = db.Column(db.Text, index=True)
uid = db.Column(db.Integer)
gid = db.Column(db.Integer, index=True)
size_bytes = db.Column(db.Integer)
modified = db.Column(db.Integer)
last_seen = db.Column(db.DateTime, index=True)
content_id = db.Column(db.Integer, db.ForeignKey('content.id'))
content = db.relationship("Content")
def add_from_filename(filename, session):
"""
Given a filename, add it to the database
"""
if not os.path.isfile(filename):
raise IOError("Not a file: %s"%filename)
abspath = os.path.abspath(filename)
path = Path.query.filter_by(path = abspath).one_or_none()
stat = os.stat(filename)
if path is not None:
path.last_seen = datetime.now()
if path.modified < stat.st_mtime:
path.update(stat)
session.add(path)
return path
path = Path()
path.path = abspath
path.update(stat)
path.last_seen = datetime.now()
session.add(path)
return path
def update(self, stat):
"""
Updates the file with new info
"""
self.basename = os.path.basename(self.path)
self.extension = os.path.splitext(self.path)[1]
self.uid = stat.st_uid
self.gid = stat.st_gid
self.size_bytes = stat.st_size
self.modified = stat.st_mtime
# Wipe the content link
self.content = None
class Content(db.Model):
"""
The contents of a file, identified via checksum
May be at multiple paths on the filesystem
sha256 is used for identification, md5 also provided for legacy
:var sha256: sha256 checksum
:var md5: md5 checksum
"""
id = db.Column(db.Integer, primary_key=True)
sha256 = db.Column(db.String, unique=True, index=True, nullable=False)
md5 = db.Column(db.String, index=True, nullable=False)
type = db.Column(db.String)
last_scanned = db.Column(db.DateTime)
paths = db.relationship("Path")
__mapper_args__ = {
'polymorphic_identity':'content',
'polymorphic_on':type
}
netcdf_variable_association = db.Table('netcdf_variable_association', db.Model.metadata,
db.Column('netcdf_id', db.Integer, db.ForeignKey('netcdf_content.id')),
db.Column('concretevar_id', db.Integer, db.ForeignKey('concrete_variable.id'))
)
class NetcdfContent(Content):
"""
Content of a NetCDF file
:var sha256: sha256 checksum
:var md5: md5 checksum
:var variables: list of :class:`~catalogue_flask.model.ConcreteVariable`
"""
id = db.Column(db.Integer, db.ForeignKey('content.id'), primary_key=True)
variables = db.relationship("ConcreteVariable",
secondary=netcdf_variable_association)
__mapper_args__ = {
'polymorphic_identity':'netcdfcontent',
}
class ConcreteVariable(db.Model):
"""
An abstract variable, may have many aliased names
:var cf_name: NetCDF-CF name
:var aliases: List of :class:`~catalogue_flask.model.Variable`
"""
id = db.Column(db.Integer, primary_key=True)
cf_name = db.Column(db.String)
aliases = db.relationship("Variable")
class Variable(db.Model):
"""
An alternate name for a variable
:var name: The name of this alias
:var concrete: The concrete variable this aliases
"""
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String)
concretevariable_id = db.Column(db.Integer, db.ForeignKey('concrete_variable.id'), index=True)
concrete = db.relationship("ConcreteVariable")
| [
"os.stat",
"os.path.splitext",
"os.path.isfile",
"datetime.datetime.now",
"os.path.basename",
"os.path.abspath",
"flask_sqlalchemy.SQLAlchemy"
] | [((792, 804), 'flask_sqlalchemy.SQLAlchemy', 'SQLAlchemy', ([], {}), '()\n', (802, 804), False, 'from flask_sqlalchemy import SQLAlchemy\n'), ((1617, 1642), 'os.path.abspath', 'os.path.abspath', (['filename'], {}), '(filename)\n', (1632, 1642), False, 'import os\n'), ((1725, 1742), 'os.stat', 'os.stat', (['filename'], {}), '(filename)\n', (1732, 1742), False, 'import os\n'), ((2057, 2071), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2069, 2071), False, 'from datetime import datetime\n'), ((2236, 2263), 'os.path.basename', 'os.path.basename', (['self.path'], {}), '(self.path)\n', (2252, 2263), False, 'import os\n'), ((1519, 1543), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (1533, 1543), False, 'import os\n'), ((1802, 1816), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1814, 1816), False, 'from datetime import datetime\n'), ((2289, 2316), 'os.path.splitext', 'os.path.splitext', (['self.path'], {}), '(self.path)\n', (2305, 2316), False, 'import os\n')] |
from src.books.models import Book
from src.books.schema import BookOut
from ninja import Router
router = Router()
@router.get("/", response=list[BookOut])
def get_books(request):
return Book.objects.all()
| [
"ninja.Router",
"src.books.models.Book.objects.all"
] | [((107, 115), 'ninja.Router', 'Router', ([], {}), '()\n', (113, 115), False, 'from ninja import Router\n'), ((194, 212), 'src.books.models.Book.objects.all', 'Book.objects.all', ([], {}), '()\n', (210, 212), False, 'from src.books.models import Book\n')] |
# generated by datamodel-codegen:
# filename: Organization.schema.json
# timestamp: 1985-10-26T08:21:00+00:00
from __future__ import annotations
from pydantic import BaseModel, Field
class Schema(BaseModel):
__root__: str = Field(..., description='Identifier string of this object.')
| [
"pydantic.Field"
] | [((237, 296), 'pydantic.Field', 'Field', (['...'], {'description': '"""Identifier string of this object."""'}), "(..., description='Identifier string of this object.')\n", (242, 296), False, 'from pydantic import BaseModel, Field\n')] |
import json
from typing import Union, Optional, Tuple, List
import numpy as np
from sklearn.feature_extraction import DictVectorizer
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer, TfidfTransformer
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from shared import LANG_TO_INT
class DataSplitter:
def __init__(self, path: str, vectorizer: Optional[Union[DictVectorizer, TfidfVectorizer, CountVectorizer]] = None, seed: Optional[int] = None, scale: bool = True):
self.data_path = path
self.vectorizer = vectorizer or DictVectorizer(sparse=False)
self.transformer = TfidfTransformer() if type(self.vectorizer) == CountVectorizer else None
self.scale = type(self.vectorizer) not in (TfidfVectorizer, CountVectorizer) and scale
self.scaler = StandardScaler()
self.random_seed = seed
def collect_features_data(self) -> Tuple[Union[np.ndarray, List[str]], np.ndarray]:
if type(self.vectorizer) == DictVectorizer:
return self._collect_dict_vectorizer_features()
elif type(self.vectorizer) in (TfidfVectorizer, CountVectorizer):
return self._collect_tfidf_features()
else:
raise NotImplementedError
def _collect_dict_vectorizer_features(self) -> Tuple[np.ndarray, np.ndarray]:
examples = []
ys = []
with open(self.data_path, "r") as file:
for line in file:
info = json.loads(line)
examples.append(info["features"])
ys.append(LANG_TO_INT[info["lang"]])
return np.array(examples), np.array(ys)
def _collect_tfidf_features(self) -> Tuple[List[str], np.ndarray]:
examples = []
ys = []
with open(self.data_path, "r") as file:
for line in file:
info = json.loads(line)
examples.append(info["code"])
ys.append(LANG_TO_INT[info["lang"]])
return examples, np.array(ys)
def prepare_data(self, data: Union[np.ndarray, List[str]], fit: bool = False) -> np.ndarray:
if type(self.vectorizer) in (TfidfVectorizer, CountVectorizer):
assert not self.scale
if fit:
if self.scale:
transformed = self.scaler.fit_transform(self.vectorizer.fit_transform(data))
else:
transformed = self.vectorizer.fit_transform(data)
elif self.scale:
transformed = self.scaler.transform(self.vectorizer.transform(data))
else:
transformed = self.vectorizer.transform(data)
if type(transformed) != np.ndarray:
transformed = transformed.toarray()
return transformed
def split_train_vali_test(self, X: Union[np.ndarray, List[str]], y: np.ndarray, split_1: float = 0.75, split_2: float = 0.66) -> Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray, np.ndarray]:
X_tv, X_test, y_tv, y_test = train_test_split(X, y, train_size=split_1, random_state=self.random_seed)
X_train, X_vali, y_train, y_vali = train_test_split(X_tv, y_tv, train_size=split_2, random_state=self.random_seed)
split_data = (self.prepare_data(X_train, fit=True), self.prepare_data(X_vali), self.prepare_data(X_test), y_train, y_vali, y_test)
if type(self.vectorizer) == CountVectorizer:
for split in split_data:
self.transformer.fit_transform(split.reshape(1, -1))
return split_data
| [
"sklearn.feature_extraction.text.TfidfTransformer",
"json.loads",
"sklearn.feature_extraction.DictVectorizer",
"sklearn.model_selection.train_test_split",
"sklearn.preprocessing.StandardScaler",
"numpy.array"
] | [((870, 886), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (884, 886), False, 'from sklearn.preprocessing import StandardScaler\n'), ((3031, 3104), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X', 'y'], {'train_size': 'split_1', 'random_state': 'self.random_seed'}), '(X, y, train_size=split_1, random_state=self.random_seed)\n', (3047, 3104), False, 'from sklearn.model_selection import train_test_split\n'), ((3148, 3227), 'sklearn.model_selection.train_test_split', 'train_test_split', (['X_tv', 'y_tv'], {'train_size': 'split_2', 'random_state': 'self.random_seed'}), '(X_tv, y_tv, train_size=split_2, random_state=self.random_seed)\n', (3164, 3227), False, 'from sklearn.model_selection import train_test_split\n'), ((624, 652), 'sklearn.feature_extraction.DictVectorizer', 'DictVectorizer', ([], {'sparse': '(False)'}), '(sparse=False)\n', (638, 652), False, 'from sklearn.feature_extraction import DictVectorizer\n'), ((680, 698), 'sklearn.feature_extraction.text.TfidfTransformer', 'TfidfTransformer', ([], {}), '()\n', (696, 698), False, 'from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer, TfidfTransformer\n'), ((1657, 1675), 'numpy.array', 'np.array', (['examples'], {}), '(examples)\n', (1665, 1675), True, 'import numpy as np\n'), ((1677, 1689), 'numpy.array', 'np.array', (['ys'], {}), '(ys)\n', (1685, 1689), True, 'import numpy as np\n'), ((2044, 2056), 'numpy.array', 'np.array', (['ys'], {}), '(ys)\n', (2052, 2056), True, 'import numpy as np\n'), ((1521, 1537), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (1531, 1537), False, 'import json\n'), ((1902, 1918), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (1912, 1918), False, 'import json\n')] |
#!/usr/bin/python
#Sorts based on top 50 CMetric, all callPaths - CMetric
#, all call paths - call path count and all samples
from __future__ import print_function
from bcc import BPF, PerfType, PerfSWConfig
from bcc import BPF
import sys
import ctypes as ct # For mapping the 'C' structure to Python
import argparse #For parsing command line arguments
import datetime
import os
import operator
import subprocess
import re
# arg validation
def positive_int(val):
    """argparse type function: accept only integers >= 0.

    Raises argparse.ArgumentTypeError with a user-facing message on
    non-integer or negative input.
    """
    try:
        number = int(val)
    except ValueError:
        raise argparse.ArgumentTypeError("must be an integer")
    else:
        if number < 0:
            raise argparse.ArgumentTypeError("must be positive")
        return number
def positive_nonzero_int(val):
    """argparse type function: accept only integers > 0.

    Validation is inlined (same checks, same messages, same order as the
    non-negative variant, followed by the nonzero check).
    """
    try:
        number = int(val)
    except ValueError:
        raise argparse.ArgumentTypeError("must be an integer")
    if number < 0:
        raise argparse.ArgumentTypeError("must be positive")
    if number == 0:
        raise argparse.ArgumentTypeError("must be nonzero")
    return number
# Command-line interface: -x (path to the executable to profile) is the only
# required option; the remaining options tune the sampling frequency, the
# active-thread threshold that triggers stack capture, stack depth, ring
# buffer size, and which tasks/stacks are traced.
parser = argparse.ArgumentParser(description="Generates stack traces for critical code sections")
parser.add_argument("-x", metavar="<Path to executable>", dest = "targetPath", required = True, help = "Full path to the executable file to be profiled - Required")
parser.add_argument("-t", metavar="<Threshold>", dest = "threshold", type = positive_int, required = False, help = "Number active threads to trigger stack trace. Default = total no. of threads/2" )
parser.add_argument("-f", metavar="<Sampling Frequency>", dest = "sample_freq", type = positive_int, required = False, help = "Sampling frequency in Hz. Default = 333Hz (equivalent to 3 ms)" )
parser.add_argument("-d", metavar="<Stack Depth>", dest = "stack_depth", type = positive_int, required = False, help = "Maximum Stack depth for stack unwinding. Default = 10" )
parser.add_argument("-b", metavar="<Ring buffer Size>", dest = "buffer", type = positive_int, required = False, help = "Number of pages to be allocated for the ring buffer, Default = 64" )
parser.add_argument("--threads_only", help = "Trace threads alone", action = "store_true")
parser.add_argument("--process_only", help = "Trace processes alone", action = "store_true")
parser.add_argument("--trace_lib", help = "Include library paths in tracing", action = "store_true")
parser.add_argument("--kernel_stack", help = "Get kernel stack traces", action = "store_true")
args = parser.parse_args()
# define BPF program
bpf_text = """
#include <uapi/linux/ptrace.h>
#include <uapi/linux/bpf_perf_event.h>
#include <linux/sched.h>
#include <linux/types.h>
//Structure to pass information from the kernel probe to the user probe
struct key_t {
u32 tid; //Thread ID
u32 tgid; // Parent thread ID
u64 cm; //CMetric
int source; // 0 - sampling, 1 - critical time slice, 2 - non-critical time slice
int user_stackid;
int kernel_stackid;
u64 inst_ptr;
int store_stackTop;
};
BPF_HASH(threadList, u32, u32); //Stores threadIds of participating threads - Global
BPF_HASH(threadCount, u32, u32, 1); //Stores number of active threads - Global
BPF_HASH(tsp, u32, u64, 1); //Stores timestamp of previous event
BPF_ARRAY(count, u32, 1); //Stores the total thread count (parent not included)
BPF_HASH(global_CM, u32, u64, 1); //Keeps track of cumulative sum of CMetric - Global
BPF_PERCPU_ARRAY(local_CM, u64, 1); // To store the snapshot of global_CM when a thread is switched in
BPF_HASH(CM_hash, u32, u64); // Criticality Metric hash map for each thread
BPF_HASH(GLOBAL_WT_TC, u32, u64,1); //Stores the cumulative sum of weighted thread Count - Global
BPF_PERCPU_ARRAY(LOCAL_WT_TC, u64,1); //Stores the snapshot of GLOBAL_WT_TC - CPU Local
BPF_PERCPU_ARRAY(inTS, u64, 1); //Store the time at which a thread was switched in - CPU Local
BPF_PERF_OUTPUT(events); //Buffer to write event details
BPF_STACK_TRACE(user_stacktraces, 4086);
BPF_STACK_TRACE(kernel_stacktraces, 4086);
/*sched_switch_args {
// from /sys/kernel/debug/tracing/events/sched/sched_switch/format
u64 __unused__;
char prev_comm[16];
pid_t prev_pid;
int prev_prio;
long prev_state;
char next_comm[16];
pid_t next_pid;
int next_prio;
};
*/
TRACEPOINT_PROBE(task, task_rename){
u32 threadId, totalCount;
char comm[16];
u32 zero32 = 0, one = 1;
int len = bpf_probe_read_str(&comm, sizeof(args->newcomm), args->newcomm);
if(!len)
return 0;
//Compare the command argument with traced command
if(PGM_FILTER){
bpf_probe_read(&threadId, sizeof(threadId), &args->pid);
threadList.insert(&threadId, &zero32); //Store the thread ID in the hash startTracing.lookup_or_init(&threadId, &zero32);
u32 *countVal = count.lookup_or_init(&zero32, &zero32);
lock_xadd(countVal,1);
}
return 0;
}
TASK_NEWTASK
int do_perf_event(struct bpf_perf_event_data *ctx){
u32 zero32 = 0;
u32 threadId = bpf_get_current_pid_tgid();
u32 *val = threadList.lookup(&threadId);
if(!val)
return 0;
u32 *activeCount = threadCount.lookup(&zero32);
if(!activeCount)
{return 0;}
u32 tempCount;
bpf_probe_read(&tempCount, sizeof(tempCount), activeCount);
u32 *totalThreadCount = count.lookup(&zero32);
if(!totalThreadCount)
return 0;
u32 totalCount;
bpf_probe_read(&totalCount, sizeof(totalCount), totalThreadCount);
if( (tempCount <= STACK_FILTER) || tempCount ==1 ){
struct key_t key = {};
key.tid = bpf_get_current_pid_tgid();
key.tgid = bpf_get_current_pid_tgid()>>32;
key.cm = 0;
key.source = 0;
if(TRACE_THREADS_ONLY){
key.inst_ptr = PT_REGS_IP(&ctx->regs); //Get the instruction pointer
events.perf_submit(ctx, &key, sizeof(key)); //Write details to the ring buffer
}
}
return 0;
}
TRACEPOINT_PROBE(sched, sched_process_exit){
u32 zero32 = 0;
//Get the current tid
u32 threadId;
bpf_probe_read(&threadId, sizeof(threadId), &args->pid);
//Check if the thread ID belongs to the application
u32 *val = threadList.lookup(&threadId);
if(!val)
return 0;
//Decrement the number of threads
u32 *countVal = count.lookup(&zero32);
if(!countVal)
return 0;
//lock_xadd(countVal, -1);
countVal -= 1;
return 0;
}
TRACEPOINT_PROBE(sched, sched_wakeup){
u32 targetID, zero32 = 0, status, one32 = 1;
//Check if thread being woken up belongs to the application
bpf_probe_read(&targetID, sizeof(targetID), &args->pid);
u32 *list = threadList.lookup(&targetID);
if (!list)
return 0;
/////////////////////////////////////////////////////////////////////
if(args->success){ //If waking was successful
u32 *activeCount = threadCount.lookup(&zero32);
if(!activeCount)
{return 0;}
u32 prev_tCount; //Local variable to store thread count
bpf_probe_read(&prev_tCount, sizeof(prev_tCount), activeCount);
//Increment thread count if thread was inactive
bpf_probe_read(&status, sizeof(status), list);
if(status == 0)
lock_xadd(activeCount,1);
//Set thread as active
threadList.update(&targetID,&one32);
}
return 0;
}
//Tracepoint probe for the Sched_Switch tracepoint
TRACEPOINT_PROBE(sched, sched_switch){
u32 one32=1, arrayKey=0, zero32=0;
u32 *listVal, *listVal1; //Pointers to entries in threadList map
u32 next_pid, prev_pid;
u64 zero64 = 0;
//Copy data to BPF stack
bpf_probe_read(&next_pid, sizeof(next_pid), &args->next_pid);
bpf_probe_read(&prev_pid, sizeof(prev_pid), &args->prev_pid);
//Look up thread ids in the list created by sys_clone()
listVal1 = threadList.lookup(&next_pid);
listVal = threadList.lookup(&prev_pid);
u32 prev=0, next=0;
if(listVal){
bpf_probe_read(&prev, sizeof(prev),listVal);
prev = 1;
}
if(listVal1){
bpf_probe_read(&next, sizeof(next),listVal1);
next = 1;
}
//Return if the switching threads do not belong to the application
if( !prev && !next)
return 0;
//////////////////////////////////////////////////////////////////////
//Calculate values common for all switching events
u64 interval, intervalCM;
u64 *oldTS = tsp.lookup_or_init(&arrayKey, &zero64);
if(!oldTS)
{return 0;}
u64 tempTS;
bpf_probe_read(&tempTS, sizeof(tempTS), oldTS); //Copy Old time from bpf map to local variable
u64 newTS = bpf_ktime_get_ns();
tsp.update(&arrayKey, &newTS); //Update time stamp
//The thread count is initialized to one as the first switch in event is always missed.
u32 *ptr_threadCount = threadCount.lookup_or_init(&arrayKey, &one32);
if(!ptr_threadCount)
{return 0;}
int prev_tc; //Temporary variable to store thread count for the previous switching interval
bpf_probe_read(&prev_tc, sizeof(prev_tc),ptr_threadCount);
if(newTS < tempTS)//Very rarely, event probes are triggered out of order, which are ignored
return 0;
if(tempTS==0 || prev_tc==0){ //If first event or no active threads in during the previous interval, prev interval = 0
interval = 0;
}
else
interval = (newTS - tempTS); //Switching interval
u64 *ptr_globalCM = global_CM.lookup_or_init(&arrayKey, &zero64);
if(!ptr_globalCM)
return 0;
//Calculate the CMetric for previous interval and add it to global_CM
if (interval != 0){
intervalCM = interval/prev_tc;
lock_xadd(ptr_globalCM, intervalCM);
}
//Calculate weighted thread count for previous interval
u64 wt_threadCount = (interval) * prev_tc;
u64 *g_wt_threadCount = GLOBAL_WT_TC.lookup_or_init(&arrayKey, &zero64);
if(!g_wt_threadCount)
return 0;
lock_xadd(g_wt_threadCount, wt_threadCount); //Add to global weighted thread count
//////////////////////////////////////////////////////////////////////
//If previous thread was a peer thread
if(prev){
//Decrement active thread count only if thread switched out is not in RUNNING (0) state
if(args->prev_state != TASK_RUNNING){
if(prev_tc > 0 ){
lock_xadd(ptr_threadCount, -1);
}
//Mark the thread as inactive in the threadList hash map
threadList.update(&prev_pid,&zero32);
}
else
//Mark the thread as active as thread is switched out to TASK_RUNNING state
threadList.update(&prev_pid,&one32);
u64 temp;
//Get updated CM
bpf_probe_read(&temp, sizeof(temp),ptr_globalCM);
//Get snapshot of global_CM which was stored in local_CM when prev_pid was switched in
u64 *cpuCM = local_CM.lookup_or_init(&arrayKey, &zero64);
if(!cpuCM)
{return 0;}
//Update the CM of the thread by adding the CM for the time slice
u64 updateCM = temp - (*cpuCM);
u64 *tCM = CM_hash.lookup_or_init(&prev_pid, &zero64);
if(!tCM)
{return 0;}
*tCM = *tCM + updateCM;
//Get LOCAL_WT_TC, the thread's weighted threadCount at the time it was switched in.
u64 *t_wt_threadCount;
t_wt_threadCount = LOCAL_WT_TC.lookup_or_init(&arrayKey, &zero64);
if(!t_wt_threadCount)
{return 0;}
u64 temp_g_wt_threadCount, temp_t_wt_threadCount;
bpf_probe_read(&temp_g_wt_threadCount, sizeof(temp_g_wt_threadCount), g_wt_threadCount);
bpf_probe_read(&temp_t_wt_threadCount, sizeof(temp_t_wt_threadCount), t_wt_threadCount);
//Reset the per-CPU CMetric counter
local_CM.update(&arrayKey, &zero64);
//Reset local weighted ThreadCount counter
LOCAL_WT_TC.update(&arrayKey, &zero64);
//Get time when this thread was switched in
oldTS = inTS.lookup_or_init(&arrayKey, &zero64);
if(!oldTS)
return 0;
u64 switch_in_time, timeSlice;
bpf_probe_read(&switch_in_time, sizeof(switch_in_time), oldTS);
timeSlice = (newTS - switch_in_time);
//Reset switch in time
inTS.update(&arrayKey, &zero64);
u32 *totalThreadCount = count.lookup(&zero32);
if(!totalThreadCount)
return 0;
u32 totalCount;
bpf_probe_read(&totalCount, sizeof(totalCount), totalThreadCount);
//Calculate the average number of threads
u32 ratio = (temp_g_wt_threadCount - temp_t_wt_threadCount) / timeSlice;
struct key_t key = {};
key.tid = prev_pid;
key.tgid = bpf_get_current_pid_tgid()>>32;
key.cm = updateCM;
if( (ratio <= STACK_FILTER || ratio == 1) && TRACE_THREADS_ONLY){ //If thread_avg < threshold and not parent thread
key.user_stackid = user_stacktraces.get_stackid(args, BPF_F_USER_STACK);
if (GET_KERNEL_STACK && args->prev_state != TASK_RUNNING)
key.kernel_stackid= kernel_stacktraces.get_stackid(args, 0);
else
key.kernel_stackid = -1;
key.source = 1;
}
else{
key.user_stackid = 0;
key.source = 2;
}
key.store_stackTop = ((prev_tc <= STACK_FILTER) || prev_tc == 1)? 1:0;
if(TRACE_THREADS_ONLY)
events.perf_submit(args, &key, sizeof(key));
}
//Next thread is a peer thread
if(next){
//Get the previous state of this thread from the THREADLIST
u32 tempNext;
bpf_probe_read(&tempNext, sizeof(tempNext), listVal1);
//If the thread was not in TASK_RUNNING state
if(tempNext == 0){
lock_xadd(ptr_threadCount, 1); //Increment the number of active threads
}
threadList.update(&next_pid, &one32); //Set the thread status to RUNNING state
u64 temp;
//Get updated CM and store it to the CPU counter
bpf_probe_read(&temp, sizeof(temp),ptr_globalCM);
local_CM.update(&arrayKey,&temp);
//Store switch in time
inTS.update(&arrayKey, &newTS);
//Store the local cumulative weighted thread count
u64 temp_g_wt_threadCount;
bpf_probe_read(&temp_g_wt_threadCount, sizeof(temp_g_wt_threadCount), g_wt_threadCount);
LOCAL_WT_TC.update(&arrayKey, &temp_g_wt_threadCount);
}
return 0;
}
"""
task_newtask_pgm = """TRACEPOINT_PROBE(task, task_newtask){
u32 zero32=0;
char comm[TASK_COMM_LEN];
bpf_get_current_comm(&comm, sizeof(comm));
//We can also check for the parent id in the threadlist
//But if the parent was created before starting tracing this can fail
//So we check the command line instead
//If application is being traced
if(PGM_FILTER){
u32 threadId;
bpf_probe_read(&threadId, sizeof(threadId), &args->pid);
u32 *val = threadList.lookup_or_init(&threadId, &zero32); //Store the thread ID in the hash
u32 *countVal = count.lookup_or_init(&zero32, &zero32);
lock_xadd(countVal,1);
}
return 0;
}"""
#Path to executable
targetPath = ""
#Executable name
pgmName = ""
# --- Assemble the text fragments that customize the BPF program ---
# Defaults: splice in the task_newtask probe and emit events unconditionally.
task_newtask_probe = task_newtask_pgm
trace_threads_only = '1'
get_kernel_stack = '0'
if args.threads_only:
    # Restrict event emission to real threads (tid != tgid)
    trace_threads_only = 'key.tgid != key.tid'
if args.process_only:
    # Drop the task_newtask probe so only task_rename registers tasks
    task_newtask_probe = ''
if args.kernel_stack:
    get_kernel_stack = '1'


def _comm_filter(name):
    """Build the C boolean expression that matches a task comm against *name*.

    Like the original fixed-size filter, at most the first four characters
    are compared.  For names shorter than four characters the terminating
    NUL byte is also checked so that e.g. "ab" does not match "abc".
    """
    checks = ["comm[%d]=='%c'" % (i, ch) for i, ch in enumerate(name[:4])]
    if len(name) < 4:
        checks.append("comm[%d]=='\\0'" % len(name))
    return ' && '.join(checks)


#Get the path to target
if args.targetPath is not None:
    targetPath = args.targetPath.rstrip(os.sep)
    pgmName = os.path.basename(targetPath)
if not pgmName:
    # basename came up empty (e.g. the path was only separators)
    parser.error("could not derive an executable name from -x %s" % args.targetPath)
# BUGFIX: previously this indexed pgmName[0..3] unconditionally, which raised
# IndexError for executables whose names are shorter than four characters.
pgm_filter = _comm_filter(pgmName)
if args.threshold is not None:
    stack_filter = '%d' % args.threshold
else:
    # Default threshold: half the total number of threads (computed in BPF)
    stack_filter = 'totalCount/2'
# Sampling frequency in Hz (default 333 Hz, roughly one sample per 3 ms)
freq = args.sample_freq if args.sample_freq is not None else 333
# Maximum stack unwinding depth (kept for interface compatibility)
depth = args.stack_depth if args.stack_depth is not None else 10
# Ring-buffer size in pages for the perf output channel
buffer_size = args.buffer if args.buffer is not None else 64
# Substitute the placeholders in the BPF C program
bpf_text = bpf_text.replace('TASK_NEWTASK', task_newtask_probe)
bpf_text = bpf_text.replace('PGM_FILTER', pgm_filter)
bpf_text = bpf_text.replace('STACK_FILTER', stack_filter)
bpf_text = bpf_text.replace('TRACE_THREADS_ONLY', trace_threads_only)
bpf_text = bpf_text.replace('GET_KERNEL_STACK', get_kernel_stack)
#Print the customized program
#print(bpf_text)
print ("\n\n---Press Ctrl-C to start post processing---")
# load BPF program (BCC compiles the customized C source at this point)
b = BPF(text=bpf_text)
# Sample every CPU with a software CPU-clock event at `freq` Hz; each sample
# invokes do_perf_event in kernel context.
b.attach_perf_event(ev_type=PerfType.SOFTWARE,
    ev_config=PerfSWConfig.CPU_CLOCK, fn_name="do_perf_event",
    sample_freq=freq)
class Data(ct.Structure):
    # Python-side mirror of the perf-event record emitted by the BPF program.
    # Field names, order and C types must stay in sync with the struct the BPF
    # side submits over the ring buffer; do not reorder.
    _fields_ = [
        ("tid", ct.c_uint),              # thread id of the event
        ("tgid", ct.c_uint),             # thread-group (process) id
        ("cm", ct.c_ulonglong),          # criticality metric for this event
        ("source", ct.c_uint),           # event kind: 0=sample, 1=critical stack, 2=reset
        ("user_stack_id", ct.c_int),     # id into the user stack-trace table (<0: none)
        ("kernel_stack_id", ct.c_int),   # id into the kernel stack-trace table (<0: none)
        ("inst_ptr", ct.c_ulonglong),    # sampled instruction pointer
        ("store_stackTop", ct.c_int)]    # whether the stack-top address may be stored
user_stack_traces = b["user_stacktraces"]      # BPF stack-trace table (user space)
kernel_stack_traces = b["kernel_stacktraces"]  # BPF stack-trace table (kernel space)
sampleAddr = dict() #Stores addresses corresponding to samples, keyed by thread id
CMetric = dict() #Dictionary to store CMetric per critical stack-trace id
CM_Entry = 1 #Number of CMetric entry (next unique id for a critical stack trace)
CMetric_sampleAddr = dict() # Stores the sample address for each Cmetric - to get line of code
CMetric_callPath = dict() # Stores the call path for each CMetric
user_symbolMap = dict() #Store symbols corresponding addresses (user-space symbol cache)
kernel_symbolMap = dict()  # kernel-space symbol cache (addr -> ksym string)
total_switch = 0  # total number of context switches observed
noSample = 0  # number of critical stack traces that had no samples recorded
###############################################
#Function to trim the symbols of arguments
def trimSymbol(string_ret):
    """Normalize a resolved symbol string to ``name()[module]`` form.

    Argument lists (``name(args)``) and ``@``-suffixes (e.g. ``name@plt``)
    are stripped; a trailing ``[module]`` part, when present, is kept.
    """
    if '[' not in string_ret:
        # No module suffix: keep only the part before any argument list.
        return string_ret.split('(', 1)[0] + '()'
    name_part, module_part = string_ret.rsplit('[', 1)
    if '@' in name_part:
        # e.g. "symbol@plt " -> "symbol"
        base = name_part.split('@', 1)[0]
    else:
        base = name_part.split('(', 1)[0]
    return base + '()[' + module_part
################################################
def getKernelStack(kernel_stack_id):
    """Resolve a kernel stack id into a "\\n\\t<---"-separated call-path string.

    Symbol lookups (b.ksym) are cached in the module-level kernel_symbolMap.
    Returns "" for a negative (invalid) stack id or an empty walk.
    """
    frames = [] if kernel_stack_id < 0 else \
        kernel_stack_traces.walk(kernel_stack_id)
    frame_names = []
    for addr in frames:
        if addr in kernel_symbolMap:
            sym = kernel_symbolMap[addr]
        else:
            # Cache the (comparatively expensive) kernel symbol lookup.
            sym = b.ksym(addr)
            kernel_symbolMap[addr] = sym
        # Drop the "+offset" suffix and surrounding junk characters.
        frame_names.append(sym.split('+', 1)[0].strip("\n ' '"))
    return "\n\t<---".join(frame_names)
################################################
def print_event(cpu, data, size):
    """Perf ring-buffer callback: classify each event by `source`.

    source == 0: instruction-pointer sample -> record it per thread.
    source == 2: non-critical context switch -> drop that thread's samples.
    source == 1: critical context switch -> resolve the user (and optionally
                 kernel) stack trace and record its CMetric, samples and path.
    """
    global CM_Entry #Unique id for stack traces
    global total_switch # Total number of context switches
    global noSample #Stores the number of switches without samples
    event = ct.cast(data, ct.POINTER(Data)).contents
    flag = 0
    user_call_path = ""
    kernel_call_path = ""
    if event.source == 0: #Sample data
        if event.inst_ptr in user_symbolMap:
            string_ret = user_symbolMap[event.inst_ptr]
        else:
            #Map address to symbols
            string_ret = b.sym(event.inst_ptr, event.tgid, show_offset=False, show_module = True)
            string_ret = trimSymbol(string_ret)
            user_symbolMap[event.inst_ptr]=string_ret
        if "unknown" in string_ret:
            return
        #Add to list of samples for this thread ID
        if event.tid not in sampleAddr:
            sampleAddr[event.tid] = list()
        if (string_ret.find(pgmName) >= 0): # If address belongs to application address map
            # Keep the raw hex address so it can later be fed to addr2line.
            sampleAddr[event.tid].append("0x" + format(event.inst_ptr, 'x'))
        else:
            # Library sample: keep the symbol string, no source line available.
            sampleAddr[event.tid].append(string_ret)
        return
    if event.source == 2: # Reset Sample array if time slice not critical
        if event.tid in sampleAddr:
            sampleAddr[event.tid]=[]
        total_switch += 1
        return
    if event.source == 1: #Critical Stack trace
        skip_stackTop = 0
        appl_addr = 0
        total_switch += 1
        user_stack =[] if event.user_stack_id < 0 else \
            user_stack_traces.walk(event.user_stack_id)
        #For each address in the stack trace, get the symbols and create call path
        for addr in user_stack:
            if addr in user_symbolMap:
                string_ret = user_symbolMap[addr]
            else:
                string_ret = b.sym(addr, event.tgid, show_offset=False, show_module = True)
                string_ret = trimSymbol(string_ret)
                user_symbolMap[addr]=string_ret
            if "unknown" in string_ret:
                # Unresolvable top frame: remember not to store it as a sample.
                if flag == 0:
                    skip_stackTop = 1
                continue
            if (string_ret.find(pgmName) >= 0): # If address belongs to application address map
                appl_addr = 1
            if appl_addr or args.trace_lib:
                if flag == 0: #Store top address of stack trace, if no samples
                    if event.tid not in sampleAddr:
                        sampleAddr[event.tid] = list()
                    if len(sampleAddr[event.tid]) ==0 and event.store_stackTop == 1 and skip_stackTop ==0:
                        noSample += 1
                        if appl_addr:
                            # "0xz" prefix marks a stack-top (return) address,
                            # distinguishing it from a real sample address.
                            sampleAddr[event.tid].append("0xz" + format(addr, 'x'))
                    user_call_path = user_call_path+ (string_ret.split('+',1)[0]).strip("\n ' '")
                else: #If not stack top address
                    user_call_path = user_call_path + "\n\t" + "<---" + (string_ret.split('+',1)[0]).strip("\n ' '")
                flag += 1
                if flag==depth: #Number of stack frames
                    break
        if flag>0:
            if get_kernel_stack == '1' and event.kernel_stack_id >= 0:
                kernel_call_path = getKernelStack(event.kernel_stack_id)
            CMetric[CM_Entry] = event.cm #Stores Cmetric of this critical stack trace
            #Stores sample addresses of this critical stack trace
            CMetric_sampleAddr[CM_Entry] = list(sampleAddr[event.tid])
            CMetric_callPath[CM_Entry] = (user_call_path, kernel_call_path) #Stores call path of this critical stack trace
            CM_Entry += 1
            sampleAddr[event.tid]=[]
    return
#Function to execute for each event written to the ring buffer
b["events"].open_perf_buffer(print_event, page_cnt=buffer_size)
#To print criticality metric of each thread
threadCM = b.get_table("CM_hash")
# NOTE(review): `sum` shadows the built-in of the same name; it accumulates the
# per-thread criticality metrics during post-processing below.
sum = 0;
criticalSwitch = dict()
criticalSwitch_allCM= dict()  # user call path -> [accumulated CMetric, frequency]
criticalLine = dict()
critLineSamples = dict()
critLineSamples_all = dict()  # user call path -> {sample address/symbol: count}
critKernelPaths = dict()  # user call path -> {kernel call path: count}
allFunction = dict()
allLines = dict()
addrMap_fun = dict()  # address string -> resolved function name (addr2line cache)
addrMap_line= dict()  # address string -> resolved source line (addr2line cache)
def combine_Results(function, line, count, resultFunc, resultLine, tempFunc, tempLine):
    """Accumulate `count` hits of (function, line) into the result maps.

    resultFunc/resultLine aggregate across the top critical paths and are
    updated only when `function` is non-empty; tempFunc/tempLine belong to
    the path currently being processed and are always updated.
    """
    def _bump(func_counts, line_counts):
        # Increment the function's total and its per-line breakdown.
        func_counts[function] = func_counts.get(function, 0) + count
        per_line = line_counts.setdefault(function, dict())
        per_line[line] = per_line.get(line, 0) + count

    if function:
        _bump(resultFunc, resultLine)
    _bump(tempFunc, tempLine)
    return
def combine_samples(addrList, resultFunc, resultLine):
    """Resolve a path's sample addresses to functions/lines and print them.

    addrList maps sample address strings (or library symbol strings) to hit
    counts. Application addresses are resolved through addr2line (cached in
    addrMap_fun/addrMap_line); results are merged into resultFunc/resultLine
    via combine_Results, and this path's own top functions/lines are printed.
    """
    tempFunc = dict()
    tempLine = dict()
    function = ""
    line = ""
    addrStringList =[]
    addrCountList = []
    stackTopList = []
    for element, count in addrList.items():
        specialString = ""
        if "0x" in element:
            #'0xz' pattern in the address denotes this is a stack top address(return address)
            # not a sample address
            if 'z' in element:
                specialString=" (StackTop)"
                #remove 'z'
                element = element.replace('z','')
            else:
                specialString = ""
            if element in addrMap_fun:
                function = addrMap_fun[element]
                line = addrMap_line[element]
                #Add (StackTop) label to the line
                if specialString:
                    line = line + specialString
                #Combine all samples for this path
                combine_Results(function, line, count, \
                    resultFunc, resultLine, tempFunc, tempLine);
            else:
                #Prepre to call addr2line
                addrStringList.append(element)
                addrCountList.append(count)
                if specialString:
                    stackTopList.append(1)
                else:
                    stackTopList.append(0)
                #result = str(subprocess.check_output(['addr2line', '-s', '-C', '-f', '-p', '-i', element, '-e', "/data/rn1115/cfd/test/IncNavierStokesSolver-g"], stderr=subprocess.STDOUT))
        else:
            #library functions
            function = element
            line = ""
            combine_Results(function, line, count, resultFunc, \
                resultLine, tempFunc, tempLine)
    #Map address to function name and line of code
    if addrStringList != []:
        cmd = ['addr2line', '-s', '-C', '-f', '-p']
        cmd.extend(addrStringList)
        cmdLast = ['-e', targetPath]
        cmd.extend(cmdLast)
        # NOTE(review): under Python 3 check_output returns bytes and str()
        # yields "b'...'", which would break the '\n' split below; this code
        # appears to assume Python 2 semantics -- confirm the target runtime.
        sourceLines = str(subprocess.check_output(cmd, stderr=subprocess.STDOUT))
        for result in sourceLines.split('\n'):
            specialString = ""
            if result:
                count = addrCountList.pop(0)
                if stackTopList.pop(0) == 1:
                    specialString = " (StackTop)"
                else:
                    specialString = ""
                result = result.strip("\n ' '")
                if result:
                    #Retrieve function and line number from addr2line result
                    result = result.split('\n', 1)[0]
                    result = result.strip("\n ' '")
                    if " at " in result:
                        function = result.split(" at ", 1)[0]
                        line = result.split(" at ", 1)[1]
                    function = function.strip()
                    # NOTE(review): `element` below is the stale loop variable
                    # from the earlier addrList loop, not the address this
                    # addr2line result corresponds to -- the cache entries are
                    # written under the wrong key. Fixing it requires pairing
                    # addrStringList with addr2line's output lines; left
                    # unchanged here to avoid altering output alignment.
                    if function:
                        addrMap_fun[element] = function
                    line = line.strip()
                    if line:
                        line = line.split(' (', 1)[0]
                        addrMap_line[element] = line
                        if specialString:
                            line = line + specialString
                    #There will not be any line if sample is not from application binary
                    else:
                        addrMap_line[element] = ""
                    combine_Results(function, line, count, \
                        resultFunc, resultLine, tempFunc, tempLine);
    i=0
    print("\tFunctions and lines + Frequency")
    print("\t--------------------------------")
    # Print this path's top 5 functions and, for each, its top 3 lines.
    for key, value in sorted(tempFunc.items(), key=lambda x:x[1], reverse=True):
        print("\n\t%s -- %u" % (key, value))
        k=0
        for line, count in sorted(tempLine[key].items(), key=lambda x:x[1], reverse=True):
            print("\t\t%s -- %u" % (line, count))
            k = k+1
            if k==3:
                break
        i = i+1
        if i == 5:
            break
    return
def choose_path(pathDict, strategy):
    """Print the top 10 critical call paths and their hot functions/lines.

    pathDict maps a user call path to [accumulated CMetric, frequency];
    entries are ranked by CMetric. Sample resolution is delegated to
    combine_samples, which also fills resultFunc/resultLine for the final
    cross-path summary.

    NOTE(review): `strategy` is currently unused.
    """
    resultFunc = dict()
    resultLine = dict()
    i=0
    print ("***************************************************")
    # Rank call paths by accumulated CMetric (value[0]), highest first.
    for key, value in sorted(pathDict.items(), key=lambda x:x[1][0], reverse=True):
        if ( i<10 ):
            print("\nCritical Path %d -- CMetric, Frequency" % (i+1))
            print("----------------------------------------")
            print("\t%s --%u, %d. \n" % (key, value[0], value[1]))
            addrList = critLineSamples_all[key]
            #for element, count in addrList.items():
            #    print(element,count)
            combine_samples(addrList, resultFunc, resultLine)
            if get_kernel_stack == '1':
                print("\n\tKernel Call Paths")
                print("\t-----------------------")
                for path, count in sorted(critKernelPaths[key].items(), key=lambda x:x[1], reverse=True):
                    print("\t%s -- %d\n" % (path, count))
            i+= 1;
        else:
            break;
    print ("***************************************************")
    i=0
    print ("\nTop Critical Functions and lines of code with frequency")
    # Cross-path summary: top 10 functions with up to 3 lines each.
    for key, value in sorted(resultFunc.items(), key=lambda x:x[1], reverse=True):
        print("\n\t%s -- %u" % (key, value))
        k=0
        for line, count in sorted(resultLine[key].items(), key=lambda x:x[1], reverse=True):
            print("\t\t%s -- %u" % (line, count))
            k = k+1
            if k==3:
                break
        i = i+1
        if i == 10:
            break
    print ("***************************************************")
    resultFunc.clear()
    resultLine.clear()
    return
# Poll the perf buffer until interrupted (Ctrl-C), then post-process.
try:
    while 1:
        b.kprobe_poll()
finally:
    #Post Processing the stack traces
    start = datetime.datetime.now()
    print("Criticality Metric for each thread");
    for k, v in sorted(threadCM.items(), key=lambda x:x[1].value):
        print("%10u %u " % ((k.value), (v.value)))
        # `sum` is the module-level accumulator (shadows the builtin).
        sum += v.value
    print ("Sum = %d" % sum)
    print ("***************************************************")
    #for key, value in sorted(CMetric.items(), key=lambda x:x[1], reverse= True): # key is CM_Entry, value is CMetric
    for key, value in CMetric.items(): # key is CM_Entry, value is CMetric
        user_callPath = CMetric_callPath[key][0]
        kernel_callPath = CMetric_callPath[key][1]
        #Combine all call paths irrespective of CMetric value and then sort as per CMetric value
        if user_callPath in criticalSwitch_allCM:
            criticalSwitch_allCM[user_callPath][0] += value
            criticalSwitch_allCM[user_callPath][1] += 1
        else:
            criticalSwitch_allCM[user_callPath] = [value,1]
        #Combine the sample addresses
        if user_callPath not in critLineSamples_all:
            critLineSamples_all[user_callPath] = dict()
        lineDict = critLineSamples_all[user_callPath]
        addrList = CMetric_sampleAddr[key]
        for element in addrList:
            if element in lineDict:
                lineDict[element] += 1
            else:
                lineDict[element] = 1
        #Combine kernel call paths
        if user_callPath not in critKernelPaths:
            critKernelPaths[user_callPath] = dict()
        allKernelPaths = critKernelPaths[user_callPath]
        if kernel_callPath in allKernelPaths:
            allKernelPaths[kernel_callPath] += 1
        else:
            allKernelPaths[kernel_callPath] = 1
        user_callPath = ""
        kernel_callPath = ""
    print ("Critical Call Paths, functions and Lines of Code:")
    choose_path(criticalSwitch_allCM, 1)
    end = datetime.datetime.now()
    post_time = end - start
    print ("Post Processing time in milli seconds: %u" % int(post_time.total_seconds() * 1000))
    print ("Total switches: %u Critical switches: %u" % (total_switch, CM_Entry ))
    print ("Stack trace with no samples: %u" % noSample)
    print ("***************************************************")
    sys.exit()
| [
"subprocess.check_output",
"ctypes.POINTER",
"argparse.ArgumentParser",
"argparse.ArgumentTypeError",
"datetime.datetime.now",
"os.path.basename",
"sys.exit",
"bcc.BPF"
] | [((845, 938), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Generates stack traces for critical code sections"""'}), "(description=\n 'Generates stack traces for critical code sections')\n", (868, 938), False, 'import argparse\n'), ((15910, 15928), 'bcc.BPF', 'BPF', ([], {'text': 'bpf_text'}), '(text=bpf_text)\n', (15913, 15928), False, 'from bcc import BPF\n'), ((14900, 14928), 'os.path.basename', 'os.path.basename', (['targetPath'], {}), '(targetPath)\n', (14916, 14928), False, 'import os\n'), ((29623, 29646), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (29644, 29646), False, 'import datetime\n'), ((31499, 31522), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (31520, 31522), False, 'import datetime\n'), ((31857, 31867), 'sys.exit', 'sys.exit', ([], {}), '()\n', (31865, 31867), False, 'import sys\n'), ((617, 663), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['"""must be positive"""'], {}), "('must be positive')\n", (643, 663), False, 'import argparse\n'), ((773, 818), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['"""must be nonzero"""'], {}), "('must be nonzero')\n", (799, 818), False, 'import argparse\n'), ((536, 584), 'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['"""must be an integer"""'], {}), "('must be an integer')\n", (562, 584), False, 'import argparse\n'), ((18713, 18729), 'ctypes.POINTER', 'ct.POINTER', (['Data'], {}), '(Data)\n', (18723, 18729), True, 'import ctypes as ct\n'), ((25821, 25875), 'subprocess.check_output', 'subprocess.check_output', (['cmd'], {'stderr': 'subprocess.STDOUT'}), '(cmd, stderr=subprocess.STDOUT)\n', (25844, 25875), False, 'import subprocess\n')] |
# -*- coding: utf-8 -*-
import random
import numpy as np
import scipy
import pandas as pd
import pandas
import numpy
import json
def resizeFeature(inputData, newSize):
    """Linearly resample a (T, D) feature array along time to `newSize` rows.

    A single-row input cannot be interpolated and is replicated instead.
    """
    originalSize = len(inputData)
    if originalSize == 1:
        flat = np.reshape(inputData, [-1])
        return np.stack([flat] * newSize)
    sample_points = numpy.array(range(originalSize))
    interpolator = scipy.interpolate.interp1d(sample_points, inputData, axis=0)
    # Evenly spaced query points covering [0, originalSize-1].
    query_points = [i * float(originalSize - 1) / (newSize - 1)
                    for i in range(newSize)]
    return interpolator(query_points)
def readData(video_name, data_type=("spatial", "temporal")):
    """Load per-frame feature CSVs for `video_name` and concatenate them.

    Each requested stream ("spatial"/"temporal") is read from its fixed
    directory; streams may differ in length, so all are truncated to the
    shortest before being joined feature-wise (axis=1).

    The default was changed from a mutable list to a tuple (classic
    mutable-default-argument pitfall); existing callers are unaffected.
    """
    spatial_dir = "./spatial/csv_action/"
    temporal_dir = "./temporal/csv_action/"
    data = []
    for dtype in data_type:
        if dtype == "spatial":
            df = pandas.read_csv(spatial_dir + video_name + ".csv")
        elif dtype == "temporal":
            df = pandas.read_csv(temporal_dir + video_name + ".csv")
        else:
            # Previously an unknown dtype silently reused a stale frame or
            # raised NameError; fail loudly instead.
            raise ValueError("unknown data_type: %r" % (dtype,))
        data.append(df.values[:, :])
    # Truncate every stream to the shortest length so they align row-wise.
    min_len = min(len(d) for d in data)
    new_data = [d[:min_len] for d in data]
    return numpy.concatenate(new_data, axis=1)
def load_json(file):
    """Read the file at path `file` and return the deserialized JSON object."""
    with open(file) as handle:
        return json.load(handle)
def getDatasetDict():
    """Build {video_name: info} from the video-info CSV and the ActivityNet JSON.

    Each entry carries 'duration_frame', 'duration_second' and the
    'annotations' list taken from the ActivityNet database.
    """
    info_df = pd.read_csv("./info/video_info.csv")
    database = load_json("./info/activity_net.v1-3.min.json")['database']
    out_dict = {}
    rows = zip(info_df.video.values, info_df.numFrame.values,
               info_df.seconds.values)
    for video_name, num_frame, seconds in rows:
        # CSV names carry a 2-character prefix that the JSON keys lack.
        video_info = database[video_name[2:]]
        out_dict[video_name] = {
            'duration_frame': num_frame,
            'duration_second': seconds,
            'annotations': video_info['annotations'],
        }
    return out_dict
def poolData(data,videoAnno,num_prop=100,num_bin=1,num_sample_bin=3,pool_type="mean"):
    """Resample a (T, 400) snippet-feature sequence onto `num_prop` anchors.

    Each of the `num_prop` uniformly spaced temporal anchors is filled by
    sampling `num_bin * num_sample_bin` interpolated points and pooling each
    bin's samples with `pool_type` ("mean" or "max"). Anchors falling entirely
    outside the feature time range produce a zero vector.

    BUG FIX: the per-bin pooling previously reduced over ALL sampled points
    (`y_new`) instead of the bin's own slice (`tmp_y_new`); for the default
    num_bin=1 the result is identical, for num_bin>1 it is now correct.
    """
    feature_frame = len(data) * 16          # each row covers 16 video frames
    video_frame = videoAnno['duration_frame']
    video_second = videoAnno['duration_second']
    # Features may not cover the full video; rescale the duration accordingly.
    corrected_second = float(feature_frame) / video_frame * video_second
    fps = float(video_frame) / video_second
    st = 16 / fps                           # seconds spanned by one snippet
    if len(data) == 1:
        # A single snippet cannot be interpolated; tile it instead.
        video_feature = np.stack([data] * num_prop)
        video_feature = np.reshape(video_feature, [num_prop, 400])
        return video_feature
    x = [st / 2 + ii * st for ii in range(len(data))]   # snippet centre times
    f = scipy.interpolate.interp1d(x, data, axis=0)
    video_feature = []
    zero_sample = np.zeros(num_bin * 400)
    tmp_anchor_xmin = [1.0 / num_prop * i for i in range(num_prop)]
    tmp_anchor_xmax = [1.0 / num_prop * i for i in range(1, num_prop + 1)]
    num_sample = num_bin * num_sample_bin
    for idx in range(num_prop):
        # Clamp the anchor window to the interpolable time range.
        xmin = max(x[0] + 0.0001, tmp_anchor_xmin[idx] * corrected_second)
        xmax = min(x[-1] - 0.0001, tmp_anchor_xmax[idx] * corrected_second)
        if xmax < x[0]:
            # Anchor lies entirely before the feature range.
            video_feature.append(zero_sample)
            continue
        if xmin > x[-1]:
            # Anchor lies entirely after the feature range.
            video_feature.append(zero_sample)
            continue
        plen = (xmax - xmin) / (num_sample - 1)
        x_new = [xmin + plen * ii for ii in range(num_sample)]
        y_new = f(x_new)
        y_new_pool = []
        for b in range(num_bin):
            tmp_y_new = y_new[num_sample_bin * b:num_sample_bin * (b + 1)]
            if pool_type == "mean":
                tmp_y_new = np.mean(tmp_y_new, axis=0)
            elif pool_type == "max":
                tmp_y_new = np.max(tmp_y_new, axis=0)
            y_new_pool.append(tmp_y_new)
        y_new_pool = np.stack(y_new_pool)
        y_new_pool = np.reshape(y_new_pool, [-1])
        video_feature.append(y_new_pool)
    video_feature = np.stack(video_feature)
    return video_feature
videoDict = getDatasetDict()
# BUG FIX: dict views cannot be shuffled in place under Python 3;
# materialize the key list before random.shuffle.
videoNameList = list(videoDict.keys())
random.shuffle(videoNameList)
# Column headers f0..f399 for the 400-dimensional snippet features.
col_names = ["f" + str(i) for i in range(400)]
for videoName in videoNameList:
    videoAnno = videoDict[videoName]
    data = readData(videoName)
    numFrame = videoAnno['duration_frame']
    featureFrame = len(data) * 16
    # Record how many video frames the extracted features actually cover.
    videoAnno["feature_frame"] = featureFrame
    videoDict[videoName] = videoAnno
    print(numFrame, featureFrame)
    videoFeature_mean = poolData(data, videoAnno, num_prop=100, num_bin=1,
                                 num_sample_bin=3, pool_type="mean")
    outDf = pd.DataFrame(videoFeature_mean, columns=col_names)
    outDf.to_csv("./csv_mean_100/" + videoName + ".csv", index=False)
# Persist the augmented annotation dictionary (closed just below).
outfile = open("./anet_anno_anet.json", "w")
json.dump(videoDict, outfile)
outfile.close() | [
"numpy.mean",
"numpy.reshape",
"random.shuffle",
"pandas.read_csv",
"scipy.interpolate.interp1d",
"json.load",
"numpy.stack",
"numpy.zeros",
"numpy.max",
"numpy.concatenate",
"pandas.DataFrame",
"json.dump"
] | [((3780, 3809), 'random.shuffle', 'random.shuffle', (['videoNameList'], {}), '(videoNameList)\n', (3794, 3809), False, 'import random\n'), ((4440, 4469), 'json.dump', 'json.dump', (['videoDict', 'outfile'], {}), '(videoDict, outfile)\n', (4449, 4469), False, 'import json\n'), ((437, 485), 'scipy.interpolate.interp1d', 'scipy.interpolate.interp1d', (['x', 'inputData'], {'axis': '(0)'}), '(x, inputData, axis=0)\n', (463, 485), False, 'import scipy\n'), ((1120, 1155), 'numpy.concatenate', 'numpy.concatenate', (['new_data'], {'axis': '(1)'}), '(new_data, axis=1)\n', (1137, 1155), False, 'import numpy\n'), ((1317, 1353), 'pandas.read_csv', 'pd.read_csv', (['"""./info/video_info.csv"""'], {}), "('./info/video_info.csv')\n", (1328, 1353), True, 'import pandas as pd\n'), ((2403, 2446), 'scipy.interpolate.interp1d', 'scipy.interpolate.interp1d', (['x', 'data'], {'axis': '(0)'}), '(x, data, axis=0)\n', (2429, 2446), False, 'import scipy\n'), ((2491, 2514), 'numpy.zeros', 'np.zeros', (['(num_bin * 400)'], {}), '(num_bin * 400)\n', (2499, 2514), True, 'import numpy as np\n'), ((3672, 3695), 'numpy.stack', 'np.stack', (['video_feature'], {}), '(video_feature)\n', (3680, 3695), True, 'import numpy as np\n'), ((4281, 4331), 'pandas.DataFrame', 'pd.DataFrame', (['videoFeature_mean'], {'columns': 'col_names'}), '(videoFeature_mean, columns=col_names)\n', (4293, 4331), True, 'import pandas as pd\n'), ((320, 347), 'numpy.reshape', 'np.reshape', (['inputData', '[-1]'], {}), '(inputData, [-1])\n', (330, 347), True, 'import numpy as np\n'), ((362, 393), 'numpy.stack', 'np.stack', (['([inputData] * newSize)'], {}), '([inputData] * newSize)\n', (370, 393), True, 'import numpy as np\n'), ((1246, 1266), 'json.load', 'json.load', (['json_file'], {}), '(json_file)\n', (1255, 1266), False, 'import json\n'), ((2232, 2259), 'numpy.stack', 'np.stack', (['([data] * num_prop)'], {}), '([data] * num_prop)\n', (2240, 2259), True, 'import numpy as np\n'), ((2280, 2322), 'numpy.reshape', 
'np.reshape', (['video_feature', '[num_prop, 400]'], {}), '(video_feature, [num_prop, 400])\n', (2290, 2322), True, 'import numpy as np\n'), ((3545, 3565), 'numpy.stack', 'np.stack', (['y_new_pool'], {}), '(y_new_pool)\n', (3553, 3565), True, 'import numpy as np\n'), ((3585, 3613), 'numpy.reshape', 'np.reshape', (['y_new_pool', '[-1]'], {}), '(y_new_pool, [-1])\n', (3595, 3613), True, 'import numpy as np\n'), ((818, 868), 'pandas.read_csv', 'pandas.read_csv', (["(spatial_dir + video_name + '.csv')"], {}), "(spatial_dir + video_name + '.csv')\n", (833, 868), False, 'import pandas\n'), ((912, 963), 'pandas.read_csv', 'pandas.read_csv', (["(temporal_dir + video_name + '.csv')"], {}), "(temporal_dir + video_name + '.csv')\n", (927, 963), False, 'import pandas\n'), ((3381, 3403), 'numpy.mean', 'np.mean', (['y_new'], {'axis': '(0)'}), '(y_new, axis=0)\n', (3388, 3403), True, 'import numpy as np\n'), ((3464, 3485), 'numpy.max', 'np.max', (['y_new'], {'axis': '(0)'}), '(y_new, axis=0)\n', (3470, 3485), True, 'import numpy as np\n')] |
import atexit
import sqlite3
import traceback
#################
import sys
sys.path.append('/app')
from helper import *
# Application settings (db_name, etc.) come from the deployed YAML config.
settings_dict = load_yaml_dict(read_file("/Settings.yaml"))
# isolation_level=None puts sqlite3 in autocommit mode, so each migration
# statement takes effect immediately; declared-type parsing is enabled for
# the `timestamp` columns used in the schema below.
conn = sqlite3.connect(f"/database/{settings_dict['db_name']}", isolation_level=None, detect_types=sqlite3.PARSE_DECLTYPES|sqlite3.PARSE_COLNAMES)
conn.row_factory = sqlite3.Row
cursor = conn.cursor()
cursor.execute("PRAGMA foreign_keys=ON")
# Ensure the connection and cursor are closed on interpreter exit.
atexit.register(conn.close)
atexit.register(cursor.close)
# NOTE(review): `version` is read but not used in this visible section.
version = read_file("/VERSION").rstrip()
# This tells us whether the migration has already happened.
check_sql = '''SELECT COUNT(*) AS count
                FROM pragma_table_info("problems")
                WHERE name = "expected_text_output"'''
cursor.execute(check_sql)
check_result = cursor.fetchone()["count"]
print("NotNeeded")
else:
alter_sql_list = ['ALTER TABLE problems RENAME COLUMN expected_output TO expected_text_output',
'ALTER TABLE problems ADD COLUMN expected_image_output text NOT NULL DEFAULT ""',
'''UPDATE problems
SET expected_image_output = expected_text_output
WHERE output_type = "jpg"''',
'''UPDATE problems
SET expected_text_output = ""
WHERE output_type = "jpg"''',
'ALTER TABLE submissions RENAME COLUMN code_output TO text_output',
'ALTER TABLE submissions ADD COLUMN image_output text NOT NULL DEFAULT ""',
'''UPDATE submissions
SET image_output = text_output
WHERE problem_id IN (SELECT problem_id FROM problems WHERE output_type = "jpg")''',
'''UPDATE submissions
SET text_output = ""
WHERE problem_id IN (SELECT problem_id FROM problems WHERE output_type = "jpg")''',
'''CREATE TABLE IF NOT EXISTS submissions2 (
course_id integer NOT NULL,
assignment_id integer NOT NULL,
problem_id integer NOT NULL,
user_id text NOT NULL,
submission_id integer NOT NULL,
code text NOT NULL,
text_output text NOT NULL,
image_output text NOT NULL,
passed integer NOT NULL,
date timestamp NOT NULL,
FOREIGN KEY (course_id) REFERENCES courses (course_id) ON DELETE CASCADE,
FOREIGN KEY (assignment_id) REFERENCES assignments (assignment_id) ON DELETE CASCADE,
FOREIGN KEY (problem_id) REFERENCES problems (problem_id) ON DELETE CASCADE,
FOREIGN KEY (user_id) REFERENCES users(user_id) ON DELETE CASCADE,
PRIMARY KEY (course_id, assignment_id, problem_id, user_id, submission_id))''',
'''INSERT INTO submissions2
SELECT course_id, assignment_id, problem_id, user_id, submission_id, code,
text_output, image_output, passed, date
FROM submissions''',
'DROP TABLE IF EXISTS submissions',
'ALTER TABLE submissions2 RENAME TO submissions'
]
error_occurred = False
for sql in alter_sql_list:
try:
cursor.execute(sql)
except:
print(sql)
print(traceback.format_exc())
error_occurred = True
if not error_occurred:
print("Success")
| [
"traceback.format_exc",
"sys.path.append",
"sqlite3.connect",
"atexit.register"
] | [((75, 98), 'sys.path.append', 'sys.path.append', (['"""/app"""'], {}), "('/app')\n", (90, 98), False, 'import sys\n'), ((189, 335), 'sqlite3.connect', 'sqlite3.connect', (['f"""/database/{settings_dict[\'db_name\']}"""'], {'isolation_level': 'None', 'detect_types': '(sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)'}), '(f"/database/{settings_dict[\'db_name\']}", isolation_level=\n None, detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)\n', (204, 335), False, 'import sqlite3\n'), ((425, 452), 'atexit.register', 'atexit.register', (['conn.close'], {}), '(conn.close)\n', (440, 452), False, 'import atexit\n'), ((453, 482), 'atexit.register', 'atexit.register', (['cursor.close'], {}), '(cursor.close)\n', (468, 482), False, 'import atexit\n'), ((3619, 3641), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (3639, 3641), False, 'import traceback\n')] |
#!/usr/bin/env python3
"""
example.py
Example of using pypahdb to decompose an astronomical PAH spectrum.
"""
import pkg_resources
from pypahdb.decomposer import Decomposer
from pypahdb.observation import Observation
if __name__ == '__main__':
    # Locate the bundled sample spectrum (an IPAC table) inside the package.
    sample_table = 'resources/sample_data_NGC7023.tbl'
    table_path = pkg_resources.resource_filename('pypahdb', sample_table)
    # Load the observation from disk.
    observation = Observation(table_path)
    # Run the decomposition/fit on the observation's spectrum.
    fit = Decomposer(observation.spectrum)
    # Persist the results as a PDF summary and a FITS file.
    fit.save_pdf('NGC7023_pypahdb_tbl_example.pdf', domaps=False)
    fit.save_fits('NGC7023_pypahdb_tbl_example.fits', header=observation.header)
| [
"pypahdb.observation.Observation",
"pypahdb.decomposer.Decomposer",
"pkg_resources.resource_filename"
] | [((354, 407), 'pkg_resources.resource_filename', 'pkg_resources.resource_filename', (['"""pypahdb"""', 'file_path'], {}), "('pypahdb', file_path)\n", (385, 407), False, 'import pkg_resources\n'), ((458, 480), 'pypahdb.observation.Observation', 'Observation', (['data_file'], {}), '(data_file)\n', (469, 480), False, 'from pypahdb.observation import Observation\n'), ((575, 599), 'pypahdb.decomposer.Decomposer', 'Decomposer', (['obs.spectrum'], {}), '(obs.spectrum)\n', (585, 599), False, 'from pypahdb.decomposer import Decomposer\n')] |
#!/usr/bin/env python3
# Copyright (c) 2021 The Bitcoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the -uaclientname and -uaclientversion option."""
import re
from test_framework.test_framework import BitcoinTestFramework
from test_framework.test_node import ErrorMatch
from test_framework.util import assert_equal
class UseragentTest(BitcoinTestFramework):
    """Functional test for the -uaclientname / -uaclientversion options."""

    def set_test_params(self):
        self.num_nodes = 1
        self.setup_clean_chain = True

    def run_test(self):
        self.log.info("test -uaclientname and -uaclientversion")
        default_useragent = self.nodes[0].getnetworkinfo()["subversion"]
        expected = "/Bitcoin ABC:"
        assert_equal(default_useragent[:len(expected)], expected)
        # Extract the version between ':' and the closing '/'.
        default_version = default_useragent[default_useragent.index(':') + 1:]
        default_version = default_version[:default_version.index('/')]

        self.restart_node(0, ["-uaclientname=Foo Client"])
        foo_ua = self.nodes[0].getnetworkinfo()["subversion"]
        expected = f"/Foo Client:{default_version}"
        assert_equal(foo_ua[:len(expected)], expected)

        self.restart_node(0, ["-uaclientversion=123.45"])
        foo_ua = self.nodes[0].getnetworkinfo()["subversion"]
        expected = "/Bitcoin ABC:123.45"
        assert_equal(foo_ua[:len(expected)], expected)

        self.log.info(
            "non-numeric version allowed (although not recommended in BIP14)")
        self.restart_node(0, ["-uaclientversion=Version Two"])
        foo_ua = self.nodes[0].getnetworkinfo()["subversion"]
        expected = "/Bitcoin ABC:Version Two"
        assert_equal(foo_ua[:len(expected)], expected)

        self.log.info("test -uaclient doesn't break -uacomment")
        # BUG FIX: the client name must match the "/Bar Client:3000" check
        # below (a "<NAME>" placeholder had been left in its place).
        self.restart_node(0, ["-uaclientname=Bar Client",
                              "-uaclientversion=3000",
                              "-uacomment=spam bacon and eggs"])
        bar_ua = self.nodes[0].getnetworkinfo()["subversion"]
        expected = "/Bar Client:3000"
        assert_equal(bar_ua[:len(expected)], expected)
        assert "spam bacon and eggs" in bar_ua

        # The same init error is expected for every oversized-string case.
        too_long_error = r"Error: Total length of network version string \([0-9]+\) exceeds maximum length \([0-9]+\)\. Reduce the number or size of uacomments\."

        self.log.info("test -uaclientname max length")
        self.stop_node(0)
        self.nodes[0].assert_start_raises_init_error(
            ["-uaclientname=" + "a" * 256], too_long_error,
            match=ErrorMatch.FULL_REGEX)

        self.log.info("test -uaclientversion max length")
        self.nodes[0].assert_start_raises_init_error(
            ["-uaclientversion=" + "a" * 256], too_long_error,
            match=ErrorMatch.FULL_REGEX)

        self.log.info("test -uaclientname and -uaclientversion max length")
        self.nodes[0].assert_start_raises_init_error(
            ["-uaclientname=" + "a" * 128, "-uaclientversion=" + "a" * 128],
            too_long_error, match=ErrorMatch.FULL_REGEX)

        self.log.info(
            "test -uaclientname and -uaclientversion invalid characters")
        for invalid_char in ['/', ':', '(', ')', '*', '!', '₿', '🏃']:
            bad_char_suffix = re.escape(invalid_char) + \
                r"\) contains invalid characters\."
            # for client name
            self.nodes[0].assert_start_raises_init_error(
                ["-uaclientname=" + invalid_char],
                r"Error: -uaclientname \(" + bad_char_suffix,
                match=ErrorMatch.FULL_REGEX)
            # for client version
            self.nodes[0].assert_start_raises_init_error(
                ["-uaclientversion=" + invalid_char],
                r"Error: -uaclientversion \(" + bad_char_suffix,
                match=ErrorMatch.FULL_REGEX)
            # for both (the name is reported first)
            self.nodes[0].assert_start_raises_init_error(
                ["-uaclientname=" + invalid_char,
                 "-uaclientversion=" + invalid_char],
                r"Error: -uaclientname \(" + bad_char_suffix,
                match=ErrorMatch.FULL_REGEX)
# Standalone entry point: run the functional test directly.
if __name__ == '__main__':
    UseragentTest().main()
| [
"re.escape"
] | [((3606, 3629), 're.escape', 're.escape', (['invalid_char'], {}), '(invalid_char)\n', (3615, 3629), False, 'import re\n'), ((3938, 3961), 're.escape', 're.escape', (['invalid_char'], {}), '(invalid_char)\n', (3947, 3961), False, 'import re\n'), ((4260, 4283), 're.escape', 're.escape', (['invalid_char'], {}), '(invalid_char)\n', (4269, 4283), False, 'import re\n')] |
# Copyright (c) 2017 Fujitsu Limited
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
import oslo_messaging
from neutron.api.rpc.callbacks import events
from neutron.api.rpc.handlers import resources_rpc
from neutron.services.logapi.common import constants as log_const
from neutron.services.logapi.rpc import server as server_rpc
from neutron.tests import base
class LoggingApiNotificationTestCase(base.BaseTestCase):
    """Tests for the LoggingApiNotification resource-push wrappers."""

    def setUp(self):
        super(LoggingApiNotificationTestCase, self).setUp()
        self.test_obj = server_rpc.LoggingApiNotification()

    def test___init__(self):
        self.assertIsInstance(self.test_obj.notification_api,
                              resources_rpc.ResourcesPushRpcApi)

    def _check_push(self, mocked_push, method_name, expected_event):
        # Shared body: invoking <method_name> must push the log resource
        # with the matching rpc-callback event type.
        ctx = mock.Mock()
        log_resource = mock.Mock()
        getattr(self.test_obj, method_name)(ctx, log_resource)
        mocked_push.assert_called_with(ctx, [log_resource], expected_event)

    @mock.patch("neutron.api.rpc.handlers.resources_rpc.ResourcesPushRpcApi."
                "push")
    def test_create_log(self, mocked_push):
        self._check_push(mocked_push, "create_log", events.CREATED)

    @mock.patch("neutron.api.rpc.handlers.resources_rpc.ResourcesPushRpcApi."
                "push")
    def test_update_log(self, mocked_push):
        self._check_push(mocked_push, "update_log", events.UPDATED)

    @mock.patch("neutron.api.rpc.handlers.resources_rpc.ResourcesPushRpcApi."
                "push")
    def test_delete_log(self, mocked_push):
        self._check_push(mocked_push, "delete_log", events.DELETED)
class LoggingApiSkeletonTestCase(base.BaseTestCase):
    """Checks the RPC server-side skeleton wiring and its DB callbacks."""

    @mock.patch("neutron.common.rpc.get_server")
    def test___init__(self, mocked_get_server):
        skeleton = server_rpc.LoggingApiSkeleton()
        expected_target = oslo_messaging.Target(
            topic=log_const.LOGGING_PLUGIN,
            server=cfg.CONF.host,
            fanout=False)
        mocked_get_server.assert_called_with(expected_target, [skeleton])

    @mock.patch("neutron.services.logapi.common.db_api."
                "get_sg_log_info_for_port")
    def test_get_sg_log_info_for_port(self, mock_callback):
        skeleton = server_rpc.LoggingApiSkeleton()
        ctx = mock.Mock()
        skeleton.get_sg_log_info_for_port(ctx, port_id='123')
        mock_callback.assert_called_with(ctx, '123')

    @mock.patch("neutron.services.logapi.common.db_api."
                "get_sg_log_info_for_log_resources")
    def test_get_sg_log_info_for_log_resources(self, mock_callback):
        skeleton = server_rpc.LoggingApiSkeleton()
        ctx = mock.Mock()
        log_resources = [mock.Mock()]
        skeleton.get_sg_log_info_for_log_resources(ctx,
                                                   log_resources=log_resources)
        mock_callback.assert_called_with(ctx, log_resources)
| [
"mock.patch",
"mock.Mock",
"neutron.services.logapi.rpc.server.LoggingApiNotification",
"neutron.services.logapi.rpc.server.LoggingApiSkeleton",
"oslo_messaging.Target"
] | [((1316, 1393), 'mock.patch', 'mock.patch', (['"""neutron.api.rpc.handlers.resources_rpc.ResourcesPushRpcApi.push"""'], {}), "('neutron.api.rpc.handlers.resources_rpc.ResourcesPushRpcApi.push')\n", (1326, 1393), False, 'import mock\n'), ((1715, 1792), 'mock.patch', 'mock.patch', (['"""neutron.api.rpc.handlers.resources_rpc.ResourcesPushRpcApi.push"""'], {}), "('neutron.api.rpc.handlers.resources_rpc.ResourcesPushRpcApi.push')\n", (1725, 1792), False, 'import mock\n'), ((2114, 2191), 'mock.patch', 'mock.patch', (['"""neutron.api.rpc.handlers.resources_rpc.ResourcesPushRpcApi.push"""'], {}), "('neutron.api.rpc.handlers.resources_rpc.ResourcesPushRpcApi.push')\n", (2124, 2191), False, 'import mock\n'), ((2568, 2611), 'mock.patch', 'mock.patch', (['"""neutron.common.rpc.get_server"""'], {}), "('neutron.common.rpc.get_server')\n", (2578, 2611), False, 'import mock\n'), ((2928, 3004), 'mock.patch', 'mock.patch', (['"""neutron.services.logapi.common.db_api.get_sg_log_info_for_port"""'], {}), "('neutron.services.logapi.common.db_api.get_sg_log_info_for_port')\n", (2938, 3004), False, 'import mock\n'), ((3328, 3418), 'mock.patch', 'mock.patch', (['"""neutron.services.logapi.common.db_api.get_sg_log_info_for_log_resources"""'], {}), "(\n 'neutron.services.logapi.common.db_api.get_sg_log_info_for_log_resources')\n", (3338, 3418), False, 'import mock\n'), ((1117, 1152), 'neutron.services.logapi.rpc.server.LoggingApiNotification', 'server_rpc.LoggingApiNotification', ([], {}), '()\n', (1150, 1152), True, 'from neutron.services.logapi.rpc import server as server_rpc\n'), ((1477, 1488), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (1486, 1488), False, 'import mock\n'), ((1514, 1525), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (1523, 1525), False, 'import mock\n'), ((1876, 1887), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (1885, 1887), False, 'import mock\n'), ((1913, 1924), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (1922, 1924), False, 'import mock\n'), ((2275, 2286), 
'mock.Mock', 'mock.Mock', ([], {}), '()\n', (2284, 2286), False, 'import mock\n'), ((2312, 2323), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (2321, 2323), False, 'import mock\n'), ((2679, 2710), 'neutron.services.logapi.rpc.server.LoggingApiSkeleton', 'server_rpc.LoggingApiSkeleton', ([], {}), '()\n', (2708, 2710), True, 'from neutron.services.logapi.rpc import server as server_rpc\n'), ((2729, 2822), 'oslo_messaging.Target', 'oslo_messaging.Target', ([], {'topic': 'log_const.LOGGING_PLUGIN', 'server': 'cfg.CONF.host', 'fanout': '(False)'}), '(topic=log_const.LOGGING_PLUGIN, server=cfg.CONF.host,\n fanout=False)\n', (2750, 2822), False, 'import oslo_messaging\n'), ((3103, 3134), 'neutron.services.logapi.rpc.server.LoggingApiSkeleton', 'server_rpc.LoggingApiSkeleton', ([], {}), '()\n', (3132, 3134), True, 'from neutron.services.logapi.rpc import server as server_rpc\n'), ((3155, 3166), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (3164, 3166), False, 'import mock\n'), ((3521, 3552), 'neutron.services.logapi.rpc.server.LoggingApiSkeleton', 'server_rpc.LoggingApiSkeleton', ([], {}), '()\n', (3550, 3552), True, 'from neutron.services.logapi.rpc import server as server_rpc\n'), ((3573, 3584), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (3582, 3584), False, 'import mock\n'), ((3610, 3621), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (3619, 3621), False, 'import mock\n')] |
""" Unit Tests for Service Patch
"""
# Copyright (c) 2021 ipyradiant contributors.
# Distributed under the terms of the Modified BSD License.
import ipyradiant
import rdflib
LINKEDDATA_QUERY = """
SELECT DISTINCT ?s ?p ?o
WHERE {
SERVICE <http://linkeddata.uriburner.com/sparql>
{
SELECT ?s ?p ?o
WHERE {?s ?p ?o}
}
}
"""
PATCHED_LINKEDDATA_QUERY = """
SELECT DISTINCT ?s ?p ?o
WHERE {
service <http://linkeddata.uriburner.com/sparql>
{
SELECT ?s ?p ?o
WHERE {?s ?p ?o}
}
}
"""
def test_service_fix():
    """The patch should rewrite the SERVICE keyword as expected."""
    patched = ipyradiant.service_patch_rdflib(LINKEDDATA_QUERY)
    assert patched == PATCHED_LINKEDDATA_QUERY
def test_rdflib_version():
    """Guard against rdflib releases newer than 5.0.0."""
    parts = tuple(int(piece) for piece in rdflib.__version__.split("."))
    assert parts <= (5, 0, 0)
| [
"ipyradiant.service_patch_rdflib"
] | [((699, 748), 'ipyradiant.service_patch_rdflib', 'ipyradiant.service_patch_rdflib', (['LINKEDDATA_QUERY'], {}), '(LINKEDDATA_QUERY)\n', (730, 748), False, 'import ipyradiant\n')] |
# Created by Kelvin_Clark on 3/5/2022, 6:37 PM
from typing import Optional
from src.models.entities.user import User
from src import database as db
class UserDao:
    """Data-access helpers for the ``User`` entity (static methods only)."""
    @staticmethod
    def get_user_by_email(email: str) -> Optional[User]:
        """Return the user with the given email, or None when no match exists."""
        return User.query.filter_by(email=email).first()
    @staticmethod
    def check_user_exists(email: str) -> bool:
        """
        True if user exists, False if otherwise
        :param email: str
        :return: bool
        """
        user = UserDao.get_user_by_email(email=email)
        return user is not None
    @staticmethod
    def add_user(user: User) -> User:
        """Persist ``user`` and return it refreshed with DB-generated fields."""
        db.session.add(user)
        db.session.commit()
        # Refresh so e.g. the auto-generated primary key is populated.
        db.session.refresh(user)
        return user
| [
"src.database.session.refresh",
"src.database.session.add",
"src.models.entities.user.User.query.filter_by",
"src.database.session.commit"
] | [((635, 655), 'src.database.session.add', 'db.session.add', (['user'], {}), '(user)\n', (649, 655), True, 'from src import database as db\n'), ((664, 683), 'src.database.session.commit', 'db.session.commit', ([], {}), '()\n', (681, 683), True, 'from src import database as db\n'), ((692, 716), 'src.database.session.refresh', 'db.session.refresh', (['user'], {}), '(user)\n', (710, 716), True, 'from src import database as db\n'), ((257, 290), 'src.models.entities.user.User.query.filter_by', 'User.query.filter_by', ([], {'email': 'email'}), '(email=email)\n', (277, 290), False, 'from src.models.entities.user import User\n')] |
from rdflib import Graph
import json
import glob
books = {}  # maps book title -> RDF subject URI
# Every cached Project Gutenberg record ships as one RDF file per book.
rdf_files = glob.glob("gutindex/cache/epub/*/*.rdf")
i = 1  # running counter, only used for progress output
for rdf_file in rdf_files:
    g = Graph()
    g.parse(rdf_file)
    for s,p,o in g:
        # Any predicate whose IRI mentions "title" (e.g. dcterms:title)
        # is treated as the book title for this record.
        if 'title' in p:
            books[str(o)] = str(s)
            print(i, str(o))
            i+=1
with open("gutindex_titles.json", "w") as f:
    json.dump(books, f)
| [
"rdflib.Graph",
"json.dump",
"glob.glob"
] | [((74, 114), 'glob.glob', 'glob.glob', (['"""gutindex/cache/epub/*/*.rdf"""'], {}), "('gutindex/cache/epub/*/*.rdf')\n", (83, 114), False, 'import glob\n'), ((156, 163), 'rdflib.Graph', 'Graph', ([], {}), '()\n', (161, 163), False, 'from rdflib import Graph\n'), ((357, 376), 'json.dump', 'json.dump', (['books', 'f'], {}), '(books, f)\n', (366, 376), False, 'import json\n')] |
"""
This tool compares measured data (observed) with model outputs (predicted), used in procedures of calibration and validation
"""
from __future__ import division
from __future__ import print_function
import os
from math import sqrt
import pandas as pd
from sklearn.metrics import mean_squared_error as calc_mean_squared_error
import cea.config
import cea.inputlocator
from cea_calibration.global_variables import *
# from cea.constants import MONTHS_IN_YEAR_NAMES
# import cea.examples.global_variables as global_variables
# def outputdatafolder(self):
# return self._ensure_folder(self.scenario, 'outputs', 'data')
#
#
# def get_calibrationresults(self):
# """scenario/outputs/data/calibration_results/calibrationresults.csv"""
# return os.path.join(self.scenario, 'outputs', 'data', 'calibration_results', 'calibrationresults.csv')
#
#
# def get_project_calibrationresults(self):
# """project/outputs/calibration_results/calibrationresults.csv"""
# return os.path.join(self.project, 'outputs', 'calibration_results', 'calibrationresults.csv')
#
#
# def get_totaloccupancy(self):
# """scenario/outputs/data/totaloccupancy.csv"""
# return os.path.join(self.scenario, "outputs", "data", "totaloccupancy.csv")
#
#
# def get_measurements_folder(self):
# return self._ensure_folder(self.scenario, 'inputs', 'measurements')
#
#
# def get_annual_measurements(self):
# return os.path.join(self.get_measurements_folder(), 'annual_measurements.csv')
#
#
# def get_monthly_measurements(self):
# return os.path.join(self.get_measurements_folder(), 'monthly_measurements.csv')
#
#
# def get_global_monthly_measurements(self):
# return os.path.join(self.get_measurements_folder(), 'monthly_measurements.csv')
# global_validation_n_calibrated = []
# global_validation_percentage = []
MONTHS_IN_YEAR_NAMES = ['JANUARY', 'FEBRUARY', 'MARCH', 'APRIL',
'MAY', 'JUNE', 'JULY', 'AUGUST', 'SEPTEMBER',
'OCTOBER', 'NOVEMBER', 'DECEMBER']
__author__ = "<NAME>"
__copyright__ = "Copyright 2020, Architecture and Building Systems - ETH Zurich"
__credits__ = ["<NAME>, <NAME>, <NAME>"]
__license__ = "MIT"
__version__ = "1.0"
__maintainer__ = "<NAME>"
__email__ = "<EMAIL>"
__status__ = "Production"
def validation(scenario_list,
               locators_of_scenarios,
               measured_building_names_of_scenarios,
               monthly=True,
               load='GRID',
               ):
    """
    Compare observed (measured) and predicted (model output) values.

    Monthly data is compared in terms of NMBE and CV(RMSE), following ASHRAE
    Guideline 14-2014. The measurements folder must contain one csv per
    scenario with the monthly data: Name (CEA ID) | ZipCode (optional) |
    Monthly Data (JAN - DEC) | Type of equivalent variable in CEA
    (GRID_kWh is the default for total electricity consumption).

    Prints NMBE and CV(RMSE) per building; appends the number of calibrated
    buildings and their percentage to the module-level lists
    ``global_validation_n_calibrated`` / ``global_validation_percentage``.

    :param scenario_list: scenario paths, one per scenario to validate
    :param locators_of_scenarios: cea InputLocator per scenario (same order)
    :param measured_building_names_of_scenarios: list of building-name lists
    :param monthly: only monthly validation is implemented; nothing happens
        (and None is returned) when False
    :param load: CEA output column prefix compared against the measurements
    :return: total score (sum over scenarios of calibrated buildings weighted
        by their measured consumption)
    """
    ## monthly validation
    if monthly:
        number_of_buildings = 0
        print("monthly validation")
        validation_output = pd.DataFrame(columns=['scenario', 'calibrated_buildings', 'score'])
        for scenario, locator, measured_building_names in zip(scenario_list, locators_of_scenarios,
                                                              measured_building_names_of_scenarios):
            list_of_scores = []
            number_of_calibrated = []
            number_of_buildings = number_of_buildings + len(measured_building_names)
            # get measured data for buildings in this scenario
            monthly_measured_data = pd.read_csv(locator.get_monthly_measurements())
            # loop in the measured buildings of this scenario
            for building_name in measured_building_names:  # number of buildings that have real data available
                # extract measured data
                print('For building', building_name, 'the errors are')
                fields_to_extract = ['Name'] + MONTHS_IN_YEAR_NAMES
                monthly_measured_demand = monthly_measured_data[fields_to_extract].set_index('Name')
                monthly_measured_demand = monthly_measured_demand.loc[building_name]
                monthly_measured_demand = pd.DataFrame({'Month': monthly_measured_demand.index.values,
                                                        'measurements': monthly_measured_demand.values})
                # extract model output (hourly demand for the same load)
                hourly_modelled_data = pd.read_csv(locator.get_demand_results_file(building_name),
                                                   usecols=['DATE', load + '_kWh'])
                hourly_modelled_data['DATE'] = pd.to_datetime(hourly_modelled_data['DATE'])
                look_up = {1: 'JANUARY', 2: 'FEBRUARY', 3: 'MARCH', 4: 'APRIL', 5: 'MAY',
                           6: 'JUNE', 7: 'JULY', 8: 'AUGUST', 9: 'SEPTEMBER', 10: 'OCTOBER', 11: 'NOVEMBER',
                           12: 'DECEMBER'}
                # this step is required to allow the conversion from hourly to monthly data
                monthly_modelled_data = hourly_modelled_data.resample('M', on='DATE').sum()  # because data is in kWh
                monthly_modelled_data['Month'] = monthly_modelled_data.index.month
                monthly_modelled_data['Month'] = monthly_modelled_data.apply(lambda x: look_up[x['Month']], axis=1)
                monthly_data = monthly_modelled_data.merge(monthly_measured_demand, on='Month')
                # calculate errors
                cv_root_mean_squared_error, normalized_mean_biased_error = calc_errors_per_building(load, monthly_data)
                ind_calib_building, ind_score_building = calc_building_score(cv_root_mean_squared_error, monthly_data,
                                                                             normalized_mean_biased_error)
                # appending list of variables for later use
                number_of_calibrated.append(ind_calib_building)
                list_of_scores.append(ind_score_building)
            n_scenario_calib = sum(number_of_calibrated)
            scenario_score = sum(list_of_scores)
            scenario_name = os.path.basename(scenario)
            # NOTE(review): DataFrame.append is deprecated in recent pandas —
            # verify the pinned pandas version still supports it.
            validation_output = validation_output.append(
                {'scenario': scenario_name, 'calibrated_buildings': n_scenario_calib, 'score': scenario_score},
                ignore_index=True)
        n_calib = validation_output['calibrated_buildings'].sum()
        score = validation_output['score'].sum()
        global_validation_n_calibrated.append(n_calib)
        global_validation_percentage.append((n_calib / number_of_buildings) * 100)
        print('The number of calibrated buildings is', n_calib)
        print('The final score is', score)
        return score
def calc_errors_per_building(load, monthly_data):
    """Compute the ASHRAE Guideline 14 error metrics for one building.

    :param load: name of the modelled load (e.g. 'GRID'), compared against
        the ``load + '_kWh'`` column of ``monthly_data``
    :param monthly_data: DataFrame with one row per period, holding the
        'measurements' column and the modelled ``load + '_kWh'`` column
    :return: tuple ``(cv_rmse, nmbe)`` — CV(RMSE) in % and NMBE in %
    """
    measured = monthly_data['measurements']
    modelled = monthly_data[load + '_kWh']
    biased_error = measured - modelled
    # Normalize by the number of periods actually present (12 for monthly
    # data) instead of a hard-coded 12, so partial years are handled too.
    normalized_mean_biased_error = ((biased_error.sum() / len(monthly_data)) /
                                    measured.mean()) * 100  # %
    print('NMBE:', round(normalized_mean_biased_error, 1))
    # Mean squared error computed directly with pandas (same value as
    # sklearn.metrics.mean_squared_error, without the extra dependency).
    mean_squared_error = (biased_error ** 2).mean()
    root_mean_squared_error = sqrt(mean_squared_error)  # root mean squared error
    cv_root_mean_squared_error = root_mean_squared_error * 100 / measured.mean()
    print('CVRMSE:', round(cv_root_mean_squared_error, 1))
    return cv_root_mean_squared_error, normalized_mean_biased_error
def calc_building_score(cv_root_mean_squared_error, monthly_data, normalized_mean_biased_error):
    """Score one building: a 0/1 calibrated flag, weighted by measured energy.

    A building counts as calibrated when |NMBE| < 5 and CV(RMSE) < 15,
    the ASHRAE Guideline 14 thresholds for monthly data.
    """
    is_calibrated = (abs(normalized_mean_biased_error) < 5
                     and cv_root_mean_squared_error < 15)
    ind_calib_building = 1 if is_calibrated else 0
    # Weight the binary flag by the building's total measured consumption.
    ind_score_building = ind_calib_building * sum(monthly_data['measurements'])
    return ind_calib_building, ind_score_building
def get_measured_building_names(locator):
    """Return the building names listed in the monthly measurements file."""
    measurements = pd.read_csv(locator.get_monthly_measurements())
    return list(measurements.Name.values)
def main(config):
    """
    This is the main entry point to your script. Any parameters used by your script must be present in the ``config``
    parameter. The CLI will call this ``main`` function passing in a ``config`` object after adjusting the configuration
    to reflect parameters passed on the command line - this is how the ArcGIS interface interacts with the scripts
    BTW.
    :param config:
    :type config: cea.config.Configuration
    :return: None (the score computed by ``validation`` is discarded here)
    """
    assert os.path.exists(config.scenario), 'Scenario not found: %s' % config.scenario
    locator = cea.inputlocator.InputLocator(config.scenario, config.plugins)
    measured_building_names = get_measured_building_names(locator)
    # Wrap the single scenario in lists: validation() supports many scenarios.
    scenario_list = [config.scenario]
    locators_of_scenarios = [locator]
    measured_building_names_of_scenarios = [measured_building_names]
    validation(scenario_list,
               locators_of_scenarios,
               measured_building_names_of_scenarios,
               monthly=True,
               load='GRID',
               )
if __name__ == '__main__':
    # Entry point: run the validation with the default CEA configuration.
    main(cea.config.Configuration())
| [
"os.path.exists",
"math.sqrt",
"sklearn.metrics.mean_squared_error",
"os.path.basename",
"pandas.DataFrame",
"pandas.to_datetime"
] | [((7356, 7442), 'sklearn.metrics.mean_squared_error', 'calc_mean_squared_error', (["monthly_data['measurements']", "monthly_data[load + '_kWh']"], {}), "(monthly_data['measurements'], monthly_data[load +\n '_kWh'])\n", (7379, 7442), True, 'from sklearn.metrics import mean_squared_error as calc_mean_squared_error\n'), ((7469, 7493), 'math.sqrt', 'sqrt', (['mean_squared_error'], {}), '(mean_squared_error)\n', (7473, 7493), False, 'from math import sqrt\n'), ((9109, 9140), 'os.path.exists', 'os.path.exists', (['config.scenario'], {}), '(config.scenario)\n', (9123, 9140), False, 'import os\n'), ((3332, 3399), 'pandas.DataFrame', 'pd.DataFrame', ([], {'columns': "['scenario', 'calibrated_buildings', 'score']"}), "(columns=['scenario', 'calibrated_buildings', 'score'])\n", (3344, 3399), True, 'import pandas as pd\n'), ((6419, 6445), 'os.path.basename', 'os.path.basename', (['scenario'], {}), '(scenario)\n', (6435, 6445), False, 'import os\n'), ((4484, 4597), 'pandas.DataFrame', 'pd.DataFrame', (["{'Month': monthly_measured_demand.index.values, 'measurements':\n monthly_measured_demand.values}"], {}), "({'Month': monthly_measured_demand.index.values, 'measurements':\n monthly_measured_demand.values})\n", (4496, 4597), True, 'import pandas as pd\n'), ((4920, 4964), 'pandas.to_datetime', 'pd.to_datetime', (["hourly_modelled_data['DATE']"], {}), "(hourly_modelled_data['DATE'])\n", (4934, 4964), True, 'import pandas as pd\n')] |
#! /usr/bin/env python
from __future__ import absolute_import
import os
import sys
PROJECT_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), os.pardir))
# Make both the project root and the app package importable when this
# file is executed directly as a script.
sys.path.append(PROJECT_DIR)
sys.path.append(os.path.abspath(os.path.join(PROJECT_DIR, "app")))
if __name__ == "__main__":
    # Imported lazily so the sys.path tweaks above are already in effect.
    from app.main import Main
    aws_bucket_name = None
    # An optional first CLI argument selects the AWS bucket to load from.
    if len(sys.argv) > 1:
        aws_bucket_name = sys.argv[1]
    Main().load_images(aws_bucket_name)
| [
"os.path.join",
"os.path.dirname",
"sys.path.append",
"app.main.Main"
] | [((168, 196), 'sys.path.append', 'sys.path.append', (['PROJECT_DIR'], {}), '(PROJECT_DIR)\n', (183, 196), False, 'import sys\n'), ((129, 154), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (144, 154), False, 'import os\n'), ((229, 261), 'os.path.join', 'os.path.join', (['PROJECT_DIR', '"""app"""'], {}), "(PROJECT_DIR, 'app')\n", (241, 261), False, 'import os\n'), ((418, 424), 'app.main.Main', 'Main', ([], {}), '()\n', (422, 424), False, 'from app.main import Main\n')] |
import click
from kryptos.scripts import build_strategy, stress_worker, kill_strat
@click.group(name="strat")
def cli():
    # Container group for the "strat" command family; the subcommands are
    # attached below via add_command.
    pass
cli.add_command(build_strategy.run, "build")
cli.add_command(stress_worker.run, "stress")
cli.add_command(kill_strat.run, "kill")
| [
"click.group"
] | [((86, 111), 'click.group', 'click.group', ([], {'name': '"""strat"""'}), "(name='strat')\n", (97, 111), False, 'import click\n')] |
#!/usr/bin/env python3
"""Radio scheduling program.
Usage:
album_times.py [--host=HOST] PORT
Options:
--host=HOST Hostname of MPD [default: localhost]
-h --help Show this text
Prints out the last scheduling time of every album.
"""
from datetime import datetime
from docopt import docopt
from mpd import MPDClient
def album_sticker_get(client, album, sticker):
    """Fetch an album-level sticker, or None when the album has no tracks."""
    # MPD only implements stickers for songs, so album stickers are stored
    # on the album's first track.
    tracks = client.find("album", album)
    if not tracks:
        return None
    return client.sticker_get("song", tracks[0]["file"], "album_" + sticker)
def list_albums(client):
    """Print every album grouped by its last-scheduled time, oldest first."""
    # Get all albums, dropping the empty tag and the transitions pseudo-album.
    albums = client.list("album")
    all_albums = list(
        filter(lambda a: a not in ["", "Lainchan Radio Transitions"], albums)
    )

    # Group albums by when they were last scheduled
    albums_by_last_scheduled = {}
    for album in all_albums:
        # Get the last scheduled time, defaulting to 0.  The sticker may be
        # missing entirely, in which case album_sticker_get returns None and
        # int() raises TypeError (not ValueError) — catch both.
        try:
            last_scheduled = int(album_sticker_get(client, album, "last_scheduled"))
        except (TypeError, ValueError):
            last_scheduled = 0
        albums_by_last_scheduled.setdefault(last_scheduled, []).append(album)

    # Print each group exactly once, oldest first.  Iterating the sorted dict
    # keys (instead of a per-album list of timestamps) avoids printing a
    # bucket multiple times when several albums share the same timestamp.
    for last_scheduled in sorted(albums_by_last_scheduled):
        dt = datetime.utcfromtimestamp(last_scheduled)
        albums = albums_by_last_scheduled[last_scheduled]
        print("{}: {}".format(dt.strftime("%Y-%m-%d %H:%M:%S"), albums))
if __name__ == "__main__":
    args = docopt(__doc__)
    # PORT arrives as a string from docopt; reject non-numeric values early.
    try:
        args["PORT"] = int(args["PORT"])
    except ValueError:
        print("PORT must be an integer")
        exit(1)
    # Connect to MPD; any connection failure aborts with exit code 2.
    try:
        client = MPDClient()
        client.connect(args["--host"], args["PORT"])
    except Exception as e:
        print(f"could not connect to MPD: {e.args[0]}")
        exit(2)
    list_albums(client)
| [
"datetime.datetime.utcfromtimestamp",
"docopt.docopt",
"mpd.MPDClient"
] | [((1971, 1986), 'docopt.docopt', 'docopt', (['__doc__'], {}), '(__doc__)\n', (1977, 1986), False, 'from docopt import docopt\n'), ((1758, 1799), 'datetime.datetime.utcfromtimestamp', 'datetime.utcfromtimestamp', (['last_scheduled'], {}), '(last_scheduled)\n', (1783, 1799), False, 'from datetime import datetime\n'), ((2145, 2156), 'mpd.MPDClient', 'MPDClient', ([], {}), '()\n', (2154, 2156), False, 'from mpd import MPDClient\n')] |
import argparse
from cartografo import DEFAULT_OBJECT, DEFAULT_TARGET
def __get_argparser():
    """Register all command-line options on the module-level parser."""
    options = (
        ('--k8s-object', 'Output Kubernetes objet: secrets or configmap'),
        ('--target', 'Target file. If it exists, it will be modified'),
        ('files_folder', 'Folder where the actual files are.'),
    )
    for name, help_text in options:
        __parser.add_argument(name, help=help_text)
__parser = argparse.ArgumentParser()
__get_argparser()
# NOTE: the command line is parsed at import time, so importing this module
# from code with its own argv will trigger argparse immediately.
__arguments = __parser.parse_args()
def get_arguments():
    """Return the parsed CLI options, substituting the package defaults for
    any option the user did not provide."""
    return {
        "object": __arguments.k8s_object if __arguments.k8s_object else DEFAULT_OBJECT,
        "target": __arguments.target if __arguments.target else DEFAULT_TARGET,
        "source": __arguments.files_folder
} | [
"argparse.ArgumentParser"
] | [((382, 407), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (405, 407), False, 'import argparse\n')] |
#!/usr/bin/env python
# This is not an officially supported Google product, though support
# will be provided on a best-effort basis.
# Copyright 2018 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
# https://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import ujson
import webapp2
import utilities
class main(webapp2.RequestHandler):
    """Marks a meeting as indexed in the DB and returns the result as JSON."""

    def get(self):
        self.response.headers["Content-Type"] = "application/json"
        # Make sure clients never cache the toggle response.
        self.response.headers.add_header(
            "Cache-Control",
            "no-cache, no-store, must-revalidate, max-age=0"
        )
        self.response.headers.add_header(
            "Expires",
            "0"
        )
        try:
            globalId = self.request.get("gId")
            sqlCmd = "update meetingRegistry set beenIndexed = %s where globalId = %s"
            sqlData = (1, globalId)
            resultList = utilities.dbExecution(sqlCmd, sqlData)
            outputStr = str(resultList)
        except Exception:
            # A bare "except:" would also swallow SystemExit/KeyboardInterrupt;
            # keep the best-effort behaviour, but only for real errors.
            outputStr = None
        resultObj = {}
        resultObj["response"] = outputStr
        self.response.out.write(ujson.dumps(resultObj))
# WSGI application: a single route mapping /toggleIndex to the handler above.
app = webapp2.WSGIApplication([
    ("/toggleIndex", main)], debug = True
)
| [
"ujson.dumps",
"utilities.dbExecution",
"webapp2.WSGIApplication"
] | [((1434, 1495), 'webapp2.WSGIApplication', 'webapp2.WSGIApplication', (["[('/toggleIndex', main)]"], {'debug': '(True)'}), "([('/toggleIndex', main)], debug=True)\n", (1457, 1495), False, 'import webapp2\n'), ((1221, 1259), 'utilities.dbExecution', 'utilities.dbExecution', (['sqlCmd', 'sqlData'], {}), '(sqlCmd, sqlData)\n', (1242, 1259), False, 'import utilities\n'), ((1402, 1424), 'ujson.dumps', 'ujson.dumps', (['resultObj'], {}), '(resultObj)\n', (1413, 1424), False, 'import ujson\n')] |
import os
from bs4 import BeautifulSoup
import html2text
import pandas
data_dir = 'co2-coalition'  # root output folder for the scraped data
data_text_dir = os.path.join(data_dir, 'text')  # per-entry text files live here
data_file_name = 'co2-coalition.csv'  # index CSV written at the end
def make_file_name(index):
    """Return the zero-padded (minimum two digit) base name for an entry."""
    return format(index, '02d')
def save_text(data_dir, file_path, content):
    """Write *content* to data_dir/file_path, creating or overwriting it.

    Uses a context manager so the handle is closed even if the write fails
    (the original left the file open on an exception).
    """
    with open(os.path.join(data_dir, file_path), 'w') as f:
        f.write(content)
def get_text(soup, tag, tag_class, do_strip=False):
    """Convert the first element of *soup* matching tag/class to text.

    Uses the module-level html_converter; when no element matches, the
    string conversion of None is converted, just like the original.
    """
    element = soup.find(tag, tag_class)
    text = html_converter.handle(str(element))
    return text.strip() if do_strip else text
# Shared HTML -> markdown converter; no line wrapping, images dropped.
html_converter = html2text.HTML2Text()
html_converter.body_width = 0
html_converter.ignore_images = True
f = open('html/faq.html', 'r')
content = f.read()
f.close()
faq_soup = BeautifulSoup(content, 'html.parser')
# Columns of the index CSV written at the end.
entries = {
    'id' : [],
    'title' : [],
    'text_file_name' : [],
}
entry_index = 0
# The page intro (before the FAQ accordion) becomes entry 0.
title = html_converter.handle(str(faq_soup.find('span', 'span-title2'))).strip()
content = html_converter.handle(str(faq_soup.find('p', 'p1')))
text_file_name = make_file_name(entry_index) + '.txt'
save_text(data_text_dir, text_file_name, content)
entries['id'].append(entry_index)
entries['title'].append(title)
entries['text_file_name'].append(text_file_name)
entry_index += 1
# Each accordion panel is one FAQ entry.
faq_entries_container = faq_soup.find('div', 'vc_tta-panels-container')
faq_entries = faq_entries_container.find_all('div', 'vc_tta-panel')
print(f'Found {len(faq_entries)} entries')
for entry in faq_entries:
    title = get_text(entry, 'span', 'vc_tta-title-text', do_strip = True).capitalize()
    print(f'  Entry {entry_index} : {title}')
    content = get_text(entry.find('div', 'vc_tta-panel-body'), 'div', 'wpb_wrapper')
    text_file_name = make_file_name(entry_index) + '.txt'
    save_text(data_text_dir, text_file_name, content)
    entries['id'].append(entry_index)
    entries['title'].append(title)
    entries['text_file_name'].append(text_file_name)
    entry_index += 1
d = pandas.DataFrame(entries)
d.to_csv(data_file_name, index = False)
| [
"bs4.BeautifulSoup",
"html2text.HTML2Text",
"os.path.join",
"pandas.DataFrame"
] | [((116, 146), 'os.path.join', 'os.path.join', (['data_dir', '"""text"""'], {}), "(data_dir, 'text')\n", (128, 146), False, 'import os\n'), ((557, 578), 'html2text.HTML2Text', 'html2text.HTML2Text', ([], {}), '()\n', (576, 578), False, 'import html2text\n'), ((719, 756), 'bs4.BeautifulSoup', 'BeautifulSoup', (['content', '"""html.parser"""'], {}), "(content, 'html.parser')\n", (732, 756), False, 'from bs4 import BeautifulSoup\n'), ((1898, 1923), 'pandas.DataFrame', 'pandas.DataFrame', (['entries'], {}), '(entries)\n', (1914, 1923), False, 'import pandas\n'), ((293, 326), 'os.path.join', 'os.path.join', (['data_dir', 'file_path'], {}), '(data_dir, file_path)\n', (305, 326), False, 'import os\n')] |
"""Test functions for FOOOF analysis."""
import numpy as np
from fooof.analysis import *
###################################################################################################
###################################################################################################
def test_get_band_peak_fm(tfm):
    # Extracting the alpha-band peak from a fitted FOOOF model (fixture tfm)
    # should yield non-null data.
    assert np.all(get_band_peak_fm(tfm, (8, 12)))
def test_get_band_peaks_fg(tfg):
    # Same check across a fitted FOOOFGroup (fixture tfg).
    assert np.all(get_band_peaks_fg(tfg, (8, 12)))
def test_get_band_peaks_group():
    """Band extraction across a group returns one row per model."""
    peaks = np.array([[10, 1, 1.8, 0], [13, 1, 2, 2], [14, 2, 4, 2]])

    alpha = get_band_peaks_group(peaks, [8, 12], 3)
    assert alpha.shape == (3, 3)
    assert np.array_equal(alpha[0, :], [10, 1, 1.8])

    beta = get_band_peaks_group(peaks, [12, 16], 3)
    assert beta.shape == (3, 3)
    assert np.array_equal(beta[2, :], [14, 2, 4])
def test_get_band_peak():
    """Cover single, absent, and multiple matches for get_band_peak."""
    peaks = np.array([[10, 1, 1.8], [14, 2, 4]])

    # Exactly one peak falls inside the band.
    assert np.array_equal(get_band_peak(peaks, [10, 12]), [10, 1, 1.8])
    # No peak in the band: NaNs come back.
    assert np.all(np.isnan(get_band_peak(peaks, [4, 8])))
    # Multiple matches, returned all at once ...
    assert np.array_equal(get_band_peak(peaks, [10, 15], ret_one=False),
                          [[10, 1, 1.8], [14, 2, 4]])
    # ... or reduced to a single peak.
    assert np.array_equal(get_band_peak(peaks, [10, 15], ret_one=True), [14, 2, 4])
def test_get_highest_peak():
    """The peak with the largest amplitude column is selected."""
    candidates = np.array([[10, 1, 1.8], [14, 2, 4], [12, 3, 2]])
    assert np.array_equal(get_highest_peak(candidates), [12, 3, 2])
def test_empty_inputs():
    """Empty peak arrays should not crash any of the extraction helpers."""
    no_peaks = np.empty(shape=[0, 3])
    assert np.all(get_band_peak(no_peaks, [8, 12]))
    assert np.all(get_highest_peak(no_peaks))
    no_group_peaks = np.empty(shape=[0, 4])
    assert np.all(get_band_peaks_group(no_group_peaks, [8, 12], 0))
| [
"numpy.array",
"numpy.empty",
"numpy.array_equal"
] | [((507, 564), 'numpy.array', 'np.array', (['[[10, 1, 1.8, 0], [13, 1, 2, 2], [14, 2, 4, 2]]'], {}), '([[10, 1, 1.8, 0], [13, 1, 2, 2], [14, 2, 4, 2]])\n', (515, 564), True, 'import numpy as np\n'), ((658, 698), 'numpy.array_equal', 'np.array_equal', (['out1[0, :]', '[10, 1, 1.8]'], {}), '(out1[0, :], [10, 1, 1.8])\n', (672, 698), True, 'import numpy as np\n'), ((793, 831), 'numpy.array_equal', 'np.array_equal', (['out2[2, :]', '[14, 2, 4]'], {}), '(out2[2, :], [14, 2, 4])\n', (807, 831), True, 'import numpy as np\n'), ((870, 906), 'numpy.array', 'np.array', (['[[10, 1, 1.8], [14, 2, 4]]'], {}), '([[10, 1, 1.8], [14, 2, 4]])\n', (878, 906), True, 'import numpy as np\n'), ((1401, 1449), 'numpy.array', 'np.array', (['[[10, 1, 1.8], [14, 2, 4], [12, 3, 2]]'], {}), '([[10, 1, 1.8], [14, 2, 4], [12, 3, 2]])\n', (1409, 1449), True, 'import numpy as np\n'), ((1549, 1571), 'numpy.empty', 'np.empty', ([], {'shape': '[0, 3]'}), '(shape=[0, 3])\n', (1557, 1571), True, 'import numpy as np\n'), ((1672, 1694), 'numpy.empty', 'np.empty', ([], {'shape': '[0, 4]'}), '(shape=[0, 4])\n', (1680, 1694), True, 'import numpy as np\n')] |
import math
# The code is based on from http://www.cs.cmu.edu/~ckingsf/class/02713-s13/src/mst.py
# Heap item
class HeapItem(object):
"""Represents an item in the heap"""
def __init__(self, key, value):
self.key = key
self.pos = None
self.value = value
# d-ary Heap
class Heap():
def __init__(self, dary=2):
self.heap = []
self.dary = dary
def siftdown(self, node, pos):
""" Move node down in the tree; restore heap condition after deletion
or replacement. """
c = self.minchild(pos)
while c is not None and self.heap[c].key < node.key:
self.heap[pos] = self.heap[c]
self.heap[pos].pos = pos
pos = c
c = self.minchild(c)
self.heap[pos] = node
node.pos = pos
def siftup(self, node, pos):
"""Move hi up in heap until it's parent is smaller than hi.key"""
p = self.parent(pos)
while p is not None and self.heap[p].key > node.key:
self.heap[pos] = self.heap[p]
self.heap[pos].pos = pos
pos = p
p = self.parent(p)
self.heap[pos] = node
node.pos = pos
def findmin(self):
"""Return element with smallest key, or None if heap is empty"""
return self.heap[0] if len(self.heap) > 0 else None
def extractmin(self):
"""Delete the smallest item"""
if len(self.heap) == 0:
return None
i = self.heap[0]
last = self.heap[-1]
del self.heap[-1]
if len(self.heap) > 0:
self.siftdown(last, 0)
return i
def insert(self, key, value):
"""Insert an item into the heap"""
self.heap.append(None)
hi = HeapItem(key,value)
self.siftup(hi, len(self.heap)-1)
return hi
def decreasekey(self, node, newkey):
"""Decrease the key of hi to newkey"""
node.key = newkey
self.siftup(node, node.pos)
def parent(self, pos):
"""Return the position of the parent of pos"""
if pos == 0:
return None
return int(math.ceil(pos / self.dary) - 1)
def children(self, pos):
"""Return a list of children of pos"""
return range(self.dary * pos + 1, min(self.dary * (pos + 1) + 1, len(self.heap)))
def minchild(self, pos):
"""Return the child of pos with the smallest key"""
minpos = minkey = None
for c in self.children(pos):
if minkey == None or self.heap[c].key < minkey:
minkey, minpos = self.heap[c].key, c
return minpos
| [
"math.ceil"
] | [((2177, 2203), 'math.ceil', 'math.ceil', (['(pos / self.dary)'], {}), '(pos / self.dary)\n', (2186, 2203), False, 'import math\n')] |
#!/usr/bin/env python
# coding: utf-8
# In[5]:
import cv2
import numpy as np
imagen = cv2.imread('wheel.png')
gray = cv2.cvtColor(imagen,cv2.COLOR_BGR2GRAY)
_,th = cv2.threshold(gray,100,255,cv2.THRESH_BINARY)
#Para versiones OpenCV3:
img1,contornos1,hierarchy1 = cv2.findContours(th, cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_NONE)
img2,contornos2,hierarchy2 = cv2.findContours(th, cv2.RETR_EXTERNAL,cv2.CHAIN_APPROX_SIMPLE)
cv2.drawContours(imagen, contornos1, -1, (0,0,255), 2)
print ('len(contornos1[2])=',len(contornos1[2]))
print ('len(contornos2[2])=',len(contornos2[2]))
cv2.imshow('imagen',imagen)
cv2.imshow('th',th)
cv2.waitKey(0)
cv2.destroyAllWindows()
# In[ ]:
| [
"cv2.drawContours",
"cv2.threshold",
"cv2.imshow",
"cv2.waitKey",
"cv2.destroyAllWindows",
"cv2.cvtColor",
"cv2.findContours",
"cv2.imread"
] | [((90, 113), 'cv2.imread', 'cv2.imread', (['"""wheel.png"""'], {}), "('wheel.png')\n", (100, 113), False, 'import cv2\n'), ((121, 161), 'cv2.cvtColor', 'cv2.cvtColor', (['imagen', 'cv2.COLOR_BGR2GRAY'], {}), '(imagen, cv2.COLOR_BGR2GRAY)\n', (133, 161), False, 'import cv2\n'), ((168, 216), 'cv2.threshold', 'cv2.threshold', (['gray', '(100)', '(255)', 'cv2.THRESH_BINARY'], {}), '(gray, 100, 255, cv2.THRESH_BINARY)\n', (181, 216), False, 'import cv2\n'), ((269, 331), 'cv2.findContours', 'cv2.findContours', (['th', 'cv2.RETR_EXTERNAL', 'cv2.CHAIN_APPROX_NONE'], {}), '(th, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)\n', (285, 331), False, 'import cv2\n'), ((360, 424), 'cv2.findContours', 'cv2.findContours', (['th', 'cv2.RETR_EXTERNAL', 'cv2.CHAIN_APPROX_SIMPLE'], {}), '(th, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)\n', (376, 424), False, 'import cv2\n'), ((424, 480), 'cv2.drawContours', 'cv2.drawContours', (['imagen', 'contornos1', '(-1)', '(0, 0, 255)', '(2)'], {}), '(imagen, contornos1, -1, (0, 0, 255), 2)\n', (440, 480), False, 'import cv2\n'), ((577, 605), 'cv2.imshow', 'cv2.imshow', (['"""imagen"""', 'imagen'], {}), "('imagen', imagen)\n", (587, 605), False, 'import cv2\n'), ((605, 625), 'cv2.imshow', 'cv2.imshow', (['"""th"""', 'th'], {}), "('th', th)\n", (615, 625), False, 'import cv2\n'), ((625, 639), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (636, 639), False, 'import cv2\n'), ((640, 663), 'cv2.destroyAllWindows', 'cv2.destroyAllWindows', ([], {}), '()\n', (661, 663), False, 'import cv2\n')] |
from django import forms
class URLForm(forms.Form):
siteUrl = forms.CharField(label='Website Address', max_length=100,required=True)
'''
javascriptChoices = ((2,"Keep Javascript",),(1,"Remove Some Javascript"),(0,"Remove All Javascript"))
keepJavascript = forms.ChoiceField(choices=javascriptChoices,label=" Website Javascript")
'''
| [
"django.forms.CharField"
] | [((67, 138), 'django.forms.CharField', 'forms.CharField', ([], {'label': '"""Website Address"""', 'max_length': '(100)', 'required': '(True)'}), "(label='Website Address', max_length=100, required=True)\n", (82, 138), False, 'from django import forms\n')] |
from flexmeasures.app import create as create_app
application = create_app()
| [
"flexmeasures.app.create"
] | [((65, 77), 'flexmeasures.app.create', 'create_app', ([], {}), '()\n', (75, 77), True, 'from flexmeasures.app import create as create_app\n')] |
import numpy as np
import unittest
import coremltools.models.datatypes as datatypes
from coremltools.models import neural_network as neural_network
from coremltools.models import MLModel
from coremltools.models.neural_network.printer import print_network_spec
from coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes import \
remove_disconnected_layers, transform_conv_crop, remove_redundant_transposes
import copy
import pytest
DEBUG = False
np.random.seed(100)
class MLModelPassesTest(unittest.TestCase):
def test_load_constant_remove(self):
input_features = [('data', datatypes.Array(*(3, 4)))]
output_features = [('out', None)]
builder = neural_network.NeuralNetworkBuilder(input_features, output_features, disable_rank5_shape_mapping=True)
builder.add_activation('relu1', 'RELU', 'data', 'relu1')
builder.add_load_constant_nd('const1', 'c1', constant_value=np.ones((5,)), shape=(5,))
builder.add_activation('relu2', 'RELU', 'relu1', 'out')
builder.add_load_constant_nd('const2', 'c2', constant_value=np.ones((5,)), shape=(5,))
builder.add_load_constant_nd('const3', 'c3', constant_value=np.ones((5,)), shape=(5,))
spec = builder.spec
np.testing.assert_equal(5, len(spec.neuralNetwork.layers))
remove_disconnected_layers(spec)
np.testing.assert_equal(2, len(spec.neuralNetwork.layers))
def test_dead_layer_remove(self):
input_features = [('data', datatypes.Array(*(3, 4)))]
output_features = [('out', None)]
builder = neural_network.NeuralNetworkBuilder(input_features, output_features, disable_rank5_shape_mapping=True)
builder.add_activation('relu1', 'RELU', 'data', 'relu1')
builder.add_load_constant_nd('const1', 'c1', constant_value=np.ones((5,)), shape=(5,))
builder.add_load_constant_nd('const2', 'c2', constant_value=np.ones((5,)), shape=(5,))
builder.add_split_nd('splitnd1', 'const2', ['s1', 's2', 's3'], axis=0, num_splits=3)
builder.add_squeeze('squeeze', 's1', 'squeeze_out')
builder.add_activation('relu4', 'RELU', 's2', 'relu4')
builder.add_activation('relu5', 'RELU', 'relu4', 'relu5')
builder.add_load_constant_nd('const3', 'c3', constant_value=np.ones((5,)), shape=(5,))
builder.add_activation('relu2', 'RELU', 'relu1', 'out')
spec = builder.spec
np.testing.assert_equal(9, len(spec.neuralNetwork.layers))
remove_disconnected_layers(spec)
np.testing.assert_equal(2, len(spec.neuralNetwork.layers))
@pytest.mark.xfail
def test_dead_layer_remove_branch(self):
convergence_tolerance = 1e-8
input_features = [('input', datatypes.Array(*(2,)))]
output_features = [('out', None)]
builder = neural_network.NeuralNetworkBuilder(input_features, output_features, disable_rank5_shape_mapping=True)
# add condition to break from the loop, if convergence criterion is met
builder.add_less_than('cond', ['input'], 'cond', alpha=convergence_tolerance)
branch_layer = builder.add_branch('branch_layer', 'cond')
builder_ifbranch = neural_network.NeuralNetworkBuilder(nn_spec=branch_layer.branch.ifBranch)
builder_ifbranch.add_activation('relu1', 'RELU', 'input', 'relu1_out')
builder_ifbranch.add_activation('relu2_out', 'RELU', 'relu1_out', 'relu2_out')
builder_elsebranch = neural_network.NeuralNetworkBuilder(nn_spec=branch_layer.branch.elseBranch)
builder_elsebranch.add_activation('linear1', 'LINEAR', 'input', 'linear1_out')
builder_elsebranch.add_activation('linear2', 'LINEAR', 'linear1_out', 'relu2_out')
builder.add_squeeze('out', 'input', 'out', squeeze_all=True)
mlmodel = MLModel(builder.spec)
data = np.random.rand(2,)
data_dict = {'input': data}
before_pass_out = mlmodel.predict(data_dict)['out']
if DEBUG:
print('\n mlmodel description before remove disconnected layers pass: \n')
print_network_spec(builder.spec, style='coding')
remove_disconnected_layers(builder.spec)
if DEBUG:
print('\n mlmodel description after remove disconnected layers pass: \n')
print_network_spec(builder.spec, style='coding')
mlmodel = MLModel(builder.spec)
after_pass_out = mlmodel.predict(data_dict)['out']
np.testing.assert_almost_equal(before_pass_out, after_pass_out, decimal=2)
np.testing.assert_equal(len(builder.spec.neuralNetwork.layers), 1)
@pytest.mark.xfail
def test_dead_layer_partial_branch(self):
convergence_tolerance = 1e-8
input_features = [('input', datatypes.Array(*(2,)))]
output_features = [('out', None)]
builder = neural_network.NeuralNetworkBuilder(input_features, output_features, disable_rank5_shape_mapping=True)
# add condition to break from the loop, if convergence criterion is met
builder.add_less_than('cond', ['input'], 'cond', alpha=convergence_tolerance)
branch_layer = builder.add_branch('branch_layer', 'cond')
builder_ifbranch = neural_network.NeuralNetworkBuilder(nn_spec=branch_layer.branch.ifBranch)
builder_ifbranch.add_activation('relu1', 'RELU', 'input', 'relu1_out')
builder_ifbranch.add_activation('relu2_out', 'RELU', 'relu1_out', 'relu2_out')
builder_elsebranch = neural_network.NeuralNetworkBuilder(nn_spec=branch_layer.branch.elseBranch)
builder_elsebranch.add_activation('linear1', 'LINEAR', 'input', 'linear1_out')
builder_elsebranch.add_activation('linear_red_1', 'LINEAR', 'input', 'linear_red1_out')
builder_elsebranch.add_activation('linear_red_2', 'LINEAR', 'linear_red1_out', 'linear_red2_out')
builder_elsebranch.add_activation('linear2', 'LINEAR', 'linear1_out', 'relu2_out')
builder.add_squeeze('out', 'relu2_out', 'out', squeeze_all=True)
mlmodel = MLModel(builder.spec)
data = np.random.rand(2,)
data_dict = {'input': data}
before_pass_out = mlmodel.predict(data_dict)['out']
if DEBUG:
print('\n mlmodel description before remove disconnected layers pass: \n')
print_network_spec(builder.spec, style='coding')
old_spec = copy.copy(builder.spec)
remove_disconnected_layers(builder.spec)
if DEBUG:
print('\n mlmodel description after remove disconnected layers pass: \n')
print_network_spec(builder.spec, style='coding')
mlmodel = MLModel(builder.spec)
after_pass_out = mlmodel.predict(data_dict)['out']
np.testing.assert_almost_equal(before_pass_out, after_pass_out, decimal=2)
np.testing.assert_equal(len(old_spec.neuralNetwork.layers[1].branch.ifBranch.layers),
len(builder.spec.neuralNetwork.layers[1].branch.ifBranch.layers))
np.testing.assert_equal(len(builder.spec.neuralNetwork.layers[1].branch.elseBranch.layers), 2)
def test_conv_crop_bn_to_conv_bn_crop(self):
input_features = [('data', datatypes.Array(1, 10, 10))]
output_features = [('out', None)]
builder = neural_network.NeuralNetworkBuilder(input_features, output_features)
W = np.ones((2,10,1,10), dtype=np.float32)
builder.add_convolution(name='conv',
kernel_channels=1,
output_channels=2,
height=2, width=2,
stride_height=1, stride_width=1,
border_mode='valid', groups=1,
W=W,
b=None, has_bias=False,
input_name='data', output_name='conv_out')
builder.add_crop(name='crop',
left=1, right=1, top=1, bottom=1, offset=0,
input_names=['conv_out'],
output_name='crop_out')
builder.add_batchnorm(name='bn',
channels=2,
gamma=np.ones(2,).astype(np.float32),
beta=np.ones(2,).astype(np.float32),
mean=np.ones(2,).astype(np.float32),
variance=np.ones(2,).astype(np.float32),
input_name='crop_out',
output_name='out')
# Conv -> Crop -> BN
spec = builder.spec.neuralNetwork
np.testing.assert_equal('crop', spec.layers[1].WhichOneof('layer'))
np.testing.assert_equal('batchnorm', spec.layers[2].WhichOneof('layer'))
# transform the pattern
transform_conv_crop(builder.spec)
# Conv -> BN -> Crop
np.testing.assert_equal('batchnorm', spec.layers[1].WhichOneof('layer'))
np.testing.assert_equal('crop', spec.layers[2].WhichOneof('layer'))
def test_conv_crop_bn_relu_to_conv_bn_relu_crop(self):
input_features = [('data', datatypes.Array(1, 10, 10))]
output_features = [('out', None)]
builder = neural_network.NeuralNetworkBuilder(input_features, output_features)
W = np.ones((2,10,1,10), dtype=np.float32)
builder.add_convolution(name='conv',
kernel_channels=1,
output_channels=2,
height=2, width=2,
stride_height=1, stride_width=1,
border_mode='valid', groups=1,
W=W,
b=None, has_bias=False,
input_name='data', output_name='conv_out')
builder.add_crop(name='crop',
left=1, right=1, top=1, bottom=1, offset=0,
input_names=['conv_out'],
output_name='crop_out')
builder.add_batchnorm(name='bn',
channels=2,
gamma=np.ones(2,).astype(np.float32),
beta=np.ones(2,).astype(np.float32),
mean=np.ones(2,).astype(np.float32),
variance=np.ones(2,).astype(np.float32),
input_name='crop_out',
output_name='bn_out')
builder.add_activation(name='relu',
non_linearity='RELU',
input_name='bn_out',
output_name='out')
# Conv -> Crop -> BN -> ReLU
spec = builder.spec.neuralNetwork
np.testing.assert_equal('crop', spec.layers[1].WhichOneof('layer'))
np.testing.assert_equal('batchnorm', spec.layers[2].WhichOneof('layer'))
np.testing.assert_equal('activation', spec.layers[3].WhichOneof('layer'))
# transform the pattern
transform_conv_crop(builder.spec)
# Conv -> BN -> ReLU -> Crop
np.testing.assert_equal('batchnorm', spec.layers[1].WhichOneof('layer'))
np.testing.assert_equal('activation', spec.layers[2].WhichOneof('layer'))
np.testing.assert_equal('crop', spec.layers[3].WhichOneof('layer'))
def test_redundant_transposes(self):
def _build_and_test_network(input_size, transpose_layers, expected_layers):
"""
Helper function for testing transpose removal.
Args:
input_size: Size of the input network tensor.
transpose_layers: Array of transpose axes definitions.
expected_layers: Array of indices into transpose_layers indicating
which of the transpose layers should be present after the
graph pass.
"""
input_features = [('data', datatypes.Array(*input_size))]
output_features = [('out', None)]
builder = neural_network.NeuralNetworkBuilder(input_features, output_features)
last_layer = 'data'
for idx, axes in enumerate(transpose_layers):
name = 't{}'.format(idx)
if idx == len(transpose_layers) - 1:
output_name = 'out'
else:
output_name = name + '_out'
builder.add_transpose(name=name,
axes=axes,
input_name=last_layer,
output_name=output_name)
last_layer = output_name
spec = builder.spec.neuralNetwork
# Check the network before the graph pass.
for idx in range(len(transpose_layers)):
np.testing.assert_equal('transpose', spec.layers[idx].WhichOneof('layer'))
# Run the removal pass.
remove_redundant_transposes(builder.spec)
# Verify only the expected layers remain.
np.testing.assert_equal(len(spec.layers), len(expected_layers))
for output_layer_idx, input_layer_idx in enumerate(expected_layers):
np.testing.assert_equal(
'transpose',
spec.layers[output_layer_idx].WhichOneof('layer')
)
np.testing.assert_array_equal(
transpose_layers[input_layer_idx],
spec.layers[output_layer_idx].transpose.axes
)
_build_and_test_network(
input_size=[1, 10, 10],
# These transposes together are the identity.
transpose_layers=[[2, 0, 1], [1, 2, 0]],
expected_layers=[],
)
_build_and_test_network(
input_size=[1, 10, 10],
# These transposes are not inverses.
transpose_layers=[[2, 0, 1], [2, 0, 1]],
expected_layers=[0, 1],
)
_build_and_test_network(
input_size=[1, 1, 10, 10, 3],
# First two are the identity, then an extra.
transpose_layers=[[2, 4, 1, 0, 3], [3, 2, 0, 4, 1], [1, 0, 2, 3, 4]],
expected_layers=[2],
)
_build_and_test_network(
input_size=[1, 1, 10, 10, 3],
# First is okay, next two are the identity.
transpose_layers=[[1, 0, 2, 3, 4], [2, 4, 1, 0, 3], [3, 2, 0, 4, 1]],
expected_layers=[0],
)
# A slightly more complicated test case where there are two transposes
# in topological order, but are actually in parallel in the graph.
builder = neural_network.NeuralNetworkBuilder(
[('data', datatypes.Array(2, 4, 8))],
[('out', None)]
)
last_layer = 'data'
builder.add_transpose(name='t1',
axes=[0, 2, 1],
input_name='data',
output_name='t1')
builder.add_transpose(name='t2',
axes=[0, 2, 1],
input_name='data',
output_name='t2')
builder.add_stack(name='stack',
input_names=['t1', 't2'],
output_name='out')
spec = builder.spec.neuralNetwork
# Run the removal pass.
remove_redundant_transposes(builder.spec)
# Verify nothing was removed.
np.testing.assert_equal(len(spec.layers), 3)
if __name__ == '__main__':
RUN_ALL_TESTS = True
if RUN_ALL_TESTS:
unittest.main()
else:
suite = unittest.TestSuite()
suite.addTest(MLModelPassesTest('test_load_constant_remove'))
unittest.TextTestRunner().run(suite)
| [
"coremltools.models.MLModel",
"coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes.transform_conv_crop",
"unittest.TestSuite",
"numpy.random.rand",
"numpy.ones",
"coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes.remove_disconnected_layers",
"numpy.testing.assert_almost_equal",
"... | [((462, 481), 'numpy.random.seed', 'np.random.seed', (['(100)'], {}), '(100)\n', (476, 481), True, 'import numpy as np\n'), ((691, 797), 'coremltools.models.neural_network.NeuralNetworkBuilder', 'neural_network.NeuralNetworkBuilder', (['input_features', 'output_features'], {'disable_rank5_shape_mapping': '(True)'}), '(input_features, output_features,\n disable_rank5_shape_mapping=True)\n', (726, 797), True, 'from coremltools.models import neural_network as neural_network\n'), ((1311, 1343), 'coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes.remove_disconnected_layers', 'remove_disconnected_layers', (['spec'], {}), '(spec)\n', (1337, 1343), False, 'from coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes import remove_disconnected_layers, transform_conv_crop, remove_redundant_transposes\n'), ((1572, 1678), 'coremltools.models.neural_network.NeuralNetworkBuilder', 'neural_network.NeuralNetworkBuilder', (['input_features', 'output_features'], {'disable_rank5_shape_mapping': '(True)'}), '(input_features, output_features,\n disable_rank5_shape_mapping=True)\n', (1607, 1678), True, 'from coremltools.models import neural_network as neural_network\n'), ((2474, 2506), 'coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes.remove_disconnected_layers', 'remove_disconnected_layers', (['spec'], {}), '(spec)\n', (2500, 2506), False, 'from coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes import remove_disconnected_layers, transform_conv_crop, remove_redundant_transposes\n'), ((2803, 2909), 'coremltools.models.neural_network.NeuralNetworkBuilder', 'neural_network.NeuralNetworkBuilder', (['input_features', 'output_features'], {'disable_rank5_shape_mapping': '(True)'}), '(input_features, output_features,\n disable_rank5_shape_mapping=True)\n', (2838, 2909), True, 'from coremltools.models import neural_network as neural_network\n'), ((3165, 3238), 'coremltools.models.neural_network.NeuralNetworkBuilder', 
'neural_network.NeuralNetworkBuilder', ([], {'nn_spec': 'branch_layer.branch.ifBranch'}), '(nn_spec=branch_layer.branch.ifBranch)\n', (3200, 3238), True, 'from coremltools.models import neural_network as neural_network\n'), ((3434, 3509), 'coremltools.models.neural_network.NeuralNetworkBuilder', 'neural_network.NeuralNetworkBuilder', ([], {'nn_spec': 'branch_layer.branch.elseBranch'}), '(nn_spec=branch_layer.branch.elseBranch)\n', (3469, 3509), True, 'from coremltools.models import neural_network as neural_network\n'), ((3776, 3797), 'coremltools.models.MLModel', 'MLModel', (['builder.spec'], {}), '(builder.spec)\n', (3783, 3797), False, 'from coremltools.models import MLModel\n'), ((3813, 3830), 'numpy.random.rand', 'np.random.rand', (['(2)'], {}), '(2)\n', (3827, 3830), True, 'import numpy as np\n'), ((4102, 4142), 'coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes.remove_disconnected_layers', 'remove_disconnected_layers', (['builder.spec'], {}), '(builder.spec)\n', (4128, 4142), False, 'from coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes import remove_disconnected_layers, transform_conv_crop, remove_redundant_transposes\n'), ((4326, 4347), 'coremltools.models.MLModel', 'MLModel', (['builder.spec'], {}), '(builder.spec)\n', (4333, 4347), False, 'from coremltools.models import MLModel\n'), ((4416, 4490), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['before_pass_out', 'after_pass_out'], {'decimal': '(2)'}), '(before_pass_out, after_pass_out, decimal=2)\n', (4446, 4490), True, 'import numpy as np\n'), ((4796, 4902), 'coremltools.models.neural_network.NeuralNetworkBuilder', 'neural_network.NeuralNetworkBuilder', (['input_features', 'output_features'], {'disable_rank5_shape_mapping': '(True)'}), '(input_features, output_features,\n disable_rank5_shape_mapping=True)\n', (4831, 4902), True, 'from coremltools.models import neural_network as neural_network\n'), ((5158, 5231), 
'coremltools.models.neural_network.NeuralNetworkBuilder', 'neural_network.NeuralNetworkBuilder', ([], {'nn_spec': 'branch_layer.branch.ifBranch'}), '(nn_spec=branch_layer.branch.ifBranch)\n', (5193, 5231), True, 'from coremltools.models import neural_network as neural_network\n'), ((5427, 5502), 'coremltools.models.neural_network.NeuralNetworkBuilder', 'neural_network.NeuralNetworkBuilder', ([], {'nn_spec': 'branch_layer.branch.elseBranch'}), '(nn_spec=branch_layer.branch.elseBranch)\n', (5462, 5502), True, 'from coremltools.models import neural_network as neural_network\n'), ((5975, 5996), 'coremltools.models.MLModel', 'MLModel', (['builder.spec'], {}), '(builder.spec)\n', (5982, 5996), False, 'from coremltools.models import MLModel\n'), ((6012, 6029), 'numpy.random.rand', 'np.random.rand', (['(2)'], {}), '(2)\n', (6026, 6029), True, 'import numpy as np\n'), ((6312, 6335), 'copy.copy', 'copy.copy', (['builder.spec'], {}), '(builder.spec)\n', (6321, 6335), False, 'import copy\n'), ((6344, 6384), 'coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes.remove_disconnected_layers', 'remove_disconnected_layers', (['builder.spec'], {}), '(builder.spec)\n', (6370, 6384), False, 'from coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes import remove_disconnected_layers, transform_conv_crop, remove_redundant_transposes\n'), ((6568, 6589), 'coremltools.models.MLModel', 'MLModel', (['builder.spec'], {}), '(builder.spec)\n', (6575, 6589), False, 'from coremltools.models import MLModel\n'), ((6658, 6732), 'numpy.testing.assert_almost_equal', 'np.testing.assert_almost_equal', (['before_pass_out', 'after_pass_out'], {'decimal': '(2)'}), '(before_pass_out, after_pass_out, decimal=2)\n', (6688, 6732), True, 'import numpy as np\n'), ((7202, 7270), 'coremltools.models.neural_network.NeuralNetworkBuilder', 'neural_network.NeuralNetworkBuilder', (['input_features', 'output_features'], {}), '(input_features, output_features)\n', (7237, 7270), True, 'from 
coremltools.models import neural_network as neural_network\n'), ((7283, 7324), 'numpy.ones', 'np.ones', (['(2, 10, 1, 10)'], {'dtype': 'np.float32'}), '((2, 10, 1, 10), dtype=np.float32)\n', (7290, 7324), True, 'import numpy as np\n'), ((8747, 8780), 'coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes.transform_conv_crop', 'transform_conv_crop', (['builder.spec'], {}), '(builder.spec)\n', (8766, 8780), False, 'from coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes import remove_disconnected_layers, transform_conv_crop, remove_redundant_transposes\n'), ((9151, 9219), 'coremltools.models.neural_network.NeuralNetworkBuilder', 'neural_network.NeuralNetworkBuilder', (['input_features', 'output_features'], {}), '(input_features, output_features)\n', (9186, 9219), True, 'from coremltools.models import neural_network as neural_network\n'), ((9232, 9273), 'numpy.ones', 'np.ones', (['(2, 10, 1, 10)'], {'dtype': 'np.float32'}), '((2, 10, 1, 10), dtype=np.float32)\n', (9239, 9273), True, 'import numpy as np\n'), ((10988, 11021), 'coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes.transform_conv_crop', 'transform_conv_crop', (['builder.spec'], {}), '(builder.spec)\n', (11007, 11021), False, 'from coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes import remove_disconnected_layers, transform_conv_crop, remove_redundant_transposes\n'), ((15388, 15429), 'coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes.remove_redundant_transposes', 'remove_redundant_transposes', (['builder.spec'], {}), '(builder.spec)\n', (15415, 15429), False, 'from coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes import remove_disconnected_layers, transform_conv_crop, remove_redundant_transposes\n'), ((15605, 15620), 'unittest.main', 'unittest.main', ([], {}), '()\n', (15618, 15620), False, 'import unittest\n'), ((15647, 15667), 'unittest.TestSuite', 'unittest.TestSuite', ([], {}), '()\n', (15665, 15667), False, 'import unittest\n'), ((4045, 
4093), 'coremltools.models.neural_network.printer.print_network_spec', 'print_network_spec', (['builder.spec'], {'style': '"""coding"""'}), "(builder.spec, style='coding')\n", (4063, 4093), False, 'from coremltools.models.neural_network.printer import print_network_spec\n'), ((4259, 4307), 'coremltools.models.neural_network.printer.print_network_spec', 'print_network_spec', (['builder.spec'], {'style': '"""coding"""'}), "(builder.spec, style='coding')\n", (4277, 4307), False, 'from coremltools.models.neural_network.printer import print_network_spec\n'), ((6244, 6292), 'coremltools.models.neural_network.printer.print_network_spec', 'print_network_spec', (['builder.spec'], {'style': '"""coding"""'}), "(builder.spec, style='coding')\n", (6262, 6292), False, 'from coremltools.models.neural_network.printer import print_network_spec\n'), ((6501, 6549), 'coremltools.models.neural_network.printer.print_network_spec', 'print_network_spec', (['builder.spec'], {'style': '"""coding"""'}), "(builder.spec, style='coding')\n", (6519, 6549), False, 'from coremltools.models.neural_network.printer import print_network_spec\n'), ((11999, 12067), 'coremltools.models.neural_network.NeuralNetworkBuilder', 'neural_network.NeuralNetworkBuilder', (['input_features', 'output_features'], {}), '(input_features, output_features)\n', (12034, 12067), True, 'from coremltools.models import neural_network as neural_network\n'), ((12919, 12960), 'coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes.remove_redundant_transposes', 'remove_redundant_transposes', (['builder.spec'], {}), '(builder.spec)\n', (12946, 12960), False, 'from coremltools.converters.nnssa.coreml.graph_pass.mlmodel_passes import remove_disconnected_layers, transform_conv_crop, remove_redundant_transposes\n'), ((604, 628), 'coremltools.models.datatypes.Array', 'datatypes.Array', (['*(3, 4)'], {}), '(*(3, 4))\n', (619, 628), True, 'import coremltools.models.datatypes as datatypes\n'), ((927, 940), 'numpy.ones', 'np.ones', 
(['(5,)'], {}), '((5,))\n', (934, 940), True, 'import numpy as np\n'), ((1086, 1099), 'numpy.ones', 'np.ones', (['(5,)'], {}), '((5,))\n', (1093, 1099), True, 'import numpy as np\n'), ((1181, 1194), 'numpy.ones', 'np.ones', (['(5,)'], {}), '((5,))\n', (1188, 1194), True, 'import numpy as np\n'), ((1485, 1509), 'coremltools.models.datatypes.Array', 'datatypes.Array', (['*(3, 4)'], {}), '(*(3, 4))\n', (1500, 1509), True, 'import coremltools.models.datatypes as datatypes\n'), ((1808, 1821), 'numpy.ones', 'np.ones', (['(5,)'], {}), '((5,))\n', (1815, 1821), True, 'import numpy as np\n'), ((1903, 1916), 'numpy.ones', 'np.ones', (['(5,)'], {}), '((5,))\n', (1910, 1916), True, 'import numpy as np\n'), ((2280, 2293), 'numpy.ones', 'np.ones', (['(5,)'], {}), '((5,))\n', (2287, 2293), True, 'import numpy as np\n'), ((2717, 2739), 'coremltools.models.datatypes.Array', 'datatypes.Array', (['*(2,)'], {}), '(*(2,))\n', (2732, 2739), True, 'import coremltools.models.datatypes as datatypes\n'), ((4710, 4732), 'coremltools.models.datatypes.Array', 'datatypes.Array', (['*(2,)'], {}), '(*(2,))\n', (4725, 4732), True, 'import coremltools.models.datatypes as datatypes\n'), ((7113, 7139), 'coremltools.models.datatypes.Array', 'datatypes.Array', (['(1)', '(10)', '(10)'], {}), '(1, 10, 10)\n', (7128, 7139), True, 'import coremltools.models.datatypes as datatypes\n'), ((9062, 9088), 'coremltools.models.datatypes.Array', 'datatypes.Array', (['(1)', '(10)', '(10)'], {}), '(1, 10, 10)\n', (9077, 9088), True, 'import coremltools.models.datatypes as datatypes\n'), ((13350, 13465), 'numpy.testing.assert_array_equal', 'np.testing.assert_array_equal', (['transpose_layers[input_layer_idx]', 'spec.layers[output_layer_idx].transpose.axes'], {}), '(transpose_layers[input_layer_idx], spec.\n layers[output_layer_idx].transpose.axes)\n', (13379, 13465), True, 'import numpy as np\n'), ((15746, 15771), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {}), '()\n', (15769, 15771), False, 'import 
unittest\n'), ((11900, 11928), 'coremltools.models.datatypes.Array', 'datatypes.Array', (['*input_size'], {}), '(*input_size)\n', (11915, 11928), True, 'import coremltools.models.datatypes as datatypes\n'), ((14707, 14731), 'coremltools.models.datatypes.Array', 'datatypes.Array', (['(2)', '(4)', '(8)'], {}), '(2, 4, 8)\n', (14722, 14731), True, 'import coremltools.models.datatypes as datatypes\n'), ((8139, 8149), 'numpy.ones', 'np.ones', (['(2)'], {}), '(2)\n', (8146, 8149), True, 'import numpy as np\n'), ((8206, 8216), 'numpy.ones', 'np.ones', (['(2)'], {}), '(2)\n', (8213, 8216), True, 'import numpy as np\n'), ((8273, 8283), 'numpy.ones', 'np.ones', (['(2)'], {}), '(2)\n', (8280, 8283), True, 'import numpy as np\n'), ((8344, 8354), 'numpy.ones', 'np.ones', (['(2)'], {}), '(2)\n', (8351, 8354), True, 'import numpy as np\n'), ((10088, 10098), 'numpy.ones', 'np.ones', (['(2)'], {}), '(2)\n', (10095, 10098), True, 'import numpy as np\n'), ((10155, 10165), 'numpy.ones', 'np.ones', (['(2)'], {}), '(2)\n', (10162, 10165), True, 'import numpy as np\n'), ((10222, 10232), 'numpy.ones', 'np.ones', (['(2)'], {}), '(2)\n', (10229, 10232), True, 'import numpy as np\n'), ((10293, 10303), 'numpy.ones', 'np.ones', (['(2)'], {}), '(2)\n', (10300, 10303), True, 'import numpy as np\n')] |
# Generated by Django 2.1.7 on 2019-04-03 11:56
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('accounting_tech', '0017_auto_20190403_1434'),
]
operations = [
migrations.AlterField(
model_name='request_to_repair',
name='inventory_number',
field=models.ForeignKey(db_column='inventory_number', null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounting_tech.Equipment', verbose_name='ИНВ №'),
),
]
| [
"django.db.models.ForeignKey"
] | [((398, 565), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'db_column': '"""inventory_number"""', 'null': '(True)', 'on_delete': 'django.db.models.deletion.SET_NULL', 'to': '"""accounting_tech.Equipment"""', 'verbose_name': '"""ИНВ №"""'}), "(db_column='inventory_number', null=True, on_delete=django\n .db.models.deletion.SET_NULL, to='accounting_tech.Equipment',\n verbose_name='ИНВ №')\n", (415, 565), False, 'from django.db import migrations, models\n')] |
from unittest import TestCase
from parameterized import parameterized
from scheduling.schedule_config import ScheduleConfig
from utils import date_utils
def to_datetime(short_datetime_string):
dt_string = short_datetime_string + ':0.000000Z'
return date_utils.parse_iso_datetime(dt_string.replace(' ', 'T'))
class TestGetNextTime(TestCase):
    """Table-driven tests for ``ScheduleConfig.get_next_time``.

    Each row freezes "now" (via ``date_utils._mocked_now``), builds a
    repeatable schedule from a start datetime, a repeat period/unit and an
    optional weekday list, and asserts the computed next fire time.
    """
    @parameterized.expand([
        # Columns: (now, start, repeat_period, repeat_unit, expected_next[, weekdays])
        ('2020-03-19 11:30', '2020-03-15 16:13', 1, 'days', '2020-03-19 16:13'),
        ('2020-03-19 17:30', '2020-03-15 16:13', 1, 'days', '2020-03-20 16:13'),
        ('2020-03-15 11:30', '2020-03-15 16:13', 1, 'days', '2020-03-15 16:13'),
        ('2020-03-14 11:30', '2020-03-15 16:13', 1, 'days', '2020-03-15 16:13'),
        ('2020-03-15 16:13', '2020-03-15 16:13', 1, 'days', '2020-03-15 16:13'),
        ('2020-03-15 16:14', '2020-03-15 16:13', 1, 'days', '2020-03-16 16:13'),
        ('2020-03-19 11:30', '2020-03-15 16:13', 2, 'days', '2020-03-19 16:13'),
        ('2020-03-20 11:30', '2020-03-15 16:13', 2, 'days', '2020-03-21 16:13'),
        ('2020-03-19 16:13', '2020-03-15 16:13', 2, 'days', '2020-03-19 16:13'),
        ('2020-03-18 11:30', '2020-03-15 16:13', 5, 'days', '2020-03-20 16:13'),
        ('2020-03-20 11:30', '2020-03-15 16:13', 24, 'days', '2020-04-08 16:13'),
        ('2020-04-09 11:30', '2020-03-15 16:13', 24, 'days', '2020-05-02 16:13'),
        ('2020-03-19 11:30', '2020-03-15 16:13', 1, 'hours', '2020-03-19 12:13'),
        ('2020-03-19 17:30', '2020-03-15 16:13', 1, 'hours', '2020-03-19 18:13'),
        ('2020-03-15 11:30', '2020-03-15 16:13', 1, 'hours', '2020-03-15 16:13'),
        ('2020-03-14 11:30', '2020-03-15 16:13', 1, 'hours', '2020-03-15 16:13'),
        ('2020-03-15 16:13', '2020-03-15 16:13', 1, 'hours', '2020-03-15 16:13'),
        ('2020-03-15 16:14', '2020-03-15 16:13', 1, 'hours', '2020-03-15 17:13'),
        ('2022-01-02 06:30', '2022-01-01 04:14', 10, 'minutes', '2022-01-02 06:34'),
        # big difference between start and now
        ('2021-12-31 02:30', '2019-01-01 01:31', 10, 'minutes', '2021-12-31 02:31'),
        ('2023-08-29 16:14', '2020-03-15 16:13', 1, 'hours', '2023-08-29 17:13'),
        ('2020-03-19 10:30', '2020-03-15 16:13', 2, 'hours', '2020-03-19 12:13'),
        ('2020-03-19 11:30', '2020-03-15 16:13', 2, 'hours', '2020-03-19 12:13'),
        ('2020-03-19 16:13', '2020-03-15 16:13', 2, 'hours', '2020-03-19 16:13'),
        ('2020-03-18 11:30', '2020-03-15 16:13', 5, 'hours', '2020-03-18 14:13'),
        ('2020-03-20 11:30', '2020-03-15 16:13', 24, 'hours', '2020-03-20 16:13'),
        ('2020-04-09 17:30', '2020-03-15 16:13', 24, 'hours', '2020-04-10 16:13'),
        ('2020-03-19 11:30', '2020-03-15 16:13', 1, 'months', '2020-04-15 16:13'),
        ('2020-03-19 17:30', '2020-03-15 16:13', 1, 'months', '2020-04-15 16:13'),
        ('2020-03-15 11:30', '2020-03-15 16:13', 1, 'months', '2020-03-15 16:13'),
        ('2020-03-14 11:30', '2020-03-15 16:13', 1, 'months', '2020-03-15 16:13'),
        ('2020-03-15 16:13', '2020-03-15 16:13', 1, 'months', '2020-03-15 16:13'),
        ('2020-03-15 16:14', '2020-03-15 16:13', 1, 'months', '2020-04-15 16:13'),
        ('2020-04-01 16:11', '2020-03-31 16:13', 1, 'months', '2020-04-30 16:13'),
        ('2021-01-31 20:00', '2021-01-31 16:13', 1, 'months', '2021-02-28 16:13'),  # Roll to February
        ('2020-01-31 20:00', '2020-01-31 16:13', 1, 'months', '2020-02-29 16:13'),  # Roll to February leap year
        ('2020-03-19 10:30', '2020-03-15 16:13', 2, 'months', '2020-05-15 16:13'),
        ('2020-04-19 11:30', '2020-03-15 16:13', 2, 'months', '2020-05-15 16:13'),
        ('2020-03-15 16:13', '2020-03-15 16:13', 2, 'months', '2020-03-15 16:13'),
        ('2020-04-01 16:11', '2020-03-31 16:13', 2, 'months', '2020-05-31 16:13'),
        ('2020-03-18 11:30', '2020-03-15 16:13', 5, 'months', '2020-08-15 16:13'),
        ('2020-08-18 11:30', '2020-03-15 16:13', 5, 'months', '2021-01-15 16:13'),
        ('2021-01-18 11:30', '2020-03-15 16:13', 5, 'months', '2021-06-15 16:13'),
        ('2020-03-16 11:30', '2020-03-15 16:13', 13, 'months', '2021-04-15 16:13'),
        ('2020-03-19 11:30', '2020-03-15 16:13', 1, 'weeks', '2020-03-20 16:13', ['monday', 'friday']),
        ('2020-03-15 11:30', '2020-03-15 16:13', 1, 'weeks', '2020-03-16 16:13', ['monday', 'friday']),
        ('2020-03-16 11:30', '2020-03-15 16:13', 1, 'weeks', '2020-03-16 16:13', ['monday', 'friday']),
        ('2020-03-16 16:30', '2020-03-15 16:13', 1, 'weeks', '2020-03-20 16:13', ['monday', 'friday']),
        ('2020-03-20 11:30', '2020-03-15 16:13', 1, 'weeks', '2020-03-20 16:13', ['monday', 'friday']),
        ('2020-04-04 11:30', '2020-03-15 16:13', 1, 'weeks', '2020-04-06 16:13', ['monday', 'friday']),
        ('2020-04-07 11:30', '2020-03-15 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday', 'friday']),
        ('2020-03-16 16:13', '2020-03-16 16:13', 1, 'weeks', '2020-03-16 16:13', ['monday', 'friday']),
        ('2020-03-16 16:14', '2020-03-16 16:13', 1, 'weeks', '2020-03-20 16:13', ['monday', 'friday']),
        # Test for testing start date on different weekdays, now tuesday
        ('2020-04-07 1:30', '2020-03-15 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-07 2:30', '2020-03-16 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-07 3:30', '2020-03-17 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-07 4:30', '2020-03-18 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-07 5:30', '2020-03-19 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-07 6:30', '2020-03-20 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-07 7:30', '2020-03-21 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']),
        # Test for testing start date on different weekdays, now thursday
        ('2020-04-09 1:30', '2020-03-15 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-09 2:30', '2020-03-16 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-09 3:30', '2020-03-17 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-09 4:30', '2020-03-18 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-09 5:30', '2020-03-19 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-09 6:30', '2020-03-20 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-09 7:30', '2020-03-21 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']),
        # Test for testing start date on different weekdays, now saturday
        ('2020-04-11 1:30', '2020-03-15 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-11 2:30', '2020-03-16 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-11 3:30', '2020-03-17 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-11 4:30', '2020-03-18 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-11 5:30', '2020-03-19 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-11 6:30', '2020-03-20 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-11 7:30', '2020-03-21 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']),
        # Test for testing start date on different weekdays, now monday
        ('2020-04-13 1:30', '2020-03-15 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-13 2:30', '2020-03-16 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-13 3:30', '2020-03-17 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-13 4:30', '2020-03-18 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-13 5:30', '2020-03-19 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-13 6:30', '2020-03-20 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']),
        ('2020-04-13 7:30', '2020-03-21 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']),
        # Test for testing start date on different weekdays, now wednesday, when larger interval
        ('2020-09-16 1:30', '2020-03-14 16:13', 1, 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']),
        ('2020-09-16 2:30', '2020-03-15 16:13', 1, 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']),
        ('2020-09-16 3:30', '2020-03-16 16:13', 1, 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']),
        ('2020-09-16 4:30', '2020-03-17 16:13', 1, 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']),
        ('2020-09-16 5:30', '2020-03-18 16:13', 1, 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']),
        ('2020-09-16 6:30', '2020-03-19 16:13', 1, 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']),
        ('2020-09-16 7:30', '2020-03-20 16:13', 1, 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']),
        ('2020-03-16 16:30', '2020-03-15 16:13', 1, 'weeks', '2020-03-18 16:13', ['wednesday']),
        ('2020-03-19 11:30', '2020-03-15 16:13', 2, 'weeks', '2020-03-23 16:13', ['monday', 'friday']),
        ('2020-03-24 11:30', '2020-03-15 16:13', 2, 'weeks', '2020-03-27 16:13', ['monday', 'friday']),
        ('2020-06-07 17:30', '2020-03-15 16:13', 2, 'weeks', '2020-06-15 16:13', ['monday', 'friday']),
        ('2020-06-07 17:30', '2020-03-15 16:13', 2, 'weeks', '2020-06-16 16:13', ['tuesday', 'wednesday']),
    ])
    def test_next_day_when_repeatable(self, now_dt, start, period, unit, expected, weekdays=None):
        """Freeze "now", build the schedule, and assert the computed next time."""
        date_utils._mocked_now = to_datetime(now_dt)
        config = ScheduleConfig(True, to_datetime(start))
        config.repeat_period = period
        config.repeat_unit = unit
        config.weekdays = weekdays
        next_time = config.get_next_time()
        self.assertEqual(to_datetime(expected), next_time)
    def tearDown(self) -> None:
        # Undo the "now" freeze so other test modules see the real clock.
        super().tearDown()
        date_utils._mocked_now = None
| [
"parameterized.parameterized.expand"
] | [((360, 9252), 'parameterized.parameterized.expand', 'parameterized.expand', (["[('2020-03-19 11:30', '2020-03-15 16:13', 1, 'days', '2020-03-19 16:13'), (\n '2020-03-19 17:30', '2020-03-15 16:13', 1, 'days', '2020-03-20 16:13'),\n ('2020-03-15 11:30', '2020-03-15 16:13', 1, 'days', '2020-03-15 16:13'),\n ('2020-03-14 11:30', '2020-03-15 16:13', 1, 'days', '2020-03-15 16:13'),\n ('2020-03-15 16:13', '2020-03-15 16:13', 1, 'days', '2020-03-15 16:13'),\n ('2020-03-15 16:14', '2020-03-15 16:13', 1, 'days', '2020-03-16 16:13'),\n ('2020-03-19 11:30', '2020-03-15 16:13', 2, 'days', '2020-03-19 16:13'),\n ('2020-03-20 11:30', '2020-03-15 16:13', 2, 'days', '2020-03-21 16:13'),\n ('2020-03-19 16:13', '2020-03-15 16:13', 2, 'days', '2020-03-19 16:13'),\n ('2020-03-18 11:30', '2020-03-15 16:13', 5, 'days', '2020-03-20 16:13'),\n ('2020-03-20 11:30', '2020-03-15 16:13', 24, 'days', '2020-04-08 16:13'\n ), ('2020-04-09 11:30', '2020-03-15 16:13', 24, 'days',\n '2020-05-02 16:13'), ('2020-03-19 11:30', '2020-03-15 16:13', 1,\n 'hours', '2020-03-19 12:13'), ('2020-03-19 17:30', '2020-03-15 16:13', \n 1, 'hours', '2020-03-19 18:13'), ('2020-03-15 11:30',\n '2020-03-15 16:13', 1, 'hours', '2020-03-15 16:13'), (\n '2020-03-14 11:30', '2020-03-15 16:13', 1, 'hours', '2020-03-15 16:13'),\n ('2020-03-15 16:13', '2020-03-15 16:13', 1, 'hours', '2020-03-15 16:13'\n ), ('2020-03-15 16:14', '2020-03-15 16:13', 1, 'hours',\n '2020-03-15 17:13'), ('2022-01-02 06:30', '2022-01-01 04:14', 10,\n 'minutes', '2022-01-02 06:34'), ('2021-12-31 02:30', '2019-01-01 01:31',\n 10, 'minutes', '2021-12-31 02:31'), ('2023-08-29 16:14',\n '2020-03-15 16:13', 1, 'hours', '2023-08-29 17:13'), (\n '2020-03-19 10:30', '2020-03-15 16:13', 2, 'hours', '2020-03-19 12:13'),\n ('2020-03-19 11:30', '2020-03-15 16:13', 2, 'hours', '2020-03-19 12:13'\n ), ('2020-03-19 16:13', '2020-03-15 16:13', 2, 'hours',\n '2020-03-19 16:13'), ('2020-03-18 11:30', '2020-03-15 16:13', 5,\n 'hours', '2020-03-18 14:13'), 
('2020-03-20 11:30', '2020-03-15 16:13', \n 24, 'hours', '2020-03-20 16:13'), ('2020-04-09 17:30',\n '2020-03-15 16:13', 24, 'hours', '2020-04-10 16:13'), (\n '2020-03-19 11:30', '2020-03-15 16:13', 1, 'months', '2020-04-15 16:13'\n ), ('2020-03-19 17:30', '2020-03-15 16:13', 1, 'months',\n '2020-04-15 16:13'), ('2020-03-15 11:30', '2020-03-15 16:13', 1,\n 'months', '2020-03-15 16:13'), ('2020-03-14 11:30', '2020-03-15 16:13',\n 1, 'months', '2020-03-15 16:13'), ('2020-03-15 16:13',\n '2020-03-15 16:13', 1, 'months', '2020-03-15 16:13'), (\n '2020-03-15 16:14', '2020-03-15 16:13', 1, 'months', '2020-04-15 16:13'\n ), ('2020-04-01 16:11', '2020-03-31 16:13', 1, 'months',\n '2020-04-30 16:13'), ('2021-01-31 20:00', '2021-01-31 16:13', 1,\n 'months', '2021-02-28 16:13'), ('2020-01-31 20:00', '2020-01-31 16:13',\n 1, 'months', '2020-02-29 16:13'), ('2020-03-19 10:30',\n '2020-03-15 16:13', 2, 'months', '2020-05-15 16:13'), (\n '2020-04-19 11:30', '2020-03-15 16:13', 2, 'months', '2020-05-15 16:13'\n ), ('2020-03-15 16:13', '2020-03-15 16:13', 2, 'months',\n '2020-03-15 16:13'), ('2020-04-01 16:11', '2020-03-31 16:13', 2,\n 'months', '2020-05-31 16:13'), ('2020-03-18 11:30', '2020-03-15 16:13',\n 5, 'months', '2020-08-15 16:13'), ('2020-08-18 11:30',\n '2020-03-15 16:13', 5, 'months', '2021-01-15 16:13'), (\n '2021-01-18 11:30', '2020-03-15 16:13', 5, 'months', '2021-06-15 16:13'\n ), ('2020-03-16 11:30', '2020-03-15 16:13', 13, 'months',\n '2021-04-15 16:13'), ('2020-03-19 11:30', '2020-03-15 16:13', 1,\n 'weeks', '2020-03-20 16:13', ['monday', 'friday']), ('2020-03-15 11:30',\n '2020-03-15 16:13', 1, 'weeks', '2020-03-16 16:13', ['monday', 'friday'\n ]), ('2020-03-16 11:30', '2020-03-15 16:13', 1, 'weeks',\n '2020-03-16 16:13', ['monday', 'friday']), ('2020-03-16 16:30',\n '2020-03-15 16:13', 1, 'weeks', '2020-03-20 16:13', ['monday', 'friday'\n ]), ('2020-03-20 11:30', '2020-03-15 16:13', 1, 'weeks',\n '2020-03-20 16:13', ['monday', 'friday']), ('2020-04-04 11:30',\n 
'2020-03-15 16:13', 1, 'weeks', '2020-04-06 16:13', ['monday', 'friday'\n ]), ('2020-04-07 11:30', '2020-03-15 16:13', 1, 'weeks',\n '2020-04-10 16:13', ['monday', 'friday']), ('2020-03-16 16:13',\n '2020-03-16 16:13', 1, 'weeks', '2020-03-16 16:13', ['monday', 'friday'\n ]), ('2020-03-16 16:14', '2020-03-16 16:13', 1, 'weeks',\n '2020-03-20 16:13', ['monday', 'friday']), ('2020-04-07 1:30',\n '2020-03-15 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-07 2:30', '2020-03-16 16:13', 1,\n 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-07 3:30', '2020-03-17 16:13', 1, 'weeks', '2020-04-08 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-07 4:30',\n '2020-03-18 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-07 5:30', '2020-03-19 16:13', 1,\n 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-07 6:30', '2020-03-20 16:13', 1, 'weeks', '2020-04-08 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-07 7:30',\n '2020-03-21 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-09 1:30', '2020-03-15 16:13', 1,\n 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-09 2:30', '2020-03-16 16:13', 1, 'weeks', '2020-04-10 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-09 3:30',\n '2020-03-17 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-09 4:30', '2020-03-18 16:13', 1,\n 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-09 5:30', '2020-03-19 16:13', 1, 'weeks', '2020-04-10 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-09 6:30',\n '2020-03-20 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-09 7:30', '2020-03-21 16:13', 1,\n 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-11 1:30', '2020-03-15 16:13', 1, 'weeks', 
'2020-04-13 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-11 2:30',\n '2020-03-16 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-11 3:30', '2020-03-17 16:13', 1,\n 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-11 4:30', '2020-03-18 16:13', 1, 'weeks', '2020-04-13 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-11 5:30',\n '2020-03-19 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-11 6:30', '2020-03-20 16:13', 1,\n 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-11 7:30', '2020-03-21 16:13', 1, 'weeks', '2020-04-13 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-13 1:30',\n '2020-03-15 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-13 2:30', '2020-03-16 16:13', 1,\n 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-13 3:30', '2020-03-17 16:13', 1, 'weeks', '2020-04-13 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-13 4:30',\n '2020-03-18 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-13 5:30', '2020-03-19 16:13', 1,\n 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-13 6:30', '2020-03-20 16:13', 1, 'weeks', '2020-04-13 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-13 7:30',\n '2020-03-21 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-09-16 1:30', '2020-03-14 16:13', 1,\n 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']), (\n '2020-09-16 2:30', '2020-03-15 16:13', 1, 'weeks', '2020-09-19 16:13',\n ['tuesday', 'saturday']), ('2020-09-16 3:30', '2020-03-16 16:13', 1,\n 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']), (\n '2020-09-16 4:30', '2020-03-17 16:13', 1, 'weeks', '2020-09-19 16:13',\n ['tuesday', 'saturday']), ('2020-09-16 5:30', '2020-03-18 16:13', 1,\n 'weeks', '2020-09-19 16:13', 
['tuesday', 'saturday']), (\n '2020-09-16 6:30', '2020-03-19 16:13', 1, 'weeks', '2020-09-19 16:13',\n ['tuesday', 'saturday']), ('2020-09-16 7:30', '2020-03-20 16:13', 1,\n 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']), (\n '2020-03-16 16:30', '2020-03-15 16:13', 1, 'weeks', '2020-03-18 16:13',\n ['wednesday']), ('2020-03-19 11:30', '2020-03-15 16:13', 2, 'weeks',\n '2020-03-23 16:13', ['monday', 'friday']), ('2020-03-24 11:30',\n '2020-03-15 16:13', 2, 'weeks', '2020-03-27 16:13', ['monday', 'friday'\n ]), ('2020-06-07 17:30', '2020-03-15 16:13', 2, 'weeks',\n '2020-06-15 16:13', ['monday', 'friday']), ('2020-06-07 17:30',\n '2020-03-15 16:13', 2, 'weeks', '2020-06-16 16:13', ['tuesday',\n 'wednesday'])]"], {}), "([('2020-03-19 11:30', '2020-03-15 16:13', 1, 'days',\n '2020-03-19 16:13'), ('2020-03-19 17:30', '2020-03-15 16:13', 1, 'days',\n '2020-03-20 16:13'), ('2020-03-15 11:30', '2020-03-15 16:13', 1, 'days',\n '2020-03-15 16:13'), ('2020-03-14 11:30', '2020-03-15 16:13', 1, 'days',\n '2020-03-15 16:13'), ('2020-03-15 16:13', '2020-03-15 16:13', 1, 'days',\n '2020-03-15 16:13'), ('2020-03-15 16:14', '2020-03-15 16:13', 1, 'days',\n '2020-03-16 16:13'), ('2020-03-19 11:30', '2020-03-15 16:13', 2, 'days',\n '2020-03-19 16:13'), ('2020-03-20 11:30', '2020-03-15 16:13', 2, 'days',\n '2020-03-21 16:13'), ('2020-03-19 16:13', '2020-03-15 16:13', 2, 'days',\n '2020-03-19 16:13'), ('2020-03-18 11:30', '2020-03-15 16:13', 5, 'days',\n '2020-03-20 16:13'), ('2020-03-20 11:30', '2020-03-15 16:13', 24,\n 'days', '2020-04-08 16:13'), ('2020-04-09 11:30', '2020-03-15 16:13', \n 24, 'days', '2020-05-02 16:13'), ('2020-03-19 11:30',\n '2020-03-15 16:13', 1, 'hours', '2020-03-19 12:13'), (\n '2020-03-19 17:30', '2020-03-15 16:13', 1, 'hours', '2020-03-19 18:13'),\n ('2020-03-15 11:30', '2020-03-15 16:13', 1, 'hours', '2020-03-15 16:13'\n ), ('2020-03-14 11:30', '2020-03-15 16:13', 1, 'hours',\n '2020-03-15 16:13'), ('2020-03-15 16:13', '2020-03-15 16:13', 1,\n 
'hours', '2020-03-15 16:13'), ('2020-03-15 16:14', '2020-03-15 16:13', \n 1, 'hours', '2020-03-15 17:13'), ('2022-01-02 06:30',\n '2022-01-01 04:14', 10, 'minutes', '2022-01-02 06:34'), (\n '2021-12-31 02:30', '2019-01-01 01:31', 10, 'minutes',\n '2021-12-31 02:31'), ('2023-08-29 16:14', '2020-03-15 16:13', 1,\n 'hours', '2023-08-29 17:13'), ('2020-03-19 10:30', '2020-03-15 16:13', \n 2, 'hours', '2020-03-19 12:13'), ('2020-03-19 11:30',\n '2020-03-15 16:13', 2, 'hours', '2020-03-19 12:13'), (\n '2020-03-19 16:13', '2020-03-15 16:13', 2, 'hours', '2020-03-19 16:13'),\n ('2020-03-18 11:30', '2020-03-15 16:13', 5, 'hours', '2020-03-18 14:13'\n ), ('2020-03-20 11:30', '2020-03-15 16:13', 24, 'hours',\n '2020-03-20 16:13'), ('2020-04-09 17:30', '2020-03-15 16:13', 24,\n 'hours', '2020-04-10 16:13'), ('2020-03-19 11:30', '2020-03-15 16:13', \n 1, 'months', '2020-04-15 16:13'), ('2020-03-19 17:30',\n '2020-03-15 16:13', 1, 'months', '2020-04-15 16:13'), (\n '2020-03-15 11:30', '2020-03-15 16:13', 1, 'months', '2020-03-15 16:13'\n ), ('2020-03-14 11:30', '2020-03-15 16:13', 1, 'months',\n '2020-03-15 16:13'), ('2020-03-15 16:13', '2020-03-15 16:13', 1,\n 'months', '2020-03-15 16:13'), ('2020-03-15 16:14', '2020-03-15 16:13',\n 1, 'months', '2020-04-15 16:13'), ('2020-04-01 16:11',\n '2020-03-31 16:13', 1, 'months', '2020-04-30 16:13'), (\n '2021-01-31 20:00', '2021-01-31 16:13', 1, 'months', '2021-02-28 16:13'\n ), ('2020-01-31 20:00', '2020-01-31 16:13', 1, 'months',\n '2020-02-29 16:13'), ('2020-03-19 10:30', '2020-03-15 16:13', 2,\n 'months', '2020-05-15 16:13'), ('2020-04-19 11:30', '2020-03-15 16:13',\n 2, 'months', '2020-05-15 16:13'), ('2020-03-15 16:13',\n '2020-03-15 16:13', 2, 'months', '2020-03-15 16:13'), (\n '2020-04-01 16:11', '2020-03-31 16:13', 2, 'months', '2020-05-31 16:13'\n ), ('2020-03-18 11:30', '2020-03-15 16:13', 5, 'months',\n '2020-08-15 16:13'), ('2020-08-18 11:30', '2020-03-15 16:13', 5,\n 'months', '2021-01-15 16:13'), ('2021-01-18 11:30', 
'2020-03-15 16:13',\n 5, 'months', '2021-06-15 16:13'), ('2020-03-16 11:30',\n '2020-03-15 16:13', 13, 'months', '2021-04-15 16:13'), (\n '2020-03-19 11:30', '2020-03-15 16:13', 1, 'weeks', '2020-03-20 16:13',\n ['monday', 'friday']), ('2020-03-15 11:30', '2020-03-15 16:13', 1,\n 'weeks', '2020-03-16 16:13', ['monday', 'friday']), ('2020-03-16 11:30',\n '2020-03-15 16:13', 1, 'weeks', '2020-03-16 16:13', ['monday', 'friday'\n ]), ('2020-03-16 16:30', '2020-03-15 16:13', 1, 'weeks',\n '2020-03-20 16:13', ['monday', 'friday']), ('2020-03-20 11:30',\n '2020-03-15 16:13', 1, 'weeks', '2020-03-20 16:13', ['monday', 'friday'\n ]), ('2020-04-04 11:30', '2020-03-15 16:13', 1, 'weeks',\n '2020-04-06 16:13', ['monday', 'friday']), ('2020-04-07 11:30',\n '2020-03-15 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday', 'friday'\n ]), ('2020-03-16 16:13', '2020-03-16 16:13', 1, 'weeks',\n '2020-03-16 16:13', ['monday', 'friday']), ('2020-03-16 16:14',\n '2020-03-16 16:13', 1, 'weeks', '2020-03-20 16:13', ['monday', 'friday'\n ]), ('2020-04-07 1:30', '2020-03-15 16:13', 1, 'weeks',\n '2020-04-08 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-07 2:30', '2020-03-16 16:13', 1, 'weeks', '2020-04-08 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-07 3:30',\n '2020-03-17 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-07 4:30', '2020-03-18 16:13', 1,\n 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-07 5:30', '2020-03-19 16:13', 1, 'weeks', '2020-04-08 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-07 6:30',\n '2020-03-20 16:13', 1, 'weeks', '2020-04-08 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-07 7:30', '2020-03-21 16:13', 1,\n 'weeks', '2020-04-08 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-09 1:30', '2020-03-15 16:13', 1, 'weeks', '2020-04-10 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-09 2:30',\n '2020-03-16 16:13', 1, 'weeks', '2020-04-10 16:13', 
['monday',\n 'wednesday', 'friday']), ('2020-04-09 3:30', '2020-03-17 16:13', 1,\n 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-09 4:30', '2020-03-18 16:13', 1, 'weeks', '2020-04-10 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-09 5:30',\n '2020-03-19 16:13', 1, 'weeks', '2020-04-10 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-09 6:30', '2020-03-20 16:13', 1,\n 'weeks', '2020-04-10 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-09 7:30', '2020-03-21 16:13', 1, 'weeks', '2020-04-10 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-11 1:30',\n '2020-03-15 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-11 2:30', '2020-03-16 16:13', 1,\n 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-11 3:30', '2020-03-17 16:13', 1, 'weeks', '2020-04-13 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-11 4:30',\n '2020-03-18 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-11 5:30', '2020-03-19 16:13', 1,\n 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-11 6:30', '2020-03-20 16:13', 1, 'weeks', '2020-04-13 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-11 7:30',\n '2020-03-21 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-13 1:30', '2020-03-15 16:13', 1,\n 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-13 2:30', '2020-03-16 16:13', 1, 'weeks', '2020-04-13 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-13 3:30',\n '2020-03-17 16:13', 1, 'weeks', '2020-04-13 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-13 4:30', '2020-03-18 16:13', 1,\n 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-04-13 5:30', '2020-03-19 16:13', 1, 'weeks', '2020-04-13 16:13',\n ['monday', 'wednesday', 'friday']), ('2020-04-13 6:30',\n '2020-03-20 16:13', 1, 'weeks', 
'2020-04-13 16:13', ['monday',\n 'wednesday', 'friday']), ('2020-04-13 7:30', '2020-03-21 16:13', 1,\n 'weeks', '2020-04-13 16:13', ['monday', 'wednesday', 'friday']), (\n '2020-09-16 1:30', '2020-03-14 16:13', 1, 'weeks', '2020-09-19 16:13',\n ['tuesday', 'saturday']), ('2020-09-16 2:30', '2020-03-15 16:13', 1,\n 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']), (\n '2020-09-16 3:30', '2020-03-16 16:13', 1, 'weeks', '2020-09-19 16:13',\n ['tuesday', 'saturday']), ('2020-09-16 4:30', '2020-03-17 16:13', 1,\n 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']), (\n '2020-09-16 5:30', '2020-03-18 16:13', 1, 'weeks', '2020-09-19 16:13',\n ['tuesday', 'saturday']), ('2020-09-16 6:30', '2020-03-19 16:13', 1,\n 'weeks', '2020-09-19 16:13', ['tuesday', 'saturday']), (\n '2020-09-16 7:30', '2020-03-20 16:13', 1, 'weeks', '2020-09-19 16:13',\n ['tuesday', 'saturday']), ('2020-03-16 16:30', '2020-03-15 16:13', 1,\n 'weeks', '2020-03-18 16:13', ['wednesday']), ('2020-03-19 11:30',\n '2020-03-15 16:13', 2, 'weeks', '2020-03-23 16:13', ['monday', 'friday'\n ]), ('2020-03-24 11:30', '2020-03-15 16:13', 2, 'weeks',\n '2020-03-27 16:13', ['monday', 'friday']), ('2020-06-07 17:30',\n '2020-03-15 16:13', 2, 'weeks', '2020-06-15 16:13', ['monday', 'friday'\n ]), ('2020-06-07 17:30', '2020-03-15 16:13', 2, 'weeks',\n '2020-06-16 16:13', ['tuesday', 'wednesday'])])\n", (380, 9252), False, 'from parameterized import parameterized\n')] |
from http import HTTPStatus
from typing import List
from unittest import TestCase, mock

import jwt
from fastapi.testclient import TestClient

from src.api.review_resource import REVIEWS
from src.config import config
from src.main import app
from src.models.article import Article
from src.models.review import Review
def _bearer(**payload):
    """Build an ``Authorization`` header value: a JWT signed with the app secret.

    Any claim can be supplied as a keyword argument; ``user`` and ``name``
    fall back to fixed test defaults when not given.
    """
    claims = {"user": "666666003", "name": "customer", **payload}
    return "Bearer " + jwt.encode(claims, config.JWT_SECRET, algorithm="HS256")
def mock_articles(arg1) -> List[Article]:
    """``side_effect`` stand-in for ``get_all_bought_articles``.

    ``arg1`` absorbs the single argument the patched service call receives.
    Returns a fixed list of three articles.

    Fix: the original annotation ``-> [Article]`` is a list *literal*, not a
    type, and is rejected by type checkers (PEP 484); ``List[Article]`` is the
    correct spelling.
    """
    return [
        Article(barcode="8400000000017", description="Mock most rated article", retailPrice=30),
        Article(barcode="8400000000018", description="Mock", retailPrice=30),
        Article(barcode="8400000000019", description="Mock 2", retailPrice=30),
    ]
def mock_assert_article_existing_and_return(token, barcode) -> Article:
    """``side_effect`` stand-in for ``assert_article_existing_and_return``.

    Known barcodes map to fixed article attributes; any other barcode falls
    back to the "most rated" article. ``token`` is accepted and ignored.
    """
    catalogue = {
        "8400000000017": {"description": "Mock most rated article", "retailPrice": 30},
        "8400000000024": {"description": "Mock second most rated article", "retailPrice": 5, "stock": 15},
        "8400000000031": {"description": "Mock third most rated article", "retailPrice": 305},
        "8400000000048": {"description": "Nothing", "retailPrice": 305},
        "8400000000055": {"description": "Another article", "retailPrice": 305},
        "8400000000079": {"description": "Another of another article", "retailPrice": 305},
        "8400000000086": {"description": "Look at this article", "retailPrice": 305},
    }
    if barcode in catalogue:
        return Article(barcode=barcode, **catalogue[barcode])
    # Unknown barcode: same default the original switcher used.
    return Article(barcode="8400000000017", description="Mock most rated article", retailPrice=30)
def mock_assert_article_existing_without_token(barcode) -> Article:
    """Tokenless variant: delegate to the token-taking mock with an empty token."""
    empty_token = ""
    return mock_assert_article_existing_and_return(empty_token, barcode)
class TestReviewResource(TestCase):
    """Endpoint tests for the reviews API, run through FastAPI's TestClient.

    Service-layer calls are patched with the module-level mock_* helpers so
    no repository/external service is touched.
    """

    @classmethod
    def setUpClass(cls):
        # One shared client and one valid CUSTOMER token for the whole class.
        cls.bearer = _bearer(role="CUSTOMER")
        cls.client = TestClient(app)

    def test_search_not_token_forbidden_exception(self):
        """No Authorization header at all -> 403."""
        response = self.client.get(REVIEWS + "/search")
        self.assertEqual(HTTPStatus.FORBIDDEN, response.status_code)

    def test_search_not_role_unauthorized_exception(self):
        """Token with an unknown role -> 403."""
        bearer = _bearer(role="KK")
        response = self.client.get(REVIEWS + "/search", headers={"Authorization": bearer})
        self.assertEqual(HTTPStatus.FORBIDDEN, response.status_code)

    def test_search_invalid_token_unauthorized_exception(self):
        """Tampered token (bad signature) -> 401."""
        bearer = _bearer(role="CUSTOMER") + "kkkk"
        response = self.client.get(REVIEWS + "/search", headers={"Authorization": bearer})
        self.assertEqual(HTTPStatus.UNAUTHORIZED, response.status_code)

    def test_search_not_included_role_forbidden_exception(self):
        """Token without any role claim -> 401."""
        bearer = _bearer()
        response = self.client.get(REVIEWS + "/search", headers={"Authorization": bearer})
        self.assertEqual(HTTPStatus.UNAUTHORIZED, response.status_code)

    def test_search_expired_token_unauthorized_exception(self):
        """Expired token (exp in 2013) -> 401."""
        bearer = _bearer(exp=1371720939, role="CUSTOMER")
        response = self.client.get(REVIEWS + "/search", headers={"Authorization": bearer})
        self.assertEqual(HTTPStatus.UNAUTHORIZED, response.status_code)

    @mock.patch('src.services.review_service.get_all_bought_articles', side_effect=mock_articles)
    @mock.patch('src.services.review_service.assert_article_existing_and_return',
                side_effect=mock_assert_article_existing_and_return)
    def __read_all(self, mock_article_existing_and_return, get_all_bought_articles):
        """Helper: GET /search as a fixed customer and return the JSON body.

        Fix: ``mock.patch`` decorators inject mocks bottom-up, so the mock from
        the decorator *closest* to the function (assert_article_existing_and_return)
        arrives as the first argument. The original parameter names were swapped;
        it only passed because both were checked with a bare ``assert_called()``.
        """
        bearer = _bearer(user="66", role="CUSTOMER")
        response = self.client.get(REVIEWS + "/search", headers={"Authorization": bearer})
        mock_article_existing_and_return.assert_called()
        get_all_bought_articles.assert_called()
        return response.json()

    @mock.patch('src.services.review_service.assert_article_existing_and_return',
                side_effect=mock_assert_article_existing_and_return)
    def test_create(self, mock_article_existing_and_return):
        """POST a new review and check the echoed article/score."""
        creation_review = Review(barcode="8400000000031", score=1.5)
        response = self.client.post(REVIEWS, json=creation_review.dict(), headers={"Authorization": self.bearer})
        self.assertEqual(HTTPStatus.OK, response.status_code)
        self.assertEqual(creation_review.barcode, response.json()['article']['barcode'])
        self.assertEqual(creation_review.score, response.json()['score'])
        mock_article_existing_and_return.assert_called()

    @mock.patch('src.services.review_service.assert_article_existing_and_return',
                side_effect=mock_assert_article_existing_and_return)
    def test_update(self, mock_article_existing_and_return):
        """PUT changed opinion/score on an existing review and verify the echo."""
        review = self.__read_all()[0]
        update_review = Review(**review, barcode=review['article']['barcode'])
        ide = update_review.id
        update_review.opinion = 'Changed'
        update_review.score = 4.5
        bearer = _bearer(user="66", role="CUSTOMER")
        response = self.client.put(REVIEWS + "/" + ide,
                                   json=update_review.dict(), headers={"Authorization": bearer})
        self.assertEqual(HTTPStatus.OK, response.status_code)
        self.assertIsNotNone(response.json()['article'])
        self.assertEqual('Changed', response.json()['opinion'])
        self.assertEqual(4.5, response.json()['score'])
        mock_article_existing_and_return.assert_called()

    def test_search(self):
        """Every review returned by /search is non-null."""
        reviews = self.__read_all()
        for review in reviews:
            self.assertIsNotNone(review)

    @mock.patch('src.services.review_service.assert_article_existing_without_token',
                side_effect=mock_assert_article_existing_without_token)
    def test_top_articles(self, assert_article_existing_without_token):
        """GET /topArticles returns articles in the expected rating order."""
        response = self.client.get(REVIEWS + "/topArticles")
        self.assertEqual(HTTPStatus.OK, response.status_code)
        articles = response.json()
        for article in articles:
            self.assertIsNotNone(article)
        self.assertEqual("8400000000024", articles[0]['barcode'])
        self.assertEqual("8400000000048", articles[1]['barcode'])
        self.assertEqual("8400000000017", articles[2]['barcode'])
        assert_article_existing_without_token.assert_called()
| [
"fastapi.testclient.TestClient",
"src.models.review.Review",
"src.models.article.Article",
"unittest.mock.patch",
"jwt.encode"
] | [((1747, 1838), 'src.models.article.Article', 'Article', ([], {'barcode': '"""8400000000017"""', 'description': '"""Mock most rated article"""', 'retailPrice': '(30)'}), "(barcode='8400000000017', description='Mock most rated article',\n retailPrice=30)\n", (1754, 1838), False, 'from src.models.article import Article\n'), ((3449, 3545), 'unittest.mock.patch', 'mock.patch', (['"""src.services.review_service.get_all_bought_articles"""'], {'side_effect': 'mock_articles'}), "('src.services.review_service.get_all_bought_articles',\n side_effect=mock_articles)\n", (3459, 3545), False, 'from unittest import TestCase, mock\n'), ((3547, 3680), 'unittest.mock.patch', 'mock.patch', (['"""src.services.review_service.assert_article_existing_and_return"""'], {'side_effect': 'mock_assert_article_existing_and_return'}), "('src.services.review_service.assert_article_existing_and_return',\n side_effect=mock_assert_article_existing_and_return)\n", (3557, 3680), False, 'from unittest import TestCase, mock\n'), ((4064, 4197), 'unittest.mock.patch', 'mock.patch', (['"""src.services.review_service.assert_article_existing_and_return"""'], {'side_effect': 'mock_assert_article_existing_and_return'}), "('src.services.review_service.assert_article_existing_and_return',\n side_effect=mock_assert_article_existing_and_return)\n", (4074, 4197), False, 'from unittest import TestCase, mock\n'), ((4742, 4875), 'unittest.mock.patch', 'mock.patch', (['"""src.services.review_service.assert_article_existing_and_return"""'], {'side_effect': 'mock_assert_article_existing_and_return'}), "('src.services.review_service.assert_article_existing_and_return',\n side_effect=mock_assert_article_existing_and_return)\n", (4752, 4875), False, 'from unittest import TestCase, mock\n'), ((5817, 5956), 'unittest.mock.patch', 'mock.patch', (['"""src.services.review_service.assert_article_existing_without_token"""'], {'side_effect': 'mock_assert_article_existing_without_token'}), 
"('src.services.review_service.assert_article_existing_without_token',\n side_effect=mock_assert_article_existing_without_token)\n", (5827, 5956), False, 'from unittest import TestCase, mock\n'), ((430, 487), 'jwt.encode', 'jwt.encode', (['payload', 'config.JWT_SECRET'], {'algorithm': '"""HS256"""'}), "(payload, config.JWT_SECRET, algorithm='HS256')\n", (440, 487), False, 'import jwt\n'), ((540, 631), 'src.models.article.Article', 'Article', ([], {'barcode': '"""8400000000017"""', 'description': '"""Mock most rated article"""', 'retailPrice': '(30)'}), "(barcode='8400000000017', description='Mock most rated article',\n retailPrice=30)\n", (547, 631), False, 'from src.models.article import Article\n'), ((641, 709), 'src.models.article.Article', 'Article', ([], {'barcode': '"""8400000000018"""', 'description': '"""Mock"""', 'retailPrice': '(30)'}), "(barcode='8400000000018', description='Mock', retailPrice=30)\n", (648, 709), False, 'from src.models.article import Article\n'), ((723, 793), 'src.models.article.Article', 'Article', ([], {'barcode': '"""8400000000019"""', 'description': '"""Mock 2"""', 'retailPrice': '(30)'}), "(barcode='8400000000019', description='Mock 2', retailPrice=30)\n", (730, 793), False, 'from src.models.article import Article\n'), ((911, 1002), 'src.models.article.Article', 'Article', ([], {'barcode': '"""8400000000017"""', 'description': '"""Mock most rated article"""', 'retailPrice': '(30)'}), "(barcode='8400000000017', description='Mock most rated article',\n retailPrice=30)\n", (918, 1002), False, 'from src.models.article import Article\n'), ((1025, 1133), 'src.models.article.Article', 'Article', ([], {'barcode': '"""8400000000024"""', 'description': '"""Mock second most rated article"""', 'retailPrice': '(5)', 'stock': '(15)'}), "(barcode='8400000000024', description=\n 'Mock second most rated article', retailPrice=5, stock=15)\n", (1032, 1133), False, 'from src.models.article import Article\n'), ((1188, 1287), 
'src.models.article.Article', 'Article', ([], {'barcode': '"""8400000000031"""', 'description': '"""Mock third most rated article"""', 'retailPrice': '(305)'}), "(barcode='8400000000031', description=\n 'Mock third most rated article', retailPrice=305)\n", (1195, 1287), False, 'from src.models.article import Article\n'), ((1309, 1381), 'src.models.article.Article', 'Article', ([], {'barcode': '"""8400000000048"""', 'description': '"""Nothing"""', 'retailPrice': '(305)'}), "(barcode='8400000000048', description='Nothing', retailPrice=305)\n", (1316, 1381), False, 'from src.models.article import Article\n'), ((1408, 1493), 'src.models.article.Article', 'Article', ([], {'barcode': '"""8400000000055"""', 'description': '"""Another article"""', 'retailPrice': '(305)'}), "(barcode='8400000000055', description='Another article', retailPrice=305\n )\n", (1415, 1493), False, 'from src.models.article import Article\n'), ((1515, 1610), 'src.models.article.Article', 'Article', ([], {'barcode': '"""8400000000079"""', 'description': '"""Another of another article"""', 'retailPrice': '(305)'}), "(barcode='8400000000079', description='Another of another article',\n retailPrice=305)\n", (1522, 1610), False, 'from src.models.article import Article\n'), ((1633, 1722), 'src.models.article.Article', 'Article', ([], {'barcode': '"""8400000000086"""', 'description': '"""Look at this article"""', 'retailPrice': '(305)'}), "(barcode='8400000000086', description='Look at this article',\n retailPrice=305)\n", (1640, 1722), False, 'from src.models.article import Article\n'), ((2167, 2182), 'fastapi.testclient.TestClient', 'TestClient', (['app'], {}), '(app)\n', (2177, 2182), False, 'from fastapi.testclient import TestClient\n'), ((4297, 4339), 'src.models.review.Review', 'Review', ([], {'barcode': '"""8400000000031"""', 'score': '(1.5)'}), "(barcode='8400000000031', score=1.5)\n", (4303, 4339), False, 'from src.models.review import Review\n'), ((5011, 5065), 'src.models.review.Review', 
'Review', ([], {'barcode': "review['article']['barcode']"}), "(**review, barcode=review['article']['barcode'])\n", (5017, 5065), False, 'from src.models.review import Review\n')] |
import json

# Build the dataset-description JSON for the W1BS "Baseline Stereo Benchmark".
# Every sequence pairs an image <seq>/1/<label>.bmp with <seq>/2/<label>.bmp
# and links them through a ground-truth file <seq>/h/<label>.txt.
sequence_name_list = ['A', 'G', 'L', 'map2photo', 'S']
description_list = ['Viewpoint Appearance', 'Viewpoint', 'ViewPoint Lighting', 'Map to Photo', 'Modality']
label_list = [
    ['arch', 'obama', 'vprice0', 'vprice1', 'vprice2', 'yosemite'],
    ['adam', 'boat', 'ExtremeZoomA', 'face', 'fox', 'graf', 'mag', 'shop', 'there', 'vin'],
    ['amos1', 'bdom', 'brugge_square', 'GC2', 'light', 'madrid',
     'notredame15', 'paintedladies', 'rushmore', 'trevi', 'vatican'],
    ['map1', 'map2', 'map3', 'map4', 'map5', 'map6'],
    ['angiogram', 'brain1', 'EO-IR-2',
     'maunaloa', 'mms68', 'mms75', 'treebranch']
]

json_data = {
    'Dataset Name': 'W1BS',
    'Description': 'Baseline Stereo Benchmark',
    'url': 'http://cmp.felk.cvut.cz/wbs/datasets/W1BS_with_patches.tar.gz',
    'Sequence Number': len(sequence_name_list),
    'Sequence Name List': sequence_name_list,
    'Sequences': [],
}

for seq_idx, seq_name in enumerate(sequence_name_list):
    labels = label_list[seq_idx]
    sequence = {
        'Name': seq_name,
        'Description': seq_name,
        'Label': description_list[seq_idx],
        'Images': [],
        'Image Number': len(labels) * 2,
        'Link Number': len(labels),
        'Links': [],
    }
    for label in labels:
        # Two images per label (one from each side of the pair) ...
        for side in (1, 2):
            sequence['Images'].append({
                'file': '{}/{}/{}.bmp'.format(seq_name, side, label),
                'id': '{}_{}'.format(label, side),
                'label': '{}_{}'.format(label, side),
            })
        # ... and one link connecting them via the ground-truth file.
        sequence['Links'].append({
            'source': label + '_1',
            'target': label + '_2',
            'file': '{}/h/{}.txt'.format(seq_name, label),
        })
    json_data['Sequences'].append(sequence)

with open('./datasets/dataset_info/W1BS.json', 'w') as json_file:
    json.dump(json_data, json_file, indent=2)
| [
"json.dump"
] | [((2312, 2353), 'json.dump', 'json.dump', (['json_data', 'json_file'], {'indent': '(2)'}), '(json_data, json_file, indent=2)\n', (2321, 2353), False, 'import json\n')] |
"""Module which implements various types of projections."""
from typing import List, Callable
import tensorflow as tf
from neuralmonkey.nn.utils import dropout
def maxout(inputs: tf.Tensor,
           size: int,
           scope: str = "MaxoutProjection") -> tf.Tensor:
    """Apply a maxout operation.

    Implementation of the Maxout layer (Goodfellow et al., 2013),
    http://arxiv.org/pdf/1302.4389.pdf:

        z = Wx + b
        y_i = max(z_{2i-1}, z_{2i})

    Arguments:
        inputs: A 2D tensor (or list of 2D tensors) with batch size in
            the first dimension.
        size: The size of dimension 1 of the output tensor.
        scope: The name of the scope used for the variables.

    Returns:
        A tensor of shape batch x size.
    """
    with tf.variable_scope(scope):
        # Project to twice the target size, then take pairwise maxima.
        pre_activation = tf.layers.dense(inputs, 2 * size, name=scope)
        pooled = tf.nn.max_pool(
            tf.reshape(pre_activation, [-1, 1, 2, size]),
            [1, 1, 2, 1], [1, 1, 2, 1], "SAME")
        return tf.reshape(pooled, [-1, size])
def multilayer_projection(
        input_: tf.Tensor,
        layer_sizes: List[int],
        train_mode: tf.Tensor,
        activation: Callable[[tf.Tensor], tf.Tensor] = tf.nn.relu,
        dropout_keep_prob: float = 1.0,
        scope: str = "mlp") -> tf.Tensor:
    """Apply a stack of dense layers with activation and dropout.

    Each entry of ``layer_sizes`` adds one fully connected layer followed
    by the given activation and dropout (active only in train mode).
    """
    hidden = input_
    with tf.variable_scope(scope):
        for layer_index, layer_size in enumerate(layer_sizes):
            hidden = tf.layers.dense(
                hidden,
                layer_size,
                activation=activation,
                name="mlp_layer_{}".format(layer_index))
            hidden = dropout(hidden, dropout_keep_prob, train_mode)
    return hidden
def glu(input_: tf.Tensor,
        gating_fn: Callable[[tf.Tensor], tf.Tensor] = tf.sigmoid) -> tf.Tensor:
    """Apply a Gated Linear Unit.

    Gated Linear Unit - Dauphin et al. (2016).
    http://arxiv.org/abs/1612.08083
    """
    shape = input_.get_shape().as_list()
    if shape[-1] % 2 != 0:
        raise ValueError("Input size should be an even number")
    # Split the last axis in half: one linear part, one gating part.
    linear_part, gate_part = tf.split(input_, 2, axis=len(shape) - 1)
    return linear_part * gating_fn(gate_part)
| [
"tensorflow.nn.max_pool",
"tensorflow.variable_scope",
"tensorflow.reshape",
"tensorflow.layers.dense",
"neuralmonkey.nn.utils.dropout"
] | [((801, 825), 'tensorflow.variable_scope', 'tf.variable_scope', (['scope'], {}), '(scope)\n', (818, 825), True, 'import tensorflow as tf\n'), ((847, 892), 'tensorflow.layers.dense', 'tf.layers.dense', (['inputs', '(size * 2)'], {'name': 'scope'}), '(inputs, size * 2, name=scope)\n', (862, 892), True, 'import tensorflow as tf\n'), ((916, 955), 'tensorflow.reshape', 'tf.reshape', (['projected', '[-1, 1, 2, size]'], {}), '(projected, [-1, 1, 2, size])\n', (926, 955), True, 'import tensorflow as tf\n'), ((976, 1040), 'tensorflow.nn.max_pool', 'tf.nn.max_pool', (['maxout_input', '[1, 1, 2, 1]', '[1, 1, 2, 1]', '"""SAME"""'], {}), "(maxout_input, [1, 1, 2, 1], [1, 1, 2, 1], 'SAME')\n", (990, 1040), True, 'import tensorflow as tf\n'), ((1074, 1107), 'tensorflow.reshape', 'tf.reshape', (['maxpooled', '[-1, size]'], {}), '(maxpooled, [-1, size])\n', (1084, 1107), True, 'import tensorflow as tf\n'), ((1433, 1457), 'tensorflow.variable_scope', 'tf.variable_scope', (['scope'], {}), '(scope)\n', (1450, 1457), True, 'import tensorflow as tf\n'), ((1707, 1756), 'neuralmonkey.nn.utils.dropout', 'dropout', (['mlp_input', 'dropout_keep_prob', 'train_mode'], {}), '(mlp_input, dropout_keep_prob, train_mode)\n', (1714, 1756), False, 'from neuralmonkey.nn.utils import dropout\n')] |
# -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2019-06-16 15:12
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the photos app's Image, Category and Location models.

    Image's foreign-key fields are removed before the referenced models
    are deleted, so no dangling references remain mid-migration.
    """

    dependencies = [
        ('photos', '0001_initial'),
    ]

    operations = [
        # Drop Image's FK fields first ...
        migrations.RemoveField(
            model_name='image',
            name='image_category',
        ),
        migrations.RemoveField(
            model_name='image',
            name='image_location',
        ),
        # ... then delete the three models.
        migrations.DeleteModel(
            name='Category',
        ),
        migrations.DeleteModel(
            name='Image',
        ),
        migrations.DeleteModel(
            name='Location',
        ),
    ]
| [
"django.db.migrations.DeleteModel",
"django.db.migrations.RemoveField"
] | [((278, 343), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""image"""', 'name': '"""image_category"""'}), "(model_name='image', name='image_category')\n", (300, 343), False, 'from django.db import migrations\n'), ((388, 453), 'django.db.migrations.RemoveField', 'migrations.RemoveField', ([], {'model_name': '"""image"""', 'name': '"""image_location"""'}), "(model_name='image', name='image_location')\n", (410, 453), False, 'from django.db import migrations\n'), ((498, 537), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Category"""'}), "(name='Category')\n", (520, 537), False, 'from django.db import migrations\n'), ((570, 606), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Image"""'}), "(name='Image')\n", (592, 606), False, 'from django.db import migrations\n'), ((639, 678), 'django.db.migrations.DeleteModel', 'migrations.DeleteModel', ([], {'name': '"""Location"""'}), "(name='Location')\n", (661, 678), False, 'from django.db import migrations\n')] |
import time, datetime, argparse
import os, sys
import numpy as np
np.set_printoptions(precision=2)
import matplotlib.pyplot as plt
import copy as cp
import pickle
PROJECT_PATH = '/home/nbuckman/Dropbox (MIT)/DRL/2020_01_cooperative_mpc/mpc-multiple-vehicles/'
sys.path.append(PROJECT_PATH)
import casadi as cas
import src.MPC_Casadi as mpc
import src.TrafficWorld as tw
import src.IterativeBestResponseMPCMultiple as mibr
import src.car_plotting_multiple as cmplot
##########################################################
# ----- Experiment configuration -----
# SVO (social value orientation) angle used by the non-ambulance vehicles.
svo_theta = np.pi/4.0
# random_seed = args.random_seed[0]
random_seed = 3
NEW = True  # True: create a fresh timestamped results directory tree
if NEW:
    optional_suffix = "ellipses"
    subdir_name = datetime.datetime.now().strftime("%Y%m%d_%H%M%S") + optional_suffix
    folder = "results/" + subdir_name + "/"
    # One sub-directory per artifact type produced during the run
    os.makedirs(folder)
    os.makedirs(folder+"imgs/")
    os.makedirs(folder+"data/")
    os.makedirs(folder+"vids/")
    os.makedirs(folder+"plots/")
else:
    # Reuse an existing run directory (no directories are created)
    subdir_name = "20200224-103456_real_dim_CA"
    folder = "results/" + subdir_name + "/"
print(folder)
if random_seed > 0:
    np.random.seed(random_seed)
#######################################################################
# Horizon / discretization parameters
T = 3 # MPC Planning Horizon
dt = 0.3
N = int(T/dt) #Number of control intervals in MPC
n_rounds_mpc = 6
percent_mpc_executed = 0.5 ## This is the percent of MPC that is executed
number_ctrl_pts_executed = int(np.floor(N*percent_mpc_executed))
XAMB_ONLY = False  # True would skip the other vehicles' best-response solves
n_other = 2  # number of non-ambulance vehicles
n_rounds_ibr = 2  # best-response iterations per MPC round
# Two-lane traffic world, 1000 units long
world = tw.TrafficWorld(2, 0, 1000)
# large_world = tw.TrafficWorld(2, 0, 1000, 5.0)
#########################################################################
# Storage for the trajectory that is actually executed: the concatenation
# of the first `number_ctrl_pts_executed` points of every MPC solution.
# The state is 6-dimensional and the control input 2-dimensional.
actual_xamb = np.zeros((6, n_rounds_mpc*number_ctrl_pts_executed + 1))
actual_uamb = np.zeros((2, n_rounds_mpc*number_ctrl_pts_executed))
actual_xothers = [np.zeros((6, n_rounds_mpc*number_ctrl_pts_executed + 1)) for i in range(n_other)]
actual_uothers = [np.zeros((2, n_rounds_mpc*number_ctrl_pts_executed)) for i in range(n_other)]
actual_all_other_x0 = [np.zeros((6, 2*N)) for i in range(n_other)]
xamb = np.zeros(shape=(6, N+1))
t_start_time = time.time()
####################################################
## Create the Cars in this Problem
all_other_x0 = []   # initial states of the other vehicles
all_other_u = []    # control-input guesses for the other vehicles
all_other_MPC = []  # one MPC model per other vehicle
all_other_x = [np.zeros(shape=(6, N+1)) for i in range(n_other)]
next_x0 = 0
for i in range(n_other):
    # ----- Cost weights / limits for this vehicle's MPC -----
    x1_MPC = mpc.MPC(dt)
    x1_MPC.n_circles = 3
    x1_MPC.theta_iamb = svo_theta
    x1_MPC.N = N
    # NOTE(review): k_change_u_v is assigned twice (0.001 here, 0.01 below);
    # only the later value takes effect -- confirm which was intended.
    x1_MPC.k_change_u_v = 0.001
    x1_MPC.max_delta_u = 50 * np.pi/180 * x1_MPC.dt
    x1_MPC.k_u_v = 0.01
    x1_MPC.k_u_delta = .00001
    x1_MPC.k_change_u_v = 0.01
    x1_MPC.k_change_u_delta = 0.001
    x1_MPC.k_s = 0
    x1_MPC.k_x = 0
    x1_MPC.k_x_dot = -1.0 / 100.0
    x1_MPC.k_lat = 0.001
    x1_MPC.k_lon = 0.0
    x1_MPC.k_phi_error = 0.001
    x1_MPC.k_phi_dot = 0.01
    ####Vehicle Initial Conditions
    # Alternate vehicles between lane 0 and lane 1; advance the longitudinal
    # start position only when placing a lane-0 vehicle.
    if i%2 == 0:
        lane_number = 0
        next_x0 += x1_MPC.L + 2*x1_MPC.min_dist
    else:
        lane_number = 1
    initial_speed = 0.75*x1_MPC.max_v
    traffic_world = world
    x1_MPC.fd = x1_MPC.gen_f_desired_lane(traffic_world, lane_number, True)
    # 6-dim state; presumably [x, y, heading, steering, speed, s] -- confirm
    # against the MPC model's state ordering.
    x0 = np.array([next_x0, traffic_world.get_lane_centerline_y(lane_number), 0, 0, initial_speed, 0]).T
    ## Set the initial control of the other vehicles
    u1 = np.zeros((2,N))
    # u1[0,:] = np.clip(np.pi/180 *np.random.normal(size=(1,N)), -2 * np.pi/180, 2 * np.pi/180)
    SAME_SIDE = False
    if lane_number == 1 or SAME_SIDE:
        u1[0,0] = 2 * np.pi/180
    else:
        u1[0,0] = -2 * np.pi/180
    # NOTE(review): this overwrite makes the lane-dependent branch above a no-op.
    u1[0,0] = 0
    all_other_MPC += [x1_MPC]
    all_other_x0 += [x0]
    all_other_u += [u1]
# Settings for Ambulance
# ----- Ambulance MPC: egoistic SVO (theta_iamb = 0) with its own weights -----
amb_MPC = cp.deepcopy(x1_MPC)
amb_MPC.theta_iamb = 0.0
amb_MPC.k_u_v = 0.0000
amb_MPC.k_u_delta = .01
amb_MPC.k_change_u_v = 0.0000
amb_MPC.k_change_u_delta = 0
amb_MPC.k_s = 0
amb_MPC.k_x = 0
amb_MPC.k_x_dot = -1.0 / 100.0
# NOTE(review): the two assignments below override the two above --
# the effective values are k_x = -0.01 and k_x_dot = 0.
amb_MPC.k_x = -1.0/100
amb_MPC.k_x_dot = 0
amb_MPC.k_lat = 0.00001
amb_MPC.k_lon = 0.0
# amb_MPC.min_v = 0.8*initial_speed
amb_MPC.max_v = 35 * 0.447 # m/s
amb_MPC.k_phi_error = 0.1
amb_MPC.k_phi_dot = 0.01
NO_GRASS = False
amb_MPC.min_y = world.y_min
amb_MPC.max_y = world.y_max
if NO_GRASS:
    # Keep the ambulance off the grass strips at the road edges
    amb_MPC.min_y += world.grass_width
    amb_MPC.max_y -= world.grass_width
amb_MPC.fd = amb_MPC.gen_f_desired_lane(world, 0, True)
x0_amb = np.array([0, 0, 0, 0, initial_speed , 0]).T
# NOTE(review): only the last vehicle created in the loop above is pickled
# here (x1_MPC and i refer to the final iteration), and the file handles are
# never closed -- consider a `with open(...)` per vehicle.
pickle.dump(x1_MPC, open(folder + "data/"+"mpc%d"%i + ".p",'wb'))
pickle.dump(amb_MPC, open(folder + "data/"+"mpcamb" + ".p",'wb'))
########################################################################
#### SOLVE THE MPC #####################################################
# ---------------------------------------------------------------------------
# Receding-horizon solve.  Each MPC round:
#   1. shifts the initial conditions to the end of the previously executed
#      portion of the last solution,
#   2. runs `n_rounds_ibr` rounds of iterative best response (IBR): the
#      ambulance responds to the other vehicles' predicted trajectories,
#      then (unless XAMB_ONLY) each other vehicle responds in turn,
#   3. executes the first `number_ctrl_pts_executed` control points and logs
#      the executed trajectory into the `actual_*` arrays.
# ---------------------------------------------------------------------------
for i_mpc in range(n_rounds_mpc):
    min_slack = np.inf  # FIX: np.infty was removed in NumPy 2.0
    actual_t = i_mpc * number_ctrl_pts_executed
    ###### Update the initial conditions for all vehicles
    if i_mpc > 0:
        x0_amb = xamb[:, number_ctrl_pts_executed]
        for i in range(len(all_other_x0)):
            all_other_x0[i] = all_other_x[i][:, number_ctrl_pts_executed]
    ###### Initial guess for the other vehicles' controls.  This is updated
    ###### once the other vehicles solve their best response to the ambulance.
    ###### The guess is the shifted previous solution (holding the last
    ###### control) or zero input on the first round.
    all_other_x = [np.zeros(shape=(6, N+1)) for i in range(n_other)]
    all_other_x_des = [np.zeros(shape=(3, N+1)) for i in range(n_other)]
    for i in range(n_other):
        if i_mpc == 0:
            # FIX: the control vector is 2 x N (it matches u1 = np.zeros((2,N))
            # above and actual_uothers); the original allocated 6 x N here.
            all_other_u[i] = np.zeros(shape=(2, N))
        else:
            all_other_u[i] = np.concatenate((all_other_u[i][:, number_ctrl_pts_executed:],
                                             np.tile(all_other_u[i][:, -1:], (1, number_ctrl_pts_executed))), axis=1)
        x_mpci, u_all_i, x_0_i = all_other_MPC[i], all_other_u[i], all_other_x0[i]
        all_other_x[i], all_other_x_des[i] = x_mpci.forward_simulate_all(x_0_i, u_all_i)

    for i_rounds_ibr in range(n_rounds_ibr):
        ########## Solve the Ambulance MPC ##########
        response_MPC = amb_MPC
        response_x0 = x0_amb
        nonresponse_MPC_list = all_other_MPC
        nonresponse_x0_list = all_other_x0
        nonresponse_u_list = all_other_u
        nonresponse_x_list = all_other_x
        nonresponse_xd_list = all_other_x_des
        ################# Generate the warm starts ###############################
        u_warm_profiles = mibr.generate_warm_u(N, response_MPC)
        ### Ambulance warm start: reuse the previous IBR solution if available,
        ### otherwise shift the previous MPC round's solution.
        if i_rounds_ibr > 0:
            u_warm_profiles["previous"] = uamb
        elif i_mpc > 0:
            u_warm_profiles["previous"] = np.concatenate((uamb[:, number_ctrl_pts_executed:],
                                                            np.tile(uamb[:, -1:], (1, number_ctrl_pts_executed))), axis=1)
        #######################################################################
        min_response_cost = 99999999
        for k_warm in u_warm_profiles.keys():
            u_warm = u_warm_profiles[k_warm]
            x_warm, x_des_warm = response_MPC.forward_simulate_all(response_x0.reshape(6, 1), u_warm)
            bri = mibr.IterativeBestResponseMPCMultiple(response_MPC, None, nonresponse_MPC_list)
            # Weights for the slack / collision-avoidance penalty terms
            k_slack = 10000.0
            k_CA = 0.0
            k_CA_power = 4
            wall_CA = True
            bri.k_slack = k_slack
            bri.k_CA = k_CA
            bri.k_CA_power = k_CA_power
            bri.world = world
            bri.wall_CA = wall_CA
            INFEASIBLE = True
            bri.generate_optimization(N, T, response_x0, None, nonresponse_x0_list, 1, slack=False)
            bri.opti.set_initial(bri.u_opt, u_warm)
            bri.opti.set_initial(bri.x_opt, x_warm)
            bri.opti.set_initial(bri.x_desired, x_des_warm)
            ### Fix the trajectories of the non-responding vehicles (as given)
            for j in range(n_other):
                bri.opti.set_value(bri.allother_x_opt[j], nonresponse_x_list[j])
                bri.opti.set_value(bri.allother_x_desired[j], nonresponse_xd_list[j])
            try:
                bri.solve(None, nonresponse_u_list)
                x1, u1, x1_des, _, _, _, _, _, _ = bri.get_solution()
                print("i_mpc %d n_round %d i %02d Cost %.02f Slack %.02f "%(i_mpc, i_rounds_ibr, i, bri.solution.value(bri.total_svo_cost), bri.solution.value(bri.slack_cost)))
                print("J_i %.03f, J_j %.03f, Slack %.03f, CA %.03f"%(bri.solution.value(bri.response_svo_cost), bri.solution.value(bri.other_svo_cost), bri.solution.value(bri.k_slack*bri.slack_cost), bri.solution.value(bri.k_CA*bri.collision_cost)))
                print("Dir:", subdir_name)
                print("k_warm", k_warm)
                INFEASIBLE = False
                # Keep the lowest-cost feasible warm-start solution
                if bri.solution.value(bri.slack_cost) < min_slack:
                    current_cost = bri.solution.value(bri.total_svo_cost)
                    if current_cost < min_response_cost:
                        uamb = u1
                        xamb = x1
                        xamb_des = x1_des
                        min_response_cost = current_cost
                        min_response_warm = k_warm
                        min_bri = bri
            except RuntimeError:
                print("Infeasibility: k_warm %s"%k_warm)

        ########### Solve for the other vehicles on the road ###########
        if not XAMB_ONLY:
            for i in range(len(all_other_MPC)):
                response_MPC = all_other_MPC[i]
                response_x0 = all_other_x0[i]
                nonresponse_MPC_list = all_other_MPC[:i] + all_other_MPC[i+1:]
                nonresponse_x0_list = all_other_x0[:i] + all_other_x0[i+1:]
                nonresponse_u_list = all_other_u[:i] + all_other_u[i+1:]
                nonresponse_x_list = all_other_x[:i] + all_other_x[i+1:]
                nonresponse_xd_list = all_other_x_des[:i] + all_other_x_des[i+1:]
                ################ Warm Start
                u_warm_profiles = mibr.generate_warm_u(N, response_MPC)
                if i_rounds_ibr > 0:
                    u_warm_profiles["previous"] = all_other_u[i]
                elif i_mpc > 0:
                    u_warm_profiles["previous"] = np.concatenate((all_other_u[i][:, number_ctrl_pts_executed:],
                                                                    np.tile(all_other_u[i][:, -1:], (1, number_ctrl_pts_executed))), axis=1)
                min_response_cost = 99999999
                for k_warm in u_warm_profiles.keys():
                    u_warm = u_warm_profiles[k_warm]
                    x_warm, x_des_warm = response_MPC.forward_simulate_all(response_x0.reshape(6, 1), u_warm)
                    bri = mibr.IterativeBestResponseMPCMultiple(response_MPC, amb_MPC, nonresponse_MPC_list)
                    bri.k_slack = k_slack
                    bri.k_CA = k_CA
                    bri.k_CA_power = k_CA_power
                    bri.world = world
                    bri.wall_CA = wall_CA
                    INFEASIBLE = True
                    bri.generate_optimization(N, T, response_x0, x0_amb, nonresponse_x0_list, 1, slack=False)
                    bri.opti.set_initial(bri.u_opt, u_warm)
                    bri.opti.set_initial(bri.x_opt, x_warm)
                    bri.opti.set_initial(bri.x_desired, x_des_warm)
                    ### Fix the ambulance and remaining vehicles' trajectories.
                    bri.opti.set_value(bri.xamb_opt, xamb)
                    # FIX: use a fresh index `j` here -- the original reused `i`,
                    # clobbering the responding vehicle's index used below.
                    for j in range(len(nonresponse_x_list)):
                        bri.opti.set_value(bri.allother_x_opt[j], nonresponse_x_list[j])
                        bri.opti.set_value(bri.allother_x_desired[j], nonresponse_xd_list[j])
                    try:
                        bri.solve(uamb, nonresponse_u_list)
                        x1_nr, u1_nr, x1_des_nr, _, _, _, _, _, _ = bri.get_solution()
                        print(" i_mpc %d n_round %d i %02d Cost %.02f Slack %.02f "%(i_mpc, i_rounds_ibr, i, bri.solution.value(bri.total_svo_cost), bri.solution.value(bri.slack_cost)))
                        print(" J_i %.03f, J_j %.03f, Slack %.03f, CA %.03f"%(bri.solution.value(bri.response_svo_cost), bri.solution.value(bri.other_svo_cost), bri.solution.value(bri.k_slack*bri.slack_cost), bri.solution.value(bri.k_CA*bri.collision_cost)))
                        print(" Dir:", subdir_name)
                        print(" k_warm", k_warm)
                        INFEASIBLE = False
                        if bri.solution.value(bri.slack_cost) < min_slack:
                            current_cost = bri.solution.value(bri.total_svo_cost)
                            if current_cost < min_response_cost:
                                # FIX: replace vehicle i's solution in place.
                                # The original *inserted* the new solution
                                # (list[:i] + [new] + list[i:]), which grew the
                                # lists by one on every accepted update.
                                all_other_u[i] = u1_nr
                                all_other_x[i] = x1_nr
                                all_other_x_des[i] = x1_des_nr
                                min_response_cost = current_cost
                                min_response_warm = k_warm
                                min_bri = bri
                    except RuntimeError:
                        print(" Infeasibility: k_warm %s"%k_warm)

        print(" IBR Done: Rd %02d / %02d"%(i_rounds_ibr, n_rounds_ibr))
        file_name = folder + "data/"+'r%02d%03d'%(i_mpc, i_rounds_ibr)
        if not INFEASIBLE:
            # FIX: the original referenced undefined names `xothers`,
            # `uothers`, `xothers_des` (NameError); the lists actually used
            # throughout this script are all_other_x / all_other_u /
            # all_other_x_des.
            mibr.save_state(file_name, xamb, uamb, xamb_des, all_other_x, all_other_u, all_other_x_des)
            mibr.save_costs(file_name, bri)
            actual_t = i_mpc * number_ctrl_pts_executed
            actual_xamb[:, actual_t:actual_t+number_ctrl_pts_executed+1] = xamb[:, :number_ctrl_pts_executed+1]
            print(" MPC Done: Rd %02d / %02d"%(i_mpc, n_rounds_mpc))
            print(" Full MPC Solution", xamb[0:2, :])
            print(" Executed MPC", xamb[0:2, :number_ctrl_pts_executed+1])
            print(" Solution Costs...")
            for cost in bri.car1_costs_list:
                print("%.04f"%bri.solution.value(cost))
            print(min_bri.solution.value(min_bri.k_CA * min_bri.collision_cost), min_bri.solution.value(min_bri.collision_cost))
            print(min_bri.solution.value(min_bri.k_slack * min_bri.slack_cost), min_bri.solution.value(min_bri.slack_cost))
            print(" Save to...", file_name)
            actual_uamb[:, actual_t:actual_t+number_ctrl_pts_executed] = uamb[:, :number_ctrl_pts_executed]
            # Diagnostic plots of the current solution
            for k in range(N+1):
                cmplot.plot_multiple_cars(k, min_bri.responseMPC, all_other_x, xamb, True, None, None, None, min_bri.world, 0)
                plt.show()
            plt.plot(xamb[4, :], '--')
            plt.plot(xamb[4, :] * np.cos(xamb[2, :]))
            plt.ylabel("Velocity / Vx")
            plt.hlines(35*0.447, 0, xamb.shape[1])
            plt.show()
            plt.plot(uamb[1, :], 'o')
            plt.hlines(amb_MPC.max_v_u, 0, xamb.shape[1])
            plt.ylabel("delta_u_v")
            plt.show()
            # Log the executed portion of the other vehicles' trajectories
            for i in range(len(all_other_x)):
                actual_xothers[i][:, actual_t:actual_t+number_ctrl_pts_executed+1] = all_other_x[i][:, :number_ctrl_pts_executed+1]
                actual_uothers[i][:, actual_t:actual_t+number_ctrl_pts_executed] = all_other_u[i][:, :number_ctrl_pts_executed]
        else:
            raise Exception("Xamb is None", i_mpc, i_rounds_ibr, "slack cost", bri.solution.value(bri.slack_cost))

print("Solver Done!  Runtime: %.1d"%(time.time()-t_start_time))
| [
"src.IterativeBestResponseMPCMultiple.save_state",
"matplotlib.pyplot.ylabel",
"src.TrafficWorld.TrafficWorld",
"numpy.array",
"copy.deepcopy",
"sys.path.append",
"src.IterativeBestResponseMPCMultiple.IterativeBestResponseMPCMultiple",
"matplotlib.pyplot.plot",
"numpy.random.seed",
"src.MPC_Casadi... | [((66, 98), 'numpy.set_printoptions', 'np.set_printoptions', ([], {'precision': '(2)'}), '(precision=2)\n', (85, 98), True, 'import numpy as np\n'), ((261, 290), 'sys.path.append', 'sys.path.append', (['PROJECT_PATH'], {}), '(PROJECT_PATH)\n', (276, 290), False, 'import os, sys\n'), ((1477, 1504), 'src.TrafficWorld.TrafficWorld', 'tw.TrafficWorld', (['(2)', '(0)', '(1000)'], {}), '(2, 0, 1000)\n', (1492, 1504), True, 'import src.TrafficWorld as tw\n'), ((1647, 1705), 'numpy.zeros', 'np.zeros', (['(6, n_rounds_mpc * number_ctrl_pts_executed + 1)'], {}), '((6, n_rounds_mpc * number_ctrl_pts_executed + 1))\n', (1655, 1705), True, 'import numpy as np\n'), ((1718, 1772), 'numpy.zeros', 'np.zeros', (['(2, n_rounds_mpc * number_ctrl_pts_executed)'], {}), '((2, n_rounds_mpc * number_ctrl_pts_executed))\n', (1726, 1772), True, 'import numpy as np\n'), ((2043, 2069), 'numpy.zeros', 'np.zeros', ([], {'shape': '(6, N + 1)'}), '(shape=(6, N + 1))\n', (2051, 2069), True, 'import numpy as np\n'), ((2083, 2094), 'time.time', 'time.time', ([], {}), '()\n', (2092, 2094), False, 'import time, datetime, argparse\n'), ((3687, 3706), 'copy.deepcopy', 'cp.deepcopy', (['x1_MPC'], {}), '(x1_MPC)\n', (3698, 3706), True, 'import copy as cp\n'), ((788, 807), 'os.makedirs', 'os.makedirs', (['folder'], {}), '(folder)\n', (799, 807), False, 'import os, sys\n'), ((812, 841), 'os.makedirs', 'os.makedirs', (["(folder + 'imgs/')"], {}), "(folder + 'imgs/')\n", (823, 841), False, 'import os, sys\n'), ((844, 873), 'os.makedirs', 'os.makedirs', (["(folder + 'data/')"], {}), "(folder + 'data/')\n", (855, 873), False, 'import os, sys\n'), ((876, 905), 'os.makedirs', 'os.makedirs', (["(folder + 'vids/')"], {}), "(folder + 'vids/')\n", (887, 905), False, 'import os, sys\n'), ((908, 938), 'os.makedirs', 'os.makedirs', (["(folder + 'plots/')"], {}), "(folder + 'plots/')\n", (919, 938), False, 'import os, sys\n'), ((1073, 1100), 'numpy.random.seed', 'np.random.seed', (['random_seed'], 
{}), '(random_seed)\n', (1087, 1100), True, 'import numpy as np\n'), ((1386, 1420), 'numpy.floor', 'np.floor', (['(N * percent_mpc_executed)'], {}), '(N * percent_mpc_executed)\n', (1394, 1420), True, 'import numpy as np\n'), ((1789, 1847), 'numpy.zeros', 'np.zeros', (['(6, n_rounds_mpc * number_ctrl_pts_executed + 1)'], {}), '((6, n_rounds_mpc * number_ctrl_pts_executed + 1))\n', (1797, 1847), True, 'import numpy as np\n'), ((1889, 1943), 'numpy.zeros', 'np.zeros', (['(2, n_rounds_mpc * number_ctrl_pts_executed)'], {}), '((2, n_rounds_mpc * number_ctrl_pts_executed))\n', (1897, 1943), True, 'import numpy as np\n'), ((1990, 2010), 'numpy.zeros', 'np.zeros', (['(6, 2 * N)'], {}), '((6, 2 * N))\n', (1998, 2010), True, 'import numpy as np\n'), ((2252, 2278), 'numpy.zeros', 'np.zeros', ([], {'shape': '(6, N + 1)'}), '(shape=(6, N + 1))\n', (2260, 2278), True, 'import numpy as np\n'), ((2352, 2363), 'src.MPC_Casadi.MPC', 'mpc.MPC', (['dt'], {}), '(dt)\n', (2359, 2363), True, 'import src.MPC_Casadi as mpc\n'), ((3303, 3319), 'numpy.zeros', 'np.zeros', (['(2, N)'], {}), '((2, N))\n', (3311, 3319), True, 'import numpy as np\n'), ((4350, 4390), 'numpy.array', 'np.array', (['[0, 0, 0, 0, initial_speed, 0]'], {}), '([0, 0, 0, 0, initial_speed, 0])\n', (4358, 4390), True, 'import numpy as np\n'), ((5355, 5381), 'numpy.zeros', 'np.zeros', ([], {'shape': '(6, N + 1)'}), '(shape=(6, N + 1))\n', (5363, 5381), True, 'import numpy as np\n'), ((5428, 5454), 'numpy.zeros', 'np.zeros', ([], {'shape': '(3, N + 1)'}), '(shape=(3, N + 1))\n', (5436, 5454), True, 'import numpy as np\n'), ((6431, 6468), 'src.IterativeBestResponseMPCMultiple.generate_warm_u', 'mibr.generate_warm_u', (['N', 'response_MPC'], {}), '(N, response_MPC)\n', (6451, 6468), True, 'import src.IterativeBestResponseMPCMultiple as mibr\n'), ((15747, 15826), 'src.IterativeBestResponseMPCMultiple.save_state', 'mibr.save_state', (['file_name', 'xamb', 'uamb', 'xamb_des', 'xothers', 'uothers', 'xothers_des'], {}), 
'(file_name, xamb, uamb, xamb_des, xothers, uothers, xothers_des)\n', (15762, 15826), True, 'import src.IterativeBestResponseMPCMultiple as mibr\n'), ((15835, 15866), 'src.IterativeBestResponseMPCMultiple.save_costs', 'mibr.save_costs', (['file_name', 'bri'], {}), '(file_name, bri)\n', (15850, 15866), True, 'import src.IterativeBestResponseMPCMultiple as mibr\n'), ((16944, 16970), 'matplotlib.pyplot.plot', 'plt.plot', (['xamb[4, :]', '"""--"""'], {}), "(xamb[4, :], '--')\n", (16952, 16970), True, 'import matplotlib.pyplot as plt\n'), ((17025, 17052), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Velocity / Vx"""'], {}), "('Velocity / Vx')\n", (17035, 17052), True, 'import matplotlib.pyplot as plt\n'), ((17061, 17101), 'matplotlib.pyplot.hlines', 'plt.hlines', (['(35 * 0.447)', '(0)', 'xamb.shape[1]'], {}), '(35 * 0.447, 0, xamb.shape[1])\n', (17071, 17101), True, 'import matplotlib.pyplot as plt\n'), ((17106, 17116), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (17114, 17116), True, 'import matplotlib.pyplot as plt\n'), ((17125, 17150), 'matplotlib.pyplot.plot', 'plt.plot', (['uamb[1, :]', '"""o"""'], {}), "(uamb[1, :], 'o')\n", (17133, 17150), True, 'import matplotlib.pyplot as plt\n'), ((17157, 17202), 'matplotlib.pyplot.hlines', 'plt.hlines', (['amb_MPC.max_v_u', '(0)', 'xamb.shape[1]'], {}), '(amb_MPC.max_v_u, 0, xamb.shape[1])\n', (17167, 17202), True, 'import matplotlib.pyplot as plt\n'), ((17209, 17232), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""delta_u_v"""'], {}), "('delta_u_v')\n", (17219, 17232), True, 'import matplotlib.pyplot as plt\n'), ((17241, 17251), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (17249, 17251), True, 'import matplotlib.pyplot as plt\n'), ((5559, 5581), 'numpy.zeros', 'np.zeros', ([], {'shape': '(6, N)'}), '(shape=(6, N))\n', (5567, 5581), True, 'import numpy as np\n'), ((7247, 7326), 'src.IterativeBestResponseMPCMultiple.IterativeBestResponseMPCMultiple', 'mibr.IterativeBestResponseMPCMultiple', 
(['response_MPC', 'None', 'nonresponse_MPC_list'], {}), '(response_MPC, None, nonresponse_MPC_list)\n', (7284, 7326), True, 'import src.IterativeBestResponseMPCMultiple as mibr\n'), ((16800, 16910), 'src.car_plotting_multiple.plot_multiple_cars', 'cmplot.plot_multiple_cars', (['k', 'min_bri.responseMPC', 'xothers', 'xamb', '(True)', 'None', 'None', 'None', 'min_bri.world', '(0)'], {}), '(k, min_bri.responseMPC, xothers, xamb, True, None,\n None, None, min_bri.world, 0)\n', (16825, 16910), True, 'import src.car_plotting_multiple as cmplot\n'), ((16925, 16935), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (16933, 16935), True, 'import matplotlib.pyplot as plt\n'), ((17826, 17837), 'time.time', 'time.time', ([], {}), '()\n', (17835, 17837), False, 'import time, datetime, argparse\n'), ((672, 695), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (693, 695), False, 'import time, datetime, argparse\n'), ((11081, 11118), 'src.IterativeBestResponseMPCMultiple.generate_warm_u', 'mibr.generate_warm_u', (['N', 'response_MPC'], {}), '(N, response_MPC)\n', (11101, 11118), True, 'import src.IterativeBestResponseMPCMultiple as mibr\n'), ((16998, 17016), 'numpy.cos', 'np.cos', (['xamb[2, :]'], {}), '(xamb[2, :])\n', (17004, 17016), True, 'import numpy as np\n'), ((5686, 5748), 'numpy.tile', 'np.tile', (['all_other_u[i][:, -1:]', '(1, number_ctrl_pts_executed)'], {}), '(all_other_u[i][:, -1:], (1, number_ctrl_pts_executed))\n', (5693, 5748), True, 'import numpy as np\n'), ((11921, 12007), 'src.IterativeBestResponseMPCMultiple.IterativeBestResponseMPCMultiple', 'mibr.IterativeBestResponseMPCMultiple', (['response_MPC', 'amb_MPC', 'nonresponse_MPC_list'], {}), '(response_MPC, amb_MPC,\n nonresponse_MPC_list)\n', (11958, 12007), True, 'import src.IterativeBestResponseMPCMultiple as mibr\n'), ((6853, 6905), 'numpy.tile', 'np.tile', (['uamb[:, -1:]', '(1, number_ctrl_pts_executed)'], {}), '(uamb[:, -1:], (1, number_ctrl_pts_executed))\n', (6860, 6905), 
True, 'import numpy as np\n'), ((11535, 11597), 'numpy.tile', 'np.tile', (['all_other_u[i][:, -1:]', '(1, number_ctrl_pts_executed)'], {}), '(all_other_u[i][:, -1:], (1, number_ctrl_pts_executed))\n', (11542, 11597), True, 'import numpy as np\n')] |
import math
from functools import lru_cache
class Polygon:
def __init__(self, n, R):
if n < 3:
raise ValueError('Polygon must have at least 3 vertices.')
self._n = n
self._R = R
def __repr__(self):
return f'Polygon(n={self._n}, R={self._R})'
@property
def count_vertices(self):
return self._n
@property
def count_edges(self):
return self._n
@property
def circumradius(self):
return self._R
@property
def interior_angle(self):
return (self._n - 2) * 180 / self._n
@property
def side_length(self):
return 2 * self._R * math.sin(math.pi / self._n)
@property
def apothem(self):
return self._R * math.cos(math.pi / self._n)
@property
def area(self):
return self._n / 2 * self.side_length * self.apothem
@property
def perimeter(self):
return self._n * self.side_length
def __eq__(self, other):
if isinstance(other, self.__class__):
return (self.count_edges == other.count_edges and self.circumradius == other.circumradius)
else:
return NotImplemented
def __gt__(self, other):
if isinstance(other, self.__class__):
return self.count_vertices > other.count_vertices
else:
return NotImplemented
class Polygons:
def __init__(self, m, R):
if m < 3:
raise ValueError('m must be greater than 3')
self._m = m
self._R = R
self._polygons = [Polygon(i, R) for i in range(3, m+1)]
def __len__(self):
return self._m - 2
def __repr__(self):
return f'Polygons(m={self._m}, R={self._R})'
def __getitem__(self, s):
return self._polygons[s]
def __iter__(self):
return self.PolygonIterator(self)
@property
def max_efficiency_polygon(self):
sorted_polygons = sorted(self._polygons,
key=lambda p: p.area/p.perimeter,
reverse=True)
return sorted_polygons[0]
class PolyIterator:
def __init__(self, poly_obj):
self._poly_obj = poly_obj
self._index = 0
def __iter__(self):
return self
def __next__(self):
if self._index >= len(self._poly_obj):
raise StopIteration
else:
item = self._poly_obj._polygons[self._index]
self._index += 1
return item | [
"math.cos",
"math.sin"
] | [((656, 683), 'math.sin', 'math.sin', (['(math.pi / self._n)'], {}), '(math.pi / self._n)\n', (664, 683), False, 'import math\n'), ((747, 774), 'math.cos', 'math.cos', (['(math.pi / self._n)'], {}), '(math.pi / self._n)\n', (755, 774), False, 'import math\n')] |
import numpy as np
# softmax function
def softmax(a):
exp_a = np.exp(a)
sum_a = np.sum(exp_a)
return exp_a / sum_a
# modified softmax function
def modified_softmax(a):
maxA = np.max(a)
exp_a = np.exp(a - maxA)
sum_a = np.sum(exp_a)
return exp_a / sum_a
| [
"numpy.exp",
"numpy.sum",
"numpy.max"
] | [((68, 77), 'numpy.exp', 'np.exp', (['a'], {}), '(a)\n', (74, 77), True, 'import numpy as np\n'), ((90, 103), 'numpy.sum', 'np.sum', (['exp_a'], {}), '(exp_a)\n', (96, 103), True, 'import numpy as np\n'), ((195, 204), 'numpy.max', 'np.max', (['a'], {}), '(a)\n', (201, 204), True, 'import numpy as np\n'), ((218, 234), 'numpy.exp', 'np.exp', (['(a - maxA)'], {}), '(a - maxA)\n', (224, 234), True, 'import numpy as np\n'), ((247, 260), 'numpy.sum', 'np.sum', (['exp_a'], {}), '(exp_a)\n', (253, 260), True, 'import numpy as np\n')] |
# Copyright 2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
from ..objects.exceptions import NotFoundError
import re
class Tier():
def __init__(self, name, typ, version):
self.name = name
self.type = typ
self.version = version.strip()
self.string = self.__str__()
def to_struct(self):
strct = {
'Name': self.name,
'Type': self.type,
}
if self.version:
strct['Version'] = self.version
return strct
def __str__(self):
s = self.name + '-' + self.type
if self.version:
s += '-' + self.version
return s
def __eq__(self, other):
if not isinstance(other, Tier):
return False
return self.string.lower() == other.string.lower()
@staticmethod
def get_all_tiers():
lst = [
Tier('WebServer', 'Standard', '1.0'),
Tier('Worker', 'SQS/HTTP', '1.0'),
Tier('Worker', 'SQS/HTTP', '1.1'),
Tier('Worker', 'SQS/HTTP', ''),
]
return lst
@staticmethod
def parse_tier(string):
if string.lower() == 'web' or string.lower() == 'webserver':
return Tier('WebServer', 'Standard', '1.0')
if string.lower() == 'worker':
return Tier('Worker', 'SQS/HTTP', '')
params = string.split('-')
if len(params) == 3:
name, typ, version = string.split('-')
elif len(params) == 2:
name, typ = string.split('-')
if re.match('\d+[.]\d+', typ):
version = typ
else:
version = ''
else:
raise NotFoundError('Tier Not found')
# we want to return the Proper, uppercase version
if name.lower() == 'webserver' or name.lower() == 'web':
return Tier('WebServer', 'Standard', version)
elif name.lower() == 'worker':
return Tier('Worker', 'SQS/HTTP', version)
# tier not found
raise NotFoundError('Tier Not found') | [
"re.match"
] | [((2048, 2076), 're.match', 're.match', (['"""\\\\d+[.]\\\\d+"""', 'typ'], {}), "('\\\\d+[.]\\\\d+', typ)\n", (2056, 2076), False, 'import re\n')] |
"""
PGN Scraper is a small program which downloads each of a user's archived games from chess.com and stores them in a pgn file.
When running the user is asked for the account name which shall be scraped and for game types.
The scraper only downloads games of the correct type.
Supported types are: bullet, rapid, blitz
rated, unrated
standard chess, other ruless (chess960, oddchess, etc.)
"""
from datetime import datetime
import json
import urllib.request
import os
def CheckFileName(file_name):
"""
This function checks if a file with file_name already exists. If yes an error message is printed and the script aborted.
"""
if os.path.isfile(os.getcwd()+f"/{file_name}"):
print(f"Error: A file named '{file_name}' already exists.")
print("Exiting...")
quit()
def GameTypeTrue(game,game_type,rated,rules):
"""
This function checks if the game is of the type defined in game_type (bullet, rapid or blitz) and returns either True or False.
"""
# Check if game is of the correct type
for type in game_type:
for ra in rated:
for ru in rules:
if (game["time_class"] == type) and (game["rated"] == ra) and ( (game["rules"] == "chess") == ru):
return True
# If not correct type return False
return False
def initScrape():
"""
This functions is used to set up the scraping parameters like account name and game type.
"""
# Input account name
acc_name = input("Enter account name: ").strip()
# Check if acc_name is empty
if bool(acc_name) == False:
print("Error: Empty account name!")
quit()
# Input game type
#game_type_code = input("Enter game type [1] All (default), [2] Rapid, [3] Blitz, [4] Bullet, [5] Rapid and Blitz: ").strip()
# If game_type_code is empty set to 1
#if bool(game_type_code) == False:
game_type_code = "1"
# Create dictionary for different game type options und apply input
game_type_dict = {
"1" : ["bullet", "blitz", "rapid"],
"2" : ["rapid"],
"3" : ["blitz"],
"4" : ["bullet"],
"5" : ["blitz", "rapid"]
}
game_type = game_type_dict["1"]
# Input rated/unrated
#rated_code = input("Consider [1] only rated games (default), [2] only unrated or [3] all games: ").strip()
# If rated_code is empty set to 1
#if bool(rated_code) == False:
rated_code = "1"
# Create dictionary for rated/unraked and apply input
rated_dict = {
"1" : [True],
"2" : [False],
"3" : [True, False]
}
# try:
rated = rated_dict["3"]
# except KeyError:
# print("Error: Invalid input!\nExiting...")
# quit()
# Input rules ("chess"/other)
# rules_code = input("Consider [1] only standard chess (default), [2] only other modes (oddchess, bughouse etc.) or [3] any type: ").strip()
# If rules_code is empty set to 1
# if bool(rules_code) == False:
rules_code = "1"
# Create dictionary for rules and apply input
rules_dict = {
"1" : [True],
"2" : [False],
"3" : [True, False]
}
#try:
rules = rules_dict[rules_code]
# except KeyError:
# print("Error: Invalid input!\nExiting...")
# quit()
# Print warning if only rated and only other rules are selected
if (rated_code == "1") and (rules_code == "2"):
print("Warning: You selected only rated AND only other chess modes!")
print(" Other chess modes are often unrated!")
return [acc_name, game_type, rated, rules]
def beginScrape(params):
"""
The downloading of the PGN archives happens here.
The file is saved as "username_YYYY-MM-dd.pgn"
"""
# Passing the predefined parameters
acc_name = params[0]
game_type = params[1]
rated = params[2]
rules = params[3]
# Create name of pgn file
now = datetime.now()
date = now.strftime("%Y-%m-%d")
game_type_string = "_".join(game_type)
file_name = f"{acc_name}_{date}_{game_type_string}.pgn"
# Check if file already exists
CheckFileName(file_name)
# Run the request, check games for type and write correct ones to file
with urllib.request.urlopen(f"https://api.chess.com/pub/player/{acc_name}/games/archives") as url:
archives = list(dict(json.loads(url.read().decode()))["archives"])
for archive in archives:
with urllib.request.urlopen(archive) as url:
games = list(dict(json.loads(url.read().decode()))["games"])
for game in games:
if GameTypeTrue(game,game_type,rated,rules):
with open(file_name, "a") as text_file:
print(game["pgn"], file=text_file)
print("\n", file=text_file)
def main():
"""
Scrape PGN files from chess.com .
"""
params = initScrape()
beginScrape(params)
if __name__ == '__main__':
main()
| [
"datetime.datetime.now",
"os.getcwd"
] | [((3969, 3983), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3981, 3983), False, 'from datetime import datetime\n'), ((706, 717), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (715, 717), False, 'import os\n')] |
import sqlite3
import codecs # for using '한글'
import os
# 타이틀 정보 읽어오기
f = codecs.open("jjd_info_title.txt", "r")
title_list = []
while True:
line = f.readline() # 한 줄씩 읽
if not line: break # break the loop when it's End Of File
title_list.append(line) # split the line and append it to list
f.close()
# 날짜 정보 읽어오기
f = codecs.open("jjd_info_date.txt", "r")
date_list = []
while True:
line = f.readline() # 한 줄씩 읽
if not line: break # break the loop when it's End Of File
date_list.append(line) # split the line and append it to list
f.close()
# 조회수 정보 읽어오기
f = codecs.open("jjd_info_view.txt", "r")
view_list = []
while True:
line = f.readline()
if not line: break
view_list.append(line)
f.close
# href(링크) 정보 읽어오기
f = codecs.open("jjd_info_href.txt", "r")
href_list = []
while True:
line = f.readline()
if not line: break
href_list.append(line)
f.close
################################################################################
###################################### DB ######################################
# below 'print' is for checking the data structure. Don't care.
#print("saved data(1) : ", list[0][0])
#print("saved data(2) : ", list[1])
# connect 'db.sqlite3' in the django folder and manipulate it
con = sqlite3.connect("db.sqlite3")
cur = con.cursor() # use 'cursor' to use DB
# you don't need to care the below CREATE TABLE command.
# cur.execute("CREATE TABLE if not exists website1_crawlingdata(Name text, Period text);")
total_list = []
for i in range(len(date_list)):
temp = [str(i+1), title_list[i], date_list[i], view_list[i], href_list[i]]
total_list.append(temp)
# print(total_list)
cur.execute("delete from website1_jjd_info;")
idx = 0 # 리스트의 인덱스에 접근하는 변수
while idx < len(date_list):
cur.execute("INSERT INTO website1_jjd_info VALUES(?, ?, ?, ?, ?);", total_list[idx])
# 'INSERT' each value of the total_list to the table of DB.
idx += 1
con.commit() # The new input is gonna be saved in the DB with 'commit' command
idx = 0
con.close()
| [
"codecs.open",
"sqlite3.connect"
] | [((75, 113), 'codecs.open', 'codecs.open', (['"""jjd_info_title.txt"""', '"""r"""'], {}), "('jjd_info_title.txt', 'r')\n", (86, 113), False, 'import codecs\n'), ((354, 391), 'codecs.open', 'codecs.open', (['"""jjd_info_date.txt"""', '"""r"""'], {}), "('jjd_info_date.txt', 'r')\n", (365, 391), False, 'import codecs\n'), ((631, 668), 'codecs.open', 'codecs.open', (['"""jjd_info_view.txt"""', '"""r"""'], {}), "('jjd_info_view.txt', 'r')\n", (642, 668), False, 'import codecs\n'), ((814, 851), 'codecs.open', 'codecs.open', (['"""jjd_info_href.txt"""', '"""r"""'], {}), "('jjd_info_href.txt', 'r')\n", (825, 851), False, 'import codecs\n'), ((1345, 1374), 'sqlite3.connect', 'sqlite3.connect', (['"""db.sqlite3"""'], {}), "('db.sqlite3')\n", (1360, 1374), False, 'import sqlite3\n')] |
from opt_utils import *
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-s", "--skip_compilation", action='store_true', help="skip compilation")
args = parser.parse_args()
if not args.skip_compilation:
compile_all_opt_examples()
for example in all_examples:
args = []
output = run_example(example, args, True).decode('ascii')
with open(example + ".log", "w") as text_file:
text_file.write(output)
| [
"argparse.ArgumentParser"
] | [((49, 74), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (72, 74), False, 'import argparse\n')] |
import unittest
from unittest.mock import Mock
import mock
import peerfinder.peerfinder as peerfinder
import requests
from ipaddress import IPv6Address, IPv4Address
class testPeerFinder(unittest.TestCase):
def setUp(self):
self.netixlan_set = {
"id": 1,
"ix_id": 2,
"name": "Test IX",
"ixlan_id": 3,
"notes": "",
"speed": 1000,
"asn": 65536,
"ipaddr4": ["192.0.2.1"],
"ipaddr6": ["0100::"],
"is_rs_peer": True,
"operational": True,
"created": "2010-01-01T00:00:00Z",
"updated": "2010-01-01T00:00:00Z",
"status": "ok",
}
self.netfac_set = {
"id": 1,
"name": "Test Facility",
"city": "Dublin",
"country": "IE",
"fac_id": 1,
"local_asn": 65536,
"created": "2010-01-01T00:00:00Z",
"updated": "2010-01-01T00:00:00Z",
"status": "ok",
}
self.peer = {"name": "<NAME>", "asn": 65536}
def test_pdb_to_ixp(self):
expected = peerfinder.IXP(
name="Test IX",
subnet4=[IPv4Address("192.0.2.1")],
subnet6=[IPv6Address("0100::")],
speed=1000,
)
self.assertEqual(expected, peerfinder.pdb_to_ixp(self.netixlan_set))
def test_pdb_to_peer(self):
ixp = peerfinder.pdb_to_ixp(self.netixlan_set)
fac = peerfinder.pdb_to_fac(self.netfac_set)
expected = peerfinder.Peer(
name="<NAME>", ASN=65536, peering_on=ixp, present_in=fac,
)
self.assertEqual(expected, peerfinder.pdb_to_peer(self.peer, ixp, fac))
def test_pdb_to_fac(self):
expected = peerfinder.Facility(name="Test Facility", ASN=65536)
self.assertEqual(expected, peerfinder.pdb_to_fac(self.netfac_set))
def test__dedup_ixs(self):
expected = {
"Test IX": {
"ipaddr4": [["192.0.2.1"], ["192.0.2.1"]],
"ipaddr6": [["0100::"], ["0100::"]],
"name": "Test IX",
"speed": 2000,
}
}
self.assertEqual(
expected, peerfinder._dedup_ixs([self.netixlan_set, self.netixlan_set]),
)
def test_fetch_ix_from_ixps(self):
expected = peerfinder.pdb_to_ixp(self.netixlan_set)
ixp = [peerfinder.pdb_to_ixp(self.netixlan_set)]
self.assertEqual(expected, peerfinder.fetch_ix_from_ixps("Test IX", ixp))
def test_fetch_fac_from_facilities(self):
expected = peerfinder.pdb_to_fac(self.netfac_set)
fac = [peerfinder.pdb_to_fac(self.netfac_set)]
self.assertEqual(expected, peerfinder.fetch_ix_from_ixps("Test Facility", fac))
def test_fetch_common_ixps(self):
ixp = [peerfinder.pdb_to_ixp(self.netixlan_set)]
fac = [peerfinder.pdb_to_fac(self.netfac_set)]
peer = [peerfinder.pdb_to_peer(self.peer, ixp, fac)]
expected = {"Test IX"}
self.assertEqual(expected, peerfinder.fetch_common_ixps(peer))
def test_fetch_common_facilities(self):
ixp = [peerfinder.pdb_to_ixp(self.netixlan_set)]
fac = [peerfinder.pdb_to_fac(self.netfac_set)]
peer = [peerfinder.pdb_to_peer(self.peer, ixp, fac)]
expected = {"Test Facility"}
self.assertEqual(expected, peerfinder.fetch_common_facilities(peer))
@mock.patch.object(requests, "get", autospec=True)
def test_getPeeringDBSuccess(self, requests_mock):
r_mock = Mock()
r_mock.status_code = 200
r_mock.text = "some text"
r_mock.json.return_value = {"data": [0]}
requests_mock.return_value = r_mock
expected = {"data": [0]}
self.assertEqual(expected, peerfinder.getPeeringDB("23456"))
def test_fetch_fac_from_facilities(self):
fac = [peerfinder.pdb_to_fac(self.netfac_set)]
fac_name = "Test Facility"
expected = peerfinder.Facility(name="Test Facility", ASN=65536)
self.assertEqual(expected, peerfinder.fetch_fac_from_facilities(fac_name, fac))
def test_fetch_different_ixps(self):
ix1 = peerfinder.IXP(
name="Test IX1",
subnet4=[IPv4Address("192.0.2.1")],
subnet6=[IPv6Address("0100::")],
speed=1000,
)
ix2 = peerfinder.IXP(
name="Test IX2",
subnet4=[IPv4Address("192.0.2.2")],
subnet6=[IPv6Address("0100::")],
speed=1000,
)
expected = ["Test IX1", "Test IX2"]
peer1 = peerfinder.Peer(name="peer1", ASN=1, present_in=[], peering_on=[ix1])
peer2 = peerfinder.Peer(name="peer2", ASN=1, present_in=[], peering_on=[ix2])
self.assertEqual(expected, peerfinder.fetch_different_ixps([peer1, peer2]))
def test_print_ixp(self):
ix1 = peerfinder.IXP(
name="Test IX1",
subnet4=[IPv4Address("192.0.2.1")],
subnet6=[IPv6Address("0100::")],
speed=1000,
)
ix2 = peerfinder.IXP(
name="Test IX2",
subnet4=[IPv4Address("192.0.2.2")],
subnet6=[IPv6Address("0100::")],
speed=1000,
)
peer1 = peerfinder.Peer(name="peer1", ASN=1, present_in=[], peering_on=[ix1])
peer2 = peerfinder.Peer(
name="peer2", ASN=1, present_in=[], peering_on=[ix1, ix2]
)
self.assertIsNone(peerfinder.print_ixp([peer1, peer2]))
def test_print_fac(self):
fac1 = peerfinder.Facility(name="Test Facility 1", ASN=1,)
fac2 = peerfinder.Facility(name="Test Facility 2", ASN=1,)
peer1 = peerfinder.Peer(
name="peer1", ASN=1, present_in=[fac1, fac2], peering_on=[]
)
peer2 = peerfinder.Peer(name="peer2", ASN=1, present_in=[fac1], peering_on=[])
self.assertIsNone(peerfinder.print_fac([peer1, peer2]))
def test_print_uncommon(self):
ix1 = peerfinder.IXP(
name="Test IX1",
subnet4=[IPv4Address("192.0.2.1")],
subnet6=[IPv6Address("0100::")],
speed=1000,
)
ix2 = peerfinder.IXP(
name="Test IX2",
subnet4=[IPv4Address("192.0.2.2")],
subnet6=[IPv6Address("0100::")],
speed=1000,
)
peer1 = peerfinder.Peer(name="peer1", ASN=1, present_in=[], peering_on=[ix1])
peer2 = peerfinder.Peer(
name="peer2", ASN=1, present_in=[], peering_on=[ix1, ix2]
)
self.assertIsNone(peerfinder.print_uncommon([peer1, peer2]))
if __name__ == "__main__":
unittest.main()
| [
"ipaddress.IPv4Address",
"unittest.main",
"peerfinder.peerfinder.pdb_to_peer",
"peerfinder.peerfinder.getPeeringDB",
"peerfinder.peerfinder.pdb_to_fac",
"peerfinder.peerfinder.fetch_common_facilities",
"peerfinder.peerfinder.Peer",
"peerfinder.peerfinder.fetch_different_ixps",
"peerfinder.peerfinder... | [((3453, 3502), 'mock.patch.object', 'mock.patch.object', (['requests', '"""get"""'], {'autospec': '(True)'}), "(requests, 'get', autospec=True)\n", (3470, 3502), False, 'import mock\n'), ((6661, 6676), 'unittest.main', 'unittest.main', ([], {}), '()\n', (6674, 6676), False, 'import unittest\n'), ((1444, 1484), 'peerfinder.peerfinder.pdb_to_ixp', 'peerfinder.pdb_to_ixp', (['self.netixlan_set'], {}), '(self.netixlan_set)\n', (1465, 1484), True, 'import peerfinder.peerfinder as peerfinder\n'), ((1499, 1537), 'peerfinder.peerfinder.pdb_to_fac', 'peerfinder.pdb_to_fac', (['self.netfac_set'], {}), '(self.netfac_set)\n', (1520, 1537), True, 'import peerfinder.peerfinder as peerfinder\n'), ((1557, 1630), 'peerfinder.peerfinder.Peer', 'peerfinder.Peer', ([], {'name': '"""<NAME>"""', 'ASN': '(65536)', 'peering_on': 'ixp', 'present_in': 'fac'}), "(name='<NAME>', ASN=65536, peering_on=ixp, present_in=fac)\n", (1572, 1630), True, 'import peerfinder.peerfinder as peerfinder\n'), ((1785, 1837), 'peerfinder.peerfinder.Facility', 'peerfinder.Facility', ([], {'name': '"""Test Facility"""', 'ASN': '(65536)'}), "(name='Test Facility', ASN=65536)\n", (1804, 1837), True, 'import peerfinder.peerfinder as peerfinder\n'), ((2373, 2413), 'peerfinder.peerfinder.pdb_to_ixp', 'peerfinder.pdb_to_ixp', (['self.netixlan_set'], {}), '(self.netixlan_set)\n', (2394, 2413), True, 'import peerfinder.peerfinder as peerfinder\n'), ((2619, 2657), 'peerfinder.peerfinder.pdb_to_fac', 'peerfinder.pdb_to_fac', (['self.netfac_set'], {}), '(self.netfac_set)\n', (2640, 2657), True, 'import peerfinder.peerfinder as peerfinder\n'), ((3575, 3581), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (3579, 3581), False, 'from unittest.mock import Mock\n'), ((4000, 4052), 'peerfinder.peerfinder.Facility', 'peerfinder.Facility', ([], {'name': '"""Test Facility"""', 'ASN': '(65536)'}), "(name='Test Facility', ASN=65536)\n", (4019, 4052), True, 'import peerfinder.peerfinder as peerfinder\n'), 
((4615, 4684), 'peerfinder.peerfinder.Peer', 'peerfinder.Peer', ([], {'name': '"""peer1"""', 'ASN': '(1)', 'present_in': '[]', 'peering_on': '[ix1]'}), "(name='peer1', ASN=1, present_in=[], peering_on=[ix1])\n", (4630, 4684), True, 'import peerfinder.peerfinder as peerfinder\n'), ((4701, 4770), 'peerfinder.peerfinder.Peer', 'peerfinder.Peer', ([], {'name': '"""peer2"""', 'ASN': '(1)', 'present_in': '[]', 'peering_on': '[ix2]'}), "(name='peer2', ASN=1, present_in=[], peering_on=[ix2])\n", (4716, 4770), True, 'import peerfinder.peerfinder as peerfinder\n'), ((5274, 5343), 'peerfinder.peerfinder.Peer', 'peerfinder.Peer', ([], {'name': '"""peer1"""', 'ASN': '(1)', 'present_in': '[]', 'peering_on': '[ix1]'}), "(name='peer1', ASN=1, present_in=[], peering_on=[ix1])\n", (5289, 5343), True, 'import peerfinder.peerfinder as peerfinder\n'), ((5360, 5434), 'peerfinder.peerfinder.Peer', 'peerfinder.Peer', ([], {'name': '"""peer2"""', 'ASN': '(1)', 'present_in': '[]', 'peering_on': '[ix1, ix2]'}), "(name='peer2', ASN=1, present_in=[], peering_on=[ix1, ix2])\n", (5375, 5434), True, 'import peerfinder.peerfinder as peerfinder\n'), ((5567, 5617), 'peerfinder.peerfinder.Facility', 'peerfinder.Facility', ([], {'name': '"""Test Facility 1"""', 'ASN': '(1)'}), "(name='Test Facility 1', ASN=1)\n", (5586, 5617), True, 'import peerfinder.peerfinder as peerfinder\n'), ((5634, 5684), 'peerfinder.peerfinder.Facility', 'peerfinder.Facility', ([], {'name': '"""Test Facility 2"""', 'ASN': '(1)'}), "(name='Test Facility 2', ASN=1)\n", (5653, 5684), True, 'import peerfinder.peerfinder as peerfinder\n'), ((5702, 5778), 'peerfinder.peerfinder.Peer', 'peerfinder.Peer', ([], {'name': '"""peer1"""', 'ASN': '(1)', 'present_in': '[fac1, fac2]', 'peering_on': '[]'}), "(name='peer1', ASN=1, present_in=[fac1, fac2], peering_on=[])\n", (5717, 5778), True, 'import peerfinder.peerfinder as peerfinder\n'), ((5817, 5887), 'peerfinder.peerfinder.Peer', 'peerfinder.Peer', ([], {'name': '"""peer2"""', 'ASN': 
'(1)', 'present_in': '[fac1]', 'peering_on': '[]'}), "(name='peer2', ASN=1, present_in=[fac1], peering_on=[])\n", (5832, 5887), True, 'import peerfinder.peerfinder as peerfinder\n'), ((6376, 6445), 'peerfinder.peerfinder.Peer', 'peerfinder.Peer', ([], {'name': '"""peer1"""', 'ASN': '(1)', 'present_in': '[]', 'peering_on': '[ix1]'}), "(name='peer1', ASN=1, present_in=[], peering_on=[ix1])\n", (6391, 6445), True, 'import peerfinder.peerfinder as peerfinder\n'), ((6462, 6536), 'peerfinder.peerfinder.Peer', 'peerfinder.Peer', ([], {'name': '"""peer2"""', 'ASN': '(1)', 'present_in': '[]', 'peering_on': '[ix1, ix2]'}), "(name='peer2', ASN=1, present_in=[], peering_on=[ix1, ix2])\n", (6477, 6536), True, 'import peerfinder.peerfinder as peerfinder\n'), ((1355, 1395), 'peerfinder.peerfinder.pdb_to_ixp', 'peerfinder.pdb_to_ixp', (['self.netixlan_set'], {}), '(self.netixlan_set)\n', (1376, 1395), True, 'import peerfinder.peerfinder as peerfinder\n'), ((1689, 1732), 'peerfinder.peerfinder.pdb_to_peer', 'peerfinder.pdb_to_peer', (['self.peer', 'ixp', 'fac'], {}), '(self.peer, ixp, fac)\n', (1711, 1732), True, 'import peerfinder.peerfinder as peerfinder\n'), ((1873, 1911), 'peerfinder.peerfinder.pdb_to_fac', 'peerfinder.pdb_to_fac', (['self.netfac_set'], {}), '(self.netfac_set)\n', (1894, 1911), True, 'import peerfinder.peerfinder as peerfinder\n'), ((2241, 2302), 'peerfinder.peerfinder._dedup_ixs', 'peerfinder._dedup_ixs', (['[self.netixlan_set, self.netixlan_set]'], {}), '([self.netixlan_set, self.netixlan_set])\n', (2262, 2302), True, 'import peerfinder.peerfinder as peerfinder\n'), ((2429, 2469), 'peerfinder.peerfinder.pdb_to_ixp', 'peerfinder.pdb_to_ixp', (['self.netixlan_set'], {}), '(self.netixlan_set)\n', (2450, 2469), True, 'import peerfinder.peerfinder as peerfinder\n'), ((2506, 2551), 'peerfinder.peerfinder.fetch_ix_from_ixps', 'peerfinder.fetch_ix_from_ixps', (['"""Test IX"""', 'ixp'], {}), "('Test IX', ixp)\n", (2535, 2551), True, 'import peerfinder.peerfinder as 
peerfinder\n'), ((2673, 2711), 'peerfinder.peerfinder.pdb_to_fac', 'peerfinder.pdb_to_fac', (['self.netfac_set'], {}), '(self.netfac_set)\n', (2694, 2711), True, 'import peerfinder.peerfinder as peerfinder\n'), ((2748, 2799), 'peerfinder.peerfinder.fetch_ix_from_ixps', 'peerfinder.fetch_ix_from_ixps', (['"""Test Facility"""', 'fac'], {}), "('Test Facility', fac)\n", (2777, 2799), True, 'import peerfinder.peerfinder as peerfinder\n'), ((2855, 2895), 'peerfinder.peerfinder.pdb_to_ixp', 'peerfinder.pdb_to_ixp', (['self.netixlan_set'], {}), '(self.netixlan_set)\n', (2876, 2895), True, 'import peerfinder.peerfinder as peerfinder\n'), ((2912, 2950), 'peerfinder.peerfinder.pdb_to_fac', 'peerfinder.pdb_to_fac', (['self.netfac_set'], {}), '(self.netfac_set)\n', (2933, 2950), True, 'import peerfinder.peerfinder as peerfinder\n'), ((2968, 3011), 'peerfinder.peerfinder.pdb_to_peer', 'peerfinder.pdb_to_peer', (['self.peer', 'ixp', 'fac'], {}), '(self.peer, ixp, fac)\n', (2990, 3011), True, 'import peerfinder.peerfinder as peerfinder\n'), ((3079, 3113), 'peerfinder.peerfinder.fetch_common_ixps', 'peerfinder.fetch_common_ixps', (['peer'], {}), '(peer)\n', (3107, 3113), True, 'import peerfinder.peerfinder as peerfinder\n'), ((3175, 3215), 'peerfinder.peerfinder.pdb_to_ixp', 'peerfinder.pdb_to_ixp', (['self.netixlan_set'], {}), '(self.netixlan_set)\n', (3196, 3215), True, 'import peerfinder.peerfinder as peerfinder\n'), ((3232, 3270), 'peerfinder.peerfinder.pdb_to_fac', 'peerfinder.pdb_to_fac', (['self.netfac_set'], {}), '(self.netfac_set)\n', (3253, 3270), True, 'import peerfinder.peerfinder as peerfinder\n'), ((3288, 3331), 'peerfinder.peerfinder.pdb_to_peer', 'peerfinder.pdb_to_peer', (['self.peer', 'ixp', 'fac'], {}), '(self.peer, ixp, fac)\n', (3310, 3331), True, 'import peerfinder.peerfinder as peerfinder\n'), ((3405, 3445), 'peerfinder.peerfinder.fetch_common_facilities', 'peerfinder.fetch_common_facilities', (['peer'], {}), '(peer)\n', (3439, 3445), True, 'import 
peerfinder.peerfinder as peerfinder\n'), ((3810, 3842), 'peerfinder.peerfinder.getPeeringDB', 'peerfinder.getPeeringDB', (['"""23456"""'], {}), "('23456')\n", (3833, 3842), True, 'import peerfinder.peerfinder as peerfinder\n'), ((3906, 3944), 'peerfinder.peerfinder.pdb_to_fac', 'peerfinder.pdb_to_fac', (['self.netfac_set'], {}), '(self.netfac_set)\n', (3927, 3944), True, 'import peerfinder.peerfinder as peerfinder\n'), ((4088, 4139), 'peerfinder.peerfinder.fetch_fac_from_facilities', 'peerfinder.fetch_fac_from_facilities', (['fac_name', 'fac'], {}), '(fac_name, fac)\n', (4124, 4139), True, 'import peerfinder.peerfinder as peerfinder\n'), ((4806, 4853), 'peerfinder.peerfinder.fetch_different_ixps', 'peerfinder.fetch_different_ixps', (['[peer1, peer2]'], {}), '([peer1, peer2])\n', (4837, 4853), True, 'import peerfinder.peerfinder as peerfinder\n'), ((5483, 5519), 'peerfinder.peerfinder.print_ixp', 'peerfinder.print_ixp', (['[peer1, peer2]'], {}), '([peer1, peer2])\n', (5503, 5519), True, 'import peerfinder.peerfinder as peerfinder\n'), ((5914, 5950), 'peerfinder.peerfinder.print_fac', 'peerfinder.print_fac', (['[peer1, peer2]'], {}), '([peer1, peer2])\n', (5934, 5950), True, 'import peerfinder.peerfinder as peerfinder\n'), ((6585, 6626), 'peerfinder.peerfinder.print_uncommon', 'peerfinder.print_uncommon', (['[peer1, peer2]'], {}), '([peer1, peer2])\n', (6610, 6626), True, 'import peerfinder.peerfinder as peerfinder\n'), ((1214, 1238), 'ipaddress.IPv4Address', 'IPv4Address', (['"""192.0.2.1"""'], {}), "('192.0.2.1')\n", (1225, 1238), False, 'from ipaddress import IPv6Address, IPv4Address\n'), ((1262, 1283), 'ipaddress.IPv6Address', 'IPv6Address', (['"""0100::"""'], {}), "('0100::')\n", (1273, 1283), False, 'from ipaddress import IPv6Address, IPv4Address\n'), ((4263, 4287), 'ipaddress.IPv4Address', 'IPv4Address', (['"""192.0.2.1"""'], {}), "('192.0.2.1')\n", (4274, 4287), False, 'from ipaddress import IPv6Address, IPv4Address\n'), ((4311, 4332), 
'ipaddress.IPv6Address', 'IPv6Address', (['"""0100::"""'], {}), "('0100::')\n", (4322, 4332), False, 'from ipaddress import IPv6Address, IPv4Address\n'), ((4449, 4473), 'ipaddress.IPv4Address', 'IPv4Address', (['"""192.0.2.2"""'], {}), "('192.0.2.2')\n", (4460, 4473), False, 'from ipaddress import IPv6Address, IPv4Address\n'), ((4497, 4518), 'ipaddress.IPv6Address', 'IPv6Address', (['"""0100::"""'], {}), "('0100::')\n", (4508, 4518), False, 'from ipaddress import IPv6Address, IPv4Address\n'), ((4966, 4990), 'ipaddress.IPv4Address', 'IPv4Address', (['"""192.0.2.1"""'], {}), "('192.0.2.1')\n", (4977, 4990), False, 'from ipaddress import IPv6Address, IPv4Address\n'), ((5014, 5035), 'ipaddress.IPv6Address', 'IPv6Address', (['"""0100::"""'], {}), "('0100::')\n", (5025, 5035), False, 'from ipaddress import IPv6Address, IPv4Address\n'), ((5152, 5176), 'ipaddress.IPv4Address', 'IPv4Address', (['"""192.0.2.2"""'], {}), "('192.0.2.2')\n", (5163, 5176), False, 'from ipaddress import IPv6Address, IPv4Address\n'), ((5200, 5221), 'ipaddress.IPv6Address', 'IPv6Address', (['"""0100::"""'], {}), "('0100::')\n", (5211, 5221), False, 'from ipaddress import IPv6Address, IPv4Address\n'), ((6068, 6092), 'ipaddress.IPv4Address', 'IPv4Address', (['"""192.0.2.1"""'], {}), "('192.0.2.1')\n", (6079, 6092), False, 'from ipaddress import IPv6Address, IPv4Address\n'), ((6116, 6137), 'ipaddress.IPv6Address', 'IPv6Address', (['"""0100::"""'], {}), "('0100::')\n", (6127, 6137), False, 'from ipaddress import IPv6Address, IPv4Address\n'), ((6254, 6278), 'ipaddress.IPv4Address', 'IPv4Address', (['"""192.0.2.2"""'], {}), "('192.0.2.2')\n", (6265, 6278), False, 'from ipaddress import IPv6Address, IPv4Address\n'), ((6302, 6323), 'ipaddress.IPv6Address', 'IPv6Address', (['"""0100::"""'], {}), "('0100::')\n", (6313, 6323), False, 'from ipaddress import IPv6Address, IPv4Address\n')] |
# -*- encoding: utf-8 -*-
#
# Copyright © 2012 New Dream Network, LLC (DreamHost)
#
# Author: <NAME> <<EMAIL>>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Handler for producing network counter messages from Neutron notification
events.
"""
from oslo.config import cfg
from ceilometer.openstack.common.gettextutils import _ # noqa
from ceilometer.openstack.common import log
from ceilometer import plugin
from ceilometer import sample
# Configuration options exposed by this plugin module.
OPTS = [
    cfg.StrOpt('neutron_control_exchange',
               default='neutron',
               help="Exchange name for Neutron notifications",
               deprecated_name='quantum_control_exchange'),
]
# Register the options on the global config object at import time.
cfg.CONF.register_opts(OPTS)
LOG = log.getLogger(__name__)
class NetworkNotificationBase(plugin.NotificationBase):
    """Common base for turning Neutron resource notifications into samples.

    Subclasses set ``resource_name`` (and optionally ``counter_name`` and
    ``unit``) to select which Neutron resource they meter.
    """
    # Name of the Neutron resource ('network', 'subnet', ...); set by subclasses.
    resource_name = None
    @property
    def event_types(self):
        # Notification event types this plugin subscribes to.
        return [
            # NOTE(flwang): When the *.create.start notification sending,
            # there is no resource id assigned by Neutron yet. So we ignore
            # the *.create.start notification for now and only listen the
            # *.create.end to make sure the resource id is existed.
            '%s.create.end' % (self.resource_name),
            '%s.update.*' % (self.resource_name),
            '%s.exists' % (self.resource_name),
            # FIXME(dhellmann): Neutron delete notifications do
            # not include the same metadata as the other messages,
            # so we ignore them for now. This isn't ideal, since
            # it may mean we miss charging for some amount of time,
            # but it is better than throwing away the existing
            # metadata for a resource when it is deleted.
            ##'%s.delete.start' % (self.resource_name),
        ]
    @staticmethod
    def get_exchange_topics(conf):
        """Return a sequence of ExchangeTopics defining the exchange and topics
        to be connected for this plugin.
        """
        return [
            plugin.ExchangeTopics(
                exchange=conf.neutron_control_exchange,
                topics=set(topic + ".info"
                           for topic in conf.notification_topics)),
        ]
    def process_notification(self, message):
        """Yield samples extracted from a Neutron notification *message*.

        Always yields one gauge sample.  For events of the form
        '<resource>.<action>.<suffix>' (e.g. 'network.create.end') a second,
        delta sample named '<counter>.<action>' is yielded as well.
        """
        LOG.info(_('network notification %r') % message)
        # Replace the payload with the resource-specific sub-dict.
        message['payload'] = message['payload'][self.resource_name]
        counter_name = getattr(self, 'counter_name', self.resource_name)
        unit_value = getattr(self, 'unit', self.resource_name)
        yield sample.Sample.from_notification(
            name=counter_name,
            type=sample.TYPE_GAUGE,
            unit=unit_value,
            volume=1,
            user_id=message['_context_user_id'],
            project_id=message['payload']['tenant_id'],
            resource_id=message['payload']['id'],
            message=message)
        event_type_split = message['event_type'].split('.')
        if len(event_type_split) > 2:
            # e.g. 'network.create.end' -> extra delta sample 'network.create'.
            yield sample.Sample.from_notification(
                name=counter_name
                + "." + event_type_split[1],
                type=sample.TYPE_DELTA,
                unit=unit_value,
                volume=1,
                user_id=message['_context_user_id'],
                project_id=message['payload']['tenant_id'],
                resource_id=message['payload']['id'],
                message=message)
class Network(NetworkNotificationBase):
    """Listen for Neutron network notifications in order to mediate with the
    metering framework.
    """
    # Meter the 'network' resource; samples inherit this name.
    resource_name = 'network'
class Subnet(NetworkNotificationBase):
    """Listen for Neutron notifications in order to mediate with the
    metering framework.
    """
    # Meter the 'subnet' resource; samples inherit this name.
    resource_name = 'subnet'
class Port(NetworkNotificationBase):
    """Listen for Neutron notifications in order to mediate with the
    metering framework.
    """
    # Meter the 'port' resource; samples inherit this name.
    resource_name = 'port'
class Router(NetworkNotificationBase):
    """Listen for Neutron notifications in order to mediate with the
    metering framework.
    """
    # Meter the 'router' resource; samples inherit this name.
    resource_name = 'router'
class FloatingIP(NetworkNotificationBase):
    """Listen for Neutron notifications in order to mediate with the
    metering framework.
    """
    resource_name = 'floatingip'
    # Override the default sample name and unit (which would otherwise
    # both be 'floatingip').
    counter_name = 'ip.floating'
    unit = 'ip'
class Bandwidth(NetworkNotificationBase):
    """Listen for Neutron notifications in order to mediate with the
    metering framework.

    Converts 'l3.meter' bandwidth measurements into delta samples.
    """
    event_types = ['l3.meter']
    def process_notification(self, message):
        # One delta sample per measurement; no user id is available for
        # meter events, only the tenant that owns the metering label.
        yield sample.Sample.from_notification(
            name='bandwidth',
            type=sample.TYPE_DELTA,
            unit='B',
            volume=message['payload']['bytes'],
            user_id=None,
            project_id=message['payload']['tenant_id'],
            resource_id=message['payload']['label_id'],
            message=message)
| [
"oslo.config.cfg.StrOpt",
"ceilometer.sample.Sample.from_notification",
"ceilometer.openstack.common.gettextutils._",
"ceilometer.openstack.common.log.getLogger",
"oslo.config.cfg.CONF.register_opts"
] | [((1160, 1188), 'oslo.config.cfg.CONF.register_opts', 'cfg.CONF.register_opts', (['OPTS'], {}), '(OPTS)\n', (1182, 1188), False, 'from oslo.config import cfg\n'), ((1196, 1219), 'ceilometer.openstack.common.log.getLogger', 'log.getLogger', (['__name__'], {}), '(__name__)\n', (1209, 1219), False, 'from ceilometer.openstack.common import log\n'), ((961, 1120), 'oslo.config.cfg.StrOpt', 'cfg.StrOpt', (['"""neutron_control_exchange"""'], {'default': '"""neutron"""', 'help': '"""Exchange name for Neutron notifications"""', 'deprecated_name': '"""quantum_control_exchange"""'}), "('neutron_control_exchange', default='neutron', help=\n 'Exchange name for Neutron notifications', deprecated_name=\n 'quantum_control_exchange')\n", (971, 1120), False, 'from oslo.config import cfg\n'), ((2995, 3245), 'ceilometer.sample.Sample.from_notification', 'sample.Sample.from_notification', ([], {'name': 'counter_name', 'type': 'sample.TYPE_GAUGE', 'unit': 'unit_value', 'volume': '(1)', 'user_id': "message['_context_user_id']", 'project_id': "message['payload']['tenant_id']", 'resource_id': "message['payload']['id']", 'message': 'message'}), "(name=counter_name, type=sample.TYPE_GAUGE,\n unit=unit_value, volume=1, user_id=message['_context_user_id'],\n project_id=message['payload']['tenant_id'], resource_id=message[\n 'payload']['id'], message=message)\n", (3026, 3245), False, 'from ceilometer import sample\n'), ((5018, 5270), 'ceilometer.sample.Sample.from_notification', 'sample.Sample.from_notification', ([], {'name': '"""bandwidth"""', 'type': 'sample.TYPE_DELTA', 'unit': '"""B"""', 'volume': "message['payload']['bytes']", 'user_id': 'None', 'project_id': "message['payload']['tenant_id']", 'resource_id': "message['payload']['label_id']", 'message': 'message'}), "(name='bandwidth', type=sample.TYPE_DELTA,\n unit='B', volume=message['payload']['bytes'], user_id=None, project_id=\n message['payload']['tenant_id'], resource_id=message['payload'][\n 'label_id'], message=message)\n", 
(5049, 5270), False, 'from ceilometer import sample\n'), ((2736, 2764), 'ceilometer.openstack.common.gettextutils._', '_', (['"""network notification %r"""'], {}), "('network notification %r')\n", (2737, 2764), False, 'from ceilometer.openstack.common.gettextutils import _\n'), ((3447, 3726), 'ceilometer.sample.Sample.from_notification', 'sample.Sample.from_notification', ([], {'name': "(counter_name + '.' + event_type_split[1])", 'type': 'sample.TYPE_DELTA', 'unit': 'unit_value', 'volume': '(1)', 'user_id': "message['_context_user_id']", 'project_id': "message['payload']['tenant_id']", 'resource_id': "message['payload']['id']", 'message': 'message'}), "(name=counter_name + '.' + event_type_split[\n 1], type=sample.TYPE_DELTA, unit=unit_value, volume=1, user_id=message[\n '_context_user_id'], project_id=message['payload']['tenant_id'],\n resource_id=message['payload']['id'], message=message)\n", (3478, 3726), False, 'from ceilometer import sample\n')] |
from itertools import product
import struct
import pickle
import numpy as np
from scipy import sparse
from scipy import isnan as scipy_isnan
import numpy.matlib
ASCII_FACET = """facet normal 0 0 0
outer loop
vertex {face[0][0]:.4f} {face[0][1]:.4f} {face[0][2]:.4f}
vertex {face[1][0]:.4f} {face[1][1]:.4f} {face[1][2]:.4f}
vertex {face[2][0]:.4f} {face[2][1]:.4f} {face[2][2]:.4f}
endloop
endfacet
"""
BINARY_HEADER ="80sI"
BINARY_FACET = "12fH"
class ASCII_STL_Writer(object):
    """Write 3D geometry built from 3- or 4-vertex faces as an ASCII STL file."""
    def __init__(self, stream):
        # Keep a handle on the target stream and emit the solid header.
        self.fp = stream
        self._write_header()
    def _write_header(self):
        self.fp.write("solid python\n")
    def close(self):
        self.fp.write("endsolid python\n")
    def _write(self, face):
        # Render one triangle through the module-level ASCII template.
        self.fp.write(ASCII_FACET.format(face=face))
    def _split(self, face):
        # Split a quad into two triangles sharing the p3-p1 diagonal.
        p1, p2, p3, p4 = face
        return (p1, p2, p3), (p3, p4, p1)
    def add_face(self, face):
        """Add one face with 3 or 4 vertices."""
        n_vertices = len(face)
        if n_vertices == 3:
            self._write(face)
        elif n_vertices == 4:
            for triangle in self._split(face):
                self._write(triangle)
        else:
            raise ValueError('only 3 or 4 vertices for each face')
    def add_faces(self, faces):
        """Add many faces."""
        for single_face in faces:
            self.add_face(single_face)
class Binary_STL_Writer(ASCII_STL_Writer):
    """ Export 3D objects built from 3 or 4 vertices as a binary STL file.

    The stream must be seekable and opened in binary mode: a placeholder
    header is written on construction and rewritten with the final facet
    count when close() is called.
    """
    def __init__(self, stream):
        # Facet counter, embedded into the header on close().
        self.counter = 0
        super(Binary_STL_Writer, self).__init__(stream)
    def close(self):
        # Rewrite the header so it carries the final facet count.
        self._write_header()
    def _write_header(self):
        # Header lives at offset 0: 80-byte title + uint32 facet count.
        self.fp.seek(0)
        self.fp.write(struct.pack(BINARY_HEADER, b'Python Binary STL Writer', self.counter))
    def _write(self, face):
        self.counter += 1
        # Binary facet: zero normal, three vertices, zero attribute count.
        data = [
            0., 0., 0.,
            face[0][0], face[0][1], face[0][2],
            face[1][0], face[1][1], face[1][2],
            face[2][0], face[2][1], face[2][2],
            0
        ]
        self.fp.write(struct.pack(BINARY_FACET, *data))
def get_quad(center, n, side=1.):
    """Return a single quad (a list containing one 4-vertex face) centered at
    *center*, perpendicular to the normal *n*, with edge half-diagonal
    ``side / 2``.

    Accepts any array-like for *center* and *n* (the original implementation
    crashed on plain tuples because it accessed ``n.shape`` before
    conversion).  Returns None when the normal contains NaNs or is (almost)
    the zero vector.
    """
    center = np.array(center).astype('float64')
    n = np.array(n).astype('float64')
    half = side / 2.
    # Reject unusable normals: NaN components or an (almost) null vector.
    if np.isnan(n).any():
        return None
    if np.allclose(n, np.zeros(n.shape)):
        return None
    # Pick a helper axis that cannot be parallel to n, then build two
    # vectors orthogonal to each other and to the normal.
    if np.abs(n[1]) > 0.2 or np.abs(n[2]) > 0.2:
        helper = np.array([1, 0, 0])
    else:
        helper = np.array([0, 1, 0])
    ortho1 = np.cross(n, helper)
    ortho1 *= half / np.linalg.norm(ortho1)
    ortho2 = np.cross(n, ortho1)
    ortho2 *= half / np.linalg.norm(ortho2)
    # Flip the Y component to match the coordinate convention used by the
    # STL writers in this module.
    ortho1[1] = -ortho1[1]
    ortho2[1] = -ortho2[1]
    return [[
        center + ortho1,
        center + ortho2,
        center - ortho1,
        center - ortho2,
    ]]
def surfaceFromNormals(normals):
    """Recover a height surface from a (w, h, 3) normal map.

    Builds a sparse finite-difference system encoding
    n_z * (z(x+1,y) - z(x,y)) = n_x and n_z * (z(x,y+1) - z(x,y)) = n_y,
    plus boundary constraints, and solves it in the least-squares sense
    with LSMR.  NaN entries in *normals* mark invalid pixels and are
    dropped from the system.  Returns a (w, h, 3) array stacking the x
    index, y index and the recovered z value.
    """
    # NOTE(review): this local is never used; 'valid_idx' below is used instead.
    valid_indices = ~np.isnan(normals)
    w, h, d = normals.shape
    # Each component is duplicated so it can serve both halves (x- and
    # y-derivative rows) of the stacked system.
    nx = np.transpose(np.hstack((
        normals[:,:,0].ravel(),
        normals[:,:,0].ravel(),
    )))
    ny = np.transpose(np.hstack((
        normals[:,:,1].ravel(),
        normals[:,:,1].ravel(),
    )))
    nz = np.transpose(np.hstack((
        normals[:,:,2].ravel(),
        normals[:,:,2].ravel(),
    )))
    vectorsize = nz.shape
    valid_idx = ~np.isnan(nz)
    M = sparse.dia_matrix((2*w*h, w*h), dtype=np.float64)
    # n_z z(x + 1, y) - n_z z(x,y) = n_x
    M.setdiag(-nz, 0)
    M.setdiag(nz, 1)
    # n_z z(x, y + 1) - n_z z(x,y) = n_y
    M.setdiag(-nz, -w*h)
    M.setdiag(np.hstack(([0] * w, nz)), -w*h + w)
    # Boundary values
    # n_y ( z(x,y) - z(x + 1, y)) = n_x ( z(x,y) - z(x, y + 1))
    # TODO: Redo for boundaries in Y-axis
    M = M.tolil()
    half_size = valid_idx.size // 2
    # Transitions of the validity mask locate boundary pixels.
    bidxd = np.hstack((np.diff(valid_idx.astype('int8')[:half_size]), [0]))
    inner_boundaries = np.roll(bidxd==1, 1) | (bidxd==-1)
    outer_boundaries = (bidxd==1) | (np.roll(bidxd==-1, 1))
    # NOTE(review): 'nz_t' is built from the boolean mask 'valid_idx', so the
    # np.isnan() on the next line will raise TypeError on a bool array --
    # presumably 'nz' (the float data) was intended here; confirm.
    nz_t = np.transpose(valid_idx.reshape((w,h,d*2//3)), (1, 0, 2)).ravel()
    valid_idx_t = ~np.isnan(nz_t)
    bidxd = np.hstack((np.diff(valid_idx_t.astype('int8')[:half_size]), [0]))
    inner_boundaries |= np.roll(bidxd==1, 1) | (bidxd==-1)
    outer_boundaries |= (bidxd==1) | (np.roll(bidxd==-1, 1))
    # NOTE(review): np.bool was removed in NumPy 1.24; use plain 'bool' when
    # this module is moved to a modern NumPy.
    bidx = np.zeros((half_size,), dtype=np.bool)
    bidx[inner_boundaries] = True
    bidx = np.indices(bidx.shape)[0][bidx]
    M[bidx, bidx] = nx[bidx]
    M[bidx, bidx + w] = -nx[bidx]
    M[bidx + half_size, bidx] = ny[bidx]
    M[bidx + half_size, bidx + 1] = -ny[bidx]
    # Keep only rows whose normal component is not NaN.
    M = M.tocsr()[valid_idx]
    weight = 1
    # One extra row per outer-boundary pixel pins its height (weighted) to 0.
    OB = np.zeros((outer_boundaries.sum(), w*h,))
    OB[np.indices((outer_boundaries.sum(),))[0], np.where(outer_boundaries==True)] = weight
    M = sparse.vstack((M,OB))
    # Build [ n_x n_y ]'
    m = np.hstack((
        normals[:,:,0].ravel(),
        normals[:,:,1].ravel(),
    )).reshape(-1, 1)
    print(inner_boundaries.shape, m.shape)
    i_b = np.hstack((inner_boundaries, inner_boundaries)).reshape(-1,1)
    print(i_b.shape, m.shape)
    m[i_b] = 0
    m = m[valid_idx]
    m = np.vstack((
        m,
        np.zeros((outer_boundaries.sum(), 1)),
    ))
    # Solve least squares
    assert not np.isnan(m).any()
    # x, istop, itn, r1norm, r2norm, anorm, acond, arnorm, xnorm, var = sparse.linalg.lsqr(M, m)
    x, istop, itn, normr, normar, norma, conda, normx = sparse.linalg.lsmr(M, m)
    # Build the surface (x, y, z) with the computed values of z
    surface = np.dstack((
        np.indices((w, h))[0],
        np.indices((w, h))[1],
        x.reshape((w, h))
    ))
    return surface
def writeMesh(surface, normals, filename):
    """Export *surface* as a binary STL mesh to *filename*, emitting one quad
    per sampled grid point (every 5th point along both axes), oriented by
    the matching entry of *normals*.
    """
    with open(filename, 'wb') as fp:
        writer = Binary_STL_Writer(fp)
        rows = range(0, surface.shape[0], 5)
        cols = range(0, surface.shape[1], 5)
        for row, col in product(rows, cols):
            quad = get_quad(surface[row, col, :], normals[row, col, :], 4)
            if quad:
                writer.add_faces(quad)
        # Rewrites the binary header with the final facet count.
        writer.close()
def write3dNormals(normals, filename):
    """Visualize the normal field itself as a binary STL file: one quad per
    sampled grid point (every 5th point along both axes), positioned at
    (0, row, col) and oriented by the normal at that point.
    """
    with open(filename, 'wb') as fp:
        writer = Binary_STL_Writer(fp)
        rows = range(0, normals.shape[0], 5)
        cols = range(0, normals.shape[1], 5)
        for row, col in product(rows, cols):
            quad = get_quad((0, row, col), normals[row, col, :], 4)
            if quad:
                writer.add_faces(quad)
        # Rewrites the binary header with the final facet count.
        writer.close()
def surfaceToHeight(surface):
    """Return the z channel of *surface* rescaled to the [0, 1] range."""
    z = surface[:, :, 2]
    lowest = z.min()
    span = z.max() - lowest
    return (z - lowest) / span
def writeObj(surface, normals, filename):
    # Placeholder: Wavefront OBJ export is not implemented yet; all
    # arguments are ignored and only a marker is printed.
    print('obj here')
if __name__ == '__main__':
    # Load a pickled normal map and export it as an STL mesh.
    with open('data.pkl', 'rb') as fhdl:
        normals = pickle.load(fhdl)
    # NOTE(review): writeMesh() takes (surface, normals, filename); this call
    # passes a single argument and will raise TypeError -- confirm intent.
    writeMesh(normals)
| [
"scipy.sparse.linalg.lsmr",
"numpy.abs",
"numpy.roll",
"numpy.cross",
"numpy.amin",
"numpy.hstack",
"numpy.where",
"scipy.sparse.dia_matrix",
"pickle.load",
"struct.pack",
"numpy.indices",
"numpy.array",
"numpy.zeros",
"numpy.isnan",
"numpy.linalg.norm",
"scipy.sparse.vstack",
"numpy... | [((2702, 2716), 'numpy.cross', 'np.cross', (['n', 'C'], {}), '(n, C)\n', (2710, 2716), True, 'import numpy as np\n'), ((2771, 2790), 'numpy.cross', 'np.cross', (['n', 'ortho1'], {}), '(n, ortho1)\n', (2779, 2790), True, 'import numpy as np\n'), ((3564, 3619), 'scipy.sparse.dia_matrix', 'sparse.dia_matrix', (['(2 * w * h, w * h)'], {'dtype': 'np.float64'}), '((2 * w * h, w * h), dtype=np.float64)\n', (3581, 3619), False, 'from scipy import sparse\n'), ((4522, 4559), 'numpy.zeros', 'np.zeros', (['(half_size,)'], {'dtype': 'np.bool'}), '((half_size,), dtype=np.bool)\n', (4530, 4559), True, 'import numpy as np\n'), ((4982, 5004), 'scipy.sparse.vstack', 'sparse.vstack', (['(M, OB)'], {}), '((M, OB))\n', (4995, 5004), False, 'from scipy import sparse\n'), ((5615, 5639), 'scipy.sparse.linalg.lsmr', 'sparse.linalg.lsmr', (['M', 'm'], {}), '(M, m)\n', (5633, 5639), False, 'from scipy import sparse\n'), ((6859, 6884), 'numpy.amin', 'np.amin', (['surface[:, :, 2]'], {}), '(surface[:, :, 2])\n', (6866, 6884), True, 'import numpy as np\n'), ((6894, 6919), 'numpy.amax', 'np.amax', (['surface[:, :, 2]'], {}), '(surface[:, :, 2])\n', (6901, 6919), True, 'import numpy as np\n'), ((2463, 2480), 'numpy.zeros', 'np.zeros', (['n.shape'], {}), '(n.shape)\n', (2471, 2480), True, 'import numpy as np\n'), ((2627, 2646), 'numpy.array', 'np.array', (['[1, 0, 0]'], {}), '([1, 0, 0])\n', (2635, 2646), True, 'import numpy as np\n'), ((2669, 2688), 'numpy.array', 'np.array', (['[0, 1, 0]'], {}), '([0, 1, 0])\n', (2677, 2688), True, 'import numpy as np\n'), ((2735, 2757), 'numpy.linalg.norm', 'np.linalg.norm', (['ortho1'], {}), '(ortho1)\n', (2749, 2757), True, 'import numpy as np\n'), ((2809, 2831), 'numpy.linalg.norm', 'np.linalg.norm', (['ortho2'], {}), '(ortho2)\n', (2823, 2831), True, 'import numpy as np\n'), ((3135, 3152), 'numpy.isnan', 'np.isnan', (['normals'], {}), '(normals)\n', (3143, 3152), True, 'import numpy as np\n'), ((3542, 3554), 'numpy.isnan', 'np.isnan', (['nz'], 
{}), '(nz)\n', (3550, 3554), True, 'import numpy as np\n'), ((3778, 3802), 'numpy.hstack', 'np.hstack', (['([0] * w, nz)'], {}), '(([0] * w, nz))\n', (3787, 3802), True, 'import numpy as np\n'), ((4101, 4123), 'numpy.roll', 'np.roll', (['(bidxd == 1)', '(1)'], {}), '(bidxd == 1, 1)\n', (4108, 4123), True, 'import numpy as np\n'), ((4173, 4196), 'numpy.roll', 'np.roll', (['(bidxd == -1)', '(1)'], {}), '(bidxd == -1, 1)\n', (4180, 4196), True, 'import numpy as np\n'), ((4292, 4306), 'numpy.isnan', 'np.isnan', (['nz_t'], {}), '(nz_t)\n', (4300, 4306), True, 'import numpy as np\n'), ((4409, 4431), 'numpy.roll', 'np.roll', (['(bidxd == 1)', '(1)'], {}), '(bidxd == 1, 1)\n', (4416, 4431), True, 'import numpy as np\n'), ((4482, 4505), 'numpy.roll', 'np.roll', (['(bidxd == -1)', '(1)'], {}), '(bidxd == -1, 1)\n', (4489, 4505), True, 'import numpy as np\n'), ((7165, 7182), 'pickle.load', 'pickle.load', (['fhdl'], {}), '(fhdl)\n', (7176, 7182), False, 'import pickle\n'), ((1808, 1877), 'struct.pack', 'struct.pack', (['BINARY_HEADER', "b'Python Binary STL Writer'", 'self.counter'], {}), "(BINARY_HEADER, b'Python Binary STL Writer', self.counter)\n", (1819, 1877), False, 'import struct\n'), ((2165, 2197), 'struct.pack', 'struct.pack', (['BINARY_FACET', '*data'], {}), '(BINARY_FACET, *data)\n', (2176, 2197), False, 'import struct\n'), ((2249, 2265), 'numpy.array', 'np.array', (['center'], {}), '(center)\n', (2257, 2265), True, 'import numpy as np\n'), ((2301, 2312), 'numpy.array', 'np.array', (['n'], {}), '(n)\n', (2309, 2312), True, 'import numpy as np\n'), ((2400, 2411), 'numpy.isnan', 'np.isnan', (['v'], {}), '(v)\n', (2408, 2411), True, 'import numpy as np\n'), ((2576, 2586), 'numpy.abs', 'np.abs', (['n2'], {}), '(n2)\n', (2582, 2586), True, 'import numpy as np\n'), ((2596, 2606), 'numpy.abs', 'np.abs', (['n3'], {}), '(n3)\n', (2602, 2606), True, 'import numpy as np\n'), ((4605, 4627), 'numpy.indices', 'np.indices', (['bidx.shape'], {}), '(bidx.shape)\n', (4615, 4627), 
True, 'import numpy as np\n'), ((4931, 4965), 'numpy.where', 'np.where', (['(outer_boundaries == True)'], {}), '(outer_boundaries == True)\n', (4939, 4965), True, 'import numpy as np\n'), ((5189, 5236), 'numpy.hstack', 'np.hstack', (['(inner_boundaries, inner_boundaries)'], {}), '((inner_boundaries, inner_boundaries))\n', (5198, 5236), True, 'import numpy as np\n'), ((5444, 5455), 'numpy.isnan', 'np.isnan', (['m'], {}), '(m)\n', (5452, 5455), True, 'import numpy as np\n'), ((5739, 5757), 'numpy.indices', 'np.indices', (['(w, h)'], {}), '((w, h))\n', (5749, 5757), True, 'import numpy as np\n'), ((5770, 5788), 'numpy.indices', 'np.indices', (['(w, h)'], {}), '((w, h))\n', (5780, 5788), True, 'import numpy as np\n')] |
'''
This is a python script that requires you have python installed, or in a cloud environment.
This script scrapes the CVS website looking for vaccine appointments in the cities you list.
To update for your area, update the locations commented below.
If you receive an error that says something is not installed, type
pip install requests
etc.
Happy vaccination!
'''
import requests
import time
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from datetime import datetime, timedelta
def send(message, thetime, state):
    """Email/SMS the list of open CVS appointments.

    ``message`` is a list of "CITY, ST -- Available" strings; it is left
    unmodified (the previous implementation mutated the caller's list).
    ``thetime`` is the datetime of the check and ``state`` the state
    abbreviation shown in the subject line.
    """
    carriers = {
        'att': '@mms.att.net',
        'tmobile': '@tmomail.net',
        'verizon': '@vtext.com',
        'sprint': '@page.nextel.com',
        'gmail': '@gmail.com'
    }
    # Replace the receivernumber, senderaddr, and senderpass with your own
    # Consider using a list for multiple receivers.
    # To use gmail, you need to allow less secure apps to connect
    # Also, probably a good idea to set up a burner gmail for the sending
    to_number = f"RECEIVERADDR{carriers['tmobile']}"  # ", ".join() for multiple
    sender = f"SENDERADDR{carriers['gmail']}"
    password = '<PASSWORD>'
    subject = f"CVS Availability in {state}"
    # Build the body on a copy so the caller's list is not mutated.
    body = [thetime.strftime("%m/%d/%Y, %H:%M %p")] + list(message)
    if len(body) == 1:
        # Only the timestamp is present: no appointments were found.
        body.append('No new appointments available.')
    else:
        body.append('https://www.cvs.com/vaccine/intake/store/covid-screener/covid-qns')
    port = 587  # 587 for starttls, 465 for SSL and use ssl
    smtp_server = "smtp.gmail.com"
    msg_body = ", ".join(body)
    msg = MIMEMultipart('alternative')
    msg['From'] = sender
    msg['To'] = to_number
    msg['subject'] = subject
    msg.attach(MIMEText(msg_body, 'plain', 'UTF-8'))
    # Establish a secure session with gmail's outgoing SMTP server, making
    # sure the connection is closed even if login or sending fails.
    server = smtplib.SMTP(smtp_server, port)
    try:
        server.starttls()
        server.login(sender, password)
        # Send text message through SMS gateway of destination number
        server.sendmail(sender, to_number, msg.as_string())
    finally:
        server.quit()
def findAVaccine():
    """Poll the CVS availability feed every 5 minutes and report openings.

    Runs for ``hours_to_run`` hours.  An email is sent whenever the set of
    available cities changes, or at least once an hour as a heartbeat.
    """
    timer = 3600  # send a heartbeat message at least once an hour
    init_time = datetime.now()
    hours_to_run = 24  ###Update this to set the number of hours you want the script to run.
    max_time = init_time + timedelta(hours=hours_to_run)
    state = 'CA'  ###Update with your state abbreviation. Be sure to use all CAPS, e.g. RI
    cvs_url = f"https://www.cvs.com/immunizations/covid-19-vaccine.vaccine-status.{state.lower()}.json?vaccineinfo"
    header = "https://www.cvs.com/immunizations/covid-19-vaccine"
    ###Update with your cities nearby
    # BUG FIX: 'EMERYVILLE' and 'FAIRFIELD' were accidentally fused into one
    # string ('EMERYVILLEFAIRFIELD') by a missing comma, so neither city
    # could ever match.
    cities = ['ALAMEDA', 'ALAMO', 'ALBANY', 'ANTIOCH', 'BERKELEY', 'CHICO', 'COLMA', 'CUPERTINO', 'DALY CITY', 'DAVIS',
    'EAST PALO ALTO', 'HAYWARD', 'LAFAYETTE', 'LATHROP', 'LIVERMORE', 'LOS GATOS', 'DANVILLE', 'DIXON', 'DUBLIN', 'EL CERRITO',
    'ELK GROVE', 'EMERYVILLE', 'FAIRFIELD', 'FREMONT', 'MENLO PARK', 'SAN FRANCISCO', 'OAKLAND', 'WOODLAND', 'SACRAMENTO',
    'STOCKTON', 'VACAVILLE', 'VALLEJO', 'WALNUT CREEK', 'MILL VALLEY', 'MORAGA', 'NEWARK', 'NOVATO', 'ORINDA', 'PITTSBURG',
    'PINOLE', 'PLEASANT HILL', 'REDWOOD CITY', 'RICHMOND', '<NAME>', '<NAME>', '<NAME>', '<NAME>', '<NAME>',
    '<NAME>', '<NAME>', 'SAUSALITO', 'SARATOGA'
    ]
    previousmessage = []
    while datetime.now() < max_time:
        thetime = datetime.now()
        message = []
        response = requests.get(cvs_url, headers={"Referer":header})
        payload = response.json()
        print(thetime)
        for item in payload["responsePayloadData"]["data"][state]:
            city = item.get('city')
            status = item.get('status')
            if (city in cities) and (status == 'Available'):
                message.append(f"{city}, {state} -- {status}")
                print(f"{city}, {state} -- {status}")
        print()
        # Decouple the checking from sending alerts: send when availability
        # changed, or once per hour regardless.
        if (message != previousmessage) or ((thetime - init_time).total_seconds() > timer):
            # set previous to this new one
            previousmessage = message[:]
            # reset the heartbeat timer
            init_time = datetime.now()
            # send the email!
            print('Sending status update...')
            send(message, thetime, state)
        # This runs every 300 seconds (5 minutes)
        # Email will be sent every hour, or when a change is detected
        time.sleep(300)
if __name__ == '__main__':
    try:
        findAVaccine()
    except KeyboardInterrupt:
        # Allow a clean Ctrl-C exit instead of dumping a traceback.
        print('Exiting...')
| [
"smtplib.SMTP",
"requests.get",
"time.sleep",
"datetime.datetime.now",
"email.mime.multipart.MIMEMultipart",
"datetime.timedelta",
"email.mime.text.MIMEText"
] | [((1676, 1704), 'email.mime.multipart.MIMEMultipart', 'MIMEMultipart', (['"""alternative"""'], {}), "('alternative')\n", (1689, 1704), False, 'from email.mime.multipart import MIMEMultipart\n'), ((1796, 1832), 'email.mime.text.MIMEText', 'MIMEText', (['msg_body', '"""plain"""', '"""UTF-8"""'], {}), "(msg_body, 'plain', 'UTF-8')\n", (1804, 1832), False, 'from email.mime.text import MIMEText\n'), ((1959, 1990), 'smtplib.SMTP', 'smtplib.SMTP', (['smtp_server', 'port'], {}), '(smtp_server, port)\n', (1971, 1990), False, 'import smtplib\n'), ((2243, 2257), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2255, 2257), False, 'from datetime import datetime, timedelta\n'), ((2377, 2406), 'datetime.timedelta', 'timedelta', ([], {'hours': 'hours_to_run'}), '(hours=hours_to_run)\n', (2386, 2406), False, 'from datetime import datetime, timedelta\n'), ((3413, 3427), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3425, 3427), False, 'from datetime import datetime, timedelta\n'), ((3459, 3473), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (3471, 3473), False, 'from datetime import datetime, timedelta\n'), ((3515, 3565), 'requests.get', 'requests.get', (['cvs_url'], {'headers': "{'Referer': header}"}), "(cvs_url, headers={'Referer': header})\n", (3527, 3565), False, 'import requests\n'), ((4598, 4613), 'time.sleep', 'time.sleep', (['(300)'], {}), '(300)\n', (4608, 4613), False, 'import time\n'), ((4328, 4342), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (4340, 4342), False, 'from datetime import datetime, timedelta\n')] |
''' Controller para fornecer dados da CEE '''
from flask_restful import Resource
from service.qry_options_builder import QueryOptionsBuilder
from model.thematic import Thematic
class BaseResource(Resource):
    """Base class for CEE API resources.

    Centralizes the Swagger parameter documentation shared by the query
    endpoints and holds a lazily created ``Thematic`` domain model.
    """
    # Swagger/OpenAPI query-parameter docs shared by the generic thematic
    # endpoints (descriptions are user-facing and intentionally Portuguese).
    DEFAULT_SWAGGER_PARAMS = [
        {"name": "valor", "required": False, "type": 'string', "in": "query",
         "description": "Coluna com o valor agregado. Agrega o valor \
            presente na coluna informada (vide opções nas categorias), de \
            acordo com a função de agregação informada (vide parâmetro \
            agregacao)."},
        {"name": "agregacao", "required": False, "type": 'string', "in": "query",
         "description": "Função de agregação a ser usada. As funções \
            disponíveis são DISTINCT, COUNT, SUM, MAX, MIN, PCT_COUNT, \
            PCT_SUM, RANK_COUNT, RANK_SUM, RANK_DENSE_COUNT e \
            RANK_DESNE_SUM. \
            Os atributos retornados terão nome formado pelo nome da \
            função precedido de 'agr_' (ex. 'agr_sum')."},
        {"name": "ordenacao", "required": False, "type": 'string', "in": "query",
         "description": "Colunas de ordenação para o resultado, dentre \
            as colunas presentes nas categorias. Adicionalmente, pode-se \
            incluir a coluna de agregação (ex. 'sum'). Uma coluna com \
            ordenação inversa deve ser precedida de '-' \
            (ex. order=-sum)."},
        {"name": "filtros", "required": False, "type": 'string', "in": "query",
         "description": "Operações lógicas para a filtragem dos registros \
            do resultado. Operadores disponíveis: eq, ne, in, gt, ge, lt, le, \
            and e or. Como redigir: ',' para separar operações e '-' para \
            separar parâmetros da operação. \
            Exemplo: &filtros=ge-ano-2014,and,lt-ano-2018."},
        {"name": "calcs", "required": False, "type": 'string', "in": "query",
         "description": "Campo calculado sobre grupos padrões do resource. \
            Havendo qualquer agregação, o agrupamento será feito pelas \
            categorias fornecidas na query. \
            Calcs disponiveis: min_part, max_part, avg_part, var_part, \
            ln_var_part, norm_pos_part, ln_norm_pos_part, norm_part e \
            ln_norm_part."}
    ]
    # Reusable fragments for the 'categorias' parameter description at the
    # Brazil, state (UF) and municipality levels.
    CAT_DETAIL = "Para renomear campos do dataset de retorno, após o campo de \
    consulta, adicionar o novo nome, separado por '-' (ex: campo-campo_novo)."
    CAT_IND_BR = "Informações que devem ser trazidas no dataset. \
    Campos disponíveis: cd_mun_ibge, nu_competencia, \
    cd_indicador, ds_agreg_primaria, ds_agreg_secundaria, \
    ds_indicador, ds_fonte, nu_competencia_min, nu_competencia_max, \
    vl_indicador, vl_indicador_min, vl_indicador_max e media_br. \
    " + CAT_DETAIL
    CAT_IND_UF = "Informações que devem ser trazidas no dataset. \
    Campos disponíveis: cd_mun_ibge, nu_competencia, \
    nu_competencia_min, nu_competencia_max, nm_uf, sg_uf, \
    cd_prt, nm_prt, cd_regiao, nm_regiao, cd_uf, cd_indicador, \
    ds_agreg_primaria, ds_agreg_secundaria, ds_indicador, \
    ds_fonte, vl_indicador, vl_indicador_br, vl_indicador_min_br, \
    vl_indicador_max_br, media_br, pct_br, rank_br e \
    rank_br_total. " + CAT_DETAIL
    CAT_IND_MUN = "Informações que devem ser trazidas no dataset. \
    Campos disponíveis: cd_mun_ibge, nu_competencia, \
    nu_competencia_min, nu_competencia_max, nm_municipio_uf, \
    latitude, longitude, nm_uf, sg_uf, cd_unidade, cd_prt, \
    nm_prt, nm_unidade, tp_unidade, sg_unidade, cd_mesorregiao, \
    nm_mesorregiao, cd_microrregiao, nm_microrregiao, \
    cd_regiao, nm_regiao, cd_mun_ibge_dv, nm_municipio, cd_uf, \
    cd_indicador, ds_agreg_primaria, ds_agreg_secundaria, \
    ds_indicador, vl_indicador, vl_indicador_uf, \
    vl_indicador_min_uf, vl_indicador_max_uf, media_uf, pct_uf, \
    rank_uf, rank_uf_total, vl_indicador_br, vl_indicador_min_br \
    vl_indicador_max_br, media_br, pct_br, rank_br e \
    rank_br_total. " + CAT_DETAIL
    # Swagger parameter docs shared by the company ("empresa") endpoints.
    EMPRESA_DEFAULT_SWAGGER_PARAMS = [
        {
            "name": "dados",
            "description": "Fonte de dados para consulta (rais, caged, catweb etc)",
            "required": False,
            "type": 'string',
            "in": "query"
        },
        {
            "name": "competencia",
            "description": "Competência a ser retornada. Depende da fonte de dados \
                (ex. para uma fonte pode ser AAAA, enquanto para outras AAAAMM)",
            "required": False,
            "type": 'string',
            "in": "query"
        },
        {
            "name": "id_pf",
            "description": "Identificador da Pessoa Física, dentro da empresa. \
                Tem que informar o dataset (param 'dados')",
            "required": False,
            "type": 'string',
            "in": "query"
        },
        {
            "name": "perspectiva",
            "description": "Valor que filtra uma perspectiva predefinida de um dataset \
                (ex. No catweb, 'Empregador'). Nem todos os datasets tem essa opção.",
            "required": False,
            "type": 'string',
            "in": "query"
        },
        {
            "name": "reduzido",
            "description": "Sinalizador que indica conjunto reduzido de colunas (S para sim)",
            "required": False,
            "type": 'string',
            "in": "query"
        },
        {
            "name": "cnpj_raiz", "required": True, "type": 'string', "in": "path",
            "description": "CNPJ Raiz da empresa consultada"
        }
    ]
    @staticmethod
    def build_options(r_args, rules='query'):
        """Build the query options from the request arguments."""
        return QueryOptionsBuilder.build_options(r_args, rules)
    @staticmethod
    def build_person_options(r_args, mod='empresa'):
        """Build the person-query options from the request arguments."""
        return QueryOptionsBuilder.build_person_options(r_args, mod)
    def __init__(self):
        """Constructor: eagerly creates the domain model via set_domain()."""
        self.domain = None
        self.set_domain()
    def get_domain(self):
        """Return the domain model, lazily creating it when absent."""
        if self.domain is None:
            self.domain = Thematic()
        return self.domain
    def set_domain(self):
        """Setter invoked from constructor."""
        self.domain = Thematic()
| [
"service.qry_options_builder.QueryOptionsBuilder.build_options",
"model.thematic.Thematic",
"service.qry_options_builder.QueryOptionsBuilder.build_person_options"
] | [((5847, 5895), 'service.qry_options_builder.QueryOptionsBuilder.build_options', 'QueryOptionsBuilder.build_options', (['r_args', 'rules'], {}), '(r_args, rules)\n', (5880, 5895), False, 'from service.qry_options_builder import QueryOptionsBuilder\n'), ((6030, 6083), 'service.qry_options_builder.QueryOptionsBuilder.build_person_options', 'QueryOptionsBuilder.build_person_options', (['r_args', 'mod'], {}), '(r_args, mod)\n', (6070, 6083), False, 'from service.qry_options_builder import QueryOptionsBuilder\n'), ((6472, 6482), 'model.thematic.Thematic', 'Thematic', ([], {}), '()\n', (6480, 6482), False, 'from model.thematic import Thematic\n'), ((6337, 6347), 'model.thematic.Thematic', 'Thematic', ([], {}), '()\n', (6345, 6347), False, 'from model.thematic import Thematic\n')] |
import json
import os
def calculo(self=None):
    """Interactively estimate how many months of contributions are needed
    to reach an investment goal.

    Reads the goal, starting balance, monthly contribution and current
    monthly dividends from stdin.  Dividends are assumed to grow by a
    fixed 1.37 per month.  Returns the number of months.

    ``self`` is unused; it is kept (now with a default) only for backward
    compatibility with existing callers.
    """
    meta = float(input('valor da meta: '))  # e.g. 1000000
    valorinicial = float(input('valor inicial: '))  # e.g. 5637.99
    valormensal = float(input('investimento mensal: '))  # e.g. 150
    dividendos = float(input('dividendos: '))  # e.g. 16.86
    # Remaining amount after the starting balance and the first month.
    meta = meta - valorinicial - valormensal - dividendos
    meses = 0
    # BUG FIX: the original looped 'while i < meta', comparing the month
    # counter against the remaining amount of money; iterate until the goal
    # is actually reached instead.
    while meta > 0:
        meta = meta - valormensal - dividendos
        print(meta)
        dividendos = dividendos + 1.37  # assumed monthly dividend growth
        meses = meses + 1
    print(meses)
    return meses
def viver_de_renda_hglg(rmd=1000, preco=194, div=0.78):
    """Return the capital needed to live off HGLG11 dividends.

    The "magic number" is the number of shares whose monthly dividend buys
    one extra share (preco / div, truncated).  Owning ``rmd`` times that
    many shares yields roughly ``rmd`` in monthly income.

    Args:
        rmd: desired monthly income (default 1000, as before).
        preco: share price (default 194).
        div: monthly dividend per share (default 0.78).
    """
    magic_number = int(preco / div)
    return magic_number * rmd
def viver_de_renda_knri(self=None, rmd=10000, preco=185, div=0.74):
    """Return the capital needed to live off KNRI11 dividends.

    ``self`` is unused; it is kept (now with a default) only for backward
    compatibility with existing callers.  The "magic number" is the number
    of shares whose monthly dividend buys one extra share.

    Args:
        rmd: desired monthly income (default 10000, as before).
        preco: share price (default 185).
        div: monthly dividend per share (default 0.74).
    """
    magic_number = int(preco / div)
    return magic_number * rmd
def viver_de_renda_bcff(self=None, rmd=10000, preco=99, div=0.53):
    """Return the capital needed to live off BCFF11 dividends.

    ``self`` is unused; it is kept (now with a default) only for backward
    compatibility with existing callers.  The "magic number" is the number
    of shares whose monthly dividend buys one extra share.

    Args:
        rmd: desired monthly income (default 10000, as before).
        preco: share price (default 99).
        div: monthly dividend per share (default 0.53).
    """
    magic_number = int(preco / div)
    return magic_number * rmd
def vdrFII():
    """Print and return the capital needed to live off this FII's dividends.

    Previously the value was only printed (returning None), unlike the
    sibling viver_de_renda_* functions; returning it as well is backward
    compatible and makes the functions consistent.
    """
    preco = 478
    div = 2.05
    rmd = 10000  # desired monthly income
    magic_number = int(preco / div)
    valor_nescessario = magic_number * rmd
    print(valor_nescessario)
    return valor_nescessario
def sair():
    # Print a farewell message, then terminate the interpreter.
    print("\nObrigado por utilizar a calculadora. Até logo!")
    exit()
def chamarMenu():
    """Display the main menu and return the option the user typed as int."""
    # os.system('clear')
    prompt = ("##### INVESTPY #####"
              "\nDigite: "
              "\n<1> adicionar ação"
              "\n<2> exibir ações"
              "\n<3> sair\n> ")
    return int(input(prompt))
def ler_arquivo(arquivo):
    """Load *arquivo* as JSON; return an empty dict when the file is missing."""
    if not os.path.exists(arquivo):
        return {}
    with open(arquivo, "r") as handle:
        return json.load(handle)
def gravar_arquivo(dicionario, arquivo):
    """Persist *dicionario* to *arquivo* as JSON, overwriting it."""
    with open(arquivo, "w") as destino:
        json.dump(dicionario, destino)
def registrar(dicionario, arquivo):
    """Interactively add stocks to *dicionario* and persist it to *arquivo*.

    For each stock the user is prompted for name, price, dividend and the
    number of shares; typing 'S' (case-insensitive) repeats the loop.
    Returns a confirmation message.
    """
    resp = "S"
    while resp == "S":
        dicionario[input("Digite o nome da ação: ")] = [
            input("Digite o valor da ação: "),
            input("Digite o valor do dividendo: "),
            input("Digite a quantidade de cotas:")]
        resp = input("Digite <S> para continuar.").upper()
    gravar_arquivo(dicionario,arquivo)
    return "JSON gerado!!!!"
def exibir(arquivo):
dicionario = ler_arquivo(arquivo)
for chave, dado in dicionario.items():
print("Ação.........: ", chave)
print("Valor........: ", dado[0])
print("Dividendos...: ", dado[1])
print("Cotas........: ", dado[2])
acoes = ler_arquivo("acoes.json")
opcao=chamarMenu()
while opcao > 0 and opcao < 5:
if opcao == 1:
print(registrar(acoes, "acoes.json"))
elif opcao == 2:
exibir("acoes.json")
elif opcao == 3:
sair()
opcao = chamarMenu() | [
"json.load",
"os.path.exists",
"json.dump"
] | [((1715, 1738), 'os.path.exists', 'os.path.exists', (['arquivo'], {}), '(arquivo)\n', (1729, 1738), False, 'import os\n'), ((1974, 2005), 'json.dump', 'json.dump', (['dicionario', 'arq_json'], {}), '(dicionario, arq_json)\n', (1983, 2005), False, 'import json\n'), ((1808, 1827), 'json.load', 'json.load', (['arq_json'], {}), '(arq_json)\n', (1817, 1827), False, 'import json\n')] |
# this version is adapted from http://wiki.ipython.org/Old_Embedding/GTK
"""
Backend to the console plugin.
@author: <NAME>
@organization: IBM Corporation
@copyright: Copyright (c) 2007 IBM Corporation
@license: BSD
All rights reserved. This program and the accompanying materials are made
available under the terms of the BSD which accompanies this distribution, and
is available at U{http://www.opensource.org/licenses/bsd-license.php}
"""
# this file is a modified version of source code from the Accerciser project
# http://live.gnome.org/accerciser
from gi.repository import Gtk
from gi.repository import Gdk
import re
import sys
import os
from gi.repository import Pango
from io import StringIO
from functools import reduce
try:
import IPython
except Exception as e:
raise "Error importing IPython (%s)" % str(e)
ansi_colors = {'0;30': 'Black',
'0;31': 'Red',
'0;32': 'Green',
'0;33': 'Brown',
'0;34': 'Blue',
'0;35': 'Purple',
'0;36': 'Cyan',
'0;37': 'LightGray',
'1;30': 'DarkGray',
'1;31': 'DarkRed',
'1;32': 'SeaGreen',
'1;33': 'Yellow',
'1;34': 'LightBlue',
'1;35': 'MediumPurple',
'1;36': 'LightCyan',
'1;37': 'White'}
class IterableIPShell:
def __init__(self,argv=None,user_ns=None,user_global_ns=None,
cin=None, cout=None,cerr=None, input_func=None):
if input_func:
IPython.iplib.raw_input_original = input_func
if cin:
IPython.Shell.Term.cin = cin
if cout:
IPython.Shell.Term.cout = cout
if cerr:
IPython.Shell.Term.cerr = cerr
if argv is None:
argv=[]
# This is to get rid of the blockage that occurs during
# IPython.Shell.InteractiveShell.user_setup()
IPython.iplib.raw_input = lambda x: None
self.term = IPython.genutils.IOTerm(cin=cin, cout=cout, cerr=cerr)
os.environ['TERM'] = 'dumb'
excepthook = sys.excepthook
self.IP = IPython.Shell.make_IPython(argv,user_ns=user_ns,
user_global_ns=user_global_ns,
embedded=True,
shell_class=IPython.Shell.InteractiveShell)
self.IP.system = lambda cmd: self.shell(self.IP.var_expand(cmd),
header='IPython system call: ',
verbose=self.IP.rc.system_verbose)
sys.excepthook = excepthook
self.iter_more = 0
self.history_level = 0
self.complete_sep = re.compile('[\s\{\}\[\]\(\)]')
def execute(self):
self.history_level = 0
orig_stdout = sys.stdout
sys.stdout = IPython.Shell.Term.cout
try:
line = self.IP.raw_input(None, self.iter_more)
if self.IP.autoindent:
self.IP.readline_startup_hook(None)
except KeyboardInterrupt:
self.IP.write('\nKeyboardInterrupt\n')
self.IP.resetbuffer()
# keep cache in sync with the prompt counter:
self.IP.outputcache.prompt_count -= 1
if self.IP.autoindent:
self.IP.indent_current_nsp = 0
self.iter_more = 0
except:
self.IP.showtraceback()
else:
self.iter_more = self.IP.push(line)
if (self.IP.SyntaxTB.last_syntax_error and
self.IP.rc.autoedit_syntax):
self.IP.edit_syntax_error()
if self.iter_more:
self.prompt = str(self.IP.outputcache.prompt2).strip()
if self.IP.autoindent:
self.IP.readline_startup_hook(self.IP.pre_readline)
else:
self.prompt = str(self.IP.outputcache.prompt1).strip()
sys.stdout = orig_stdout
def historyBack(self):
self.history_level -= 1
return self._getHistory()
def historyForward(self):
self.history_level += 1
return self._getHistory()
def _getHistory(self):
try:
rv = self.IP.user_ns['In'][self.history_level].strip('\n')
except IndexError:
self.history_level = 0
rv = ''
return rv
def updateNamespace(self, ns_dict):
self.IP.user_ns.update(ns_dict)
def complete(self, line):
split_line = self.complete_sep.split(line)
possibilities = self.IP.complete(split_line[-1])
if possibilities:
common_prefix = reduce(self._commonPrefix, possibilities)
completed = line[:-len(split_line[-1])]+common_prefix
else:
completed = line
return completed, possibilities
def _commonPrefix(self, str1, str2):
for i in range(len(str1)):
if not str2.startswith(str1[:i+1]):
return str1[:i]
return str1
def shell(self, cmd,verbose=0,debug=0,header=''):
stat = 0
if verbose or debug: print(header+cmd)
# flush stdout so we don't mangle python's buffering
if not debug:
input, output = os.popen4(cmd)
print(output.read())
output.close()
input.close()
class ConsoleView(Gtk.TextView):
def __init__(self):
Gtk.TextView.__init__(self)
self.modify_font(Pango.FontDescription('Mono'))
self.set_cursor_visible(True)
self.text_buffer = self.get_buffer()
self.mark = self.text_buffer.create_mark('scroll_mark',
self.text_buffer.get_end_iter(),
False)
for code in ansi_colors:
self.text_buffer.create_tag(code,
foreground=ansi_colors[code],
weight=700)
self.text_buffer.create_tag('0')
self.text_buffer.create_tag('notouch', editable=False)
self.color_pat = re.compile('\x01?\x1b\[(.*?)m\x02?')
self.line_start = \
self.text_buffer.create_mark('line_start',
self.text_buffer.get_end_iter(), True
)
self.connect('key-press-event', self._onKeypress)
self.last_cursor_pos = 0
def write(self, text, editable=False):
segments = self.color_pat.split(text)
segment = segments.pop(0)
start_mark = self.text_buffer.create_mark(None,
self.text_buffer.get_end_iter(),
True)
self.text_buffer.insert(self.text_buffer.get_end_iter(), segment)
if segments:
ansi_tags = self.color_pat.findall(text)
for tag in ansi_tags:
i = segments.index(tag)
self.text_buffer.insert_with_tags_by_name(self.text_buffer.get_end_iter(),
segments[i+1], tag)
segments.pop(i)
if not editable:
self.text_buffer.apply_tag_by_name('notouch',
self.text_buffer.get_iter_at_mark(start_mark),
self.text_buffer.get_end_iter())
self.text_buffer.delete_mark(start_mark)
self.scroll_mark_onscreen(self.mark)
def showPrompt(self, prompt):
self.write(prompt)
self.text_buffer.move_mark(self.line_start,self.text_buffer.get_end_iter())
def changeLine(self, text):
iter = self.text_buffer.get_iter_at_mark(self.line_start)
iter.forward_to_line_end()
self.text_buffer.delete(self.text_buffer.get_iter_at_mark(self.line_start), iter)
self.write(text, True)
def getCurrentLine(self):
rv = self.text_buffer.get_slice(self.text_buffer.get_iter_at_mark(self.line_start),
self.text_buffer.get_end_iter(), False)
return rv
def showReturned(self, text):
iter = self.text_buffer.get_iter_at_mark(self.line_start)
iter.forward_to_line_end()
self.text_buffer.apply_tag_by_name('notouch',
self.text_buffer.get_iter_at_mark(self.line_start),
iter)
self.write('\n'+text)
if text:
self.write('\n')
self.showPrompt(self.prompt)
self.text_buffer.move_mark(self.line_start,self.text_buffer.get_end_iter())
self.text_buffer.place_cursor(self.text_buffer.get_end_iter())
def _onKeypress(self, obj, event):
if not event.string:
return
insert_mark = self.text_buffer.get_insert()
insert_iter = self.text_buffer.get_iter_at_mark(insert_mark)
selection_mark = self.text_buffer.get_selection_bound()
selection_iter = self.text_buffer.get_iter_at_mark(selection_mark)
start_iter = self.text_buffer.get_iter_at_mark(self.line_start)
if start_iter.compare(insert_iter) <= 0 and \
start_iter.compare(selection_iter) <= 0:
return
elif start_iter.compare(insert_iter) > 0 and \
start_iter.compare(selection_iter) > 0:
self.text_buffer.place_cursor(start_iter)
elif insert_iter.compare(selection_iter) < 0:
self.text_buffer.move_mark(insert_mark, start_iter)
elif insert_iter.compare(selection_iter) > 0:
self.text_buffer.move_mark(selection_mark, start_iter)
class IPythonView(ConsoleView, IterableIPShell):
def __init__(self, **kw):
ConsoleView.__init__(self)
self.cout = StringIO()
IterableIPShell.__init__(self, cout=self.cout,cerr=self.cout,
input_func=self.raw_input, **kw)
self.connect('key_press_event', self.keyPress)
self.execute()
self.cout.truncate(0)
self.showPrompt(self.prompt)
self.interrupt = False
def raw_input(self, prompt=''):
if self.interrupt:
self.interrupt = False
raise KeyboardInterrupt
return self.getCurrentLine()
def keyPress(self, widget, event):
if event.state & Gdk.ModifierType.CONTROL_MASK and event.keyval == 99:
self.interrupt = True
self._processLine()
return True
elif event.keyval == Gtk.keysyms.Return:
self._processLine()
return True
elif event.keyval == Gtk.keysyms.Up:
self.changeLine(self.historyBack())
return True
elif event.keyval == Gtk.keysyms.Down:
self.changeLine(self.historyForward())
return True
# todo: Home needs to advance past the ipython prompt
elif event.keyval == Gtk.keysyms.Tab:
if not self.getCurrentLine().strip():
return False
completed, possibilities = self.complete(self.getCurrentLine())
if len(possibilities) > 1:
slice = self.getCurrentLine()
self.write('\n')
for symbol in possibilities:
self.write(symbol+'\n')
self.showPrompt(self.prompt)
self.changeLine(completed or slice)
return True
def _processLine(self):
self.history_pos = 0
self.execute()
rv = self.cout.getvalue()
if rv: rv = rv.strip('\n')
self.showReturned(rv)
self.cout.truncate(0)
| [
"IPython.Shell.make_IPython",
"re.compile",
"functools.reduce",
"os.popen4",
"IPython.genutils.IOTerm",
"gi.repository.Pango.FontDescription",
"io.StringIO",
"gi.repository.Gtk.TextView.__init__"
] | [((1979, 2033), 'IPython.genutils.IOTerm', 'IPython.genutils.IOTerm', ([], {'cin': 'cin', 'cout': 'cout', 'cerr': 'cerr'}), '(cin=cin, cout=cout, cerr=cerr)\n', (2002, 2033), False, 'import IPython\n'), ((2113, 2257), 'IPython.Shell.make_IPython', 'IPython.Shell.make_IPython', (['argv'], {'user_ns': 'user_ns', 'user_global_ns': 'user_global_ns', 'embedded': '(True)', 'shell_class': 'IPython.Shell.InteractiveShell'}), '(argv, user_ns=user_ns, user_global_ns=\n user_global_ns, embedded=True, shell_class=IPython.Shell.InteractiveShell)\n', (2139, 2257), False, 'import IPython\n'), ((2706, 2743), 're.compile', 're.compile', (['"""[\\\\s\\\\{\\\\}\\\\[\\\\]\\\\(\\\\)]"""'], {}), "('[\\\\s\\\\{\\\\}\\\\[\\\\]\\\\(\\\\)]')\n", (2716, 2743), False, 'import re\n'), ((5057, 5084), 'gi.repository.Gtk.TextView.__init__', 'Gtk.TextView.__init__', (['self'], {}), '(self)\n', (5078, 5084), False, 'from gi.repository import Gtk\n'), ((5701, 5738), 're.compile', 're.compile', (['"""\x01?\x1b\\\\[(.*?)m\x02?"""'], {}), "('\\x01?\\x1b\\\\[(.*?)m\\x02?')\n", (5711, 5738), False, 'import re\n'), ((9097, 9107), 'io.StringIO', 'StringIO', ([], {}), '()\n', (9105, 9107), False, 'from io import StringIO\n'), ((4381, 4422), 'functools.reduce', 'reduce', (['self._commonPrefix', 'possibilities'], {}), '(self._commonPrefix, possibilities)\n', (4387, 4422), False, 'from functools import reduce\n'), ((4913, 4927), 'os.popen4', 'os.popen4', (['cmd'], {}), '(cmd)\n', (4922, 4927), False, 'import os\n'), ((5106, 5135), 'gi.repository.Pango.FontDescription', 'Pango.FontDescription', (['"""Mono"""'], {}), "('Mono')\n", (5127, 5135), False, 'from gi.repository import Pango\n')] |
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^get', views.index, name='index'),
url(r'^details/(?P<id>\w)/$', views.details, name='details'),
url(r'^add', views.add, name='add'),
url(r'^delete', views.delete, name='delete'),
url(r'^update', views.update, name='update'),
# url(r'^signup', views.signup, name='signup'),
# url(r'^login', views.login, name='login'),
# url(r'^login/$', auth_views.login),
]
| [
"django.conf.urls.url"
] | [((75, 111), 'django.conf.urls.url', 'url', (['"""^$"""', 'views.index'], {'name': '"""index"""'}), "('^$', views.index, name='index')\n", (78, 111), False, 'from django.conf.urls import url\n'), ((118, 156), 'django.conf.urls.url', 'url', (['"""^get"""', 'views.index'], {'name': '"""index"""'}), "('^get', views.index, name='index')\n", (121, 156), False, 'from django.conf.urls import url\n'), ((163, 223), 'django.conf.urls.url', 'url', (['"""^details/(?P<id>\\\\w)/$"""', 'views.details'], {'name': '"""details"""'}), "('^details/(?P<id>\\\\w)/$', views.details, name='details')\n", (166, 223), False, 'from django.conf.urls import url\n'), ((230, 264), 'django.conf.urls.url', 'url', (['"""^add"""', 'views.add'], {'name': '"""add"""'}), "('^add', views.add, name='add')\n", (233, 264), False, 'from django.conf.urls import url\n'), ((271, 314), 'django.conf.urls.url', 'url', (['"""^delete"""', 'views.delete'], {'name': '"""delete"""'}), "('^delete', views.delete, name='delete')\n", (274, 314), False, 'from django.conf.urls import url\n'), ((322, 365), 'django.conf.urls.url', 'url', (['"""^update"""', 'views.update'], {'name': '"""update"""'}), "('^update', views.update, name='update')\n", (325, 365), False, 'from django.conf.urls import url\n')] |
import cv2
import numpy as np
import matplotlib.pyplot as plt
#from matplotlib import pyplot as plt
from tkinter import filedialog
from tkinter import *
root = Tk()
root.withdraw()
root.filename = filedialog.askopenfilename(initialdir = "/",title = "Select file",filetypes = (("all files",".*"),("jpg files",".jpg")))
img = cv2.imread(root.filename)
root.destroy()
# Convert to gray-scale
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# Blur the image to reduce noise
img_blur = cv2.medianBlur(gray, 5)
# Apply hough transform on the image8 $$$img.shape[0]/16, param1=100, param2=11, minRadius=62, maxRadius=67
# Draw detected circles; circles = cv2.HoughCircles(img_blur, cv2.HOUGH_GRADIENT, 1, img.shape[0]/16, param1=200, param2=25, minRadius=60, maxRadius=67)
face_cascade = cv2.CascadeClassifier('C:/Users/andre/Desktop/NovenoSemestre/VisionArtificial/Python/haarcascade_frontalface_alt.xml')
gray=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)
faces = face_cascade.detectMultiScale(gray, 1.3, 5)
for (x,y,w,h) in faces:
center = (x + w//2, y + h//2)
#circles = cv2.HoughCircles(img_blur, cv2.HOUGH_GRADIENT, 1, img.shape[0]/128, param1=100, param2=11, minRadius=50, maxRadius=100)
circles = cv2.HoughCircles(img_blur, cv2.HOUGH_GRADIENT, 1, img.shape[0]/128, param1=100, param2=11, minRadius=(w//2-10), maxRadius=(w//2+10))
(h, w) = img_blur.shape[:2] #Calcular tamaño de la imageb
(pointRefX,pointRefY) = center
puntoMinimo =100
if circles is not None:
circles = np.uint16(np.around(circles))
for i in circles[0, :]:
#Definir el circulo mas cercano de la
xCercano =np.absolute(i[0]-pointRefX)
yCercano =np.absolute(i[1]-pointRefY)
puntoCercano = xCercano+yCercano
if (puntoCercano < puntoMinimo):
puntoMinimo = puntoCercano
circuloCercano = i
# Draw outer circle
#frame = cv2.ellipse(img, center, (w//2, h//2), 0, 0, 360,(100, 7, 55), 2)
cv2.ellipse(img, (circuloCercano[0], circuloCercano[1]),(circuloCercano[2],circuloCercano[2]+15),0,0,360,(0, 255, 0), 2)
# Draw inner circle
cv2.circle(img, (circuloCercano[0], circuloCercano[1]), circuloCercano[2], (0, 255, 0), 2)
cv2.circle(img, (circuloCercano[0], circuloCercano[1]), 2, (0, 0, 255), 3)
""" cv2.circle(img, (circuloCercano[0], circuloCercano[1]), circuloCercano[2], (0, 255, 0), 2)
# Draw inner circle
cv2.circle(img, (circuloCercano[0], circuloCercano[1]), 2, (0, 0, 255), 3) """
""" if circles is not None:
circles = np.uint16(np.around(circles))
for i in circles[0, :]:
#Definir el circulo mas cercano de la
xCercano =np.absolute(i[0]-pointRefX)
yCercano =np.absolute(i[1]-pointRefY)
puntoCercano = xCercano+yCercano
if (puntoCercano < puntoMinimo):
puntoMinimo = puntoCercano
circuloCercano = i
# Draw outer circle
cv2.circle(img, (i[0], i[1]), i[2], (0, 255, 0), 2)
# Draw inner circle
cv2.circle(img, (i[0], i[1]), 2, (0, 0, 255), 3)
"""
cv2.imshow("Mascara",img)
cv2.waitKey(0) | [
"numpy.absolute",
"cv2.medianBlur",
"cv2.HoughCircles",
"cv2.imshow",
"cv2.ellipse",
"cv2.circle",
"cv2.waitKey",
"numpy.around",
"cv2.cvtColor",
"cv2.CascadeClassifier",
"cv2.imread",
"tkinter.filedialog.askopenfilename"
] | [((208, 332), 'tkinter.filedialog.askopenfilename', 'filedialog.askopenfilename', ([], {'initialdir': '"""/"""', 'title': '"""Select file"""', 'filetypes': "(('all files', '.*'), ('jpg files', '.jpg'))"}), "(initialdir='/', title='Select file', filetypes=(\n ('all files', '.*'), ('jpg files', '.jpg')))\n", (234, 332), False, 'from tkinter import filedialog\n'), ((336, 361), 'cv2.imread', 'cv2.imread', (['root.filename'], {}), '(root.filename)\n', (346, 361), False, 'import cv2\n'), ((413, 450), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2GRAY'], {}), '(img, cv2.COLOR_BGR2GRAY)\n', (425, 450), False, 'import cv2\n'), ((497, 520), 'cv2.medianBlur', 'cv2.medianBlur', (['gray', '(5)'], {}), '(gray, 5)\n', (511, 520), False, 'import cv2\n'), ((800, 928), 'cv2.CascadeClassifier', 'cv2.CascadeClassifier', (['"""C:/Users/andre/Desktop/NovenoSemestre/VisionArtificial/Python/haarcascade_frontalface_alt.xml"""'], {}), "(\n 'C:/Users/andre/Desktop/NovenoSemestre/VisionArtificial/Python/haarcascade_frontalface_alt.xml'\n )\n", (821, 928), False, 'import cv2\n'), ((929, 966), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_BGR2GRAY'], {}), '(img, cv2.COLOR_BGR2GRAY)\n', (941, 966), False, 'import cv2\n'), ((1228, 1370), 'cv2.HoughCircles', 'cv2.HoughCircles', (['img_blur', 'cv2.HOUGH_GRADIENT', '(1)', '(img.shape[0] / 128)'], {'param1': '(100)', 'param2': '(11)', 'minRadius': '(w // 2 - 10)', 'maxRadius': '(w // 2 + 10)'}), '(img_blur, cv2.HOUGH_GRADIENT, 1, img.shape[0] / 128,\n param1=100, param2=11, minRadius=w // 2 - 10, maxRadius=w // 2 + 10)\n', (1244, 1370), False, 'import cv2\n'), ((1991, 2123), 'cv2.ellipse', 'cv2.ellipse', (['img', '(circuloCercano[0], circuloCercano[1])', '(circuloCercano[2], circuloCercano[2] + 15)', '(0)', '(0)', '(360)', '(0, 255, 0)', '(2)'], {}), '(img, (circuloCercano[0], circuloCercano[1]), (circuloCercano[2],\n circuloCercano[2] + 15), 0, 0, 360, (0, 255, 0), 2)\n', (2002, 2123), False, 'import cv2\n'), ((2134, 2228), 
'cv2.circle', 'cv2.circle', (['img', '(circuloCercano[0], circuloCercano[1])', 'circuloCercano[2]', '(0, 255, 0)', '(2)'], {}), '(img, (circuloCercano[0], circuloCercano[1]), circuloCercano[2],\n (0, 255, 0), 2)\n', (2144, 2228), False, 'import cv2\n'), ((2226, 2300), 'cv2.circle', 'cv2.circle', (['img', '(circuloCercano[0], circuloCercano[1])', '(2)', '(0, 0, 255)', '(3)'], {}), '(img, (circuloCercano[0], circuloCercano[1]), 2, (0, 0, 255), 3)\n', (2236, 2300), False, 'import cv2\n'), ((3134, 3160), 'cv2.imshow', 'cv2.imshow', (['"""Mascara"""', 'img'], {}), "('Mascara', img)\n", (3144, 3160), False, 'import cv2\n'), ((3163, 3177), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (3174, 3177), False, 'import cv2\n'), ((1524, 1542), 'numpy.around', 'np.around', (['circles'], {}), '(circles)\n', (1533, 1542), True, 'import numpy as np\n'), ((1653, 1682), 'numpy.absolute', 'np.absolute', (['(i[0] - pointRefX)'], {}), '(i[0] - pointRefX)\n', (1664, 1682), True, 'import numpy as np\n'), ((1701, 1730), 'numpy.absolute', 'np.absolute', (['(i[1] - pointRefY)'], {}), '(i[1] - pointRefY)\n', (1712, 1730), True, 'import numpy as np\n')] |
"""
MIT License
Copyright (c) 2020 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import re
url_pattern = re.compile(
r"^(?:http)s?://" # http:// or https://
# domain...
r"(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)"
r"+(?:[A-Z]{2,6}\.?|[A-Z0-9-]{2,}\.?)|"
r"localhost|" # localhost...
r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})" # ...or ip
r"(?::\d+)?" # optional port
r"(?:/?|[/?]\S+)$", re.IGNORECASE
)
def validate_url(url: str) -> bool:
return bool(url_pattern.match(url))
| [
"re.compile"
] | [((1097, 1312), 're.compile', 're.compile', (['"""^(?:http)s?://(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\\\\.)+(?:[A-Z]{2,6}\\\\.?|[A-Z0-9-]{2,}\\\\.?)|localhost|\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3})(?::\\\\d+)?(?:/?|[/?]\\\\S+)$"""', 're.IGNORECASE'], {}), "(\n '^(?:http)s?://(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\\\\.)+(?:[A-Z]{2,6}\\\\.?|[A-Z0-9-]{2,}\\\\.?)|localhost|\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3}\\\\.\\\\d{1,3})(?::\\\\d+)?(?:/?|[/?]\\\\S+)$'\n , re.IGNORECASE)\n", (1107, 1312), False, 'import re\n')] |
import sys
from PyQt5.QtWidgets import *
from PyQt5.QtCore import *
from PyQt5.QtGui import *
import qtawesome
import matplotlib.pyplot as plt
import csv
import numpy as np
import datetime
import os
class Stack:
    """A minimal LIFO stack backed by a Python list (top of stack = end of list)."""

    def __init__(self):
        # Underlying storage; attribute name kept as `items` because other
        # code may inspect it directly.
        self.items = []

    def isEmpty(self):
        """Return True when the stack holds no elements."""
        return not self.items

    def push(self, item):
        """Place *item* on top of the stack."""
        self.items.append(item)

    def peek(self):
        """Return the top element without removing it (IndexError when empty)."""
        return self.items[-1]

    def pop(self):
        """Remove and return the top element (IndexError when empty)."""
        return self.items.pop()

    def size(self):
        """Return the number of stored elements."""
        return len(self.items)
class MainUI(QMainWindow):
    def __init__(self):
        """Build the main window: construct the UI, then the drag/resize state."""
        super().__init__()
        self.initUI()
        self.advice=[]  # collected feedback entries (presumably filled elsewhere -- TODO confirm)
        self.stack=Stack()
        self.isLeftPressDown = False  # True while the left mouse button is held down
        self.dragPosition = 0  # offset between cursor and window top-left while dragging
        # Pseudo-enum of border/corner directions for the frameless window's resize logic.
        self.Numbers = self.enum(UP=0, DOWN=1, LEFT=2, RIGHT=3, LEFTTOP=4, LEFTBOTTOM=5, RIGHTBOTTOM=6, RIGHTTOP=7,NONE=8)
        self.dir = self.Numbers.NONE  # resize direction currently under the cursor
        self.setMouseTracking(True)  # receive mouseMoveEvent even with no button pressed
def enum(self, **enums):
return type('Enum', (), enums)
def mouseReleaseEvent(self, event):
if (event.button() == Qt.LeftButton):
self.isLeftPressDown = False
if (self.dir != self.Numbers.NONE):
self.releaseMouse()
def mousePressEvent(self, event):
if (event.button() == Qt.LeftButton):
self.isLeftPressDown = True
if (self.dir != self.Numbers.NONE):
self.mouseGrabber()
else:
self.dragPosition = event.globalPos() - self.frameGeometry().topLeft()
    def mouseMoveEvent(self, event):
        """Hover: update the resize direction. Left-drag: either resize one
        edge/corner of the frameless window or move the whole window."""
        gloPoint = event.globalPos()
        rect = self.rect()
        # Current window geometry in global coordinates.
        tl = self.mapToGlobal(rect.topLeft())
        rb = self.mapToGlobal(rect.bottomRight())
        if (not self.isLeftPressDown):
            # No button held: detect which border (if any) the cursor is over.
            # NOTE(review): region() is not defined in this part of the file --
            # confirm it exists elsewhere in the class.
            self.region(gloPoint)
        else:
            if (self.dir != self.Numbers.NONE):
                # Resizing: start from the current geometry and move the active
                # edge(s) to the cursor, clamping width/height at the minimums.
                rmove = QRect(tl, rb)
                if (self.dir == self.Numbers.LEFT):
                    if (rb.x() - gloPoint.x() <= self.minimumWidth()):
                        # Would shrink below minimum width: keep the left edge fixed.
                        rmove.setX(tl.x())
                    else:
                        rmove.setX(gloPoint.x())
                elif (self.dir == self.Numbers.RIGHT):
                    rmove.setWidth(gloPoint.x() - tl.x())
                elif (self.dir == self.Numbers.UP):
                    if (rb.y() - gloPoint.y() <= self.minimumHeight()):
                        # Would shrink below minimum height: keep the top edge fixed.
                        rmove.setY(tl.y())
                    else:
                        rmove.setY(gloPoint.y())
                elif (self.dir == self.Numbers.DOWN):
                    rmove.setHeight(gloPoint.y() - tl.y())
                elif (self.dir == self.Numbers.LEFTTOP):
                    # Corner: apply the LEFT clamp and the UP clamp independently.
                    if (rb.x() - gloPoint.x() <= self.minimumWidth()):
                        rmove.setX(tl.x())
                    else:
                        rmove.setX(gloPoint.x())
                    if (rb.y() - gloPoint.y() <= self.minimumHeight()):
                        rmove.setY(tl.y())
                    else:
                        rmove.setY(gloPoint.y())
                elif (self.dir == self.Numbers.RIGHTTOP):
                    # NOTE(review): unlike LEFTTOP, no minimum-size clamp here
                    # (nor for the two bottom corners) -- confirm intended.
                    rmove.setWidth(gloPoint.x() - tl.x())
                    rmove.setY(gloPoint.y())
                elif (self.dir == self.Numbers.LEFTBOTTOM):
                    rmove.setX(gloPoint.x())
                    rmove.setHeight(gloPoint.y() - tl.y())
                elif (self.dir == self.Numbers.RIGHTBOTTOM):
                    rmove.setWidth(gloPoint.x() - tl.x())
                    rmove.setHeight(gloPoint.y() - tl.y())
                else:
                    pass
                self.setGeometry(rmove)
            else:
                # Moving: keep the original cursor offset inside the window.
                self.move(event.globalPos() - self.dragPosition)
                event.accept()
    def initUI(self):
        """Build the whole main window: a gray left sidebar (parameter buttons,
        plot checkboxes, help buttons) and a white right panel (statistic
        labels/fields, two image areas, action buttons), plus frameless-window
        styling."""
        # NOTE(review): the window is fixed at 1200x900, which makes the
        # edge-resize logic in mouseMoveEvent effectively a no-op -- confirm
        # which behavior is intended.
        self.setFixedSize(1200,900)
        # Top-level grid: sidebar spans columns 0-1, content spans columns 2-10.
        self.main_widget = QWidget()
        self.main_layout = QGridLayout()
        self.main_widget.setLayout(self.main_layout)
        self.left_widget = QWidget()
        self.left_widget.setObjectName('left_widget')
        self.left_layout = QGridLayout()
        self.left_widget.setLayout(self.left_layout)
        self.right_widget = QWidget()
        self.right_widget.setObjectName('right_widget')
        self.right_layout = QGridLayout()
        self.right_widget.setLayout(self.right_layout)
        self.main_layout.addWidget(self.left_widget,0,0,16,2)
        self.main_layout.addWidget(self.right_widget,0,2,16,9)
        self.setCentralWidget(self.main_widget)
        # Sidebar section headers: disabled QPushButtons styled as labels.
        self.left_label_1 = QPushButton("参数设置")
        self.left_label_1.setObjectName('left_label')
        self.left_label_1.setEnabled(False)
        self.left_label_2 = QPushButton("图像显示")
        self.left_label_2.setObjectName('left_label')
        self.left_label_2.setEnabled(False)
        self.left_label_3 = QPushButton("帮助")
        self.left_label_3.setObjectName('left_label')
        self.left_label_3.setEnabled(False)
        # Parameter buttons: each opens its own modal dialog (buttonDialog1..7).
        self.left_button_1 = QPushButton(qtawesome.icon('fa.rmb', color='white'), "设置期初资金")
        self.left_button_1.setObjectName('left_button')
        self.left_button_1.clicked.connect(self.buttonDialog1)
        self.left_button_2 = QPushButton(qtawesome.icon('fa.hourglass-start', color='white'), "设置交易开始时间")
        self.left_button_2.setObjectName('left_button')
        self.left_button_2.clicked.connect(self.buttonDialog2)
        self.left_button_3 = QPushButton(qtawesome.icon('fa.hourglass-end', color='white'), "设置交易结束时间")
        self.left_button_3.setObjectName('left_button')
        self.left_button_3.clicked.connect(self.buttonDialog3)
        self.left_button_4 = QPushButton(qtawesome.icon('fa.line-chart', color='white'), "修改唐奇安通道")
        self.left_button_4.setObjectName('left_button')
        self.left_button_4.clicked.connect(self.buttonDialog4)
        self.left_button_5 = QPushButton(qtawesome.icon('fa.check-circle-o', color='white'), "修改ATR")
        self.left_button_5.setObjectName('left_button')
        self.left_button_5.clicked.connect(self.buttonDialog5)
        self.left_button_6 = QPushButton(qtawesome.icon('fa.pie-chart', color='white'), "修改手续费")
        self.left_button_6.setObjectName('left_button')
        self.left_button_6.clicked.connect(self.buttonDialog6)
        self.left_button_7 = QPushButton(qtawesome.icon('fa.sort-amount-asc', color='white'), "修改投资系数")
        self.left_button_7.setObjectName('left_button')
        self.left_button_7.clicked.connect(self.buttonDialog7)
        # Plot-selection checkboxes (all checked by default).
        self.left_checkbox_1 = QCheckBox('策略收益')
        self.left_checkbox_1.setChecked(True)
        self.left_checkbox_2 = QCheckBox('沪深300')
        self.left_checkbox_2.setChecked(True)
        self.left_checkbox_3 = QCheckBox('仓位图')
        self.left_checkbox_3.setChecked(True)
        # Help-section buttons (buttonDialog8..10).
        self.left_button_8 = QPushButton(qtawesome.icon('fa.question', color='white'), "专业名词含义查询")
        self.left_button_8.setObjectName('left_button')
        self.left_button_8.clicked.connect(self.buttonDialog8)
        self.left_button_9 = QPushButton(qtawesome.icon('fa.comment', color='white'), "反馈建议")
        self.left_button_9.setObjectName('left_button')
        self.left_button_9.clicked.connect(self.buttonDialog9)
        self.left_button_10 = QPushButton(qtawesome.icon('fa.envelope', color='white'), "联系我们")
        self.left_button_10.setObjectName('left_button')
        self.left_button_10.clicked.connect(self.buttonDialog10)
        # Sidebar layout: one widget per row, top to bottom.
        self.left_layout.addWidget(self.left_label_1, 0, 0, 1, 3)
        self.left_layout.addWidget(self.left_button_1, 1, 0, 1, 3)
        self.left_layout.addWidget(self.left_button_2, 2, 0, 1, 3)
        self.left_layout.addWidget(self.left_button_3, 3, 0, 1, 3)
        self.left_layout.addWidget(self.left_button_4, 4, 0, 1, 3)
        self.left_layout.addWidget(self.left_button_5, 5, 0, 1, 3)
        self.left_layout.addWidget(self.left_button_6, 6, 0, 1, 3)
        self.left_layout.addWidget(self.left_button_7, 7, 0, 1, 3)
        self.left_layout.addWidget(self.left_label_2, 8, 0, 1, 3)
        self.left_layout.addWidget(self.left_checkbox_1, 9, 0, 1, 3)
        self.left_layout.addWidget(self.left_checkbox_2, 10, 0, 1, 3)
        self.left_layout.addWidget(self.left_checkbox_3, 11, 0, 1, 3)
        self.left_layout.addWidget(self.left_label_3, 12, 0, 1, 3)
        self.left_layout.addWidget(self.left_button_8, 13, 0, 1, 3)
        self.left_layout.addWidget(self.left_button_9, 14, 0, 1, 3)
        self.left_layout.addWidget(self.left_button_10, 15, 0, 1, 3)
        self.left_checkbox_1.setStyleSheet("QCheckBox{color:rgb(255,250,250)}")
        self.left_checkbox_2.setStyleSheet("QCheckBox{color:rgb(255,250,250)}")
        self.left_checkbox_3.setStyleSheet("QCheckBox{color:rgb(255,250,250)}")
        self.left_widget.setStyleSheet('''
            QCheckBox{font-family: "Helvetica Neue", Helvetica, KaiTi, sans-serif;
            font-size:16px}
            QPushButton{border:none;
                color:white;
                text-align: left;
                font-family: "Helvetica Neue", Helvetica, KaiTi, sans-serif;
                font-size:16px}
            QPushButton#left_label{
                border:none;
                border-bottom:1px solid white;
                font-size:20px;
                font-weight:700;
                font-family: "Helvetica Neue", Helvetica, KaiTi, sans-serif;
            }
            QPushButton#left_button:hover{border-left:4px solid blue;font-weight:700;}
            QWidget#left_widget{
                background:gray;
                border-top:1px solid white;
                border-bottom:1px solid white;
                border-left:1px solid white;
                border-top-left-radius:10px;
                border-bottom-left-radius:10px;
            }
        ''')
        # Right panel, row 1: one centered header label per backtest statistic.
        self.right_label_0 =QLabel('')
        self.right_label_1 = QLabel('期初资金')
        self.right_label_1.setAlignment(Qt.AlignCenter)
        self.right_label_1.setFont(QFont('KaiTi',12))
        self.right_label_2 = QLabel('总资产')
        self.right_label_2.setAlignment(Qt.AlignCenter)
        self.right_label_2.setFont(QFont('KaiTi', 12))
        self.right_label_3 = QLabel('累计盈亏')
        self.right_label_3.setAlignment(Qt.AlignCenter)
        self.right_label_3.setFont(QFont('KaiTi', 12))
        self.right_label_4 = QLabel('可交易天数')
        self.right_label_4.setAlignment(Qt.AlignCenter)
        self.right_label_4.setFont(QFont('KaiTi', 12))
        self.right_label_5 = QLabel('基准收益率')
        self.right_label_5.setAlignment(Qt.AlignCenter)
        self.right_label_5.setFont(QFont('KaiTi', 12))
        self.right_label_6 = QLabel('年化收益率')
        self.right_label_6.setAlignment(Qt.AlignCenter)
        self.right_label_6.setFont(QFont('KaiTi', 12))
        self.right_label_7 = QLabel('开始时间')
        self.right_label_7.setAlignment(Qt.AlignCenter)
        self.right_label_7.setFont(QFont('KaiTi', 12))
        self.right_label_8 = QLabel('结束时间')
        self.right_label_8.setAlignment(Qt.AlignCenter)
        self.right_label_8.setFont(QFont('KaiTi', 12))
        self.right_label_9 = QLabel('胜率')
        self.right_label_9.setAlignment(Qt.AlignCenter)
        self.right_label_9.setFont(QFont('KaiTi', 12))
        self.right_layout.addWidget(self.right_label_0, 0, 3, 1, 3)
        self.right_layout.addWidget(self.right_label_1, 1, 3, 1, 1)
        self.right_layout.addWidget(self.right_label_2, 1, 4, 1, 1)
        self.right_layout.addWidget(self.right_label_3, 1, 5, 1, 1)
        self.right_layout.addWidget(self.right_label_4, 1, 6, 1, 1)
        self.right_layout.addWidget(self.right_label_5, 1, 7, 1, 1)
        self.right_layout.addWidget(self.right_label_6, 1, 8, 1, 1)
        self.right_layout.addWidget(self.right_label_7, 1, 9, 1, 1)
        self.right_layout.addWidget(self.right_label_8, 1, 10, 1, 1)
        self.right_layout.addWidget(self.right_label_9, 1, 11, 1, 1)
        # Right panel, row 2: read-only value fields, one under each header.
        self.right_lineEdit_1 = QLineEdit()
        self.right_lineEdit_1.setReadOnly(True)
        self.right_lineEdit_1.setText('')
        self.right_lineEdit_2 = QLineEdit()
        self.right_lineEdit_2.setReadOnly(True)
        self.right_lineEdit_2.setText('')
        self.right_lineEdit_3 = QLineEdit()
        self.right_lineEdit_3.setReadOnly(True)
        self.right_lineEdit_3.setText('')
        self.right_lineEdit_4 = QLineEdit()
        self.right_lineEdit_4.setReadOnly(True)
        self.right_lineEdit_4.setText('')
        self.right_lineEdit_5 = QLineEdit()
        self.right_lineEdit_5.setReadOnly(True)
        self.right_lineEdit_5.setText('')
        self.right_lineEdit_6 = QLineEdit()
        self.right_lineEdit_6.setReadOnly(True)
        self.right_lineEdit_6.setText('')
        self.right_lineEdit_7 = QLineEdit()
        self.right_lineEdit_7.setReadOnly(True)
        self.right_lineEdit_7.setText('')
        self.right_lineEdit_8 = QLineEdit()
        self.right_lineEdit_8.setReadOnly(True)
        self.right_lineEdit_8.setText('')
        self.right_lineEdit_9 = QLineEdit()
        self.right_lineEdit_9.setReadOnly(True)
        self.right_lineEdit_9.setText('')
        self.right_layout.addWidget(self.right_lineEdit_1, 2, 3, 1, 1)
        self.right_layout.addWidget(self.right_lineEdit_2, 2, 4, 1, 1)
        self.right_layout.addWidget(self.right_lineEdit_3, 2, 5, 1, 1)
        self.right_layout.addWidget(self.right_lineEdit_4, 2, 6, 1, 1)
        self.right_layout.addWidget(self.right_lineEdit_5, 2, 7, 1, 1)
        self.right_layout.addWidget(self.right_lineEdit_6, 2, 8, 1, 1)
        self.right_layout.addWidget(self.right_lineEdit_7, 2, 9, 1, 1)
        self.right_layout.addWidget(self.right_lineEdit_8, 2, 10, 1, 1)
        self.right_layout.addWidget(self.right_lineEdit_9, 2, 11, 1, 1)
        # Two image areas, initially showing placeholder pictures loaded from
        # the working directory; scaled to fill their labels.
        self.right_figure_1 = QLabel()
        self.figure_1 = QPixmap("猫咪老师4.png")
        self.right_figure_1.setPixmap(self.figure_1)
        self.right_figure_1.setScaledContents(True)
        self.right_figure_2 = QLabel()
        self.figure_2 = QPixmap("喵.png")
        self.right_figure_2.setPixmap(self.figure_2)
        self.right_figure_2.setScaledContents(True)
        self.right_layout.addWidget(self.right_figure_1, 3, 3, 7, 9)
        self.right_layout.addWidget(self.right_figure_2, 10, 3, 5, 9)
        # Bottom action buttons: run/re-run, delete current results, quit.
        # NOTE(review): start/tryOrRepeat1/tryOrRepeat2/figuredelete/quitApplicaton
        # are not visible in this part of the file -- confirm they are defined.
        self.right_button_1 = QPushButton(qtawesome.icon('fa.repeat', color='blue'), "测试/重测")
        self.right_button_1.clicked.connect(self.start)
        self.right_button_1.clicked.connect(self.tryOrRepeat1)
        self.right_button_1.clicked.connect(self.tryOrRepeat2)
        self.right_button_2 = QPushButton(qtawesome.icon('fa.floppy-o', color='gray'), "删除当前结果")
        self.right_button_2.clicked.connect(self.figuredelete)
        self.right_button_3 = QPushButton(qtawesome.icon('fa.times', color='red'), "退出")
        self.right_button_3.clicked.connect(self.quitApplicaton)
        self.right_layout.addWidget(self.right_button_1, 16, 3, 1, 3)
        self.right_layout.addWidget(self.right_button_2, 16, 6, 1, 3)
        self.right_layout.addWidget(self.right_button_3, 16, 9, 1, 3)
        # NOTE(review): 'font-size=25px' in the QLabel rule below looks like a
        # typo for 'font-size:25px' (Qt ignores the invalid property).
        self.right_widget.setStyleSheet('''
            QWidget#right_widget{
                color:#232C51;
                background:white;
                border-top:1px solid darkGray;
                border-bottom:1px solid darkGray;
                border-right:1px solid darkGray;
                border-top-right-radius:10px;
                border-bottom-right-radius:10px;
            }
            QLabel{
                border:None;
                font-weight:700;
                font-size=25px;
                font-family: "Helvetica Neue", Helvetica, KaiTi, sans-serif;
            }
            QLineEdit{
                font:bold;
                border:1px solid gray;
                width:300px;
                padding:2px 4px;
                background-color:rgb(255,250,250);
                selection-color:white;
            }
            QPushButton{font-family: "Helvetica Neue", Helvetica, KaiTi, sans-serif;
            font-size:16px}
        ''')
        # Frameless translucent window -- hence the custom drag/resize handlers.
        self.setAttribute(Qt.WA_TranslucentBackground)
        self.setWindowFlag(Qt.FramelessWindowHint)
        self.main_layout.setSpacing(0)
    def buttonDialog1(self):
        """Build and show the modal 'set initial capital' dialog.

        The entered value (whole units of 10,000 yuan, enforced by a
        QIntValidator) is committed by okk1() and cleared by cancel1().
        """
        self.dialog1 = QDialog()
        self.dialog1.setWindowIcon(QIcon("猫咪老师1.jpg"))
        self.dialog1.resize(250,100)
        self.dialog1.setWindowTitle('设置期初资金')
        formLayout = QFormLayout()
        label = QLabel('请输入您的期初资金(整数万元)')
        self.edit1 = QLineEdit()
        # Integer-only input; okk1() relies on this validator.
        self.edit1.setValidator(QIntValidator())
        self.edit1.setAlignment(Qt.AlignRight)
        self.edit1.setFont(QFont('Arial', 10))
        button_ok = QPushButton('OK')
        button_ok.clicked.connect(self.okk1)
        button_cancel = QPushButton('Cancel')
        button_cancel.clicked.connect(self.cancel1)
        formLayout.addRow(label)
        formLayout.addRow(self.edit1)
        formLayout.addRow(button_ok, button_cancel)
        self.dialog1.setLayout(formLayout)
        self.dialog1.setStyleSheet('''
            QPushButton{color:black;text-align: center;}
            QLabel{font-family: "Helvetica Neue", Helvetica, KaiTi, sans-serif;font-size:16px}
            QDialog{background:lightgray;
                border-top:1px solid royalblue;
                border-bottom:1px solid royalblue;
                border-left:1px solid royalblue;
                border-right:1px solid royalblue;
                border-top-left-radius:10px;
                border-bottom-left-radius:10px;
                border-top-right-radius:10px;
                border-bottom-right-radius:10px;
                }
        ''')
        # Block the rest of the application until the dialog is dismissed.
        self.dialog1.setWindowModality(Qt.ApplicationModal)
        self.dialog1.exec_()
def okk1(self):
if self.edit1.text() != '':
global initial_cash
global cash
initial_cash=eval(self.edit1.text())*10000
self.dialog1.close()
def cancel1(self):
self.edit1.setText('')
    def buttonDialog2(self):
        """Build and show the modal 'set backtest start date' dialog.

        The entered date is committed by okk2() (which snaps it to the
        nearest trading day) and cleared by cancel2().
        """
        self.dialog2 = QDialog()
        self.dialog2.setWindowIcon(QIcon("猫咪老师1.jpg"))
        self.dialog2.resize(280,100)
        self.dialog2.setWindowTitle('设置交易开始时间')
        formLayout = QFormLayout()
        label1 = QLabel('请输入您的交易开始时间')
        label2 = QLabel('时间格式示例:2011-03-01')
        label3 = QLabel('时间范围为2011-03-01至2021-04-01')
        self.edit2 = QLineEdit()
        self.edit2.setAlignment(Qt.AlignRight)
        self.edit2.setFont(QFont('Arial', 10))
        button_ok = QPushButton('OK')
        button_ok.clicked.connect(self.okk2)
        button_cancel = QPushButton('Cancel')
        button_cancel.clicked.connect(self.cancel2)
        formLayout.addRow(label1)
        formLayout.addRow(label2)
        formLayout.addRow(label3)
        formLayout.addRow(self.edit2)
        formLayout.addRow(button_ok, button_cancel)
        self.dialog2.setLayout(formLayout)
        self.dialog2.setStyleSheet('''
            QPushButton{color:black;text-align: center;}
            QLabel{font-family: "Helvetica Neue", Helvetica, KaiTi, sans-serif;font-size:16px}
            QDialog{background:lightgray;
                border-top:1px solid royalblue;
                border-bottom:1px solid royalblue;
                border-left:1px solid royalblue;
                border-right:1px solid royalblue;
                border-top-left-radius:10px;
                border-bottom-left-radius:10px;
                border-top-right-radius:10px;
                border-bottom-right-radius:10px;
                }
        ''')
        self.dialog2.setWindowModality(Qt.ApplicationModal)
        self.dialog2.exec_()
def okk2(self):
if self.edit2.text()!='':
global start_time
start_time=self.edit2.text()
start_time = nearestdate(start_time, 1)
self.dialog2.close()
def cancel2(self):
self.edit2.setText('')
    def buttonDialog3(self):
        """Build and show the modal 'set backtest end date' dialog.

        The entered date is committed by okk3() (which snaps it backward
        to the nearest trading day) and cleared by cancel3().
        """
        self.dialog3 = QDialog()
        self.dialog3.setWindowIcon(QIcon("猫咪老师1.jpg"))
        self.dialog3.resize(280,100)
        self.dialog3.setWindowTitle('设置交易结束时间')
        formLayout = QFormLayout()
        label1 = QLabel('请输入您的交易结束时间')
        label2 = QLabel('时间格式示例:2021-04-01')
        label3 = QLabel('时间范围为2011-03-01至2021-04-01')
        self.edit3 = QLineEdit()
        self.edit3.setAlignment(Qt.AlignRight)
        self.edit3.setFont(QFont('Arial', 10))
        button_ok = QPushButton('OK')
        button_ok.clicked.connect(self.okk3)
        button_cancel = QPushButton('Cancel')
        button_cancel.clicked.connect(self.cancel3)
        formLayout.addRow(label1)
        formLayout.addRow(label2)
        formLayout.addRow(label3)
        formLayout.addRow(self.edit3)
        formLayout.addRow(button_ok, button_cancel)
        self.dialog3.setLayout(formLayout)
        self.dialog3.setStyleSheet('''
            QPushButton{color:black;text-align: center;}
            QLabel{font-family: "Helvetica Neue", Helvetica, KaiTi, sans-serif;font-size:16px}
            QDialog{background:lightgray;
                border-top:1px solid royalblue;
                border-bottom:1px solid royalblue;
                border-left:1px solid royalblue;
                border-right:1px solid royalblue;
                border-top-left-radius:10px;
                border-bottom-left-radius:10px;
                border-top-right-radius:10px;
                border-bottom-right-radius:10px;
                }
        ''')
        self.dialog3.setWindowModality(Qt.ApplicationModal)
        self.dialog3.exec_()
def okk3(self):
if self.edit3.text()!='':
global end_time
end_time=self.edit3.text()
end_time = nearestdate(end_time, -1)
self.dialog3.close()
def cancel3(self):
self.edit3.setText('')
    def buttonDialog4(self):
        """Build and show the modal dialog for tuning the Donchian window (5-50).

        A slider drives the read-only line edit via valueChange1(); okk4()
        commits the value into the global Dontime, cancel4() resets to 20.
        """
        self.dialog4 = QDialog()
        self.dialog4.setWindowIcon(QIcon("猫咪老师1.jpg"))
        self.dialog4.resize(280,100)
        self.dialog4.setWindowTitle('修改唐奇安通道')
        formLayout = QFormLayout()
        label = QLabel('唐奇安通道修改为(5~50):')
        self.edit4 = QLineEdit('20')
        # Read-only: the slider below is the only way to change the value.
        self.edit4.setReadOnly(True)
        self.edit4.setAlignment(Qt.AlignRight)
        self.edit4.setFont(QFont('Arial', 10))
        self.slider1 = QSlider(Qt.Horizontal)
        self.slider1.setMinimum(5)
        self.slider1.setMaximum(50)
        self.slider1.setSingleStep(1)
        self.slider1.setValue(20)
        self.slider1.setTickPosition(QSlider.TicksBelow)
        self.slider1.setTickInterval(1)
        self.slider1.valueChanged.connect(self.valueChange1)
        button_ok = QPushButton('OK')
        button_ok.clicked.connect(self.okk4)
        button_cancel = QPushButton('Cancel')
        button_cancel.clicked.connect(self.cancel4)
        formLayout.addRow(label)
        formLayout.addRow(self.edit4)
        formLayout.addRow(self.slider1)
        formLayout.addRow(button_ok, button_cancel)
        self.dialog4.setLayout(formLayout)
        self.dialog4.setStyleSheet('''
            QPushButton{color:black;text-align: center;}
            QLabel{font-family: "Helvetica Neue", Helvetica, KaiTi, sans-serif;font-size:16px}
            QDialog{background:lightgray;
                border-top:1px solid royalblue;
                border-bottom:1px solid royalblue;
                border-left:1px solid royalblue;
                border-right:1px solid royalblue;
                border-top-left-radius:10px;
                border-bottom-left-radius:10px;
                border-top-right-radius:10px;
                border-bottom-right-radius:10px;
                }
        ''')
        self.dialog4.setWindowModality(Qt.ApplicationModal)
        self.dialog4.exec_()
def okk4(self):
global Dontime
Dontime=int(self.edit4.text())
self.dialog4.close()
def cancel4(self):
self.slider1.setValue(20)
def valueChange1(self):
self.edit4.setText('%d'%self.slider1.value())
    def buttonDialog5(self):
        """Build and show the modal dialog for tuning the ATR window (5-50).

        A slider drives the read-only line edit via valueChange2(); okk5()
        commits the value into the global atrtime, cancel5() resets to 20.
        """
        self.dialog5 = QDialog()
        self.dialog5.setWindowIcon(QIcon("猫咪老师1.jpg"))
        self.dialog5.resize(250,100)
        self.dialog5.setWindowTitle('修改ATR')
        formLayout = QFormLayout()
        label = QLabel('ATR修改为(5~50):')
        self.edit5 = QLineEdit('20')
        # Read-only: the slider below is the only way to change the value.
        self.edit5.setReadOnly(True)
        self.edit5.setAlignment(Qt.AlignRight)
        self.edit5.setFont(QFont('Arial', 10))
        self.slider2 = QSlider(Qt.Horizontal)
        self.slider2.setMinimum(5)
        self.slider2.setMaximum(50)
        self.slider2.setSingleStep(1)
        self.slider2.setValue(20)
        self.slider2.setTickPosition(QSlider.TicksBelow)
        self.slider2.setTickInterval(1)
        self.slider2.valueChanged.connect(self.valueChange2)
        button_ok = QPushButton('OK')
        button_ok.clicked.connect(self.okk5)
        button_cancel = QPushButton('Cancel')
        button_cancel.clicked.connect(self.cancel5)
        formLayout.addRow(label)
        formLayout.addRow(self.edit5)
        formLayout.addRow(self.slider2)
        formLayout.addRow(button_ok, button_cancel)
        self.dialog5.setLayout(formLayout)
        self.dialog5.setStyleSheet('''
            QPushButton{color:black;text-align: center;}
            QLabel{font-family: "Helvetica Neue", Helvetica, KaiTi, sans-serif;font-size:16px}
            QDialog{background:lightgray;
                border-top:1px solid royalblue;
                border-bottom:1px solid royalblue;
                border-left:1px solid royalblue;
                border-right:1px solid royalblue;
                border-top-left-radius:10px;
                border-bottom-left-radius:10px;
                border-top-right-radius:10px;
                border-bottom-right-radius:10px;
                }
        ''')
        self.dialog5.setWindowModality(Qt.ApplicationModal)
        self.dialog5.exec_()
def okk5(self):
global atrtime
atrtime=int(self.edit5.text())
self.dialog5.close()
def cancel5(self):
self.slider2.setValue(20)
def valueChange2(self):
self.edit5.setText('%d'%self.slider2.value())
    def buttonDialog6(self):
        """Build and show the modal dialog for the commission fee.

        The entered value (integer, units of 1/10000, enforced by a
        QIntValidator) is committed by okk6() and reset to '1' by cancel6().
        """
        self.dialog6 = QDialog()
        self.dialog6.setWindowIcon(QIcon("猫咪老师1.jpg"))
        self.dialog6.resize(280,100)
        self.dialog6.setWindowTitle('修改手续费')
        formLayout = QFormLayout()
        label = QLabel('修改手续费为(单位:万分之一):')
        self.edit6 = QLineEdit('1')
        # Integer-only input; okk6() relies on this validator.
        self.edit6.setValidator(QIntValidator())
        self.edit6.setAlignment(Qt.AlignRight)
        self.edit6.setFont(QFont('Arial', 10))
        button_ok = QPushButton('OK')
        button_ok.clicked.connect(self.okk6)
        button_cancel = QPushButton('Cancel')
        button_cancel.clicked.connect(self.cancel6)
        formLayout.addRow(label)
        formLayout.addRow(self.edit6)
        formLayout.addRow(button_ok, button_cancel)
        self.dialog6.setLayout(formLayout)
        self.dialog6.setStyleSheet('''
            QPushButton{color:black;text-align: center;}
            QLabel{font-family: "Helvetica Neue", Helvetica, KaiTi, sans-serif;font-size:16px}
            QDialog{background:lightgray;
                border-top:1px solid royalblue;
                border-bottom:1px solid royalblue;
                border-left:1px solid royalblue;
                border-right:1px solid royalblue;
                border-top-left-radius:10px;
                border-bottom-left-radius:10px;
                border-top-right-radius:10px;
                border-bottom-right-radius:10px;
                }
        ''')
        self.dialog6.setWindowModality(Qt.ApplicationModal)
        self.dialog6.exec_()
def okk6(self):
if self.edit6.text() != '':
global backtest_commission_ratio
backtest_commission_ratio=eval(self.edit6.text())/10000
self.dialog6.close()
def cancel6(self):
self.edit6.setText('1')
    def buttonDialog7(self):
        """Build and show the modal dialog for the investment coefficient.

        The entered value (units of 1/100) is committed by okk7() and
        reset to '1' by cancel7().  Unlike the other numeric dialogs this
        line edit has no validator — okk7() must cope with free text.
        """
        self.dialog7 = QDialog()
        self.dialog7.setWindowIcon(QIcon("猫咪老师1.jpg"))
        self.dialog7.resize(280,100)
        self.dialog7.setWindowTitle('修改投资系数')
        formLayout = QFormLayout()
        label = QLabel('修改投资系数为(单位:百分之一):')
        self.edit7 = QLineEdit('1')
        self.edit7.setAlignment(Qt.AlignRight)
        self.edit7.setFont(QFont('Arial', 10))
        button_ok = QPushButton('OK')
        button_ok.clicked.connect(self.okk7)
        button_cancel = QPushButton('Cancel')
        button_cancel.clicked.connect(self.cancel7)
        formLayout.addRow(label)
        formLayout.addRow(self.edit7)
        formLayout.addRow(button_ok, button_cancel)
        self.dialog7.setLayout(formLayout)
        self.dialog7.setStyleSheet('''
            QPushButton{color:black;text-align: center;}
            QLabel{font-family: "Helvetica Neue", Helvetica, KaiTi, sans-serif;font-size:16px}
            QDialog{background:lightgray;
                border-top:1px solid royalblue;
                border-bottom:1px solid royalblue;
                border-left:1px solid royalblue;
                border-right:1px solid royalblue;
                border-top-left-radius:10px;
                border-bottom-left-radius:10px;
                border-top-right-radius:10px;
                border-bottom-right-radius:10px;
                }
        ''')
        self.dialog7.setWindowModality(Qt.ApplicationModal)
        self.dialog7.exec_()
def okk7(self):
if self.edit7.text() != '':
global unit_rate
unit_rate=eval(self.edit7.text())/100
self.dialog7.close()
def cancel7(self):
self.edit7.setText('1')
    def buttonDialog8(self):
        """Build and show the modal glossary dialog.

        A combo box lists the strategy terms; picking one triggers
        selectionChange(), which replaces the label text with the
        corresponding explanation.
        """
        self.dialog8 = QDialog()
        self.dialog8.setWindowIcon(QIcon("猫咪老师1.jpg"))
        self.dialog8.resize(280,100)
        self.dialog8.setWindowTitle('专业名词含义查询')
        layout=QVBoxLayout()
        self.label = QLabel('请选择专业名词:')
        self.cb = QComboBox()
        self.cb.addItems(['唐奇安通道', 'ATR', '投资系数', '基准收益率','年化收益率'])
        self.cb.currentIndexChanged.connect(self.selectionChange)
        layout.addWidget(self.label)
        layout.addWidget(self.cb)
        self.dialog8.setLayout(layout)
        self.dialog8.setStyleSheet('''
            QLabel{font-family: "Helvetica Neue", Helvetica, KaiTi, sans-serif;font-size:16px}
            QComboBox{font-family: "Helvetica Neue", Helvetica, KaiTi, sans-serif;font-size:16px}
            QDialog{background:lightgray;
                border-top:1px solid royalblue;
                border-bottom:1px solid royalblue;
                border-left:1px solid royalblue;
                border-right:1px solid royalblue;
                border-top-left-radius:10px;
                border-bottom-left-radius:10px;
                border-top-right-radius:10px;
                border-bottom-right-radius:10px;
                }
        ''')
        self.dialog8.setWindowModality(Qt.ApplicationModal)
        self.dialog8.exec_()
def selectionChange(self,i):
dict0={'唐奇安通道':"唐奇安通道主要是一个突破型趋势跟踪指标,可以提供两种不同的突破信号", 'ATR':"ATR是日内指数最大波动的平均振幅,由当日最高、最低价和上一交易日的收盘价决定", '投资系数':"每一次开仓交易合约数unit的确定是将总资产的投资系数除以价值波动量得到", '基准收益率':"默认沪深300指数收益",'年化收益率':"年化收益率是指投资期限为一年的收益率"}
self.label.setText('%s'%dict0[self.cb.currentText()])
    def buttonDialog9(self):
        """Build and show the modal feedback dialog.

        okk9() acknowledges the feedback with a message box; cancel9()
        clears the text area.
        """
        self.dialog9 = QDialog()
        self.dialog9.setWindowIcon(QIcon("猫咪老师1.jpg"))
        self.dialog9.resize(250,100)
        self.dialog9.setWindowTitle('反馈建议')
        formlayout=QFormLayout()
        label = QLabel('您的反馈与建议是:')
        self.edit9 = QTextEdit('')
        self.edit9.setAlignment(Qt.AlignLeft)
        self.edit9.setFont(QFont('KaiTi', 10))
        button_ok = QPushButton('OK')
        button_ok.clicked.connect(self.okk9)
        button_cancel = QPushButton('Cancel')
        button_cancel.clicked.connect(self.cancel9)
        formlayout.addRow(label)
        formlayout.addRow(self.edit9)
        formlayout.addRow(button_ok,button_cancel)
        self.dialog9.setLayout(formlayout)
        self.dialog9.setStyleSheet('''
            QPushButton{color:black;text-align: center;}
            QLabel{font-family: "Helvetica Neue", Helvetica, KaiTi, sans-serif;font-size:16px}
            QDialog{background:lightgray;
                border-top:1px solid royalblue;
                border-bottom:1px solid royalblue;
                border-left:1px solid royalblue;
                border-right:1px solid royalblue;
                border-top-left-radius:10px;
                border-bottom-left-radius:10px;
                border-top-right-radius:10px;
                border-bottom-right-radius:10px;
                }
        ''')
        self.dialog9.setWindowModality(Qt.ApplicationModal)
        self.dialog9.exec_()
def okk9(self):
QMessageBox.about(self,'感谢','感谢您的反馈与建议!基于您的反馈与建议,我们会努力做得更好!')
self.dialog9.close()
def cancel9(self):
self.edit9.setText('')
    def buttonDialog10(self):
        """Build and show the modal 'contact us' dialog.

        Lists contact e-mail addresses and two clickable hyperlink labels
        that open the 163 / QQ webmail home pages in the default browser.
        """
        self.dialog10 = QDialog()
        self.dialog10.setWindowIcon(QIcon("猫咪老师1.jpg"))
        self.dialog10.resize(250,150)
        self.dialog10.setWindowTitle('联系我们')
        layout=QVBoxLayout()
        label1 = QLabel('欢迎您来信联系我们!')
        label2 = QLabel('我们的邮箱是:')
        label5 = QLabel('<EMAIL>')
        label6 = QLabel('<EMAIL>')
        label7 = QLabel('<EMAIL>')
        label3 = QLabel('')
        # Let Qt open the link externally instead of emitting linkActivated.
        label3.setOpenExternalLinks(True)
        label3.setText("<A href='https://mail.163.com/'>网易邮箱</a>")
        label3.setAlignment(Qt.AlignCenter)
        label3.setToolTip('点击进入网易邮箱主页')
        label4 = QLabel('')
        label4.setOpenExternalLinks(True)
        label4.setText("<A href='https://mail.qq.com/'>QQ邮箱</a>")
        label4.setAlignment(Qt.AlignCenter)
        label4.setToolTip('点击进入QQ邮箱主页')
        layout.addWidget(label1)
        layout.addWidget(label2)
        layout.addWidget(label5)
        layout.addWidget(label6)
        layout.addWidget(label7)
        layout.addWidget(label3)
        layout.addWidget(label4)
        self.dialog10.setLayout(layout)
        self.dialog10.setStyleSheet('''
            QLabel{font-family: "Helvetica Neue", Helvetica, KaiTi, sans-serif;font-size:16px}
            QDialog{background:lightgray;
                border-top:1px solid royalblue;
                border-bottom:1px solid royalblue;
                border-left:1px solid royalblue;
                border-right:1px solid royalblue;
                border-top-left-radius:10px;
                border-bottom-left-radius:10px;
                border-top-right-radius:10px;
                border-bottom-right-radius:10px;
                }
        ''')
        self.dialog10.setWindowModality(Qt.ApplicationModal)
        self.dialog10.exec_()
    def tryOrRepeat1(self):
        """Render the asset-vs-time chart into the upper figure label.

        Plots the global series filled in by start() (`l_asset`, `l_index`,
        `l_initial` against the dates `xs`), saves the figure under a
        timestamped file name (pushed onto self.stack so figuredelete()
        can remove it later) and shows it.  With neither checkbox ticked,
        the placeholder image is restored instead.
        """
        if self.left_checkbox_1.isChecked() or self.left_checkbox_2.isChecked():
            plt.figure()
            plt.title('Asset-Time')
            if self.left_checkbox_1.isChecked():
                plt.plot(xs, l_asset, linestyle='-', color='firebrick', linewidth=1.5, label='Asset')
            if self.left_checkbox_2.isChecked():
                plt.plot(xs, l_index, linestyle='-', color='royalblue', linewidth=1, label='Index')
            plt.plot(xs, l_initial, linestyle='--', color='black', label='Initial')
            plt.xlabel('Time')
            plt.ylabel('Asset')
            plt.gcf().autofmt_xdate()
            plt.legend()
            plt.rcParams['figure.figsize'] = (9.0, 4.0)
            theTime1=datetime.datetime.now()
            figure_1_name='figure_1'+str(theTime1)+'.png'
            # Colons from the timestamp are not valid in Windows file names.
            figure_1_name = ''.join(figure_1_name.split(':'))
            self.stack.push(figure_1_name)
            plt.savefig(figure_1_name,dpi=300,bbox_inches='tight')
            plt.close()
            self.figure_1=QPixmap(figure_1_name)
            self.right_figure_1.setPixmap(self.figure_1)
        else:
            self.figure_1 = QPixmap("猫咪老师4.png")
            self.right_figure_1.setPixmap(self.figure_1)
    def tryOrRepeat2(self):
        """Render the long/short position bar chart into the lower figure label.

        Longs are drawn as positive bars, shorts as negative ones, taken
        from the global `position_long`/`position_short` tables filled in
        by start().  The figure is saved under a timestamped name (pushed
        onto self.stack for figuredelete()) and shown; with the checkbox
        unticked the placeholder image is restored instead.
        """
        if self.left_checkbox_3.isChecked():
            plt.figure()
            plt.title('Long/Short-Time')
            long_tem = []
            short_tem = []
            initial_bar = []
            # Drop the last row so the bar series lines up with `xs`.
            for i in range(len(position_long)-1):
                long_tem.append(position_long[i][1])
                short_tem.append(-position_short[i][1])
                initial_bar.append(0)
            plt.bar(xs, long_tem,linestyle='-', color='firebrick', linewidth=1, label='long')
            plt.bar(xs, short_tem,linestyle='-', color='royalblue', linewidth=1, label='short')
            plt.plot(xs, initial_bar, linestyle='--', color='black', label='Initial')
            plt.xlabel('Time')
            plt.ylabel('')
            plt.gcf().autofmt_xdate()
            plt.legend()
            plt.rcParams['figure.figsize'] = (9.0, 4.0)
            theTime2 = datetime.datetime.now()
            figure_2_name = 'figure_2' + str(theTime2) + '.png'
            # Colons from the timestamp are not valid in Windows file names.
            figure_2_name = ''.join(figure_2_name.split(':'))
            self.stack.push(figure_2_name)
            plt.savefig(figure_2_name, dpi=300, bbox_inches='tight')
            plt.close()
            self.figure_2 = QPixmap(figure_2_name)
            self.right_figure_2.setPixmap(self.figure_2)
        else:
            self.figure_2 = QPixmap("喵.png")
            self.right_figure_2.setPixmap(self.figure_2)
def quitApplicaton(self):
app = MainUI.instance()
app.quit()
    def figuredelete(self):
        """Delete the two most recently saved chart PNGs and disable the delete button.

        NOTE(review): assumes both charts were saved by the last test run
        (tryOrRepeat1/2 each push a file name only when their checkbox is
        ticked); with fewer than two entries the pop()/os.remove() calls
        may fail — confirm the Stack's underflow behaviour.
        """
        figure_1_delete=self.stack.pop()
        figure_2_delete = self.stack.pop()
        os.remove(figure_1_delete)
        os.remove(figure_2_delete)
        self.right_button_2.setEnabled(False)
    def start(self):
        """Run the backtest over [start_time, end_time] and fill the result fields.

        Rebuilds the per-day position tables, replays every trading day
        through on_bar()/in_bar(), then derives the summary statistics
        (final asset, trade count, benchmark return, annualized return,
        win rate) and writes them into the right-hand line edits.

        NOTE(review): `date`, `winningRate` and the `cash` table are not
        reset here, so a second click of 测试/重测 accumulates onto the
        previous run's state — confirm whether re-testing is expected to
        start fresh.
        """
        global time
        global date
        global winningRate
        global baseline
        global annualized_rate
        global xs
        global l_initial
        global position_long
        global position_short
        global l_time
        global l_asset
        global l_index
        self.right_button_2.setEnabled(True)
        # One [date, quantity] slot per trading day in the window.
        position_long = []
        position_short = []
        for n in range(finddatepos(start_time), finddatepos(end_time) + 1):
            position_long.append([result[n][0], 0])
            position_short.append([result[n][0], 0])
            cash.append([result[n][0], 0])
        cash[0][1] = initial_cash
        start_date_position = finddatepos(start_time)
        end_date_position = finddatepos(end_time)
        # Replay the strategy: open-price decisions, then close-price re-check.
        for d in range(start_date_position + 1, end_date_position + 1):
            on_bar(result[d][0], atrtime)
            in_bar(result[d][0], atrtime)
        l_time = []
        l_asset = []
        l_index = []
        time = 0
        for d in range(start_date_position + 1, end_date_position + 1):
            time += 1
            l_time.append(result[d][0])
            l_asset.append(current_asset(result[d][0]))
            # Benchmark: the index rebased to the initial capital.
            l_index.append(result[d][4] * initial_cash / result[start_date_position + 1][4])
            # A position change counts as a trade; it is a "win" when total
            # assets did not fall from the previous day.
            if position_short[time][1] != position_short[time - 1][1] or position_long[time][1] != \
                    position_long[time - 1][1]:
                date += 1
                if current_asset(result[d][0]) >= current_asset(result[d - 1][0]):
                    winningRate += 1
        winningRate /= date
        baseline = (l_index[-1] / l_index[0]) - 1
        d1 = datetime.datetime(int(start_time.split('-')[0]), int(start_time.split('-')[1]),
                               int(start_time.split('-')[2]))
        d2 = datetime.datetime(int(end_time.split('-')[0]), int(end_time.split('-')[1]), int(end_time.split('-')[2]))
        interval = d2 - d1
        annualized_rate = ((current_asset(end_time) / current_asset(start_time)) - 1) * 365 / interval.days
        xs =[]
        xs = [datetime.datetime.strptime(d, '%Y-%m-%d').date() for d in l_time]
        l_initial = []
        l_initial = [initial_cash] * (end_date_position - start_date_position)
        # Push the summary numbers into the read-only result fields.
        self.right_lineEdit_1.setText('%d' % int(initial_cash))
        self.right_lineEdit_2.setText('%d' % int(current_asset(end_time)))
        self.right_lineEdit_3.setText('%d' % int(current_asset(end_time)-initial_cash))
        self.right_lineEdit_4.setText('%d' % date)
        baseline0 = baseline * 100
        self.right_lineEdit_5.setText('%.2f' % baseline0 + '%')
        annualized_rate0 = annualized_rate * 100
        self.right_lineEdit_6.setText('%.2f' % annualized_rate0 + '%')
        self.right_lineEdit_7.setText('%s' % start_time)
        self.right_lineEdit_8.setText('%s' % end_time)
        winningRate0 = winningRate * 100
        self.right_lineEdit_9.setText('%.2f' % winningRate0 + '%')
def main():
    """Create the Qt application, show the main window, and run the event loop."""
    application = QApplication(sys.argv)
    window = MainUI()
    window.show()
    sys.exit(application.exec_())
def finddatepos(date, data=None):
    """Return the row index of `date` in an OHLC table.

    Parameters
    ----------
    date : str
        Date string to look up (first element of each row).
    data : list of rows, optional
        Table to search.  Defaults to the module-level ``result`` table,
        preserving the original one-argument call signature.

    Raises
    ------
    ValueError
        If the date is not present.  (The original walked past the end of
        the table and died with an uninformative IndexError.)
    """
    rows = result if data is None else data
    for pos, row in enumerate(rows):
        if row[0] == date:
            return pos
    raise ValueError("date %r not found in data table" % (date,))
def calAtr(result, start_time, end_time, tr_list):  # Calculate atr
    """Average True Range over the bars from start_time through end_time.

    For each bar in the window the true range is
    max(high-low, high-prev_close, prev_close-low).  Each [date, tr] pair
    is also appended to the caller-supplied `tr_list`.

    Returns [atr, atr_half], both floored to int (atr_half = floor(atr/2)).
    """
    inside = False
    true_ranges = []
    for idx in range(1, len(result) - 1):
        row = result[idx]
        if row[0] == start_time:
            inside = True
        if inside:
            prev_close = float(result[idx - 1][4])
            high = float(row[2])
            low = float(row[3])
            tr = max(high - low, high - prev_close, prev_close - low)
            tr_list.append([row[0], tr])
            true_ranges.append(tr)
        if row[0] == end_time:
            inside = False
    atr = int(np.floor(np.mean(true_ranges)))
    atr_half = int(np.floor(0.5 * atr))
    return [atr, atr_half]
def calDon(result, time, atr_half, Dontime = 30):  # Calculate Donchian tunnel
    """Donchian-channel trade levels for the bar dated `time`.

    The channel is the max high / min low over the `Dontime` bars
    preceding that date.  Returns
    [long_add_point, long_stop_loss, short_add_point, short_stop_loss],
    i.e. the channel edges shifted by +/- atr_half.
    (As in the original, a NameError escapes if `time` is not found in
    the searchable range.)
    """
    for pos in range(Dontime, len(result) - 1):
        if result[pos][0] != time:
            continue
        window = result[pos - Dontime:pos]
        don_open = np.max([row[2] for row in window])
        don_close = np.min([row[3] for row in window])
        long_add_point = don_open + atr_half
        long_stop_loss = don_open - atr_half
        short_add_point = don_close - atr_half
        short_stop_loss = don_close + atr_half
    return [long_add_point, long_stop_loss, short_add_point, short_stop_loss]
def on_bar(date, atrtime = 10):
    """Trading decision at the OPEN of the bar dated `date`.

    Carries yesterday's positions and cash forward into today's row, then
    applies the Donchian/ATR turtle rules against today's open price.
    Mutates the module-level `position_long`, `position_short` and `cash`
    tables in place.
    """
    # Locate today's row in the OHLC table.
    i = 0
    while result[i][0] != date:
        i += 1
    yesterday = result[i-1][0]
    startatrday = result[i-atrtime][0]
    open = result[i][1]  # NOTE: shadows the builtin open() inside this function
    atr = calAtr(result, startatrday, yesterday, tr_list)[0]
    atr_half = calAtr(result, startatrday, yesterday, tr_list)[1]
    Donlst = calDon(result, date, atr_half)
    long_add_point = Donlst[0]
    long_stop_loss = Donlst[1]
    short_add_point = Donlst[2]
    short_stop_loss = Donlst[3]
    # Locate today's slot in the bookkeeping tables.
    date_pos = 0
    while cash[date_pos][0] != date:
        date_pos += 1
    # Carry yesterday's state forward before trading.
    position_long[date_pos][1] = position_long[date_pos - 1][1]
    position_short[date_pos][1] = position_short[date_pos - 1][1]
    cash[date_pos][1] = cash[date_pos - 1][1]
    if position_long[date_pos][1] == 0 and position_short[date_pos][1] == 0:
        if open > long_add_point - atr_half:
            # Upward breakout of the Donchian channel: open a long position.
            if cash[date_pos][1] >= (1 + backtest_commission_ratio) * open * unit(current_asset(yesterday),yesterday):
                position_long[date_pos][1] = unit(current_asset(yesterday),yesterday)
                print(date, '开多仓%.1f'%(unit(current_asset(yesterday),yesterday)))
                cash[date_pos][1] -= (1 + backtest_commission_ratio) * open * unit(current_asset(yesterday),yesterday)
            else:
                # Not enough cash for a full unit: spend everything that is left.
                position_long[date_pos][1] = cash[date_pos][1] / (1 + backtest_commission_ratio) / open
                print(date, '开多仓%.1f'%(cash[date_pos][1] / (1 + backtest_commission_ratio) / open))
                cash[date_pos][1] = 0
        if open < short_add_point + atr_half:
            # Downward breakout of the Donchian channel: open a short position.
            position_short[date_pos][1] = unit(current_asset(yesterday),yesterday)
            print(date, '开空仓%.1f'%(unit(current_asset(yesterday),yesterday)))
            cash[date_pos][1] += (1 - backtest_commission_ratio) * open * unit(current_asset(yesterday),yesterday)
    if position_long[date_pos][1] != 0:
        if open > long_add_point:
            # Add to the long on a further 1/2-ATR breakout.
            if cash[date_pos][1] >= (1 + backtest_commission_ratio) * open * unit(current_asset(yesterday), yesterday):
                position_long[date_pos][1] += unit(current_asset(yesterday),yesterday)
                print(date, '继续加仓%.1f'%(unit(current_asset(yesterday),yesterday)))
                cash[date_pos][1] -= (1 + backtest_commission_ratio) * open * unit(current_asset(yesterday), yesterday)
            else:
                position_long[date_pos][1] += cash[date_pos][1] / (1 + backtest_commission_ratio) / open
                print(date, '继续加仓%.1f' % (cash[date_pos][1] / (1 + backtest_commission_ratio) / open))
                cash[date_pos][1] = 0
        if open < long_stop_loss:
            # Holding a long below the stop level: scale out by one unit
            # (or whatever remains of the position).
            if position_long[date_pos][1] - unit(current_asset(yesterday),yesterday) >= 0:
                print(date, '平多仓%.1f'%(unit(current_asset(yesterday),yesterday)))
                cash[date_pos][1] += (1 - backtest_commission_ratio) * open * unit(current_asset(yesterday),
                                                                                   yesterday)
            else:
                print(date, '平多仓%.1f' % (position_long[date_pos][1]))
                cash[date_pos][1] += (1 - backtest_commission_ratio) * position_long[date_pos][1] * open
            position_long[date_pos][1] = max(position_long[date_pos][1] - unit(current_asset(yesterday),yesterday), 0)
        # Disabled alternative (close the whole long at once), kept verbatim:
        '''print(date, '平多仓%.1f'%(position_long[date_pos][1]))
        cash[date_pos][1] += (1 - backtest_commission_ratio) * open * position_long[date_pos][1]
        position_long[date_pos][1] = 0'''
    if position_short[date_pos][1] != 0:
        if open < short_add_point:
            # Add to the short on a further 1/2-ATR breakout.
            position_short[date_pos][1] += unit(current_asset(yesterday),yesterday)
            print(date, '继续加仓%.1f'%(unit(current_asset(yesterday),yesterday)))
            cash[date_pos][1] += (1 - backtest_commission_ratio) * open * unit(current_asset(yesterday), yesterday)
        if open > short_stop_loss:
            # Holding a short above the stop level: cover, capped by the
            # position size, one unit, and the available cash.
            m = min(position_short[date_pos][1] * open, open * unit(current_asset(yesterday),yesterday), cash[date_pos][1] / (1 + backtest_commission_ratio))
            print(date, '平空仓%.1f'%(m / open))
            cash[date_pos][1] -= (1 + backtest_commission_ratio) * m
            position_short[date_pos][1] = position_short[date_pos][1] - m / open
        # Disabled alternative (cover the whole short at once), kept verbatim:
        '''m = position_short[date_pos][1] * open
        print(date, '平空仓%.1f'%(m / open))
        cash[date_pos][1] -= (1 + backtest_commission_ratio) * m
        position_short[date_pos][1] = position_short[date_pos][1] - m / open'''
def in_bar(date, atrtime = 10):
    """Trading decision at the CLOSE of the bar dated `date`.

    Same Donchian/ATR rules as on_bar() but evaluated against the close
    price.  Unlike on_bar() it does NOT carry yesterday's state forward —
    on_bar() has already done that for the same day.  Mutates the
    module-level `position_long`, `position_short` and `cash` in place.
    """
    # Locate today's row in the OHLC table.
    i = 0
    while result[i][0] != date:
        i += 1
    yesterday = result[i-1][0]
    startatrday = result[i-atrtime][0]
    close = result[i][4]
    atr = calAtr(result, startatrday, yesterday, tr_list)[0]
    atr_half = calAtr(result, startatrday, yesterday, tr_list)[1]
    Donlst = calDon(result, date, atr_half)
    long_add_point = Donlst[0]
    long_stop_loss = Donlst[1]
    short_add_point = Donlst[2]
    short_stop_loss = Donlst[3]
    # Locate today's slot in the bookkeeping tables.
    date_pos = 0
    while cash[date_pos][0] != date:
        date_pos += 1
    if position_long[date_pos][1] == 0 and position_short[date_pos][1] == 0:
        if close > long_add_point - atr_half:
            # Upward breakout of the Donchian channel: open a long position.
            if cash[date_pos][1] >= (1 + backtest_commission_ratio) * close * unit(current_asset(yesterday),yesterday):
                position_long[date_pos][1] = unit(current_asset(yesterday),yesterday)
                print(date, '开多仓%.1f'%(unit(current_asset(yesterday),yesterday)))
                cash[date_pos][1] -= (1 + backtest_commission_ratio) * close * unit(current_asset(yesterday),yesterday)
            else:
                # Not enough cash for a full unit: spend everything that is left.
                position_long[date_pos][1] = cash[date_pos][1] / (1 + backtest_commission_ratio) / close
                print(date, '开多仓%.1f'%(cash[date_pos][1] / (1 + backtest_commission_ratio) / close))
                cash[date_pos][1] = 0
        if close < short_add_point + atr_half:
            # Downward breakout of the Donchian channel: open a short position.
            position_short[date_pos][1] = unit(current_asset(yesterday),yesterday)
            print(date, '开空仓%.1f'%(unit(current_asset(yesterday),yesterday)))
            cash[date_pos][1] += (1 - backtest_commission_ratio) * close * unit(current_asset(yesterday),yesterday)
    if position_long[date_pos][1] != 0:
        if close > long_add_point:
            # Add to the long on a further 1/2-ATR breakout.
            if cash[date_pos][1] >= (1 + backtest_commission_ratio) * close * unit(current_asset(yesterday), yesterday):
                position_long[date_pos][1] += unit(current_asset(yesterday),yesterday)
                print(date, '继续加仓%.1f'%(unit(current_asset(yesterday),yesterday)))
                cash[date_pos][1] -= (1 + backtest_commission_ratio) * close * unit(current_asset(yesterday), yesterday)
            else:
                position_long[date_pos][1] += cash[date_pos][1] / (1 + backtest_commission_ratio) / close
                print(date, '继续加仓%.1f' % (cash[date_pos][1] / (1 + backtest_commission_ratio) / close))
                cash[date_pos][1] = 0
        if close < long_stop_loss:
            # Holding a long below the stop level: scale out by one unit
            # (or whatever remains of the position).
            if position_long[date_pos][1] - unit(current_asset(yesterday),yesterday) >= 0:
                print(date, '平多仓%.1f'%(unit(current_asset(yesterday),yesterday)))
                cash[date_pos][1] += (1 - backtest_commission_ratio) * close * unit(current_asset(yesterday),
                                                                                    yesterday)
            else:
                print(date, '平多仓%.1f' % (position_long[date_pos][1]))
                cash[date_pos][1] += (1 - backtest_commission_ratio) * position_long[date_pos][1] * close
            position_long[date_pos][1] = max(position_long[date_pos][1] - unit(current_asset(yesterday),yesterday), 0)
        # Disabled alternative (close the whole long at once), kept verbatim:
        '''print(date, '平多仓%.1f'%(position_long[date_pos][1]))
        cash[date_pos][1] += (1 - backtest_commission_ratio) * close * position_long[date_pos][1]
        position_long[date_pos][1] = 0'''
    if position_short[date_pos][1] != 0:
        if close < short_add_point:
            # Add to the short on a further 1/2-ATR breakout.
            position_short[date_pos][1] += unit(current_asset(yesterday),yesterday)
            print(date, '继续加仓%.1f'%(unit(current_asset(yesterday),yesterday)))
            cash[date_pos][1] += (1 - backtest_commission_ratio) * close * unit(current_asset(yesterday), yesterday)
        if close > short_stop_loss:
            # Holding a short above the stop level: cover, capped by the
            # position size, one unit, and the available cash.
            m = min(position_short[date_pos][1] * close, close * unit(current_asset(yesterday),yesterday), cash[date_pos][1] / (1 + backtest_commission_ratio))
            print(date, '平空仓%.1f'%(m / close))
            cash[date_pos][1] -= (1 + backtest_commission_ratio) * m
            position_short[date_pos][1] = position_short[date_pos][1] - m / close
        # Disabled alternative (cover the whole short at once), kept verbatim:
        '''m = position_short[date_pos][1] * close
        print(date, '平空仓%.1f'%(m / close))
        cash[date_pos][1] -= (1 + backtest_commission_ratio) * m
        position_short[date_pos][1] = position_short[date_pos][1] - m / close'''
def unit(total_asset, date, atrtime = 10):
    """Position size for one entry: (total assets × unit_rate) / dollar volatility.

    DV is the ATR over the `atrtime` bars starting at `date`; reads the
    module-level `result`, `tr_list` and `unit_rate`.
    NOTE(review): the ATR window looks FORWARD from `date`
    (end_time = date + atrtime - 1 bars) — confirm this look-ahead is
    intended for a backtest.
    """
    i = 0
    while result[i][0] != date:
        i += 1
    end_time = result[i + atrtime - 1][0]
    DV = calAtr(result, date, end_time, tr_list)[0]
    return total_asset * unit_rate / DV
def current_asset(date):
    """Total portfolio value on `date`: cash plus the net position
    (long minus short) marked at that day's closing price.

    Reads the module-level `cash`, `position_long`, `position_short`
    and `result` tables.
    """
    pos = 0
    while cash[pos][0] != date:
        pos += 1
    net_position = position_long[pos][1] - position_short[pos][1]
    return cash[pos][1] + net_position * result[finddatepos(date)][4]
def nearestdate(date, counter = 1):
    """Snap `date` to the nearest date actually present in `result`.

    counter=1 walks forward one calendar day at a time, counter=-1 walks
    backward.  The table's dates appear to be stored without zero padding
    (e.g. '2021-3-1'), so after each step any leading zero in the day and
    month fields is stripped before re-checking membership — TODO confirm
    the data format.
    NOTE(review): loops forever if `date` lies outside the table's range.
    """
    dateset = set()
    for k in range(len(result)):
        dateset.add(result[k][0])
    while date not in dateset:
        dt = datetime.datetime.strptime(date, '%Y-%m-%d')
        if counter == 1:
            date = (dt + datetime.timedelta(days=1)).strftime('%Y-%m-%d')
            # Strip zero padding: day field (index 8) first, then month (index 5).
            if date[8] == '0':
                date = date[:8] + date[9:]
            if date[5] == '0':
                date = date[:5] + date[6:]
        elif counter == -1:
            date = (dt - datetime.timedelta(days=1)).strftime('%Y-%m-%d')
            if date[8] == '0':
                date = date[:8] + date[9:]
            if date[5] == '0':
                date = date[:5] + date[6:]
    return date
if __name__ == '__main__':
    # Load the OHLC history; each row becomes [date, open, high, low, close].
    csvFile = open("data.csv", "r")
    reader = csv.reader(csvFile)
    result = []
    for item in reader:
        # Ignore first line
        if reader.line_num == 1:
            continue
        result.append(
            [item[0], float(item[1]), float(item[2]), float(item[3]), float(item[4])]) # date, open, high, low, close
    csvFile.close()
    # Backtest configuration defaults (overridable from the GUI dialogs).
    initial_cash = 0
    backtest_commission_ratio = 0.0001
    start_time = '2021-03-01'
    end_time = '2021-04-27'
    # Mutable global state shared by the strategy functions above.
    tr_list = []
    cash = []
    position_short = []
    position_long = []
    atrtime = 20
    Dontime = 30
    unit_rate = 0.01
    # Summary statistics accumulated by MainUI.start().
    winningRate = 0
    date = 0
    time = 0
    baseline = 0
    annualized_rate = 0
    l_time = []
    l_asset = []
    l_index = []
    xs=[]
    l_initial = []
main() | [
"matplotlib.pyplot.ylabel",
"datetime.timedelta",
"os.remove",
"numpy.mean",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"numpy.max",
"matplotlib.pyplot.close",
"numpy.min",
"csv.reader",
"matplotlib.pyplot.savefig",
"matplotlib.pyplot.gcf",
"numpy.floor",
"matplotlib.pyplot.titl... | [((55610, 55629), 'csv.reader', 'csv.reader', (['csvFile'], {}), '(csvFile)\n', (55620, 55629), False, 'import csv\n'), ((40154, 40180), 'os.remove', 'os.remove', (['figure_1_delete'], {}), '(figure_1_delete)\n', (40163, 40180), False, 'import os\n'), ((40190, 40216), 'os.remove', 'os.remove', (['figure_2_delete'], {}), '(figure_2_delete)\n', (40199, 40216), False, 'import os\n'), ((44136, 44155), 'numpy.floor', 'np.floor', (['(0.5 * atr)'], {}), '(0.5 * atr)\n', (44144, 44155), True, 'import numpy as np\n'), ((54950, 54994), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['date', '"""%Y-%m-%d"""'], {}), "(date, '%Y-%m-%d')\n", (54976, 54994), False, 'import datetime\n'), ((5133, 5172), 'qtawesome.icon', 'qtawesome.icon', (['"""fa.rmb"""'], {'color': '"""white"""'}), "('fa.rmb', color='white')\n", (5147, 5172), False, 'import qtawesome\n'), ((5347, 5398), 'qtawesome.icon', 'qtawesome.icon', (['"""fa.hourglass-start"""'], {'color': '"""white"""'}), "('fa.hourglass-start', color='white')\n", (5361, 5398), False, 'import qtawesome\n'), ((5575, 5624), 'qtawesome.icon', 'qtawesome.icon', (['"""fa.hourglass-end"""'], {'color': '"""white"""'}), "('fa.hourglass-end', color='white')\n", (5589, 5624), False, 'import qtawesome\n'), ((5801, 5847), 'qtawesome.icon', 'qtawesome.icon', (['"""fa.line-chart"""'], {'color': '"""white"""'}), "('fa.line-chart', color='white')\n", (5815, 5847), False, 'import qtawesome\n'), ((6023, 6073), 'qtawesome.icon', 'qtawesome.icon', (['"""fa.check-circle-o"""'], {'color': '"""white"""'}), "('fa.check-circle-o', color='white')\n", (6037, 6073), False, 'import qtawesome\n'), ((6247, 6292), 'qtawesome.icon', 'qtawesome.icon', (['"""fa.pie-chart"""'], {'color': '"""white"""'}), "('fa.pie-chart', color='white')\n", (6261, 6292), False, 'import qtawesome\n'), ((6466, 6517), 'qtawesome.icon', 'qtawesome.icon', (['"""fa.sort-amount-asc"""'], {'color': '"""white"""'}), "('fa.sort-amount-asc', color='white')\n", 
(6480, 6517), False, 'import qtawesome\n'), ((6983, 7027), 'qtawesome.icon', 'qtawesome.icon', (['"""fa.question"""'], {'color': '"""white"""'}), "('fa.question', color='white')\n", (6997, 7027), False, 'import qtawesome\n'), ((7204, 7247), 'qtawesome.icon', 'qtawesome.icon', (['"""fa.comment"""'], {'color': '"""white"""'}), "('fa.comment', color='white')\n", (7218, 7247), False, 'import qtawesome\n'), ((7421, 7465), 'qtawesome.icon', 'qtawesome.icon', (['"""fa.envelope"""'], {'color': '"""white"""'}), "('fa.envelope', color='white')\n", (7435, 7465), False, 'import qtawesome\n'), ((14685, 14726), 'qtawesome.icon', 'qtawesome.icon', (['"""fa.repeat"""'], {'color': '"""blue"""'}), "('fa.repeat', color='blue')\n", (14699, 14726), False, 'import qtawesome\n'), ((14965, 15008), 'qtawesome.icon', 'qtawesome.icon', (['"""fa.floppy-o"""'], {'color': '"""gray"""'}), "('fa.floppy-o', color='gray')\n", (14979, 15008), False, 'import qtawesome\n'), ((15127, 15166), 'qtawesome.icon', 'qtawesome.icon', (['"""fa.times"""'], {'color': '"""red"""'}), "('fa.times', color='red')\n", (15141, 15166), False, 'import qtawesome\n'), ((37332, 37344), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (37342, 37344), True, 'import matplotlib.pyplot as plt\n'), ((37358, 37381), 'matplotlib.pyplot.title', 'plt.title', (['"""Asset-Time"""'], {}), "('Asset-Time')\n", (37367, 37381), True, 'import matplotlib.pyplot as plt\n'), ((37699, 37770), 'matplotlib.pyplot.plot', 'plt.plot', (['xs', 'l_initial'], {'linestyle': '"""--"""', 'color': '"""black"""', 'label': '"""Initial"""'}), "(xs, l_initial, linestyle='--', color='black', label='Initial')\n", (37707, 37770), True, 'import matplotlib.pyplot as plt\n'), ((37784, 37802), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time"""'], {}), "('Time')\n", (37794, 37802), True, 'import matplotlib.pyplot as plt\n'), ((37816, 37835), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Asset"""'], {}), "('Asset')\n", (37826, 37835), True, 'import 
matplotlib.pyplot as plt\n'), ((37888, 37900), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (37898, 37900), True, 'import matplotlib.pyplot as plt\n'), ((37981, 38004), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (38002, 38004), False, 'import datetime\n'), ((38184, 38240), 'matplotlib.pyplot.savefig', 'plt.savefig', (['figure_1_name'], {'dpi': '(300)', 'bbox_inches': '"""tight"""'}), "(figure_1_name, dpi=300, bbox_inches='tight')\n", (38195, 38240), True, 'import matplotlib.pyplot as plt\n'), ((38252, 38263), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (38261, 38263), True, 'import matplotlib.pyplot as plt\n'), ((38595, 38607), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (38605, 38607), True, 'import matplotlib.pyplot as plt\n'), ((38621, 38649), 'matplotlib.pyplot.title', 'plt.title', (['"""Long/Short-Time"""'], {}), "('Long/Short-Time')\n", (38630, 38649), True, 'import matplotlib.pyplot as plt\n'), ((38949, 39036), 'matplotlib.pyplot.bar', 'plt.bar', (['xs', 'long_tem'], {'linestyle': '"""-"""', 'color': '"""firebrick"""', 'linewidth': '(1)', 'label': '"""long"""'}), "(xs, long_tem, linestyle='-', color='firebrick', linewidth=1, label=\n 'long')\n", (38956, 39036), True, 'import matplotlib.pyplot as plt\n'), ((39044, 39133), 'matplotlib.pyplot.bar', 'plt.bar', (['xs', 'short_tem'], {'linestyle': '"""-"""', 'color': '"""royalblue"""', 'linewidth': '(1)', 'label': '"""short"""'}), "(xs, short_tem, linestyle='-', color='royalblue', linewidth=1, label\n ='short')\n", (39051, 39133), True, 'import matplotlib.pyplot as plt\n'), ((39141, 39214), 'matplotlib.pyplot.plot', 'plt.plot', (['xs', 'initial_bar'], {'linestyle': '"""--"""', 'color': '"""black"""', 'label': '"""Initial"""'}), "(xs, initial_bar, linestyle='--', color='black', label='Initial')\n", (39149, 39214), True, 'import matplotlib.pyplot as plt\n'), ((39228, 39246), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time"""'], {}), 
"('Time')\n", (39238, 39246), True, 'import matplotlib.pyplot as plt\n'), ((39260, 39274), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['""""""'], {}), "('')\n", (39270, 39274), True, 'import matplotlib.pyplot as plt\n'), ((39327, 39339), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (39337, 39339), True, 'import matplotlib.pyplot as plt\n'), ((39422, 39445), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (39443, 39445), False, 'import datetime\n'), ((39631, 39687), 'matplotlib.pyplot.savefig', 'plt.savefig', (['figure_2_name'], {'dpi': '(300)', 'bbox_inches': '"""tight"""'}), "(figure_2_name, dpi=300, bbox_inches='tight')\n", (39642, 39687), True, 'import matplotlib.pyplot as plt\n'), ((39701, 39712), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (39710, 39712), True, 'import matplotlib.pyplot as plt\n'), ((44096, 44113), 'numpy.mean', 'np.mean', (['atr_list'], {}), '(atr_list)\n', (44103, 44113), True, 'import numpy as np\n'), ((44554, 44571), 'numpy.max', 'np.max', (['high_list'], {}), '(high_list)\n', (44560, 44571), True, 'import numpy as np\n'), ((44597, 44613), 'numpy.min', 'np.min', (['low_list'], {}), '(low_list)\n', (44603, 44613), True, 'import numpy as np\n'), ((37449, 37538), 'matplotlib.pyplot.plot', 'plt.plot', (['xs', 'l_asset'], {'linestyle': '"""-"""', 'color': '"""firebrick"""', 'linewidth': '(1.5)', 'label': '"""Asset"""'}), "(xs, l_asset, linestyle='-', color='firebrick', linewidth=1.5,\n label='Asset')\n", (37457, 37538), True, 'import matplotlib.pyplot as plt\n'), ((37602, 37690), 'matplotlib.pyplot.plot', 'plt.plot', (['xs', 'l_index'], {'linestyle': '"""-"""', 'color': '"""royalblue"""', 'linewidth': '(1)', 'label': '"""Index"""'}), "(xs, l_index, linestyle='-', color='royalblue', linewidth=1, label=\n 'Index')\n", (37610, 37690), True, 'import matplotlib.pyplot as plt\n'), ((37849, 37858), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (37856, 37858), True, 'import matplotlib.pyplot as 
plt\n'), ((39288, 39297), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (39295, 39297), True, 'import matplotlib.pyplot as plt\n'), ((42428, 42469), 'datetime.datetime.strptime', 'datetime.datetime.strptime', (['d', '"""%Y-%m-%d"""'], {}), "(d, '%Y-%m-%d')\n", (42454, 42469), False, 'import datetime\n'), ((55047, 55073), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (55065, 55073), False, 'import datetime\n'), ((55303, 55329), 'datetime.timedelta', 'datetime.timedelta', ([], {'days': '(1)'}), '(days=1)\n', (55321, 55329), False, 'import datetime\n')] |
from datetime import datetime
from django.db import connection
from posthog.models import Person
from posthog.test.base import BaseTest
# How we expect this function to behave:
# | call | value exists | call TS is ___ existing TS | previous fn | write/override
# 1| set | no | N/A | N/A | yes
# 2| set_once | no | N/A | N/A | yes
# 3| set | yes | before | set | no
# 4| set | yes | before | set_once | yes
# 5| set | yes | after | set | yes
# 6| set | yes | after | set_once | yes
# 7| set_once | yes | before | set | no
# 8| set_once | yes | before | set_once | yes
# 9| set_once | yes | after | set | no
# 10| set_once | yes | after | set_once | no
# 11| set | yes | equal | set | no
# 12| set_once | yes | equal | set | no
# 13| set | yes | equal | set_once | yes
# 14| set_once | yes | equal | set_once | no
# Reference timestamps well outside any realistic test clock — one far in the
# future, one far in the past — serialised as ISO strings, the format the
# SQL function receives them in.
FUTURE_TIMESTAMP = datetime(2050, 1, 1, 1, 1, 1).isoformat()
PAST_TIMESTAMP = datetime(2000, 1, 1, 1, 1, 1).isoformat()
# Refers to migration 0176_update_person_props_function
# This is a Postgres function we use in the plugin server
class TestShouldUpdatePersonProp(BaseTest):
    """Exercises the `update_person_props` Postgres function (migration 0176).

    The plugin server calls this SQL function to merge property updates into a
    person row, honouring set / set_once semantics against the per-property
    timestamps and operations recorded on the row.  Each test corresponds to
    one or more rows of the truth table at the top of this module.

    Fix: the last assertion of ``test_equal_timestamps`` previously checked
    key ``"c"`` twice and never verified ``"d"`` (cases 11-14).
    """

    def _update_person_props(self, person, timestamp_expr, updates):
        """Invoke the SQL function for *person*.

        Args:
            person: The ``Person`` row to update.
            timestamp_expr: SQL expression yielding the update timestamp,
                e.g. ``"now()::text"`` or a quoted ISO literal.
            updates: Iterable of ``(operation, key, json_value)`` triples.
        """
        rows = ",\n".join(
            f"row('{op}', '{key}', '{value}'::jsonb)::person_property_update"
            for op, key, value in updates
        )
        with connection.cursor() as cursor:
            cursor.execute(
                f"""
                SELECT update_person_props(
                    {person.id},
                    {timestamp_expr},
                    array[{rows}]
                )
                """
            )

    def test_update_without_properties_last_updated_at(self):
        """No recorded timestamps: set writes, set_once is ignored."""
        person = Person.objects.create(
            team=self.team,
            properties={"a": 0, "b": 0},
            properties_last_updated_at={},
            properties_last_operation={"a": "set", "b": "set_once"},
        )

        self._update_person_props(
            person, "now()::text", [("set", "a", "1"), ("set_once", "b", "1")]
        )
        updated_person = Person.objects.get(id=person.id)

        # don't update the set_once call
        self.assertEqual(updated_person.properties, {"a": 1, "b": 0})
        self.assertEqual(updated_person.properties_last_operation, {"a": "set", "b": "set_once"})
        self.assertIsNotNone(updated_person.properties_last_updated_at["a"])

    def test_update_without_properties_last_operation(self):
        """No recorded operations: set writes, set_once is ignored."""
        person = Person.objects.create(
            team=self.team,
            properties={"a": 0, "b": 0},
            properties_last_updated_at={"a": FUTURE_TIMESTAMP, "b": FUTURE_TIMESTAMP},
            properties_last_operation={},
        )

        self._update_person_props(
            person, "now()::text", [("set", "a", "1"), ("set_once", "b", "1")]
        )
        updated_person = Person.objects.get(id=person.id)

        # don't update the set_once call
        self.assertEqual(updated_person.properties, {"a": 1, "b": 0})
        self.assertEqual(updated_person.properties_last_operation, {"a": "set"})
        self.assertNotEqual(updated_person.properties_last_updated_at["a"], FUTURE_TIMESTAMP)

    # tests cases 1 and 2 from the table
    def test_update_non_existent_prop(self):
        """Fresh keys are always written, whatever the operation."""
        person = Person.objects.create(
            team=self.team, properties={}, properties_last_updated_at={}, properties_last_operation={}
        )

        self._update_person_props(
            person, "now()::text", [("set", "a", "1"), ("set_once", "b", "1")]
        )
        updated_person = Person.objects.get(id=person.id)

        # both updated
        self.assertEqual(updated_person.properties, {"a": 1, "b": 1})
        self.assertEqual(updated_person.properties_last_operation, {"a": "set", "b": "set_once"})
        self.assertIsNotNone(updated_person.properties_last_updated_at["a"])
        self.assertIsNotNone(updated_person.properties_last_updated_at["b"])

    # tests cases 3 and 4 from the table
    def test_set_operation_with_earlier_timestamp(self):
        """set vs newer existing value: only overrides a previous set_once."""
        person = Person.objects.create(
            team=self.team,
            properties={"a": 0, "b": 0},
            properties_last_updated_at={"a": FUTURE_TIMESTAMP, "b": FUTURE_TIMESTAMP},
            properties_last_operation={"a": "set", "b": "set_once"},
        )

        self._update_person_props(
            person, "now()::text", [("set", "a", "1"), ("set", "b", "1")]
        )
        updated_person = Person.objects.get(id=person.id)

        # b updated
        self.assertEqual(updated_person.properties, {"a": 0, "b": 1})
        self.assertEqual(updated_person.properties_last_operation, {"a": "set", "b": "set"})
        self.assertEqual(updated_person.properties_last_updated_at["a"], FUTURE_TIMESTAMP)
        self.assertNotEqual(updated_person.properties_last_updated_at["b"], FUTURE_TIMESTAMP)

    # tests cases 5 and 6 from the table
    def test_set_operation_with_older_timestamp(self):
        """set vs older existing value: always overrides."""
        person = Person.objects.create(
            team=self.team,
            properties={"a": 0, "b": 0},
            properties_last_updated_at={"a": PAST_TIMESTAMP, "b": PAST_TIMESTAMP},
            properties_last_operation={"a": "set", "b": "set_once"},
        )

        self._update_person_props(
            person, "now()::text", [("set", "a", "1"), ("set", "b", "1")]
        )
        updated_person = Person.objects.get(id=person.id)

        # both updated
        self.assertEqual(updated_person.properties, {"a": 1, "b": 1})
        self.assertEqual(updated_person.properties_last_operation, {"a": "set", "b": "set"})
        self.assertNotEqual(updated_person.properties_last_updated_at["a"], PAST_TIMESTAMP)
        self.assertNotEqual(updated_person.properties_last_updated_at["b"], PAST_TIMESTAMP)

    # tests cases 7 and 8 from the table
    def test_set_once_operation_with_earlier_timestamp(self):
        """set_once vs newer existing value: only overrides a previous set_once."""
        person = Person.objects.create(
            team=self.team,
            properties={"a": 0, "b": 0},
            properties_last_updated_at={"a": FUTURE_TIMESTAMP, "b": FUTURE_TIMESTAMP},
            properties_last_operation={"a": "set", "b": "set_once"},
        )

        self._update_person_props(
            person, "now()::text", [("set_once", "a", "1"), ("set_once", "b", "1")]
        )
        updated_person = Person.objects.get(id=person.id)

        # b updated
        self.assertEqual(updated_person.properties, {"a": 0, "b": 1})
        self.assertEqual(updated_person.properties_last_operation, {"a": "set", "b": "set_once"})
        self.assertEqual(updated_person.properties_last_updated_at["a"], FUTURE_TIMESTAMP)
        self.assertNotEqual(updated_person.properties_last_updated_at["b"], FUTURE_TIMESTAMP)

    # tests cases 9 and 10 from the table
    def test_set_once_operation_with_older_timestamp(self):
        """set_once vs older existing value: never overrides."""
        person = Person.objects.create(
            team=self.team,
            properties={"a": 0, "b": 0},
            properties_last_updated_at={"a": PAST_TIMESTAMP, "b": PAST_TIMESTAMP},
            properties_last_operation={"a": "set", "b": "set_once"},
        )

        self._update_person_props(
            person, "now()::text", [("set_once", "a", "1"), ("set_once", "b", "1")]
        )
        updated_person = Person.objects.get(id=person.id)

        # neither updated
        self.assertEqual(updated_person.properties, {"a": 0, "b": 0})
        self.assertEqual(updated_person.properties_last_operation, {"a": "set", "b": "set_once"})
        self.assertEqual(updated_person.properties_last_updated_at["a"], PAST_TIMESTAMP)
        self.assertEqual(updated_person.properties_last_updated_at["b"], PAST_TIMESTAMP)

    # tests cases 11-14 from the table
    def test_equal_timestamps(self):
        """Equal timestamps: only set-over-set_once overrides."""
        timestamp = PAST_TIMESTAMP
        person = Person.objects.create(
            team=self.team,
            properties={"a": 0, "b": 0, "c": 0, "d": 0},
            properties_last_updated_at={"a": timestamp, "b": timestamp, "c": timestamp, "d": timestamp},
            properties_last_operation={"a": "set", "b": "set", "c": "set_once", "d": "set_once"},
        )

        self._update_person_props(
            person,
            f"'{timestamp}'",
            [
                ("set", "a", "1"),
                ("set_once", "b", "1"),
                ("set", "c", "1"),
                ("set_once", "d", "1"),
            ],
        )
        updated_person = Person.objects.get(id=person.id)

        # update if current op is set and last op is set_once i.e. "c"
        self.assertEqual(updated_person.properties, {"a": 0, "b": 0, "c": 1, "d": 0})
        self.assertEqual(
            updated_person.properties_last_operation, {"a": "set", "b": "set", "c": "set", "d": "set_once"}
        )  # c changed
        self.assertEqual(updated_person.properties_last_updated_at["a"], PAST_TIMESTAMP)
        self.assertEqual(updated_person.properties_last_updated_at["b"], PAST_TIMESTAMP)
        self.assertEqual(updated_person.properties_last_updated_at["c"], PAST_TIMESTAMP)
        # Bug fix: this previously re-checked "c" and never verified "d".
        self.assertEqual(updated_person.properties_last_updated_at["d"], PAST_TIMESTAMP)
| [
"datetime.datetime",
"django.db.connection.cursor",
"posthog.models.Person.objects.create",
"posthog.models.Person.objects.get"
] | [((1405, 1434), 'datetime.datetime', 'datetime', (['(2050)', '(1)', '(1)', '(1)', '(1)', '(1)'], {}), '(2050, 1, 1, 1, 1, 1)\n', (1413, 1434), False, 'from datetime import datetime\n'), ((1464, 1493), 'datetime.datetime', 'datetime', (['(2000)', '(1)', '(1)', '(1)', '(1)', '(1)'], {}), '(2000, 1, 1, 1, 1, 1)\n', (1472, 1493), False, 'from datetime import datetime\n'), ((1744, 1906), 'posthog.models.Person.objects.create', 'Person.objects.create', ([], {'team': 'self.team', 'properties': "{'a': 0, 'b': 0}", 'properties_last_updated_at': '{}', 'properties_last_operation': "{'a': 'set', 'b': 'set_once'}"}), "(team=self.team, properties={'a': 0, 'b': 0},\n properties_last_updated_at={}, properties_last_operation={'a': 'set',\n 'b': 'set_once'})\n", (1765, 1906), False, 'from posthog.models import Person\n'), ((2438, 2470), 'posthog.models.Person.objects.get', 'Person.objects.get', ([], {'id': 'person.id'}), '(id=person.id)\n', (2456, 2470), False, 'from posthog.models import Person\n'), ((2832, 3011), 'posthog.models.Person.objects.create', 'Person.objects.create', ([], {'team': 'self.team', 'properties': "{'a': 0, 'b': 0}", 'properties_last_updated_at': "{'a': FUTURE_TIMESTAMP, 'b': FUTURE_TIMESTAMP}", 'properties_last_operation': '{}'}), "(team=self.team, properties={'a': 0, 'b': 0},\n properties_last_updated_at={'a': FUTURE_TIMESTAMP, 'b':\n FUTURE_TIMESTAMP}, properties_last_operation={})\n", (2853, 3011), False, 'from posthog.models import Person\n'), ((3544, 3576), 'posthog.models.Person.objects.get', 'Person.objects.get', ([], {'id': 'person.id'}), '(id=person.id)\n', (3562, 3576), False, 'from posthog.models import Person\n'), ((3963, 4080), 'posthog.models.Person.objects.create', 'Person.objects.create', ([], {'team': 'self.team', 'properties': '{}', 'properties_last_updated_at': '{}', 'properties_last_operation': '{}'}), '(team=self.team, properties={},\n properties_last_updated_at={}, properties_last_operation={})\n', (3984, 4080), False, 'from 
posthog.models import Person\n'), ((4583, 4615), 'posthog.models.Person.objects.get', 'Person.objects.get', ([], {'id': 'person.id'}), '(id=person.id)\n', (4601, 4615), False, 'from posthog.models import Person\n'), ((5080, 5286), 'posthog.models.Person.objects.create', 'Person.objects.create', ([], {'team': 'self.team', 'properties': "{'a': 0, 'b': 0}", 'properties_last_updated_at': "{'a': FUTURE_TIMESTAMP, 'b': FUTURE_TIMESTAMP}", 'properties_last_operation': "{'a': 'set', 'b': 'set_once'}"}), "(team=self.team, properties={'a': 0, 'b': 0},\n properties_last_updated_at={'a': FUTURE_TIMESTAMP, 'b':\n FUTURE_TIMESTAMP}, properties_last_operation={'a': 'set', 'b': 'set_once'})\n", (5101, 5286), False, 'from posthog.models import Person\n'), ((5813, 5845), 'posthog.models.Person.objects.get', 'Person.objects.get', ([], {'id': 'person.id'}), '(id=person.id)\n', (5831, 5845), False, 'from posthog.models import Person\n'), ((6331, 6533), 'posthog.models.Person.objects.create', 'Person.objects.create', ([], {'team': 'self.team', 'properties': "{'a': 0, 'b': 0}", 'properties_last_updated_at': "{'a': PAST_TIMESTAMP, 'b': PAST_TIMESTAMP}", 'properties_last_operation': "{'a': 'set', 'b': 'set_once'}"}), "(team=self.team, properties={'a': 0, 'b': 0},\n properties_last_updated_at={'a': PAST_TIMESTAMP, 'b': PAST_TIMESTAMP},\n properties_last_operation={'a': 'set', 'b': 'set_once'})\n", (6352, 6533), False, 'from posthog.models import Person\n'), ((7061, 7093), 'posthog.models.Person.objects.get', 'Person.objects.get', ([], {'id': 'person.id'}), '(id=person.id)\n', (7079, 7093), False, 'from posthog.models import Person\n'), ((7586, 7792), 'posthog.models.Person.objects.create', 'Person.objects.create', ([], {'team': 'self.team', 'properties': "{'a': 0, 'b': 0}", 'properties_last_updated_at': "{'a': FUTURE_TIMESTAMP, 'b': FUTURE_TIMESTAMP}", 'properties_last_operation': "{'a': 'set', 'b': 'set_once'}"}), "(team=self.team, properties={'a': 0, 'b': 0},\n 
properties_last_updated_at={'a': FUTURE_TIMESTAMP, 'b':\n FUTURE_TIMESTAMP}, properties_last_operation={'a': 'set', 'b': 'set_once'})\n", (7607, 7792), False, 'from posthog.models import Person\n'), ((8330, 8362), 'posthog.models.Person.objects.get', 'Person.objects.get', ([], {'id': 'person.id'}), '(id=person.id)\n', (8348, 8362), False, 'from posthog.models import Person\n'), ((8857, 9059), 'posthog.models.Person.objects.create', 'Person.objects.create', ([], {'team': 'self.team', 'properties': "{'a': 0, 'b': 0}", 'properties_last_updated_at': "{'a': PAST_TIMESTAMP, 'b': PAST_TIMESTAMP}", 'properties_last_operation': "{'a': 'set', 'b': 'set_once'}"}), "(team=self.team, properties={'a': 0, 'b': 0},\n properties_last_updated_at={'a': PAST_TIMESTAMP, 'b': PAST_TIMESTAMP},\n properties_last_operation={'a': 'set', 'b': 'set_once'})\n", (8878, 9059), False, 'from posthog.models import Person\n'), ((9597, 9629), 'posthog.models.Person.objects.get', 'Person.objects.get', ([], {'id': 'person.id'}), '(id=person.id)\n', (9615, 9629), False, 'from posthog.models import Person\n'), ((10134, 10407), 'posthog.models.Person.objects.create', 'Person.objects.create', ([], {'team': 'self.team', 'properties': "{'a': 0, 'b': 0, 'c': 0, 'd': 0}", 'properties_last_updated_at': "{'a': timestamp, 'b': timestamp, 'c': timestamp, 'd': timestamp}", 'properties_last_operation': "{'a': 'set', 'b': 'set', 'c': 'set_once', 'd': 'set_once'}"}), "(team=self.team, properties={'a': 0, 'b': 0, 'c': 0,\n 'd': 0}, properties_last_updated_at={'a': timestamp, 'b': timestamp,\n 'c': timestamp, 'd': timestamp}, properties_last_operation={'a': 'set',\n 'b': 'set', 'c': 'set_once', 'd': 'set_once'})\n", (10155, 10407), False, 'from posthog.models import Person\n'), ((11096, 11128), 'posthog.models.Person.objects.get', 'Person.objects.get', ([], {'id': 'person.id'}), '(id=person.id)\n', (11114, 11128), False, 'from posthog.models import Person\n'), ((1972, 1991), 'django.db.connection.cursor', 
'connection.cursor', ([], {}), '()\n', (1989, 1991), False, 'from django.db import connection\n'), ((3078, 3097), 'django.db.connection.cursor', 'connection.cursor', ([], {}), '()\n', (3095, 3097), False, 'from django.db import connection\n'), ((4113, 4132), 'django.db.connection.cursor', 'connection.cursor', ([], {}), '()\n', (4130, 4132), False, 'from django.db import connection\n'), ((5352, 5371), 'django.db.connection.cursor', 'connection.cursor', ([], {}), '()\n', (5369, 5371), False, 'from django.db import connection\n'), ((6600, 6619), 'django.db.connection.cursor', 'connection.cursor', ([], {}), '()\n', (6617, 6619), False, 'from django.db import connection\n'), ((7859, 7878), 'django.db.connection.cursor', 'connection.cursor', ([], {}), '()\n', (7876, 7878), False, 'from django.db import connection\n'), ((9126, 9145), 'django.db.connection.cursor', 'connection.cursor', ([], {}), '()\n', (9143, 9145), False, 'from django.db import connection\n'), ((10469, 10488), 'django.db.connection.cursor', 'connection.cursor', ([], {}), '()\n', (10486, 10488), False, 'from django.db import connection\n')] |
from setuptools import setup, find_packages

# The long description rendered on PyPI is the README, verbatim.
with open("README.md", "r") as readme:
    long_description = readme.read()

setup(
    name='aif360',
    version='0.1.0',
    description='IBM AI Fairness 360',
    author='aif360 developers',
    author_email='<EMAIL>',
    url='https://github.com/IBM/AIF360',
    long_description=long_description,
    long_description_content_type='text/markdown',
    license='Apache License 2.0',
    packages=find_packages(),
    # python_requires='>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <3.7',
    install_requires=[
        'numpy',
        'scipy',
        'pandas==0.23.3',
        'scikit-learn',
        'numba',
    ],
    include_package_data=True,
    zip_safe=False,
)
| [
"setuptools.find_packages"
] | [((451, 466), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (464, 466), False, 'from setuptools import setup, find_packages\n')] |
from __future__ import annotations
import lzma, pickle
from typing import TYPE_CHECKING
from numpy import e
from tcod.console import Console
from tcod.map import compute_fov
import exceptions, render_functions
from message_log import MessageLog
if TYPE_CHECKING:
from entity import Actor
from game_map import GameMap, GameWorld
class Engine:
    """Top-level game state: owns the player, the active map, and the UI log."""

    game_map: GameMap
    game_world: GameWorld

    def __init__(self, player: Actor):
        """Create an engine driving *player*; the map is attached elsewhere."""
        self.message_log = MessageLog()
        self.mouse_location = (0, 0)
        self.player = player

    def handle_enemy_turns(self) -> None:
        """Give every AI-controlled actor (everyone except the player) one turn."""
        npcs = set(self.game_map.actors) - {self.player}
        for npc in npcs:
            if not npc.ai:
                continue
            try:
                npc.ai.perform()
            except exceptions.Impossible:
                pass  # An impossible action simply wastes the NPC's turn.

    def update_fov(self) -> None:
        """Recompute the tiles visible from the player and mark them explored."""
        origin = (self.player.x, self.player.y)
        self.game_map.visible[:] = compute_fov(
            self.game_map.tiles["transparent"], origin, radius=8
        )
        # Whatever is visible now stays remembered as explored.
        self.game_map.explored |= self.game_map.visible

    def render(self, console: Console) -> None:
        """Draw the map, the message log, and the HUD widgets onto *console*."""
        self.game_map.render(console)
        self.message_log.render(console=console, x=21, y=45, width=40, height=5)
        fighter = self.player.fighter
        render_functions.render_bar(
            console=console,
            current_val=fighter.hp,
            max_val=fighter.max_hp,
            total_width=20,
        )
        render_functions.render_level(
            console=console,
            dungeon_level=self.game_world.current_floor,
            location=(0, 47),
        )
        render_functions.render_name_at_location(console=console, x=21, y=44, engine=self)

    def save_as(self, filename: str) -> None:
        """Pickle this engine, compress it with LZMA, and write it to *filename*."""
        with open(filename, "wb") as f:
            f.write(lzma.compress(pickle.dumps(self)))
| [
"render_functions.render_bar",
"pickle.dumps",
"tcod.map.compute_fov",
"render_functions.render_level",
"message_log.MessageLog",
"render_functions.render_name_at_location"
] | [((471, 483), 'message_log.MessageLog', 'MessageLog', ([], {}), '()\n', (481, 483), False, 'from message_log import MessageLog\n'), ((894, 988), 'tcod.map.compute_fov', 'compute_fov', (["self.game_map.tiles['transparent']", '(self.player.x, self.player.y)'], {'radius': '(8)'}), "(self.game_map.tiles['transparent'], (self.player.x, self.player\n .y), radius=8)\n", (905, 988), False, 'from tcod.map import compute_fov\n'), ((1266, 1403), 'render_functions.render_bar', 'render_functions.render_bar', ([], {'console': 'console', 'current_val': 'self.player.fighter.hp', 'max_val': 'self.player.fighter.max_hp', 'total_width': '(20)'}), '(console=console, current_val=self.player.\n fighter.hp, max_val=self.player.fighter.max_hp, total_width=20)\n', (1293, 1403), False, 'import exceptions, render_functions\n'), ((1415, 1529), 'render_functions.render_level', 'render_functions.render_level', ([], {'console': 'console', 'dungeon_level': 'self.game_world.current_floor', 'location': '(0, 47)'}), '(console=console, dungeon_level=self.\n game_world.current_floor, location=(0, 47))\n', (1444, 1529), False, 'import exceptions, render_functions\n'), ((1533, 1619), 'render_functions.render_name_at_location', 'render_functions.render_name_at_location', ([], {'console': 'console', 'x': '(21)', 'y': '(44)', 'engine': 'self'}), '(console=console, x=21, y=44,\n engine=self)\n', (1573, 1619), False, 'import exceptions, render_functions\n'), ((1697, 1715), 'pickle.dumps', 'pickle.dumps', (['self'], {}), '(self)\n', (1709, 1715), False, 'import lzma, pickle\n')] |
import tensorflow as tf
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Dense
from activations.activations import tan_sigmoid, exponential, ReLU
class FFNN(Model):
    """A generic two-layer feedforward network.

    The hidden layer width matches the input feature dimension; the single
    exponential output unit is summed over the last axis in ``call``.
    """

    def __init__(self):
        super().__init__()

    def build(self, input_shape):
        # Hidden width mirrors the trailing (feature) dimension of the input.
        self.dense1 = Dense(units=input_shape[-1], activation=ReLU)
        self.output_layer = Dense(units=1, activation=exponential)

    def call(self, inputs):
        hidden = self.dense1(inputs)
        out = self.output_layer(hidden)
        return tf.reduce_sum(out, axis=-1)
| [
"tensorflow.reduce_sum",
"tensorflow.keras.layers.Dense"
] | [((379, 424), 'tensorflow.keras.layers.Dense', 'Dense', ([], {'units': 'input_shape[-1]', 'activation': 'ReLU'}), '(units=input_shape[-1], activation=ReLU)\n', (384, 424), False, 'from tensorflow.keras.layers import Dense\n'), ((453, 491), 'tensorflow.keras.layers.Dense', 'Dense', ([], {'units': '(1)', 'activation': 'exponential'}), '(units=1, activation=exponential)\n', (458, 491), False, 'from tensorflow.keras.layers import Dense\n'), ((605, 630), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['x'], {'axis': '(-1)'}), '(x, axis=-1)\n', (618, 630), True, 'import tensorflow as tf\n')] |
# Split the VOC-style image folder into COCO train/val directories.
import os
import random
import shutil

train_percent = 0.9  # fraction of images copied into the training split

imgfilepath = '../myData/JPEGImages'  # source image directory
train_dir = "./myData/coco/images/train"
val_dir = "./myData/coco/images/val"

total_img = os.listdir(imgfilepath)
sample_num = len(total_img)
# Draw the training subset; a set makes each membership test below O(1)
# instead of scanning the sample list once per file.
trains = set(random.sample(total_img, int(sample_num * train_percent)))

for file in total_img:
    dest_dir = train_dir if file in trains else val_dir
    shutil.copy(os.path.join(imgfilepath, file), os.path.join(dest_dir, file))
    print(file)
| [
"os.listdir",
"os.path.join"
] | [((157, 180), 'os.listdir', 'os.listdir', (['imgfilepath'], {}), '(imgfilepath)\n', (167, 180), False, 'import os\n'), ((342, 373), 'os.path.join', 'os.path.join', (['imgfilepath', 'file'], {}), '(imgfilepath, file)\n', (354, 373), False, 'import os\n'), ((439, 470), 'os.path.join', 'os.path.join', (['imgfilepath', 'file'], {}), '(imgfilepath, file)\n', (451, 470), False, 'import os\n')] |
"""Example SciUnit model classes."""
import random
from sciunit.models import Model
from sciunit.capabilities import ProducesNumber
from sciunit.utils import class_intern, method_cache
from sciunit.utils import method_memoize # Decorator for caching of capability method results.
from typing import Union
class ConstModel(Model, ProducesNumber):
    """A model whose produced number is always one fixed constant."""

    def __init__(self, constant: Union[int, float], name: str = None):
        """Remember *constant* and register it with the base model."""
        self.constant = constant
        super().__init__(name=name, constant=constant)

    def produce_number(self) -> Union[int, float]:
        """Return the constant this model was constructed with."""
        return self.constant
class UniformModel(Model, ProducesNumber):
    """A model producing a number drawn uniformly from the interval [a, b]."""

    def __init__(self, a, b, name=None):
        """Record the interval endpoints *a* and *b*."""
        self.a = a
        self.b = b
        super().__init__(name=name, a=a, b=b)

    def produce_number(self) -> float:
        """Draw one uniform sample from [a, b].

        Returns:
            float: A value between a and b.
        """
        return random.uniform(self.a, self.b)
################################################################
# Here are several examples of caching and sharing can be used
# to reduce the computational load of testing.
################################################################
class UniqueRandomNumberModel(Model, ProducesNumber):
    """Example ProducesNumber model: a fresh random number on every call."""

    def produce_number(self) -> float:
        """Return a new uniform random number in [0, 1) each time.

        Returns:
            float: The freshly drawn random number.
        """
        return random.random()
class RepeatedRandomNumberModel(Model, ProducesNumber):
    """Example model whose memoized method repeats its first random draw."""

    @method_memoize
    def produce_number(self):
        """Return the same random number on every call.

        The first call draws a random value; memoization then replays it,
        ensuring reproducibility and eliminating recomputation.

        Returns:
            float: The (cached) random number.
        """
        return random.random()
@class_intern
class SharedModel(Model):
    """A model class whose instances are interned by constructor arguments.

    Each time it is instantiated with the same parameters, the same instance
    at the same location in memory is returned.  Attributes should not be set
    post-instantiation unless the goal is to set those attributes on all
    models of this class.
    """
    pass
class PersistentUniformModel(UniformModel):
    """A uniform model that draws once in run() and replays that value."""

    def run(self) -> None:
        """Draw a sample from [a, b] and stash it for produce_number()."""
        sample = random.uniform(self.a, self.b)
        self._x = sample

    def produce_number(self) -> float:
        """Return the value stored by the most recent run() call."""
        return self._x
class CacheByInstancePersistentUniformModel(PersistentUniformModel):
    """Persistent uniform model whose output is cached per model instance."""
    @method_cache(by='instance', method='run')
    def produce_number(self) -> float:
        # Returns the value drawn by the (cached) run() invocation.
        return self._x
class CacheByValuePersistentUniformModel(PersistentUniformModel):
    """Persistent uniform model whose output is cached by the run() value."""
    @method_cache(by='value', method='run')
    def produce_number(self) -> float:
        # Returns the value drawn by the (cached) run() invocation.
        return self._x
| [
"random.random",
"random.uniform",
"sciunit.utils.method_cache"
] | [((2810, 2851), 'sciunit.utils.method_cache', 'method_cache', ([], {'by': '"""instance"""', 'method': '"""run"""'}), "(by='instance', method='run')\n", (2822, 2851), False, 'from sciunit.utils import class_intern, method_cache\n'), ((3003, 3041), 'sciunit.utils.method_cache', 'method_cache', ([], {'by': '"""value"""', 'method': '"""run"""'}), "(by='value', method='run')\n", (3015, 3041), False, 'from sciunit.utils import class_intern, method_cache\n'), ((1137, 1167), 'random.uniform', 'random.uniform', (['self.a', 'self.b'], {}), '(self.a, self.b)\n', (1151, 1167), False, 'import random\n'), ((1717, 1732), 'random.random', 'random.random', ([], {}), '()\n', (1730, 1732), False, 'import random\n'), ((2177, 2192), 'random.random', 'random.random', ([], {}), '()\n', (2190, 2192), False, 'import random\n'), ((2624, 2654), 'random.uniform', 'random.uniform', (['self.a', 'self.b'], {}), '(self.a, self.b)\n', (2638, 2654), False, 'import random\n')] |
from sentence_transformers import SentenceTransformer
from semantic.config import CONFIG
# Module-level singleton: the embedding model named in the config is loaded
# once at import time and shared by every caller of this module.
model = SentenceTransformer(CONFIG["model_name"])
| [
"sentence_transformers.SentenceTransformer"
] | [((98, 139), 'sentence_transformers.SentenceTransformer', 'SentenceTransformer', (["CONFIG['model_name']"], {}), "(CONFIG['model_name'])\n", (117, 139), False, 'from sentence_transformers import SentenceTransformer\n')] |
from tensorflow.keras.models import Model
from tensorflow.keras.layers import Dense, Flatten, Dropout, Input
from tensorflow.keras.layers import MaxPooling1D, Conv1D
from tensorflow.keras.layers import LSTM, Bidirectional
from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add
import numpy as np
import math
def get_model(model_name, input_shape, nb_class):
    """Build the named time-series classification model.

    Args:
        model_name: One of "vgg", "lstm1", "lstm", "lstm2", "blstm1",
            "blstm2", "lstmfcn", "resnet", "mlp" or "lenet".
        input_shape: Shape of one input sample, e.g. (timesteps, channels).
        nb_class: Number of output classes.

    Returns:
        The uncompiled Keras Model produced by the matching builder.

    Raises:
        ValueError: If *model_name* matches no known architecture.  The
            previous code only printed a message and then crashed with an
            UnboundLocalError on the final ``return model``.
    """
    if model_name == "vgg":
        model = cnn_vgg(input_shape, nb_class)
    elif model_name == "lstm1":
        model = lstm1(input_shape, nb_class)
    elif model_name == "lstm":
        model = lstm1v0(input_shape, nb_class)
    elif model_name == "lstm2":
        model = lstm2(input_shape, nb_class)
    elif model_name == "blstm1":
        model = blstm1(input_shape, nb_class)
    elif model_name == "blstm2":
        model = blstm2(input_shape, nb_class)
    elif model_name == "lstmfcn":
        model = lstm_fcn(input_shape, nb_class)
    elif model_name == "resnet":
        model = cnn_resnet(input_shape, nb_class)
    elif model_name == "mlp":
        model = mlp4(input_shape, nb_class)
    elif model_name == "lenet":
        model = cnn_lenet(input_shape, nb_class)
    else:
        raise ValueError("model name missing: %r" % model_name)
    return model
def mlp4(input_shape, nb_class):
    """Four-layer MLP baseline classifier.

    Architecture from "Time Series Classification from Scratch with Deep
    Neural Networks: A Strong Baseline", IJCNN 2017, pp. 1578-1585.
    """
    inputs = Input(shape=input_shape)
    x = Flatten()(inputs)
    x = Dropout(0.1)(x)
    # three 500-unit hidden layers; dropout rises to 0.3 before the head
    for rate in (0.2, 0.2, 0.3):
        x = Dense(500, activation='relu')(x)
        x = Dropout(rate)(x)
    out = Dense(nb_class, activation='softmax')(x)
    model = Model([inputs], [out])
    model.summary()
    return model
def cnn_lenet(input_shape, nb_class):
    """LeNet-style 1-D CNN ("Gradient-based learning applied to document
    recognition," Proc. IEEE 86(11):2278-2324, 1998).

    The number of conv/pool stages scales with log2 of the input length.
    """
    inputs = Input(shape=input_shape)
    n_stages = int(round(math.log(input_shape[0], 2))-3)
    print("pooling layers: %d"%n_stages)
    x = inputs
    for stage in range(n_stages):
        # filter count grows by 10 per stage, starting at 6
        x = Conv1D(6+10*stage, 3, padding='same', activation="relu", kernel_initializer='he_uniform')(x)
        x = MaxPooling1D(pool_size=2)(x)
    x = Flatten()(x)
    x = Dense(120, activation='relu')(x)
    x = Dropout(0.5)(x)
    x = Dense(84, activation='relu')(x)
    x = Dropout(0.5)(x)
    out = Dense(nb_class, activation='softmax')(x)
    model = Model([inputs], [out])
    model.summary()
    return model
def cnn_vgg(input_shape, nb_class):
    """VGG-style 1-D CNN ("Very deep convolutional networks for large-scale
    image recognition," arXiv:1409.1556).

    Stage depth scales with log2 of the input length; filter counts double
    per stage, capped at 512. Stages beyond the second use three convs.
    """
    inputs = Input(shape=input_shape)
    n_stages = int(round(math.log(input_shape[0], 2))-3)
    print("pooling layers: %d"%n_stages)
    x = inputs
    for stage in range(n_stages):
        filters = min(64*2**stage, 512)
        convs_in_stage = 3 if stage > 1 else 2
        for _ in range(convs_in_stage):
            x = Conv1D(filters, 3, padding='same', activation="relu", kernel_initializer='he_uniform')(x)
        x = MaxPooling1D(pool_size=2)(x)
    x = Flatten()(x)
    x = Dense(4096, activation='relu')(x)
    x = Dropout(0.5)(x)
    x = Dense(4096, activation='relu')(x)
    x = Dropout(0.5)(x)
    out = Dense(nb_class, activation='softmax')(x)
    model = Model([inputs], [out])
    model.summary()
    return model
def lstm1v0(input_shape, nb_class):
    """Single 512-unit LSTM classifier (original LSTM formulation,
    Neural Computation 9(8):1735-1780, 1997)."""
    inputs = Input(shape=input_shape)
    hidden = LSTM(512)(inputs)
    out = Dense(nb_class, activation='softmax')(hidden)
    model = Model([inputs], [out])
    model.summary()
    return model
def lstm1(input_shape, nb_class):
    """Single 100-unit LSTM classifier.

    Unit count follows "Optimal hyperparameters for deep lstm-networks for
    sequence labeling tasks", arXiv:1707.06799.
    """
    inputs = Input(shape=input_shape)
    hidden = LSTM(100)(inputs)
    out = Dense(nb_class, activation='softmax')(hidden)
    model = Model([inputs], [out])
    model.summary()
    return model
def lstm2(input_shape, nb_class):
    """Two stacked 100-unit LSTM layers followed by a softmax head."""
    inputs = Input(shape=input_shape)
    # first layer must emit the full sequence so the second LSTM can consume it
    hidden = LSTM(100, return_sequences=True)(inputs)
    hidden = LSTM(100)(hidden)
    out = Dense(nb_class, activation='softmax')(hidden)
    model = Model([inputs], [out])
    model.summary()
    return model
def blstm1(input_shape, nb_class):
    """Single bidirectional 100-unit LSTM classifier.

    Bidirectional RNN per Schuster/Paliwal-style architecture (IEEE TSP
    45(11):2673-2681, 1997); unit count per arXiv:1707.06799.
    """
    inputs = Input(shape=input_shape)
    hidden = Bidirectional(LSTM(100))(inputs)
    out = Dense(nb_class, activation='softmax')(hidden)
    model = Model([inputs], [out])
    model.summary()
    return model
def blstm2(input_shape, nb_class):
    """Two stacked bidirectional 100-unit LSTM layers with a softmax head."""
    inputs = Input(shape=input_shape)
    # the first bidirectional layer returns sequences for the second to consume
    hidden = Bidirectional(LSTM(100, return_sequences=True))(inputs)
    hidden = Bidirectional(LSTM(100))(hidden)
    out = Dense(nb_class, activation='softmax')(hidden)
    model = Model([inputs], [out])
    model.summary()
    return model
def lstm_fcn(input_shape, nb_class):
    """LSTM-FCN: parallel LSTM and fully-convolutional branches, concatenated.

    "LSTM Fully Convolutional Networks for Time Series Classification,"
    IEEE Access, vol. 6, pp. 1662-1669, 2018.
    """
    inputs = Input(shape=input_shape)

    # LSTM branch: the Permute turns the input into a one-time-step
    # multivariate sequence, as described in the paper.
    recurrent = Permute((2, 1))(inputs)
    recurrent = LSTM(128)(recurrent)
    recurrent = Dropout(0.8)(recurrent)

    def conv_block(tensor, filters, width):
        # Conv -> BatchNorm -> ReLU, the FCN building block
        tensor = Conv1D(filters, width, padding='same', kernel_initializer='he_uniform')(tensor)
        tensor = BatchNormalization()(tensor)
        return Activation('relu')(tensor)

    features = conv_block(inputs, 128, 8)
    features = conv_block(features, 256, 5)
    features = conv_block(features, 128, 3)
    features = GlobalAveragePooling1D()(features)

    merged = concatenate([recurrent, features])
    out = Dense(nb_class, activation='softmax')(merged)
    model = Model([inputs], [out])
    model.summary()
    return model
def cnn_resnet(input_shape, nb_class):
    """Three-stage 1-D residual network.

    "Data augmentation using synthetic data for time series classification
    with deep residual networks," AALTD workshop @ ECML/PKDD 2018.
    """
    inputs = Input(shape=input_shape)

    def conv_bn_relu(tensor, filters, width):
        # Conv -> BatchNorm -> ReLU, the residual-stage building block
        tensor = Conv1D(filters, width, padding='same', kernel_initializer="glorot_uniform")(tensor)
        tensor = BatchNormalization()(tensor)
        return Activation('relu')(tensor)

    x = inputs
    shortcut = inputs
    for stage, filters in enumerate([64, 128, 128]):
        x = conv_bn_relu(x, filters, 8)
        x = conv_bn_relu(x, filters, 5)
        x = conv_bn_relu(x, filters, 3)
        if stage < 2:
            # expand the shortcut's channel count to match (per Fawaz et al.)
            shortcut = Conv1D(filters, 1, padding='same', kernel_initializer="glorot_uniform")(shortcut)
            shortcut = BatchNormalization()(shortcut)
        x = add([shortcut, x])
        x = Activation('relu')(x)
        shortcut = x

    pooled = GlobalAveragePooling1D()(x)
    out = Dense(nb_class, activation='softmax')(pooled)
    model = Model([inputs], [out])
    model.summary()
    return model
"tensorflow.keras.layers.Input",
"tensorflow.keras.layers.Permute",
"tensorflow.keras.layers.Dropout",
"tensorflow.keras.layers.add",
"tensorflow.keras.layers.BatchNormalization",
"tensorflow.keras.layers.concatenate",
"tensorflow.keras.layers.LSTM",
"tensorflow.keras.layers.MaxPooling1D",
"tensorfl... | [((1491, 1515), 'tensorflow.keras.layers.Input', 'Input', ([], {'shape': 'input_shape'}), '(shape=input_shape)\n', (1496, 1515), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((1874, 1892), 'tensorflow.keras.models.Model', 'Model', (['[ip]', '[out]'], {}), '([ip], [out])\n', (1879, 1892), False, 'from tensorflow.keras.models import Model\n'), ((2147, 2171), 'tensorflow.keras.layers.Input', 'Input', ([], {'shape': 'input_shape'}), '(shape=input_shape)\n', (2152, 2171), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((2736, 2754), 'tensorflow.keras.models.Model', 'Model', (['[ip]', '[out]'], {}), '([ip], [out])\n', (2741, 2754), False, 'from tensorflow.keras.models import Model\n'), ((2977, 3001), 'tensorflow.keras.layers.Input', 'Input', ([], {'shape': 'input_shape'}), '(shape=input_shape)\n', (2982, 3001), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((3860, 3878), 'tensorflow.keras.models.Model', 'Model', (['[ip]', '[out]'], {}), '([ip], [out])\n', (3865, 3878), False, 'from tensorflow.keras.models import Model\n'), ((4108, 4132), 'tensorflow.keras.layers.Input', 'Input', ([], {'shape': 'input_shape'}), '(shape=input_shape)\n', (4113, 4132), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((4222, 4240), 'tensorflow.keras.models.Model', 'Model', (['[ip]', '[out]'], {}), '([ip], [out])\n', (4227, 4240), False, 'from tensorflow.keras.models import Model\n'), ((4644, 4668), 'tensorflow.keras.layers.Input', 'Input', ([], {'shape': 'input_shape'}), '(shape=input_shape)\n', (4649, 4668), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((4758, 4776), 'tensorflow.keras.models.Model', 'Model', (['[ip]', '[out]'], {}), '([ip], [out])\n', (4763, 4776), False, 'from tensorflow.keras.models import Model\n'), ((4861, 4885), 'tensorflow.keras.layers.Input', 'Input', ([], {'shape': 'input_shape'}), 
'(shape=input_shape)\n', (4866, 4885), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((5021, 5039), 'tensorflow.keras.models.Model', 'Model', (['[ip]', '[out]'], {}), '([ip], [out])\n', (5026, 5039), False, 'from tensorflow.keras.models import Model\n'), ((5474, 5498), 'tensorflow.keras.layers.Input', 'Input', ([], {'shape': 'input_shape'}), '(shape=input_shape)\n', (5479, 5498), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((5603, 5621), 'tensorflow.keras.models.Model', 'Model', (['[ip]', '[out]'], {}), '([ip], [out])\n', (5608, 5621), False, 'from tensorflow.keras.models import Model\n'), ((5706, 5730), 'tensorflow.keras.layers.Input', 'Input', ([], {'shape': 'input_shape'}), '(shape=input_shape)\n', (5711, 5730), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((5896, 5914), 'tensorflow.keras.models.Model', 'Model', (['[ip]', '[out]'], {}), '([ip], [out])\n', (5901, 5914), False, 'from tensorflow.keras.models import Model\n'), ((6154, 6178), 'tensorflow.keras.layers.Input', 'Input', ([], {'shape': 'input_shape'}), '(shape=input_shape)\n', (6159, 6178), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((6903, 6928), 'tensorflow.keras.layers.concatenate', 'concatenate', (['[lstm, flat]'], {}), '([lstm, flat])\n', (6914, 6928), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((6997, 7015), 'tensorflow.keras.models.Model', 'Model', (['[ip]', '[out]'], {}), '([ip], [out])\n', (7002, 7015), False, 'from tensorflow.keras.models import Model\n'), ((7344, 7368), 'tensorflow.keras.layers.Input', 'Input', ([], {'shape': 'input_shape'}), '(shape=input_shape)\n', (7349, 7368), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((8445, 8463), 'tensorflow.keras.models.Model', 'Model', (['[ip]', '[out]'], {}), '([ip], [out])\n', (8450, 
8463), False, 'from tensorflow.keras.models import Model\n'), ((1525, 1534), 'tensorflow.keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (1532, 1534), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((1553, 1565), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.1)'], {}), '(0.1)\n', (1560, 1565), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((1592, 1621), 'tensorflow.keras.layers.Dense', 'Dense', (['(500)'], {'activation': '"""relu"""'}), "(500, activation='relu')\n", (1597, 1621), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((1635, 1647), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.2)'], {}), '(0.2)\n', (1642, 1647), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((1666, 1695), 'tensorflow.keras.layers.Dense', 'Dense', (['(500)'], {'activation': '"""relu"""'}), "(500, activation='relu')\n", (1671, 1695), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((1709, 1721), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.2)'], {}), '(0.2)\n', (1716, 1721), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((1740, 1769), 'tensorflow.keras.layers.Dense', 'Dense', (['(500)'], {'activation': '"""relu"""'}), "(500, activation='relu')\n", (1745, 1769), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((1783, 1795), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.3)'], {}), '(0.3)\n', (1790, 1795), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((1815, 1852), 'tensorflow.keras.layers.Dense', 'Dense', (['nb_class'], {'activation': '"""softmax"""'}), "(nb_class, activation='softmax')\n", (1820, 1852), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((2497, 2506), 'tensorflow.keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (2504, 2506), False, 'from 
tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((2527, 2556), 'tensorflow.keras.layers.Dense', 'Dense', (['(120)'], {'activation': '"""relu"""'}), "(120, activation='relu')\n", (2532, 2556), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((2572, 2584), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (2579, 2584), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((2603, 2631), 'tensorflow.keras.layers.Dense', 'Dense', (['(84)'], {'activation': '"""relu"""'}), "(84, activation='relu')\n", (2608, 2631), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((2645, 2657), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (2652, 2657), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((2677, 2714), 'tensorflow.keras.layers.Dense', 'Dense', (['nb_class'], {'activation': '"""softmax"""'}), "(nb_class, activation='softmax')\n", (2682, 2714), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((3618, 3627), 'tensorflow.keras.layers.Flatten', 'Flatten', ([], {}), '()\n', (3625, 3627), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((3648, 3678), 'tensorflow.keras.layers.Dense', 'Dense', (['(4096)'], {'activation': '"""relu"""'}), "(4096, activation='relu')\n", (3653, 3678), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((3694, 3706), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.5)'], {}), '(0.5)\n', (3701, 3706), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((3725, 3755), 'tensorflow.keras.layers.Dense', 'Dense', (['(4096)'], {'activation': '"""relu"""'}), "(4096, activation='relu')\n", (3730, 3755), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((3769, 3781), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.5)'], 
{}), '(0.5)\n', (3776, 3781), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((3801, 3838), 'tensorflow.keras.layers.Dense', 'Dense', (['nb_class'], {'activation': '"""softmax"""'}), "(nb_class, activation='softmax')\n", (3806, 3838), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((4143, 4152), 'tensorflow.keras.layers.LSTM', 'LSTM', (['(512)'], {}), '(512)\n', (4147, 4152), False, 'from tensorflow.keras.layers import LSTM, Bidirectional\n'), ((4167, 4204), 'tensorflow.keras.layers.Dense', 'Dense', (['nb_class'], {'activation': '"""softmax"""'}), "(nb_class, activation='softmax')\n", (4172, 4204), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((4679, 4688), 'tensorflow.keras.layers.LSTM', 'LSTM', (['(100)'], {}), '(100)\n', (4683, 4688), False, 'from tensorflow.keras.layers import LSTM, Bidirectional\n'), ((4703, 4740), 'tensorflow.keras.layers.Dense', 'Dense', (['nb_class'], {'activation': '"""softmax"""'}), "(nb_class, activation='softmax')\n", (4708, 4740), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((4896, 4928), 'tensorflow.keras.layers.LSTM', 'LSTM', (['(100)'], {'return_sequences': '(True)'}), '(100, return_sequences=True)\n', (4900, 4928), False, 'from tensorflow.keras.layers import LSTM, Bidirectional\n'), ((4942, 4951), 'tensorflow.keras.layers.LSTM', 'LSTM', (['(100)'], {}), '(100)\n', (4946, 4951), False, 'from tensorflow.keras.layers import LSTM, Bidirectional\n'), ((4966, 5003), 'tensorflow.keras.layers.Dense', 'Dense', (['nb_class'], {'activation': '"""softmax"""'}), "(nb_class, activation='softmax')\n", (4971, 5003), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((5548, 5585), 'tensorflow.keras.layers.Dense', 'Dense', (['nb_class'], {'activation': '"""softmax"""'}), "(nb_class, activation='softmax')\n", (5553, 5585), False, 'from tensorflow.keras.layers import Dense, Flatten, 
Dropout, Input\n'), ((5841, 5878), 'tensorflow.keras.layers.Dense', 'Dense', (['nb_class'], {'activation': '"""softmax"""'}), "(nb_class, activation='softmax')\n", (5846, 5878), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((6304, 6319), 'tensorflow.keras.layers.Permute', 'Permute', (['(2, 1)'], {}), '((2, 1))\n', (6311, 6319), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((6336, 6345), 'tensorflow.keras.layers.LSTM', 'LSTM', (['(128)'], {}), '(128)\n', (6340, 6345), False, 'from tensorflow.keras.layers import LSTM, Bidirectional\n'), ((6363, 6375), 'tensorflow.keras.layers.Dropout', 'Dropout', (['(0.8)'], {}), '(0.8)\n', (6370, 6375), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((6394, 6457), 'tensorflow.keras.layers.Conv1D', 'Conv1D', (['(128)', '(8)'], {'padding': '"""same"""', 'kernel_initializer': '"""he_uniform"""'}), "(128, 8, padding='same', kernel_initializer='he_uniform')\n", (6400, 6457), False, 'from tensorflow.keras.layers import MaxPooling1D, Conv1D\n'), ((6473, 6493), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (6491, 6493), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((6511, 6529), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (6521, 6529), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((6548, 6611), 'tensorflow.keras.layers.Conv1D', 'Conv1D', (['(256)', '(5)'], {'padding': '"""same"""', 'kernel_initializer': '"""he_uniform"""'}), "(256, 5, padding='same', kernel_initializer='he_uniform')\n", (6554, 6611), False, 'from tensorflow.keras.layers import MaxPooling1D, Conv1D\n'), ((6629, 6649), 'tensorflow.keras.layers.BatchNormalization', 
'BatchNormalization', ([], {}), '()\n', (6647, 6649), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((6667, 6685), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (6677, 6685), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((6704, 6767), 'tensorflow.keras.layers.Conv1D', 'Conv1D', (['(128)', '(3)'], {'padding': '"""same"""', 'kernel_initializer': '"""he_uniform"""'}), "(128, 3, padding='same', kernel_initializer='he_uniform')\n", (6710, 6767), False, 'from tensorflow.keras.layers import MaxPooling1D, Conv1D\n'), ((6785, 6805), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (6803, 6805), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((6823, 6841), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (6833, 6841), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((6860, 6884), 'tensorflow.keras.layers.GlobalAveragePooling1D', 'GlobalAveragePooling1D', ([], {}), '()\n', (6882, 6884), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((6940, 6977), 'tensorflow.keras.layers.Dense', 'Dense', (['nb_class'], {'activation': '"""softmax"""'}), "(nb_class, activation='softmax')\n", (6945, 6977), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((8235, 8256), 'tensorflow.keras.layers.add', 'add', (['[residual, conv]'], {}), '([residual, conv])\n', (8238, 8256), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((8346, 8370), 
'tensorflow.keras.layers.GlobalAveragePooling1D', 'GlobalAveragePooling1D', ([], {}), '()\n', (8368, 8370), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((8388, 8425), 'tensorflow.keras.layers.Dense', 'Dense', (['nb_class'], {'activation': '"""softmax"""'}), "(nb_class, activation='softmax')\n", (8393, 8425), False, 'from tensorflow.keras.layers import Dense, Flatten, Dropout, Input\n'), ((2338, 2432), 'tensorflow.keras.layers.Conv1D', 'Conv1D', (['(6 + 10 * i)', '(3)'], {'padding': '"""same"""', 'activation': '"""relu"""', 'kernel_initializer': '"""he_uniform"""'}), "(6 + 10 * i, 3, padding='same', activation='relu', kernel_initializer\n ='he_uniform')\n", (2344, 2432), False, 'from tensorflow.keras.layers import MaxPooling1D, Conv1D\n'), ((2445, 2470), 'tensorflow.keras.layers.MaxPooling1D', 'MaxPooling1D', ([], {'pool_size': '(2)'}), '(pool_size=2)\n', (2457, 2470), False, 'from tensorflow.keras.layers import MaxPooling1D, Conv1D\n'), ((3208, 3302), 'tensorflow.keras.layers.Conv1D', 'Conv1D', (['num_filters', '(3)'], {'padding': '"""same"""', 'activation': '"""relu"""', 'kernel_initializer': '"""he_uniform"""'}), "(num_filters, 3, padding='same', activation='relu',\n kernel_initializer='he_uniform')\n", (3214, 3302), False, 'from tensorflow.keras.layers import MaxPooling1D, Conv1D\n'), ((3320, 3414), 'tensorflow.keras.layers.Conv1D', 'Conv1D', (['num_filters', '(3)'], {'padding': '"""same"""', 'activation': '"""relu"""', 'kernel_initializer': '"""he_uniform"""'}), "(num_filters, 3, padding='same', activation='relu',\n kernel_initializer='he_uniform')\n", (3326, 3414), False, 'from tensorflow.keras.layers import MaxPooling1D, Conv1D\n'), ((3566, 3591), 'tensorflow.keras.layers.MaxPooling1D', 'MaxPooling1D', ([], {'pool_size': '(2)'}), '(pool_size=2)\n', (3578, 3591), False, 'from tensorflow.keras.layers import MaxPooling1D, Conv1D\n'), ((5523, 5532), 
'tensorflow.keras.layers.LSTM', 'LSTM', (['(100)'], {}), '(100)\n', (5527, 5532), False, 'from tensorflow.keras.layers import LSTM, Bidirectional\n'), ((5755, 5787), 'tensorflow.keras.layers.LSTM', 'LSTM', (['(100)'], {'return_sequences': '(True)'}), '(100, return_sequences=True)\n', (5759, 5787), False, 'from tensorflow.keras.layers import LSTM, Bidirectional\n'), ((5816, 5825), 'tensorflow.keras.layers.LSTM', 'LSTM', (['(100)'], {}), '(100)\n', (5820, 5825), False, 'from tensorflow.keras.layers import LSTM, Bidirectional\n'), ((7471, 7543), 'tensorflow.keras.layers.Conv1D', 'Conv1D', (['nb_nodes', '(8)'], {'padding': '"""same"""', 'kernel_initializer': '"""glorot_uniform"""'}), "(nb_nodes, 8, padding='same', kernel_initializer='glorot_uniform')\n", (7477, 7543), False, 'from tensorflow.keras.layers import MaxPooling1D, Conv1D\n'), ((7565, 7585), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (7583, 7585), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((7607, 7625), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (7617, 7625), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((7648, 7720), 'tensorflow.keras.layers.Conv1D', 'Conv1D', (['nb_nodes', '(5)'], {'padding': '"""same"""', 'kernel_initializer': '"""glorot_uniform"""'}), "(nb_nodes, 5, padding='same', kernel_initializer='glorot_uniform')\n", (7654, 7720), False, 'from tensorflow.keras.layers import MaxPooling1D, Conv1D\n'), ((7742, 7762), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (7760, 7762), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((7784, 7802), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", 
(7794, 7802), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((7825, 7897), 'tensorflow.keras.layers.Conv1D', 'Conv1D', (['nb_nodes', '(3)'], {'padding': '"""same"""', 'kernel_initializer': '"""glorot_uniform"""'}), "(nb_nodes, 3, padding='same', kernel_initializer='glorot_uniform')\n", (7831, 7897), False, 'from tensorflow.keras.layers import MaxPooling1D, Conv1D\n'), ((7919, 7939), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (7937, 7939), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((7961, 7979), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (7971, 7979), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((8189, 8209), 'tensorflow.keras.layers.BatchNormalization', 'BatchNormalization', ([], {}), '()\n', (8207, 8209), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((8272, 8290), 'tensorflow.keras.layers.Activation', 'Activation', (['"""relu"""'], {}), "('relu')\n", (8282, 8290), False, 'from tensorflow.keras.layers import BatchNormalization, GlobalAveragePooling1D, Permute, concatenate, Activation, add\n'), ((2219, 2246), 'math.log', 'math.log', (['input_shape[0]', '(2)'], {}), '(input_shape[0], 2)\n', (2227, 2246), False, 'import math\n'), ((3049, 3076), 'math.log', 'math.log', (['input_shape[0]', '(2)'], {}), '(input_shape[0], 2)\n', (3057, 3076), False, 'import math\n'), ((3454, 3548), 'tensorflow.keras.layers.Conv1D', 'Conv1D', (['num_filters', '(3)'], {'padding': '"""same"""', 'activation': '"""relu"""', 'kernel_initializer': '"""he_uniform"""'}), "(num_filters, 3, padding='same', activation='relu',\n kernel_initializer='he_uniform')\n", (3460, 
3548), False, 'from tensorflow.keras.layers import MaxPooling1D, Conv1D\n'), ((8087, 8159), 'tensorflow.keras.layers.Conv1D', 'Conv1D', (['nb_nodes', '(1)'], {'padding': '"""same"""', 'kernel_initializer': '"""glorot_uniform"""'}), "(nb_nodes, 1, padding='same', kernel_initializer='glorot_uniform')\n", (8093, 8159), False, 'from tensorflow.keras.layers import MaxPooling1D, Conv1D\n')] |
#! /usr/bin/python
# encoding=utf-8
import os
import datetime,time
from selenium import webdriver
import config
import threading
import numpy as np
def writelog(msg, log):
    """Append a timestamped message to the log file *log*.

    Parameters
    ----------
    msg : str
        Message text to record.
    log : str
        Path of the log file (created if missing, appended otherwise).

    The previous implementation shelled out via
    ``os.system("echo %s >> %s" % (text.encode('utf8'), log))``, which is
    vulnerable to shell metacharacters in *msg* and, under Python 3, writes
    the ``b'...'`` repr of the encoded text instead of the text itself.
    Writing the file directly is safe and portable.
    """
    nt = datetime.datetime.now().strftime('%Y-%m-%d %H-%M-%S')
    text = "[%s] %s " % (nt, msg)
    with open(log, 'a') as fh:
        fh.write(text + '\n')
def create_chrome():
    """Build a Chrome webdriver emulating the mobile device from config.

    Uses ``config.mobileEmulation`` for the device profile and
    ``config.mWidth``/``config.mHeight`` for the window size.
    """
    options = webdriver.ChromeOptions()
    options.add_experimental_option('mobileEmulation', config.mobileEmulation)
    # NOTE(review): `chrome_options=` is deprecated in newer selenium in
    # favor of `options=` — kept as-is to preserve behavior.
    driver = webdriver.Chrome(chrome_options=options)
    # keep slow pages/scripts from hanging the scraper indefinitely
    driver.set_page_load_timeout(10)
    driver.set_script_timeout(10)
    driver.set_window_size(config.mWidth, config.mHeight)
    return driver
# Create a pool of daemon threads, run them all, and wait for completion.
def threading_pool(tnum, funname):
    """Run *funname* concurrently on *tnum* daemon threads and wait for all.

    Parameters
    ----------
    tnum : int
        Number of threads to spawn.
    funname : callable
        Zero-argument callable executed by each thread.

    Returns
    -------
    list of threading.Thread
        The (already joined) thread objects.
    """
    threads = [threading.Thread(target=funname) for _ in range(tnum)]
    for t in threads:
        # `t.setDaemon(True)` is deprecated since Python 3.10;
        # the attribute assignment is the supported spelling.
        t.daemon = True
        t.start()
    for t in threads:
        t.join()
    return threads
def set_interval(*args):
    """Sleep for a random duration drawn uniformly from [lower, upper) seconds.

    Optional positional args override the defaults: the first argument is
    the lower bound (default 3), the second the upper bound (default 6).
    """
    lower, upper = 3, 6
    if args:
        lower = args[0]
    if len(args) > 1:
        upper = args[1]
    time.sleep(np.random.uniform(lower, upper))
time.sleep(f)
| [
"selenium.webdriver.ChromeOptions",
"selenium.webdriver.Chrome",
"time.sleep",
"datetime.datetime.now",
"numpy.random.uniform",
"threading.Thread"
] | [((355, 380), 'selenium.webdriver.ChromeOptions', 'webdriver.ChromeOptions', ([], {}), '()\n', (378, 380), False, 'from selenium import webdriver\n'), ((464, 500), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'chrome_options': 'ops'}), '(chrome_options=ops)\n', (480, 500), False, 'from selenium import webdriver\n'), ((1096, 1119), 'numpy.random.uniform', 'np.random.uniform', (['s', 'e'], {}), '(s, e)\n', (1113, 1119), True, 'import numpy as np\n'), ((1123, 1136), 'time.sleep', 'time.sleep', (['f'], {}), '(f)\n', (1133, 1136), False, 'import datetime, time\n'), ((748, 780), 'threading.Thread', 'threading.Thread', ([], {'target': 'funname'}), '(target=funname)\n', (764, 780), False, 'import threading\n'), ((181, 204), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (202, 204), False, 'import datetime, time\n')] |
import numpy as np
import pandas as pd
import os
import matplotlib.pyplot as plt
from sklearn import datasets, linear_model
from difflib import SequenceMatcher
import seaborn as sns
from statistics import mean
from ast import literal_eval
from scipy import stats
from sklearn.linear_model import LinearRegression
from sklearn.linear_model import LogisticRegression
from pygam import LinearGAM, s, l, f
from matplotlib import lines
import six
def extract_boar_teloFISH_as_list(path):
    """
    Collect telomere FISH data for the 40 boars into one list of
    [sample_id, per-telomere values (ndarray), mean telomere value] rows.

    Scans *path* for excel workbooks whose filename contains 'Hyb'; each
    workbook holds one sheet per sample plus a 'Control' sheet. All values
    are normalized by the mean of that workbook's internal 'Control' sheet.

    The excel files take a long time to load, so the intended workflow is
    to run this once, save the resulting dataframe to csv, and reload the
    csv thereafter.
    """
    boar_teloFISH_list = []
    for file in os.scandir(path):
        if 'Hyb' in file.name:
            print(f'Handling {file.name}...')
            # NOTE(review): assumes *path* ends with a separator, since the
            # filename is joined by plain string concatenation — confirm.
            full_name = path + file.name
            # dict of excel sheets: KEY:VALUE pairs are SAMPLE ID : TELO DATA
            telo_excel_dict = pd.read_excel(full_name, sheet_name=None, skiprows=4, usecols=[3], nrows=5000)
            if 'Telomere Template' in telo_excel_dict.keys():
                del telo_excel_dict['Telomere Template']
            excel_file_list = []
            for sample_id, telos in telo_excel_dict.items():
                telos_cleaned = clean_individ_telos(telos)
                if sample_id != 'Control':
                    excel_file_list.append([sample_id, telos_cleaned.values, np.mean(telos_cleaned)])
                elif sample_id == 'Control':
                    control_value = np.mean(telos_cleaned)
            # normalize teloFISH values by the workbook's control value
            # NOTE(review): if a workbook lacks a 'Control' sheet this raises
            # NameError (control_value unbound) — verify all files have one.
            for sample in excel_file_list:
                sample_data = sample
                # normalize individual telos
                sample_data[1] = np.divide(sample_data[1], control_value)
                # normalize telo means
                sample_data[2] = np.divide(sample_data[2], control_value)
                boar_teloFISH_list.append(sample_data)
    print('Finished collecting boar teloFISH data')
    return boar_teloFISH_list
def gen_missing_values_andimpute_or_randomsampledown(n_cells, telosPercell, df):
    """Resize a telomere-value Series toward exactly n_cells * telosPercell values.

    - more values than the target: randomly downsample to the target count
    - 26..half-target values: upsample WITH replacement (seed 28) and concat
    - half-target..target values: upsample WITHOUT replacement (seed 28) and concat
    - 25 or fewer values: return the Series unchanged
    """
    target = n_cells * telosPercell
    half_target = target / 2
    if df.size > target:
        # too many telomeres measured: randomly thin down to the target
        return df.sample(target)
    if df.size > 25 and df.size <= half_target:
        shortfall = abs(target - df.size)
        filler = df.sample(shortfall, replace=True, random_state=28)
        combined = pd.concat([filler, df], sort=False)
        # NOTE(review): shuffles a fresh to_numpy() array, which is likely a
        # copy — probably has no effect on `combined`; kept for parity.
        np.random.shuffle(combined.to_numpy())
        return combined
    if df.size > 25 and df.size < target:
        shortfall = abs(target - df.size)
        filler = df.sample(shortfall, random_state=28)
        combined = pd.concat([filler, df], sort=False)
        np.random.shuffle(combined.to_numpy())
        return combined
    else:
        return df
def clean_individ_telos(telo_data):
    """Clean one sample's raw telomere-intensity column from the excel export.

    Drops the repeating non-data label rows, coerces values to numeric,
    removes >3-sigma outliers, and resamples to the standard count via
    gen_missing_values_andimpute_or_randomsampledown(30, 160, ...).
    """
    # Row indices of the repeating label/header rows in the raw sheet.
    labels=[6, 172, 338, 504, 670, 836, 1002, 1168, 1334, 1500, 1666, 1832,
          1998, 2164, 2330, 2496, 2662, 2828, 2994, 3160, 3326, 3492, 3658, 3824,
          3990, 4156, 4322, 4488, 4654, 4820]
    # The sheet was read with skiprows=4, so each label index shifts down by 6.
    labels_offset_by6 = [(x-6) for x in labels]
    telo_data = telo_data.drop(labels_offset_by6)
    # Stray text becomes NaN, then NaN rows are dropped.
    telo_data = pd.to_numeric(telo_data.iloc[:,0], errors='coerce')
    telo_data = telo_data.dropna(axis=0, how='any')
    telo_data = telo_data.to_frame(name=None)
    # Outlier filter: keep rows with |z-score| < 3.
    telo_data = telo_data[(np.abs(stats.zscore(telo_data)) < 3).all(axis=1)]
    telo_data = pd.Series(telo_data.iloc[:,0])
    # Standardize to 30 cells x 160 telomeres (4800 values).
    telo_data = gen_missing_values_andimpute_or_randomsampledown(30, 160, telo_data)
    telo_data.reset_index(drop=True, inplace=True)
    return telo_data
def remove_dashes_space_sampleIDs(row):
    """Normalize a sample ID by stripping separators and collar/GPS markers.

    Removes '-', ' ', '_', 'gps'/'GPS' and 'collar'/'COLLAR' from the string
    form of *row* and returns the cleaned string.

    Fix: the original tested `'collar' in (row)` on the raw value, which
    raised TypeError whenever *row* was not a string (e.g. an int ID).
    Here the value is converted to str exactly once before cleaning.
    """
    cleaned = str(row)
    # Replacement order matches the original: separators first, then the
    # lowercase markers before their uppercase variants.
    for token in ('-', ' ', '_', 'gps', 'GPS', 'collar', 'COLLAR'):
        cleaned = cleaned.replace(token, '')
    return cleaned
def readable_snake_df_dummy_variables(snake_df):
    """Label each snake Control/Exposed from its sample-ID prefix and add a
    0/1 'Encoded Exposed' dummy column. Returns the mutated dataframe."""
    # IDs starting with 'C' are controls, 'E' are exposed; anything else is
    # skipped (matching the original loop, which appended nothing for them).
    status_labels = [
        label
        for sample in snake_df['Sample ID']
        for label in (['Control'] if sample.startswith('C')
                      else ['Exposed'] if sample.startswith('E')
                      else [])
    ]
    snake_df['Exposure Status'] = status_labels
    # drop_first keeps a single 'Encoded_Exposed' indicator column.
    dummies = pd.get_dummies(snake_df['Exposure Status'], prefix='Encoded', drop_first=True)
    snake_df['Encoded Exposed'] = dummies
    return snake_df
def count_shared_sample_IDs(df1, df2, print_names=None):
    """Report the sample IDs common to two dataframes.

    Prints the count, optionally prints the IDs themselves when
    *print_names* is any capitalization of "yes" (the original accepted
    only 'yes'/'Yes'), and now also RETURNS the list of common IDs so
    callers can use the result programmatically (previously returned None).
    """
    ids_1 = set(df1['Sample ID'].unique())
    ids_2 = set(df2['Sample ID'].unique())
    common_IDs = list(ids_1 & ids_2)
    print(f'The number of sample IDs in common are: {len(common_IDs)}')
    if str(print_names).lower() == 'yes':
        print(f'The sample IDs in common are:\n{common_IDs}')
    return common_IDs
def average_age_weeks(row):
    """Collapse an age entry to a single int.

    A range like '10-14' becomes the truncated mean of its parts; a plain
    number is returned as int.

    Fix: the original summed only the FIRST TWO parts but divided by the
    total number of parts, so '10-14-18' gave 8 instead of 14. The mean is
    now taken over all parts (identical for the common two-part ranges).
    """
    if '-' in str(row):
        parts = str(row).split('-')
        return int(sum(int(p) for p in parts) / len(parts))
    return int(row)
def quartile_cts_rel_to_df1(df1, df2):
    """Count how many values of *df2* fall below, within, and above the
    interquartile range of *df1*.

    Returns a tuple (n_below_q1, n_within_iqr, n_above_q3) of plain ints.
    Assumes single-column data (as the original did).

    Fixes: the 0.25 quantile was computed twice; and `int(Series.values)`
    relied on implicit size-1 ndarray conversion, which is deprecated in
    NumPy >= 1.25 — `.iloc[0]` extracts the scalar explicitly.
    """
    df1 = pd.DataFrame(df1)
    df2 = pd.DataFrame(df2)
    q1 = df1.quantile(0.25)
    q3 = df1.quantile(0.75)
    # counts per column; we take the first (only) column's count below
    below = df2[df2 <= q1].count()
    within = df2[(df2 > q1) & (df2 < q3)].count()
    above = df2[df2 >= q3].count()
    return int(below.iloc[0]), int(within.iloc[0]), int(above.iloc[0])
def make_quartiles_columns(total_boar_telos, df):
    """Fill columns 17-19 of *df* with each sample's telomere quartile counts
    relative to the pooled boar telomere distribution. Returns *df*."""
    TELO_COL = 1                 # positional column holding the telomere array
    Q1_POS, IQR_POS, Q4_POS = 17, 18, 19   # destination column positions
    for idx, record in df.iterrows():
        counts = quartile_cts_rel_to_df1(total_boar_telos, record[TELO_COL])
        df.iat[idx, Q1_POS], df.iat[idx, IQR_POS], df.iat[idx, Q4_POS] = counts
    return df
def linear_regression_graphs_between_variables(x=None, y=None, data=None,
                                               hue=None, col=None,
                                               hue_order=None, col_order=None,
                                               snake=False):
    """Scatter plot of *x* vs *y* with a fitted regression line.

    Uses a logistic fit when the response column name contains 'Binary',
    otherwise ordinary least squares. Titles the figure for boar data by
    default, or snake data when snake=True.

    Fix: hue_order/col_order were accepted but never forwarded to seaborn;
    they are now passed through to lmplot.
    """
    shared_kwargs = dict(x=x, y=y, hue=hue, col=col, data=data,
                         hue_order=hue_order, col_order=col_order,
                         height=5.5, aspect=1,
                         scatter_kws={"s": 175, "edgecolor": 'black'})
    if 'Binary' in y:
        # binary response -> logistic regression fit
        ax = sns.lmplot(logistic=True, **shared_kwargs)
    else:
        ax = sns.lmplot(**shared_kwargs)
    fig = ax.fig
    ax.set_xlabels(x, fontsize=18)
    ax.set_xticklabels(fontsize=14)
    ax.set_ylabels(y, fontsize=18)
    ax.set_yticklabels(fontsize=14)
    ax.set_titles(size=14)
#     if 'Cortisol' in y:
#         ax.set(ylim=(0, 40))
    plt.subplots_adjust(top=0.88)
    if hue is None and col is None:
        fig.suptitle(f'{x} vs.\n {y} in Fukushima Wild Boar', fontsize=18,
                    )
#         ax.savefig(f"../graphs/{x} vs {y}.png", dpi=400)
    if snake:
        fig.suptitle(f'{x} vs.\n {y} in Fukushima Wild Snake', fontsize=18,
                    )
#     elif hue == 'Sex' and col == 'Sex':
#         fig.suptitle(f'{x} vs. {y}\nper Sex in Fukushima Wild Boar', fontsize=16, weight='bold')
#         fig.legend(fontsize='large')
#         ax.savefig(f"../graphs/{x} vs {y} per sex.png", dpi=400)
def graph_dose_age_vs_telos(df=None, x=None, x2=None, y=None, hue=None,):
    """Side-by-side regression scatter plots: dose vs. *y* (left panel) and
    age vs. *y* (right panel).

    df  : dataframe with an 'Age (months)' column plus x, x2 and y columns.
    x   : dose column name (left panel x-axis).
    x2  : age column name (right panel x-axis).
    y   : shared response column name.
    hue : currently unused (the hue argument is commented out below).
    """
    f, axes = plt.subplots(1, 2, figsize=(12,5), sharey=False, sharex=False)
    # dose vs. telomeres -- marker area scales with the animal's age
    sns.regplot(x=x, y=y, data=df, ax=axes[0],
#                 hue=hue,
                scatter_kws={'alpha':0.8, 'linewidth':1, 'edgecolor':'black', 's':df['Age (months)']*12, })
    axes[0].set_xlabel(x, fontsize=14)
    axes[0].set_ylabel(y, fontsize=14)
    axes[0].tick_params(labelsize=12)
    # age vs. telomeres -- fixed marker size
    sns.regplot(x=x2, y=y, data=df, ax=axes[1],
#                 hue=hue,
                scatter_kws={'alpha':0.8, 'linewidth':1, 'edgecolor':'black', 's':175, })
    axes[1].set_xlabel(x2, fontsize=14)
    axes[1].set_xlim(-4,55)
    axes[1].set_ylabel(y, fontsize=14)
    # assay-specific y-axis limits so figures are comparable across plots
    if y == 'Mean Telomere Length (FISH)':
        axes[1].set_ylim(0.2,1.6)
    if y == 'Mean Telomere Length (qPCR)':
        axes[1].set_ylim(0.6,1.8)
    axes[1].tick_params(labelsize=12)
def score_linear_regressions(x=None, y=None, data=None, sexes=('Overall',)):
    """Fit and report OLS R-squared for *x* (list of feature columns) vs *y*.

    For each entry in *sexes*: 'Overall' fits on all rows, any other value
    fits on the subset where data['Sex'] matches. Prints each score and
    returns the last fitted model.

    Fixes: the original `return` sat INSIDE the loop, so only the first sex
    was ever scored; the mutable default list is now a tuple.
    """
    regression = None
    for sex in sexes:
        subset = data if sex == 'Overall' else data[data['Sex'] == sex]
        X_r = subset[x].values.reshape(-1, len(x))
        y_r = subset[y].values.reshape(-1, 1)
        regression = LinearRegression().fit(X_r, y_r)
        if sex == 'Overall':
            print(f'Linear regression for {x} vs. {y}:\nOverall R2 is {regression.score(X_r, y_r):.4f}\n')
        else:
            print(f"Linear regression for {x} vs. {y}:\nR2 for {sex}s is {regression.score(X_r, y_r):.4f}")
    return regression
def eval_number(x):
    """Binarize a value against the threshold 15: 1 if strictly above, else 0.

    Fix: the original had no branch for x == 15 and silently returned None
    there; 15 is now treated as the "else" (0) side.
    """
    return 1 if x > 15 else 0
def score_logistic_regressions(x=None, y=None, data=None):
    """Fit and report logistic-regression accuracy for a single feature *x*
    against the binary response *y*.

    Currently only the 'Overall' fit is enabled (per-sex entries were
    commented out upstream); the per-sex branch is kept for when they are
    re-enabled.

    Fix: the per-sex branch fitted a LinearRegression while printing
    "Logistic regression" — both branches now fit LogisticRegression.
    """
    sexes = [
#         'Male',
#         'Female',
        'Overall']
    for sex in sexes:
        subset = data if sex == 'Overall' else data[data['Sex'] == sex]
        X_r = subset[x].values.reshape(-1, 1)
        y_r = subset[y].values.reshape(-1, )
        regression = LogisticRegression(solver='lbfgs').fit(X_r, y_r)
        if sex == 'Overall':
            print(f'Logistic regression for {x} vs. {y}:\nOverall R2 is {regression.score(X_r, y_r):.4f}\n')
        else:
            print(f"Logistic regression for {x} vs. {y}:\nR2 for {sex}s is {regression.score(X_r, y_r):.4f}")
def encode_sex(row):
    """Encode 'Male' as 0 and 'Female' as 1; print a warning and return None
    for any other value."""
    if row == 'Male':
        code = 0
    elif row == 'Female':
        code = 1
    else:
        print(f'ERROR.. row == {row}')
        code = None
    return code
def merge_return_df_cols_interest(dose_df, cortisol_df, cols_of_interest):
    """Inner-merge the two frames on 'Sample ID' and return an independent
    copy restricted to *cols_of_interest*."""
    merged = dose_df.merge(cortisol_df, on=['Sample ID'])
    return merged[cols_of_interest].copy()
def enforce_col_types(df):
    """Coerce column dtypes in place: ID/sex columns to str, age and the
    encoded-sex column to int64, everything else to float64."""
    for name in df.columns:
        if name in ('Sample ID', 'Sex'):
            target = 'str'
        elif name in ('Age (months)', 'encode sex'):
            target = 'int64'
        else:
            target = 'float64'
        df[name] = df[name].astype(target)
def male_or_female(row):
    """Normalize a sex label to 'Male' or 'Female'; print a warning and
    return NaN for anything unrecognized.

    Generalized to be case-insensitive and whitespace-tolerant (the original
    accepted only 'M'/'m'/'Male' and 'F'/'f'/'Female').
    Fix: uses np.nan — the np.NaN alias was removed in NumPy 2.0.
    """
    label = str(row).strip().lower()
    if label in ('m', 'male'):
        return 'Male'
    if label in ('f', 'female'):
        return 'Female'
    print(f'error... row == {row}')
    return np.nan
def make_age_class(row):
    """Bucket an age in months: <=12 'piglet', 12-24 'yearling', >=24 'adult'.

    Returns None for values that satisfy none of the comparisons (e.g. NaN).

    Fix: the original's final condition was `row >= 20`, which overlapped the
    yearling range — behavior was unchanged only because the yearling branch
    matched first. The boundary is now written as the true cutoff, 24.
    """
    if row <= 12:
        return 'piglet'
    elif row > 12 and row < 24:
        return 'yearling'
    elif row >= 24:
        return 'adult'
def linear_regression_scores_X_y(df, y, y_name, dose_types):
    """
    specifically for EDA

    For every dose metric in *dose_types*, fits three nested OLS models
    against *y* — dose alone, dose + age, dose + age + encoded sex — and
    prints the R^2 of each. Returns only the model fitted LAST in the sweep.
    """
    for Xn in dose_types:
        # Nested feature sets: dose only, + age, + encoded sex.
        features_list = [[Xn], [Xn, 'Age (months)'], [Xn, 'Age (months)', 'encoded sex']]
        for features in features_list:
            X = df[features].values.reshape(-1, len(features))
            fit_lm = LinearRegression().fit(X, y)
            print(f'OLS | {features} vs. {y_name} --> R2: {fit_lm.score(X, y):.4f}')
        print('')
    # NOTE: this is whatever model the loops fitted last, not a "best" model.
    return fit_lm
def fit_gam_plot_dependencies(df=None, features=None, target=None,
                              basis_1=s, basis_2=False, summary=False):
    """Fit a LinearGAM of *target* on *features* and plot partial dependencies.

    basis_1/basis_2 are term constructors applied to feature 0 and feature 1
    respectively (lam=60 is the smoothing penalty passed to each term).
    NOTE(review): the default `basis_1=s` is evaluated at import time and
    relies on a module-level `s` — presumably pygam's spline term; confirm
    against this module's imports.
    """
    X = df[features]
    y = df[target]
    if basis_1 and basis_2:
        # one smoothed term per feature, plus an explicit intercept
        gam = LinearGAM(basis_1(0, lam=60) + basis_2(1, lam=60), fit_intercept=True).fit(X, y)
    elif basis_1:
        gam = LinearGAM(basis_1(0, lam=60), fit_intercept=True).fit(X, y)
    else:
        # No basis supplied: nothing is fitted, and the call below will fail.
        print('no basis called for features.. error')
    if summary:
        print(gam.summary())
    plot_gam_partial_dependencies(gam, features, target)
def plot_gam_partial_dependencies(gam, features, target):
    """Plot each non-intercept GAM term's partial dependence with a 95% band."""
    for i, term in enumerate(gam.terms):
        if term.isintercept:
            continue
        # Grid of X values spanning this term's feature range.
        XX = gam.generate_X_grid(term=i)
        pdep, confi = gam.partial_dependence(term=i, X=XX, width=0.95)
        plt.figure()
        plt.plot(XX[:, term.feature], pdep)
        # dashed red lines: the 95% confidence band
        plt.plot(XX[:, term.feature], confi, c='r', ls='--')
        # NOTE(review): features[i] assumes term order matches *features*
        # (intercept last) — confirm for models with reordered terms.
        plt.xlabel(f'{features[i]}', fontsize=14)
        plt.ylabel(f'{target}', fontsize=14)
        plt.title(f'Functional dependence of Y on X', fontsize=14)
        plt.show()
def graph_y_vs_dose_age_sex(df=None, x=None, x2=None, x3=None, y=None, hue=None,
                            dose_x_size='Age (months)', multiplier=12):
    """Three-panel figure for response *y*: dose regression (left, marker size
    scaled by *dose_x_size* * *multiplier*), age regression with per-sex
    markers (middle), and a sex box/swarm plot (right). Saves the figure
    under graphs/main figures/.
    """
    f, axes = plt.subplots(1, 3, figsize=(15,5), sharey=True, sharex=False)
    fontsize=16
    # NOTE: the trailing comma makes `colors` a 1-tuple; the palette itself
    # is colors[0].
    colors = sns.color_palette('Paired', len(df['Sample ID'].unique())),
    t = (0.7,)
    # Append alpha=0.7 to each RGB tuple -> RGBA colors.
    test = [x + t for x in colors[0]]
    # DOSE vs. Y
    sns.regplot(x=x, y=y, data=df, ax=axes[0], color=test[4],
                scatter_kws={'alpha':.8, 'linewidth':1, 'edgecolor':'black', 's':df[dose_x_size]*multiplier})
    # AGE vs. Y
    # male O markers
    sns.regplot(x=x2, y=y, data=df[df['Sex'] == 'Male'], ax=axes[1], color=test[8], marker='o', fit_reg=False,
                scatter_kws={'alpha':.8, 'linewidth':1, 'edgecolor':'black', 's':175,})
    # female X markers
    sns.regplot(x=x2, y=y, data=df[df['Sex'] == 'Female'], ax=axes[1], color=test[8], marker='X', fit_reg=False,
                scatter_kws={'alpha':.8, 'linewidth':1, 'edgecolor':'black', 's':200,})
    # plotting just the linear reg (s=0 hides this call's scatter points)
    sns.regplot(x=x2, y=y, data=df, ax=axes[1], color=test[8], scatter_kws={'s':0,})
    # creating custom legend entries matching the two marker styles above
    handles, labels = [], []
    line1 = lines.Line2D([], [], color=test[8], alpha=.8, marker='o', mew=1, mec='black')
    line2 = lines.Line2D([], [], color=test[8], alpha=.8, marker='X', mew=1, mec='black')
    handles.append(line1)
    handles.append(line2)
    labels.append('Male')
    labels.append('Female')
    axes[1].legend(handles, labels, loc='upper right',ncol=1, fancybox=True,
                   fontsize=fontsize, markerscale=2)
    # SEX vs. Y
    palette_cust = {'Male':test[0], 'Female':test[10]}
    sns.boxplot(x=x3, y=y, dodge=True, palette=palette_cust, order=['Male', 'Female'], data=df, ax=axes[2],)
    # soften the box fills so swarm points remain visible
    for patch in axes[2].artists:
        r, g, b, a = patch.get_facecolor()
        patch.set_facecolor((r, g, b, .6))
    sns.swarmplot(x=x3, y=y, dodge=True, palette=palette_cust, order=['Male', 'Female'], data=df, ax=axes[2],
                  size=12, edgecolor='black', linewidth=1, **{'alpha':0.8})
    x_name = 'Reasonable Total Life Time Dose (mGy)'
    axes[0].set_xlabel(x_name, fontsize=fontsize)
    axes[0].set_ylabel(y, fontsize=fontsize)
    axes[0].tick_params(labelsize=fontsize)
    axes[1].set_xlabel(x2, fontsize=fontsize)
    axes[1].set_ylabel('', fontsize=fontsize)
    axes[1].tick_params(labelsize=fontsize)
    axes[2].set_xlabel(x3, fontsize=fontsize)
    axes[2].set_ylabel('', fontsize=fontsize)
    axes[2].tick_params(labelsize=fontsize)
#     axes[0].set_xlim(-50,700)
#     axes[1].set_xlim(-4,55)
    # assay-specific y limits; y_name also drops characters unsafe in filenames
    if y == 'Mean Telomere Length (Telo-FISH)':
        axes[0].set_ylim(0.2,1.6)
        axes[1].set_ylim(0.2,1.6)
        y_name = y
    elif y == 'Mean Telomere Length (qPCR)':
        axes[0].set_ylim(0.6,1.8)
        axes[1].set_ylim(0.6,1.8)
        y_name = y
    elif y == 'Cortisol (pg/mg)':
        axes[0].set_ylim(-3, 35)
        y_name = y.replace('/', '')
    elif y == 'Average # of dicentrics per cell':
        axes[0].set_ylim(-0.005, .065)
        y_name = y
    plt.tight_layout()
    plt.savefig(f'graphs/main figures/{y_name} vs {x} and {x2}.png', dpi=600, bbox_inches='tight')
def render_mpl_table(data, col_width=3.0, row_height=0.625, font_size=14,
                     header_color='#40466e', row_colors=['#f1f1f2', 'w'], edge_color='black',
                     bbox=[0, 0, 1, 1], header_columns=0, path=None,
                     ax=None, **kwargs):
    """Render a DataFrame as a styled matplotlib table and close the figure.

    When *ax* is None a new figure is sized from the table shape (+1 row for
    the header). When *path* is given the figure is saved at 600 dpi first.
    (Mutable defaults row_colors/bbox are kept for API compatibility; they
    are never mutated here.)

    Fix: the Python-2-era six.iteritems() shim is replaced by dict.items().
    """
    if ax is None:
        # figure size = (n_cols, n_rows + header) scaled by cell dimensions
        size = (np.array(data.shape[::-1]) + np.array([0, 1])) * np.array([col_width, row_height])
        fig, ax = plt.subplots(figsize=size)
        ax.axis('off')
    mpl_table = ax.table(cellText=data.values, bbox=bbox, colLabels=data.columns, **kwargs)
    mpl_table.auto_set_font_size(False)
    mpl_table.set_fontsize(font_size)
    for k, cell in mpl_table._cells.items():
        cell.set_edgecolor(edge_color)
        if k[0] == 0 or k[1] < header_columns:
            # header row (row 0) and any leading header columns
            cell.set_text_props(weight='bold', color='w')
            cell.set_facecolor(header_color)
        else:
            # alternate body-row background colors
            cell.set_facecolor(row_colors[k[0]%len(row_colors) ])
    plt.tight_layout()
    if path is not None:
        plt.savefig(path, dpi=600, bbox_inches='tight')
    plt.close()
"matplotlib.pyplot.ylabel",
"numpy.array",
"pandas.read_excel",
"matplotlib.lines.Line2D",
"numpy.divide",
"numpy.mean",
"seaborn.regplot",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.close",
"pandas.DataFrame",
"seaborn.swarmplot",
"matplotlib.pyplot.savefig",
... | [((1115, 1131), 'os.scandir', 'os.scandir', (['path'], {}), '(path)\n', (1125, 1131), False, 'import os\n'), ((3840, 3892), 'pandas.to_numeric', 'pd.to_numeric', (['telo_data.iloc[:, 0]'], {'errors': '"""coerce"""'}), "(telo_data.iloc[:, 0], errors='coerce')\n", (3853, 3892), True, 'import pandas as pd\n'), ((4083, 4114), 'pandas.Series', 'pd.Series', (['telo_data.iloc[:, 0]'], {}), '(telo_data.iloc[:, 0])\n', (4092, 4114), True, 'import pandas as pd\n'), ((5259, 5337), 'pandas.get_dummies', 'pd.get_dummies', (["snake_df['Exposure Status']"], {'prefix': '"""Encoded"""', 'drop_first': '(True)'}), "(snake_df['Exposure Status'], prefix='Encoded', drop_first=True)\n", (5273, 5337), True, 'import pandas as pd\n'), ((6163, 6180), 'pandas.DataFrame', 'pd.DataFrame', (['df1'], {}), '(df1)\n', (6175, 6180), True, 'import pandas as pd\n'), ((6191, 6208), 'pandas.DataFrame', 'pd.DataFrame', (['df2'], {}), '(df2)\n', (6203, 6208), True, 'import pandas as pd\n'), ((8015, 8044), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'top': '(0.88)'}), '(top=0.88)\n', (8034, 8044), True, 'import matplotlib.pyplot as plt\n'), ((8703, 8766), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(2)'], {'figsize': '(12, 5)', 'sharey': '(False)', 'sharex': '(False)'}), '(1, 2, figsize=(12, 5), sharey=False, sharex=False)\n', (8715, 8766), True, 'import matplotlib.pyplot as plt\n'), ((8795, 8937), 'seaborn.regplot', 'sns.regplot', ([], {'x': 'x', 'y': 'y', 'data': 'df', 'ax': 'axes[0]', 'scatter_kws': "{'alpha': 0.8, 'linewidth': 1, 'edgecolor': 'black', 's': df['Age (months)'\n ] * 12}"}), "(x=x, y=y, data=df, ax=axes[0], scatter_kws={'alpha': 0.8,\n 'linewidth': 1, 'edgecolor': 'black', 's': df['Age (months)'] * 12})\n", (8806, 8937), True, 'import seaborn as sns\n'), ((9120, 9243), 'seaborn.regplot', 'sns.regplot', ([], {'x': 'x2', 'y': 'y', 'data': 'df', 'ax': 'axes[1]', 'scatter_kws': "{'alpha': 0.8, 'linewidth': 1, 'edgecolor': 'black', 's': 175}"}), "(x=x2, 
y=y, data=df, ax=axes[1], scatter_kws={'alpha': 0.8,\n 'linewidth': 1, 'edgecolor': 'black', 's': 175})\n", (9131, 9243), True, 'import seaborn as sns\n'), ((14484, 14546), 'matplotlib.pyplot.subplots', 'plt.subplots', (['(1)', '(3)'], {'figsize': '(15, 5)', 'sharey': '(True)', 'sharex': '(False)'}), '(1, 3, figsize=(15, 5), sharey=True, sharex=False)\n', (14496, 14546), True, 'import matplotlib.pyplot as plt\n'), ((14725, 14893), 'seaborn.regplot', 'sns.regplot', ([], {'x': 'x', 'y': 'y', 'data': 'df', 'ax': 'axes[0]', 'color': 'test[4]', 'scatter_kws': "{'alpha': 0.8, 'linewidth': 1, 'edgecolor': 'black', 's': df[dose_x_size] *\n multiplier}"}), "(x=x, y=y, data=df, ax=axes[0], color=test[4], scatter_kws={\n 'alpha': 0.8, 'linewidth': 1, 'edgecolor': 'black', 's': df[dose_x_size\n ] * multiplier})\n", (14736, 14893), True, 'import seaborn as sns\n'), ((14939, 15130), 'seaborn.regplot', 'sns.regplot', ([], {'x': 'x2', 'y': 'y', 'data': "df[df['Sex'] == 'Male']", 'ax': 'axes[1]', 'color': 'test[8]', 'marker': '"""o"""', 'fit_reg': '(False)', 'scatter_kws': "{'alpha': 0.8, 'linewidth': 1, 'edgecolor': 'black', 's': 175}"}), "(x=x2, y=y, data=df[df['Sex'] == 'Male'], ax=axes[1], color=test\n [8], marker='o', fit_reg=False, scatter_kws={'alpha': 0.8, 'linewidth':\n 1, 'edgecolor': 'black', 's': 175})\n", (14950, 15130), True, 'import seaborn as sns\n'), ((15161, 15354), 'seaborn.regplot', 'sns.regplot', ([], {'x': 'x2', 'y': 'y', 'data': "df[df['Sex'] == 'Female']", 'ax': 'axes[1]', 'color': 'test[8]', 'marker': '"""X"""', 'fit_reg': '(False)', 'scatter_kws': "{'alpha': 0.8, 'linewidth': 1, 'edgecolor': 'black', 's': 200}"}), "(x=x2, y=y, data=df[df['Sex'] == 'Female'], ax=axes[1], color=\n test[8], marker='X', fit_reg=False, scatter_kws={'alpha': 0.8,\n 'linewidth': 1, 'edgecolor': 'black', 's': 200})\n", (15172, 15354), True, 'import seaborn as sns\n'), ((15397, 15482), 'seaborn.regplot', 'sns.regplot', ([], {'x': 'x2', 'y': 'y', 'data': 'df', 'ax': 'axes[1]', 
'color': 'test[8]', 'scatter_kws': "{'s': 0}"}), "(x=x2, y=y, data=df, ax=axes[1], color=test[8], scatter_kws={'s': 0}\n )\n", (15408, 15482), True, 'import seaborn as sns\n'), ((15558, 15636), 'matplotlib.lines.Line2D', 'lines.Line2D', (['[]', '[]'], {'color': 'test[8]', 'alpha': '(0.8)', 'marker': '"""o"""', 'mew': '(1)', 'mec': '"""black"""'}), "([], [], color=test[8], alpha=0.8, marker='o', mew=1, mec='black')\n", (15570, 15636), False, 'from matplotlib import lines\n'), ((15648, 15726), 'matplotlib.lines.Line2D', 'lines.Line2D', (['[]', '[]'], {'color': 'test[8]', 'alpha': '(0.8)', 'marker': '"""X"""', 'mew': '(1)', 'mec': '"""black"""'}), "([], [], color=test[8], alpha=0.8, marker='X', mew=1, mec='black')\n", (15660, 15726), False, 'from matplotlib import lines\n'), ((16058, 16165), 'seaborn.boxplot', 'sns.boxplot', ([], {'x': 'x3', 'y': 'y', 'dodge': '(True)', 'palette': 'palette_cust', 'order': "['Male', 'Female']", 'data': 'df', 'ax': 'axes[2]'}), "(x=x3, y=y, dodge=True, palette=palette_cust, order=['Male',\n 'Female'], data=df, ax=axes[2])\n", (16069, 16165), True, 'import seaborn as sns\n'), ((16301, 16473), 'seaborn.swarmplot', 'sns.swarmplot', ([], {'x': 'x3', 'y': 'y', 'dodge': '(True)', 'palette': 'palette_cust', 'order': "['Male', 'Female']", 'data': 'df', 'ax': 'axes[2]', 'size': '(12)', 'edgecolor': '"""black"""', 'linewidth': '(1)'}), "(x=x3, y=y, dodge=True, palette=palette_cust, order=['Male',\n 'Female'], data=df, ax=axes[2], size=12, edgecolor='black', linewidth=1,\n **{'alpha': 0.8})\n", (16314, 16473), True, 'import seaborn as sns\n'), ((17508, 17526), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (17524, 17526), True, 'import matplotlib.pyplot as plt\n'), ((17531, 17629), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""graphs/main figures/{y_name} vs {x} and {x2}.png"""'], {'dpi': '(600)', 'bbox_inches': '"""tight"""'}), "(f'graphs/main figures/{y_name} vs {x} and {x2}.png', dpi=600,\n bbox_inches='tight')\n", 
(17542, 17629), True, 'import matplotlib.pyplot as plt\n'), ((18292, 18323), 'six.iteritems', 'six.iteritems', (['mpl_table._cells'], {}), '(mpl_table._cells)\n', (18305, 18323), False, 'import six\n'), ((18598, 18616), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (18614, 18616), True, 'import matplotlib.pyplot as plt\n'), ((18703, 18714), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (18712, 18714), True, 'import matplotlib.pyplot as plt\n'), ((3008, 3045), 'pandas.concat', 'pd.concat', (['[rsampled, df]'], {'sort': '(False)'}), '([rsampled, df], sort=False)\n', (3017, 3045), True, 'import pandas as pd\n'), ((3332, 3369), 'pandas.concat', 'pd.concat', (['[rsampled, df]'], {'sort': '(False)'}), '([rsampled, df], sort=False)\n', (3341, 3369), True, 'import pandas as pd\n'), ((7472, 7608), 'seaborn.lmplot', 'sns.lmplot', ([], {'x': 'x', 'y': 'y', 'hue': 'hue', 'col': 'col', 'data': 'data', 'logistic': '(True)', 'height': '(5.5)', 'aspect': '(1)', 'scatter_kws': "{'s': 175, 'edgecolor': 'black'}"}), "(x=x, y=y, hue=hue, col=col, data=data, logistic=True, height=5.5,\n aspect=1, scatter_kws={'s': 175, 'edgecolor': 'black'})\n", (7482, 7608), True, 'import seaborn as sns\n'), ((7634, 7755), 'seaborn.lmplot', 'sns.lmplot', ([], {'x': 'x', 'y': 'y', 'hue': 'hue', 'col': 'col', 'data': 'data', 'height': '(5.5)', 'aspect': '(1)', 'scatter_kws': "{'s': 175, 'edgecolor': 'black'}"}), "(x=x, y=y, hue=hue, col=col, data=data, height=5.5, aspect=1,\n scatter_kws={'s': 175, 'edgecolor': 'black'})\n", (7644, 7755), True, 'import seaborn as sns\n'), ((13999, 14011), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (14009, 14011), True, 'import matplotlib.pyplot as plt\n'), ((14020, 14055), 'matplotlib.pyplot.plot', 'plt.plot', (['XX[:, term.feature]', 'pdep'], {}), '(XX[:, term.feature], pdep)\n', (14028, 14055), True, 'import matplotlib.pyplot as plt\n'), ((14064, 14116), 'matplotlib.pyplot.plot', 'plt.plot', (['XX[:, 
term.feature]', 'confi'], {'c': '"""r"""', 'ls': '"""--"""'}), "(XX[:, term.feature], confi, c='r', ls='--')\n", (14072, 14116), True, 'import matplotlib.pyplot as plt\n'), ((14125, 14166), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['f"""{features[i]}"""'], {'fontsize': '(14)'}), "(f'{features[i]}', fontsize=14)\n", (14135, 14166), True, 'import matplotlib.pyplot as plt\n'), ((14175, 14211), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['f"""{target}"""'], {'fontsize': '(14)'}), "(f'{target}', fontsize=14)\n", (14185, 14211), True, 'import matplotlib.pyplot as plt\n'), ((14220, 14278), 'matplotlib.pyplot.title', 'plt.title', (['f"""Functional dependence of Y on X"""'], {'fontsize': '(14)'}), "(f'Functional dependence of Y on X', fontsize=14)\n", (14229, 14278), True, 'import matplotlib.pyplot as plt\n'), ((14287, 14297), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (14295, 14297), True, 'import matplotlib.pyplot as plt\n'), ((18050, 18076), 'matplotlib.pyplot.subplots', 'plt.subplots', ([], {'figsize': 'size'}), '(figsize=size)\n', (18062, 18076), True, 'import matplotlib.pyplot as plt\n'), ((18651, 18698), 'matplotlib.pyplot.savefig', 'plt.savefig', (['path'], {'dpi': '(600)', 'bbox_inches': '"""tight"""'}), "(path, dpi=600, bbox_inches='tight')\n", (18662, 18698), True, 'import matplotlib.pyplot as plt\n'), ((1372, 1450), 'pandas.read_excel', 'pd.read_excel', (['full_name'], {'sheet_name': 'None', 'skiprows': '(4)', 'usecols': '[3]', 'nrows': '(5000)'}), '(full_name, sheet_name=None, skiprows=4, usecols=[3], nrows=5000)\n', (1385, 1450), True, 'import pandas as pd\n'), ((10867, 10901), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {'solver': '"""lbfgs"""'}), "(solver='lbfgs')\n", (10885, 10901), False, 'from sklearn.linear_model import LogisticRegression\n'), ((17998, 18031), 'numpy.array', 'np.array', (['[col_width, row_height]'], {}), '([col_width, row_height])\n', (18006, 18031), True, 'import numpy as np\n'), ((2190, 2230), 
'numpy.divide', 'np.divide', (['sample_data[1]', 'control_value'], {}), '(sample_data[1], control_value)\n', (2199, 2230), True, 'import numpy as np\n'), ((2319, 2359), 'numpy.divide', 'np.divide', (['sample_data[2]', 'control_value'], {}), '(sample_data[2], control_value)\n', (2328, 2359), True, 'import numpy as np\n'), ((17949, 17975), 'numpy.array', 'np.array', (['data.shape[::-1]'], {}), '(data.shape[::-1])\n', (17957, 17975), True, 'import numpy as np\n'), ((17978, 17994), 'numpy.array', 'np.array', (['[0, 1]'], {}), '([0, 1])\n', (17986, 17994), True, 'import numpy as np\n'), ((9866, 9884), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (9882, 9884), False, 'from sklearn.linear_model import LinearRegression\n'), ((10224, 10242), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (10240, 10242), False, 'from sklearn.linear_model import LinearRegression\n'), ((11233, 11251), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (11249, 11251), False, 'from sklearn.linear_model import LinearRegression\n'), ((12949, 12967), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (12965, 12967), False, 'from sklearn.linear_model import LinearRegression\n'), ((1952, 1974), 'numpy.mean', 'np.mean', (['telos_cleaned'], {}), '(telos_cleaned)\n', (1959, 1974), True, 'import numpy as np\n'), ((4024, 4047), 'scipy.stats.zscore', 'stats.zscore', (['telo_data'], {}), '(telo_data)\n', (4036, 4047), False, 'from scipy import stats\n'), ((1845, 1867), 'numpy.mean', 'np.mean', (['telos_cleaned'], {}), '(telos_cleaned)\n', (1852, 1867), True, 'import numpy as np\n')] |
import json
import os
import sys
from collections import OrderedDict
import iotbx.phil
import xia2.Handlers.Streams
from dials.util.options import OptionParser
from jinja2 import ChoiceLoader, Environment, PackageLoader
from xia2.Modules.Report import Report
from xia2.XIA2Version import Version
phil_scope = iotbx.phil.parse(
"""\
title = 'xia2 report'
.type = str
prefix = 'xia2'
.type = str
log_include = None
.type = path
include scope xia2.Modules.Analysis.phil_scope
json {
indent = None
.type = int(value_min=0)
}
""",
process_includes=True,
)
help_message = """
"""
def run(args):
    """Build JSON and HTML xia2 reports from a scaled unmerged MTZ file.

    args: command-line arguments; any unhandled positional is taken as the
    MTZ path. Writes <prefix>-report.json and <prefix>-report.html into the
    current directory.
    """
    usage = "xia2.report [options] scaled_unmerged.mtz"
    parser = OptionParser(
        usage=usage, phil=phil_scope, check_format=False, epilog=help_message
    )
    # return_unhandled=True leaves the MTZ path in `args`.
    params, options, args = parser.parse_args(
        show_diff_phil=True, return_unhandled=True
    )
    if len(args) == 0:
        parser.print_help()
        return
    unmerged_mtz = args[0]
    report = Report.from_unmerged_mtz(unmerged_mtz, params, report_dir=".")
    # xtriage: run if requested; on failure, disable it and continue without.
    xtriage_success, xtriage_warnings, xtriage_danger = None, None, None
    if params.xtriage_analysis:
        try:
            xtriage_success, xtriage_warnings, xtriage_danger = report.xtriage_report()
        except Exception as e:
            params.xtriage_analysis = False
            print("Exception runnning xtriage:")
            print(e)
    json_data = {}
    if params.xtriage_analysis:
        json_data["xtriage"] = xtriage_success + xtriage_warnings + xtriage_danger
    (
        overall_stats_table,
        merging_stats_table,
        stats_plots,
    ) = report.resolution_plots_and_stats()
    # Collect every plot payload into one dict keyed by plot name.
    json_data.update(stats_plots)
    json_data.update(report.batch_dependent_plots())
    json_data.update(report.intensity_stats_plots(run_xtriage=False))
    json_data.update(report.pychef_plots())
    # Per-resolution-shell graphs (only those actually produced above).
    resolution_graphs = OrderedDict(
        (k, json_data[k])
        for k in (
            "cc_one_half",
            "i_over_sig_i",
            "second_moments",
            "wilson_intensity_plot",
            "completeness",
            "multiplicity_vs_resolution",
        )
        if k in json_data
    )
    # Per-batch graphs; radiation-damage plots are included only on request.
    if params.include_radiation_damage:
        batch_graphs = OrderedDict(
            (k, json_data[k])
            for k in (
                "scale_rmerge_vs_batch",
                "i_over_sig_i_vs_batch",
                "completeness_vs_dose",
                "rcp_vs_dose",
                "scp_vs_dose",
                "rd_vs_batch_difference",
            )
        )
    else:
        batch_graphs = OrderedDict(
            (k, json_data[k])
            for k in ("scale_rmerge_vs_batch", "i_over_sig_i_vs_batch")
        )
    misc_graphs = OrderedDict(
        (k, json_data[k])
        for k in ("cumulative_intensity_distribution", "l_test", "multiplicities")
        if k in json_data
    )
    # Multiplicity plots are embedded as images rather than plot JSON.
    for k, v in report.multiplicity_plots().items():
        misc_graphs[k] = {"img": v}
    styles = {}
    for axis in ("h", "k", "l"):
        styles["multiplicity_%s" % axis] = "square-plot"
    # Look for templates in xia2 first, then fall back to dials.
    loader = ChoiceLoader(
        [PackageLoader("xia2", "templates"), PackageLoader("dials", "templates")]
    )
    env = Environment(loader=loader)
    if params.log_include:
        with open(params.log_include, "rb") as fh:
            log_text = fh.read().decode("utf-8")
    else:
        log_text = ""
    template = env.get_template("report.html")
    html = template.render(
        page_title=params.title,
        filename=os.path.abspath(unmerged_mtz),
        space_group=report.intensities.space_group_info().symbol_and_number(),
        unit_cell=str(report.intensities.unit_cell()),
        mtz_history=[h.strip() for h in report.mtz_object.history()],
        xtriage_success=xtriage_success,
        xtriage_warnings=xtriage_warnings,
        xtriage_danger=xtriage_danger,
        overall_stats_table=overall_stats_table,
        merging_stats_table=merging_stats_table,
        cc_half_significance_level=params.cc_half_significance_level,
        resolution_graphs=resolution_graphs,
        batch_graphs=batch_graphs,
        misc_graphs=misc_graphs,
        styles=styles,
        xia2_version=Version,
        log_text=log_text,
    )
    with open("%s-report.json" % params.prefix, "w") as fh:
        json.dump(json_data, fh, indent=params.json.indent)
    with open("%s-report.html" % params.prefix, "wb") as fh:
        fh.write(html.encode("utf-8", "xmlcharrefreplace"))
def run_with_log():
    """Entry point: configure xia2 logging, then run the report on CLI args."""
    xia2.Handlers.Streams.setup_logging(
        logfile="xia2.report.txt", debugfile="xia2.report-debug.txt"
    )
    cli_args = sys.argv[1:]
    run(cli_args)
| [
"collections.OrderedDict",
"jinja2.Environment",
"xia2.Modules.Report.Report.from_unmerged_mtz",
"jinja2.PackageLoader",
"os.path.abspath",
"dials.util.options.OptionParser",
"json.dump"
] | [((684, 772), 'dials.util.options.OptionParser', 'OptionParser', ([], {'usage': 'usage', 'phil': 'phil_scope', 'check_format': '(False)', 'epilog': 'help_message'}), '(usage=usage, phil=phil_scope, check_format=False, epilog=\n help_message)\n', (696, 772), False, 'from dials.util.options import OptionParser\n'), ((995, 1057), 'xia2.Modules.Report.Report.from_unmerged_mtz', 'Report.from_unmerged_mtz', (['unmerged_mtz', 'params'], {'report_dir': '"""."""'}), "(unmerged_mtz, params, report_dir='.')\n", (1019, 1057), False, 'from xia2.Modules.Report import Report\n'), ((1917, 2103), 'collections.OrderedDict', 'OrderedDict', (["((k, json_data[k]) for k in ('cc_one_half', 'i_over_sig_i',\n 'second_moments', 'wilson_intensity_plot', 'completeness',\n 'multiplicity_vs_resolution') if k in json_data)"], {}), "((k, json_data[k]) for k in ('cc_one_half', 'i_over_sig_i',\n 'second_moments', 'wilson_intensity_plot', 'completeness',\n 'multiplicity_vs_resolution') if k in json_data)\n", (1928, 2103), False, 'from collections import OrderedDict\n'), ((2766, 2893), 'collections.OrderedDict', 'OrderedDict', (["((k, json_data[k]) for k in ('cumulative_intensity_distribution', 'l_test',\n 'multiplicities') if k in json_data)"], {}), "((k, json_data[k]) for k in ('cumulative_intensity_distribution',\n 'l_test', 'multiplicities') if k in json_data)\n", (2777, 2893), False, 'from collections import OrderedDict\n'), ((3243, 3269), 'jinja2.Environment', 'Environment', ([], {'loader': 'loader'}), '(loader=loader)\n', (3254, 3269), False, 'from jinja2 import ChoiceLoader, Environment, PackageLoader\n'), ((2273, 2451), 'collections.OrderedDict', 'OrderedDict', (["((k, json_data[k]) for k in ('scale_rmerge_vs_batch',\n 'i_over_sig_i_vs_batch', 'completeness_vs_dose', 'rcp_vs_dose',\n 'scp_vs_dose', 'rd_vs_batch_difference'))"], {}), "((k, json_data[k]) for k in ('scale_rmerge_vs_batch',\n 'i_over_sig_i_vs_batch', 'completeness_vs_dose', 'rcp_vs_dose',\n 'scp_vs_dose', 
'rd_vs_batch_difference'))\n", (2284, 2451), False, 'from collections import OrderedDict\n'), ((2622, 2716), 'collections.OrderedDict', 'OrderedDict', (["((k, json_data[k]) for k in ('scale_rmerge_vs_batch', 'i_over_sig_i_vs_batch'))"], {}), "((k, json_data[k]) for k in ('scale_rmerge_vs_batch',\n 'i_over_sig_i_vs_batch'))\n", (2633, 2716), False, 'from collections import OrderedDict\n'), ((4350, 4401), 'json.dump', 'json.dump', (['json_data', 'fh'], {'indent': 'params.json.indent'}), '(json_data, fh, indent=params.json.indent)\n', (4359, 4401), False, 'import json\n'), ((3154, 3188), 'jinja2.PackageLoader', 'PackageLoader', (['"""xia2"""', '"""templates"""'], {}), "('xia2', 'templates')\n", (3167, 3188), False, 'from jinja2 import ChoiceLoader, Environment, PackageLoader\n'), ((3190, 3225), 'jinja2.PackageLoader', 'PackageLoader', (['"""dials"""', '"""templates"""'], {}), "('dials', 'templates')\n", (3203, 3225), False, 'from jinja2 import ChoiceLoader, Environment, PackageLoader\n'), ((3556, 3585), 'os.path.abspath', 'os.path.abspath', (['unmerged_mtz'], {}), '(unmerged_mtz)\n', (3571, 3585), False, 'import os\n')] |
from django.conf.urls import patterns, include, url
from django.conf.urls.static import static
from django.conf import settings
from django.views.generic import TemplateView
from . import views
# Uncomment the next two lines to enable the admin:
from django.contrib import admin
# Populate the admin site from each installed app's admin module.
admin.autodiscover()
# NOTE(review): patterns() and dotted-string view references were removed in
# Django 1.10 — this URLconf targets an older Django release; confirm the
# project's pinned Django version before modernizing.
urlpatterns = patterns('',
    # site root: static base template
    url(r'^$', TemplateView.as_view(template_name='base.html')),
    url(r'^admin/', include(admin.site.urls)),
    # login: built-in auth view rendered with the project's login.html
    url(r'^login/$', 'django.contrib.auth.views.login', {'template_name': 'login.html'}),
    # home: project view
    url(r'^home/$', views.home),
)
# Uncomment the next line to serve media files in dev.
# urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"django.views.generic.TemplateView.as_view",
"django.conf.urls.include",
"django.conf.urls.url",
"django.contrib.admin.autodiscover"
] | [((280, 300), 'django.contrib.admin.autodiscover', 'admin.autodiscover', ([], {}), '()\n', (298, 300), False, 'from django.contrib import admin\n'), ((462, 549), 'django.conf.urls.url', 'url', (['"""^login/$"""', '"""django.contrib.auth.views.login"""', "{'template_name': 'login.html'}"], {}), "('^login/$', 'django.contrib.auth.views.login', {'template_name':\n 'login.html'})\n", (465, 549), False, 'from django.conf.urls import patterns, include, url\n'), ((563, 589), 'django.conf.urls.url', 'url', (['"""^home/$"""', 'views.home'], {}), "('^home/$', views.home)\n", (566, 589), False, 'from django.conf.urls import patterns, include, url\n'), ((344, 391), 'django.views.generic.TemplateView.as_view', 'TemplateView.as_view', ([], {'template_name': '"""base.html"""'}), "(template_name='base.html')\n", (364, 391), False, 'from django.views.generic import TemplateView\n'), ((420, 444), 'django.conf.urls.include', 'include', (['admin.site.urls'], {}), '(admin.site.urls)\n', (427, 444), False, 'from django.conf.urls import patterns, include, url\n')] |
import os
import datetime
from pathlib import Path
import pandas as pd
import luigi
PROCESSED_DIR = 'processed'
ROLLUP_DIR = 'rollups'
class PrepareDataTask(luigi.Task):
    """Load trip records that have not been processed yet and write them as a
    parquet batch under ``PROCESSED_DIR``.

    A plain-text checkpoint file (``last_processed_id.txt``) records the
    highest record id handled so far; records with a larger id are treated as
    new on the next run.
    """

    def __init__(self):
        super().__init__()
        self.last_processed_id = 0
        if os.path.exists('last_processed_id.txt'):
            try:
                with open('last_processed_id.txt', 'r') as f:
                    self.last_processed_id = int(f.read())
            # Narrowed from a blanket `except Exception`: only an unreadable
            # file or a non-integer payload should trigger the fallback.
            except (OSError, ValueError) as e:
                print(f'Error reading last_processed_id.txt: {e}')
        self.last_id = self.last_processed_id

        self.df = pd.read_json('test_data/trip_data.json')
        # Simulate only getting the latest (unprocessed) records.
        self.df = self.df[self.df['id'] > self.last_processed_id]
        if len(self.df):
            # assumes records are ordered by id — TODO confirm with data source
            self.last_id = int(self.df.iloc[-1]['id'])

    def _output_path(self):
        # Single source of truth for the batch path (was duplicated in
        # run() and output()); named after the highest id it contains.
        return f'{PROCESSED_DIR}/processed_{self.last_id}.parquet'

    def requires(self):
        # No upstream dependencies.
        return None

    def run(self):
        # exist_ok avoids the check-then-create race of the old
        # `if not exists: makedirs` sequence.
        os.makedirs(PROCESSED_DIR, exist_ok=True)
        self.df.to_parquet(self._output_path())
        # Persist the checkpoint only after the batch was written successfully.
        with open('last_processed_id.txt', 'w') as f:
            f.write(f'{self.last_id}')

    def output(self):
        return luigi.LocalTarget(self._output_path())
class RollupTask(luigi.Task):
    """Aggregate every processed trip batch into a per-day travel-time rollup."""

    # Default evaluated once at import time — one rollup per calendar day.
    date_param = luigi.DateParameter(default=datetime.date.today())
    rollup_dir = Path(ROLLUP_DIR)

    def _output_path(self):
        # One rollup file per parameter date.
        return f'{ROLLUP_DIR}/rollup_{self.date_param}.parquet'

    def requires(self):
        return PrepareDataTask()

    def run(self):
        # Make sure the destination directory exists before writing.
        Path(ROLLUP_DIR).mkdir(parents=True, exist_ok=True)
        batch_frames = [
            pd.read_parquet(batch_file)
            for batch_file in Path(PROCESSED_DIR).glob('*.parquet')
        ]
        combined = pd.concat(batch_frames)
        # Average travel times for each origin/destination pair.
        averaged = (
            combined
            .groupby(['origin_id', 'destination_id'])['travel_time']
            .mean()
            .to_frame()
        )
        averaged.to_parquet(self._output_path())

    def output(self):
        return luigi.LocalTarget(self._output_path())
# Script entry point: delegate task selection/execution to luigi's CLI runner.
if __name__ == '__main__':
    luigi.run()
| [
"luigi.run",
"os.path.exists",
"pandas.read_parquet",
"os.makedirs",
"pathlib.Path",
"datetime.date.today",
"luigi.LocalTarget",
"pandas.read_json"
] | [((1671, 1687), 'pathlib.Path', 'Path', (['ROLLUP_DIR'], {}), '(ROLLUP_DIR)\n', (1675, 1687), False, 'from pathlib import Path\n'), ((2395, 2406), 'luigi.run', 'luigi.run', ([], {}), '()\n', (2404, 2406), False, 'import luigi\n'), ((270, 309), 'os.path.exists', 'os.path.exists', (['"""last_processed_id.txt"""'], {}), "('last_processed_id.txt')\n", (284, 309), False, 'import os\n'), ((610, 650), 'pandas.read_json', 'pd.read_json', (['"""test_data/trip_data.json"""'], {}), "('test_data/trip_data.json')\n", (622, 650), True, 'import pandas as pd\n'), ((1523, 1553), 'luigi.LocalTarget', 'luigi.LocalTarget', (['output_path'], {}), '(output_path)\n', (1540, 1553), False, 'import luigi\n'), ((1958, 1977), 'pathlib.Path', 'Path', (['PROCESSED_DIR'], {}), '(PROCESSED_DIR)\n', (1962, 1977), False, 'from pathlib import Path\n'), ((935, 964), 'os.path.exists', 'os.path.exists', (['PROCESSED_DIR'], {}), '(PROCESSED_DIR)\n', (949, 964), False, 'import os\n'), ((978, 1004), 'os.makedirs', 'os.makedirs', (['PROCESSED_DIR'], {}), '(PROCESSED_DIR)\n', (989, 1004), False, 'import os\n'), ((1631, 1652), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (1650, 1652), False, 'import datetime\n'), ((1874, 1900), 'os.path.exists', 'os.path.exists', (['ROLLUP_DIR'], {}), '(ROLLUP_DIR)\n', (1888, 1900), False, 'import os\n'), ((1914, 1937), 'os.makedirs', 'os.makedirs', (['ROLLUP_DIR'], {}), '(ROLLUP_DIR)\n', (1925, 1937), False, 'import os\n'), ((2014, 2043), 'pandas.read_parquet', 'pd.read_parquet', (['parquet_file'], {}), '(parquet_file)\n', (2029, 2043), True, 'import pandas as pd\n')] |
import time
from datetime import datetime
import numpy as np
from matplotlib import pyplot as plt
from matplotlib.dates import epoch2num
import device_factory
if __name__ == '__main__':
    # Build a fleet of identical CHP devices; ids are reused as random seeds —
    # presumably (see device_factory) — TODO confirm.
    amount = 50
    devices = []
    for i in range(amount):
        device = device_factory.ecopower_4(i, i)
        devices.append(device)
    # Simulation horizon in minutes since the epoch (one day: 2010-01-02).
    start = int(time.mktime(datetime(2010, 1, 2).timetuple()) // 60)
    end = int(time.mktime(datetime(2010, 1, 3).timetuple()) // 60)
    # Sampling starts 15 * 24 minutes into the day; schedules span 16 slots.
    sample_time = start + 15 * 24
    sample_dur = 16
    # Per-device traces: electrical power, storage temperature, heat demand.
    P = [[] for d in devices]
    T = [[] for d in devices]
    Th = [[] for d in devices]
    # Free-running simulation up to the sampling point.
    for now in range(start, sample_time):
        for idx, device in enumerate(devices):
            device.step(now)
            P[idx].append(device.components.consumer.P)
            T[idx].append(device.components.storage.T)
            Th[idx].append(device.components.heatsink.in_heat)
    # Draw 100 candidate schedules of sample_dur slots from each device.
    samples = []
    for d in devices:
        # d.components.sampler.setpoint_density = 0.1
        samples.append(d.components.sampler.sample(100, sample_dur))
    # samples = [d.components.sampler.sample(100, sample_dur) for d in devices]
    schedule = np.zeros(sample_dur)
    for idx, device in enumerate(devices):
        # min_schedule_idx = np.argmin(np.sum(np.abs(samples[idx]), axis=1))
        # device.components.scheduler.schedule = samples[idx][min_schedule_idx]
        # schedule += samples[idx][min_schedule_idx]
        # Pick each device's highest-energy candidate and assign it.
        max_schedule_idx = np.argmax(np.sum(np.abs(samples[idx]), axis=1))
        device.components.scheduler.schedule = samples[idx][max_schedule_idx]
        schedule += samples[idx][max_schedule_idx]
    # Continue the simulation with the assigned schedules active.
    for now in range(sample_time, end):
        for idx, device in enumerate(devices):
            device.step(now)
            P[idx].append(device.components.consumer.P)
            T[idx].append(device.components.storage.T)
            Th[idx].append(device.components.heatsink.in_heat)
    # Aggregate over the fleet: sum powers, average the temperature.
    P = np.sum(P, axis=0)
    Th = np.sum(Th, axis=0)
    T = np.mean(T, axis=0)
    # Upper panel: aggregated thermal output, averaged per 15-minute slot.
    ax = plt.subplot(2, 1, 1)
    ax.grid(True)
    tz = 60  # timezone deviation in minutes
    x = epoch2num(np.arange((start + tz) * 60, (end + tz) * 60, 60))
    Th = np.reshape(Th, (len(x) // 15, 15)).mean(axis=1)
    ax.plot_date(x[::15], Th, color='magenta', label='P$_{th,out}$ (kW)', ls='-',
                 marker=None)
    ax.legend()
    # Lower panel: electrical power, the chosen schedule, and (on a twin axis)
    # the mean storage temperature.
    ax = plt.subplot(2, 1, 2, sharex=ax)
    ax.grid(True)
    l1 = ax.plot_date(x, P, label='P$_{el}$ (kW)', ls='-', marker=None)
    sched_x = epoch2num(np.arange(
        (sample_time + tz) * 60, ((sample_time + tz) + sample_dur * 15) * 60, 60))
    l2 = ax.plot_date(sched_x[::15], schedule, color='r', label='Schedule',
                      ls='-', marker=None)
    ax = plt.twinx()
    l3 = ax.plot_date(x, T, color='g', label='T (\\textdegree C)', ls='-', marker=None)
    # Merge the legends of both y-axes into a single box.
    lines = l1 + l2 + l3
    labels = [l.get_label() for l in lines]
    ax.legend(lines, labels)
    plt.gcf().autofmt_xdate()
    # # Samples plot
    # fig, ax = plt.subplots(len(samples))
    # if len(samples) == 1:
    #     ax = [ax]
    # for i, sample in enumerate(samples):
    #     t = np.arange(len(sample[0]))
    #     for s in sample:
    #         ax[i].plot(t, s)
    plt.show()
| [
"datetime.datetime",
"numpy.mean",
"device_factory.ecopower_4",
"numpy.abs",
"matplotlib.pyplot.gcf",
"matplotlib.pyplot.twinx",
"numpy.sum",
"numpy.zeros",
"matplotlib.pyplot.subplot",
"numpy.arange",
"matplotlib.pyplot.show"
] | [((1207, 1227), 'numpy.zeros', 'np.zeros', (['sample_dur'], {}), '(sample_dur)\n', (1215, 1227), True, 'import numpy as np\n'), ((2003, 2020), 'numpy.sum', 'np.sum', (['P'], {'axis': '(0)'}), '(P, axis=0)\n', (2009, 2020), True, 'import numpy as np\n'), ((2031, 2049), 'numpy.sum', 'np.sum', (['Th'], {'axis': '(0)'}), '(Th, axis=0)\n', (2037, 2049), True, 'import numpy as np\n'), ((2059, 2077), 'numpy.mean', 'np.mean', (['T'], {'axis': '(0)'}), '(T, axis=0)\n', (2066, 2077), True, 'import numpy as np\n'), ((2090, 2110), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(1)'], {}), '(2, 1, 1)\n', (2101, 2110), True, 'from matplotlib import pyplot as plt\n'), ((2440, 2471), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(2)', '(1)', '(2)'], {'sharex': 'ax'}), '(2, 1, 2, sharex=ax)\n', (2451, 2471), True, 'from matplotlib import pyplot as plt\n'), ((2809, 2820), 'matplotlib.pyplot.twinx', 'plt.twinx', ([], {}), '()\n', (2818, 2820), True, 'from matplotlib import pyplot as plt\n'), ((3314, 3324), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (3322, 3324), True, 'from matplotlib import pyplot as plt\n'), ((282, 313), 'device_factory.ecopower_4', 'device_factory.ecopower_4', (['i', 'i'], {}), '(i, i)\n', (307, 313), False, 'import device_factory\n'), ((2195, 2244), 'numpy.arange', 'np.arange', (['((start + tz) * 60)', '((end + tz) * 60)', '(60)'], {}), '((start + tz) * 60, (end + tz) * 60, 60)\n', (2204, 2244), True, 'import numpy as np\n'), ((2589, 2675), 'numpy.arange', 'np.arange', (['((sample_time + tz) * 60)', '((sample_time + tz + sample_dur * 15) * 60)', '(60)'], {}), '((sample_time + tz) * 60, (sample_time + tz + sample_dur * 15) * \n 60, 60)\n', (2598, 2675), True, 'import numpy as np\n'), ((3016, 3025), 'matplotlib.pyplot.gcf', 'plt.gcf', ([], {}), '()\n', (3023, 3025), True, 'from matplotlib import pyplot as plt\n'), ((1532, 1552), 'numpy.abs', 'np.abs', (['samples[idx]'], {}), '(samples[idx])\n', (1538, 1552), True, 'import numpy as 
np\n'), ((379, 399), 'datetime.datetime', 'datetime', (['(2010)', '(1)', '(2)'], {}), '(2010, 1, 2)\n', (387, 399), False, 'from datetime import datetime\n'), ((447, 467), 'datetime.datetime', 'datetime', (['(2010)', '(1)', '(3)'], {}), '(2010, 1, 3)\n', (455, 467), False, 'from datetime import datetime\n')] |
"""
Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
SPDX-License-Identifier: MIT-0
"""
import boto3
import copy
import unittest
from botocore.stub import ANY
from cfn_policy_validator.tests import account_config, offline_only, only_run_for_end_to_end
from cfn_policy_validator.tests.boto_mocks import mock_test_setup, BotoResponse, get_test_mode, TEST_MODE
from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, \
MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, \
MockTimeout, MockValidateResourcePolicyFinding
from cfn_policy_validator.validation.validator import validate_parser_output, Validator
from cfn_policy_validator.application_error import ApplicationError
from cfn_policy_validator.parsers.output import Output, Policy, Resource
# SQS resource policy whose only principal is the account under test, so an
# access preview should report no external-access findings.
resource_policy_with_no_findings = {
	'Version': '2012-10-17',
	'Statement': [
		{
			'Effect': 'Allow',
			'Action': '*',
			'Principal': {
				'AWS': account_config.account_id
			},
			'Resource': f'arn:aws:sqs:{account_config.region}:{account_config.account_id}:resource1'
		}
	]
}

# Lambda permissions policy with an empty Principal object, which policy
# validation flags as an EMPTY_OBJECT_PRINCIPAL suggestion.
lambda_permissions_policy_with_findings = {
	"Version": "2012-10-17",
	"Statement": [{
		"Effect": "Allow",
		"Principal": {},
		"Action": "lambda:InvokeFunction",
		"Resource": f"arn:aws:lambda:{account_config.region}:{account_config.account_id}:function:my-function"
	}]
}
class BaseResourcePolicyTest(unittest.TestCase):
	"""Shared helpers for resource-policy validation tests: builds parser
	output, manages Access Analyzer archive rules (end-to-end mode only), and
	provides finding assertions."""

	def setUp(self):
		self.output = Output(account_config)

	def add_resources_to_output(self, resource_type, resource_policy, resource_policy_2=None, configuration_1=None, configuration_2=None):
		"""Populate self.output with two resources of the given type.

		The second resource reuses ``resource_policy`` when
		``resource_policy_2`` is None. Policies are deep-copied so tests can
		safely share the module-level dict literals.
		"""
		if resource_policy_2 is None:
			resource_policy_2 = resource_policy
		policy1 = Policy('policy1', copy.deepcopy(resource_policy))
		resource1 = Resource('resource1', resource_type, policy1, configuration_1)
		policy2 = Policy('policy2', copy.deepcopy(resource_policy_2))
		resource2 = Resource('resource2', resource_type, policy2, configuration_2)
		self.output.Resources = [
			resource1,
			resource2
		]

	@only_run_for_end_to_end
	def create_archive_rule(self, resource_type_to_archive):
		"""Create an archive rule on the account's active analyzer that archives
		findings for ``resource_type_to_archive`` (end-to-end runs only)."""
		session = boto3.Session(region_name=account_config.region)
		self.client = session.client('accessanalyzer')
		response = self.client.list_analyzers(type='ACCOUNT')
		# Use the first ACTIVE account analyzer; raises StopIteration if none.
		self.actual_analyzer_name = next((analyzer['name'] for analyzer in response['analyzers'] if analyzer['status'] == 'ACTIVE'))
		self.archive_rule_name = 'IgnoreRoleFindings'
		self.client.create_archive_rule(
			analyzerName=self.actual_analyzer_name,
			ruleName='IgnoreRoleFindings',
			filter={
				'resourceType': {
					'eq': [resource_type_to_archive]
				}
			}
		)

	@only_run_for_end_to_end
	def delete_archive_rule(self):
		# Clean up the rule created by create_archive_rule (end-to-end only).
		self.client.delete_archive_rule(analyzerName=self.actual_analyzer_name, ruleName=self.archive_rule_name)

	def assert_finding_is_equal(self, actual_finding, expected_policy_name, expected_resource_name, expected_code):
		"""Assert one finding matches the expected policy name, resource name
		and finding code."""
		self.assertEqual(expected_policy_name, actual_finding.policyName)
		self.assertEqual(expected_resource_name, actual_finding.resourceName)
		self.assertEqual(expected_code, actual_finding.code)

	def assert_has_findings(self, findings, errors=0, security_warnings=0, warnings=0, suggestions=0):
		"""Assert the exact count of findings in every severity bucket."""
		self.assertEqual(errors, len(findings.errors))
		self.assertEqual(security_warnings, len(findings.security_warnings))
		self.assertEqual(warnings, len(findings.warnings))
		self.assertEqual(suggestions, len(findings.suggestions))
class WhenValidatingResources(BaseResourcePolicyTest):
	"""Validator behavior around access-preview failures, missing analyzers,
	and resource types that access previews do not support."""

	def setUp(self):
		self.output = Output(account_config)

	@mock_access_analyzer_resource_setup(
		MockUnknownError()
	)
	@offline_only
	def test_unknown_access_preview_failure(self):
		# An access preview that fails with an unknown reason surfaces as an
		# ApplicationError naming the resource and the reason.
		policy = Policy('ResourcePolicy', copy.deepcopy(resource_policy_with_no_findings))
		resources = [
			Resource('resource1', 'AWS::SQS::Queue', policy)
		]
		validator = Validator(account_config.account_id, account_config.region, account_config.partition)
		with self.assertRaises(ApplicationError) as cm:
			validator.validate_resources(resources)
		self.assertEqual('Failed to create access preview for resource1. Reason: UNKNOWN_ERROR', str(cm.exception))

	@mock_access_analyzer_resource_setup(
		MockTimeout()
	)
	@offline_only
	def test_unknown_access_preview_timeout(self):
		# A preview stuck in CREATING times out after the configured number of
		# polling attempts (lowered to 2 to keep the test fast).
		policy = Policy('ResourcePolicy', copy.deepcopy(resource_policy_with_no_findings))
		resources = [
			Resource('resource1', 'AWS::SQS::Queue', policy)
		]
		validator = Validator(account_config.account_id, account_config.region, account_config.partition)
		validator.maximum_number_of_access_preview_attempts = 2
		with self.assertRaises(ApplicationError) as cm:
			validator.validate_resources(resources)
		self.assertEqual('Timed out after 5 minutes waiting for access analyzer preview to create.', str(cm.exception))

	@mock_test_setup(
		accessanalyzer=[
			BotoResponse(
				method='list_analyzers',
				service_response={'analyzers': []},
				expected_params={'type': 'ACCOUNT'}
			),
			BotoResponse(
				method='create_analyzer',
				service_response={'arn': 'arn:aws:access-analyzer:us-east-1:123456789123:analyzer/MyAnalyzer'},
				expected_params={'analyzerName': ANY, 'type': 'ACCOUNT'}
			)
		],
		assert_no_pending_responses=True
	)
	def test_if_no_analyzer_exists_in_account(self):
		# With no account analyzer present, the validator creates one itself.
		validator = Validator(account_config.account_id, account_config.region, account_config.partition)
		validator.validate_resources([])

	@mock_access_analyzer_resource_setup(
		MockValidateResourcePolicyFinding(code='EMPTY_OBJECT_PRINCIPAL', finding_type=FINDING_TYPE.SUGGESTION)
	)
	def test_with_resource_type_that_is_not_supported_by_access_previews(self):
		# Unsupported types (e.g. Lambda) fall back to plain policy validation.
		output = Output(account_config)
		policy = Policy('PermissionsPolicy', copy.deepcopy(lambda_permissions_policy_with_findings))
		resource = Resource('resource1', 'Lambda', policy)
		output.Resources = [resource]
		findings = validate_parser_output(output)
		self.assert_has_findings(findings, suggestions=1)
		self.assert_finding_is_equal(
			actual_finding=findings.suggestions[0],
			expected_policy_name='PermissionsPolicy',
			expected_resource_name='resource1',
			expected_code='EMPTY_OBJECT_PRINCIPAL'
		)
class WhenValidatingResourcesWithNonActiveFindings(BaseResourcePolicyTest):
	"""Findings archived by an analyzer archive rule must be excluded from the
	reported results; only ACTIVE findings count."""

	def setUp(self):
		self.output = Output(account_config)
		# Archive every KMS key finding so only the SQS finding stays active.
		self.create_archive_rule(resource_type_to_archive='AWS::KMS::Key')

	def tearDown(self):
		self.delete_archive_rule()

	@mock_access_analyzer_resource_setup(
		MockAccessPreviewFinding(),
		MockAccessPreviewFinding(finding_status='ARCHIVED')
	)
	def test_output_only_includes_active_findings(self):
		self.add_resources_to_output('AWS::SQS::Queue', sqs_queue_policy_that_allows_external_access)
		policy1 = Policy('policy1', copy.deepcopy(sqs_queue_policy_that_allows_external_access))
		resource1 = Resource('resource1', 'AWS::SQS::Queue', policy1)
		policy2 = Policy('policy2', copy.deepcopy(kms_key_policy_that_allows_external_access))
		resource2 = Resource('resource2', 'AWS::KMS::Key', policy2)
		self.output.Resources = [resource1, resource2]
		findings = validate_parser_output(self.output)
		# Only the non-archived SQS finding should be reported.
		self.assert_has_findings(findings, security_warnings=1)
		self.assert_finding_is_equal(
			actual_finding=findings.security_warnings[0],
			expected_policy_name='policy1',
			expected_resource_name='resource1',
			expected_code='EXTERNAL_PRINCIPAL'
		)

	@mock_access_analyzer_resource_setup(
		MockAccessPreviewFinding(finding_status='ARCHIVED'),
		MockAccessPreviewFinding(finding_status='ARCHIVED')
	)
	def test_output_does_not_include_any_findings_when_all_are_archived(self):
		self.add_resources_to_output('AWS::KMS::Key', kms_key_policy_that_allows_external_access)
		findings = validate_parser_output(self.output)
		self.assert_has_findings(findings, security_warnings=0)
# Wildcard principal: any AWS account may send messages (external access).
sqs_queue_policy_that_allows_external_access = {
	"Version": "2012-10-17",
	"Statement": [{
		"Effect": "Allow",
		"Principal": {
			"AWS": ["*"]
		},
		"Action": "sqs:SendMessage",
		"Resource": "*"
	}]
}

# Empty Principal object — triggers an EMPTY_OBJECT_PRINCIPAL suggestion.
sqs_queue_policy_with_findings = {
	"Version": "2012-10-17",
	"Statement": [{
		"Effect": "Allow",
		"Principal": {},
		"Action": "sqs:SendMessage",
		"Resource": "*"
	}]
}

# Principal limited to the test account — no findings expected.
sqs_queue_policy_with_no_findings = {
	"Version": "2012-10-17",
	"Statement": [{
		"Effect": "Allow",
		"Principal": {
			"AWS": [f'{account_config.account_id}']
		},
		"Action": "sqs:SendMessage",
		"Resource": "*"
	}]
}

# Malformed policy (Effect is not a string) used to exercise the
# invalid-configuration error path.
sqs_queue_invalid_policy = {
	"Version": "2012-10-17",
	"Statement": [{
		"Effect": {"not": "valid"},
		"Principal": {
			"AWS": [f'{account_config.account_id}']
		},
		"Action": "sqs:SendMessage",
		"Resource": "*"
	}]
}
class WhenValidatingSqsQueuePolicy(BaseResourcePolicyTest):
	"""Access-preview validation of SQS queue resource policies."""

	@mock_access_analyzer_resource_setup(
		MockAccessPreviewFinding(),
		MockAccessPreviewFinding()
	)
	def test_with_sqs_policy_that_allows_external_access(self):
		# Wildcard principal -> EXTERNAL_PRINCIPAL warning for both resources.
		self.add_resources_to_output('AWS::SQS::Queue', sqs_queue_policy_that_allows_external_access)
		findings = validate_parser_output(self.output)
		self.assert_has_findings(findings, security_warnings=2)
		self.assert_finding_is_equal(
			actual_finding=findings.security_warnings[0],
			expected_policy_name='policy1',
			expected_resource_name='resource1',
			expected_code='EXTERNAL_PRINCIPAL'
		)
		self.assert_finding_is_equal(
			actual_finding=findings.security_warnings[1],
			expected_policy_name='policy2',
			expected_resource_name='resource2',
			expected_code='EXTERNAL_PRINCIPAL'
		)

	@mock_access_analyzer_resource_setup(
		MockValidateResourcePolicyFinding(code='EMPTY_OBJECT_PRINCIPAL', finding_type=FINDING_TYPE.SUGGESTION),
		MockValidateResourcePolicyFinding(code='EMPTY_OBJECT_PRINCIPAL', finding_type=FINDING_TYPE.SUGGESTION)
	)
	def test_with_sqs_policy_with_findings(self):
		# Empty Principal -> EMPTY_OBJECT_PRINCIPAL suggestion per resource.
		self.add_resources_to_output('AWS::SQS::Queue', sqs_queue_policy_with_findings)
		findings = validate_parser_output(self.output)
		self.assert_has_findings(findings, suggestions=2)
		self.assert_finding_is_equal(
			actual_finding=findings.suggestions[0],
			expected_policy_name='policy1',
			expected_resource_name='resource1',
			expected_code='EMPTY_OBJECT_PRINCIPAL'
		)
		self.assert_finding_is_equal(
			actual_finding=findings.suggestions[1],
			expected_policy_name='policy2',
			expected_resource_name='resource2',
			expected_code='EMPTY_OBJECT_PRINCIPAL'
		)

	@mock_access_analyzer_resource_setup(
		MockNoFindings(),
		MockNoFindings()
	)
	def test_with_sqs_queue_policy_with_no_findings(self):
		self.add_resources_to_output('AWS::SQS::Queue', sqs_queue_policy_with_no_findings)
		findings = validate_parser_output(self.output)
		self.assert_has_findings(findings)

	@mock_access_analyzer_resource_setup(
		MockInvalidConfiguration(),
		MockInvalidConfiguration()
	)
	def test_with_invalid_sqs_queue_policy(self):
		# A structurally invalid policy fails preview creation with a message
		# pointing at the schema problem.
		self.add_resources_to_output('AWS::SQS::Queue', sqs_queue_invalid_policy)
		with self.assertRaises(ApplicationError) as cm:
			validate_parser_output(self.output)
		self.assertIn("Failed to create access preview for resource1. Validate that your trust or resource "
					"policy's schema is correct.\nThe following validation findings were detected for this resource:", str(cm.exception))
# Wildcard principal: any AWS account may use the key (external access).
kms_key_policy_that_allows_external_access = {
	"Version": "2012-10-17",
	"Statement": [{
		"Effect": "Allow",
		"Principal": {
			"AWS": "*"
		},
		"Action": "kms:*",
		"Resource": "*"
	}]
}

# Empty Principal object — triggers an EMPTY_OBJECT_PRINCIPAL suggestion.
kms_key_policy_with_findings = {
	"Version": "2012-10-17",
	"Statement": [{
		"Effect": "Allow",
		"Principal": {},
		"Action": "kms:*",
		"Resource": "*"
	}]
}

# Principal limited to the test account root — no findings expected.
kms_key_policy_with_no_findings = {
	"Version": "2012-10-17",
	"Statement": [{
		"Effect": "Allow",
		"Principal": {
			"AWS": f"arn:aws:iam::{account_config.account_id}:root"
		},
		"Action": "kms:*",
		"Resource": "*"
	}]
}

# Malformed policy (Action is not a string/list) used to exercise the
# invalid-configuration error path.
kms_key_invalid_policy = {
	"Version": "2012-10-17",
	"Statement": [{
		"Effect": "Allow",
		"Principal": {
			"AWS": f"arn:aws:iam::{account_config.account_id}:root"
		},
		"Action": {"not": "valid"},
		"Resource": "*"
	}]
}
class WhenValidatingKmsKeyPolicy(BaseResourcePolicyTest):
	"""Access-preview validation of KMS key policies."""

	@mock_access_analyzer_resource_setup(
		MockAccessPreviewFinding(),
		MockAccessPreviewFinding()
	)
	def test_with_kms_policy_that_allows_external_access(self):
		# Wildcard principal -> EXTERNAL_PRINCIPAL warning for both resources.
		self.add_resources_to_output('AWS::KMS::Key', kms_key_policy_that_allows_external_access)
		findings = validate_parser_output(self.output)
		self.assert_has_findings(findings, security_warnings=2)
		self.assert_finding_is_equal(
			actual_finding=findings.security_warnings[0],
			expected_policy_name='policy1',
			expected_resource_name='resource1',
			expected_code='EXTERNAL_PRINCIPAL'
		)
		self.assert_finding_is_equal(
			actual_finding=findings.security_warnings[1],
			expected_policy_name='policy2',
			expected_resource_name='resource2',
			expected_code='EXTERNAL_PRINCIPAL'
		)

	@mock_access_analyzer_resource_setup(
		MockValidateResourcePolicyFinding(code='EMPTY_OBJECT_PRINCIPAL', finding_type=FINDING_TYPE.SUGGESTION),
		MockValidateResourcePolicyFinding(code='EMPTY_OBJECT_PRINCIPAL', finding_type=FINDING_TYPE.SUGGESTION)
	)
	def test_with_kms_policy_with_findings(self):
		# Empty Principal -> EMPTY_OBJECT_PRINCIPAL suggestion per resource.
		self.add_resources_to_output('AWS::KMS::Key', kms_key_policy_with_findings)
		findings = validate_parser_output(self.output)
		self.assert_has_findings(findings, suggestions=2)
		self.assert_finding_is_equal(
			actual_finding=findings.suggestions[0],
			expected_policy_name='policy1',
			expected_resource_name='resource1',
			expected_code='EMPTY_OBJECT_PRINCIPAL'
		)
		self.assert_finding_is_equal(
			actual_finding=findings.suggestions[1],
			expected_policy_name='policy2',
			expected_resource_name='resource2',
			expected_code='EMPTY_OBJECT_PRINCIPAL'
		)

	@mock_access_analyzer_resource_setup(
		MockNoFindings(),
		MockNoFindings()
	)
	def test_with_kms_policy_with_no_findings(self):
		self.add_resources_to_output('AWS::KMS::Key', kms_key_policy_with_no_findings)
		findings = validate_parser_output(self.output)
		self.assert_has_findings(findings)

	@mock_access_analyzer_resource_setup(
		MockInvalidConfiguration(),
		MockInvalidConfiguration()
	)
	def test_with_invalid_kms_policy(self):
		# A structurally invalid policy fails preview creation with a message
		# pointing at the schema problem.
		self.add_resources_to_output('AWS::KMS::Key', kms_key_invalid_policy)
		with self.assertRaises(ApplicationError) as cm:
			validate_parser_output(self.output)
		self.assertIn("Failed to create access preview for resource1. Validate that your trust or resource "
					"policy's schema is correct.\nThe following validation findings were detected for this resource:", str(cm.exception))
def build_s3_bucket_policy_that_allows_external_access(resource_name):
	"""Build an S3 bucket policy granting full access to a role in a
	different account (expected to produce an external-access finding)."""
	bucket_arn = f"arn:aws:s3:::{resource_name}"
	statement = {
		"Effect": "Allow",
		"Principal": {'AWS': "arn:aws:iam::123456789123:role/MyOtherRole"},
		"Action": "*",
		"Resource": [bucket_arn, f"{bucket_arn}/*"]
	}
	return {"Version": "2012-10-17", "Statement": [statement]}
def build_s3_bucket_policy_with_findings(resource_name):
	"""Build an S3 bucket policy with an empty Principal object (expected to
	produce an EMPTY_OBJECT_PRINCIPAL validation finding)."""
	objects_arn = f"arn:aws:s3:::{resource_name}/*"
	put_statement = {
		"Effect": "Allow",
		"Principal": {},
		"Action": ["s3:PutObject", "s3:PutObjectAcl"],
		"Resource": [objects_arn]
	}
	return {"Version": "2012-10-17", "Statement": [put_statement]}
def build_s3_bucket_policy_with_no_findings(resource_name):
	"""Build an S3 bucket policy restricted to the test account root
	(expected to produce no findings)."""
	bucket_arn = f"arn:aws:s3:::{resource_name}"
	account_root = f"arn:aws:iam::{account_config.account_id}:root"
	return {
		"Version": "2012-10-17",
		"Statement": [{
			"Effect": "Allow",
			"Principal": {"AWS": [account_root]},
			"Action": ["s3:PutObject", "s3:PutObjectAcl"],
			"Resource": [bucket_arn, f"{bucket_arn}/*"]
		}]
	}
# Malformed policy (Resource is not a string/list) used to exercise the
# invalid-configuration error path for S3 buckets.
s3_bucket_invalid_policy = {
	"Version": "2012-10-17",
	"Statement": [{
		"Effect": "Allow",
		"Principal": {"AWS": [f"arn:aws:iam::{account_config.account_id}:root"]},
		"Action": ["s3:PutObject", "s3:PutObjectAcl"],
		"Resource": {"not": "valid"}
	}]
}
class WhenValidatingS3BucketPolicy(BaseResourcePolicyTest):
	"""Access-preview validation of S3 bucket policies. Bucket policies embed
	the bucket name in Resource ARNs, so each resource gets its own policy
	built via the build_s3_bucket_policy_* helpers."""

	@mock_access_analyzer_resource_setup(
		MockAccessPreviewFinding(custom_validate_policy_type='AWS::S3::Bucket'),
		MockAccessPreviewFinding(custom_validate_policy_type='AWS::S3::Bucket')
	)
	def test_with_s3_bucket_policy_that_allows_external_access(self):
		# Cross-account role principal -> EXTERNAL_PRINCIPAL warning each.
		self.add_resources_to_output('AWS::S3::Bucket',
									 build_s3_bucket_policy_that_allows_external_access('resource1'),
									 build_s3_bucket_policy_that_allows_external_access('resource2'))
		findings = validate_parser_output(self.output)
		self.assert_has_findings(findings, security_warnings=2)
		self.assert_finding_is_equal(
			actual_finding=findings.security_warnings[0],
			expected_policy_name='policy1',
			expected_resource_name='resource1',
			expected_code='EXTERNAL_PRINCIPAL'
		)
		self.assert_finding_is_equal(
			actual_finding=findings.security_warnings[1],
			expected_policy_name='policy2',
			expected_resource_name='resource2',
			expected_code='EXTERNAL_PRINCIPAL'
		)

	@mock_access_analyzer_resource_setup(
		MockValidateResourcePolicyFinding(code='EMPTY_OBJECT_PRINCIPAL', finding_type=FINDING_TYPE.SUGGESTION, custom_resource_type='AWS::S3::Bucket'),
		MockValidateResourcePolicyFinding(code='EMPTY_OBJECT_PRINCIPAL', finding_type=FINDING_TYPE.SUGGESTION, custom_resource_type='AWS::S3::Bucket')
	)
	def test_with_s3_bucket_policy_with_findings(self):
		# Empty Principal -> EMPTY_OBJECT_PRINCIPAL suggestion per resource.
		self.add_resources_to_output('AWS::S3::Bucket',
									 build_s3_bucket_policy_with_findings('resource1'),
									 build_s3_bucket_policy_with_findings('resource2'))
		findings = validate_parser_output(self.output)
		self.assert_has_findings(findings, suggestions=2)
		self.assert_finding_is_equal(
			actual_finding=findings.suggestions[0],
			expected_policy_name='policy1',
			expected_resource_name='resource1',
			expected_code='EMPTY_OBJECT_PRINCIPAL'
		)
		self.assert_finding_is_equal(
			actual_finding=findings.suggestions[1],
			expected_policy_name='policy2',
			expected_resource_name='resource2',
			expected_code='EMPTY_OBJECT_PRINCIPAL'
		)

	@mock_access_analyzer_resource_setup(
		MockNoFindings(custom_validate_policy_type='AWS::S3::Bucket'),
		MockNoFindings(custom_validate_policy_type='AWS::S3::Bucket')
	)
	def test_with_s3_bucket_policy_with_no_findings(self):
		self.add_resources_to_output('AWS::S3::Bucket',
									 build_s3_bucket_policy_with_no_findings('resource1'),
									 build_s3_bucket_policy_with_no_findings('resource2'))
		findings = validate_parser_output(self.output)
		self.assert_has_findings(findings)

	@mock_access_analyzer_resource_setup(
		MockInvalidConfiguration(),
		MockInvalidConfiguration()
	)
	def test_with_invalid_s3_bucket_policy(self):
		# A structurally invalid policy fails preview creation with a message
		# pointing at the schema problem.
		self.add_resources_to_output('AWS::S3::Bucket', s3_bucket_invalid_policy)
		with self.assertRaises(ApplicationError) as cm:
			validate_parser_output(self.output)
		self.assertIn("Failed to create access preview for resource1. Validate that your trust or resource "
					"policy's schema is correct.\nThe following validation findings were detected for this resource:", str(cm.exception))
# Principal in a different account — would allow external access to the secret.
secrets_manager_resource_policy_that_allows_external_access = {
	"Version": "2012-10-17",
	"Statement": [{
		"Effect": "Allow",
		"Principal": {"AWS": f"arn:aws:iam::777888999444:root"},
		"Action": "secretsmanager:GetSecretValue",
		"Resource": "*"
	}]
}

# Empty Principal object — triggers an EMPTY_OBJECT_PRINCIPAL suggestion.
secrets_manager_resource_policy_with_findings = {
	"Version": "2012-10-17",
	"Statement": [{
		"Effect": "Allow",
		"Principal": {},
		"Action": "secretsmanager:GetSecretValue",
		"Resource": "*"
	}]
}

# Principal limited to the test account root — no findings expected.
secrets_manager_resource_policy_with_no_findings = {
	"Version": "2012-10-17",
	"Statement": [{
		"Effect": "Allow",
		"Principal": {
			"AWS": f"arn:aws:iam::{account_config.account_id}:root"
		},
		"Action": "secretsmanager:GetSecretValue",
		"Resource": "*"
	}]
}

# Malformed policy (Action is not a string/list) used to exercise the
# invalid-configuration error path.
secrets_manager_resource_invalid_policy = {
	"Version": "2012-10-17",
	"Statement": [{
		"Effect": "Allow",
		"Principal": {
			"AWS": f"arn:aws:iam::{account_config.account_id}:root"
		},
		"Action": {"not": "valid"},
		"Resource": "*"
	}]
}
class WhenValidatingSecretsManagerResourcePolicy(BaseResourcePolicyTest):
	"""Access-preview validation of Secrets Manager resource policies."""

	# This doesn't work because secrets manager uses the default KMS key if no KMS key is provided
	# the default KMS key is not publicly accessible, so the secret is therefore not publicly accessible.
	# To make this work, we'd need to look up the KMS key from the environment OR from the key policy if it had
	# yet to be created
	@unittest.skip("Skip until this is supported")
	def test_with_secrets_manager_resource_policy_that_allows_external_access(self):
		self.add_resources_to_output('AWS::SecretsManager::Secret', secrets_manager_resource_policy_that_allows_external_access)
		findings = validate_parser_output(self.output)
		self.assert_has_findings(findings, security_warnings=2)
		self.assert_finding_is_equal(
			actual_finding=findings.security_warnings[0],
			expected_policy_name='policy1',
			expected_resource_name='resource1',
			expected_code='EXTERNAL_PRINCIPAL'
		)
		self.assert_finding_is_equal(
			actual_finding=findings.security_warnings[1],
			expected_policy_name='policy2',
			expected_resource_name='resource2',
			expected_code='EXTERNAL_PRINCIPAL'
		)

	@mock_access_analyzer_resource_setup(
		MockValidateResourcePolicyFinding(code='EMPTY_OBJECT_PRINCIPAL', finding_type=FINDING_TYPE.SUGGESTION),
		MockValidateResourcePolicyFinding(code='EMPTY_OBJECT_PRINCIPAL', finding_type=FINDING_TYPE.SUGGESTION)
	)
	def test_with_secrets_manager_resource_policy_with_findings(self):
		# Empty Principal -> EMPTY_OBJECT_PRINCIPAL suggestion per resource.
		self.add_resources_to_output('AWS::SecretsManager::Secret', secrets_manager_resource_policy_with_findings)
		findings = validate_parser_output(self.output)
		self.assert_has_findings(findings, suggestions=2)
		self.assert_finding_is_equal(
			actual_finding=findings.suggestions[0],
			expected_policy_name='policy1',
			expected_resource_name='resource1',
			expected_code='EMPTY_OBJECT_PRINCIPAL'
		)
		self.assert_finding_is_equal(
			actual_finding=findings.suggestions[1],
			expected_policy_name='policy2',
			expected_resource_name='resource2',
			expected_code='EMPTY_OBJECT_PRINCIPAL'
		)

	@mock_access_analyzer_resource_setup(
		MockNoFindings(),
		MockNoFindings()
	)
	def test_with_secrets_manager_resource_policy_with_no_findings(self):
		self.add_resources_to_output('AWS::SecretsManager::Secret', secrets_manager_resource_policy_with_no_findings)
		findings = validate_parser_output(self.output)
		self.assert_has_findings(findings)

	@mock_access_analyzer_resource_setup(
		MockInvalidConfiguration(),
		MockInvalidConfiguration()
	)
	def test_with_invalid_secrets_manager_resource_policy(self):
		# A structurally invalid policy fails preview creation with a message
		# pointing at the schema problem.
		self.add_resources_to_output('AWS::SecretsManager::Secret', secrets_manager_resource_invalid_policy)
		with self.assertRaises(ApplicationError) as cm:
			validate_parser_output(self.output)
		self.assertIn("Failed to create access preview for resource1. Validate that your trust or resource "
					"policy's schema is correct.\nThe following validation findings were detected for this resource:", str(cm.exception))
| [
"cfn_policy_validator.parsers.output.Resource",
"boto3.Session",
"cfn_policy_validator.tests.validation_tests.MockValidateResourcePolicyFinding",
"cfn_policy_validator.tests.validation_tests.MockInvalidConfiguration",
"cfn_policy_validator.validation.validator.Validator",
"cfn_policy_validator.tests.boto_... | [((20256, 20301), 'unittest.skip', 'unittest.skip', (['"""Skip until this is supported"""'], {}), "('Skip until this is supported')\n", (20269, 20301), False, 'import unittest\n'), ((1502, 1524), 'cfn_policy_validator.parsers.output.Output', 'Output', (['account_config'], {}), '(account_config)\n', (1508, 1524), False, 'from cfn_policy_validator.parsers.output import Output, Policy, Resource\n'), ((1810, 1872), 'cfn_policy_validator.parsers.output.Resource', 'Resource', (['"""resource1"""', 'resource_type', 'policy1', 'configuration_1'], {}), "('resource1', resource_type, policy1, configuration_1)\n", (1818, 1872), False, 'from cfn_policy_validator.parsers.output import Output, Policy, Resource\n'), ((1952, 2014), 'cfn_policy_validator.parsers.output.Resource', 'Resource', (['"""resource2"""', 'resource_type', 'policy2', 'configuration_2'], {}), "('resource2', resource_type, policy2, configuration_2)\n", (1960, 2014), False, 'from cfn_policy_validator.parsers.output import Output, Policy, Resource\n'), ((2172, 2220), 'boto3.Session', 'boto3.Session', ([], {'region_name': 'account_config.region'}), '(region_name=account_config.region)\n', (2185, 2220), False, 'import boto3\n'), ((3599, 3621), 'cfn_policy_validator.parsers.output.Output', 'Output', (['account_config'], {}), '(account_config)\n', (3605, 3621), False, 'from cfn_policy_validator.parsers.output import Output, Policy, Resource\n'), ((3921, 4011), 'cfn_policy_validator.validation.validator.Validator', 'Validator', (['account_config.account_id', 'account_config.region', 'account_config.partition'], {}), '(account_config.account_id, account_config.region, account_config.\n partition)\n', (3930, 4011), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((3664, 3682), 'cfn_policy_validator.tests.validation_tests.MockUnknownError', 'MockUnknownError', ([], {}), '()\n', (3680, 3682), False, 'from 
cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((4506, 4596), 'cfn_policy_validator.validation.validator.Validator', 'Validator', (['account_config.account_id', 'account_config.region', 'account_config.partition'], {}), '(account_config.account_id, account_config.region, account_config.\n partition)\n', (4515, 4596), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((4254, 4267), 'cfn_policy_validator.tests.validation_tests.MockTimeout', 'MockTimeout', ([], {}), '()\n', (4265, 4267), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((5349, 5439), 'cfn_policy_validator.validation.validator.Validator', 'Validator', (['account_config.account_id', 'account_config.region', 'account_config.partition'], {}), '(account_config.account_id, account_config.region, account_config.\n partition)\n', (5358, 5439), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((5706, 5728), 'cfn_policy_validator.parsers.output.Output', 'Output', (['account_config'], {}), '(account_config)\n', (5712, 5728), False, 'from cfn_policy_validator.parsers.output import Output, Policy, Resource\n'), ((5838, 5877), 'cfn_policy_validator.parsers.output.Resource', 'Resource', (['"""resource1"""', '"""Lambda"""', 'policy'], {}), "('resource1', 'Lambda', policy)\n", (5846, 5877), False, 'from cfn_policy_validator.parsers.output import Output, Policy, Resource\n'), ((5925, 5955), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['output'], {}), '(output)\n', (5947, 5955), 
False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((5512, 5618), 'cfn_policy_validator.tests.validation_tests.MockValidateResourcePolicyFinding', 'MockValidateResourcePolicyFinding', ([], {'code': '"""EMPTY_OBJECT_PRINCIPAL"""', 'finding_type': 'FINDING_TYPE.SUGGESTION'}), "(code='EMPTY_OBJECT_PRINCIPAL',\n finding_type=FINDING_TYPE.SUGGESTION)\n", (5545, 5618), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((6326, 6348), 'cfn_policy_validator.parsers.output.Output', 'Output', (['account_config'], {}), '(account_config)\n', (6332, 6348), False, 'from cfn_policy_validator.parsers.output import Output, Policy, Resource\n'), ((6851, 6900), 'cfn_policy_validator.parsers.output.Resource', 'Resource', (['"""resource1"""', '"""AWS::SQS::Queue"""', 'policy1'], {}), "('resource1', 'AWS::SQS::Queue', policy1)\n", (6859, 6900), False, 'from cfn_policy_validator.parsers.output import Output, Policy, Resource\n'), ((7005, 7052), 'cfn_policy_validator.parsers.output.Resource', 'Resource', (['"""resource2"""', '"""AWS::KMS::Key"""', 'policy2'], {}), "('resource2', 'AWS::KMS::Key', policy2)\n", (7013, 7052), False, 'from cfn_policy_validator.parsers.output import Output, Policy, Resource\n'), ((7117, 7152), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (7139, 7152), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((6511, 6537), 'cfn_policy_validator.tests.validation_tests.MockAccessPreviewFinding', 'MockAccessPreviewFinding', ([], {}), '()\n', (6535, 6537), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, 
MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((6541, 6592), 'cfn_policy_validator.tests.validation_tests.MockAccessPreviewFinding', 'MockAccessPreviewFinding', ([], {'finding_status': '"""ARCHIVED"""'}), "(finding_status='ARCHIVED')\n", (6565, 6592), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((7742, 7777), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (7764, 7777), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((7450, 7501), 'cfn_policy_validator.tests.validation_tests.MockAccessPreviewFinding', 'MockAccessPreviewFinding', ([], {'finding_status': '"""ARCHIVED"""'}), "(finding_status='ARCHIVED')\n", (7474, 7501), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((7505, 7556), 'cfn_policy_validator.tests.validation_tests.MockAccessPreviewFinding', 'MockAccessPreviewFinding', ([], {'finding_status': '"""ARCHIVED"""'}), "(finding_status='ARCHIVED')\n", (7529, 7556), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((8997, 9032), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (9019, 9032), False, 'from 
cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((8766, 8792), 'cfn_policy_validator.tests.validation_tests.MockAccessPreviewFinding', 'MockAccessPreviewFinding', ([], {}), '()\n', (8790, 8792), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((8796, 8822), 'cfn_policy_validator.tests.validation_tests.MockAccessPreviewFinding', 'MockAccessPreviewFinding', ([], {}), '()\n', (8820, 8822), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((9882, 9917), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (9904, 9917), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((9527, 9633), 'cfn_policy_validator.tests.validation_tests.MockValidateResourcePolicyFinding', 'MockValidateResourcePolicyFinding', ([], {'code': '"""EMPTY_OBJECT_PRINCIPAL"""', 'finding_type': 'FINDING_TYPE.SUGGESTION'}), "(code='EMPTY_OBJECT_PRINCIPAL',\n finding_type=FINDING_TYPE.SUGGESTION)\n", (9560, 9633), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((9633, 9739), 'cfn_policy_validator.tests.validation_tests.MockValidateResourcePolicyFinding', 'MockValidateResourcePolicyFinding', ([], {'code': '"""EMPTY_OBJECT_PRINCIPAL"""', 'finding_type': 'FINDING_TYPE.SUGGESTION'}), "(code='EMPTY_OBJECT_PRINCIPAL',\n 
finding_type=FINDING_TYPE.SUGGESTION)\n", (9666, 9739), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((10597, 10632), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (10619, 10632), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((10402, 10418), 'cfn_policy_validator.tests.validation_tests.MockNoFindings', 'MockNoFindings', ([], {}), '()\n', (10416, 10418), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((10422, 10438), 'cfn_policy_validator.tests.validation_tests.MockNoFindings', 'MockNoFindings', ([], {}), '()\n', (10436, 10438), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((10712, 10738), 'cfn_policy_validator.tests.validation_tests.MockInvalidConfiguration', 'MockInvalidConfiguration', ([], {}), '()\n', (10736, 10738), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((10742, 10768), 'cfn_policy_validator.tests.validation_tests.MockInvalidConfiguration', 'MockInvalidConfiguration', ([], {}), '()\n', (10766, 10768), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, 
mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((12353, 12388), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (12375, 12388), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((12126, 12152), 'cfn_policy_validator.tests.validation_tests.MockAccessPreviewFinding', 'MockAccessPreviewFinding', ([], {}), '()\n', (12150, 12152), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((12156, 12182), 'cfn_policy_validator.tests.validation_tests.MockAccessPreviewFinding', 'MockAccessPreviewFinding', ([], {}), '()\n', (12180, 12182), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((13234, 13269), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (13256, 13269), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((12883, 12989), 'cfn_policy_validator.tests.validation_tests.MockValidateResourcePolicyFinding', 'MockValidateResourcePolicyFinding', ([], {'code': '"""EMPTY_OBJECT_PRINCIPAL"""', 'finding_type': 'FINDING_TYPE.SUGGESTION'}), "(code='EMPTY_OBJECT_PRINCIPAL',\n finding_type=FINDING_TYPE.SUGGESTION)\n", (12916, 12989), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, 
MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((12989, 13095), 'cfn_policy_validator.tests.validation_tests.MockValidateResourcePolicyFinding', 'MockValidateResourcePolicyFinding', ([], {'code': '"""EMPTY_OBJECT_PRINCIPAL"""', 'finding_type': 'FINDING_TYPE.SUGGESTION'}), "(code='EMPTY_OBJECT_PRINCIPAL',\n finding_type=FINDING_TYPE.SUGGESTION)\n", (13022, 13095), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((13939, 13974), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (13961, 13974), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((13754, 13770), 'cfn_policy_validator.tests.validation_tests.MockNoFindings', 'MockNoFindings', ([], {}), '()\n', (13768, 13770), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((13774, 13790), 'cfn_policy_validator.tests.validation_tests.MockNoFindings', 'MockNoFindings', ([], {}), '()\n', (13788, 13790), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((14054, 14080), 'cfn_policy_validator.tests.validation_tests.MockInvalidConfiguration', 'MockInvalidConfiguration', ([], {}), '()\n', (14078, 14080), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, 
MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((14084, 14110), 'cfn_policy_validator.tests.validation_tests.MockInvalidConfiguration', 'MockInvalidConfiguration', ([], {}), '()\n', (14108, 14110), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((16299, 16334), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (16321, 16334), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((15868, 15939), 'cfn_policy_validator.tests.validation_tests.MockAccessPreviewFinding', 'MockAccessPreviewFinding', ([], {'custom_validate_policy_type': '"""AWS::S3::Bucket"""'}), "(custom_validate_policy_type='AWS::S3::Bucket')\n", (15892, 15939), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((15943, 16014), 'cfn_policy_validator.tests.validation_tests.MockAccessPreviewFinding', 'MockAccessPreviewFinding', ([], {'custom_validate_policy_type': '"""AWS::S3::Bucket"""'}), "(custom_validate_policy_type='AWS::S3::Bucket')\n", (15967, 16014), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((17360, 17395), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (17382, 17395), False, 'from 
cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((16829, 16980), 'cfn_policy_validator.tests.validation_tests.MockValidateResourcePolicyFinding', 'MockValidateResourcePolicyFinding', ([], {'code': '"""EMPTY_OBJECT_PRINCIPAL"""', 'finding_type': 'FINDING_TYPE.SUGGESTION', 'custom_resource_type': '"""AWS::S3::Bucket"""'}), "(code='EMPTY_OBJECT_PRINCIPAL',\n finding_type=FINDING_TYPE.SUGGESTION, custom_resource_type=\n 'AWS::S3::Bucket')\n", (16862, 16980), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((16975, 17126), 'cfn_policy_validator.tests.validation_tests.MockValidateResourcePolicyFinding', 'MockValidateResourcePolicyFinding', ([], {'code': '"""EMPTY_OBJECT_PRINCIPAL"""', 'finding_type': 'FINDING_TYPE.SUGGESTION', 'custom_resource_type': '"""AWS::S3::Bucket"""'}), "(code='EMPTY_OBJECT_PRINCIPAL',\n finding_type=FINDING_TYPE.SUGGESTION, custom_resource_type=\n 'AWS::S3::Bucket')\n", (17008, 17126), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((18258, 18293), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (18280, 18293), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((17880, 17941), 'cfn_policy_validator.tests.validation_tests.MockNoFindings', 'MockNoFindings', ([], {'custom_validate_policy_type': '"""AWS::S3::Bucket"""'}), "(custom_validate_policy_type='AWS::S3::Bucket')\n", (17894, 17941), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, 
mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((17945, 18006), 'cfn_policy_validator.tests.validation_tests.MockNoFindings', 'MockNoFindings', ([], {'custom_validate_policy_type': '"""AWS::S3::Bucket"""'}), "(custom_validate_policy_type='AWS::S3::Bucket')\n", (17959, 18006), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((18373, 18399), 'cfn_policy_validator.tests.validation_tests.MockInvalidConfiguration', 'MockInvalidConfiguration', ([], {}), '()\n', (18397, 18399), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((18403, 18429), 'cfn_policy_validator.tests.validation_tests.MockInvalidConfiguration', 'MockInvalidConfiguration', ([], {}), '()\n', (18427, 18429), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((20521, 20556), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (20543, 20556), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((21454, 21489), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (21476, 21489), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, 
Validator\n'), ((21051, 21157), 'cfn_policy_validator.tests.validation_tests.MockValidateResourcePolicyFinding', 'MockValidateResourcePolicyFinding', ([], {'code': '"""EMPTY_OBJECT_PRINCIPAL"""', 'finding_type': 'FINDING_TYPE.SUGGESTION'}), "(code='EMPTY_OBJECT_PRINCIPAL',\n finding_type=FINDING_TYPE.SUGGESTION)\n", (21084, 21157), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((21157, 21263), 'cfn_policy_validator.tests.validation_tests.MockValidateResourcePolicyFinding', 'MockValidateResourcePolicyFinding', ([], {'code': '"""EMPTY_OBJECT_PRINCIPAL"""', 'finding_type': 'FINDING_TYPE.SUGGESTION'}), "(code='EMPTY_OBJECT_PRINCIPAL',\n finding_type=FINDING_TYPE.SUGGESTION)\n", (21190, 21263), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((22211, 22246), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (22233, 22246), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((21974, 21990), 'cfn_policy_validator.tests.validation_tests.MockNoFindings', 'MockNoFindings', ([], {}), '()\n', (21988, 21990), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((21994, 22010), 'cfn_policy_validator.tests.validation_tests.MockNoFindings', 'MockNoFindings', ([], {}), '()\n', (22008, 22010), False, 'from 
cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((22326, 22352), 'cfn_policy_validator.tests.validation_tests.MockInvalidConfiguration', 'MockInvalidConfiguration', ([], {}), '()\n', (22350, 22352), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((22356, 22382), 'cfn_policy_validator.tests.validation_tests.MockInvalidConfiguration', 'MockInvalidConfiguration', ([], {}), '()\n', (22380, 22382), False, 'from cfn_policy_validator.tests.validation_tests import FINDING_TYPE, mock_access_analyzer_resource_setup, MockAccessPreviewFinding, MockNoFindings, MockInvalidConfiguration, MockUnknownError, MockTimeout, MockValidateResourcePolicyFinding\n'), ((1764, 1794), 'copy.deepcopy', 'copy.deepcopy', (['resource_policy'], {}), '(resource_policy)\n', (1777, 1794), False, 'import copy\n'), ((1904, 1936), 'copy.deepcopy', 'copy.deepcopy', (['resource_policy_2'], {}), '(resource_policy_2)\n', (1917, 1936), False, 'import copy\n'), ((3785, 3832), 'copy.deepcopy', 'copy.deepcopy', (['resource_policy_with_no_findings'], {}), '(resource_policy_with_no_findings)\n', (3798, 3832), False, 'import copy\n'), ((3853, 3901), 'cfn_policy_validator.parsers.output.Resource', 'Resource', (['"""resource1"""', '"""AWS::SQS::Queue"""', 'policy'], {}), "('resource1', 'AWS::SQS::Queue', policy)\n", (3861, 3901), False, 'from cfn_policy_validator.parsers.output import Output, Policy, Resource\n'), ((4370, 4417), 'copy.deepcopy', 'copy.deepcopy', (['resource_policy_with_no_findings'], {}), '(resource_policy_with_no_findings)\n', (4383, 4417), False, 'import copy\n'), ((4438, 4486), 
'cfn_policy_validator.parsers.output.Resource', 'Resource', (['"""resource1"""', '"""AWS::SQS::Queue"""', 'policy'], {}), "('resource1', 'AWS::SQS::Queue', policy)\n", (4446, 4486), False, 'from cfn_policy_validator.parsers.output import Output, Policy, Resource\n'), ((5769, 5823), 'copy.deepcopy', 'copy.deepcopy', (['lambda_permissions_policy_with_findings'], {}), '(lambda_permissions_policy_with_findings)\n', (5782, 5823), False, 'import copy\n'), ((6776, 6835), 'copy.deepcopy', 'copy.deepcopy', (['sqs_queue_policy_that_allows_external_access'], {}), '(sqs_queue_policy_that_allows_external_access)\n', (6789, 6835), False, 'import copy\n'), ((6932, 6989), 'copy.deepcopy', 'copy.deepcopy', (['kms_key_policy_that_allows_external_access'], {}), '(kms_key_policy_that_allows_external_access)\n', (6945, 6989), False, 'import copy\n'), ((10949, 10984), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (10971, 10984), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((14281, 14316), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (14303, 14316), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((18610, 18645), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (18632, 18645), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((22605, 22640), 'cfn_policy_validator.validation.validator.validate_parser_output', 'validate_parser_output', (['self.output'], {}), '(self.output)\n', (22627, 22640), False, 'from cfn_policy_validator.validation.validator import validate_parser_output, Validator\n'), ((4900, 5014), 'cfn_policy_validator.tests.boto_mocks.BotoResponse', 
'BotoResponse', ([], {'method': '"""list_analyzers"""', 'service_response': "{'analyzers': []}", 'expected_params': "{'type': 'ACCOUNT'}"}), "(method='list_analyzers', service_response={'analyzers': []},\n expected_params={'type': 'ACCOUNT'})\n", (4912, 5014), False, 'from cfn_policy_validator.tests.boto_mocks import mock_test_setup, BotoResponse, get_test_mode, TEST_MODE\n'), ((5032, 5232), 'cfn_policy_validator.tests.boto_mocks.BotoResponse', 'BotoResponse', ([], {'method': '"""create_analyzer"""', 'service_response': "{'arn': 'arn:aws:access-analyzer:us-east-1:123456789123:analyzer/MyAnalyzer'}", 'expected_params': "{'analyzerName': ANY, 'type': 'ACCOUNT'}"}), "(method='create_analyzer', service_response={'arn':\n 'arn:aws:access-analyzer:us-east-1:123456789123:analyzer/MyAnalyzer'},\n expected_params={'analyzerName': ANY, 'type': 'ACCOUNT'})\n", (5044, 5232), False, 'from cfn_policy_validator.tests.boto_mocks import mock_test_setup, BotoResponse, get_test_mode, TEST_MODE\n')] |
"""
Tests for the h5py.Datatype class.
"""
from __future__ import absolute_import
from itertools import count
import numpy as np
import h5py
from ..common import ut, TestCase
class TestVlen(TestCase):

    """
    Check that storage of vlen strings is carried out correctly.
    """

    def assertVlenArrayEqual(self, dset, arr, message=None, precision=None):
        """Assert that two arrays of vlen sequences match element-wise.

        Shapes are compared first; then each vlen element is compared with
        assertArrayEqual at the given precision.
        """
        # assertTrue replaces the long-deprecated unittest assert_ alias.
        self.assertTrue(
            dset.shape == arr.shape,
            "Shape mismatch (%s vs %s)%s" % (dset.shape, arr.shape, message)
        )
        # The index from the original zip(count(), ...) was unused.
        for d, a in zip(dset, arr):
            self.assertArrayEqual(d, a, message, precision)

    def test_compound(self):
        """A compound dtype with a vlen-str member round-trips as a named type."""
        fields = [
            ('field_1', h5py.special_dtype(vlen=str)),
            ('field_2', np.int32),
        ]
        dt = np.dtype(fields)
        self.f['mytype'] = dt
        dt_out = self.f['mytype'].dtype.fields['field_1'][0]
        # The vlen base type must still be recoverable after the round trip.
        self.assertEqual(h5py.check_dtype(vlen=dt_out), str)

    def test_compound_vlen_bool(self):
        """Compound dtypes mixing vlen and bool members store data correctly."""
        vidt = h5py.special_dtype(vlen=np.uint8)

        def a(items):
            return np.array(items, dtype=np.uint8)

        f = self.f

        # np.bool was removed in NumPy 1.24; np.bool_ is the portable scalar.
        dt_vb = np.dtype([
            ('foo', vidt),
            ('logical', np.bool_)])
        vb = f.create_dataset('dt_vb', shape=(4,), dtype=dt_vb)
        data = np.array([(a([1,2,3]), True),
                         (a([1 ]), False),
                         (a([1,5 ]), True),
                         (a([], ), False),],
                         dtype=dt_vb)
        vb[:] = data
        actual = f['dt_vb'][:]
        self.assertVlenArrayEqual(data['foo'], actual['foo'])
        self.assertArrayEqual(data['logical'], actual['logical'])

        # Creation alone must succeed for a vlen/vlen compound.
        dt_vv = np.dtype([
            ('foo', vidt),
            ('bar', vidt)])
        f.create_dataset('dt_vv', shape=(4,), dtype=dt_vv)

        # Creation alone must succeed for vlen/vlen/bool as well.
        dt_vvb = np.dtype([
            ('foo', vidt),
            ('bar', vidt),
            ('logical', np.bool_)])
        vvb = f.create_dataset('dt_vvb', shape=(2,), dtype=dt_vvb)

        dt_bvv = np.dtype([
            ('logical', np.bool_),
            ('foo', vidt),
            ('bar', vidt)])
        bvv = f.create_dataset('dt_bvv', shape=(2,), dtype=dt_bvv)
        # Pass the dtype itself rather than the dataset; the original relied
        # on numpy's implicit lookup of the dataset's .dtype attribute.
        data = np.array([(True, a([1,2,3]), a([1,2]) ),
                         (False, a([]), a([2,4,6])),],
                         dtype=dt_bvv)
        bvv[:] = data
        actual = bvv[:]
        self.assertVlenArrayEqual(data['foo'], actual['foo'])
        self.assertVlenArrayEqual(data['bar'], actual['bar'])
        self.assertArrayEqual(data['logical'], actual['logical'])

    def test_compound_vlen_enum(self):
        """Compound dtypes mixing vlen and enum members store data correctly."""
        eidt = h5py.special_dtype(enum=(np.uint8, {'OFF': 0, 'ON': 1}))
        vidt = h5py.special_dtype(vlen=np.uint8)

        def a(items):
            return np.array(items, dtype=np.uint8)

        f = self.f

        dt_vve = np.dtype([
            ('foo', vidt),
            ('bar', vidt),
            ('switch', eidt)])
        vve = f.create_dataset('dt_vve', shape=(2,), dtype=dt_vve)
        data = np.array([(a([1,2,3]), a([1,2]), 1),
                         (a([]), a([2,4,6]), 0),],
                         dtype=dt_vve)
        vve[:] = data
        actual = vve[:]
        self.assertVlenArrayEqual(data['foo'], actual['foo'])
        self.assertVlenArrayEqual(data['bar'], actual['bar'])
        self.assertArrayEqual(data['switch'], actual['switch'])

    def test_vlen_enum(self):
        """A vlen-of-enum dtype round-trips both data and enum definition."""
        fname = self.mktemp()
        arr1 = [[1],[1,2]]
        dt1 = h5py.special_dtype(vlen=h5py.special_dtype(
            enum=('i', dict(foo=1, bar=2))))

        with h5py.File(fname,'w') as f:
            df1 = f.create_dataset('test', (len(arr1),), dtype=dt1)
            df1[:] = np.array(arr1)

        with h5py.File(fname,'r') as f:
            df2  = f['test']
            dt2  = df2.dtype
            arr2 = [e.tolist() for e in df2[:]]

        self.assertEqual(arr1, arr2)
        # The enum mapping must survive the round trip inside the vlen type.
        self.assertEqual(h5py.check_dtype(enum=h5py.check_dtype(vlen=dt1)),
                         h5py.check_dtype(enum=h5py.check_dtype(vlen=dt2)))
class TestOffsets(TestCase):

    """
    Check that compound members with aligned or manual offsets are handled
    correctly.

    Fix: ``np.random.random_integers`` was deprecated and later removed from
    NumPy; ``np.random.randint`` with an exclusive upper bound is its direct
    replacement.
    """

    def test_compound_vlen(self):
        """HDF5 member offsets match numpy's when align=True is requested."""
        vidt = h5py.special_dtype(vlen=np.uint8)
        eidt = h5py.special_dtype(enum=(np.uint8, {'OFF': 0, 'ON': 1}))

        for np_align in (False, True):
            dt = np.dtype([
                ('a', eidt),
                ('foo', vidt),
                ('bar', vidt),
                ('switch', eidt)], align=np_align)
            np_offsets = [dt.fields[i][1] for i in dt.names]

            for logical in (False, True):
                if logical and np_align:
                    # Vlen types have different size in the numpy struct
                    self.assertRaises(TypeError, h5py.h5t.py_create, dt,
                                      logical=logical)
                else:
                    ht = h5py.h5t.py_create(dt, logical=logical)
                    offsets = [ht.get_member_offset(i)
                               for i in range(ht.get_nmembers())]
                    if np_align:
                        self.assertEqual(np_offsets, offsets)

    def test_aligned_offsets(self):
        """Aligned numpy dtype offsets and size carry over to the HDF5 type."""
        dt = np.dtype('i2,i8', align=True)
        ht = h5py.h5t.py_create(dt)
        self.assertEqual(dt.itemsize, ht.get_size())
        self.assertEqual(
            [dt.fields[i][1] for i in dt.names],
            [ht.get_member_offset(i) for i in range(ht.get_nmembers())]
        )

    def test_aligned_data(self):
        """Data written with an aligned compound dtype reads back intact."""
        dt = np.dtype('i2,f8', align=True)
        data = np.empty(10, dtype=dt)
        data['f0'] = np.array(np.random.randint(-100, 100, size=data.size),
                              dtype='i2')
        data['f1'] = np.random.rand(data.size)

        fname = self.mktemp()
        with h5py.File(fname, 'w') as f:
            f['data'] = data

        with h5py.File(fname, 'r') as f:
            self.assertArrayEqual(f['data'], data)

    def test_out_of_order_offsets(self):
        """Manually specified (non-monotonic) member offsets round-trip."""
        dt = np.dtype({
            'names': ['f1', 'f2', 'f3'],
            'formats': ['<f4', '<i4', '<f8'],
            'offsets': [0, 16, 8]
        })
        data = np.empty(10, dtype=dt)
        data['f1'] = np.random.rand(data.size)
        # randint's upper bound is exclusive, so 11 reproduces the inclusive
        # [-10, 10] range random_integers(-10, 10, ...) used to give.
        data['f2'] = np.random.randint(-10, 11, data.size)
        data['f3'] = np.random.rand(data.size)*-1

        fname = self.mktemp()
        with h5py.File(fname, 'w') as fd:
            fd.create_dataset('data', data=data)

        with h5py.File(fname, 'r') as fd:
            self.assertArrayEqual(fd['data'], data)
| [
"h5py.check_dtype",
"numpy.random.rand",
"numpy.random.random_integers",
"h5py.File",
"h5py.h5t.py_create",
"numpy.array",
"itertools.count",
"numpy.empty",
"numpy.random.randint",
"h5py.special_dtype",
"numpy.dtype"
] | [((806, 822), 'numpy.dtype', 'np.dtype', (['fields'], {}), '(fields)\n', (814, 822), True, 'import numpy as np\n'), ((850, 862), 'numpy.dtype', 'np.dtype', (['dt'], {}), '(dt)\n', (858, 862), True, 'import numpy as np\n'), ((1040, 1073), 'h5py.special_dtype', 'h5py.special_dtype', ([], {'vlen': 'np.uint8'}), '(vlen=np.uint8)\n', (1058, 1073), False, 'import h5py\n'), ((1184, 1231), 'numpy.dtype', 'np.dtype', (["[('foo', vidt), ('logical', np.bool)]"], {}), "([('foo', vidt), ('logical', np.bool)])\n", (1192, 1231), True, 'import numpy as np\n'), ((1736, 1776), 'numpy.dtype', 'np.dtype', (["[('foo', vidt), ('bar', vidt)]"], {}), "([('foo', vidt), ('bar', vidt)])\n", (1744, 1776), True, 'import numpy as np\n'), ((1879, 1941), 'numpy.dtype', 'np.dtype', (["[('foo', vidt), ('bar', vidt), ('logical', np.bool)]"], {}), "([('foo', vidt), ('bar', vidt), ('logical', np.bool)])\n", (1887, 1941), True, 'import numpy as np\n'), ((2064, 2126), 'numpy.dtype', 'np.dtype', (["[('logical', np.bool), ('foo', vidt), ('bar', vidt)]"], {}), "([('logical', np.bool), ('foo', vidt), ('bar', vidt)])\n", (2072, 2126), True, 'import numpy as np\n'), ((2675, 2731), 'h5py.special_dtype', 'h5py.special_dtype', ([], {'enum': "(np.uint8, {'OFF': 0, 'ON': 1})"}), "(enum=(np.uint8, {'OFF': 0, 'ON': 1}))\n", (2693, 2731), False, 'import h5py\n'), ((2747, 2780), 'h5py.special_dtype', 'h5py.special_dtype', ([], {'vlen': 'np.uint8'}), '(vlen=np.uint8)\n', (2765, 2780), False, 'import h5py\n'), ((2892, 2950), 'numpy.dtype', 'np.dtype', (["[('foo', vidt), ('bar', vidt), ('switch', eidt)]"], {}), "([('foo', vidt), ('bar', vidt), ('switch', eidt)])\n", (2900, 2950), True, 'import numpy as np\n'), ((4306, 4339), 'h5py.special_dtype', 'h5py.special_dtype', ([], {'vlen': 'np.uint8'}), '(vlen=np.uint8)\n', (4324, 4339), False, 'import h5py\n'), ((4355, 4411), 'h5py.special_dtype', 'h5py.special_dtype', ([], {'enum': "(np.uint8, {'OFF': 0, 'ON': 1})"}), "(enum=(np.uint8, {'OFF': 0, 'ON': 1}))\n", (4373, 
4411), False, 'import h5py\n'), ((5311, 5340), 'numpy.dtype', 'np.dtype', (['"""i2,i8"""'], {'align': '(True)'}), "('i2,i8', align=True)\n", (5319, 5340), True, 'import numpy as np\n'), ((5354, 5376), 'h5py.h5t.py_create', 'h5py.h5t.py_create', (['dt'], {}), '(dt)\n', (5372, 5376), False, 'import h5py\n'), ((5635, 5664), 'numpy.dtype', 'np.dtype', (['"""i2,f8"""'], {'align': '(True)'}), "('i2,f8', align=True)\n", (5643, 5664), True, 'import numpy as np\n'), ((5680, 5702), 'numpy.empty', 'np.empty', (['(10)'], {'dtype': 'dt'}), '(10, dtype=dt)\n', (5688, 5702), True, 'import numpy as np\n'), ((5829, 5854), 'numpy.random.rand', 'np.random.rand', (['data.size'], {}), '(data.size)\n', (5843, 5854), True, 'import numpy as np\n'), ((6106, 6206), 'numpy.dtype', 'np.dtype', (["{'names': ['f1', 'f2', 'f3'], 'formats': ['<f4', '<i4', '<f8'], 'offsets':\n [0, 16, 8]}"], {}), "({'names': ['f1', 'f2', 'f3'], 'formats': ['<f4', '<i4', '<f8'],\n 'offsets': [0, 16, 8]})\n", (6114, 6206), True, 'import numpy as np\n'), ((6267, 6289), 'numpy.empty', 'np.empty', (['(10)'], {'dtype': 'dt'}), '(10, dtype=dt)\n', (6275, 6289), True, 'import numpy as np\n'), ((6311, 6336), 'numpy.random.rand', 'np.random.rand', (['data.size'], {}), '(data.size)\n', (6325, 6336), True, 'import numpy as np\n'), ((6358, 6403), 'numpy.random.random_integers', 'np.random.random_integers', (['(-10)', '(10)', 'data.size'], {}), '(-10, 10, data.size)\n', (6383, 6403), True, 'import numpy as np\n'), ((551, 558), 'itertools.count', 'count', ([], {}), '()\n', (556, 558), False, 'from itertools import count\n'), ((949, 978), 'h5py.check_dtype', 'h5py.check_dtype', ([], {'vlen': 'dt_out'}), '(vlen=dt_out)\n', (965, 978), False, 'import h5py\n'), ((1115, 1146), 'numpy.array', 'np.array', (['items'], {'dtype': 'np.uint8'}), '(items, dtype=np.uint8)\n', (1123, 1146), True, 'import numpy as np\n'), ((2822, 2853), 'numpy.array', 'np.array', (['items'], {'dtype': 'np.uint8'}), '(items, dtype=np.uint8)\n', (2830, 2853), 
True, 'import numpy as np\n'), ((3643, 3664), 'h5py.File', 'h5py.File', (['fname', '"""w"""'], {}), "(fname, 'w')\n", (3652, 3664), False, 'import h5py\n'), ((3759, 3773), 'numpy.array', 'np.array', (['arr1'], {}), '(arr1)\n', (3767, 3773), True, 'import numpy as np\n'), ((3788, 3809), 'h5py.File', 'h5py.File', (['fname', '"""r"""'], {}), "(fname, 'r')\n", (3797, 3809), False, 'import h5py\n'), ((4469, 4560), 'numpy.dtype', 'np.dtype', (["[('a', eidt), ('foo', vidt), ('bar', vidt), ('switch', eidt)]"], {'align': 'np_align'}), "([('a', eidt), ('foo', vidt), ('bar', vidt), ('switch', eidt)],\n align=np_align)\n", (4477, 4560), True, 'import numpy as np\n'), ((5734, 5778), 'numpy.random.randint', 'np.random.randint', (['(-100)', '(100)'], {'size': 'data.size'}), '(-100, 100, size=data.size)\n', (5751, 5778), True, 'import numpy as np\n'), ((5900, 5921), 'h5py.File', 'h5py.File', (['fname', '"""w"""'], {}), "(fname, 'w')\n", (5909, 5921), False, 'import h5py\n'), ((5971, 5992), 'h5py.File', 'h5py.File', (['fname', '"""r"""'], {}), "(fname, 'r')\n", (5980, 5992), False, 'import h5py\n'), ((6425, 6450), 'numpy.random.rand', 'np.random.rand', (['data.size'], {}), '(data.size)\n', (6439, 6450), True, 'import numpy as np\n'), ((6499, 6520), 'h5py.File', 'h5py.File', (['fname', '"""w"""'], {}), "(fname, 'w')\n", (6508, 6520), False, 'import h5py\n'), ((6591, 6612), 'h5py.File', 'h5py.File', (['fname', '"""r"""'], {}), "(fname, 'r')\n", (6600, 6612), False, 'import h5py\n'), ((717, 745), 'h5py.special_dtype', 'h5py.special_dtype', ([], {'vlen': 'str'}), '(vlen=str)\n', (735, 745), False, 'import h5py\n'), ((4006, 4032), 'h5py.check_dtype', 'h5py.check_dtype', ([], {'vlen': 'dt1'}), '(vlen=dt1)\n', (4022, 4032), False, 'import h5py\n'), ((4082, 4108), 'h5py.check_dtype', 'h5py.check_dtype', ([], {'vlen': 'dt2'}), '(vlen=dt2)\n', (4098, 4108), False, 'import h5py\n'), ((5005, 5044), 'h5py.h5t.py_create', 'h5py.h5t.py_create', (['dt'], {'logical': 'logical'}), '(dt, 
logical=logical)\n', (5023, 5044), False, 'import h5py\n')] |
import requests
from bs4 import BeautifulSoup
from prettytable import PrettyTable
# html = requests.get(
# 'http://jwzx.cqu.pt/student/xkxfTj.php',
# cookies={'PHPSESSID': 'o2r2fpddrj892dp1ntqddcp2hv'}).text
# soup = BeautifulSoup(html, 'html.parser')
# for tr in soup.find('table', {'id': 'AxfTjTable'}).findAll('tr')[1:]:
# tds = tr.findAll('td')
# print(tds[1:5])
# Render an empty table with the two placeholder column headers.
field_names = ['aaa', 'bbb']
table = PrettyTable(field_names)
print(table)
"prettytable.PrettyTable"
] | [((396, 423), 'prettytable.PrettyTable', 'PrettyTable', (["['aaa', 'bbb']"], {}), "(['aaa', 'bbb'])\n", (407, 423), False, 'from prettytable import PrettyTable\n')] |
import os
import itertools as it
import pandas as pd
def compute_jaccard(v1, v2):
    """Return (jaccard_index, |intersection|, |union|) for two collections.

    Inputs may be any iterables; they are converted to sets first. When both
    are empty the Jaccard index is reported as 0.
    """
    s1, s2 = set(v1), set(v2)
    common = s1 & s2
    combined = s1 | s2
    ratio = len(common) / len(combined) if combined else 0
    return ratio, len(common), len(combined)
def get_inter_method_similarity(sk_coefs_folds, torch_coefs_folds,
                                seeds, folds, signal='signal'):
    """Jaccard similarity between sklearn- and torch-selected gene sets.

    For each (seed, fold) pair, genes with nonzero coefficients are taken
    from both methods and their Jaccard index is appended to the result.
    Entries are expected to be (coefficients, genes) array pairs keyed as
    ``coefs_folds[signal][seed][fold]``.
    """
    similarities = []
    for seed in seeds:
        for fold in folds:
            sk_coefs, sk_genes = (sk_coefs_folds[signal][seed][fold][0],
                                  sk_coefs_folds[signal][seed][fold][1])
            sk_selected = sk_genes[sk_coefs != 0]
            torch_coefs, torch_genes = (torch_coefs_folds[signal][seed][fold][0],
                                        torch_coefs_folds[signal][seed][fold][1])
            torch_selected = torch_genes[torch_coefs != 0]
            similarities.append(
                compute_jaccard(set(sk_selected), set(torch_selected))[0])
    return similarities
def get_intra_method_similarity(sk_coefs_folds, torch_coefs_folds,
                                seeds, folds, signal='signal'):
    """Pairwise-fold Jaccard similarity of selected genes, per method.

    For every seed and every unordered pair of folds, the genes with
    nonzero coefficients in each fold are compared; similarities are
    accumulated separately for scikit-learn and torch results.

    Returns a tuple (sklearn_similarities, torch_similarities).
    """
    def selected_genes(coefs_folds, seed, fold):
        # Genes whose coefficient is nonzero for this (seed, fold) entry.
        coefs = coefs_folds[signal][seed][fold][0]
        genes = coefs_folds[signal][seed][fold][1]
        return set(genes[coefs != 0])

    sk_sims = []
    torch_sims = []
    for seed in seeds:
        for f1, f2 in it.combinations(folds, 2):
            sk_sims.append(
                compute_jaccard(selected_genes(sk_coefs_folds, seed, f1),
                                selected_genes(sk_coefs_folds, seed, f2))[0])
            torch_sims.append(
                compute_jaccard(selected_genes(torch_coefs_folds, seed, f1),
                                selected_genes(torch_coefs_folds, seed, f2))[0])
    return (sk_sims, torch_sims)
| [
"itertools.combinations"
] | [((1330, 1355), 'itertools.combinations', 'it.combinations', (['folds', '(2)'], {}), '(folds, 2)\n', (1345, 1355), True, 'import itertools as it\n')] |
import csv
from core.exceptions import InvalidFileException
def load_so_item_from_file(path, db_service):
    """Load sales order items from a headered CSV file into the database.

    The first row must contain the expected column headers (each cell must
    equal its column name, validated in order); every subsequent row is
    forwarded to ``process_so_item_data``.

    Fixes over the previous version: the "Processed" count no longer
    includes the header row, an empty file no longer raises NameError at the
    final print, and the 18 copy-pasted header checks are replaced by one
    loop that raises the same messages in the same order.

    :param path: path to the CSV file.
    :param db_service: database service used to persist each item.
    :raises InvalidFileException: if a required header is missing/mismatched.
    """
    error_msg = 'Missing required header: {}'
    processed = 0
    with open(path) as csv_file:
        csv_reader = csv.reader(csv_file)
        for i, row in enumerate(csv_reader, 1):
            # Map CSV columns (by position) to their field names.
            data = {
                'id_sales_order_item': row[0],
                'bob_id_sales_order_item': row[1],
                'fk_sales_order': row[2],
                'fk_sales_order_item_status': row[3],
                'fk_delivery_type': row[4],
                'unit_price': row[5],
                'tax_amount': row[6],
                'paid_price': row[7],
                'name': row[8],
                'sku': row[9],
                'created_at': row[10],
                'updated_at': row[11],
                'last_status_change': row[12],
                'original_unit_price': row[13],
                'shipping_type': row[14],
                'real_delivery_date': row[15],
                'bob_id_supplier': row[16],
                'is_marketplace': row[17],
            }
            if i == 1:
                # Header row: every cell must match its column name exactly
                # (dicts preserve insertion order, so checks run in the
                # original column order).
                for field, value in data.items():
                    if value != field:
                        raise InvalidFileException(error_msg.format(field))
            else:
                process_so_item_data(data=data, db_service=db_service)
                processed += 1
        # Count data rows only; the header row is not an item.
        print(f'Processed {processed} sales order item(s).')
def load_so_item_status_from_file(path, db_service):
    """Load sales order item statuses from a headered CSV file.

    The first row must contain the expected column headers (each cell must
    equal its column name, validated in order); every subsequent row is
    forwarded to ``process_so_item_status_data``.

    Fixes over the previous version: the "Processed" count no longer
    includes the header row, an empty file no longer raises NameError at
    the final print, and the per-field header checks are replaced by one
    loop raising the same messages in the same order.

    :param path: path to the CSV file.
    :param db_service: database service used to persist each status.
    :raises InvalidFileException: if a required header is missing/mismatched.
    """
    error_msg = 'Missing required header: {}'
    processed = 0
    with open(path) as csv_file:
        csv_reader = csv.reader(csv_file)
        for i, row in enumerate(csv_reader, 1):
            data = {
                'id_sales_order_item_status': row[0],
                'fk_oms_function': row[1],
                'status': row[2],
                'desc': row[3],
                'deprecated': row[4],
                'updated_at': row[5],
            }
            if i == 1:
                # Header row: every cell must match its column name exactly.
                for field, value in data.items():
                    if value != field:
                        raise InvalidFileException(error_msg.format(field))
            else:
                process_so_item_status_data(data=data, db_service=db_service)
                processed += 1
        # Count data rows only; the header row is not a status entry.
        print(f'Processed {processed} sales order item status.')
def load_so_item_status_history_from_file(path, db_service):
    """Load sales order item status history rows from a headered CSV file.

    The first row must contain the expected column headers (each cell must
    equal its column name, validated in order); every subsequent row is
    forwarded to ``process_so_item_status_history_data``.

    Fixes over the previous version: the "Processed" count no longer
    includes the header row, an empty file no longer raises NameError at
    the final print, and the per-field header checks are replaced by one
    loop raising the same messages in the same order.

    :param path: path to the CSV file.
    :param db_service: database service used to persist each history row.
    :raises InvalidFileException: if a required header is missing/mismatched.
    """
    error_msg = 'Missing required header: {}'
    processed = 0
    with open(path) as csv_file:
        csv_reader = csv.reader(csv_file)
        for i, row in enumerate(csv_reader, 1):
            data = {
                'id_sales_order_item_status_history': row[0],
                'fk_sales_order_item': row[1],
                'fk_sales_order_item_status': row[2],
                'created_at': row[3],
            }
            if i == 1:
                # Header row: every cell must match its column name exactly.
                for field, value in data.items():
                    if value != field:
                        raise InvalidFileException(error_msg.format(field))
            else:
                process_so_item_status_history_data(data=data,
                                                    db_service=db_service)
                processed += 1
        # Count data rows only; the header row is not a history entry.
        print(f'Processed {processed} sales order item status history.')
def process_so_item_data(data, db_service):
    """Persist one sales order item row, normalizing its delivery date.

    The CSV export encodes a missing delivery date as the literal string
    'NULL'; translate that to ``None`` before handing the row to the
    database service.
    """
    raw_date = data['real_delivery_date']
    data['real_delivery_date'] = None if raw_date == 'NULL' else raw_date
    db_service.add_so_item(**data)
def process_so_item_status_data(data, db_service):
    """Persist one sales order item status row via the DB service.

    :param data: dict of column name -> raw CSV value for one status row.
    :param db_service: service exposing ``add_so_item_status``.
    """
    db_service.add_so_item_status(**data)
def process_so_item_status_history_data(data, db_service):
    """Persist one sales order item status history row via the DB service.

    :param data: dict of column name -> raw CSV value for one history row.
    :param db_service: service exposing ``add_so_item_status_history``.
    """
    db_service.add_so_item_status_history(**data)
| [
"csv.reader"
] | [((163, 183), 'csv.reader', 'csv.reader', (['csv_file'], {}), '(csv_file)\n', (173, 183), False, 'import csv\n'), ((4451, 4471), 'csv.reader', 'csv.reader', (['csv_file'], {}), '(csv_file)\n', (4461, 4471), False, 'import csv\n'), ((6134, 6154), 'csv.reader', 'csv.reader', (['csv_file'], {}), '(csv_file)\n', (6144, 6154), False, 'import csv\n')] |
# -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import logging
import pytest
import threading
import time
from azure.iot.device.common import handle_exceptions
from azure.iot.device.iothub import client_event
from azure.iot.device.iothub.sync_handler_manager import SyncHandlerManager, HandlerManagerException
from azure.iot.device.iothub.sync_handler_manager import MESSAGE, METHOD, TWIN_DP_PATCH
from azure.iot.device.iothub.inbox_manager import InboxManager
from azure.iot.device.iothub.sync_inbox import SyncClientInbox
logging.basicConfig(level=logging.DEBUG)

# NOTE ON TEST IMPLEMENTATION:
# Despite having significant shared implementation between the sync and async handler managers,
# there are not shared tests. This is because while both have the same set of requirements and
# APIs, the internal implementation is different to an extent that it simply isn't really possible
# to test them to an appropriate degree of correctness with a shared set of tests.
# This means we must be very careful to always change both test modules when a change is made to
# shared behavior, or when shared features are added.

# NOTE ON TIMING/DELAY
# Several tests in this module have sleeps/delays in their implementation due to needing to wait
# for things to happen in other threads.

# Internal (underscore-prefixed) names of the receiver handler properties.
all_internal_receiver_handlers = [MESSAGE, METHOD, TWIN_DP_PATCH]
# Internal names of the client event handler properties.
all_internal_client_event_handlers = [
    "_on_connection_state_change",
    "_on_new_sastoken_required",
    "_on_background_exception",
]
all_internal_handlers = all_internal_receiver_handlers + all_internal_client_event_handlers
# Public property names are the internal names with leading underscore(s) stripped.
all_receiver_handlers = [s.lstrip("_") for s in all_internal_receiver_handlers]
all_client_event_handlers = [s.lstrip("_") for s in all_internal_client_event_handlers]
all_handlers = all_receiver_handlers + all_client_event_handlers
class ThreadsafeMock(object):
    """Minimal stand-in for a Mock whose call counting is thread-safe.

    Only ``call_count`` is tracked; a lock guards the increment so that
    invocations arriving from worker threads are tallied accurately.
    More Mock features could be added here as they become necessary.
    """

    def __init__(self):
        self.call_count = 0
        self.lock = threading.Lock()

    def __call__(self, *args, **kwargs):
        # Guard the read-modify-write so concurrent calls are all counted.
        self.lock.acquire()
        try:
            self.call_count = self.call_count + 1
        finally:
            self.lock.release()
@pytest.fixture
def inbox_manager(mocker):
    """Return an InboxManager configured with synchronous client inboxes."""
    return InboxManager(inbox_type=SyncClientInbox)
@pytest.fixture
def handler():
    """Return a simple one-argument no-op function usable as a handler."""
    def some_handler_fn(arg):
        pass
    return some_handler_fn
@pytest.mark.describe("SyncHandlerManager - Instantiation")
class TestInstantiation(object):
@pytest.mark.it("Initializes handler properties to None")
@pytest.mark.parametrize("handler_name", all_handlers)
def test_handlers(self, inbox_manager, handler_name):
hm = SyncHandlerManager(inbox_manager)
assert getattr(hm, handler_name) is None
@pytest.mark.it("Initializes receiver handler runner thread references to None")
@pytest.mark.parametrize(
"handler_name", all_internal_receiver_handlers, ids=all_receiver_handlers
)
def test_receiver_handler_runners(self, inbox_manager, handler_name):
hm = SyncHandlerManager(inbox_manager)
assert hm._receiver_handler_runners[handler_name] is None
@pytest.mark.it("Initializes client event handler runner thread reference to None")
def test_client_event_handler_runner(self, inbox_manager):
hm = SyncHandlerManager(inbox_manager)
assert hm._client_event_runner is None
@pytest.mark.describe("SyncHandlerManager - .stop()")
class TestStop(object):
@pytest.fixture(
params=[
"No handlers running",
"Some receiver handlers running",
"Some client event handlers running",
"Some receiver and some client event handlers running",
"All handlers running",
]
)
def handler_manager(self, request, inbox_manager, handler):
hm = SyncHandlerManager(inbox_manager)
if request.param == "Some receiver handlers running":
# Set an arbitrary receiver handler
hm.on_message_received = handler
elif request.param == "Some client event handlers running":
# Set an arbitrary client event handler
hm.on_connection_state_change = handler
elif request.param == "Some receiver and some client event handlers running":
# Set an arbitrary receiver and client event handler
hm.on_message_received = handler
hm.on_connection_state_change = handler
elif request.param == "All handlers running":
# NOTE: this sets all handlers to be the same fn, but this doesn't really
# make a difference in this context
for handler_name in all_handlers:
setattr(hm, handler_name, handler)
yield hm
hm.stop()
@pytest.mark.it("Stops all currently running handlers")
def test_stop_all(self, handler_manager):
handler_manager.stop()
for handler_name in all_internal_receiver_handlers:
assert handler_manager._receiver_handler_runners[handler_name] is None
assert handler_manager._client_event_runner is None
@pytest.mark.it(
"Stops only the currently running receiver handlers if the 'receiver_handlers_only' parameter is True"
)
def test_stop_only_receiver_handlers(self, handler_manager):
if handler_manager._client_event_runner is not None:
client_event_handlers_running = True
else:
client_event_handlers_running = False
handler_manager.stop(receiver_handlers_only=True)
# All receiver handlers have stopped
for handler_name in all_internal_receiver_handlers:
assert handler_manager._receiver_handler_runners[handler_name] is None
# If the client event handlers were running, they are STILL running
if client_event_handlers_running:
assert handler_manager._client_event_runner is not None
@pytest.mark.it("Completes all pending handler invocations before stopping the runner(s)")
def test_completes_pending(self, mocker, inbox_manager):
hm = SyncHandlerManager(inbox_manager)
# NOTE: We use two handlers arbitrarily here to show this happens for all handler runners
mock_msg_handler = ThreadsafeMock()
mock_mth_handler = ThreadsafeMock()
msg_inbox = inbox_manager.get_unified_message_inbox()
mth_inbox = inbox_manager.get_method_request_inbox()
for _ in range(200): # sufficiently many items so can't complete quickly
msg_inbox.put(mocker.MagicMock())
mth_inbox.put(mocker.MagicMock())
hm.on_message_received = mock_msg_handler
hm.on_method_request_received = mock_mth_handler
assert mock_msg_handler.call_count < 200
assert mock_mth_handler.call_count < 200
hm.stop()
time.sleep(0.1)
assert mock_msg_handler.call_count == 200
assert mock_mth_handler.call_count == 200
assert msg_inbox.empty()
assert mth_inbox.empty()
@pytest.mark.describe("SyncHandlerManager - .ensure_running()")
class TestEnsureRunning(object):
@pytest.fixture(
params=[
"All handlers set, all stopped",
"All handlers set, receivers stopped, client events running",
"All handlers set, all running",
"Some receiver and client event handlers set, all stopped",
"Some receiver and client event handlers set, receivers stopped, client events running",
"Some receiver and client event handlers set, all running",
"Some receiver handlers set, all stopped",
"Some receiver handlers set, all running",
"Some client event handlers set, all stopped",
"Some client event handlers set, all running",
"No handlers set",
]
)
def handler_manager(self, request, inbox_manager, handler):
# NOTE: this sets all handlers to be the same fn, but this doesn't really
# make a difference in this context
hm = SyncHandlerManager(inbox_manager)
if request.param == "All handlers set, all stopped":
for handler_name in all_handlers:
setattr(hm, handler_name, handler)
hm.stop()
elif request.param == "All handlers set, receivers stopped, client events running":
for handler_name in all_handlers:
setattr(hm, handler_name, handler)
hm.stop(receiver_handlers_only=True)
elif request.param == "All handlers set, all running":
for handler_name in all_handlers:
setattr(hm, handler_name, handler)
elif request.param == "Some receiver and client event handlers set, all stopped":
hm.on_message_received = handler
hm.on_method_request_received = handler
hm.on_connection_state_change = handler
hm.on_new_sastoken_required = handler
hm.stop()
elif (
request.param
== "Some receiver and client event handlers set, receivers stopped, client events running"
):
hm.on_message_received = handler
hm.on_method_request_received = handler
hm.on_connection_state_change = handler
hm.on_new_sastoken_required = handler
hm.stop(receiver_handlers_only=True)
elif request.param == "Some receiver and client event handlers set, all running":
hm.on_message_received = handler
hm.on_method_request_received = handler
hm.on_connection_state_change = handler
hm.on_new_sastoken_required = handler
elif request.param == "Some receiver handlers set, all stopped":
hm.on_message_received = handler
hm.on_method_request_received = handler
hm.stop()
elif request.param == "Some receiver handlers set, all running":
hm.on_message_received = handler
hm.on_method_request_received = handler
elif request.param == "Some client event handlers set, all stopped":
hm.on_connection_state_change = handler
hm.on_new_sastoken_required = handler
hm.stop()
elif request.param == "Some client event handlers set, all running":
hm.on_connection_state_change = handler
hm.on_new_sastoken_required = handler
yield hm
hm.stop()
@pytest.mark.it(
"Starts handler runners for any handler that is set, but does not have a handler runner running"
)
def test_starts_runners_if_necessary(self, handler_manager):
handler_manager.ensure_running()
# Check receiver handlers
for handler_name in all_receiver_handlers:
if getattr(handler_manager, handler_name) is not None:
# NOTE: this assumes the convention of internal names being the name of a handler
# prefixed with a "_". If this ever changes, you must change this test.
assert handler_manager._receiver_handler_runners["_" + handler_name] is not None
# Check client event handlers
for handler_name in all_client_event_handlers:
if getattr(handler_manager, handler_name) is not None:
assert handler_manager._client_event_runner is not None
# don't need to check the rest of the handlers since they all share a runner
break
# ##############
# # PROPERTIES #
# ##############
class SharedHandlerPropertyTests(object):
    """Base class for tests common to every handler property.

    Subclasses supply a ``handler_name`` fixture identifying the property
    under test.
    """

    @pytest.fixture
    def handler_manager(self, inbox_manager):
        hm = SyncHandlerManager(inbox_manager)
        yield hm
        # Teardown: make sure no runner threads outlive the test.
        hm.stop()

    # NOTE: We use setattr() and getattr() in these tests so they're generic to all properties.
    # This is functionally identical to doing explicit assignment to a property, it just
    # doesn't read quite as well.

    @pytest.mark.it("Can be both read and written to")
    def test_read_write(self, handler_name, handler_manager, handler):
        assert getattr(handler_manager, handler_name) is None
        setattr(handler_manager, handler_name, handler)
        assert getattr(handler_manager, handler_name) is handler
        setattr(handler_manager, handler_name, None)
        assert getattr(handler_manager, handler_name) is None
class SharedReceiverHandlerPropertyTests(SharedHandlerPropertyTests):
# NOTE: If there is ever any deviation in the convention of what the internal names of handlers
# are other than just a prefixed "_", we'll have to move this fixture to the child classes so
# it can be unique to each handler
    @pytest.fixture
    def handler_name_internal(self, handler_name):
        """Internal name of the handler under test (public name + '_' prefix)."""
        return "_" + handler_name
@pytest.mark.it(
"Creates and starts a daemon Thread for the correpsonding handler runner when value is set to a function"
)
def test_thread_created(self, handler_name, handler_name_internal, handler_manager, handler):
assert handler_manager._receiver_handler_runners[handler_name_internal] is None
setattr(handler_manager, handler_name, handler)
assert isinstance(
handler_manager._receiver_handler_runners[handler_name_internal], threading.Thread
)
assert handler_manager._receiver_handler_runners[handler_name_internal].daemon is True
    @pytest.mark.it(
        "Stops the corresponding handler runner and completes any existing daemon Thread for it when the value is set back to None"
    )
    def test_thread_removed(self, handler_name, handler_name_internal, handler_manager, handler):
        """Clearing the handler stops and discards its runner thread."""
        # Set handler
        setattr(handler_manager, handler_name, handler)
        # Thread has been created and is alive
        t = handler_manager._receiver_handler_runners[handler_name_internal]
        assert isinstance(t, threading.Thread)
        assert t.is_alive()
        # Set the handler back to None
        setattr(handler_manager, handler_name, None)
        # Thread has finished and the manager no longer has a reference to it
        assert not t.is_alive()
        assert handler_manager._receiver_handler_runners[handler_name_internal] is None
    @pytest.mark.it(
        "Does not delete, remove, or replace the Thread for the corresponding handler runner, when updated with a new function value"
    )
    def test_thread_unchanged_by_handler_update(
        self, handler_name, handler_name_internal, handler_manager, handler
    ):
        """Replacing the handler fn reuses the existing runner thread."""
        # Set the handler
        setattr(handler_manager, handler_name, handler)
        # Thread has been created and is alive
        t = handler_manager._receiver_handler_runners[handler_name_internal]
        assert isinstance(t, threading.Thread)
        assert t.is_alive()

        # Set new handler
        def new_handler(arg):
            pass

        setattr(handler_manager, handler_name, new_handler)
        # The very same Thread object is still in use, and still running.
        assert handler_manager._receiver_handler_runners[handler_name_internal] is t
        assert t.is_alive()
    @pytest.mark.it(
        "Is invoked by the runner when the Inbox corresponding to the handler receives an object, passing that object to the handler"
    )
    def test_handler_invoked(self, mocker, handler_name, handler_manager, inbox):
        """An inbox item is delivered to the handler by the runner thread."""
        # Set the handler
        mock_handler = mocker.MagicMock()
        setattr(handler_manager, handler_name, mock_handler)
        # Handler has not been called
        assert mock_handler.call_count == 0

        # Add an item to corresponding inbox, triggering the handler
        mock_obj = mocker.MagicMock()
        inbox.put(mock_obj)
        # Give the runner thread time to pick the item up.
        time.sleep(0.1)

        # Handler has been called with the item from the inbox
        assert mock_handler.call_count == 1
        assert mock_handler.call_args == mocker.call(mock_obj)
    @pytest.mark.it(
        "Is invoked by the runner every time the Inbox corresponding to the handler receives an object"
    )
    def test_handler_invoked_multiple(self, mocker, handler_name, handler_manager, inbox):
        """Each inbox item triggers exactly one handler invocation."""
        # Set the handler (threadsafe mock so cross-thread counts are accurate)
        mock_handler = ThreadsafeMock()
        setattr(handler_manager, handler_name, mock_handler)
        # Handler has not been called
        assert mock_handler.call_count == 0

        # Add 5 items to the corresponding inbox, triggering the handler
        for _ in range(5):
            inbox.put(mocker.MagicMock())
        # Give the runner thread time to drain the inbox.
        time.sleep(0.2)

        # Handler has been called 5 times
        assert mock_handler.call_count == 5
@pytest.mark.it(
"Is invoked for every item already in the corresponding Inbox at the moment of handler removal"
)
def test_handler_resolve_pending_items_before_handler_removal(
self, mocker, handler_name, handler_manager, inbox
):
# Use a threadsafe mock to ensure accurate counts
mock_handler = ThreadsafeMock()
assert inbox.empty()
# Queue up a bunch of items in the inbox
for _ in range(100):
inbox.put(mocker.MagicMock())
# The handler has not yet been called
assert mock_handler.call_count == 0
# Items are still in the inbox
assert not inbox.empty()
# Set the handler
setattr(handler_manager, handler_name, mock_handler)
# The handler has not yet been called for everything that was in the inbox
# NOTE: I'd really like to show that the handler call count is also > 0 here, but
# it's pretty difficult to make the timing work
assert mock_handler.call_count < 100
# Immediately remove the handler
setattr(handler_manager, handler_name, None)
# Wait to give a chance for the handler runner to finish calling everything
time.sleep(0.2)
# Despite removal, handler has been called for everything that was in the inbox at the
# time of the removal
assert mock_handler.call_count == 100
assert inbox.empty()
# Add some more items
for _ in range(100):
inbox.put(mocker.MagicMock())
# Wait to give a chance for the handler to be called (it won't)
time.sleep(0.2)
# Despite more items added to inbox, no further handler calls have been made beyond the
# initial calls that were made when the original items were added
assert mock_handler.call_count == 100
@pytest.mark.it(
"Sends a HandlerManagerException to the background exception handler if any exception is raised during its invocation"
)
def test_exception_in_handler(
self, mocker, handler_name, handler_manager, inbox, arbitrary_exception
):
background_exc_spy = mocker.spy(handle_exceptions, "handle_background_exception")
# Handler will raise exception when called
mock_handler = mocker.MagicMock()
mock_handler.side_effect = arbitrary_exception
# Set handler
setattr(handler_manager, handler_name, mock_handler)
# Handler has not been called
assert mock_handler.call_count == 0
# Background exception handler has not been called
assert background_exc_spy.call_count == 0
# Add an item to corresponding inbox, triggering the handler
inbox.put(mocker.MagicMock())
time.sleep(0.1)
# Handler has now been called
assert mock_handler.call_count == 1
# Background exception handler was called
assert background_exc_spy.call_count == 1
e = background_exc_spy.call_args[0][0]
assert isinstance(e, HandlerManagerException)
assert e.__cause__ is arbitrary_exception
@pytest.mark.it(
"Can be updated with a new value that the corresponding handler runner will immediately begin using for handler invocations instead"
)
def test_handler_update_handler(self, mocker, handler_name, handler_manager, inbox):
def handler(arg):
# Invoking handler replaces the set handler with a mock
setattr(handler_manager, handler_name, mocker.MagicMock())
setattr(handler_manager, handler_name, handler)
inbox.put(mocker.MagicMock())
time.sleep(0.1)
# Handler has been replaced with a mock, but the mock has not been invoked
assert getattr(handler_manager, handler_name) is not handler
assert getattr(handler_manager, handler_name).call_count == 0
# Add a new item to the inbox
inbox.put(mocker.MagicMock())
time.sleep(0.1)
# The mock was now called
assert getattr(handler_manager, handler_name).call_count == 1
class SharedClientEventHandlerPropertyTests(SharedHandlerPropertyTests):
@pytest.fixture
def inbox(self, inbox_manager):
return inbox_manager.get_client_event_inbox()
@pytest.mark.it(
"Creates and starts a daemon Thread for the Client Event handler runner when value is set to a function if the Client Event handler runner does not already exist"
)
def test_no_client_event_runner(self, handler_name, handler_manager, handler):
assert handler_manager._client_event_runner is None
setattr(handler_manager, handler_name, handler)
t = handler_manager._client_event_runner
assert isinstance(t, threading.Thread)
assert t.daemon is True
@pytest.mark.it(
"Does not modify the Client Event handler runner thread when value is set to a function if the Client Event handler runner already exists"
)
def test_client_event_runner_already_exists(self, handler_name, handler_manager, handler):
# Add a fake client event runner thread
fake_runner_thread = threading.Thread()
fake_runner_thread.daemon = True
fake_runner_thread.start()
handler_manager._client_event_runner = fake_runner_thread
# Set handler
setattr(handler_manager, handler_name, handler)
# Fake thread was not changed
assert handler_manager._client_event_runner is fake_runner_thread
@pytest.mark.it(
"Does not delete, remove, or replace the Thread for the Client Event handler runner when value is set back to None"
)
def test_handler_removed(self, handler_name, handler_manager, handler):
# Set handler
setattr(handler_manager, handler_name, handler)
# Thread has been created and is alive
t = handler_manager._client_event_runner
assert isinstance(t, threading.Thread)
assert t.is_alive()
# Set the handler back to None
setattr(handler_manager, handler_name, None)
# Thread is still maintained on the manager and alive
assert handler_manager._client_event_runner is t
assert t.is_alive()
@pytest.mark.it(
"Does not delete, remove, or replace the Thread for the Client Event handler runner when updated with a new function value"
)
def test_handler_update(self, handler_name, handler_manager, handler):
# Set handler
setattr(handler_manager, handler_name, handler)
# Thread has been created and is alive
t = handler_manager._client_event_runner
assert isinstance(t, threading.Thread)
assert t.is_alive()
# Set new handler
def new_handler(arg):
pass
setattr(handler_manager, handler_name, new_handler)
# Thread is still maintained on the manager and alive
assert handler_manager._client_event_runner is t
assert t.is_alive()
@pytest.mark.it(
"Is invoked by the runner only when the Client Event Inbox receives a matching Client Event, passing any arguments to the handler"
)
def test_handler_invoked(self, mocker, handler_name, handler_manager, inbox, event):
# Set the handler
mock_handler = mocker.MagicMock()
setattr(handler_manager, handler_name, mock_handler)
# Handler has not been called
assert mock_handler.call_count == 0
# Add the event to the client event inbox
inbox.put(event)
time.sleep(0.1)
# Handler has been called with the arguments from the event
assert mock_handler.call_count == 1
assert mock_handler.call_args == mocker.call(*event.args_for_user)
# Add non-matching event to the client event inbox
non_matching_event = client_event.ClientEvent("NON_MATCHING_EVENT")
inbox.put(non_matching_event)
time.sleep(0.1)
# Handler has not been called again
assert mock_handler.call_count == 1
@pytest.mark.it(
"Is invoked by the runner every time the Client Event Inbox receives a matching Client Event"
)
def test_handler_invoked_multiple(self, handler_name, handler_manager, inbox, event):
# Set the handler
mock_handler = ThreadsafeMock()
setattr(handler_manager, handler_name, mock_handler)
# Handler has not been called
assert mock_handler.call_count == 0
# Add 5 matching events to the corresponding inbox, triggering the handler
for _ in range(5):
inbox.put(event)
time.sleep(0.2)
# Handler has been called 5 times
assert mock_handler.call_count == 5
@pytest.mark.it(
"Sends a HandlerManagerException to the background exception handler if any exception is raised during its invocation"
)
def test_exception_in_handler(
self, mocker, handler_name, handler_manager, inbox, event, arbitrary_exception
):
background_exc_spy = mocker.spy(handle_exceptions, "handle_background_exception")
# Handler will raise exception when called
mock_handler = mocker.MagicMock()
mock_handler.side_effect = arbitrary_exception
# Set handler
setattr(handler_manager, handler_name, mock_handler)
# Handler has not been called
assert mock_handler.call_count == 0
# Background exception handler has not been called
assert background_exc_spy.call_count == 0
# Add the event to the client event inbox, triggering the handler
inbox.put(event)
time.sleep(0.1)
# Handler has now been called
assert mock_handler.call_count == 1
# Background exception handler was called
assert background_exc_spy.call_count == 1
e = background_exc_spy.call_args[0][0]
assert isinstance(e, HandlerManagerException)
assert e.__cause__ is arbitrary_exception
@pytest.mark.it(
"Can be updated with a new value that the Client Event handler runner will immediately begin using for handler invocations instead"
)
def test_updated_handler(self, mocker, handler_name, handler_manager, inbox, event):
def handler(*args):
# Invoking handler replaces the set handler with a mock
setattr(handler_manager, handler_name, mocker.MagicMock())
setattr(handler_manager, handler_name, handler)
inbox.put(event)
time.sleep(0.1)
# Handler has been replaced with a mock, but the mock has not been invoked
assert getattr(handler_manager, handler_name) is not handler
assert getattr(handler_manager, handler_name).call_count == 0
# Add a new event to the inbox
inbox.put(event)
time.sleep(0.1)
# The mock was now called
assert getattr(handler_manager, handler_name).call_count == 1
@pytest.mark.describe("SyncHandlerManager - PROPERTY: .on_message_received")
class TestSyncHandlerManagerPropertyOnMessageReceived(SharedReceiverHandlerPropertyTests):
@pytest.fixture
def handler_name(self):
return "on_message_received"
@pytest.fixture
def inbox(self, inbox_manager):
return inbox_manager.get_unified_message_inbox()
@pytest.mark.describe("SyncHandlerManager - PROPERTY: .on_method_request_received")
class TestSyncHandlerManagerPropertyOnMethodRequestReceived(SharedReceiverHandlerPropertyTests):
@pytest.fixture
def handler_name(self):
return "on_method_request_received"
@pytest.fixture
def inbox(self, inbox_manager):
return inbox_manager.get_method_request_inbox()
@pytest.mark.describe("SyncHandlerManager - PROPERTY: .on_twin_desired_properties_patch_received")
class TestSyncHandlerManagerPropertyOnTwinDesiredPropertiesPatchReceived(
SharedReceiverHandlerPropertyTests
):
@pytest.fixture
def handler_name(self):
return "on_twin_desired_properties_patch_received"
@pytest.fixture
def inbox(self, inbox_manager):
return inbox_manager.get_twin_patch_inbox()
@pytest.mark.describe("SyncHandlerManager - PROPERTY: .on_connection_state_change")
class TestSyncHandlerManagerPropertyOnConnectionStateChange(SharedClientEventHandlerPropertyTests):
@pytest.fixture
def handler_name(self):
return "on_connection_state_change"
@pytest.fixture
def event(self):
return client_event.ClientEvent(client_event.CONNECTION_STATE_CHANGE)
@pytest.mark.describe("SyncHandlerManager - PROPERTY: .on_new_sastoken_required")
class TestSyncHandlerManagerPropertyOnNewSastokenRequired(SharedClientEventHandlerPropertyTests):
@pytest.fixture
def handler_name(self):
return "on_new_sastoken_required"
@pytest.fixture
def event(self):
return client_event.ClientEvent(client_event.NEW_SASTOKEN_REQUIRED)
@pytest.mark.describe("SyncHandlerManager - PROPERTY: .on_background_exception")
class TestSyncHandlerManagerPropertyOnBackgroundException(SharedClientEventHandlerPropertyTests):
@pytest.fixture
def handler_name(self):
return "on_background_exception"
@pytest.fixture
def event(self, arbitrary_exception):
return client_event.ClientEvent(client_event.BACKGROUND_EXCEPTION, arbitrary_exception)
@pytest.mark.describe("SyncHandlerManager - PROPERTY: .handling_client_events")
class TestSyncHandlerManagerPropertyHandlingClientEvents(object):
@pytest.fixture
def handler_manager(self, inbox_manager):
hm = SyncHandlerManager(inbox_manager)
yield hm
hm.stop()
@pytest.mark.it("Is True if the Client Event Handler Runner is running")
def test_client_event_runner_running(self, handler_manager):
# Add a fake client event runner thread
fake_runner_thread = threading.Thread()
fake_runner_thread.daemon = True
fake_runner_thread.start()
handler_manager._client_event_runner = fake_runner_thread
assert handler_manager.handling_client_events is True
@pytest.mark.it("Is False if the Client Event Handler Runner is not running")
def test_client_event_runner_not_running(self, handler_manager):
assert handler_manager._client_event_runner is None
assert handler_manager.handling_client_events is False
| [
"logging.basicConfig",
"azure.iot.device.iothub.sync_handler_manager.SyncHandlerManager",
"pytest.mark.describe",
"azure.iot.device.iothub.client_event.ClientEvent",
"azure.iot.device.iothub.inbox_manager.InboxManager",
"threading.Lock",
"time.sleep",
"pytest.mark.parametrize",
"pytest.fixture",
"... | [((787, 827), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.DEBUG'}), '(level=logging.DEBUG)\n', (806, 827), False, 'import logging\n'), ((2815, 2873), 'pytest.mark.describe', 'pytest.mark.describe', (['"""SyncHandlerManager - Instantiation"""'], {}), "('SyncHandlerManager - Instantiation')\n", (2835, 2873), False, 'import pytest\n'), ((3822, 3874), 'pytest.mark.describe', 'pytest.mark.describe', (['"""SyncHandlerManager - .stop()"""'], {}), "('SyncHandlerManager - .stop()')\n", (3842, 3874), False, 'import pytest\n'), ((7452, 7514), 'pytest.mark.describe', 'pytest.mark.describe', (['"""SyncHandlerManager - .ensure_running()"""'], {}), "('SyncHandlerManager - .ensure_running()')\n", (7472, 7514), False, 'import pytest\n'), ((27812, 27887), 'pytest.mark.describe', 'pytest.mark.describe', (['"""SyncHandlerManager - PROPERTY: .on_message_received"""'], {}), "('SyncHandlerManager - PROPERTY: .on_message_received')\n", (27832, 27887), False, 'import pytest\n'), ((28181, 28268), 'pytest.mark.describe', 'pytest.mark.describe', (['"""SyncHandlerManager - PROPERTY: .on_method_request_received"""'], {}), "(\n 'SyncHandlerManager - PROPERTY: .on_method_request_received')\n", (28201, 28268), False, 'import pytest\n'), ((28569, 28676), 'pytest.mark.describe', 'pytest.mark.describe', (['"""SyncHandlerManager - PROPERTY: .on_twin_desired_properties_patch_received"""'], {}), "(\n 'SyncHandlerManager - PROPERTY: .on_twin_desired_properties_patch_received'\n )\n", (28589, 28676), False, 'import pytest\n'), ((29002, 29089), 'pytest.mark.describe', 'pytest.mark.describe', (['"""SyncHandlerManager - PROPERTY: .on_connection_state_change"""'], {}), "(\n 'SyncHandlerManager - PROPERTY: .on_connection_state_change')\n", (29022, 29089), False, 'import pytest\n'), ((29400, 29485), 'pytest.mark.describe', 'pytest.mark.describe', (['"""SyncHandlerManager - PROPERTY: .on_new_sastoken_required"""'], {}), "('SyncHandlerManager - PROPERTY: 
.on_new_sastoken_required'\n )\n", (29420, 29485), False, 'import pytest\n'), ((29790, 29869), 'pytest.mark.describe', 'pytest.mark.describe', (['"""SyncHandlerManager - PROPERTY: .on_background_exception"""'], {}), "('SyncHandlerManager - PROPERTY: .on_background_exception')\n", (29810, 29869), False, 'import pytest\n'), ((30219, 30297), 'pytest.mark.describe', 'pytest.mark.describe', (['"""SyncHandlerManager - PROPERTY: .handling_client_events"""'], {}), "('SyncHandlerManager - PROPERTY: .handling_client_events')\n", (30239, 30297), False, 'import pytest\n'), ((2667, 2707), 'azure.iot.device.iothub.inbox_manager.InboxManager', 'InboxManager', ([], {'inbox_type': 'SyncClientInbox'}), '(inbox_type=SyncClientInbox)\n', (2679, 2707), False, 'from azure.iot.device.iothub.inbox_manager import InboxManager\n'), ((2912, 2968), 'pytest.mark.it', 'pytest.mark.it', (['"""Initializes handler properties to None"""'], {}), "('Initializes handler properties to None')\n", (2926, 2968), False, 'import pytest\n'), ((2974, 3027), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""handler_name"""', 'all_handlers'], {}), "('handler_name', all_handlers)\n", (2997, 3027), False, 'import pytest\n'), ((3188, 3267), 'pytest.mark.it', 'pytest.mark.it', (['"""Initializes receiver handler runner thread references to None"""'], {}), "('Initializes receiver handler runner thread references to None')\n", (3202, 3267), False, 'import pytest\n'), ((3273, 3376), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""handler_name"""', 'all_internal_receiver_handlers'], {'ids': 'all_receiver_handlers'}), "('handler_name', all_internal_receiver_handlers, ids\n =all_receiver_handlers)\n", (3296, 3376), False, 'import pytest\n'), ((3579, 3666), 'pytest.mark.it', 'pytest.mark.it', (['"""Initializes client event handler runner thread reference to None"""'], {}), "(\n 'Initializes client event handler runner thread reference to None')\n", (3593, 3666), False, 'import pytest\n'), ((3904, 
4114), 'pytest.fixture', 'pytest.fixture', ([], {'params': "['No handlers running', 'Some receiver handlers running',\n 'Some client event handlers running',\n 'Some receiver and some client event handlers running',\n 'All handlers running']"}), "(params=['No handlers running',\n 'Some receiver handlers running', 'Some client event handlers running',\n 'Some receiver and some client event handlers running',\n 'All handlers running'])\n", (3918, 4114), False, 'import pytest\n'), ((5200, 5254), 'pytest.mark.it', 'pytest.mark.it', (['"""Stops all currently running handlers"""'], {}), "('Stops all currently running handlers')\n", (5214, 5254), False, 'import pytest\n'), ((5541, 5669), 'pytest.mark.it', 'pytest.mark.it', (['"""Stops only the currently running receiver handlers if the \'receiver_handlers_only\' parameter is True"""'], {}), '(\n "Stops only the currently running receiver handlers if the \'receiver_handlers_only\' parameter is True"\n )\n', (5555, 5669), False, 'import pytest\n'), ((6353, 6447), 'pytest.mark.it', 'pytest.mark.it', (['"""Completes all pending handler invocations before stopping the runner(s)"""'], {}), "(\n 'Completes all pending handler invocations before stopping the runner(s)')\n", (6367, 6447), False, 'import pytest\n'), ((7553, 8149), 'pytest.fixture', 'pytest.fixture', ([], {'params': "['All handlers set, all stopped',\n 'All handlers set, receivers stopped, client events running',\n 'All handlers set, all running',\n 'Some receiver and client event handlers set, all stopped',\n 'Some receiver and client event handlers set, receivers stopped, client events running'\n , 'Some receiver and client event handlers set, all running',\n 'Some receiver handlers set, all stopped',\n 'Some receiver handlers set, all running',\n 'Some client event handlers set, all stopped',\n 'Some client event handlers set, all running', 'No handlers set']"}), "(params=['All handlers set, all stopped',\n 'All handlers set, receivers stopped, client events 
running',\n 'All handlers set, all running',\n 'Some receiver and client event handlers set, all stopped',\n 'Some receiver and client event handlers set, receivers stopped, client events running'\n , 'Some receiver and client event handlers set, all running',\n 'Some receiver handlers set, all stopped',\n 'Some receiver handlers set, all running',\n 'Some client event handlers set, all stopped',\n 'Some client event handlers set, all running', 'No handlers set'])\n", (7567, 8149), False, 'import pytest\n'), ((10873, 10995), 'pytest.mark.it', 'pytest.mark.it', (['"""Starts handler runners for any handler that is set, but does not have a handler runner running"""'], {}), "(\n 'Starts handler runners for any handler that is set, but does not have a handler runner running'\n )\n", (10887, 10995), False, 'import pytest\n'), ((12361, 12410), 'pytest.mark.it', 'pytest.mark.it', (['"""Can be both read and written to"""'], {}), "('Can be both read and written to')\n", (12375, 12410), False, 'import pytest\n'), ((13200, 13331), 'pytest.mark.it', 'pytest.mark.it', (['"""Creates and starts a daemon Thread for the correpsonding handler runner when value is set to a function"""'], {}), "(\n 'Creates and starts a daemon Thread for the correpsonding handler runner when value is set to a function'\n )\n", (13214, 13331), False, 'import pytest\n'), ((13811, 13960), 'pytest.mark.it', 'pytest.mark.it', (['"""Stops the corresponding handler runner and completes any existing daemon Thread for it when the value is set back to None"""'], {}), "(\n 'Stops the corresponding handler runner and completes any existing daemon Thread for it when the value is set back to None'\n )\n", (13825, 13960), False, 'import pytest\n'), ((14636, 14787), 'pytest.mark.it', 'pytest.mark.it', (['"""Does not delete, remove, or replace the Thread for the corresponding handler runner, when updated with a new function value"""'], {}), "(\n 'Does not delete, remove, or replace the Thread for the corresponding 
handler runner, when updated with a new function value'\n )\n", (14650, 14787), False, 'import pytest\n'), ((15458, 15609), 'pytest.mark.it', 'pytest.mark.it', (['"""Is invoked by the runner when the Inbox corresponding to the handler receives an object, passing that object to the handler"""'], {}), "(\n 'Is invoked by the runner when the Inbox corresponding to the handler receives an object, passing that object to the handler'\n )\n", (15472, 15609), False, 'import pytest\n'), ((16244, 16365), 'pytest.mark.it', 'pytest.mark.it', (['"""Is invoked by the runner every time the Inbox corresponding to the handler receives an object"""'], {}), "(\n 'Is invoked by the runner every time the Inbox corresponding to the handler receives an object'\n )\n", (16258, 16365), False, 'import pytest\n'), ((16930, 17051), 'pytest.mark.it', 'pytest.mark.it', (['"""Is invoked for every item already in the corresponding Inbox at the moment of handler removal"""'], {}), "(\n 'Is invoked for every item already in the corresponding Inbox at the moment of handler removal'\n )\n", (16944, 17051), False, 'import pytest\n'), ((18782, 18926), 'pytest.mark.it', 'pytest.mark.it', (['"""Sends a HandlerManagerException to the background exception handler if any exception is raised during its invocation"""'], {}), "(\n 'Sends a HandlerManagerException to the background exception handler if any exception is raised during its invocation'\n )\n", (18796, 18926), False, 'import pytest\n'), ((20035, 20193), 'pytest.mark.it', 'pytest.mark.it', (['"""Can be updated with a new value that the corresponding handler runner will immediately begin using for handler invocations instead"""'], {}), "(\n 'Can be updated with a new value that the corresponding handler runner will immediately begin using for handler invocations instead'\n )\n", (20049, 20193), False, 'import pytest\n'), ((21189, 21377), 'pytest.mark.it', 'pytest.mark.it', (['"""Creates and starts a daemon Thread for the Client Event handler runner 
when value is set to a function if the Client Event handler runner does not already exist"""'], {}), "(\n 'Creates and starts a daemon Thread for the Client Event handler runner when value is set to a function if the Client Event handler runner does not already exist'\n )\n", (21203, 21377), False, 'import pytest\n'), ((21715, 21879), 'pytest.mark.it', 'pytest.mark.it', (['"""Does not modify the Client Event handler runner thread when value is set to a function if the Client Event handler runner already exists"""'], {}), "(\n 'Does not modify the Client Event handler runner thread when value is set to a function if the Client Event handler runner already exists'\n )\n", (21729, 21879), False, 'import pytest\n'), ((22413, 22554), 'pytest.mark.it', 'pytest.mark.it', (['"""Does not delete, remove, or replace the Thread for the Client Event handler runner when value is set back to None"""'], {}), "(\n 'Does not delete, remove, or replace the Thread for the Client Event handler runner when value is set back to None'\n )\n", (22427, 22554), False, 'import pytest\n'), ((23129, 23278), 'pytest.mark.it', 'pytest.mark.it', (['"""Does not delete, remove, or replace the Thread for the Client Event handler runner when updated with a new function value"""'], {}), "(\n 'Does not delete, remove, or replace the Thread for the Client Event handler runner when updated with a new function value'\n )\n", (23143, 23278), False, 'import pytest\n'), ((23896, 24052), 'pytest.mark.it', 'pytest.mark.it', (['"""Is invoked by the runner only when the Client Event Inbox receives a matching Client Event, passing any arguments to the handler"""'], {}), "(\n 'Is invoked by the runner only when the Client Event Inbox receives a matching Client Event, passing any arguments to the handler'\n )\n", (23910, 24052), False, 'import pytest\n'), ((24938, 25057), 'pytest.mark.it', 'pytest.mark.it', (['"""Is invoked by the runner every time the Client Event Inbox receives a matching Client Event"""'], {}), 
"(\n 'Is invoked by the runner every time the Client Event Inbox receives a matching Client Event'\n )\n", (24952, 25057), False, 'import pytest\n'), ((25618, 25762), 'pytest.mark.it', 'pytest.mark.it', (['"""Sends a HandlerManagerException to the background exception handler if any exception is raised during its invocation"""'], {}), "(\n 'Sends a HandlerManagerException to the background exception handler if any exception is raised during its invocation'\n )\n", (25632, 25762), False, 'import pytest\n'), ((26870, 27027), 'pytest.mark.it', 'pytest.mark.it', (['"""Can be updated with a new value that the Client Event handler runner will immediately begin using for handler invocations instead"""'], {}), "(\n 'Can be updated with a new value that the Client Event handler runner will immediately begin using for handler invocations instead'\n )\n", (26884, 27027), False, 'import pytest\n'), ((30518, 30589), 'pytest.mark.it', 'pytest.mark.it', (['"""Is True if the Client Event Handler Runner is running"""'], {}), "('Is True if the Client Event Handler Runner is running')\n", (30532, 30589), False, 'import pytest\n'), ((30962, 31038), 'pytest.mark.it', 'pytest.mark.it', (['"""Is False if the Client Event Handler Runner is not running"""'], {}), "('Is False if the Client Event Handler Runner is not running')\n", (30976, 31038), False, 'import pytest\n'), ((2495, 2511), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (2509, 2511), False, 'import threading\n'), ((3099, 3132), 'azure.iot.device.iothub.sync_handler_manager.SyncHandlerManager', 'SyncHandlerManager', (['inbox_manager'], {}), '(inbox_manager)\n', (3117, 3132), False, 'from azure.iot.device.iothub.sync_handler_manager import SyncHandlerManager, HandlerManagerException\n'), ((3473, 3506), 'azure.iot.device.iothub.sync_handler_manager.SyncHandlerManager', 'SyncHandlerManager', (['inbox_manager'], {}), '(inbox_manager)\n', (3491, 3506), False, 'from azure.iot.device.iothub.sync_handler_manager import 
SyncHandlerManager, HandlerManagerException\n'), ((3738, 3771), 'azure.iot.device.iothub.sync_handler_manager.SyncHandlerManager', 'SyncHandlerManager', (['inbox_manager'], {}), '(inbox_manager)\n', (3756, 3771), False, 'from azure.iot.device.iothub.sync_handler_manager import SyncHandlerManager, HandlerManagerException\n'), ((4265, 4298), 'azure.iot.device.iothub.sync_handler_manager.SyncHandlerManager', 'SyncHandlerManager', (['inbox_manager'], {}), '(inbox_manager)\n', (4283, 4298), False, 'from azure.iot.device.iothub.sync_handler_manager import SyncHandlerManager, HandlerManagerException\n'), ((6517, 6550), 'azure.iot.device.iothub.sync_handler_manager.SyncHandlerManager', 'SyncHandlerManager', (['inbox_manager'], {}), '(inbox_manager)\n', (6535, 6550), False, 'from azure.iot.device.iothub.sync_handler_manager import SyncHandlerManager, HandlerManagerException\n'), ((7267, 7282), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (7277, 7282), False, 'import time\n'), ((8473, 8506), 'azure.iot.device.iothub.sync_handler_manager.SyncHandlerManager', 'SyncHandlerManager', (['inbox_manager'], {}), '(inbox_manager)\n', (8491, 8506), False, 'from azure.iot.device.iothub.sync_handler_manager import SyncHandlerManager, HandlerManagerException\n'), ((12066, 12099), 'azure.iot.device.iothub.sync_handler_manager.SyncHandlerManager', 'SyncHandlerManager', (['inbox_manager'], {}), '(inbox_manager)\n', (12084, 12099), False, 'from azure.iot.device.iothub.sync_handler_manager import SyncHandlerManager, HandlerManagerException\n'), ((16051, 16066), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (16061, 16066), False, 'import time\n'), ((16821, 16836), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (16831, 16836), False, 'import time\n'), ((18146, 18161), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (18156, 18161), False, 'import time\n'), ((18544, 18559), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (18554, 18559), False, 'import 
time\n'), ((19680, 19695), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (19690, 19695), False, 'import time\n'), ((20556, 20571), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (20566, 20571), False, 'import time\n'), ((20878, 20893), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (20888, 20893), False, 'import time\n'), ((22056, 22074), 'threading.Thread', 'threading.Thread', ([], {}), '()\n', (22072, 22074), False, 'import threading\n'), ((24441, 24456), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (24451, 24456), False, 'import time\n'), ((24734, 24780), 'azure.iot.device.iothub.client_event.ClientEvent', 'client_event.ClientEvent', (['"""NON_MATCHING_EVENT"""'], {}), "('NON_MATCHING_EVENT')\n", (24758, 24780), False, 'from azure.iot.device.iothub import client_event\n'), ((24827, 24842), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (24837, 24842), False, 'import time\n'), ((25509, 25524), 'time.sleep', 'time.sleep', (['(0.2)'], {}), '(0.2)\n', (25519, 25524), False, 'import time\n'), ((26515, 26530), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (26525, 26530), False, 'import time\n'), ((27379, 27394), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (27389, 27394), False, 'import time\n'), ((27689, 27704), 'time.sleep', 'time.sleep', (['(0.1)'], {}), '(0.1)\n', (27699, 27704), False, 'import time\n'), ((29334, 29396), 'azure.iot.device.iothub.client_event.ClientEvent', 'client_event.ClientEvent', (['client_event.CONNECTION_STATE_CHANGE'], {}), '(client_event.CONNECTION_STATE_CHANGE)\n', (29358, 29396), False, 'from azure.iot.device.iothub import client_event\n'), ((29726, 29786), 'azure.iot.device.iothub.client_event.ClientEvent', 'client_event.ClientEvent', (['client_event.NEW_SASTOKEN_REQUIRED'], {}), '(client_event.NEW_SASTOKEN_REQUIRED)\n', (29750, 29786), False, 'from azure.iot.device.iothub import client_event\n'), ((30135, 30220), 'azure.iot.device.iothub.client_event.ClientEvent', 
'client_event.ClientEvent', (['client_event.BACKGROUND_EXCEPTION', 'arbitrary_exception'], {}), '(client_event.BACKGROUND_EXCEPTION, arbitrary_exception\n )\n', (30159, 30220), False, 'from azure.iot.device.iothub import client_event\n'), ((30443, 30476), 'azure.iot.device.iothub.sync_handler_manager.SyncHandlerManager', 'SyncHandlerManager', (['inbox_manager'], {}), '(inbox_manager)\n', (30461, 30476), False, 'from azure.iot.device.iothub.sync_handler_manager import SyncHandlerManager, HandlerManagerException\n'), ((30732, 30750), 'threading.Thread', 'threading.Thread', ([], {}), '()\n', (30748, 30750), False, 'import threading\n')] |
import json
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
import pandas as pd
import dash_table
def get_comps_data(bd, projverurl):
    """Fetch the component BOM entries for a Black Duck project version.

    Args:
        bd: Black Duck client exposing ``get_json(url)`` (e.g. a
            ``blackduck.Client``) that returns the parsed JSON response.
        projverurl: Base URL of the project-version resource.

    Returns:
        tuple: ``(df, items)`` where ``df`` is the component list
        flattened into a DataFrame with an extra ``'json'`` column
        holding each raw component entry re-serialized as a JSON
        string, and ``items`` is the raw list of component dicts.
    """
    print('Getting components ...')
    # NOTE(review): limit=5000 silently truncates larger BOMs — confirm
    # whether paging is required for very large project versions.
    comps = bd.get_json(projverurl + "/components?limit=5000")
    items = comps['items']
    df = pd.json_normalize(comps, record_path=['items'])
    # Keep the raw JSON of each row alongside the flattened columns so
    # later callbacks can recover the full component record.
    df['json'] = [json.dumps(comp) for comp in items]
    print('Found ' + str(len(df.index)) + ' Components')
    return df, items
# Column definitions for the components DataTable: each entry maps a
# display header ("name") to the DataFrame field ("id") the column is
# bound to. The commented-out entries are icon columns that are
# currently disabled.
col_data_comps = [
    {"name": ['Component'], "id": "componentName"},
    {"name": ['Version'], "id": "componentVersionName"},
    {"name": ['Ignored'], "id": "ignored"},
    # {"name": ['Ignored'], "id": "ignoreIcon"},
    {"name": ['Reviewed'], "id": "reviewStatus"},
    {"name": ['Policy Violation'], "id": "policyStatus"},
    # {"name": ['Policy Status'], "id": "polIcon"},
    {"name": ['Usage'], "id": "usages"},
    {"name": ['Match Types'], "id": "matchTypes"},
]
def create_compstab(compdata, projname, vername):
    """Build the Dash layout for the Components tab.

    Args:
        compdata: DataFrame of components (as produced by get_comps_data);
            mutated in place: bool columns are converted to str so the
            DataTable filter queries can match them as text.
        projname: project name shown in the heading.
        vername: version name shown in the heading.

    Returns:
        List of dbc.Row elements: heading, action dropdown + buttons,
        and the components DataTable.
    """
    global col_data_comps
    # DataTable filtering compares strings, so stringify boolean columns.
    for col, dtype in compdata.dtypes.items():
        if dtype == 'bool':
            compdata[col] = compdata[col].astype('str')
    return [
        dbc.Row(
            dbc.Col(html.H2("Components")),
        ),
        dbc.Row(
            [
                dbc.Col(html.H5("Project: " + projname + " - Version: " + vername), width=8),
                dbc.Col(
                    dcc.Dropdown(
                        id="sel_comp_action",
                        options=[
                            {'label': 'Select Action ...', 'value': 'NOTHING'},
                            {'label': 'Ignore', 'value': 'IGNORE'},
                            {'label': 'Unignore', 'value': 'UNIGNORE'},
                            {'label': 'Set Reviewed', 'value': 'REVIEW'},
                            {'label': 'Set Unreviewed', 'value': 'UNREVIEW'},
                            {'label': 'Usage - Source', 'value': 'USAGE_SOURCE'},
                            {'label': 'Usage - Statically Linked', 'value': 'USAGE_STATIC'},
                            {'label': 'Usage - Dynamically Linked', 'value': 'USAGE_DYNAMIC'},
                            {'label': 'Usage - Separate Work', 'value': 'USAGE_SEPARATE'},
                            {'label': 'Usage - Merely Aggregated', 'value': 'USAGE_AGGREGATED'},
                            {'label': 'Usage - Implement Standard', 'value': 'USAGE_STANDARD'},
                            {'label': 'Usage - Prerequisite', 'value': 'USAGE_PREREQUISITE'},
                            {'label': 'Usage - Dev Tool/Excluded', 'value': 'USAGE_EXCLUDED'},
                        ],
                        multi=False,
                        placeholder='Select Action ...'
                    ), width=2,
                    align='center',
                ),
                dbc.Col(dbc.Button("Selected Rows", id="button_comp_selected",
                                   className="mr-2", size='sm'), width=1),
                dbc.Col(dbc.Button("All Filtered Rows", id="button_comp_all",
                                   className="mr-2", size='sm'), width=1),
            ]
        ),
        dbc.Row(
            dbc.Col(
                dash_table.DataTable(
                    id='compstable',
                    columns=col_data_comps,
                    style_cell={
                        'overflow': 'hidden',
                        'textOverflow': 'ellipsis',
                        'maxWidth': 0,
                        'font_size': '12px',
                    },
                    data=compdata.to_dict('records'),
                    page_size=30, sort_action='native',
                    filter_action='native',
                    row_selectable="multi",
                    cell_selectable=False,
                    style_header={'backgroundColor': 'rgb(30, 30, 30)', 'color': 'white'},
                    # Tooltips show the full cell value since columns ellipsize.
                    tooltip_data=[
                        {
                            column: {'value': str(value), 'type': 'markdown'}
                            for column, value in row.items()
                        } for row in compdata.to_dict('records')
                    ],
                    tooltip_duration=None,
                    # Column widths plus conditional colouring for policy
                    # violations, reviewed rows and ignored rows.
                    style_data_conditional=[
                        {
                            'if': {'column_id': 'componentName'},
                            'width': '30%'
                        },
                        {
                            'if': {'column_id': 'componentVersionName'},
                            'width': '20%'
                        },
                        {
                            'if': {'column_id': 'ignored'},
                            'width': '10%'
                        },
                        {
                            'if': {'column_id': 'reviewStatus'},
                            'width': '10%'
                        },
                        {
                            'if': {'column_id': 'policyStatus'},
                            'width': '10%'
                        },
                        {
                            'if': {'column_id': 'usages'},
                            'width': '10%'
                        },
                        {
                            'if': {'column_id': 'matchTypes'},
                            'width': '10%'
                        },
                        {
                            'if': {
                                'filter_query': '{policyStatus} = "IN_VIOLATION"',
                                'column_id': 'policyStatus'
                            },
                            'backgroundColor': 'maroon',
                            'color': 'white'
                        },
                        {
                            'if': {
                                'filter_query': '{reviewStatus} = "REVIEWED"',
                                'column_id': 'reviewStatus'
                            },
                            'backgroundColor': 'blue',
                            'color': 'white'
                        },
                        {
                            'if': {
                                'filter_query': '{ignored} eq "True"',
                                'column_id': 'ignored'
                            },
                            'backgroundColor': 'grey',
                            'color': 'white'
                        },
                    ],
                    sort_by=[{'column_id': 'componentName', 'direction': 'asc'},
                             {'column_id': 'componentVersionName', 'direction': 'asc'}]
                    # merge_duplicate_headers=True
                ),
                width=12
            ),
        ),
    ]
def make_comp_toast(message):
    """Build a short-lived notification toast for component processing.

    The dict ``id`` lets pattern-matching callbacks target the toast.
    """
    toast = dbc.Toast(
        message,
        key='toast_comp',
        id={"type": "toast", "id": "toast_comp"},
        header="Component Processing",
        icon="info",
        duration=8000,
        is_open=True,
        dismissable=False,
    )
    return toast
def compactions(bd, action, origdata, vdata, rows, projverurl):
    """Apply a BOM action (ignore/review/usage change) to selected components.

    Args:
        bd: Black Duck client exposing ``session.put``.
        action: key from the component action dropdown (e.g. 'IGNORE');
            unknown keys (e.g. 'NOTHING') are no-ops.
        origdata: full table data; updated in place so the UI refreshes.
        vdata: currently visible (filtered/sorted) table data.
        rows: indices into ``vdata`` of the rows to process.
        projverurl: base URL of the project version.

    Returns:
        Tuple of (origdata, toast) where toast is a confirmation toast
        component, or '' when no component was changed.
    """

    def do_comp_action(url, cdata):
        # PUT the full component record back to the server.
        r = bd.session.put(url, json=cdata)
        if r.status_code == 200:
            print('Processed component ' + cdata['componentName'])
            return True
        else:
            print('Error - cannot update component ' + url)
            return False

    # Maps each dropdown action to the component field to change, the value
    # to send to the server, the toast confirmation text, and the value
    # shown in the table.
    compaction_dict = {
        'IGNORE':
            {'field': 'ignored', 'value': True,
             'confirmation': 'Ignored', 'display': 'True'},
        'UNIGNORE':
            {'field': 'ignored', 'value': False,
             'confirmation': 'Unignored', 'display': 'False'},
        'REVIEW':
            {'field': 'reviewStatus', 'value': 'REVIEWED',
             'confirmation': 'Set Reviewed', 'display': 'REVIEWED'},
        'UNREVIEW':
            {'field': 'reviewStatus', 'value': 'NOT_REVIEWED',
             'confirmation': 'Set Unreviewed', 'display': 'NOT_REVIEWED'},
        'USAGE_SOURCE':
            {'field': 'usages', 'value': ['SOURCE_CODE'],
             'confirmation': 'Usage Changed', 'display': 'SOURCE_CODE'},
        'USAGE_STATIC':
            {'field': 'usages', 'value': ['STATICALLY_LINKED'],
             'confirmation': 'Usage Changed', 'display': 'STATICALLY_LINKED'},
        'USAGE_DYNAMIC':
            {'field': 'usages', 'value': ['DYNAMICALLY_LINKED'],
             'confirmation': 'Usage Changed', 'display': 'DYNAMICALLY_LINKED'},
        'USAGE_SEPARATE':
            {'field': 'usages', 'value': ['SEPARATE_WORK'],
             'confirmation': 'Usage Changed', 'display': 'SEPARATE_WORK'},
        'USAGE_AGGREGATED':
            {'field': 'usages', 'value': ['MERELY_AGGREGATED'],
             'confirmation': 'Usage Changed', 'display': 'MERELY_AGGREGATED'},
        'USAGE_STANDARD':
            {'field': 'usages', 'value': ['IMPLEMENTATION_OF_STANDARD'],
             'confirmation': 'Usage Changed', 'display': 'IMPLEMENTATION_OF_STANDARD'},
        'USAGE_PREREQUISITE':
            {'field': 'usages', 'value': ['PREREQUISITE'],
             'confirmation': 'Usage Changed', 'display': 'PREREQUISITE'},
        'USAGE_EXCLUDED':
            {'field': 'usages', 'value': ['DEV_TOOL_EXCLUDED'],
             'confirmation': 'Usage Changed', 'display': 'DEV_TOOL_EXCLUDED'},
    }

    count = 0
    confirmation = ''
    for row in rows:
        thiscomp = vdata[row]
        compurl = thiscomp['componentVersion']
        # The raw component record was stashed as JSON when the table was
        # built (see get_comps_data).
        compdata = json.loads(thiscomp['json'])
        if action in compaction_dict:
            entry = compaction_dict[action]
            # Locate the same component in the unfiltered table data so the
            # display refreshes even when the table is filtered/sorted.
            foundrow = -1
            for origrow, origcomp in enumerate(origdata):
                if origcomp['componentVersion'] == compurl:
                    foundrow = origrow
                    break
            if foundrow >= 0:
                origdata[foundrow][entry['field']] = entry['display']
                confirmation = entry['confirmation']
            compdata[entry['field']] = entry['value']
            # Rebase the component URL onto the project-version URL.
            thiscompurl = projverurl + '/' + '/'.join(compurl.split('/')[4:])
            if do_comp_action(thiscompurl, compdata):
                count += 1

    toast = ''
    if count > 0:
        toast = make_comp_toast("{} Components {}".format(count, confirmation))
    return origdata, toast
| [
"dash_bootstrap_components.Toast",
"dash_bootstrap_components.Button",
"json.loads",
"pandas.json_normalize",
"json.dumps",
"dash_html_components.H5",
"dash_core_components.Dropdown",
"dash_html_components.H2"
] | [((682, 729), 'pandas.json_normalize', 'pd.json_normalize', (['comps'], {'record_path': "['items']"}), "(comps, record_path=['items'])\n", (699, 729), True, 'import pandas as pd\n'), ((7373, 7553), 'dash_bootstrap_components.Toast', 'dbc.Toast', (['message'], {'id': "{'type': 'toast', 'id': 'toast_comp'}", 'key': '"""toast_comp"""', 'header': '"""Component Processing"""', 'is_open': '(True)', 'dismissable': '(False)', 'icon': '"""info"""', 'duration': '(8000)'}), "(message, id={'type': 'toast', 'id': 'toast_comp'}, key=\n 'toast_comp', header='Component Processing', is_open=True, dismissable=\n False, icon='info', duration=8000)\n", (7382, 7553), True, 'import dash_bootstrap_components as dbc\n'), ((812, 828), 'json.dumps', 'json.dumps', (['comp'], {}), '(comp)\n', (822, 828), False, 'import json\n'), ((10733, 10761), 'json.loads', 'json.loads', (["thiscomp['json']"], {}), "(thiscomp['json'])\n", (10743, 10761), False, 'import json\n'), ((1655, 1676), 'dash_html_components.H2', 'html.H2', (['"""Components"""'], {}), "('Components')\n", (1662, 1676), True, 'import dash_html_components as html\n'), ((1745, 1803), 'dash_html_components.H5', 'html.H5', (["('Project: ' + projname + ' - Version: ' + vername)"], {}), "('Project: ' + projname + ' - Version: ' + vername)\n", (1752, 1803), True, 'import dash_html_components as html\n'), ((1860, 2749), 'dash_core_components.Dropdown', 'dcc.Dropdown', ([], {'id': '"""sel_comp_action"""', 'options': "[{'label': 'Select Action ...', 'value': 'NOTHING'}, {'label': 'Ignore',\n 'value': 'IGNORE'}, {'label': 'Unignore', 'value': 'UNIGNORE'}, {\n 'label': 'Set Reviewed', 'value': 'REVIEW'}, {'label': 'Set Unreviewed',\n 'value': 'UNREVIEW'}, {'label': 'Usage - Source', 'value':\n 'USAGE_SOURCE'}, {'label': 'Usage - Statically Linked', 'value':\n 'USAGE_STATIC'}, {'label': 'Usage - Dynamically Linked', 'value':\n 'USAGE_DYNAMIC'}, {'label': 'Usage - Separate Work', 'value':\n 'USAGE_SEPARATE'}, {'label': 'Usage - Merely 
Aggregated', 'value':\n 'USAGE_AGGREGATED'}, {'label': 'Usage - Implement Standard', 'value':\n 'USAGE_STANDARD'}, {'label': 'Usage - Prerequisite', 'value':\n 'USAGE_PREREQUISITE'}, {'label': 'Usage - Dev Tool/Excluded', 'value':\n 'USAGE_EXCLUDED'}]", 'multi': '(False)', 'placeholder': '"""Select Action ..."""'}), "(id='sel_comp_action', options=[{'label': 'Select Action ...',\n 'value': 'NOTHING'}, {'label': 'Ignore', 'value': 'IGNORE'}, {'label':\n 'Unignore', 'value': 'UNIGNORE'}, {'label': 'Set Reviewed', 'value':\n 'REVIEW'}, {'label': 'Set Unreviewed', 'value': 'UNREVIEW'}, {'label':\n 'Usage - Source', 'value': 'USAGE_SOURCE'}, {'label':\n 'Usage - Statically Linked', 'value': 'USAGE_STATIC'}, {'label':\n 'Usage - Dynamically Linked', 'value': 'USAGE_DYNAMIC'}, {'label':\n 'Usage - Separate Work', 'value': 'USAGE_SEPARATE'}, {'label':\n 'Usage - Merely Aggregated', 'value': 'USAGE_AGGREGATED'}, {'label':\n 'Usage - Implement Standard', 'value': 'USAGE_STANDARD'}, {'label':\n 'Usage - Prerequisite', 'value': 'USAGE_PREREQUISITE'}, {'label':\n 'Usage - Dev Tool/Excluded', 'value': 'USAGE_EXCLUDED'}], multi=False,\n placeholder='Select Action ...')\n", (1872, 2749), True, 'import dash_core_components as dcc\n'), ((3300, 3387), 'dash_bootstrap_components.Button', 'dbc.Button', (['"""Selected Rows"""'], {'id': '"""button_comp_selected"""', 'className': '"""mr-2"""', 'size': '"""sm"""'}), "('Selected Rows', id='button_comp_selected', className='mr-2',\n size='sm')\n", (3310, 3387), True, 'import dash_bootstrap_components as dbc\n'), ((3454, 3540), 'dash_bootstrap_components.Button', 'dbc.Button', (['"""All Filtered Rows"""'], {'id': '"""button_comp_all"""', 'className': '"""mr-2"""', 'size': '"""sm"""'}), "('All Filtered Rows', id='button_comp_all', className='mr-2',\n size='sm')\n", (3464, 3540), True, 'import dash_bootstrap_components as dbc\n')] |
# Copyright 2016 Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from osprofiler.drivers.ceilometer import Ceilometer
from osprofiler.tests import test
class CeilometerParserTestCase(test.TestCase):
    """Tests for the Ceilometer trace driver: tree building and reporting."""
    def setUp(self):
        super(CeilometerParserTestCase, self).setUp()
        self.ceilometer = Ceilometer("ceilometer://",
                                     ceilometer_api_version="2")
    def test_build_empty_tree(self):
        """An empty trace dict produces an empty tree."""
        self.assertEqual([], self.ceilometer._build_tree({}))
    def test_build_complex_tree(self):
        """Flat {trace_id: node} input is nested by parent_id and ordered by start time."""
        test_input = {
            "2": {"parent_id": "0", "trace_id": "2", "info": {"started": 1}},
            "1": {"parent_id": "0", "trace_id": "1", "info": {"started": 0}},
            "21": {"parent_id": "2", "trace_id": "21", "info": {"started": 6}},
            "22": {"parent_id": "2", "trace_id": "22", "info": {"started": 7}},
            "11": {"parent_id": "1", "trace_id": "11", "info": {"started": 1}},
            "113": {"parent_id": "11", "trace_id": "113",
                    "info": {"started": 3}},
            "112": {"parent_id": "11", "trace_id": "112",
                    "info": {"started": 2}},
            "114": {"parent_id": "11", "trace_id": "114",
                    "info": {"started": 5}}
        }
        expected_output = [
            {
                "parent_id": "0",
                "trace_id": "1",
                "info": {"started": 0},
                "children": [
                    {
                        "parent_id": "1",
                        "trace_id": "11",
                        "info": {"started": 1},
                        "children": [
                            {"parent_id": "11", "trace_id": "112",
                             "info": {"started": 2}, "children": []},
                            {"parent_id": "11", "trace_id": "113",
                             "info": {"started": 3}, "children": []},
                            {"parent_id": "11", "trace_id": "114",
                             "info": {"started": 5}, "children": []}
                        ]
                    }
                ]
            },
            {
                "parent_id": "0",
                "trace_id": "2",
                "info": {"started": 1},
                "children": [
                    {"parent_id": "2", "trace_id": "21",
                     "info": {"started": 6}, "children": []},
                    {"parent_id": "2", "trace_id": "22",
                     "info": {"started": 7}, "children": []}
                ]
            }
        ]
        result = self.ceilometer._build_tree(test_input)
        self.assertEqual(expected_output, result)
    def test_get_report_empty(self):
        """No events for the base_id yields an empty report skeleton."""
        self.ceilometer.client = mock.MagicMock()
        self.ceilometer.client.events.list.return_value = []
        expected = {
            "info": {
                "name": "total",
                "started": 0,
                "finished": None,
                "last_trace_started": None
            },
            "children": [],
            "stats": {},
        }
        base_id = "10"
        self.assertEqual(expected, self.ceilometer.get_report(base_id))
    def test_get_report(self):
        """A full wsgi/db event stream is paired into a tree with per-name stats."""
        self.ceilometer.client = mock.MagicMock()
        # Five mocked Ceilometer events: wsgi-start/stop, db-start/stop and a
        # second wsgi-start; to_dict() returns the raw API payloads below.
        results = [mock.MagicMock(), mock.MagicMock(), mock.MagicMock(),
                   mock.MagicMock(), mock.MagicMock()]
        self.ceilometer.client.events.list.return_value = results
        results[0].to_dict.return_value = {
            "traits": [
                {
                    "type": "string",
                    "name": "base_id",
                    "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4"
                },
                {
                    "type": "string",
                    "name": "host",
                    "value": "ubuntu"
                },
                {
                    "type": "string",
                    "name": "method",
                    "value": "POST"
                },
                {
                    "type": "string",
                    "name": "name",
                    "value": "wsgi-start"
                },
                {
                    "type": "string",
                    "name": "parent_id",
                    "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4"
                },
                {
                    "type": "string",
                    "name": "project",
                    "value": "keystone"
                },
                {
                    "type": "string",
                    "name": "service",
                    "value": "main"
                },
                {
                    "type": "string",
                    "name": "timestamp",
                    "value": "2015-12-23T14:02:22.338776"
                },
                {
                    "type": "string",
                    "name": "trace_id",
                    "value": "06320327-2c2c-45ae-923a-515de890276a"
                }
            ],
            "raw": {},
            "generated": "2015-12-23T10:41:38.415793",
            "event_type": "profiler.main",
            "message_id": "65fc1553-3082-4a6f-9d1e-0e3183f57a47"}
        results[1].to_dict.return_value = {
            "traits":
                [
                    {
                        "type": "string",
                        "name": "base_id",
                        "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4"
                    },
                    {
                        "type": "string",
                        "name": "host",
                        "value": "ubuntu"
                    },
                    {
                        "type": "string",
                        "name": "name",
                        "value": "wsgi-stop"
                    },
                    {
                        "type": "string",
                        "name": "parent_id",
                        "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4"
                    },
                    {
                        "type": "string",
                        "name": "project",
                        "value": "keystone"
                    },
                    {
                        "type": "string",
                        "name": "service",
                        "value": "main"
                    },
                    {
                        "type": "string",
                        "name": "timestamp",
                        "value": "2015-12-23T14:02:22.380405"
                    },
                    {
                        "type": "string",
                        "name": "trace_id",
                        "value": "016c97fd-87f3-40b2-9b55-e431156b694b"
                    }
            ],
            "raw": {},
            "generated": "2015-12-23T10:41:38.406052",
            "event_type": "profiler.main",
            "message_id": "3256d9f1-48ba-4ac5-a50b-64fa42c6e264"}
        results[2].to_dict.return_value = {
            "traits":
                [
                    {
                        "type": "string",
                        "name": "base_id",
                        "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4"
                    },
                    {
                        "type": "string",
                        "name": "db.params",
                        "value": "[]"
                    },
                    {
                        "type": "string",
                        "name": "db.statement",
                        "value": "SELECT 1"
                    },
                    {
                        "type": "string",
                        "name": "host",
                        "value": "ubuntu"
                    },
                    {
                        "type": "string",
                        "name": "name",
                        "value": "db-start"
                    },
                    {
                        "type": "string",
                        "name": "parent_id",
                        "value": "06320327-2c2c-45ae-923a-515de890276a"
                    },
                    {
                        "type": "string",
                        "name": "project",
                        "value": "keystone"
                    },
                    {
                        "type": "string",
                        "name": "service",
                        "value": "main"
                    },
                    {
                        "type": "string",
                        "name": "timestamp",
                        "value": "2015-12-23T14:02:22.395365"
                    },
                    {
                        "type": "string",
                        "name": "trace_id",
                        "value": "1baf1d24-9ca9-4f4c-bd3f-01b7e0c0735a"
                    }
            ],
            "raw": {},
            "generated": "2015-12-23T10:41:38.984161",
            "event_type": "profiler.main",
            "message_id": "60368aa4-16f0-4f37-a8fb-89e92fdf36ff"}
        results[3].to_dict.return_value = {
            "traits":
                [
                    {
                        "type": "string",
                        "name": "base_id",
                        "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4"
                    },
                    {
                        "type": "string",
                        "name": "host",
                        "value": "ubuntu"
                    },
                    {
                        "type": "string",
                        "name": "name",
                        "value": "db-stop"
                    },
                    {
                        "type": "string",
                        "name": "parent_id",
                        "value": "06320327-2c2c-45ae-923a-515de890276a"
                    },
                    {
                        "type": "string",
                        "name": "project",
                        "value": "keystone"
                    },
                    {
                        "type": "string",
                        "name": "service",
                        "value": "main"
                    },
                    {
                        "type": "string",
                        "name": "timestamp",
                        "value": "2015-12-23T14:02:22.415486"
                    },
                    {
                        "type": "string",
                        "name": "trace_id",
                        "value": "1baf1d24-9ca9-4f4c-bd3f-01b7e0c0735a"
                    }
            ],
            "raw": {},
            "generated": "2015-12-23T10:41:39.019378",
            "event_type": "profiler.main",
            "message_id": "3fbeb339-55c5-4f28-88e4-15bee251dd3d"}
        results[4].to_dict.return_value = {
            "traits":
                [
                    {
                        "type": "string",
                        "name": "base_id",
                        "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4"
                    },
                    {
                        "type": "string",
                        "name": "host",
                        "value": "ubuntu"
                    },
                    {
                        "type": "string",
                        "name": "method",
                        "value": "GET"
                    },
                    {
                        "type": "string",
                        "name": "name",
                        "value": "wsgi-start"
                    },
                    {
                        "type": "string",
                        "name": "parent_id",
                        "value": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4"
                    },
                    {
                        "type": "string",
                        "name": "project",
                        "value": "keystone"
                    },
                    {
                        "type": "string",
                        "name": "service",
                        "value": "main"
                    },
                    {
                        "type": "string",
                        "name": "timestamp",
                        "value": "2015-12-23T14:02:22.427444"
                    },
                    {
                        "type": "string",
                        "name": "trace_id",
                        "value": "016c97fd-87f3-40b2-9b55-e431156b694b"
                    }
            ],
            "raw": {},
            "generated": "2015-12-23T10:41:38.360409",
            "event_type": "profiler.main",
            "message_id": "57b971a9-572f-4f29-9838-3ed2564c6b5b"}
        expected = {"children": [
            {"children": [{"children": [],
                           "info": {"finished": 76,
                                    "host": "ubuntu",
                                    "meta.raw_payload.db-start": {},
                                    "meta.raw_payload.db-stop": {},
                                    "name": "db",
                                    "project": "keystone",
                                    "service": "main",
                                    "started": 56,
                                    "exception": "None"},
                           "parent_id": "06320327-2c2c-45ae-923a-515de890276a",
                           "trace_id": "1baf1d24-9ca9-4f4c-bd3f-01b7e0c0735a"}
                          ],
             "info": {"finished": 0,
                      "host": "ubuntu",
                      "meta.raw_payload.wsgi-start": {},
                      "name": "wsgi",
                      "project": "keystone",
                      "service": "main",
                      "started": 0},
             "parent_id": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4",
             "trace_id": "06320327-2c2c-45ae-923a-515de890276a"},
            {"children": [],
             "info": {"finished": 41,
                      "host": "ubuntu",
                      "meta.raw_payload.wsgi-start": {},
                      "meta.raw_payload.wsgi-stop": {},
                      "name": "wsgi",
                      "project": "keystone",
                      "service": "main",
                      "started": 88,
                      "exception": "None"},
             "parent_id": "7253ca8c-33b3-4f84-b4f1-f5a4311ddfa4",
             "trace_id": "016c97fd-87f3-40b2-9b55-e431156b694b"}],
            "info": {
                "finished": 88,
                "name": "total",
                "started": 0,
                "last_trace_started": 88
            },
            "stats": {"db": {"count": 1, "duration": 20},
                      "wsgi": {"count": 2, "duration": -47}},
        }
        base_id = "10"
        result = self.ceilometer.get_report(base_id)
        # The driver must query Ceilometer filtered by base_id only once.
        expected_filter = [{"field": "base_id", "op": "eq", "value": base_id}]
        self.ceilometer.client.events.list.assert_called_once_with(
            expected_filter, limit=100000)
        self.assertEqual(expected, result)
| [
"osprofiler.drivers.ceilometer.Ceilometer",
"mock.MagicMock"
] | [((879, 934), 'osprofiler.drivers.ceilometer.Ceilometer', 'Ceilometer', (['"""ceilometer://"""'], {'ceilometer_api_version': '"""2"""'}), "('ceilometer://', ceilometer_api_version='2')\n", (889, 934), False, 'from osprofiler.drivers.ceilometer import Ceilometer\n'), ((3321, 3337), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3335, 3337), False, 'import mock\n'), ((3822, 3838), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3836, 3838), False, 'import mock\n'), ((3858, 3874), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3872, 3874), False, 'import mock\n'), ((3876, 3892), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3890, 3892), False, 'import mock\n'), ((3894, 3910), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3908, 3910), False, 'import mock\n'), ((3931, 3947), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3945, 3947), False, 'import mock\n'), ((3949, 3965), 'mock.MagicMock', 'mock.MagicMock', ([], {}), '()\n', (3963, 3965), False, 'import mock\n')] |
from pathlib import Path
import numpy as np
import pickle
import argparse
import errno
import sys
def file_exists(path):
    """Return True when *path* refers to an existing regular file."""
    candidate = Path(path)
    return candidate.is_file()
def dir_exists(path):
    """Return True when *path* refers to an existing directory."""
    candidate = Path(path)
    return candidate.is_dir()
def remove_extension(x): return x.split('.')[0]
def print_error(type, file):
    """Print a FileNotFoundError naming the missing *type* (e.g. 'file').

    NOTE: the parameter name ``type`` shadows the builtin; it is kept to
    preserve the function's keyword interface.
    """
    message = 'The {} {} does not exist'.format(type, file)
    print(FileNotFoundError(errno.ENOENT, message))
def calculate_threshold(similarity, output='confusables',
                        threshold=0.8, verbose=False):
    """Filter a character-similarity matrix by *threshold* and pickle it.

    The similarity file's first line is '<label> <char> <char> ...'; each
    following line is '<latin-char> <score> <score> ...'.  For every latin
    character, the unicode characters whose similarity is >= threshold are
    collected, and the result is written to
    '<output>-<threshold*100>.pickle' as
    {'threshold': ..., 'characters': {latin: [chars, ...]}}.

    Args:
        similarity: path to the similarity-matrix text file.
        output: output file prefix (default 'confusables').
        threshold: minimum similarity, between 0 and 1 (default 0.8).
        verbose: print each latin character's matches when True.
    """
    # Use a context manager so the input handle is closed (the old bare
    # open() inside a comprehension leaked it).
    with open(similarity) as f:
        lines = [line.rstrip('\n') for line in f]
    unicode_characters = np.asarray(lines[0].split(' ')[1:])

    data = {'threshold': threshold, 'characters': {}}
    for l in lines[1:]:
        line = l.split(' ')
        latin = line[0]
        # np.float was removed in NumPy 1.20; the builtin float is the
        # documented replacement.
        similarity_row = np.asarray(line[1:], dtype=float)
        indexes = np.where(similarity_row >= threshold)
        # Compute the filtered character list once (was duplicated before).
        chars = unicode_characters[indexes[0]].tolist()
        data['characters'][latin] = chars
        if verbose:
            print('[{}] {}: {}'.format(len(chars), latin, ','.join(chars)))

    output = '{}-{}.pickle'.format(output, int(threshold * 100))
    with open(output, 'wb') as f:
        pickle.dump(data, f)
def main():
    """Parse CLI options and run the threshold filter on the matrix file."""
    parser = argparse.ArgumentParser(description='Filter Unicode characters '
                                                 'based on a given threshold '
                                                 'between 0 and 1 '
                                                 'and a similarity matrix')
    parser.add_argument('-s', '--similarity', default='similarities.txt')
    parser.add_argument('-t', '--threshold', default=0.8, type=float)
    parser.add_argument('-o', '--output', default='confusables')
    parser.add_argument('-v', '--verbose', action='store_true')
    args = parser.parse_args()

    # Bail out early when the input matrix is missing.
    if not file_exists(args.similarity):
        print_error('file', args.similarity)
        sys.exit(1)

    calculate_threshold(args.similarity, args.output,
                        args.threshold, args.verbose)
# Script entry point.
if __name__ == '__main__':
    main()
| [
"pickle.dump",
"argparse.ArgumentParser",
"pathlib.Path",
"numpy.where",
"numpy.asarray",
"sys.exit"
] | [((1358, 1499), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Filter Unicode characters based on a given threshold between 0 and 1 and a similarity matrix"""'}), "(description=\n 'Filter Unicode characters based on a given threshold between 0 and 1 and a similarity matrix'\n )\n", (1381, 1499), False, 'import argparse\n'), ((846, 878), 'numpy.asarray', 'np.asarray', (['line'], {'dtype': 'np.float'}), '(line, dtype=np.float)\n', (856, 878), True, 'import numpy as np\n'), ((897, 934), 'numpy.where', 'np.where', (['(similarity_row >= threshold)'], {}), '(similarity_row >= threshold)\n', (905, 934), True, 'import numpy as np\n'), ((1310, 1330), 'pickle.dump', 'pickle.dump', (['data', 'f'], {}), '(data, f)\n', (1321, 1330), False, 'import pickle\n'), ((2154, 2165), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (2162, 2165), False, 'import sys\n'), ((134, 144), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (138, 144), False, 'from pathlib import Path\n'), ((190, 200), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (194, 200), False, 'from pathlib import Path\n'), ((990, 1012), 'numpy.asarray', 'np.asarray', (['indexes[0]'], {}), '(indexes[0])\n', (1000, 1012), True, 'import numpy as np\n'), ((1073, 1095), 'numpy.asarray', 'np.asarray', (['indexes[0]'], {}), '(indexes[0])\n', (1083, 1095), True, 'import numpy as np\n')] |
#!/usr/bin/python
from aos.util.trapezoid_profile import TrapezoidProfile
from frc971.control_loops.python import control_loop
from frc971.control_loops.python import angular_system
from frc971.control_loops.python import controls
import copy
import numpy
import sys
from matplotlib import pylab
import gflags
import glog
FLAGS = gflags.FLAGS
# Guard the flag definition: gflags raises DuplicateFlagError if this module
# is imported more than once and the flag is already registered.
try:
    gflags.DEFINE_bool('plot', False, 'If true, plot the loop response.')
except gflags.DuplicateFlagError:
    pass
# Measured wrist moments of inertia for each payload (units presumably
# kg*m^2, matching AngularSystemParams.J — confirm):
# Wrist alone
# 0.1348
# Wrist with ball
# 0.3007
# Wrist with hatch
# 0.446
kWrist = angular_system.AngularSystemParams(
    name='Wrist',
    motor=control_loop.BAG(),
    G=(6.0 / 60.0) * (20.0 / 100.0) * (24.0 / 84.0),
    J=0.30,
    q_pos=0.20,
    q_vel=5.0,
    kalman_q_pos=0.12,
    kalman_q_vel=2.0,
    kalman_q_voltage=4.0,
    kalman_r_position=0.05)
# Ball-loaded wrist: larger inertia, loosened position cost weight.
kWristBall = copy.copy(kWrist)
kWristBall.J = 0.4007
kWristBall.q_pos = 0.55
kWristBall.q_vel = 5.0
# Hatch-panel-loaded wrist.
kWristPanel = copy.copy(kWrist)
kWristPanel.J = 0.446
# Unloaded wrist (kept for modeling; not written out by main()).
kWristModel = copy.copy(kWrist)
kWristModel.J = 0.1348
def main(argv):
    """Optionally plot the wrist loop response, then emit generated sources.

    Expects four positional file names (wrist .h/.cc and integral wrist
    .h/.cc) in argv[1:5].
    """
    if FLAGS.plot:
        # Step the ball-loaded wrist to pi/2 radians and plot the response.
        R = numpy.matrix([[numpy.pi / 2.0], [0.0]])
        angular_system.PlotKick(kWristBall, R, plant_params=kWristBall)
        angular_system.PlotMotion(kWristBall, R, plant_params=kWristBall)

    # Write the generated constants out to a file.
    if len(argv) != 5:
        # NOTE(review): glog.fatal presumably aborts the process; the else
        # keeps codegen from running on bad argv either way — confirm.
        glog.fatal(
            'Expected .h file name and .cc file name for the wrist and integral wrist.'
        )
    else:
        namespaces = ['y2019', 'control_loops', 'superstructure', 'wrist']
        angular_system.WriteAngularSystem([kWrist, kWristBall, kWristPanel],
                                          argv[1:3], argv[3:5], namespaces)
if __name__ == '__main__':
    # Let gflags strip its flags from the command line before running.
    argv = FLAGS(sys.argv)
    glog.init()
    sys.exit(main(argv))
| [
"frc971.control_loops.python.control_loop.BAG",
"gflags.DEFINE_bool",
"frc971.control_loops.python.angular_system.PlotKick",
"glog.fatal",
"frc971.control_loops.python.angular_system.PlotMotion",
"copy.copy",
"numpy.matrix",
"glog.init",
"frc971.control_loops.python.angular_system.WriteAngularSystem... | [((852, 869), 'copy.copy', 'copy.copy', (['kWrist'], {}), '(kWrist)\n', (861, 869), False, 'import copy\n'), ((954, 971), 'copy.copy', 'copy.copy', (['kWrist'], {}), '(kWrist)\n', (963, 971), False, 'import copy\n'), ((1009, 1026), 'copy.copy', 'copy.copy', (['kWrist'], {}), '(kWrist)\n', (1018, 1026), False, 'import copy\n'), ((355, 424), 'gflags.DEFINE_bool', 'gflags.DEFINE_bool', (['"""plot"""', '(False)', '"""If true, plot the loop response."""'], {}), "('plot', False, 'If true, plot the loop response.')\n", (373, 424), False, 'import gflags\n'), ((1776, 1787), 'glog.init', 'glog.init', ([], {}), '()\n', (1785, 1787), False, 'import glog\n'), ((623, 641), 'frc971.control_loops.python.control_loop.BAG', 'control_loop.BAG', ([], {}), '()\n', (639, 641), False, 'from frc971.control_loops.python import control_loop\n'), ((1099, 1138), 'numpy.matrix', 'numpy.matrix', (['[[numpy.pi / 2.0], [0.0]]'], {}), '([[numpy.pi / 2.0], [0.0]])\n', (1111, 1138), False, 'import numpy\n'), ((1147, 1210), 'frc971.control_loops.python.angular_system.PlotKick', 'angular_system.PlotKick', (['kWristBall', 'R'], {'plant_params': 'kWristBall'}), '(kWristBall, R, plant_params=kWristBall)\n', (1170, 1210), False, 'from frc971.control_loops.python import angular_system\n'), ((1219, 1284), 'frc971.control_loops.python.angular_system.PlotMotion', 'angular_system.PlotMotion', (['kWristBall', 'R'], {'plant_params': 'kWristBall'}), '(kWristBall, R, plant_params=kWristBall)\n', (1244, 1284), False, 'from frc971.control_loops.python import angular_system\n'), ((1368, 1465), 'glog.fatal', 'glog.fatal', (['"""Expected .h file name and .cc file name for the wrist and integral wrist."""'], {}), "(\n 'Expected .h file name and .cc file name for the wrist and integral wrist.'\n )\n", (1378, 1465), False, 'import glog\n'), ((1571, 1678), 'frc971.control_loops.python.angular_system.WriteAngularSystem', 'angular_system.WriteAngularSystem', 
(['[kWrist, kWristBall, kWristPanel]', 'argv[1:3]', 'argv[3:5]', 'namespaces'], {}), '([kWrist, kWristBall, kWristPanel], argv[1\n :3], argv[3:5], namespaces)\n', (1604, 1678), False, 'from frc971.control_loops.python import angular_system\n')] |
import pytest
from pandas.errors import NullFrequencyError
import pandas as pd
from pandas import TimedeltaIndex
import pandas._testing as tm
class TestTimedeltaIndexShift:
    """Tests for TimedeltaIndex.shift (the backing for __add__/__sub__)."""
    # -------------------------------------------------------------
    # TimedeltaIndex.shift is used by __add__/__sub__
    def test_tdi_shift_empty(self):
        """Shifting an empty index by any amount returns it unchanged."""
        # GH#9903
        idx = pd.TimedeltaIndex([], name="xxx")
        tm.assert_index_equal(idx.shift(0, freq="H"), idx)
        tm.assert_index_equal(idx.shift(3, freq="H"), idx)
    def test_tdi_shift_hours(self):
        """Shift by whole hours in both directions, preserving the name."""
        # GH#9903
        idx = pd.TimedeltaIndex(["5 hours", "6 hours", "9 hours"], name="xxx")
        tm.assert_index_equal(idx.shift(0, freq="H"), idx)
        exp = pd.TimedeltaIndex(["8 hours", "9 hours", "12 hours"], name="xxx")
        tm.assert_index_equal(idx.shift(3, freq="H"), exp)
        exp = pd.TimedeltaIndex(["2 hours", "3 hours", "6 hours"], name="xxx")
        tm.assert_index_equal(idx.shift(-3, freq="H"), exp)
    def test_tdi_shift_minutes(self):
        """Shift by minutes using the 'T' frequency alias."""
        # GH#9903
        idx = pd.TimedeltaIndex(["5 hours", "6 hours", "9 hours"], name="xxx")
        tm.assert_index_equal(idx.shift(0, freq="T"), idx)
        exp = pd.TimedeltaIndex(["05:03:00", "06:03:00", "9:03:00"], name="xxx")
        tm.assert_index_equal(idx.shift(3, freq="T"), exp)
        exp = pd.TimedeltaIndex(["04:57:00", "05:57:00", "8:57:00"], name="xxx")
        tm.assert_index_equal(idx.shift(-3, freq="T"), exp)
    def test_tdi_shift_int(self):
        """An integer shift on a freq-inferred index moves by whole periods."""
        # GH#8083
        tdi = pd.to_timedelta(range(5), unit="d")
        trange = tdi._with_freq("infer") + pd.offsets.Hour(1)
        result = trange.shift(1)
        expected = TimedeltaIndex(
            [
                "1 days 01:00:00",
                "2 days 01:00:00",
                "3 days 01:00:00",
                "4 days 01:00:00",
                "5 days 01:00:00",
            ],
            freq="D",
        )
        tm.assert_index_equal(result, expected)
    def test_tdi_shift_nonstandard_freq(self):
        """Shifting with an explicit compound freq string ('2D 1s')."""
        # GH#8083
        tdi = pd.to_timedelta(range(5), unit="d")
        trange = tdi._with_freq("infer") + pd.offsets.Hour(1)
        result = trange.shift(3, freq="2D 1s")
        expected = TimedeltaIndex(
            [
                "6 days 01:00:03",
                "7 days 01:00:03",
                "8 days 01:00:03",
                "9 days 01:00:03",
                "10 days 01:00:03",
            ],
            freq="D",
        )
        tm.assert_index_equal(result, expected)
    def test_shift_no_freq(self):
        """Shifting a freq-less index raises NullFrequencyError."""
        # GH#19147
        tdi = TimedeltaIndex(["1 days 01:00:00", "2 days 01:00:00"], freq=None)
        with pytest.raises(NullFrequencyError, match="Cannot shift with no freq"):
            tdi.shift(2)
| [
"pandas._testing.assert_index_equal",
"pandas.TimedeltaIndex",
"pytest.raises",
"pandas.offsets.Hour"
] | [((369, 402), 'pandas.TimedeltaIndex', 'pd.TimedeltaIndex', (['[]'], {'name': '"""xxx"""'}), "([], name='xxx')\n", (386, 402), True, 'import pandas as pd\n'), ((590, 654), 'pandas.TimedeltaIndex', 'pd.TimedeltaIndex', (["['5 hours', '6 hours', '9 hours']"], {'name': '"""xxx"""'}), "(['5 hours', '6 hours', '9 hours'], name='xxx')\n", (607, 654), True, 'import pandas as pd\n'), ((728, 793), 'pandas.TimedeltaIndex', 'pd.TimedeltaIndex', (["['8 hours', '9 hours', '12 hours']"], {'name': '"""xxx"""'}), "(['8 hours', '9 hours', '12 hours'], name='xxx')\n", (745, 793), True, 'import pandas as pd\n'), ((867, 931), 'pandas.TimedeltaIndex', 'pd.TimedeltaIndex', (["['2 hours', '3 hours', '6 hours']"], {'name': '"""xxx"""'}), "(['2 hours', '3 hours', '6 hours'], name='xxx')\n", (884, 931), True, 'import pandas as pd\n'), ((1063, 1127), 'pandas.TimedeltaIndex', 'pd.TimedeltaIndex', (["['5 hours', '6 hours', '9 hours']"], {'name': '"""xxx"""'}), "(['5 hours', '6 hours', '9 hours'], name='xxx')\n", (1080, 1127), True, 'import pandas as pd\n'), ((1201, 1267), 'pandas.TimedeltaIndex', 'pd.TimedeltaIndex', (["['05:03:00', '06:03:00', '9:03:00']"], {'name': '"""xxx"""'}), "(['05:03:00', '06:03:00', '9:03:00'], name='xxx')\n", (1218, 1267), True, 'import pandas as pd\n'), ((1341, 1407), 'pandas.TimedeltaIndex', 'pd.TimedeltaIndex', (["['04:57:00', '05:57:00', '8:57:00']"], {'name': '"""xxx"""'}), "(['04:57:00', '05:57:00', '8:57:00'], name='xxx')\n", (1358, 1407), True, 'import pandas as pd\n'), ((1685, 1810), 'pandas.TimedeltaIndex', 'TimedeltaIndex', (["['1 days 01:00:00', '2 days 01:00:00', '3 days 01:00:00', '4 days 01:00:00',\n '5 days 01:00:00']"], {'freq': '"""D"""'}), "(['1 days 01:00:00', '2 days 01:00:00', '3 days 01:00:00',\n '4 days 01:00:00', '5 days 01:00:00'], freq='D')\n", (1699, 1810), False, 'from pandas import TimedeltaIndex\n'), ((1945, 1984), 'pandas._testing.assert_index_equal', 'tm.assert_index_equal', (['result', 'expected'], {}), '(result, expected)\n', 
(1966, 1984), True, 'import pandas._testing as tm\n'), ((2229, 2355), 'pandas.TimedeltaIndex', 'TimedeltaIndex', (["['6 days 01:00:03', '7 days 01:00:03', '8 days 01:00:03', '9 days 01:00:03',\n '10 days 01:00:03']"], {'freq': '"""D"""'}), "(['6 days 01:00:03', '7 days 01:00:03', '8 days 01:00:03',\n '9 days 01:00:03', '10 days 01:00:03'], freq='D')\n", (2243, 2355), False, 'from pandas import TimedeltaIndex\n'), ((2490, 2529), 'pandas._testing.assert_index_equal', 'tm.assert_index_equal', (['result', 'expected'], {}), '(result, expected)\n', (2511, 2529), True, 'import pandas._testing as tm\n'), ((2598, 2663), 'pandas.TimedeltaIndex', 'TimedeltaIndex', (["['1 days 01:00:00', '2 days 01:00:00']"], {'freq': 'None'}), "(['1 days 01:00:00', '2 days 01:00:00'], freq=None)\n", (2612, 2663), False, 'from pandas import TimedeltaIndex\n'), ((1614, 1632), 'pandas.offsets.Hour', 'pd.offsets.Hour', (['(1)'], {}), '(1)\n', (1629, 1632), True, 'import pandas as pd\n'), ((2144, 2162), 'pandas.offsets.Hour', 'pd.offsets.Hour', (['(1)'], {}), '(1)\n', (2159, 2162), True, 'import pandas as pd\n'), ((2677, 2745), 'pytest.raises', 'pytest.raises', (['NullFrequencyError'], {'match': '"""Cannot shift with no freq"""'}), "(NullFrequencyError, match='Cannot shift with no freq')\n", (2690, 2745), False, 'import pytest\n')] |
import os
from subprocess import check_output, CalledProcessError
from nose import tools as nt
from stolos import queue_backend as qb
from stolos.testing_tools import (
with_setup, validate_zero_queued_task, validate_one_queued_task,
validate_n_queued_task
)
def run(cmd, tasks_json_tmpfile, **kwargs):
cmd = (
"set -o pipefail ; STOLOS_TASKS_JSON={tasks_json} {cmd}").format(
cmd=cmd, tasks_json=tasks_json_tmpfile, **kwargs)
rv = check_output(cmd, shell=True, executable="bash", env=os.environ)
return rv
@with_setup
def test_stolos_submit(app1, job_id1, tasks_json_tmpfile):
with nt.assert_raises(CalledProcessError):
run("stolos-submit -h", tasks_json_tmpfile)
validate_zero_queued_task(app1)
run("stolos-submit -a %s -j %s" % (app1, job_id1), tasks_json_tmpfile)
validate_one_queued_task(app1, job_id1)
run("stolos-submit -a %s -j %s" % (app1, job_id1), tasks_json_tmpfile)
validate_one_queued_task(app1, job_id1)
@with_setup
def test_stolos_submit_readd(app1, job_id1, tasks_json_tmpfile):
qb.set_state(app1, job_id1, failed=True)
validate_zero_queued_task(app1)
run("stolos-submit -a %s -j %s" % (app1, job_id1),
tasks_json_tmpfile)
validate_zero_queued_task(app1)
run("stolos-submit -a %s -j %s --readd" % (app1, job_id1),
tasks_json_tmpfile)
validate_one_queued_task(app1, job_id1)
@with_setup
def test_stolos_submit_multiple_jobs(app1, app2, job_id1, job_id2,
tasks_json_tmpfile):
validate_zero_queued_task(app1)
validate_zero_queued_task(app2)
run("stolos-submit -a %s %s -j %s %s" % (app1, app2, job_id1, job_id2),
tasks_json_tmpfile)
validate_n_queued_task(app1, job_id1, job_id2)
validate_n_queued_task(app2, job_id1, job_id2)
run("stolos-submit -a %s %s -j %s %s" % (app1, app2, job_id1, job_id2),
tasks_json_tmpfile)
validate_n_queued_task(app1, job_id1, job_id2)
validate_n_queued_task(app2, job_id1, job_id2)
| [
"subprocess.check_output",
"stolos.testing_tools.validate_one_queued_task",
"stolos.testing_tools.validate_n_queued_task",
"nose.tools.assert_raises",
"stolos.queue_backend.set_state",
"stolos.testing_tools.validate_zero_queued_task"
] | [((471, 535), 'subprocess.check_output', 'check_output', (['cmd'], {'shell': '(True)', 'executable': '"""bash"""', 'env': 'os.environ'}), "(cmd, shell=True, executable='bash', env=os.environ)\n", (483, 535), False, 'from subprocess import check_output, CalledProcessError\n'), ((726, 757), 'stolos.testing_tools.validate_zero_queued_task', 'validate_zero_queued_task', (['app1'], {}), '(app1)\n', (751, 757), False, 'from stolos.testing_tools import with_setup, validate_zero_queued_task, validate_one_queued_task, validate_n_queued_task\n'), ((837, 876), 'stolos.testing_tools.validate_one_queued_task', 'validate_one_queued_task', (['app1', 'job_id1'], {}), '(app1, job_id1)\n', (861, 876), False, 'from stolos.testing_tools import with_setup, validate_zero_queued_task, validate_one_queued_task, validate_n_queued_task\n'), ((956, 995), 'stolos.testing_tools.validate_one_queued_task', 'validate_one_queued_task', (['app1', 'job_id1'], {}), '(app1, job_id1)\n', (980, 995), False, 'from stolos.testing_tools import with_setup, validate_zero_queued_task, validate_one_queued_task, validate_n_queued_task\n'), ((1079, 1119), 'stolos.queue_backend.set_state', 'qb.set_state', (['app1', 'job_id1'], {'failed': '(True)'}), '(app1, job_id1, failed=True)\n', (1091, 1119), True, 'from stolos import queue_backend as qb\n'), ((1124, 1155), 'stolos.testing_tools.validate_zero_queued_task', 'validate_zero_queued_task', (['app1'], {}), '(app1)\n', (1149, 1155), False, 'from stolos.testing_tools import with_setup, validate_zero_queued_task, validate_one_queued_task, validate_n_queued_task\n'), ((1243, 1274), 'stolos.testing_tools.validate_zero_queued_task', 'validate_zero_queued_task', (['app1'], {}), '(app1)\n', (1268, 1274), False, 'from stolos.testing_tools import with_setup, validate_zero_queued_task, validate_one_queued_task, validate_n_queued_task\n'), ((1370, 1409), 'stolos.testing_tools.validate_one_queued_task', 'validate_one_queued_task', (['app1', 'job_id1'], {}), '(app1, 
job_id1)\n', (1394, 1409), False, 'from stolos.testing_tools import with_setup, validate_zero_queued_task, validate_one_queued_task, validate_n_queued_task\n'), ((1553, 1584), 'stolos.testing_tools.validate_zero_queued_task', 'validate_zero_queued_task', (['app1'], {}), '(app1)\n', (1578, 1584), False, 'from stolos.testing_tools import with_setup, validate_zero_queued_task, validate_one_queued_task, validate_n_queued_task\n'), ((1589, 1620), 'stolos.testing_tools.validate_zero_queued_task', 'validate_zero_queued_task', (['app2'], {}), '(app2)\n', (1614, 1620), False, 'from stolos.testing_tools import with_setup, validate_zero_queued_task, validate_one_queued_task, validate_n_queued_task\n'), ((1729, 1775), 'stolos.testing_tools.validate_n_queued_task', 'validate_n_queued_task', (['app1', 'job_id1', 'job_id2'], {}), '(app1, job_id1, job_id2)\n', (1751, 1775), False, 'from stolos.testing_tools import with_setup, validate_zero_queued_task, validate_one_queued_task, validate_n_queued_task\n'), ((1780, 1826), 'stolos.testing_tools.validate_n_queued_task', 'validate_n_queued_task', (['app2', 'job_id1', 'job_id2'], {}), '(app2, job_id1, job_id2)\n', (1802, 1826), False, 'from stolos.testing_tools import with_setup, validate_zero_queued_task, validate_one_queued_task, validate_n_queued_task\n'), ((1935, 1981), 'stolos.testing_tools.validate_n_queued_task', 'validate_n_queued_task', (['app1', 'job_id1', 'job_id2'], {}), '(app1, job_id1, job_id2)\n', (1957, 1981), False, 'from stolos.testing_tools import with_setup, validate_zero_queued_task, validate_one_queued_task, validate_n_queued_task\n'), ((1986, 2032), 'stolos.testing_tools.validate_n_queued_task', 'validate_n_queued_task', (['app2', 'job_id1', 'job_id2'], {}), '(app2, job_id1, job_id2)\n', (2008, 2032), False, 'from stolos.testing_tools import with_setup, validate_zero_queued_task, validate_one_queued_task, validate_n_queued_task\n'), ((632, 668), 'nose.tools.assert_raises', 'nt.assert_raises', 
(['CalledProcessError'], {}), '(CalledProcessError)\n', (648, 668), True, 'from nose import tools as nt\n')] |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from senlin.common import consts
from senlin.engine.actions import base as ab
from senlin.engine.actions import cluster_action as ca
from senlin.engine import cluster as cm
from senlin.engine import dispatcher
from senlin.engine import node as nm
from senlin.objects import action as ao
from senlin.objects import cluster as co
from senlin.objects import dependency as dobj
from senlin.tests.unit.common import base
from senlin.tests.unit.common import utils
@mock.patch.object(cm.Cluster, 'load')
class ClusterCreateTest(base.SenlinTestCase):
def setUp(self):
super(ClusterCreateTest, self).setUp()
self.ctx = utils.dummy_context()
@mock.patch.object(ao.Action, 'update')
@mock.patch.object(ab.Action, 'create')
@mock.patch.object(co.Cluster, 'get_next_index')
@mock.patch.object(nm, 'Node')
@mock.patch.object(dobj.Dependency, 'create')
@mock.patch.object(dispatcher, 'start_action')
@mock.patch.object(ca.ClusterAction, '_wait_for_dependents')
def test__create_nodes_single(self, mock_wait, mock_start, mock_dep,
mock_node, mock_index, mock_action,
mock_update, mock_load):
# prepare mocks
cluster = mock.Mock(id='CLUSTER_ID', profile_id='FAKE_PROFILE',
user='FAKE_USER', project='FAKE_PROJECT',
domain='FAKE_DOMAIN',
config={"node.name.format": "node-$3I"})
mock_index.return_value = 123
node = mock.Mock(id='NODE_ID')
mock_node.return_value = node
mock_load.return_value = cluster
# cluster action is real
action = ca.ClusterAction(cluster.id, 'CLUSTER_ACTION', self.ctx)
action.id = 'CLUSTER_ACTION_ID'
mock_wait.return_value = (action.RES_OK, 'All dependents completed')
# node_action is faked
mock_action.return_value = 'NODE_ACTION_ID'
# do it
res_code, res_msg = action._create_nodes(1)
# assertions
self.assertEqual(action.RES_OK, res_code)
self.assertEqual('All dependents completed', res_msg)
mock_index.assert_called_once_with(action.context, 'CLUSTER_ID')
mock_node.assert_called_once_with('node-123',
'FAKE_PROFILE',
'CLUSTER_ID',
context=action.context,
user='FAKE_USER',
project='FAKE_PROJECT',
domain='FAKE_DOMAIN',
index=123, metadata={})
node.store.assert_called_once_with(action.context)
mock_action.assert_called_once_with(action.context, 'NODE_ID',
'NODE_CREATE',
name='node_create_NODE_ID',
cause='Derived Action')
mock_dep.assert_called_once_with(action.context, ['NODE_ACTION_ID'],
'CLUSTER_ACTION_ID')
mock_update.assert_called_once_with(
action.context, 'NODE_ACTION_ID',
{'status': ab.Action.READY})
mock_start.assert_called_once_with()
mock_wait.assert_called_once_with()
self.assertEqual({'nodes_added': ['NODE_ID']}, action.outputs)
@mock.patch.object(co.Cluster, 'get')
def test_create_nodes_zero(self, mock_get, mock_load):
cluster = mock.Mock()
cluster.id = 'FAKE_CLUSTER'
mock_get.return_value = mock.Mock()
mock_load.return_value = cluster
action = ca.ClusterAction(cluster.id, 'CLUSTER_ACTION', self.ctx)
res_code, res_msg = action._create_nodes(0)
self.assertEqual(action.RES_OK, res_code)
self.assertEqual('', res_msg)
@mock.patch.object(ao.Action, 'update')
@mock.patch.object(ab.Action, 'create')
@mock.patch.object(co.Cluster, 'get_next_index')
@mock.patch.object(nm, 'Node')
@mock.patch.object(dobj.Dependency, 'create')
@mock.patch.object(dispatcher, 'start_action')
@mock.patch.object(ca.ClusterAction, '_wait_for_dependents')
def test__create_nodes_multiple(self, mock_wait, mock_start, mock_dep,
mock_node, mock_index, mock_action,
mock_update, mock_load):
cluster = mock.Mock(id='01234567-123434',
config={"node.name.format": "node-$3I"})
node1 = mock.Mock(id='01234567-abcdef',
data={'placement': {'region': 'regionOne'}})
node2 = mock.Mock(id='abcdefab-123456',
data={'placement': {'region': 'regionTwo'}})
mock_node.side_effect = [node1, node2]
mock_index.side_effect = [123, 124]
mock_load.return_value = cluster
# cluster action is real
action = ca.ClusterAction(cluster.id, 'CLUSTER_ACTION', self.ctx)
action.id = 'CLUSTER_ACTION_ID'
action.data = {
'placement': {
'count': 2,
'placements': [
{'region': 'regionOne'},
{'region': 'regionTwo'}
]
}
}
mock_wait.return_value = (action.RES_OK, 'All dependents completed')
# node_action is faked
mock_action.side_effect = ['NODE_ACTION_1', 'NODE_ACTION_2']
# do it
res_code, res_msg = action._create_nodes(2)
# assertions
self.assertEqual(action.RES_OK, res_code)
self.assertEqual('All dependents completed', res_msg)
self.assertEqual(2, mock_index.call_count)
self.assertEqual(2, mock_node.call_count)
node1.store.assert_called_once_with(action.context)
node2.store.assert_called_once_with(action.context)
self.assertEqual(2, mock_action.call_count)
self.assertEqual(1, mock_dep.call_count)
update_calls = [
mock.call(action.context, 'NODE_ACTION_1', {'status': 'READY'}),
mock.call(action.context, 'NODE_ACTION_2', {'status': 'READY'})
]
mock_update.assert_has_calls(update_calls)
mock_start.assert_called_once_with()
mock_wait.assert_called_once_with()
self.assertEqual({'nodes_added': [node1.id, node2.id]}, action.outputs)
self.assertEqual({'region': 'regionOne'}, node1.data['placement'])
self.assertEqual({'region': 'regionTwo'}, node2.data['placement'])
mock_node_calls = [
mock.call('node-123', mock.ANY, '01234567-123434',
user=mock.ANY, project=mock.ANY, domain=mock.ANY,
index=123, context=mock.ANY, metadata={},
data={'placement': {'region': 'regionOne'}}),
mock.call('node-124', mock.ANY, '01234567-123434',
user=mock.ANY, project=mock.ANY, domain=mock.ANY,
index=124, context=mock.ANY, metadata={},
data={'placement': {'region': 'regionTwo'}})
]
mock_node.assert_has_calls(mock_node_calls)
cluster.add_node.assert_has_calls([
mock.call(node1), mock.call(node2)])
@mock.patch.object(ao.Action, 'update')
@mock.patch.object(co.Cluster, 'get')
@mock.patch.object(nm, 'Node')
@mock.patch.object(dobj.Dependency, 'create')
@mock.patch.object(dispatcher, 'start_action')
@mock.patch.object(ca.ClusterAction, '_wait_for_dependents')
def test__create_nodes_multiple_failed_wait(self, mock_wait, mock_start,
mock_dep, mock_node, mock_get,
mock_update, mock_load):
cluster = mock.Mock(id='01234567-123434', config={})
db_cluster = mock.Mock(next_index=1)
mock_get.return_value = db_cluster
node1 = mock.Mock(id='01234567-abcdef', data={})
node2 = mock.Mock(id='abcdefab-123456', data={})
mock_node.side_effect = [node1, node2]
mock_load.return_value = cluster
# cluster action is real
action = ca.ClusterAction(cluster.id, 'CLUSTER_ACTION', self.ctx)
action.id = 'CLUSTER_ACTION_ID'
action.data = {
'placement': {
'count': 2,
'placements': [
{'region': 'regionOne'},
{'region': 'regionTwo'}
]
}
}
mock_wait.return_value = (action.RES_ERROR, 'Waiting timed out')
# node_action is faked
n_action_1 = mock.Mock()
n_action_2 = mock.Mock()
self.patchobject(ab, 'Action', side_effect=[n_action_1, n_action_2])
# do it
res_code, res_msg = action._create_nodes(2)
# assertions
self.assertEqual(action.RES_ERROR, res_code)
self.assertEqual('Failed in creating nodes.', res_msg)
def test_do_create_success(self, mock_load):
cluster = mock.Mock(id='FAKE_CLUSTER', ACTIVE='ACTIVE')
cluster.do_create.return_value = True
mock_load.return_value = cluster
action = ca.ClusterAction(cluster.id, 'CLUSTER_ACTION', self.ctx)
x_create_nodes = self.patchobject(action, '_create_nodes',
return_value=(action.RES_OK, 'OK'))
# do it
res_code, res_msg = action.do_create()
self.assertEqual(action.RES_OK, res_code)
self.assertEqual('Cluster creation succeeded.', res_msg)
x_create_nodes.assert_called_once_with(cluster.desired_capacity)
cluster.eval_status.assert_called_once_with(
action.context, consts.CLUSTER_CREATE, created_at=mock.ANY)
def test_do_create_failed_create_cluster(self, mock_load):
cluster = mock.Mock(id='FAKE_CLUSTER')
cluster.do_create.return_value = False
mock_load.return_value = cluster
action = ca.ClusterAction(cluster.id, 'CLUSTER_ACTION', self.ctx)
# do it
res_code, res_msg = action.do_create()
self.assertEqual(action.RES_ERROR, res_code)
self.assertEqual('Cluster creation failed.', res_msg)
cluster.set_status.assert_called_once_with(
action.context, 'ERROR', 'Cluster creation failed.')
def test_do_create_failed_create_nodes(self, mock_load):
cluster = mock.Mock(id='FAKE_ID',)
cluster.do_create.return_value = True
mock_load.return_value = cluster
action = ca.ClusterAction(cluster.id, 'CLUSTER_ACTION', self.ctx)
# do it
for code in [action.RES_CANCEL, action.RES_TIMEOUT, action.RES_ERROR]:
self.patchobject(action, '_create_nodes',
return_value=(code, 'Really Bad'))
res_code, res_msg = action.do_create()
self.assertEqual(code, res_code)
self.assertEqual('Really Bad', res_msg)
cluster.eval_status.assert_called_once_with(
action.context, consts.CLUSTER_CREATE)
cluster.eval_status.reset_mock()
def test_do_create_failed_for_retry(self, mock_load):
cluster = mock.Mock(id='FAKE_ID', INIT='INIT')
cluster.do_create.return_value = True
mock_load.return_value = cluster
action = ca.ClusterAction(cluster.id, 'CLUSTER_ACTION', self.ctx)
self.patchobject(action, '_create_nodes',
return_value=(action.RES_RETRY, 'retry'))
# do it
res_code, res_msg = action.do_create()
self.assertEqual(action.RES_RETRY, res_code)
self.assertEqual('retry', res_msg)
cluster.eval_status.assert_called_once_with(
action.context, consts.CLUSTER_CREATE)
| [
"mock.Mock",
"mock.patch.object",
"senlin.tests.unit.common.utils.dummy_context",
"senlin.engine.actions.cluster_action.ClusterAction",
"mock.call"
] | [((1024, 1061), 'mock.patch.object', 'mock.patch.object', (['cm.Cluster', '"""load"""'], {}), "(cm.Cluster, 'load')\n", (1041, 1061), False, 'import mock\n'), ((1224, 1262), 'mock.patch.object', 'mock.patch.object', (['ao.Action', '"""update"""'], {}), "(ao.Action, 'update')\n", (1241, 1262), False, 'import mock\n'), ((1268, 1306), 'mock.patch.object', 'mock.patch.object', (['ab.Action', '"""create"""'], {}), "(ab.Action, 'create')\n", (1285, 1306), False, 'import mock\n'), ((1312, 1359), 'mock.patch.object', 'mock.patch.object', (['co.Cluster', '"""get_next_index"""'], {}), "(co.Cluster, 'get_next_index')\n", (1329, 1359), False, 'import mock\n'), ((1365, 1394), 'mock.patch.object', 'mock.patch.object', (['nm', '"""Node"""'], {}), "(nm, 'Node')\n", (1382, 1394), False, 'import mock\n'), ((1400, 1444), 'mock.patch.object', 'mock.patch.object', (['dobj.Dependency', '"""create"""'], {}), "(dobj.Dependency, 'create')\n", (1417, 1444), False, 'import mock\n'), ((1450, 1495), 'mock.patch.object', 'mock.patch.object', (['dispatcher', '"""start_action"""'], {}), "(dispatcher, 'start_action')\n", (1467, 1495), False, 'import mock\n'), ((1501, 1560), 'mock.patch.object', 'mock.patch.object', (['ca.ClusterAction', '"""_wait_for_dependents"""'], {}), "(ca.ClusterAction, '_wait_for_dependents')\n", (1518, 1560), False, 'import mock\n'), ((4045, 4081), 'mock.patch.object', 'mock.patch.object', (['co.Cluster', '"""get"""'], {}), "(co.Cluster, 'get')\n", (4062, 4081), False, 'import mock\n'), ((4514, 4552), 'mock.patch.object', 'mock.patch.object', (['ao.Action', '"""update"""'], {}), "(ao.Action, 'update')\n", (4531, 4552), False, 'import mock\n'), ((4558, 4596), 'mock.patch.object', 'mock.patch.object', (['ab.Action', '"""create"""'], {}), "(ab.Action, 'create')\n", (4575, 4596), False, 'import mock\n'), ((4602, 4649), 'mock.patch.object', 'mock.patch.object', (['co.Cluster', '"""get_next_index"""'], {}), "(co.Cluster, 'get_next_index')\n", (4619, 4649), False, 'import 
mock\n'), ((4655, 4684), 'mock.patch.object', 'mock.patch.object', (['nm', '"""Node"""'], {}), "(nm, 'Node')\n", (4672, 4684), False, 'import mock\n'), ((4690, 4734), 'mock.patch.object', 'mock.patch.object', (['dobj.Dependency', '"""create"""'], {}), "(dobj.Dependency, 'create')\n", (4707, 4734), False, 'import mock\n'), ((4740, 4785), 'mock.patch.object', 'mock.patch.object', (['dispatcher', '"""start_action"""'], {}), "(dispatcher, 'start_action')\n", (4757, 4785), False, 'import mock\n'), ((4791, 4850), 'mock.patch.object', 'mock.patch.object', (['ca.ClusterAction', '"""_wait_for_dependents"""'], {}), "(ca.ClusterAction, '_wait_for_dependents')\n", (4808, 4850), False, 'import mock\n'), ((7923, 7961), 'mock.patch.object', 'mock.patch.object', (['ao.Action', '"""update"""'], {}), "(ao.Action, 'update')\n", (7940, 7961), False, 'import mock\n'), ((7967, 8003), 'mock.patch.object', 'mock.patch.object', (['co.Cluster', '"""get"""'], {}), "(co.Cluster, 'get')\n", (7984, 8003), False, 'import mock\n'), ((8009, 8038), 'mock.patch.object', 'mock.patch.object', (['nm', '"""Node"""'], {}), "(nm, 'Node')\n", (8026, 8038), False, 'import mock\n'), ((8044, 8088), 'mock.patch.object', 'mock.patch.object', (['dobj.Dependency', '"""create"""'], {}), "(dobj.Dependency, 'create')\n", (8061, 8088), False, 'import mock\n'), ((8094, 8139), 'mock.patch.object', 'mock.patch.object', (['dispatcher', '"""start_action"""'], {}), "(dispatcher, 'start_action')\n", (8111, 8139), False, 'import mock\n'), ((8145, 8204), 'mock.patch.object', 'mock.patch.object', (['ca.ClusterAction', '"""_wait_for_dependents"""'], {}), "(ca.ClusterAction, '_wait_for_dependents')\n", (8162, 8204), False, 'import mock\n'), ((1196, 1217), 'senlin.tests.unit.common.utils.dummy_context', 'utils.dummy_context', ([], {}), '()\n', (1215, 1217), False, 'from senlin.tests.unit.common import utils\n'), ((1805, 1972), 'mock.Mock', 'mock.Mock', ([], {'id': '"""CLUSTER_ID"""', 'profile_id': '"""FAKE_PROFILE"""', 'user': 
'"""FAKE_USER"""', 'project': '"""FAKE_PROJECT"""', 'domain': '"""FAKE_DOMAIN"""', 'config': "{'node.name.format': 'node-$3I'}"}), "(id='CLUSTER_ID', profile_id='FAKE_PROFILE', user='FAKE_USER',\n project='FAKE_PROJECT', domain='FAKE_DOMAIN', config={\n 'node.name.format': 'node-$3I'})\n", (1814, 1972), False, 'import mock\n'), ((2101, 2124), 'mock.Mock', 'mock.Mock', ([], {'id': '"""NODE_ID"""'}), "(id='NODE_ID')\n", (2110, 2124), False, 'import mock\n'), ((2255, 2311), 'senlin.engine.actions.cluster_action.ClusterAction', 'ca.ClusterAction', (['cluster.id', '"""CLUSTER_ACTION"""', 'self.ctx'], {}), "(cluster.id, 'CLUSTER_ACTION', self.ctx)\n", (2271, 2311), True, 'from senlin.engine.actions import cluster_action as ca\n'), ((4159, 4170), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (4168, 4170), False, 'import mock\n'), ((4239, 4250), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (4248, 4250), False, 'import mock\n'), ((4309, 4365), 'senlin.engine.actions.cluster_action.ClusterAction', 'ca.ClusterAction', (['cluster.id', '"""CLUSTER_ACTION"""', 'self.ctx'], {}), "(cluster.id, 'CLUSTER_ACTION', self.ctx)\n", (4325, 4365), True, 'from senlin.engine.actions import cluster_action as ca\n'), ((5077, 5149), 'mock.Mock', 'mock.Mock', ([], {'id': '"""01234567-123434"""', 'config': "{'node.name.format': 'node-$3I'}"}), "(id='01234567-123434', config={'node.name.format': 'node-$3I'})\n", (5086, 5149), False, 'import mock\n'), ((5194, 5270), 'mock.Mock', 'mock.Mock', ([], {'id': '"""01234567-abcdef"""', 'data': "{'placement': {'region': 'regionOne'}}"}), "(id='01234567-abcdef', data={'placement': {'region': 'regionOne'}})\n", (5203, 5270), False, 'import mock\n'), ((5313, 5389), 'mock.Mock', 'mock.Mock', ([], {'id': '"""abcdefab-123456"""', 'data': "{'placement': {'region': 'regionTwo'}}"}), "(id='abcdefab-123456', data={'placement': {'region': 'regionTwo'}})\n", (5322, 5389), False, 'import mock\n'), ((5599, 5655), 'senlin.engine.actions.cluster_action.ClusterAction', 
'ca.ClusterAction', (['cluster.id', '"""CLUSTER_ACTION"""', 'self.ctx'], {}), "(cluster.id, 'CLUSTER_ACTION', self.ctx)\n", (5615, 5655), True, 'from senlin.engine.actions import cluster_action as ca\n'), ((8452, 8494), 'mock.Mock', 'mock.Mock', ([], {'id': '"""01234567-123434"""', 'config': '{}'}), "(id='01234567-123434', config={})\n", (8461, 8494), False, 'import mock\n'), ((8516, 8539), 'mock.Mock', 'mock.Mock', ([], {'next_index': '(1)'}), '(next_index=1)\n', (8525, 8539), False, 'import mock\n'), ((8599, 8639), 'mock.Mock', 'mock.Mock', ([], {'id': '"""01234567-abcdef"""', 'data': '{}'}), "(id='01234567-abcdef', data={})\n", (8608, 8639), False, 'import mock\n'), ((8656, 8696), 'mock.Mock', 'mock.Mock', ([], {'id': '"""abcdefab-123456"""', 'data': '{}'}), "(id='abcdefab-123456', data={})\n", (8665, 8696), False, 'import mock\n'), ((8836, 8892), 'senlin.engine.actions.cluster_action.ClusterAction', 'ca.ClusterAction', (['cluster.id', '"""CLUSTER_ACTION"""', 'self.ctx'], {}), "(cluster.id, 'CLUSTER_ACTION', self.ctx)\n", (8852, 8892), True, 'from senlin.engine.actions import cluster_action as ca\n'), ((9301, 9312), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (9310, 9312), False, 'import mock\n'), ((9334, 9345), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (9343, 9345), False, 'import mock\n'), ((9698, 9743), 'mock.Mock', 'mock.Mock', ([], {'id': '"""FAKE_CLUSTER"""', 'ACTIVE': '"""ACTIVE"""'}), "(id='FAKE_CLUSTER', ACTIVE='ACTIVE')\n", (9707, 9743), False, 'import mock\n'), ((9848, 9904), 'senlin.engine.actions.cluster_action.ClusterAction', 'ca.ClusterAction', (['cluster.id', '"""CLUSTER_ACTION"""', 'self.ctx'], {}), "(cluster.id, 'CLUSTER_ACTION', self.ctx)\n", (9864, 9904), True, 'from senlin.engine.actions import cluster_action as ca\n'), ((10510, 10538), 'mock.Mock', 'mock.Mock', ([], {'id': '"""FAKE_CLUSTER"""'}), "(id='FAKE_CLUSTER')\n", (10519, 10538), False, 'import mock\n'), ((10644, 10700), 'senlin.engine.actions.cluster_action.ClusterAction', 
'ca.ClusterAction', (['cluster.id', '"""CLUSTER_ACTION"""', 'self.ctx'], {}), "(cluster.id, 'CLUSTER_ACTION', self.ctx)\n", (10660, 10700), True, 'from senlin.engine.actions import cluster_action as ca\n'), ((11078, 11101), 'mock.Mock', 'mock.Mock', ([], {'id': '"""FAKE_ID"""'}), "(id='FAKE_ID')\n", (11087, 11101), False, 'import mock\n'), ((11207, 11263), 'senlin.engine.actions.cluster_action.ClusterAction', 'ca.ClusterAction', (['cluster.id', '"""CLUSTER_ACTION"""', 'self.ctx'], {}), "(cluster.id, 'CLUSTER_ACTION', self.ctx)\n", (11223, 11263), True, 'from senlin.engine.actions import cluster_action as ca\n'), ((11862, 11898), 'mock.Mock', 'mock.Mock', ([], {'id': '"""FAKE_ID"""', 'INIT': '"""INIT"""'}), "(id='FAKE_ID', INIT='INIT')\n", (11871, 11898), False, 'import mock\n'), ((12003, 12059), 'senlin.engine.actions.cluster_action.ClusterAction', 'ca.ClusterAction', (['cluster.id', '"""CLUSTER_ACTION"""', 'self.ctx'], {}), "(cluster.id, 'CLUSTER_ACTION', self.ctx)\n", (12019, 12059), True, 'from senlin.engine.actions import cluster_action as ca\n'), ((6679, 6742), 'mock.call', 'mock.call', (['action.context', '"""NODE_ACTION_1"""', "{'status': 'READY'}"], {}), "(action.context, 'NODE_ACTION_1', {'status': 'READY'})\n", (6688, 6742), False, 'import mock\n'), ((6756, 6819), 'mock.call', 'mock.call', (['action.context', '"""NODE_ACTION_2"""', "{'status': 'READY'}"], {}), "(action.context, 'NODE_ACTION_2', {'status': 'READY'})\n", (6765, 6819), False, 'import mock\n'), ((7240, 7436), 'mock.call', 'mock.call', (['"""node-123"""', 'mock.ANY', '"""01234567-123434"""'], {'user': 'mock.ANY', 'project': 'mock.ANY', 'domain': 'mock.ANY', 'index': '(123)', 'context': 'mock.ANY', 'metadata': '{}', 'data': "{'placement': {'region': 'regionOne'}}"}), "('node-123', mock.ANY, '01234567-123434', user=mock.ANY, project=\n mock.ANY, domain=mock.ANY, index=123, context=mock.ANY, metadata={},\n data={'placement': {'region': 'regionOne'}})\n", (7249, 7436), False, 'import mock\n'), 
((7507, 7703), 'mock.call', 'mock.call', (['"""node-124"""', 'mock.ANY', '"""01234567-123434"""'], {'user': 'mock.ANY', 'project': 'mock.ANY', 'domain': 'mock.ANY', 'index': '(124)', 'context': 'mock.ANY', 'metadata': '{}', 'data': "{'placement': {'region': 'regionTwo'}}"}), "('node-124', mock.ANY, '01234567-123434', user=mock.ANY, project=\n mock.ANY, domain=mock.ANY, index=124, context=mock.ANY, metadata={},\n data={'placement': {'region': 'regionTwo'}})\n", (7516, 7703), False, 'import mock\n'), ((7880, 7896), 'mock.call', 'mock.call', (['node1'], {}), '(node1)\n', (7889, 7896), False, 'import mock\n'), ((7898, 7914), 'mock.call', 'mock.call', (['node2'], {}), '(node2)\n', (7907, 7914), False, 'import mock\n')] |
# -*- coding: utf-8 -*-
# Copyright (c) 2018-2021, earthobservations developers.
# Distributed under the MIT License. See LICENSE for more info.
import pytest
from wetterdienst import Wetterdienst
@pytest.mark.remote
@pytest.mark.parametrize(
"provider,kind,kwargs",
[
# German Weather Service (DWD)
(
"dwd",
"observation",
{"parameter": "kl", "resolution": "daily", "period": "recent"},
),
("dwd", "forecast", {"parameter": "large", "mosmix_type": "large"}),
# Environment and Climate Change Canada
("eccc", "observation", {"parameter": "daily", "resolution": "daily"}),
],
)
@pytest.mark.parametrize("si_units", (False, True))
def test_api(provider, kind, kwargs, si_units):
""" Test main wetterdienst API """
# Build API
api = Wetterdienst(provider, kind)
# Discover parameters
assert api.discover()
# All stations
request = api(**kwargs, si_units=si_units).all()
stations = request.df
# Check stations DataFrame columns
assert set(stations.columns).issuperset(
{
"station_id",
"from_date",
"to_date",
"height",
"latitude",
"longitude",
"name",
"state",
}
)
# Check that there are actually stations
assert not stations.empty
# Query first DataFrame from values
values = next(request.values.query()).df
# TODO: DWD Forecast has no quality
assert set(values.columns).issuperset(
{"station_id", "parameter", "date", "value", "quality"}
)
assert not values.empty
| [
"wetterdienst.Wetterdienst",
"pytest.mark.parametrize"
] | [((221, 512), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""provider,kind,kwargs"""', "[('dwd', 'observation', {'parameter': 'kl', 'resolution': 'daily', 'period':\n 'recent'}), ('dwd', 'forecast', {'parameter': 'large', 'mosmix_type':\n 'large'}), ('eccc', 'observation', {'parameter': 'daily', 'resolution':\n 'daily'})]"], {}), "('provider,kind,kwargs', [('dwd', 'observation', {\n 'parameter': 'kl', 'resolution': 'daily', 'period': 'recent'}), ('dwd',\n 'forecast', {'parameter': 'large', 'mosmix_type': 'large'}), ('eccc',\n 'observation', {'parameter': 'daily', 'resolution': 'daily'})])\n", (244, 512), False, 'import pytest\n'), ((677, 727), 'pytest.mark.parametrize', 'pytest.mark.parametrize', (['"""si_units"""', '(False, True)'], {}), "('si_units', (False, True))\n", (700, 727), False, 'import pytest\n'), ((841, 869), 'wetterdienst.Wetterdienst', 'Wetterdienst', (['provider', 'kind'], {}), '(provider, kind)\n', (853, 869), False, 'from wetterdienst import Wetterdienst\n')] |
#! -*- coding:utf-8 -*-
# 语义相似度任务-无监督:训练集为网上pretrain数据, dev集为sts-b
from bert4torch.tokenizers import Tokenizer
from bert4torch.models import build_transformer_model, BaseModel
from bert4torch.snippets import sequence_padding, Callback, ListDataset
import torch.nn as nn
import torch
import torch.optim as optim
from torch.utils.data import DataLoader
from sklearn.metrics.pairwise import paired_cosine_distances
from scipy.stats import pearsonr, spearmanr
import copy
import random
import numpy as np
random.seed(2022)
np.random.seed(2002)
maxlen = 256
batch_size = 8
config_path = 'F:/Projects/pretrain_ckpt/bert/[google_tf_base]--chinese_L-12_H-768_A-12/bert_config.json'
checkpoint_path = 'F:/Projects/pretrain_ckpt/bert/[google_tf_base]--chinese_L-12_H-768_A-12/pytorch_model.bin'
dict_path = 'F:/Projects/pretrain_ckpt/bert/[google_tf_base]--chinese_L-12_H-768_A-12/vocab.txt'
device = 'cuda' if torch.cuda.is_available() else 'cpu'
# 建立分词器
tokenizer = Tokenizer(dict_path, do_lower_case=True)
def collate_fn(batch):
def add_noise(token_ids, del_ratio=0.6):
n = len(token_ids)
keep_or_not = np.random.rand(n) > del_ratio
if sum(keep_or_not) == 0:
keep_or_not[np.random.choice(n)] = True # guarantee that at least one word remains
return list(np.array(token_ids)[keep_or_not])
texts_list = [[] for _ in range(3)]
for text in batch:
token_ids, _ = tokenizer.encode(text, maxlen=maxlen)
texts_list[0].append([tokenizer._token_start_id] + add_noise(token_ids[1:-1]) + [tokenizer._token_end_id])
texts_list[1].append(token_ids[:-1])
texts_list[2].append(token_ids[1:])
for i, texts in enumerate(texts_list):
texts_list[i] = torch.tensor(sequence_padding(texts), dtype=torch.long, device=device)
return texts_list[:2], texts_list[2].flatten()
# 加载数据集
def get_data(filename):
train_data = []
with open(filename, encoding='utf-8') as f:
for row, l in enumerate(f):
if row == 0: # 跳过首行
continue
text = l.strip().replace(' ', '')
if len(text) > 0:
train_data.append(text)
return train_data
train_data = get_data('F:/Projects/data/corpus/pretrain/film/film.txt')
train_dataloader = DataLoader(ListDataset(data=train_data), batch_size=batch_size, shuffle=True, collate_fn=collate_fn)
from task_sentence_embedding_sbert_sts_b__CosineSimilarityLoss import valid_dataloader
# 定义bert上的模型结构
class Model(BaseModel):
def __init__(self, pool_method='mean', scale=20.0):
super().__init__()
self.encoder, self.config = build_transformer_model(config_path=config_path, checkpoint_path=checkpoint_path, with_pool=True, with_mlm=True, return_model_config=True, segment_vocab_size=0)
self.decoder = self.encoder # 这里可以通过使用copy和不使用copy来决定一个模型还是两个独立的模型
self.pool_method = pool_method
self.scale = scale
def forward(self, token_ids_list):
token_ids1 = token_ids_list[0]
hidden_state1, pool_cls1, _ = self.encoder([token_ids1])
embeddings_a = self.get_pool_emb(hidden_state1, pool_cls1, attention_mask=token_ids1.gt(0).long())
token_ids2 = token_ids_list[1]
_, _, mlm_score2 = self.decoder([token_ids2, embeddings_a.unsqueeze(1), torch.ones_like(token_ids1)[:, 0:1]])
return mlm_score2.reshape(-1, mlm_score2.shape[-1])
def encode(self, token_ids):
self.eval()
with torch.no_grad():
hidden_state, pool_cls, _ = self.encoder([token_ids])
output = self.get_pool_emb(hidden_state, pool_cls, attention_mask=token_ids.gt(0).long())
return output
def get_pool_emb(self, hidden_state, pool_cls, attention_mask):
if self.pool_method == 'cls':
return pool_cls
elif self.pool_method == 'mean':
hidden_state = torch.sum(hidden_state * attention_mask[:, :, None], dim=1)
attention_mask = torch.sum(attention_mask, dim=1)[:, None]
return hidden_state / attention_mask
elif self.pool_method == 'max':
seq_state = hidden_state * attention_mask[:, :, None]
return torch.max(seq_state, dim=1)
else:
raise ValueError('pool_method illegal')
model = Model().to(device)
# 定义使用的loss和optimizer,这里支持自定义
model.compile(
loss=nn.CrossEntropyLoss(ignore_index=0),
optimizer=optim.Adam(model.parameters(), lr=2e-5), # 用足够小的学习率
)
# 定义评价函数
def evaluate(data):
embeddings1, embeddings2, labels = [], [], []
for (batch_token1_ids, batch_token2_ids), label in data:
embeddings1.append(model.encode(batch_token1_ids))
embeddings2.append(model.encode(batch_token2_ids))
labels.append(label)
embeddings1 = torch.concat(embeddings1).cpu().numpy()
embeddings2 = torch.concat(embeddings2).cpu().numpy()
labels = torch.concat(labels).cpu().numpy()
cosine_scores = 1 - (paired_cosine_distances(embeddings1, embeddings2))
eval_pearson_cosine, _ = pearsonr(labels, cosine_scores)
return eval_pearson_cosine
class Evaluator(Callback):
"""评估与保存
"""
def __init__(self):
self.best_val_consine = 0.
def on_epoch_end(self, global_step, epoch, logs=None):
val_consine = evaluate(valid_dataloader)
if val_consine > self.best_val_consine:
self.best_val_consine = val_consine
# model.save_weights('best_model.pt')
print(f'val_consine: {val_consine:.5f}, best_val_consine: {self.best_val_consine:.5f}\n')
if __name__ == '__main__':
evaluator = Evaluator()
model.fit(train_dataloader,
epochs=20,
steps_per_epoch=100,
callbacks=[evaluator]
)
else:
model.load_weights('best_model.pt')
| [
"torch.ones_like",
"torch.nn.CrossEntropyLoss",
"sklearn.metrics.pairwise.paired_cosine_distances",
"numpy.random.rand",
"numpy.random.choice",
"torch.max",
"random.seed",
"bert4torch.tokenizers.Tokenizer",
"bert4torch.snippets.sequence_padding",
"torch.cuda.is_available",
"torch.no_grad",
"nu... | [((503, 520), 'random.seed', 'random.seed', (['(2022)'], {}), '(2022)\n', (514, 520), False, 'import random\n'), ((521, 541), 'numpy.random.seed', 'np.random.seed', (['(2002)'], {}), '(2002)\n', (535, 541), True, 'import numpy as np\n'), ((963, 1003), 'bert4torch.tokenizers.Tokenizer', 'Tokenizer', (['dict_path'], {'do_lower_case': '(True)'}), '(dict_path, do_lower_case=True)\n', (972, 1003), False, 'from bert4torch.tokenizers import Tokenizer\n'), ((905, 930), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (928, 930), False, 'import torch\n'), ((2300, 2328), 'bert4torch.snippets.ListDataset', 'ListDataset', ([], {'data': 'train_data'}), '(data=train_data)\n', (2311, 2328), False, 'from bert4torch.snippets import sequence_padding, Callback, ListDataset\n'), ((5036, 5067), 'scipy.stats.pearsonr', 'pearsonr', (['labels', 'cosine_scores'], {}), '(labels, cosine_scores)\n', (5044, 5067), False, 'from scipy.stats import pearsonr, spearmanr\n'), ((2637, 2807), 'bert4torch.models.build_transformer_model', 'build_transformer_model', ([], {'config_path': 'config_path', 'checkpoint_path': 'checkpoint_path', 'with_pool': '(True)', 'with_mlm': '(True)', 'return_model_config': '(True)', 'segment_vocab_size': '(0)'}), '(config_path=config_path, checkpoint_path=\n checkpoint_path, with_pool=True, with_mlm=True, return_model_config=\n True, segment_vocab_size=0)\n', (2660, 2807), False, 'from bert4torch.models import build_transformer_model, BaseModel\n'), ((4372, 4407), 'torch.nn.CrossEntropyLoss', 'nn.CrossEntropyLoss', ([], {'ignore_index': '(0)'}), '(ignore_index=0)\n', (4391, 4407), True, 'import torch.nn as nn\n'), ((4956, 5005), 'sklearn.metrics.pairwise.paired_cosine_distances', 'paired_cosine_distances', (['embeddings1', 'embeddings2'], {}), '(embeddings1, embeddings2)\n', (4979, 5005), False, 'from sklearn.metrics.pairwise import paired_cosine_distances\n'), ((1122, 1139), 'numpy.random.rand', 'np.random.rand', (['n'], {}), '(n)\n', 
(1136, 1139), True, 'import numpy as np\n'), ((1750, 1773), 'bert4torch.snippets.sequence_padding', 'sequence_padding', (['texts'], {}), '(texts)\n', (1766, 1773), False, 'from bert4torch.snippets import sequence_padding, Callback, ListDataset\n'), ((3476, 3491), 'torch.no_grad', 'torch.no_grad', ([], {}), '()\n', (3489, 3491), False, 'import torch\n'), ((1210, 1229), 'numpy.random.choice', 'np.random.choice', (['n'], {}), '(n)\n', (1226, 1229), True, 'import numpy as np\n'), ((1301, 1320), 'numpy.array', 'np.array', (['token_ids'], {}), '(token_ids)\n', (1309, 1320), True, 'import numpy as np\n'), ((3890, 3949), 'torch.sum', 'torch.sum', (['(hidden_state * attention_mask[:, :, None])'], {'dim': '(1)'}), '(hidden_state * attention_mask[:, :, None], dim=1)\n', (3899, 3949), False, 'import torch\n'), ((3310, 3337), 'torch.ones_like', 'torch.ones_like', (['token_ids1'], {}), '(token_ids1)\n', (3325, 3337), False, 'import torch\n'), ((3979, 4011), 'torch.sum', 'torch.sum', (['attention_mask'], {'dim': '(1)'}), '(attention_mask, dim=1)\n', (3988, 4011), False, 'import torch\n'), ((4195, 4222), 'torch.max', 'torch.max', (['seq_state'], {'dim': '(1)'}), '(seq_state, dim=1)\n', (4204, 4222), False, 'import torch\n'), ((4785, 4810), 'torch.concat', 'torch.concat', (['embeddings1'], {}), '(embeddings1)\n', (4797, 4810), False, 'import torch\n'), ((4843, 4868), 'torch.concat', 'torch.concat', (['embeddings2'], {}), '(embeddings2)\n', (4855, 4868), False, 'import torch\n'), ((4896, 4916), 'torch.concat', 'torch.concat', (['labels'], {}), '(labels)\n', (4908, 4916), False, 'import torch\n')] |
from pathlib import Path
from .anki_exporter import AnkiJsonExporter
from ..anki.adapters.anki_deck import AnkiDeck
from ..config.config_settings import ConfigSettings
from ..utils import constants
from ..utils.notifier import AnkiModalNotifier, Notifier
from ..utils.disambiguate_uuids import disambiguate_note_model_uuids
EXPORT_FAILED_TITLE = "Export failed"
class AnkiJsonExporterWrapper:
"""
Wrapper designed to work with standard export dialog in anki.
"""
key = "CrowdAnki JSON representation"
ext = constants.ANKI_EXPORT_EXTENSION
hideTags = True
includeTags = True
directory_export = True
def __init__(self, collection,
deck_id: int = None,
json_exporter: AnkiJsonExporter = None,
notifier: Notifier = None):
self.includeMedia = True
self.did = deck_id
self.count = 0 # Todo?
self.collection = collection
self.anki_json_exporter = json_exporter or AnkiJsonExporter(collection, ConfigSettings.get_instance())
self.notifier = notifier or AnkiModalNotifier()
# required by anki exporting interface with its non-PEP-8 names
# noinspection PyPep8Naming
def exportInto(self, directory_path):
if self.did is None:
self.notifier.warning(EXPORT_FAILED_TITLE, "CrowdAnki export works only for specific decks. "
"Please use CrowdAnki snapshot if you want to export "
"the whole collection.")
return
deck = AnkiDeck(self.collection.decks.get(self.did, default=False))
if deck.is_dynamic:
self.notifier.warning(EXPORT_FAILED_TITLE, "CrowdAnki does not support export for dynamic decks.")
return
# Clean up duplicate note models. See
# https://github.com/Stvad/CrowdAnki/wiki/Workarounds-%E2%80%94-Duplicate-note-model-uuids.
disambiguate_note_model_uuids(self.collection)
# .parent because we receive name with random numbers at the end (hacking around internals of Anki) :(
export_path = Path(directory_path).parent
self.anki_json_exporter.export_to_directory(deck, export_path, self.includeMedia,
create_deck_subdirectory=ConfigSettings.get_instance().export_create_deck_subdirectory)
self.count = self.anki_json_exporter.last_exported_count
def get_exporter_id(exporter):
return f"{exporter.key} (*{exporter.ext})", exporter
def exporters_hook(exporters_list):
exporter_id = get_exporter_id(AnkiJsonExporterWrapper)
if exporter_id not in exporters_list:
exporters_list.append(exporter_id)
| [
"pathlib.Path"
] | [((2165, 2185), 'pathlib.Path', 'Path', (['directory_path'], {}), '(directory_path)\n', (2169, 2185), False, 'from pathlib import Path\n')] |
from functools import lru_cache
from typing import Optional
import requests
from .patches import Patches
class Item:
"""
Manipulation of static item data
"""
ITEM_URL = f"http://ddragon.leagueoflegends.com/cdn/{Patches.get_current_patch()}/data/en_US/item.json"
items = requests.get(ITEM_URL).json()
@classmethod
@lru_cache()
def id_for_name(cls, name: str) -> Optional[str]:
"""
Finds the id for an item given its name
Returns the id, None if not found
name - full name of item
"""
for item_id, item in cls.items["data"].items():
if item["name"].casefold() == name.casefold():
return item_id
@classmethod
@lru_cache()
def name_for_id(cls, item_id: str) -> Optional[str]:
"""
Finds the name for an item given its id
Returns the name, None if not found
item_id - id of item
"""
for found_item_id, item in cls.items["data"].items():
if found_item_id == item_id:
return item["name"] | [
"functools.lru_cache",
"requests.get"
] | [((348, 359), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (357, 359), False, 'from functools import lru_cache\n'), ((732, 743), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (741, 743), False, 'from functools import lru_cache\n'), ((295, 317), 'requests.get', 'requests.get', (['ITEM_URL'], {}), '(ITEM_URL)\n', (307, 317), False, 'import requests\n')] |
# @Time : 2020/10/6
# @Author : <NAME>
# @Email : <EMAIL>
"""
recbole.quick_start
########################
"""
import logging
from logging import getLogger
from recbole.config import Config
from recbole.data import create_dataset, data_preparation
from recbole.utils import init_logger, get_model, get_trainer, init_seed
from recbole.utils.utils import set_color
def run_recbole(model=None, dataset=None, config_file_list=None, config_dict=None, saved=True):
r""" A fast running api, which includes the complete process of
training and testing a model on a specified dataset
Args:
model (str): model name
dataset (str): dataset name
config_file_list (list): config files used to modify experiment parameters
config_dict (dict): parameters dictionary used to modify experiment parameters
saved (bool): whether to save the model
"""
# configurations initialization
config = Config(model=model, dataset=dataset, config_file_list=config_file_list, config_dict=config_dict)
# init_seed(config['seed'], config['reproducibility'])
# logger initialization
init_logger(config)
logger = getLogger()
import os
log_dir = os.path.dirname(logger.handlers[0].baseFilename)
config['log_dir'] = log_dir
logger.info(config)
# dataset filtering
dataset = create_dataset(config)
logger.info(dataset)
# dataset splitting
train_data, valid_data, test_data = data_preparation(config, dataset)
# model loading and initialization
model = get_model(config['model'])(config, train_data).to(config['device'])
logger.info(model)
# trainer loading and initialization
trainer = get_trainer(config['MODEL_TYPE'], config['model'])(config, model)
# model training
best_valid_score, best_valid_result = trainer.fit(
train_data, valid_data, saved=saved, show_progress=config['show_progress']
)
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.decomposition import TruncatedSVD
embedding_matrix = model.item_embedding.weight[1:].cpu().detach().numpy()
svd = TruncatedSVD(n_components=2)
svd.fit(embedding_matrix)
comp_tr = np.transpose(svd.components_)
proj = np.dot(embedding_matrix, comp_tr)
cnt = {}
for i in dataset['item_id']:
if i.item() in cnt:
cnt[i.item()] += 1
else:
cnt[i.item()] = 1
freq = np.zeros(embedding_matrix.shape[0])
for i in cnt:
freq[i-1] = cnt[i]
# freq /= freq.max()
sns.set(style='darkgrid')
sns.set_context("notebook", font_scale=1.8, rc={"lines.linewidth": 3, 'lines.markersize': 20})
plt.figure(figsize=(6, 4.5))
plt.scatter(proj[:, 0], proj[:, 1], s=1, c=freq, cmap='viridis_r')
plt.colorbar()
plt.xlim(-2, 2)
plt.ylim(-2, 2)
# plt.axis('square')
# plt.show()
plt.savefig(log_dir + '/' + config['model'] + '-' + config['dataset'] + '.pdf', format='pdf', transparent=False, bbox_inches='tight')
from scipy.linalg import svdvals
svs = svdvals(embedding_matrix)
svs /= svs.max()
np.save(log_dir + '/sv.npy', svs)
sns.set(style='darkgrid')
sns.set_context("notebook", font_scale=1.8, rc={"lines.linewidth": 3, 'lines.markersize': 20})
plt.figure(figsize=(6, 4.5))
plt.plot(svs)
# plt.show()
plt.savefig(log_dir + '/svs.pdf', format='pdf', transparent=False, bbox_inches='tight')
# model evaluation
test_result = trainer.evaluate(test_data, load_best_model=saved, show_progress=config['show_progress'])
logger.info(set_color('best valid ', 'yellow') + f': {best_valid_result}')
logger.info(set_color('test result', 'yellow') + f': {test_result}')
return {
'best_valid_score': best_valid_score,
'valid_score_bigger': config['valid_metric_bigger'],
'best_valid_result': best_valid_result,
'test_result': test_result
}
def objective_function(config_dict=None, config_file_list=None, saved=True):
r""" The default objective_function used in HyperTuning
Args:
config_dict (dict): parameters dictionary used to modify experiment parameters
config_file_list (list): config files used to modify experiment parameters
saved (bool): whether to save the model
"""
config = Config(config_dict=config_dict, config_file_list=config_file_list)
init_seed(config['seed'], config['reproducibility'])
logging.basicConfig(level=logging.ERROR)
dataset = create_dataset(config)
train_data, valid_data, test_data = data_preparation(config, dataset)
model = get_model(config['model'])(config, train_data).to(config['device'])
trainer = get_trainer(config['MODEL_TYPE'], config['model'])(config, model)
best_valid_score, best_valid_result = trainer.fit(train_data, valid_data, verbose=False, saved=saved)
test_result = trainer.evaluate(test_data, load_best_model=saved)
return {
'best_valid_score': best_valid_score,
'valid_score_bigger': config['valid_metric_bigger'],
'best_valid_result': best_valid_result,
'test_result': test_result
}
| [
"logging.getLogger",
"recbole.utils.init_seed",
"recbole.config.Config",
"numpy.save",
"seaborn.set",
"scipy.linalg.svdvals",
"matplotlib.pyplot.plot",
"recbole.utils.init_logger",
"numpy.dot",
"matplotlib.pyplot.scatter",
"recbole.utils.get_trainer",
"matplotlib.pyplot.ylim",
"matplotlib.py... | [((944, 1044), 'recbole.config.Config', 'Config', ([], {'model': 'model', 'dataset': 'dataset', 'config_file_list': 'config_file_list', 'config_dict': 'config_dict'}), '(model=model, dataset=dataset, config_file_list=config_file_list,\n config_dict=config_dict)\n', (950, 1044), False, 'from recbole.config import Config\n'), ((1133, 1152), 'recbole.utils.init_logger', 'init_logger', (['config'], {}), '(config)\n', (1144, 1152), False, 'from recbole.utils import init_logger, get_model, get_trainer, init_seed\n'), ((1166, 1177), 'logging.getLogger', 'getLogger', ([], {}), '()\n', (1175, 1177), False, 'from logging import getLogger\n'), ((1207, 1255), 'os.path.dirname', 'os.path.dirname', (['logger.handlers[0].baseFilename'], {}), '(logger.handlers[0].baseFilename)\n', (1222, 1255), False, 'import os\n'), ((1356, 1378), 'recbole.data.create_dataset', 'create_dataset', (['config'], {}), '(config)\n', (1370, 1378), False, 'from recbole.data import create_dataset, data_preparation\n'), ((1469, 1502), 'recbole.data.data_preparation', 'data_preparation', (['config', 'dataset'], {}), '(config, dataset)\n', (1485, 1502), False, 'from recbole.data import create_dataset, data_preparation\n'), ((2160, 2188), 'sklearn.decomposition.TruncatedSVD', 'TruncatedSVD', ([], {'n_components': '(2)'}), '(n_components=2)\n', (2172, 2188), False, 'from sklearn.decomposition import TruncatedSVD\n'), ((2233, 2262), 'numpy.transpose', 'np.transpose', (['svd.components_'], {}), '(svd.components_)\n', (2245, 2262), True, 'import numpy as np\n'), ((2274, 2307), 'numpy.dot', 'np.dot', (['embedding_matrix', 'comp_tr'], {}), '(embedding_matrix, comp_tr)\n', (2280, 2307), True, 'import numpy as np\n'), ((2478, 2513), 'numpy.zeros', 'np.zeros', (['embedding_matrix.shape[0]'], {}), '(embedding_matrix.shape[0])\n', (2486, 2513), True, 'import numpy as np\n'), ((2594, 2619), 'seaborn.set', 'sns.set', ([], {'style': '"""darkgrid"""'}), "(style='darkgrid')\n", (2601, 2619), True, 'import 
seaborn as sns\n'), ((2624, 2722), 'seaborn.set_context', 'sns.set_context', (['"""notebook"""'], {'font_scale': '(1.8)', 'rc': "{'lines.linewidth': 3, 'lines.markersize': 20}"}), "('notebook', font_scale=1.8, rc={'lines.linewidth': 3,\n 'lines.markersize': 20})\n", (2639, 2722), True, 'import seaborn as sns\n'), ((2723, 2751), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(6, 4.5)'}), '(figsize=(6, 4.5))\n', (2733, 2751), True, 'import matplotlib.pyplot as plt\n'), ((2756, 2822), 'matplotlib.pyplot.scatter', 'plt.scatter', (['proj[:, 0]', 'proj[:, 1]'], {'s': '(1)', 'c': 'freq', 'cmap': '"""viridis_r"""'}), "(proj[:, 0], proj[:, 1], s=1, c=freq, cmap='viridis_r')\n", (2767, 2822), True, 'import matplotlib.pyplot as plt\n'), ((2827, 2841), 'matplotlib.pyplot.colorbar', 'plt.colorbar', ([], {}), '()\n', (2839, 2841), True, 'import matplotlib.pyplot as plt\n'), ((2846, 2861), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(-2)', '(2)'], {}), '(-2, 2)\n', (2854, 2861), True, 'import matplotlib.pyplot as plt\n'), ((2866, 2881), 'matplotlib.pyplot.ylim', 'plt.ylim', (['(-2)', '(2)'], {}), '(-2, 2)\n', (2874, 2881), True, 'import matplotlib.pyplot as plt\n'), ((2928, 3065), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(log_dir + '/' + config['model'] + '-' + config['dataset'] + '.pdf')"], {'format': '"""pdf"""', 'transparent': '(False)', 'bbox_inches': '"""tight"""'}), "(log_dir + '/' + config['model'] + '-' + config['dataset'] +\n '.pdf', format='pdf', transparent=False, bbox_inches='tight')\n", (2939, 3065), True, 'import matplotlib.pyplot as plt\n'), ((3114, 3139), 'scipy.linalg.svdvals', 'svdvals', (['embedding_matrix'], {}), '(embedding_matrix)\n', (3121, 3139), False, 'from scipy.linalg import svdvals\n'), ((3165, 3198), 'numpy.save', 'np.save', (["(log_dir + '/sv.npy')", 'svs'], {}), "(log_dir + '/sv.npy', svs)\n", (3172, 3198), True, 'import numpy as np\n'), ((3204, 3229), 'seaborn.set', 'sns.set', ([], {'style': '"""darkgrid"""'}), 
"(style='darkgrid')\n", (3211, 3229), True, 'import seaborn as sns\n'), ((3234, 3332), 'seaborn.set_context', 'sns.set_context', (['"""notebook"""'], {'font_scale': '(1.8)', 'rc': "{'lines.linewidth': 3, 'lines.markersize': 20}"}), "('notebook', font_scale=1.8, rc={'lines.linewidth': 3,\n 'lines.markersize': 20})\n", (3249, 3332), True, 'import seaborn as sns\n'), ((3333, 3361), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(6, 4.5)'}), '(figsize=(6, 4.5))\n', (3343, 3361), True, 'import matplotlib.pyplot as plt\n'), ((3366, 3379), 'matplotlib.pyplot.plot', 'plt.plot', (['svs'], {}), '(svs)\n', (3374, 3379), True, 'import matplotlib.pyplot as plt\n'), ((3401, 3492), 'matplotlib.pyplot.savefig', 'plt.savefig', (["(log_dir + '/svs.pdf')"], {'format': '"""pdf"""', 'transparent': '(False)', 'bbox_inches': '"""tight"""'}), "(log_dir + '/svs.pdf', format='pdf', transparent=False,\n bbox_inches='tight')\n", (3412, 3492), True, 'import matplotlib.pyplot as plt\n'), ((4374, 4440), 'recbole.config.Config', 'Config', ([], {'config_dict': 'config_dict', 'config_file_list': 'config_file_list'}), '(config_dict=config_dict, config_file_list=config_file_list)\n', (4380, 4440), False, 'from recbole.config import Config\n'), ((4445, 4497), 'recbole.utils.init_seed', 'init_seed', (["config['seed']", "config['reproducibility']"], {}), "(config['seed'], config['reproducibility'])\n", (4454, 4497), False, 'from recbole.utils import init_logger, get_model, get_trainer, init_seed\n'), ((4502, 4542), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.ERROR'}), '(level=logging.ERROR)\n', (4521, 4542), False, 'import logging\n'), ((4557, 4579), 'recbole.data.create_dataset', 'create_dataset', (['config'], {}), '(config)\n', (4571, 4579), False, 'from recbole.data import create_dataset, data_preparation\n'), ((4620, 4653), 'recbole.data.data_preparation', 'data_preparation', (['config', 'dataset'], {}), '(config, dataset)\n', (4636, 4653), False, 'from 
recbole.data import create_dataset, data_preparation\n'), ((1702, 1752), 'recbole.utils.get_trainer', 'get_trainer', (["config['MODEL_TYPE']", "config['model']"], {}), "(config['MODEL_TYPE'], config['model'])\n", (1713, 1752), False, 'from recbole.utils import init_logger, get_model, get_trainer, init_seed\n'), ((4748, 4798), 'recbole.utils.get_trainer', 'get_trainer', (["config['MODEL_TYPE']", "config['model']"], {}), "(config['MODEL_TYPE'], config['model'])\n", (4759, 4798), False, 'from recbole.utils import init_logger, get_model, get_trainer, init_seed\n'), ((3638, 3672), 'recbole.utils.utils.set_color', 'set_color', (['"""best valid """', '"""yellow"""'], {}), "('best valid ', 'yellow')\n", (3647, 3672), False, 'from recbole.utils.utils import set_color\n'), ((3717, 3751), 'recbole.utils.utils.set_color', 'set_color', (['"""test result"""', '"""yellow"""'], {}), "('test result', 'yellow')\n", (3726, 3751), False, 'from recbole.utils.utils import set_color\n'), ((1555, 1581), 'recbole.utils.get_model', 'get_model', (["config['model']"], {}), "(config['model'])\n", (1564, 1581), False, 'from recbole.utils import init_logger, get_model, get_trainer, init_seed\n'), ((4666, 4692), 'recbole.utils.get_model', 'get_model', (["config['model']"], {}), "(config['model'])\n", (4675, 4692), False, 'from recbole.utils import init_logger, get_model, get_trainer, init_seed\n')] |
import os
import urllib
from google.appengine.api import users
from google.appengine.ext import ndb
import jinja2
import webapp2
from sys import argv
import datetime
import pickle
import sys
sys.path.insert(0, 'libs')
import BeautifulSoup
from bs4 import BeautifulSoup
import requests
import json
JINJA_ENVIRONMENT = jinja2.Environment(
loader=jinja2.FileSystemLoader(os.path.dirname(__file__)),
extensions=['jinja2.ext.autoescape', 'jinja2.ext.loopcontrols'],
autoescape=True)
url = 'http://www.njtransit.com/sf/sf_servlet.srv?hdnPageAction=TrainSchedulesFrom'
pu_code = "124_PRIN"
ny_code = "105_BNTN"
prs = "Princeton"
nyp = "New York Penn Station"
# get date
today = datetime.date.today()
str_date = today.__format__("%m/%d/%Y")
# trip info
toNY_dict = {'selOrigin': pu_code, 'selDestination': ny_code, 'datepicker': str_date, 'OriginDescription': prs, 'DestDescription': nyp}
toPU_dict = {'selOrigin': ny_code, 'selDestination': pu_code, 'datepicker': str_date, 'OriginDescription': nyp, 'DestDescription': prs}
# get to webpage with data for the day
with requests.Session() as re:
toNY = re.post(url, data=toNY_dict)
toPU = re.post(url, data=toPU_dict)
toPUhtml = toPU.text
toNYhtml = toNY.text
#Reads in html file and name of destination and outputs csv file with comma spliced file of train information
def scrape(html,destination):
title = str(today) + str(destination)
soup = BeautifulSoup(html)
# Improvements: instead of being so hacky with 10 search for td
# Gather all lines in table
table1 = soup.find_all("tr")
table2 = table1[10] #table1[10] contains the table of interest
table3 = table2.find_all('span')
# Create 7 lists
origin = [] #Times for departure at origin
origintrain = []
transferarrive = [] #Times for arrival at transfer
transferdepart = [] #Time for departure at transfer
transfertrain = [] #Train or bus number
destination = [] #Time of arrival at destination
total = [] #Total time of Travel
#Create 3 Columns of Text File
origin.append("Origin Departure") #Times for departure at origin
origintrain.append("Origin Train")
transferarrive.append("Transfer Arrival") #Times for arrival at transfer
transferdepart.append("Transfer Departure") #Time for departure at transfer
transfertrain.append("Transfer Train or Bus")
destination.append("Destination Arrival") #Time of arrival at destination
total.append("Total Travel Time") #Total time of Travel
#Store 4 columns into 4 lists
#Regex and pull approapriate data
for i in range(4, len(table3)-3, 4):
#origin.append(str(table3[i].text)[0:len(table3[i].text)])
origin.append(str(table3[i].text)[0:8])
origintrain.append(str(table3[i].text)[-5:])
transferarrive.append(str(table3[i+1].text)[7:15])
transferdepart.append(str(table3[i+1].text)[39:48])
transfertrain.append(str(table3[i+1].text)[-5:])
destination.append(str(table3[i+2].text)[0:len(table3[i+2].text)])
total.append(str(table3[i+3].text)[0:len(table3[i+3].text)])
#text_file = open(str(title) + ".csv", "w")
Dict = {'origin': origin[1:], 'transferarrive' : transferarrive[1:], 'transferdepart': transferdepart[1:], 'destination':destination[1:]}
return Dict
#Create csv files for to Princeton and to New York
toPUDict = scrape(toPUhtml, 'PU')
toNYDict = scrape(toNYhtml, 'NY')
class njdata(ndb.Model):
"""Models an individual Guestbook entry with author, content, and date."""
originstring = ndb.StringProperty(repeated = True)
transferarrivestring = ndb.StringProperty(repeated = True)
transferdepartstring = ndb.StringProperty(repeated = True)
destinationstring = ndb.StringProperty(repeated = True)
date = ndb.DateTimeProperty(auto_now_add=True) #Need date to get most recent data
globalPUDict = {}
class Test123(webapp2.RequestHandler):
def get(self):
toPUdata = njdata()
#toNYdata = njdata()
#toPUdata.content = pickle.dumps(toPUDict)
toPUdata.originstring = toPUDict['origin']
toPUdata.transferarrivestring = toPUDict['transferarrive']
toPUdata.transferdepartstring = toPUDict['transferdepart']
toPUdata.destinationstring = toPUDict['destination']
#Save data into data models
toPUdata.put()
#toNYdata.put()
toPUdata_query = toPUdata.query().order(-njdata.date)
a = toPUdata_query.fetch(1)
global globalPUDict
globalPUDict = {'origin': a[0].originstring, 'transferarrive': a[0].transferarrivestring, 'transferdepart': a[0].transferdepartstring, 'destination': a[0].destinationstring}
self.response.write(globalPUDict)
self.response.write(toPUDict)
class MainPage(webapp2.RequestHandler):
def get(self):
template = JINJA_ENVIRONMENT.get_template('index.html')
self.response.write(template.render())
class ToNY(webapp2.RequestHandler):
def get(self):
template = JINJA_ENVIRONMENT.get_template('toNY.html')
self.response.write(template.render(toNYDict))
class ToPU(webapp2.RequestHandler):
def get(self):
self.response.write(globalPUDict)
template = JINJA_ENVIRONMENT.get_template('toPU.html')
self.response.write(template.render(globalPUDict))
application = webapp2.WSGIApplication([
('/', MainPage),
('/toNY', ToNY),
('/toPU', ToPU),
('/test', Test123),
], debug=True)
| [
"sys.path.insert",
"requests.Session",
"bs4.BeautifulSoup",
"os.path.dirname",
"webapp2.WSGIApplication",
"datetime.date.today",
"google.appengine.ext.ndb.DateTimeProperty",
"google.appengine.ext.ndb.StringProperty"
] | [((197, 223), 'sys.path.insert', 'sys.path.insert', (['(0)', '"""libs"""'], {}), "(0, 'libs')\n", (212, 223), False, 'import sys\n'), ((694, 715), 'datetime.date.today', 'datetime.date.today', ([], {}), '()\n', (713, 715), False, 'import datetime\n'), ((5391, 5503), 'webapp2.WSGIApplication', 'webapp2.WSGIApplication', (["[('/', MainPage), ('/toNY', ToNY), ('/toPU', ToPU), ('/test', Test123)]"], {'debug': '(True)'}), "([('/', MainPage), ('/toNY', ToNY), ('/toPU', ToPU),\n ('/test', Test123)], debug=True)\n", (5414, 5503), False, 'import webapp2\n'), ((1086, 1104), 'requests.Session', 'requests.Session', ([], {}), '()\n', (1102, 1104), False, 'import requests\n'), ((1432, 1451), 'bs4.BeautifulSoup', 'BeautifulSoup', (['html'], {}), '(html)\n', (1445, 1451), False, 'from bs4 import BeautifulSoup\n'), ((3584, 3617), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {'repeated': '(True)'}), '(repeated=True)\n', (3602, 3617), False, 'from google.appengine.ext import ndb\n'), ((3648, 3681), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {'repeated': '(True)'}), '(repeated=True)\n', (3666, 3681), False, 'from google.appengine.ext import ndb\n'), ((3712, 3745), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {'repeated': '(True)'}), '(repeated=True)\n', (3730, 3745), False, 'from google.appengine.ext import ndb\n'), ((3773, 3806), 'google.appengine.ext.ndb.StringProperty', 'ndb.StringProperty', ([], {'repeated': '(True)'}), '(repeated=True)\n', (3791, 3806), False, 'from google.appengine.ext import ndb\n'), ((3820, 3859), 'google.appengine.ext.ndb.DateTimeProperty', 'ndb.DateTimeProperty', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (3840, 3859), False, 'from google.appengine.ext import ndb\n'), ((381, 406), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (396, 406), False, 'import os\n')] |
import json
from django.utils.encoding import force_text
from germanium.tools import assert_true, assert_not_equal
from germanium.test_cases.client import ClientTestCase
from germanium.decorators import login
from germanium.crawler import Crawler, LinkExtractor, HtmlLinkExtractor as OriginalHtmlLinkExtractor
def flatt_list(iterable_value):
flatten_list = []
for val in iterable_value:
if isinstance(val, list):
flatten_list += val
else:
flatten_list.append(val)
return flatten_list
class JSONLinkExtractor(LinkExtractor):
def _extract_web_links(self, data):
return flatt_list(data.values())
def _extract_rest_links(self, data):
links = []
for rest_link in data.values():
if 'GET' in rest_link['methods']:
links += flatt_list([rest_link['url']])
return links
def _extract_from_dict(self, data):
links = []
for key, val in data.items():
if key == '_web_links':
links += self._extract_web_links(val)
elif key == '_rest_links':
links += self._extract_rest_links(val)
elif isinstance(val, (list, tuple)):
links += self._extract_from_list(val)
elif isinstance(val, dict):
links += self._extract_from_dict(val)
return links
def _extract_from_list(self, data):
links = []
for val in data:
if isinstance(val, dict):
links += self._extract_from_dict(val)
elif isinstance(val, (list, tuple)):
links += self._extract_from_list(val)
return links
def extract(self, content):
data = json.loads(content)
if isinstance(data, dict):
links = self._extract_from_dict(data)
elif isinstance(data, (list, tuple)):
links = self._extract_from_list(data)
return links
class HTMLLinkExtractor(OriginalHtmlLinkExtractor):
link_attr_names = ('href', 'src', 'data-resource')
class TextPlainSnippetsExtractor(LinkExtractor):
def extract(self, content):
links = []
try:
data = json.loads(content)
html_extractor = HTMLLinkExtractor()
for html in data.get('snippets', {}).values():
links += html_extractor.extract(html)
except ValueError:
# I text/plain is not snippet return empty links
pass
return links
class CrawlerTestCase(ClientTestCase):
    """Crawls the whole site as each tested user and checks response codes.

    Subclasses provide the users via :meth:`get_users` and may exclude
    additional URLs through :attr:`exclude_urls`.
    """

    REST_BASE = None
    exclude_urls = ()

    def get_users(self):
        raise NotImplementedError

    def get_exlude_urls(self):
        return list(self.exclude_urls) + ['/logout/']

    @login(users_generator='get_users')
    def test_crawler(self):
        self.logger.info('\n---------------------------')
        self.logger.info('Test crawling with logged user %s' % self.logged_user.user)

        visited_urls = []
        warning_urls = []

        def pre_request(url, referer, headers):
            # Ask the REST API to expose its link fields so the crawler can
            # follow them.
            if url.startswith('/api/'):
                headers['HTTP_X_FIELDS'] = '_rest_links,_web_links'
                if self.REST_BASE:
                    headers['HTTP_X_BASE'] = str(self.REST_BASE)
            return url, headers

        def post_response(url, referer, resp, exception):
            visited_urls.append(url)
            assert_true(exception is None, msg='Received exception %s, url %s' % (force_text(exception), url))
            # Non-200 responses are only logged as warnings...
            if resp.status_code != 200:
                warning_urls.append(url)
                self.logger.warning('Response code for url %s from referer %s should be 200 but code is %s, user %s' %
                                    (url, referer, resp.status_code, self.logged_user.user))
            # ...but a server error always fails the test.
            assert_not_equal(resp.status_code, 500, msg='Response code for url %s from referer %s is 500, user %s' %
                                                        (url, referer, self.logged_user.user))

        extractors = {'application/json; charset=utf-8': JSONLinkExtractor(),
                      'text/plain': TextPlainSnippetsExtractor(),
                      'default': HTMLLinkExtractor()}
        crawler = Crawler(self.c, ('/',), self.get_exlude_urls(), pre_request, post_response,
                          extra_link_extractors=extractors)
        crawler.run()

        self.logger.info('Completed with tested %s urls (warnings %s)' % (len(visited_urls), len(warning_urls)))
        self.logger.info('---------------------------')
| [
"django.utils.encoding.force_text",
"germanium.tools.assert_not_equal",
"json.loads",
"germanium.decorators.login"
] | [((2752, 2786), 'germanium.decorators.login', 'login', ([], {'users_generator': '"""get_users"""'}), "(users_generator='get_users')\n", (2757, 2786), False, 'from germanium.decorators import login\n'), ((1738, 1757), 'json.loads', 'json.loads', (['content'], {}), '(content)\n', (1748, 1757), False, 'import json\n'), ((2205, 2224), 'json.loads', 'json.loads', (['content'], {}), '(content)\n', (2215, 2224), False, 'import json\n'), ((3801, 3953), 'germanium.tools.assert_not_equal', 'assert_not_equal', (['resp.status_code', '(500)'], {'msg': "('Response code for url %s from referer %s is 500, user %s' % (url, referer,\n self.logged_user.user))"}), "(resp.status_code, 500, msg=\n 'Response code for url %s from referer %s is 500, user %s' % (url,\n referer, self.logged_user.user))\n", (3817, 3953), False, 'from germanium.tools import assert_true, assert_not_equal\n'), ((3468, 3489), 'django.utils.encoding.force_text', 'force_text', (['exception'], {}), '(exception)\n', (3478, 3489), False, 'from django.utils.encoding import force_text\n')] |
##############################################################################
#
# Copyright (c) 2009 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""
$Id$
"""
from zope import component, interface
from zc.copy.interfaces import ICopyHook
from data import File, Image
from interfaces import IFile, IImage
@component.adapter(IFile)
@interface.implementer(ICopyHook)
def fileCopyFactory(original):
    """Copy hook that clones an ``IFile`` without eagerly copying its data.

    The metadata attributes are copied immediately; the (potentially
    large) ``data`` payload is only transferred once the copy machinery
    invokes the registered after-copy callback.
    """
    def factory(location, register):
        clone = File()
        for name in ('filename', 'mimeType', 'disablePreview', 'disablePrint'):
            setattr(clone, name, getattr(original, name))

        def afterCopy(translate):
            clone.data = original.data

        register(afterCopy)
        return clone
    return factory
@component.adapter(IImage)
@interface.implementer(ICopyHook)
def imageCopyFactory(original):
    """Copy hook that clones an ``IImage``, deferring the pixel-data copy.

    ``data`` is assigned only in the after-copy callback registered with
    the copy machinery.
    """
    def factory(location, register):
        clone = Image()
        clone.filename = original.filename
        clone.mimeType = original.mimeType

        def afterCopy(translate):
            clone.data = original.data

        register(afterCopy)
        return clone
    return factory
| [
"data.File",
"zope.interface.implementer",
"data.Image",
"zope.component.adapter"
] | [((799, 823), 'zope.component.adapter', 'component.adapter', (['IFile'], {}), '(IFile)\n', (816, 823), False, 'from zope import component, interface\n'), ((825, 857), 'zope.interface.implementer', 'interface.implementer', (['ICopyHook'], {}), '(ICopyHook)\n', (846, 857), False, 'from zope import component, interface\n'), ((1280, 1305), 'zope.component.adapter', 'component.adapter', (['IImage'], {}), '(IImage)\n', (1297, 1305), False, 'from zope import component, interface\n'), ((1307, 1339), 'zope.interface.implementer', 'interface.implementer', (['ICopyHook'], {}), '(ICopyHook)\n', (1328, 1339), False, 'from zope import component, interface\n'), ((941, 947), 'data.File', 'File', ([], {}), '()\n', (945, 947), False, 'from data import File, Image\n'), ((1425, 1432), 'data.Image', 'Image', ([], {}), '()\n', (1430, 1432), False, 'from data import File, Image\n')] |
import sys
verbose = False
def set_v(v):
    """Enable or disable verbose output for this module (module-wide flag)."""
    global verbose
    verbose = v
def print_v(s):
    """Print *s* (with a trailing newline) only when verbose mode is on."""
    if not verbose:
        return
    print(s)
def write_v(s):
    """Write *s* to stdout without a trailing newline, only in verbose mode."""
    if not verbose:
        return
    sys.stdout.write(s)
| [
"sys.stdout.write"
] | [((172, 191), 'sys.stdout.write', 'sys.stdout.write', (['s'], {}), '(s)\n', (188, 191), False, 'import sys\n')] |