max_stars_repo_path stringlengths 4 286 | max_stars_repo_name stringlengths 5 119 | max_stars_count int64 0 191k | id stringlengths 1 7 | content stringlengths 6 1.03M | content_cleaned stringlengths 6 1.03M | language stringclasses 111 values | language_score float64 0.03 1 | comments stringlengths 0 556k | edu_score float64 0.32 5.03 | edu_int_score int64 0 5 |
|---|---|---|---|---|---|---|---|---|---|---|
catdog/apps.py | kisna72/django-keras-catdogs | 0 | 6619651 | <filename>catdog/apps.py
from django.apps import AppConfig
class CatdogConfig(AppConfig):
name = 'catdog'
| <filename>catdog/apps.py
from django.apps import AppConfig
class CatdogConfig(AppConfig):
name = 'catdog'
| none | 1 | 1.302776 | 1 | |
PythonDownload/pythonexercicios/ex045.py | GitGuii/PythonExs | 1 | 6619652 | <reponame>GitGuii/PythonExs<filename>PythonDownload/pythonexercicios/ex045.py
import sys
from random import randint
from time import sleep
itens = ('pedra','papel','tesoura')
computador = randint(0,2)
print('''Suas opçês
[0] pedra
[1] papel
[2] tesoura''')
jogador =int(input("Qual a sua escolha? "))
print('JO')
sleep(1)
print('KEN')
sleep(1)
print('PO!!!')
sleep(1)
print('-='*15)
print("O computador jogou {}".format(itens[computador]))
print("O jogador jogou {}".format(itens[jogador]))
print('-='*15)
if computador == 0:
if jogador == 0:
print('EMPATE')
elif jogador == 1:
print('JOGADOR VENCEU')
elif jogador == 2:
print('COMPUTADOR VENCEU')
else:
print("Opção invalida")
elif computador == 1:
if jogador == 0:
print('COMPUTADOR VENCEU')
elif jogador == 1:
print('EMPATE')
elif jogador == 2:
print('JOGADOR VENCEU')
else:
print("Opção invalida")
elif computador == 2:
if jogador == 0:
print('JOGADOR VENCEU')
elif jogador == 1:
print('COMPUTADOR VENCEU')
elif jogador == 2:
print('EMPATE')
else:
print("Opção invalida")
| import sys
from random import randint
from time import sleep
itens = ('pedra','papel','tesoura')
computador = randint(0,2)
print('''Suas opçês
[0] pedra
[1] papel
[2] tesoura''')
jogador =int(input("Qual a sua escolha? "))
print('JO')
sleep(1)
print('KEN')
sleep(1)
print('PO!!!')
sleep(1)
print('-='*15)
print("O computador jogou {}".format(itens[computador]))
print("O jogador jogou {}".format(itens[jogador]))
print('-='*15)
if computador == 0:
if jogador == 0:
print('EMPATE')
elif jogador == 1:
print('JOGADOR VENCEU')
elif jogador == 2:
print('COMPUTADOR VENCEU')
else:
print("Opção invalida")
elif computador == 1:
if jogador == 0:
print('COMPUTADOR VENCEU')
elif jogador == 1:
print('EMPATE')
elif jogador == 2:
print('JOGADOR VENCEU')
else:
print("Opção invalida")
elif computador == 2:
if jogador == 0:
print('JOGADOR VENCEU')
elif jogador == 1:
print('COMPUTADOR VENCEU')
elif jogador == 2:
print('EMPATE')
else:
print("Opção invalida") | pt | 0.919554 | Suas opçês [0] pedra [1] papel [2] tesoura | 3.745723 | 4 |
project/pfasst/playground.py | amit17133129/pyMG-2016 | 2 | 6619653 | <gh_stars>1-10
import numpy as np
import scipy.sparse.linalg as splinalg
from pymg.collocation_classes import CollGaussRadau_Right
from pymg.space_time_base import CollocationTimeStepBase
from project.pfasst.pfasst import SimplePFASSTCollocationProblem
from project.pfasst.plot_tools import matrix_plot, heat_map
from project.poisson1d import Poisson1D
from project.pfasst.block_smoother import BlockGaussSeidel, WeightedBlockJacobi
from project.pfasst.analyse_tools import SmootherAnalyser, SimplePFASSTProblemSetup, SimplePFASSTMultiGridAnalyser
from project.pfasst.pfasst import *
def show_call_order(cls, methname):
'Utility to show the call chain'
classes = [cls for cls in cls.__mro__ if methname in cls.__dict__]
print ' ==> '.join('%s.%s' % (cls.__name__, methname) for cls in classes)
if __name__ == "__main__":
# build SimplePFASSTCollocationProblem
num_nodes = 3
num_subintervals = 2
num_space = 16
k = 1
dt = 0.01
GRC = CollGaussRadau_Right(num_nodes, 0.0, 1.0)
Q = GRC.Qmat[1:, 1:]
QD = GRC.QDmat
# matrix_plot(QD)
# print QD.shape, Q.shape
nodes = GRC.nodes
CTSB = CollocationTimeStepBase(0.0, dt, Q, nodes)
CTSB_delta = CollocationTimeStepBase(0.0, dt, QD, nodes)
SpaceProblem = Poisson1D(num_space)
omega_h = np.linspace(1 / (num_space + 1), 1.0, num_space)
u_init = np.sin(2 * np.pi * np.linspace(1 / (num_space + 1), 1.0, num_space))
u_init_gen = lambda x: np.sin(2 * np.pi * x)
# show_call_order(SimplePFASSTCollocationProblem, '__init__')
PFASSTCollocProb = SimplePFASSTCollocationProblem(num_subintervals, CTSB, SpaceProblem, u_init_gen)
PFASSTPrecondProb = SimplePFASSTCollocationProblem(num_subintervals, CTSB_delta, SpaceProblem, u_init_gen)
sol = splinalg.spsolve(PFASSTCollocProb.A, PFASSTCollocProb.rhs)
sol_precond = splinalg.spsolve(PFASSTPrecondProb.A, PFASSTPrecondProb.rhs)
print '-' * 20
print 'Error between the precond and colloc', np.linalg.norm(sol - sol_precond, 2)
# heat_map(PFASSTCollocProb.rhs.reshape(-1, num_space))
# heat_map(sol.reshape(-1, num_space))
# heat_map(sol_precond.reshape(-1, num_space))
# heat_map(sol_precond.reshape(-1, num_space)-sol.reshape(-1, num_space))
# test the smoothers
# jac_smoother = WeightedBlockJacobi(PFASSTCollocProb,2.0/3.0)
# gs_smoother = BlockGaussSeidel(PFASSTCollocProb)
approx_jac_smoother = WeightedBlockJacobi(PFASSTPrecondProb, 2.0 / 3.0)
approx_gs_smoother = BlockGaussSeidel(PFASSTPrecondProb)
analyser_approx_jac = SmootherAnalyser(approx_jac_smoother, PFASSTCollocProb)
analyser_approx_gs = SmootherAnalyser(approx_gs_smoother, PFASSTCollocProb)
init = np.kron(np.asarray([1] * num_nodes + [1] * num_nodes * (num_subintervals - 1)), u_init)
print 'First 10 errors of approx block-Jacobi \n', analyser_approx_jac.errors(init, 10)
print 'First 10 errors of approx block-Gauss-Seidel \n', analyser_approx_gs.errors(init, 10)
print '-' * 20
# print 'Next build a really simple PFASST-Solver with two levels, where the space problem is just solved'
# c_strat = lambda nx, ny: (nx / 2, ny)
# s_pfasst_mgrid = SimplePFASSTMultigrid(PFASSTCollocProb, SolverSmoother, 4, c_strat=c_strat)
# print '...done, attach Smoother'
# s_pfasst_mgrid.attach_smoother(SimplePFASSTSmoother)
# print '...done, attach TransferOperator'
# # first set the options
# opts = {'space': {'i_ord': 4, 'r_ord': 2, 'periodic': False},
# 'time': {'i_ord': 4, 'r_ord': 2, 'periodic': False, 'new_nodes': CollGaussRadau_Right(5, 0.0, 1.0).nodes}}
# opt_list = [opts]*3
# s_pfasst_mgrid.attach_transfer(SimplePFASSTTransfer, opt_list)
# print '...done, check hierarchy chosen'
# print 'ndofs_list', s_pfasst_mgrid.ndofs_list
#
# one_v_cycle = s_pfasst_mgrid.do_v_cycle_recursive(init, PFASSTCollocProb.rhs, 0, 1, 0)
# print 'Error after 1 V-cycle', np.linalg.norm(sol - one_v_cycle, 2)
print 'Start using the SimplePFASSTMultiGridAnalyser'
# first the setup
pfasst_setup = SimplePFASSTProblemSetup(init_value_gen=lambda x: np.sin(2 * np.pi * x),
num_nodes=5, num_subintervals=4,
CollocationClass=CollGaussRadau_Right,
space_problem=Poisson1D(128),
c_strat=lambda nx, ny: (nx / 2, ny),
nlevels=2,
transfer_opts_space={'i_ord': 8, 'r_ord': 2, 'periodic': False},
transfer_opts_time={'i_ord': 4, 'r_ord': 2, 'periodic': False,
'num_new_nodes': 3})
dt = 0.001
print pfasst_setup
# now the analyser with the canonical parts
print "Assembling the analyser"
pfasst_analyser = SimplePFASSTMultiGridAnalyser(pfasst_setup)
pfasst_analyser.generate_pfasst_multigrid(dt)
u_init = pfasst_setup.init_value_gen(pfasst_setup.space_problem.domain)
init = np.kron(np.ones(pfasst_setup.num_nodes * pfasst_setup.num_subintervals), u_init)
err, res = pfasst_analyser.check_v_cycles(init, 10, 1, 1)
print "Checking the V-Cycle:"
print 'V-Cycle - errors :\t\t', err
print 'V-Cycle - residuals:\t', res
print "Checking if iteration matrix may be constructed"
# T_v_cycle, P_inv_v_cycle = pfasst_analyser.get_v_cycle_it_matrix()
# sol_vec = pfasst_setup.solution(dt)
# err_vec_list = [sol_vec - init]
# for i in range(9):
# err_vec_list.append(T_v_cycle.dot(err_vec_list[-1]))
# print "V-Cycle - errors:\t\t", map(lambda x: np.linalg.norm(x, 2), err_vec_list)
# # ARGGGGGHHHHHHHH, passt nicht sofort
| import numpy as np
import scipy.sparse.linalg as splinalg
from pymg.collocation_classes import CollGaussRadau_Right
from pymg.space_time_base import CollocationTimeStepBase
from project.pfasst.pfasst import SimplePFASSTCollocationProblem
from project.pfasst.plot_tools import matrix_plot, heat_map
from project.poisson1d import Poisson1D
from project.pfasst.block_smoother import BlockGaussSeidel, WeightedBlockJacobi
from project.pfasst.analyse_tools import SmootherAnalyser, SimplePFASSTProblemSetup, SimplePFASSTMultiGridAnalyser
from project.pfasst.pfasst import *
def show_call_order(cls, methname):
'Utility to show the call chain'
classes = [cls for cls in cls.__mro__ if methname in cls.__dict__]
print ' ==> '.join('%s.%s' % (cls.__name__, methname) for cls in classes)
if __name__ == "__main__":
# build SimplePFASSTCollocationProblem
num_nodes = 3
num_subintervals = 2
num_space = 16
k = 1
dt = 0.01
GRC = CollGaussRadau_Right(num_nodes, 0.0, 1.0)
Q = GRC.Qmat[1:, 1:]
QD = GRC.QDmat
# matrix_plot(QD)
# print QD.shape, Q.shape
nodes = GRC.nodes
CTSB = CollocationTimeStepBase(0.0, dt, Q, nodes)
CTSB_delta = CollocationTimeStepBase(0.0, dt, QD, nodes)
SpaceProblem = Poisson1D(num_space)
omega_h = np.linspace(1 / (num_space + 1), 1.0, num_space)
u_init = np.sin(2 * np.pi * np.linspace(1 / (num_space + 1), 1.0, num_space))
u_init_gen = lambda x: np.sin(2 * np.pi * x)
# show_call_order(SimplePFASSTCollocationProblem, '__init__')
PFASSTCollocProb = SimplePFASSTCollocationProblem(num_subintervals, CTSB, SpaceProblem, u_init_gen)
PFASSTPrecondProb = SimplePFASSTCollocationProblem(num_subintervals, CTSB_delta, SpaceProblem, u_init_gen)
sol = splinalg.spsolve(PFASSTCollocProb.A, PFASSTCollocProb.rhs)
sol_precond = splinalg.spsolve(PFASSTPrecondProb.A, PFASSTPrecondProb.rhs)
print '-' * 20
print 'Error between the precond and colloc', np.linalg.norm(sol - sol_precond, 2)
# heat_map(PFASSTCollocProb.rhs.reshape(-1, num_space))
# heat_map(sol.reshape(-1, num_space))
# heat_map(sol_precond.reshape(-1, num_space))
# heat_map(sol_precond.reshape(-1, num_space)-sol.reshape(-1, num_space))
# test the smoothers
# jac_smoother = WeightedBlockJacobi(PFASSTCollocProb,2.0/3.0)
# gs_smoother = BlockGaussSeidel(PFASSTCollocProb)
approx_jac_smoother = WeightedBlockJacobi(PFASSTPrecondProb, 2.0 / 3.0)
approx_gs_smoother = BlockGaussSeidel(PFASSTPrecondProb)
analyser_approx_jac = SmootherAnalyser(approx_jac_smoother, PFASSTCollocProb)
analyser_approx_gs = SmootherAnalyser(approx_gs_smoother, PFASSTCollocProb)
init = np.kron(np.asarray([1] * num_nodes + [1] * num_nodes * (num_subintervals - 1)), u_init)
print 'First 10 errors of approx block-Jacobi \n', analyser_approx_jac.errors(init, 10)
print 'First 10 errors of approx block-Gauss-Seidel \n', analyser_approx_gs.errors(init, 10)
print '-' * 20
# print 'Next build a really simple PFASST-Solver with two levels, where the space problem is just solved'
# c_strat = lambda nx, ny: (nx / 2, ny)
# s_pfasst_mgrid = SimplePFASSTMultigrid(PFASSTCollocProb, SolverSmoother, 4, c_strat=c_strat)
# print '...done, attach Smoother'
# s_pfasst_mgrid.attach_smoother(SimplePFASSTSmoother)
# print '...done, attach TransferOperator'
# # first set the options
# opts = {'space': {'i_ord': 4, 'r_ord': 2, 'periodic': False},
# 'time': {'i_ord': 4, 'r_ord': 2, 'periodic': False, 'new_nodes': CollGaussRadau_Right(5, 0.0, 1.0).nodes}}
# opt_list = [opts]*3
# s_pfasst_mgrid.attach_transfer(SimplePFASSTTransfer, opt_list)
# print '...done, check hierarchy chosen'
# print 'ndofs_list', s_pfasst_mgrid.ndofs_list
#
# one_v_cycle = s_pfasst_mgrid.do_v_cycle_recursive(init, PFASSTCollocProb.rhs, 0, 1, 0)
# print 'Error after 1 V-cycle', np.linalg.norm(sol - one_v_cycle, 2)
print 'Start using the SimplePFASSTMultiGridAnalyser'
# first the setup
pfasst_setup = SimplePFASSTProblemSetup(init_value_gen=lambda x: np.sin(2 * np.pi * x),
num_nodes=5, num_subintervals=4,
CollocationClass=CollGaussRadau_Right,
space_problem=Poisson1D(128),
c_strat=lambda nx, ny: (nx / 2, ny),
nlevels=2,
transfer_opts_space={'i_ord': 8, 'r_ord': 2, 'periodic': False},
transfer_opts_time={'i_ord': 4, 'r_ord': 2, 'periodic': False,
'num_new_nodes': 3})
dt = 0.001
print pfasst_setup
# now the analyser with the canonical parts
print "Assembling the analyser"
pfasst_analyser = SimplePFASSTMultiGridAnalyser(pfasst_setup)
pfasst_analyser.generate_pfasst_multigrid(dt)
u_init = pfasst_setup.init_value_gen(pfasst_setup.space_problem.domain)
init = np.kron(np.ones(pfasst_setup.num_nodes * pfasst_setup.num_subintervals), u_init)
err, res = pfasst_analyser.check_v_cycles(init, 10, 1, 1)
print "Checking the V-Cycle:"
print 'V-Cycle - errors :\t\t', err
print 'V-Cycle - residuals:\t', res
print "Checking if iteration matrix may be constructed"
# T_v_cycle, P_inv_v_cycle = pfasst_analyser.get_v_cycle_it_matrix()
# sol_vec = pfasst_setup.solution(dt)
# err_vec_list = [sol_vec - init]
# for i in range(9):
# err_vec_list.append(T_v_cycle.dot(err_vec_list[-1]))
# print "V-Cycle - errors:\t\t", map(lambda x: np.linalg.norm(x, 2), err_vec_list)
# # ARGGGGGHHHHHHHH, passt nicht sofort | en | 0.313595 | # build SimplePFASSTCollocationProblem # matrix_plot(QD) # print QD.shape, Q.shape # show_call_order(SimplePFASSTCollocationProblem, '__init__') # heat_map(PFASSTCollocProb.rhs.reshape(-1, num_space)) # heat_map(sol.reshape(-1, num_space)) # heat_map(sol_precond.reshape(-1, num_space)) # heat_map(sol_precond.reshape(-1, num_space)-sol.reshape(-1, num_space)) # test the smoothers # jac_smoother = WeightedBlockJacobi(PFASSTCollocProb,2.0/3.0) # gs_smoother = BlockGaussSeidel(PFASSTCollocProb) # print 'Next build a really simple PFASST-Solver with two levels, where the space problem is just solved' # c_strat = lambda nx, ny: (nx / 2, ny) # s_pfasst_mgrid = SimplePFASSTMultigrid(PFASSTCollocProb, SolverSmoother, 4, c_strat=c_strat) # print '...done, attach Smoother' # s_pfasst_mgrid.attach_smoother(SimplePFASSTSmoother) # print '...done, attach TransferOperator' # # first set the options # opts = {'space': {'i_ord': 4, 'r_ord': 2, 'periodic': False}, # 'time': {'i_ord': 4, 'r_ord': 2, 'periodic': False, 'new_nodes': CollGaussRadau_Right(5, 0.0, 1.0).nodes}} # opt_list = [opts]*3 # s_pfasst_mgrid.attach_transfer(SimplePFASSTTransfer, opt_list) # print '...done, check hierarchy chosen' # print 'ndofs_list', s_pfasst_mgrid.ndofs_list # # one_v_cycle = s_pfasst_mgrid.do_v_cycle_recursive(init, PFASSTCollocProb.rhs, 0, 1, 0) # print 'Error after 1 V-cycle', np.linalg.norm(sol - one_v_cycle, 2) # first the setup # now the analyser with the canonical parts # T_v_cycle, P_inv_v_cycle = pfasst_analyser.get_v_cycle_it_matrix() # sol_vec = pfasst_setup.solution(dt) # err_vec_list = [sol_vec - init] # for i in range(9): # err_vec_list.append(T_v_cycle.dot(err_vec_list[-1])) # print "V-Cycle - errors:\t\t", map(lambda x: np.linalg.norm(x, 2), err_vec_list) # # ARGGGGGHHHHHHHH, passt nicht sofort | 1.93639 | 2 |
tests/kafka/test_mappers.py | sauljabin/kaskade | 16 | 6619654 | <filename>tests/kafka/test_mappers.py
from unittest import TestCase
from kaskade.kafka.mappers import (
metadata_to_broker,
metadata_to_group,
metadata_to_group_partition,
metadata_to_partition,
metadata_to_topic,
)
from tests.kafka import (
random_broker_metadata,
random_group_metadata,
random_partition_metadata,
random_topic_metadata,
random_topic_partition_metadata,
)
class TestMappers(TestCase):
def test_metadata_to_broker(self):
metadata = random_broker_metadata()
actual = metadata_to_broker(metadata)
self.assertEqual(metadata.id, actual.id)
self.assertEqual(metadata.port, actual.port)
self.assertEqual(metadata.host, actual.host)
def test_metadata_to_group(self):
metadata = random_group_metadata()
metadata_broker = metadata.broker
actual = metadata_to_group(metadata)
actual_broker = actual.broker
self.assertEqual(metadata.id, actual.id)
self.assertEqual(metadata.state, actual.state)
self.assertListEqual([], actual.partitions)
self.assertEqual([], actual.members)
self.assertEqual(metadata_broker.id, actual_broker.id)
self.assertEqual(metadata_broker.port, actual_broker.port)
self.assertEqual(metadata_broker.host, actual_broker.host)
def test_metadata_to_partition(self):
metadata = random_partition_metadata()
actual = metadata_to_partition(metadata)
self.assertEqual(metadata.id, actual.id)
self.assertEqual(metadata.isrs, actual.isrs)
self.assertEqual(metadata.leader, actual.leader)
self.assertEqual(metadata.replicas, actual.replicas)
def test_metadata_to_group_partition(self):
metadata = random_topic_partition_metadata()
actual = metadata_to_group_partition(metadata)
self.assertEqual(metadata.partition, actual.id)
self.assertEqual(metadata.topic, actual.topic)
self.assertEqual(metadata.offset, actual.offset)
self.assertEqual("", actual.group)
self.assertEqual(0, actual.high)
self.assertEqual(0, actual.low)
def test_metadata_to_topic(self):
metadata = random_topic_metadata()
actual = metadata_to_topic(metadata)
self.assertEqual(metadata.topic, actual.name)
self.assertListEqual([], actual.groups)
self.assertListEqual([], actual.partitions)
| <filename>tests/kafka/test_mappers.py
from unittest import TestCase
from kaskade.kafka.mappers import (
metadata_to_broker,
metadata_to_group,
metadata_to_group_partition,
metadata_to_partition,
metadata_to_topic,
)
from tests.kafka import (
random_broker_metadata,
random_group_metadata,
random_partition_metadata,
random_topic_metadata,
random_topic_partition_metadata,
)
class TestMappers(TestCase):
def test_metadata_to_broker(self):
metadata = random_broker_metadata()
actual = metadata_to_broker(metadata)
self.assertEqual(metadata.id, actual.id)
self.assertEqual(metadata.port, actual.port)
self.assertEqual(metadata.host, actual.host)
def test_metadata_to_group(self):
metadata = random_group_metadata()
metadata_broker = metadata.broker
actual = metadata_to_group(metadata)
actual_broker = actual.broker
self.assertEqual(metadata.id, actual.id)
self.assertEqual(metadata.state, actual.state)
self.assertListEqual([], actual.partitions)
self.assertEqual([], actual.members)
self.assertEqual(metadata_broker.id, actual_broker.id)
self.assertEqual(metadata_broker.port, actual_broker.port)
self.assertEqual(metadata_broker.host, actual_broker.host)
def test_metadata_to_partition(self):
metadata = random_partition_metadata()
actual = metadata_to_partition(metadata)
self.assertEqual(metadata.id, actual.id)
self.assertEqual(metadata.isrs, actual.isrs)
self.assertEqual(metadata.leader, actual.leader)
self.assertEqual(metadata.replicas, actual.replicas)
def test_metadata_to_group_partition(self):
metadata = random_topic_partition_metadata()
actual = metadata_to_group_partition(metadata)
self.assertEqual(metadata.partition, actual.id)
self.assertEqual(metadata.topic, actual.topic)
self.assertEqual(metadata.offset, actual.offset)
self.assertEqual("", actual.group)
self.assertEqual(0, actual.high)
self.assertEqual(0, actual.low)
def test_metadata_to_topic(self):
metadata = random_topic_metadata()
actual = metadata_to_topic(metadata)
self.assertEqual(metadata.topic, actual.name)
self.assertListEqual([], actual.groups)
self.assertListEqual([], actual.partitions)
| none | 1 | 2.422028 | 2 | |
src/model.py | dasdachs/wd-1-scraping-exercise | 0 | 6619655 | import datetime
import math
from typing import List
from .utils import price_string_to_number
class SearchItem(object):
def __init__(
self, title: str, url: str, published_at: str, description: str, price: str
) -> None:
self.title: str = title
self.url: str = url
# Convert to datetime and remove the current self.published_at
# self.published_at: datetime.datetime = published_at
self.published_at: str = published_at # Change start to self.published_at
self.description: str = description
self._price: str = price
self.price: float = price_string_to_number(price)
def price_str(self) -> str:
"""Add currency to the price data or return TDB (to be discussed)."""
if self.price != math.nan:
return f"{self.price} €"
return "TBD"
def serialize_to_csv(self) -> List[str]:
# Return all the data as a row in a csv file
raise NotImplemented
def __lt__(self, other):
return self.price < other.price
def __gt__(self, other):
return self.price > other.price
def __repr__(self) -> str:
"""The representation in the code"""
return (
f'SearchItem(title="{self.title}", url="{self.url}", published_at="{self.published_at}",'
f'description="{self.description}", price="{self._price}")'
)
| import datetime
import math
from typing import List
from .utils import price_string_to_number
class SearchItem(object):
def __init__(
self, title: str, url: str, published_at: str, description: str, price: str
) -> None:
self.title: str = title
self.url: str = url
# Convert to datetime and remove the current self.published_at
# self.published_at: datetime.datetime = published_at
self.published_at: str = published_at # Change start to self.published_at
self.description: str = description
self._price: str = price
self.price: float = price_string_to_number(price)
def price_str(self) -> str:
"""Add currency to the price data or return TDB (to be discussed)."""
if self.price != math.nan:
return f"{self.price} €"
return "TBD"
def serialize_to_csv(self) -> List[str]:
# Return all the data as a row in a csv file
raise NotImplemented
def __lt__(self, other):
return self.price < other.price
def __gt__(self, other):
return self.price > other.price
def __repr__(self) -> str:
"""The representation in the code"""
return (
f'SearchItem(title="{self.title}", url="{self.url}", published_at="{self.published_at}",'
f'description="{self.description}", price="{self._price}")'
)
| en | 0.648138 | # Convert to datetime and remove the current self.published_at # self.published_at: datetime.datetime = published_at # Change start to self.published_at Add currency to the price data or return TDB (to be discussed). # Return all the data as a row in a csv file The representation in the code | 3.364658 | 3 |
common/django/managers.py | FeiChaiCom/django-netdisk | 6 | 6619656 | <reponame>FeiChaiCom/django-netdisk
# -*- coding: utf-8 -*-
import django
from django.db import models
from django.db.models.query import QuerySet
class SoftDeleteQuerySet(QuerySet):
def delete(self):
return super(SoftDeleteQuerySet, self).update(is_deleted=True)
def hard_delete(self):
return super(SoftDeleteQuerySet, self).delete()
class Manager(models.Manager):
"""支持软删除"""
def get_queryset(self):
return SoftDeleteQuerySet(self.model).filter(is_deleted=False)
try:
from mptt.managers import TreeManager
if django.VERSION >= (1, 6):
# TreeManager bug:
if "get_query_set" in TreeManager.__dict__:
# TreeManager should not define this, it messes things up.
del TreeManager.get_query_set
# See also:
# https://github.com/django-mptt/django-mptt/pull/388
# Once this has been merged, a new release for django-mptt has been
# made, and we can specify the new version in our requirements, this
# hack can be removed.
class BaseTreeManager(TreeManager):
"""soft delete: objects.delete()"""
def get_queryset(self):
return SoftDeleteQuerySet(self.model, using=self._db).filter(is_deleted=False)
# return SoftDeleteQuerySet(self.model).select_related("parent")
except Exception:
pass
| # -*- coding: utf-8 -*-
import django
from django.db import models
from django.db.models.query import QuerySet
class SoftDeleteQuerySet(QuerySet):
def delete(self):
return super(SoftDeleteQuerySet, self).update(is_deleted=True)
def hard_delete(self):
return super(SoftDeleteQuerySet, self).delete()
class Manager(models.Manager):
"""支持软删除"""
def get_queryset(self):
return SoftDeleteQuerySet(self.model).filter(is_deleted=False)
try:
from mptt.managers import TreeManager
if django.VERSION >= (1, 6):
# TreeManager bug:
if "get_query_set" in TreeManager.__dict__:
# TreeManager should not define this, it messes things up.
del TreeManager.get_query_set
# See also:
# https://github.com/django-mptt/django-mptt/pull/388
# Once this has been merged, a new release for django-mptt has been
# made, and we can specify the new version in our requirements, this
# hack can be removed.
class BaseTreeManager(TreeManager):
"""soft delete: objects.delete()"""
def get_queryset(self):
return SoftDeleteQuerySet(self.model, using=self._db).filter(is_deleted=False)
# return SoftDeleteQuerySet(self.model).select_related("parent")
except Exception:
pass | en | 0.723586 | # -*- coding: utf-8 -*- 支持软删除 # TreeManager bug: # TreeManager should not define this, it messes things up. # See also: # https://github.com/django-mptt/django-mptt/pull/388 # Once this has been merged, a new release for django-mptt has been # made, and we can specify the new version in our requirements, this # hack can be removed. soft delete: objects.delete() # return SoftDeleteQuerySet(self.model).select_related("parent") | 2.240441 | 2 |
application/registration/models.py | roklem314/PotilasArkisto | 2 | 6619657 | from application import db,app
from flask_login import current_user
from application.models import Base
from application.role.forms import RoleForm
from application.role.models import Role
from application.location.models import Location
from sqlalchemy.sql import text
class Accounts(Base):
__tablename__ = "account"
name = db.Column('name', db.String(144), nullable=False)
email = db.Column('email', db.String(25), unique=True)
password = db.Column('password', db.String())
appts = db.relationship("Appointment", backref='account', lazy=True)
loacation_id= db.Column(db.Integer, db.ForeignKey('location.id'),
nullable=True)
role = db.relationship("user_role",backref = 'account',lazy = True)
def __init__(self, name,email,password):
self.name = name
self.email = email
self.password = password
def get_id(self):
return self.id
def is_active(self):
return True
def is_anonymous(self):
return False
def is_authenticated(self):
return True
@staticmethod
def roles(email):
stmt = text("SELECT Role.role FROM account,Role "
" WHERE (account.id = Role.id) AND (account.email = :email) ").params(email = email)
res = db.engine.execute(stmt)
response = []
for row in res:
response.append({row[0]})
return response
| from application import db,app
from flask_login import current_user
from application.models import Base
from application.role.forms import RoleForm
from application.role.models import Role
from application.location.models import Location
from sqlalchemy.sql import text
class Accounts(Base):
__tablename__ = "account"
name = db.Column('name', db.String(144), nullable=False)
email = db.Column('email', db.String(25), unique=True)
password = db.Column('password', db.String())
appts = db.relationship("Appointment", backref='account', lazy=True)
loacation_id= db.Column(db.Integer, db.ForeignKey('location.id'),
nullable=True)
role = db.relationship("user_role",backref = 'account',lazy = True)
def __init__(self, name,email,password):
self.name = name
self.email = email
self.password = password
def get_id(self):
return self.id
def is_active(self):
return True
def is_anonymous(self):
return False
def is_authenticated(self):
return True
@staticmethod
def roles(email):
stmt = text("SELECT Role.role FROM account,Role "
" WHERE (account.id = Role.id) AND (account.email = :email) ").params(email = email)
res = db.engine.execute(stmt)
response = []
for row in res:
response.append({row[0]})
return response
| none | 1 | 2.679035 | 3 | |
enthought/pyface/util/python_stc.py | enthought/etsproxy | 3 | 6619658 | # proxy module
from pyface.util.python_stc import *
| # proxy module
from pyface.util.python_stc import *
| es | 0.125187 | # proxy module | 1.097323 | 1 |
api/api_gateway/api/tests/product_test_helper.py | fga-eps-mds/2018.2-Integra-Vendas | 6 | 6619659 | from django.conf import settings
import requests
def create_product(name = None, fk_vendor = None, price = None, photo = None, description = None, token = None):
data = {
'fk_vendor': fk_vendor,
'name': name,
'price': price,
'photo': photo,
'description': description,
'token': token
}
return data | from django.conf import settings
import requests
def create_product(name = None, fk_vendor = None, price = None, photo = None, description = None, token = None):
data = {
'fk_vendor': fk_vendor,
'name': name,
'price': price,
'photo': photo,
'description': description,
'token': token
}
return data | none | 1 | 1.932515 | 2 | |
qiskit_experiments/library/characterization/readout_angle.py | QuantumHardware/qiskit-experiments | 72 | 6619660 | # This code is part of Qiskit.
#
# (C) Copyright IBM 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Readout Angle Experiment class.
"""
from typing import List, Optional
from qiskit.circuit import QuantumCircuit
from qiskit.qobj.utils import MeasLevel
from qiskit.providers.backend import Backend
from qiskit_experiments.framework import BaseExperiment, Options
from qiskit_experiments.library.characterization.analysis.readout_angle_analysis import (
ReadoutAngleAnalysis,
)
class ReadoutAngle(BaseExperiment):
r"""
Readout angle experiment class
# section: overview
Design and analyze experiments for estimating readout angle of the qubit.
The readout angle is the average of two angles: the angle of the IQ
cluster center of the ground state, and the angle of the IQ cluster center
of the excited state.
Each experiment consists of the following steps:
1. Circuits generation: two circuits, the first circuit measures the qubit
in the ground state, the second circuit sets the qubit in the excited state
and measures it. Measurements are in level 1 (kerneled).
2. Backend execution: actually running the circuits on the device
(or a simulator that supports level 1 measurements). The backend returns
the cluster centers of the ground and excited states.
3. Analysis of results: return the average of the angles of the two centers.
# section: analysis_ref
:py:class:`ReadoutAngleAnalysis`
"""
@classmethod
def _default_run_options(cls) -> Options:
"""Default run options."""
options = super()._default_run_options()
options.meas_level = MeasLevel.KERNELED
options.meas_return = "avg"
return options
def __init__(
self,
qubit: int,
backend: Optional[Backend] = None,
):
"""
Initialize the readout angle experiment class
Args:
qubit: the qubit whose readout angle is to be estimated
backend: Optional, the backend to run the experiment on.
"""
# Initialize base experiment
super().__init__([qubit], analysis=ReadoutAngleAnalysis(), backend=backend)
def circuits(self) -> List[QuantumCircuit]:
"""
Return a list of experiment circuits
Returns:
The experiment circuits
"""
circ0 = QuantumCircuit(1, 1)
circ0.measure(0, 0)
circ1 = QuantumCircuit(1, 1)
circ1.x(0)
circ1.measure(0, 0)
for i, circ in enumerate([circ0, circ1]):
circ.metadata = {
"experiment_type": self._type,
"qubit": self.physical_qubits[0],
"xval": i,
}
return [circ0, circ1]
| # This code is part of Qiskit.
#
# (C) Copyright IBM 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
Readout Angle Experiment class.
"""
from typing import List, Optional
from qiskit.circuit import QuantumCircuit
from qiskit.qobj.utils import MeasLevel
from qiskit.providers.backend import Backend
from qiskit_experiments.framework import BaseExperiment, Options
from qiskit_experiments.library.characterization.analysis.readout_angle_analysis import (
ReadoutAngleAnalysis,
)
class ReadoutAngle(BaseExperiment):
    r"""
    Experiment for estimating the readout angle of a single qubit.

    # section: overview
    The readout angle is computed as the average of two angles: the angle
    of the IQ cluster center associated with the ground state, and the
    angle of the IQ cluster center associated with the excited state.

    The experiment proceeds in three steps:

    1. Circuit generation: two circuits are produced. The first measures
       the qubit in the ground state; the second applies an X gate before
       measuring, so the qubit is measured in the excited state.
       Measurements are performed at level 1 (kerneled).
    2. Backend execution: the circuits are run on the device (or on a
       simulator that supports level 1 measurements), which returns the
       cluster centers of the ground and excited states.
    3. Analysis of results: the average of the angles of the two centers
       is returned.

    # section: analysis_ref
    :py:class:`ReadoutAngleAnalysis`
    """

    @classmethod
    def _default_run_options(cls) -> Options:
        """Default run options: averaged level-1 (kerneled) measurements."""
        run_options = super()._default_run_options()
        run_options.meas_level = MeasLevel.KERNELED
        run_options.meas_return = "avg"
        return run_options

    def __init__(
        self,
        qubit: int,
        backend: Optional[Backend] = None,
    ):
        """Create a new readout angle experiment.

        Args:
            qubit: the qubit whose readout angle is to be estimated
            backend: Optional, the backend to run the experiment on.
        """
        # Delegate to the base experiment with the dedicated readout angle analysis.
        super().__init__([qubit], analysis=ReadoutAngleAnalysis(), backend=backend)

    def circuits(self) -> List[QuantumCircuit]:
        """Build the two measurement circuits for the readout angle estimate.

        Returns:
            A ground-state measurement circuit followed by an
            excited-state measurement circuit.
        """
        ground = QuantumCircuit(1, 1)
        ground.measure(0, 0)

        excited = QuantumCircuit(1, 1)
        excited.x(0)
        excited.measure(0, 0)

        prepared = [ground, excited]
        # xval 0 marks the ground-state circuit, xval 1 the excited-state one.
        for xval, circuit in enumerate(prepared):
            circuit.metadata = {
                "experiment_type": self._type,
                "qubit": self.physical_qubits[0],
                "xval": xval,
            }
        return prepared
| en | 0.835417 | # This code is part of Qiskit. # # (C) Copyright IBM 2021. # # This code is licensed under the Apache License, Version 2.0. You may # obtain a copy of this license in the LICENSE.txt file in the root directory # of this source tree or at http://www.apache.org/licenses/LICENSE-2.0. # # Any modifications or derivative works of this code must retain this # copyright notice, and modified files need to carry a notice indicating # that they have been altered from the originals. Readout Angle Experiment class. Readout angle experiment class # section: overview Design and analyze experiments for estimating readout angle of the qubit. The readout angle is the average of two angles: the angle of the IQ cluster center of the ground state, and the angle of the IQ cluster center of the excited state. Each experiment consists of the following steps: 1. Circuits generation: two circuits, the first circuit measures the qubit in the ground state, the second circuit sets the qubit in the excited state and measures it. Measurements are in level 1 (kerneled). 2. Backend execution: actually running the circuits on the device (or a simulator that supports level 1 measurements). The backend returns the cluster centers of the ground and excited states. 3. Analysis of results: return the average of the angles of the two centers. # section: analysis_ref :py:class:`ReadoutAngleAnalysis` Default run options. Initialize the readout angle experiment class Args: qubit: the qubit whose readout angle is to be estimated backend: Optional, the backend to run the experiment on. # Initialize base experiment Return a list of experiment circuits Returns: The experiment circuits | 2.934328 | 3 |
api_auth/views.py | chohankyun/chohankyun | 37 | 6619661 | import json
from django.contrib.auth import get_user_model, user_logged_in
from django.http import HttpResponse
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.debug import sensitive_post_parameters
from rest_framework import status, exceptions
from rest_framework.generics import GenericAPIView, RetrieveAPIView, DestroyAPIView, UpdateAPIView
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from api_auth.serializers import (LoginSerializer, JWTSerializer, SessionUserSerializer, UsernameFindSerializer,
PasswordResetSerializer, PasswordResetConfirmSerializer, PasswordChangeSerializer,
RegisterSerializer, EmailConfirmSerializer)
sensitive_post_parameters_m = method_decorator(
sensitive_post_parameters(
'password', '<PASSWORD>', '<PASSWORD>', '<PASSWORD>'
)
)
class LoginView(GenericAPIView):
permission_classes = (AllowAny,)
serializer_class = LoginSerializer
@sensitive_post_parameters_m
def dispatch(self, *args, **kwargs):
return super(LoginView, self).dispatch(*args, **kwargs)
def post(self, request):
user = self.login(request)
user_logged_in.send(sender=user.__class__, request=request, user=user)
return self.get_response(user)
def login(self, request):
serializer = self.get_serializer(data=request.data, context={'request': request})
serializer.is_valid(raise_exception=True)
return serializer.validated_data['user']
def get_response(self, user):
jwt_serializer = JWTSerializer(instance=user, context={'request': self.request})
return Response(jwt_serializer.data, status=status.HTTP_200_OK)
class StatusView(GenericAPIView):
permission_classes = (IsAuthenticated,)
serializer_class = JWTSerializer
def get(self, request):
serializer = self.get_serializer(instance=request.user, context={'request': request})
return Response(serializer.data, status=status.HTTP_200_OK)
class SessionUserView(RetrieveAPIView):
permission_classes = (IsAuthenticated,)
serializer_class = SessionUserSerializer
def get_object(self):
user_id = self.request.user.id
return get_user_model().objects.get(pk=user_id)
class UsernameFindView(GenericAPIView):
permission_classes = (AllowAny,)
serializer_class = UsernameFindSerializer
def post(self, request):
serializer = self.get_serializer(data=request.data, context={'request': request})
serializer.is_valid(raise_exception=True)
serializer.save()
return Response('Your username has been sent to your e-mail address.')
class PasswordResetView(GenericAPIView):
permission_classes = (AllowAny,)
serializer_class = PasswordResetSerializer
def post(self, request):
serializer = self.get_serializer(data=request.data, context={'request': request})
serializer.is_valid(raise_exception=True)
serializer.save()
return Response('Password reset e-mail has been sent.')
class PasswordResetConfirmView(GenericAPIView):
permission_classes = (AllowAny,)
serializer_class = PasswordResetConfirmSerializer
def get(self, request, **kwargs):
try:
serializer = self.get_serializer(data=kwargs, context={'request': request})
serializer.is_valid(raise_exception=True)
serializer.save()
except exceptions.ValidationError as e:
return HttpResponse(e.default_detail, status=status.HTTP_400_BAD_REQUEST)
return HttpResponse(_('Password has been reset with the new password.'))
class PasswordChangeView(GenericAPIView):
permission_classes = (IsAuthenticated,)
serializer_class = PasswordChangeSerializer
@sensitive_post_parameters_m
def dispatch(self, *args, **kwargs):
return super(PasswordChangeView, self).dispatch(*args, **kwargs)
def post(self, request):
serializer = self.get_serializer(data=request.data, context={'request': request})
serializer.is_valid(raise_exception=True)
serializer.save()
return Response('Password has been changed with the new password.')
class RegisterView(GenericAPIView):
permission_classes = (AllowAny,)
serializer_class = RegisterSerializer
@sensitive_post_parameters_m
def dispatch(self, *args, **kwargs):
return super(RegisterView, self).dispatch(*args, **kwargs)
def post(self, request):
serializer = self.get_serializer(data=request.data, context={'request': request})
serializer.is_valid(raise_exception=True)
serializer.save()
return Response('Verification email sent.')
class EmailConfirmView(GenericAPIView):
permission_classes = (AllowAny,)
serializer_class = EmailConfirmSerializer
def get(self, request, **kwargs):
try:
serializer = self.get_serializer(data=kwargs, context={'request': request})
serializer.is_valid(raise_exception=True)
serializer.save()
except exceptions.ValidationError as e:
return HttpResponse(e.default_detail, status=status.HTTP_400_BAD_REQUEST)
return HttpResponse(_('Your email has been verified.'))
class UserResetView(DestroyAPIView):
permission_classes = (AllowAny,)
serializer_class = LoginSerializer
def get_object(self):
username = json.loads(self.request.body.decode('utf-8'))['username']
password = json.loads(self.request.body.decode('utf-8'))['password']
serializer = self.get_serializer(context={'request': self.request})
return serializer.authenticate(username=username, password=password)
class SessionUserDeleteView(DestroyAPIView):
permission_classes = (IsAuthenticated,)
def get_object(self):
user_id = self.request.user.id
return get_user_model().objects.get(pk=user_id)
def perform_destroy(self, instance):
if not instance.check_password(self.request.body.decode('utf-8')):
raise exceptions.AuthenticationFailed('Invalid password.')
instance.delete()
class SessionUserUpdateView(UpdateAPIView):
permission_classes = (IsAuthenticated,)
serializer_class = SessionUserSerializer
def get_object(self):
user_id = self.request.user.id
return get_user_model().objects.get(pk=user_id)
def perform_update(self, serializer):
if not serializer.instance.check_password(self.request.data['password']):
raise exceptions.AuthenticationFailed('Invalid password.')
serializer.save()
def get_response(self, user):
jwt_serializer = JWTSerializer(instance=user, context={'request': self.request})
return Response(jwt_serializer.data, status=status.HTTP_200_OK)
def update(self, request, *args, **kwargs):
partial = kwargs.pop('partial', False)
instance = self.get_object()
serializer = self.get_serializer(instance, data=request.data, partial=partial)
serializer.is_valid(raise_exception=True)
self.perform_update(serializer)
user = get_user_model().objects.get(pk=instance.id)
return self.get_response(user)
| import json
from django.contrib.auth import get_user_model, user_logged_in
from django.http import HttpResponse
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.debug import sensitive_post_parameters
from rest_framework import status, exceptions
from rest_framework.generics import GenericAPIView, RetrieveAPIView, DestroyAPIView, UpdateAPIView
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.response import Response
from api_auth.serializers import (LoginSerializer, JWTSerializer, SessionUserSerializer, UsernameFindSerializer,
PasswordResetSerializer, PasswordResetConfirmSerializer, PasswordChangeSerializer,
RegisterSerializer, EmailConfirmSerializer)
sensitive_post_parameters_m = method_decorator(
sensitive_post_parameters(
'password', '<PASSWORD>', '<PASSWORD>', '<PASSWORD>'
)
)
class LoginView(GenericAPIView):
permission_classes = (AllowAny,)
serializer_class = LoginSerializer
@sensitive_post_parameters_m
def dispatch(self, *args, **kwargs):
return super(LoginView, self).dispatch(*args, **kwargs)
def post(self, request):
user = self.login(request)
user_logged_in.send(sender=user.__class__, request=request, user=user)
return self.get_response(user)
def login(self, request):
serializer = self.get_serializer(data=request.data, context={'request': request})
serializer.is_valid(raise_exception=True)
return serializer.validated_data['user']
def get_response(self, user):
jwt_serializer = JWTSerializer(instance=user, context={'request': self.request})
return Response(jwt_serializer.data, status=status.HTTP_200_OK)
class StatusView(GenericAPIView):
permission_classes = (IsAuthenticated,)
serializer_class = JWTSerializer
def get(self, request):
serializer = self.get_serializer(instance=request.user, context={'request': request})
return Response(serializer.data, status=status.HTTP_200_OK)
class SessionUserView(RetrieveAPIView):
permission_classes = (IsAuthenticated,)
serializer_class = SessionUserSerializer
def get_object(self):
user_id = self.request.user.id
return get_user_model().objects.get(pk=user_id)
class UsernameFindView(GenericAPIView):
permission_classes = (AllowAny,)
serializer_class = UsernameFindSerializer
def post(self, request):
serializer = self.get_serializer(data=request.data, context={'request': request})
serializer.is_valid(raise_exception=True)
serializer.save()
return Response('Your username has been sent to your e-mail address.')
class PasswordResetView(GenericAPIView):
permission_classes = (AllowAny,)
serializer_class = PasswordResetSerializer
def post(self, request):
serializer = self.get_serializer(data=request.data, context={'request': request})
serializer.is_valid(raise_exception=True)
serializer.save()
return Response('Password reset e-mail has been sent.')
class PasswordResetConfirmView(GenericAPIView):
permission_classes = (AllowAny,)
serializer_class = PasswordResetConfirmSerializer
def get(self, request, **kwargs):
try:
serializer = self.get_serializer(data=kwargs, context={'request': request})
serializer.is_valid(raise_exception=True)
serializer.save()
except exceptions.ValidationError as e:
return HttpResponse(e.default_detail, status=status.HTTP_400_BAD_REQUEST)
return HttpResponse(_('Password has been reset with the new password.'))
class PasswordChangeView(GenericAPIView):
permission_classes = (IsAuthenticated,)
serializer_class = PasswordChangeSerializer
@sensitive_post_parameters_m
def dispatch(self, *args, **kwargs):
return super(PasswordChangeView, self).dispatch(*args, **kwargs)
def post(self, request):
serializer = self.get_serializer(data=request.data, context={'request': request})
serializer.is_valid(raise_exception=True)
serializer.save()
return Response('Password has been changed with the new password.')
class RegisterView(GenericAPIView):
permission_classes = (AllowAny,)
serializer_class = RegisterSerializer
@sensitive_post_parameters_m
def dispatch(self, *args, **kwargs):
return super(RegisterView, self).dispatch(*args, **kwargs)
def post(self, request):
serializer = self.get_serializer(data=request.data, context={'request': request})
serializer.is_valid(raise_exception=True)
serializer.save()
return Response('Verification email sent.')
class EmailConfirmView(GenericAPIView):
permission_classes = (AllowAny,)
serializer_class = EmailConfirmSerializer
def get(self, request, **kwargs):
try:
serializer = self.get_serializer(data=kwargs, context={'request': request})
serializer.is_valid(raise_exception=True)
serializer.save()
except exceptions.ValidationError as e:
return HttpResponse(e.default_detail, status=status.HTTP_400_BAD_REQUEST)
return HttpResponse(_('Your email has been verified.'))
class UserResetView(DestroyAPIView):
permission_classes = (AllowAny,)
serializer_class = LoginSerializer
def get_object(self):
username = json.loads(self.request.body.decode('utf-8'))['username']
password = json.loads(self.request.body.decode('utf-8'))['password']
serializer = self.get_serializer(context={'request': self.request})
return serializer.authenticate(username=username, password=password)
class SessionUserDeleteView(DestroyAPIView):
permission_classes = (IsAuthenticated,)
def get_object(self):
user_id = self.request.user.id
return get_user_model().objects.get(pk=user_id)
def perform_destroy(self, instance):
if not instance.check_password(self.request.body.decode('utf-8')):
raise exceptions.AuthenticationFailed('Invalid password.')
instance.delete()
class SessionUserUpdateView(UpdateAPIView):
permission_classes = (IsAuthenticated,)
serializer_class = SessionUserSerializer
def get_object(self):
user_id = self.request.user.id
return get_user_model().objects.get(pk=user_id)
def perform_update(self, serializer):
if not serializer.instance.check_password(self.request.data['password']):
raise exceptions.AuthenticationFailed('Invalid password.')
serializer.save()
def get_response(self, user):
jwt_serializer = JWTSerializer(instance=user, context={'request': self.request})
return Response(jwt_serializer.data, status=status.HTTP_200_OK)
def update(self, request, *args, **kwargs):
partial = kwargs.pop('partial', False)
instance = self.get_object()
serializer = self.get_serializer(instance, data=request.data, partial=partial)
serializer.is_valid(raise_exception=True)
self.perform_update(serializer)
user = get_user_model().objects.get(pk=instance.id)
return self.get_response(user)
| none | 1 | 2.011452 | 2 | |
test/test_Player.py | christianceb/chess-players-python | 0 | 6619662 | <gh_stars>0
from unittest import TestCase
from app.Player import Player
class TestPlayer(TestCase):
def __init__(self, *args, **kwargs):
super(TestPlayer, self).__init__(*args, **kwargs)
self.player1 = Player("A", "B", "C", "D", "E", "F")
self.player2 = Player("G", "H", "I", "J", "K", "L")
self.player3 = Player("A", "B", "C", "D", "E", "G")
self.player4 = Player("A", "B", "D", "E", "F", "G")
self.player5 = Player("Le", "Quick", "Browne", "Focks", "Jumps", "Over");
def test_eq(self):
"""Test equality override on Player class on 1 object"""
self.assertEqual(self.player1, self.player1)
def test_ne(self):
"""Test inverse of equality override on Player class"""
self.assertNotEqual(self.player2, self.player1)
def test_lt(self):
"""Test "less than" override on Player class"""
self.assertLess(self.player1, self.player3)
def test_lt2(self):
"""Test "less than" override on Player class where the higher value is determined in the middle of the
properties being evaluated
"""
self.assertLess(self.player1, self.player4)
def test_gt(self):
"""Test "greater than" override on Player class"""
self.assertGreater(self.player2, self.player1)
def test_contains(self):
"""Test if player contains certain keywords and is case insensitive"""
self.assertTrue(self.player5.contains("V")) | from unittest import TestCase
from app.Player import Player
class TestPlayer(TestCase):
def __init__(self, *args, **kwargs):
super(TestPlayer, self).__init__(*args, **kwargs)
self.player1 = Player("A", "B", "C", "D", "E", "F")
self.player2 = Player("G", "H", "I", "J", "K", "L")
self.player3 = Player("A", "B", "C", "D", "E", "G")
self.player4 = Player("A", "B", "D", "E", "F", "G")
self.player5 = Player("Le", "Quick", "Browne", "Focks", "Jumps", "Over");
def test_eq(self):
"""Test equality override on Player class on 1 object"""
self.assertEqual(self.player1, self.player1)
def test_ne(self):
"""Test inverse of equality override on Player class"""
self.assertNotEqual(self.player2, self.player1)
def test_lt(self):
"""Test "less than" override on Player class"""
self.assertLess(self.player1, self.player3)
def test_lt2(self):
"""Test "less than" override on Player class where the higher value is determined in the middle of the
properties being evaluated
"""
self.assertLess(self.player1, self.player4)
def test_gt(self):
"""Test "greater than" override on Player class"""
self.assertGreater(self.player2, self.player1)
def test_contains(self):
"""Test if player contains certain keywords and is case insensitive"""
self.assertTrue(self.player5.contains("V")) | en | 0.851497 | Test equality override on Player class on 1 object Test inverse of equality override on Player class Test "less than" override on Player class Test "less than" override on Player class where the higher value is determined in the middle of the
properties being evaluated Test "greater than" override on Player class Test if player contains certain keywords and is case insensitive | 3.58512 | 4 |
demo/uno-server/led-blink.py | wovo/hwpy | 0 | 6619663 | """
Blink a LED that is connected to pin d13 (the on-board LED)
using a while loop.
"""
import sys
sys.path.append( "../.." )
import hwpy
import time
led = hwpy.gpo( hwpy.pins.d13 )
print( __doc__ )
while True:
led.write( 0 )
time.sleep( 0.2 )
led.write( 1 )
time.sleep( 0.2 ) | """
Blink a LED that is connected to pin d13 (the on-board LED)
using a while loop.
"""
import sys
sys.path.append( "../.." )
import hwpy
import time
led = hwpy.gpo( hwpy.pins.d13 )
print( __doc__ )
while True:
led.write( 0 )
time.sleep( 0.2 )
led.write( 1 )
time.sleep( 0.2 ) | en | 0.929418 | Blink a LED that is connected to pin d13 (the on-board LED) using a while loop. | 3.16415 | 3 |
bin/diffsplice_fishers_exact.py | baraaorabi/flair | 139 | 6619664 | #!/usr/bin/env python3
import sys, csv, os
import scipy.stats as sps
try:
events_quant = open(sys.argv[1])
colname1 = sys.argv[2]
colname2 = sys.argv[3]
outfilename = sys.argv[4]
except:
print('usage: script.py events.quant.tsv colname1 colname2 out.fishers.tsv')
sys.exit()
header = events_quant.readline().rstrip().split('\t')
if colname1 in header:
col1 = header.index(colname1)
else:
sys.stderr.write('Could not find {} in {}\n'.format(colname1, ' '.join(header)))
sys.exit(1)
if colname2 in header:
col2 = header.index(colname2)
else:
sys.stderr.write('Could not find {} in {}\n'.format(colname2, ' '.join(header)))
sys.exit(1)
events = {}
for line in events_quant:
line = line.rstrip().split('\t')
feature = line[0][line[0].find('_')+1:]
if feature not in events:
events[feature] = {}
events[feature]['entries'] = []
events[feature]['counts'] = []
events[feature]['entries'] += [line]
events[feature]['counts'] += [[float(line[col1]), float(line[col2])]]
features_sorted = sorted(events.keys())
with open(outfilename, 'wt') as outfile:
writer = csv.writer(outfile, delimiter='\t', lineterminator=os.linesep)
writer.writerow(header+[colname1+'-'+colname2+'_pval'])
for feature in features_sorted:
for line in events[feature]['entries']:
writer.writerow(line + [sps.fisher_exact(events[feature]['counts'])[1]])
| #!/usr/bin/env python3
import sys, csv, os
import scipy.stats as sps
try:
events_quant = open(sys.argv[1])
colname1 = sys.argv[2]
colname2 = sys.argv[3]
outfilename = sys.argv[4]
except:
print('usage: script.py events.quant.tsv colname1 colname2 out.fishers.tsv')
sys.exit()
header = events_quant.readline().rstrip().split('\t')
if colname1 in header:
col1 = header.index(colname1)
else:
sys.stderr.write('Could not find {} in {}\n'.format(colname1, ' '.join(header)))
sys.exit(1)
if colname2 in header:
col2 = header.index(colname2)
else:
sys.stderr.write('Could not find {} in {}\n'.format(colname2, ' '.join(header)))
sys.exit(1)
events = {}
for line in events_quant:
line = line.rstrip().split('\t')
feature = line[0][line[0].find('_')+1:]
if feature not in events:
events[feature] = {}
events[feature]['entries'] = []
events[feature]['counts'] = []
events[feature]['entries'] += [line]
events[feature]['counts'] += [[float(line[col1]), float(line[col2])]]
features_sorted = sorted(events.keys())
with open(outfilename, 'wt') as outfile:
writer = csv.writer(outfile, delimiter='\t', lineterminator=os.linesep)
writer.writerow(header+[colname1+'-'+colname2+'_pval'])
for feature in features_sorted:
for line in events[feature]['entries']:
writer.writerow(line + [sps.fisher_exact(events[feature]['counts'])[1]])
| fr | 0.221828 | #!/usr/bin/env python3 | 2.374026 | 2 |
src/parse/errors.py | jo3-l/liftoff | 0 | 6619665 | <filename>src/parse/errors.py
class SyntaxError(Exception):
def __init__(self, msg: str, line: int, col: int):
super().__init__(f"{line}:{col}: {msg}")
self.line = line
self.col = col
| <filename>src/parse/errors.py
class SyntaxError(Exception):
def __init__(self, msg: str, line: int, col: int):
super().__init__(f"{line}:{col}: {msg}")
self.line = line
self.col = col
| none | 1 | 2.496457 | 2 | |
Scraping.py | Merzaad/learning_python | 1 | 6619666 | <reponame>Merzaad/learning_python<filename>Scraping.py
import requests
import pandas as pd
from bs4 import BeautifulSoup
url = 'https://news.yahoo.com/'
page = requests.get(url)
soup = BeautifulSoup(page.content,'html.parser')
results = soup.find(id="YDC-Stream")
job_elements = results.find_all("li", class_="js-stream-content Pos(r)")
a= []
for job_element in job_elements:
title_element = job_element.find("h3")
a.append(title_element.text.strip())
yahoo_news = pd.DataFrame(a,columns=['Titles'])
yahoo_news.to_csv('yahoo_news.csv' , index = False)
def keyword(tag):
result = soup.find_all("a" ,string=lambda text: tag in str(text).lower())
a=0
for i in result:
a+=1
print(i.text)
while True:
try:
tag = input("tag? or exit ")
if tag.lower() == exit:
break
else:
keyword(tag)
except:
print("some thing is wrong")
pass
else:
break
| import requests
import pandas as pd
from bs4 import BeautifulSoup
url = 'https://news.yahoo.com/'
page = requests.get(url)
soup = BeautifulSoup(page.content,'html.parser')
results = soup.find(id="YDC-Stream")
job_elements = results.find_all("li", class_="js-stream-content Pos(r)")
a= []
for job_element in job_elements:
title_element = job_element.find("h3")
a.append(title_element.text.strip())
yahoo_news = pd.DataFrame(a,columns=['Titles'])
yahoo_news.to_csv('yahoo_news.csv' , index = False)
def keyword(tag):
result = soup.find_all("a" ,string=lambda text: tag in str(text).lower())
a=0
for i in result:
a+=1
print(i.text)
while True:
try:
tag = input("tag? or exit ")
if tag.lower() == exit:
break
else:
keyword(tag)
except:
print("some thing is wrong")
pass
else:
break | none | 1 | 3.239496 | 3 | |
training_api/application/configuration/templates/scratch_configuration.py | BMW-InnovationLab/BMW-Semantic-Segmentation-Training-GUI | 20 | 6619667 | <filename>training_api/application/configuration/templates/scratch_configuration.py
import datetime
import mxnet as mx
from gluoncv.data import VOCSegmentation
from gluoncv.model_zoo.segbase import get_segmentation_model
from domain.models.paths import Paths
from domain.models.dataset_information import DatasetInformation
from domain.models.hyper_parameter_information import HyperParameterInformation
from domain.services.contracts.abstract_configuration_template import AbstractConfigurationTemplate
from domain.exceptions.configuration_exceptions import ConfigurationError
class ScratchConfiguration(AbstractConfigurationTemplate):
def create_network_configuration(self, dataset_info: DatasetInformation, config: HyperParameterInformation, paths: Paths) -> object:
try:
print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S: ") + "Creating From Scratch Configuration")
print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S: ") + "Using The Following Backbone: " + str(config.backbone))
print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S: ") + "Using The Following Network: " + str(config.network))
VOCSegmentation.NUM_CLASS = dataset_info.num_classes
return get_segmentation_model(model=config.network, backbone=config.backbone, dataset='pascal_voc', norm_layer=mx.gluon.nn.BatchNorm,
crop_size=config.crop_size,
pretrained_base=True, pretrained=False, base_size=config.base_size, aux=False)
except Exception as e:
raise ConfigurationError(configuration_name="_".join([config.network, config.backbone]), additional_message=e.__str__())
| <filename>training_api/application/configuration/templates/scratch_configuration.py
import datetime
import mxnet as mx
from gluoncv.data import VOCSegmentation
from gluoncv.model_zoo.segbase import get_segmentation_model
from domain.models.paths import Paths
from domain.models.dataset_information import DatasetInformation
from domain.models.hyper_parameter_information import HyperParameterInformation
from domain.services.contracts.abstract_configuration_template import AbstractConfigurationTemplate
from domain.exceptions.configuration_exceptions import ConfigurationError
class ScratchConfiguration(AbstractConfigurationTemplate):
def create_network_configuration(self, dataset_info: DatasetInformation, config: HyperParameterInformation, paths: Paths) -> object:
try:
print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S: ") + "Creating From Scratch Configuration")
print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S: ") + "Using The Following Backbone: " + str(config.backbone))
print(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S: ") + "Using The Following Network: " + str(config.network))
VOCSegmentation.NUM_CLASS = dataset_info.num_classes
return get_segmentation_model(model=config.network, backbone=config.backbone, dataset='pascal_voc', norm_layer=mx.gluon.nn.BatchNorm,
crop_size=config.crop_size,
pretrained_base=True, pretrained=False, base_size=config.base_size, aux=False)
except Exception as e:
raise ConfigurationError(configuration_name="_".join([config.network, config.backbone]), additional_message=e.__str__())
| none | 1 | 2.22007 | 2 | |
apu/geo/Coord.py | afeldman/apu | 0 | 6619668 | <reponame>afeldman/apu<filename>apu/geo/Coord.py
""" coordination transformation """
from typing import List, Tuple
import numpy as np
def m2pix(
height: float,
extention: float,
radius: float, # = 6_378_137 ,
distorsion_scaling: float = 1.) -> float:
""" convert from meter in pixel
Arguments:
height(float): image height
extention(float): latitude extention of image in pixel
radius(float): planet radius in metern
distorsion_scaling(float): Scaling factor for
distortion between 0. and 1.
Returns:
float: conversionsfactor pix/m
"""
return (180. / np.pi) * height * \
distorsion_scaling / extention / radius
def km2pix(height: float,
extention: float,
radius: float = 6_378.137,
distorsion_scaling: float = 1.) -> float:
""" convert from kilometer in pixel
Arguments:
height(float): image height
extention(float): latitude extention of image in pixel
radius(float): planet radius in kilometer default is
the earth radius (6_378.137km)
distorsion_scaling(float): Scaling factor for
distortion between 0. and 1.
Returns:
float: conversionsfactor pix/m
"""
assert 0.0 < distorsion_scaling <= 1.0, \
f"apu.geo.Coord: distorsion_scaling {distorsion_scaling} has to" +\
" be in the interval ]0,1]"
return (180. / np.pi) * height * distorsion_scaling / extention / radius
def pix2carree(pixel: List[float],
area: List[Tuple[float]],
image_size: List[int],
origin: str = "upper") -> tuple:
""" Convert image pixel position to Carree lat/long
ASSAMTION: central median is 0 => (long [-180,180[)
Arguments:
pixel(List[float]): (u,v) coordinate in image
area(List[Tuple[float]]): ((u,v)_min, (u,v)_max) the area in the image
to search in
image_size(List[int]): image size in width and height
origin(str): image convention. where is the image origin.
'upper' means the origin [0,0] is in the upper left corner
'lower' means that the image origin is in the lower left corner
Returns:
tuple: (lat, lon) coordinates in a Plate Carree image
"""
lat = (pixel[0] / image_size[0]) * (area[1][0] - area[0][0]) + area[0][0]
lon = (pixel[1] / image_size[1]) * (area[1][1] - area[0][1])
lon = lon + area[0][1] if origin == "lower" else area[1][1] - lon
return (lat, lon)
# pylint: disable=C0103
def carree2pix(coord: List[float],
area: List[Tuple[float]],
image_size: List[int],
origin: str = "upper") -> tuple:
""" Convert Carree lat/long to image poxel position
Arguments:
coord(List[float]): (lat, lon) coordinate in image
area(List[Tuple[float]]): ((u,v)_min, (u,v)_max) the area in the image
to search in
image_size(List[int]): image size in width and height
origin(str): image convention. where is the image origin.
'upper' means the origin [0,0] is in the upper left corner
'lower' means that the image origin is in the lower left corner
Returns:
tuple: (u, v) pixel coordinate
"""
u = image_size[0] * (coord[0] - area[0][0]) / (area[1][0] - area[0][0])
v = image_size[1] / (area[0][1] - area[1][1])
v *= (coord[1] - area[0][1]) if origin == "lower" else (area[1][1]-coord[1])
return (u, v)
| """ coordination transformation """
from typing import List, Tuple
import numpy as np
def m2pix(
height: float,
extention: float,
radius: float, # = 6_378_137 ,
distorsion_scaling: float = 1.) -> float:
""" convert from meter in pixel
Arguments:
height(float): image height
extention(float): latitude extention of image in pixel
radius(float): planet radius in metern
distorsion_scaling(float): Scaling factor for
distortion between 0. and 1.
Returns:
float: conversionsfactor pix/m
"""
return (180. / np.pi) * height * \
distorsion_scaling / extention / radius
def km2pix(height: float,
           extention: float,
           radius: float = 6_378.137,
           distorsion_scaling: float = 1.) -> float:
    """ Conversion factor from kilometres to pixels.

    Arguments:
        height(float): image height
        extention(float): latitude extent of the image in pixel
        radius(float): planet radius in kilometres; defaults to the
            earth radius (6_378.137 km)
        distorsion_scaling(float): scaling factor for distortion,
            taken from the interval ]0, 1]
    Returns:
        float: conversion factor pix/km
    Raises:
        AssertionError: if ``distorsion_scaling`` lies outside ]0, 1]
    """
    assert 0.0 < distorsion_scaling <= 1.0, \
        f"apu.geo.Coord: distorsion_scaling {distorsion_scaling} has to" +\
        " be in the interval ]0,1]"
    degrees_per_radian = 180. / np.pi
    return degrees_per_radian * height * distorsion_scaling / extention / radius
def pix2carree(pixel: List[float],
               area: List[Tuple[float]],
               image_size: List[int],
               origin: str = "upper") -> tuple:
    """ Map an image pixel position to Plate-Carree lat/long.

    ASSUMPTION: the central meridian is 0 => (long [-180, 180[)
    Arguments:
        pixel(List[float]): (u, v) coordinate in the image
        area(List[Tuple[float]]): ((lat, lon)_min, (lat, lon)_max) area
            covered by the image
        image_size(List[int]): image size in width and height
        origin(str): where the image origin [0, 0] sits:
            'upper' for the upper left corner,
            'lower' for the lower left corner
    Returns:
        tuple: (lat, lon) coordinates in a Plate Carree image
    """
    min_corner, max_corner = area[0], area[1]
    lat = (pixel[0] / image_size[0]) * (max_corner[0] - min_corner[0]) + min_corner[0]
    lon_offset = (pixel[1] / image_size[1]) * (max_corner[1] - min_corner[1])
    if origin == "lower":
        lon = lon_offset + min_corner[1]
    else:
        lon = max_corner[1] - lon_offset
    return (lat, lon)
# pylint: disable=C0103
def carree2pix(coord: List[float],
area: List[Tuple[float]],
image_size: List[int],
origin: str = "upper") -> tuple:
""" Convert Carree lat/long to image poxel position
Arguments:
coord(List[float]): (lat, lon) coordinate in image
area(List[Tuple[float]]): ((u,v)_min, (u,v)_max) the area in the image
to search in
image_size(List[int]): image size in width and height
origin(str): image convention. where is the image origin.
'upper' means the origin [0,0] is in the upper left corner
'lower' means that the image origin is in the lower left corner
Returns:
tuple: (u, v) pixel coordinate
"""
u = image_size[0] * (coord[0] - area[0][0]) / (area[1][0] - area[0][0])
v = image_size[1] / (area[0][1] - area[1][1])
v *= (coord[1] - area[0][1]) if origin == "lower" else (area[1][1]-coord[1])
return (u, v) | en | 0.613524 | coordination transformation # = 6_378_137 , convert from meter in pixel
Arguments:
height(float): image height
extention(float): latitude extention of image in pixel
radius(float): planet radius in metern
distorsion_scaling(float): Scaling factor for
distortion between 0. and 1.
Returns:
float: conversionsfactor pix/m convert from kilometer in pixel
Arguments:
height(float): image height
extention(float): latitude extention of image in pixel
radius(float): planet radius in kilometer default is
the earth radius (6_378.137km)
distorsion_scaling(float): Scaling factor for
distortion between 0. and 1.
Returns:
float: conversionsfactor pix/m Convert image pixel position to Carree lat/long
ASSAMTION: central median is 0 => (long [-180,180[)
Arguments:
pixel(List[float]): (u,v) coordinate in image
area(List[Tuple[float]]): ((u,v)_min, (u,v)_max) the area in the image
to search in
image_size(List[int]): image size in width and height
origin(str): image convention. where is the image origin.
'upper' means the origin [0,0] is in the upper left corner
'lower' means that the image origin is in the lower left corner
Returns:
tuple: (lat, lon) coordinates in a Plate Carree image # pylint: disable=C0103 Convert Carree lat/long to image poxel position
Arguments:
coord(List[float]): (lat, lon) coordinate in image
area(List[Tuple[float]]): ((u,v)_min, (u,v)_max) the area in the image
to search in
image_size(List[int]): image size in width and height
origin(str): image convention. where is the image origin.
'upper' means the origin [0,0] is in the upper left corner
'lower' means that the image origin is in the lower left corner
Returns:
tuple: (u, v) pixel coordinate | 3.550457 | 4 |
backend/cloud-run-api/app/utils/multi_thread.py | tuxedocat/fast-annotation-tool | 24 | 6619669 | <reponame>tuxedocat/fast-annotation-tool<gh_stars>10-100
from concurrent import futures
from itertools import chain
def multi_thread_flat(iters, func, max_workers=20):
    """Run ``func`` over every item of ``iters`` on a thread pool and
    concatenate the per-item result lists into one flat list, preserving
    the input order."""
    with futures.ThreadPoolExecutor(max_workers=max_workers) as pool:
        per_item_results = list(pool.map(func, iters))
    flat = []
    for sub in per_item_results:
        flat.extend(sub)
    return flat
def multi_thread(iters, func, max_workers=20):
    """Map ``func`` over ``iters`` concurrently on up to ``max_workers``
    threads and return the results as a list in input order."""
    pool = futures.ThreadPoolExecutor(max_workers=max_workers)
    with pool as executor:
        mapped = executor.map(func, iters)
        return [item for item in mapped]
| from concurrent import futures
from itertools import chain
def multi_thread_flat(iters, func, max_workers=20):
    """Concurrently map ``func`` over ``iters`` via ``multi_thread`` and
    flatten the resulting lists one level, preserving input order."""
    nested = multi_thread(iters, func, max_workers)
    return [element for sublist in nested for element in sublist]
def multi_thread(iters, func, max_workers=20):
with futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
result_futures = executor.map(func, iters)
return list(result_futures) | none | 1 | 3.179535 | 3 | |
app/tests/test_camera.py | yangshun/cs4243-project | 3 | 6619670 | from unittest import TestCase
import numpy as np
import cv2 as cv2
from app.surface import Surface
from app.camera import Camera
SIZE = 100.0
class TestCamera(TestCase):
    """Unit tests for Camera: surface projection, image-plane distance and
    surface clipping against axis-aligned square surfaces of side SIZE.

    Obsolete commented-out depth/projection tests, which targeted an older
    ``project_surface`` API returning a tuple, were removed.
    """

    def setUp(self):
        # 200x200 camera pulled back along -y so the test surfaces are in
        # front of it by default.
        self.camera = Camera(50, width=200, height=200)
        self.camera.position = np.array([0, -20, 0])
        self.image = cv2.imread('../static/cube/front.png', cv2.CV_LOAD_IMAGE_COLOR)
        # Texture corner pixels, clockwise from the top-left.  (Previously a
        # redundant chained assignment that also bound a dead local.)
        self.edge_2dpoints = np.array([(0, 0), (200, 0), (200, 200), (0, 200)])

    def tearDown(self):
        pass

    def testProjectionSurface_perpendicularSurface(self):
        """Project the cube's left face and dump the result to disk."""
        # Left face: x fixed at -SIZE/2, spanning y and z.
        edge_3dpoints = np.array([(-SIZE/2, -SIZE/2, SIZE/2), (-SIZE/2, SIZE/2, SIZE/2),
                                  (-SIZE/2, SIZE/2, -SIZE/2), (-SIZE/2, -SIZE/2, -SIZE/2)])
        surface = Surface(self.image, edge_3dpoints, self.edge_2dpoints)

        projected_image = self.camera.project_surface(surface)
        cv2.imwrite('../static/cube/test.png', projected_image)

    def testDistanceToImagePlane_positiveDistance(self):
        # Point in front of the plane: 24 matches the y-gap between the
        # point (y=4) and the camera position (y=-20).
        point = np.array((3, 4, 5))
        dist = self.camera.distance_to_image_plane(point)
        self.assertAlmostEqual(dist, 24)

    def testDistanceToImagePlane_zeroDistance(self):
        # Point lying exactly on the image plane.
        point = np.array((3, -20, 5))
        dist = self.camera.distance_to_image_plane(point)
        self.assertAlmostEqual(dist, 0)

    def testDistanceToImagePlane_negativeDistance(self):
        # Point behind the image plane yields a negative distance.
        point = np.array((3, -100, 5))
        dist = self.camera.distance_to_image_plane(point)
        self.assertAlmostEqual(dist, -80)

    def testClippingSurface_inFront_noClipping(self):
        edge_3dpoints = np.array([(-SIZE/2, -SIZE/2, SIZE/2), (-SIZE/2, SIZE/2, SIZE/2),
                                  (-SIZE/2, SIZE/2, -SIZE/2), (-SIZE/2, -SIZE/2, -SIZE/2)])
        surface = Surface(self.image, edge_3dpoints, self.edge_2dpoints)
        # Camera fully behind the surface extent: nothing is clipped.
        self.camera.position = np.array([0, -51, 0])

        clipped_image = self.camera.clipping_surface(surface)
        clipped_height, clipped_width, _ = clipped_image.shape
        self.assertAlmostEqual(clipped_height, 200)
        self.assertAlmostEqual(clipped_width, 200)

    def testClippingSurface_behind_clippedTotally(self):
        edge_3dpoints = np.array([(-SIZE/2, -SIZE/2, SIZE/2), (-SIZE/2, SIZE/2, SIZE/2),
                                  (-SIZE/2, SIZE/2, -SIZE/2), (-SIZE/2, -SIZE/2, -SIZE/2)])
        surface = Surface(self.image, edge_3dpoints, self.edge_2dpoints)
        # Camera in front of the surface: the surface is clipped away.
        self.camera.position = np.array([0, 50, 0])

        clipped_image = self.camera.clipping_surface(surface)
        self.assertIsNone(clipped_image)

    def testClippingSurface_partially_clipped(self):
        edge_3dpoints = np.array([(-SIZE/2, -SIZE/2, SIZE/2), (-SIZE/2, SIZE/2, SIZE/2),
                                  (-SIZE/2, SIZE/2, -SIZE/2), (-SIZE/2, -SIZE/2, -SIZE/2)])
        surface = Surface(self.image, edge_3dpoints, self.edge_2dpoints)
        # Camera placed level with the surface plane.
        self.camera.position = np.array([0, 0, 0])

        clipped_image = self.camera.clipping_surface(surface)
        clipped_height, clipped_width, _ = clipped_image.shape
        self.assertAlmostEqual(clipped_height, 200)
        self.assertAlmostEqual(clipped_width, 200)
| from unittest import TestCase
import numpy as np
import cv2 as cv2
from app.surface import Surface
from app.camera import Camera
SIZE = 100.0
class TestCamera(TestCase):
def setUp(self):
self.camera = Camera(50, width=200, height=200)
self.camera.position = np.array([0, -20, 0])
self.image = cv2.imread('../static/cube/front.png', cv2.CV_LOAD_IMAGE_COLOR)
self.edge_2dpoints = edge_2dpoints = np.array([(0, 0), (200, 0), (200, 200), (0, 200)])
def tearDown(self):
pass
# def testSurfaceProjection_leftPerpendicularSurface_shouldNotProject(self):
# edge_3dpoints = np.array([(0, 1, 1), (0, 0, 1), (0, 0, 0), (0, 1, 0)])
# edge_2dpoints = np.array([(0, 0), (0, 0), (0, 0), (0, 0)])
# surface = Surface(None, edge_3dpoints, edge_2dpoints)
#
# projected_image, _ = self.camera.project_surface(surface)
# self.assertIsNone(projected_image)
#
# def testSurfaceProjection_rightPerpendicularSurface_shouldNotProject(self):
# edge_3dpoints = np.array([(1, 0, 1), (1, 1, 1), (1, 1, 0), (1, 0, 0)])
# edge_2dpoints = np.array([(0, 0), (0, 0), (0, 0), (0, 0)])
# surface = Surface(None, edge_3dpoints, edge_2dpoints)
#
# projected_image, _ = self.camera.project_surface(surface)
# self.assertIsNone(projected_image)
#
# def testSurfaceProjection_backParallelSurface_shouldNotProject(self):
# edge_3dpoints = np.array([(1, 1, 1), (0, 1, 1), (0, 1, 0), (1, 1, 0)])
# edge_2dpoints = np.array([(0, 0), (0, 0), (0, 0), (0, 0)])
# surface = Surface(None, edge_3dpoints, edge_2dpoints)
#
# projected_image, _ = self.camera.project_surface(surface)
# self.assertIsNone(projected_image)
#
# def testProjectSurfaceDepth(self):
# image = cv2.imread('../static/cube/front.png', cv2.CV_LOAD_IMAGE_COLOR)
# edge_3dpoints = np.array([(-5, 0, 5), (5, 0, 5), (5, 0, -5), (-5, 0, -5)])
# edge_2dpoints = np.array([(0, 0), (200, 0), (200, 200), (0, 200)])
# surface = Surface(image, edge_3dpoints, edge_2dpoints)
#
# projected_image, image_depth = self.camera.project_surface(surface)
#
# expected_image_depth = np.array([[np.inf, np.inf, np.inf, np.inf, np.inf],
# [np.inf, 12.24744871, 11.30388331, 11.30388331, np.inf],
# [np.inf, 11.30388331, 10.27402334, 10.27402334, np.inf],
# [np.inf, 11.30388331, 10.27402334, 10.27402334, np.inf],
# [np.inf, np.inf, np.inf, np.inf, np.inf]])
#
# self.assertTrue(np.allclose(image_depth, expected_image_depth))
def testProjectionSurface_perpendicularSurface(self):
# front coordinate
# edge_3dpoints = np.array([(-SIZE/2, 0, SIZE/2), (SIZE/2, 0, SIZE/2), (SIZE/2, 0, -SIZE/2), (-SIZE/2, 0, -SIZE/2)])
# left surface
edge_3dpoints = np.array([(-SIZE/2, -SIZE/2, SIZE/2), (-SIZE/2, SIZE/2, SIZE/2),
(-SIZE/2, SIZE/2, -SIZE/2), (-SIZE/2, -SIZE/2, -SIZE/2)])
surface = Surface(self.image, edge_3dpoints, self.edge_2dpoints)
projected_image = self.camera.project_surface(surface)
cv2.imwrite('../static/cube/test.png', projected_image)
def testDistanceToImagePlane_positiveDistance(self):
point = np.array((3, 4, 5))
dist = self.camera.distance_to_image_plane(point)
self.assertAlmostEqual(dist, 24)
def testDistanceToImagePlane_zeroDistance(self):
point = np.array((3, -20, 5))
dist = self.camera.distance_to_image_plane(point)
self.assertAlmostEqual(dist, 0)
def testDistanceToImagePlane_negativeDistance(self):
point = np.array((3, -100, 5))
dist = self.camera.distance_to_image_plane(point)
self.assertAlmostEqual(dist, -80)
def testClippingSurface_inFront_noClipping(self):
edge_3dpoints = np.array([(-SIZE/2, -SIZE/2, SIZE/2), (-SIZE/2, SIZE/2, SIZE/2),
(-SIZE/2, SIZE/2, -SIZE/2), (-SIZE/2, -SIZE/2, -SIZE/2)])
surface = Surface(self.image, edge_3dpoints, self.edge_2dpoints)
self.camera.position = np.array([0, -51, 0])
clipped_image = self.camera.clipping_surface(surface)
clipped_height, clipped_width, _ = clipped_image.shape
self.assertAlmostEqual(clipped_height, 200)
self.assertAlmostEqual(clipped_width, 200)
def testClippingSurface_behind_clippedTotally(self):
edge_3dpoints = np.array([(-SIZE/2, -SIZE/2, SIZE/2), (-SIZE/2, SIZE/2, SIZE/2),
(-SIZE/2, SIZE/2, -SIZE/2), (-SIZE/2, -SIZE/2, -SIZE/2)])
surface = Surface(self.image, edge_3dpoints, self.edge_2dpoints)
self.camera.position = np.array([0, 50, 0])
clipped_image = self.camera.clipping_surface(surface)
self.assertIsNone(clipped_image)
def testClippingSurface_partially_clipped(self):
edge_3dpoints = np.array([(-SIZE/2, -SIZE/2, SIZE/2), (-SIZE/2, SIZE/2, SIZE/2),
(-SIZE/2, SIZE/2, -SIZE/2), (-SIZE/2, -SIZE/2, -SIZE/2)])
surface = Surface(self.image, edge_3dpoints, self.edge_2dpoints)
self.camera.position = np.array([0, 0, 0])
clipped_image = self.camera.clipping_surface(surface)
clipped_height, clipped_width, _ = clipped_image.shape
self.assertAlmostEqual(clipped_height, 200)
self.assertAlmostEqual(clipped_width, 200)
| en | 0.506456 | # def testSurfaceProjection_leftPerpendicularSurface_shouldNotProject(self): # edge_3dpoints = np.array([(0, 1, 1), (0, 0, 1), (0, 0, 0), (0, 1, 0)]) # edge_2dpoints = np.array([(0, 0), (0, 0), (0, 0), (0, 0)]) # surface = Surface(None, edge_3dpoints, edge_2dpoints) # # projected_image, _ = self.camera.project_surface(surface) # self.assertIsNone(projected_image) # # def testSurfaceProjection_rightPerpendicularSurface_shouldNotProject(self): # edge_3dpoints = np.array([(1, 0, 1), (1, 1, 1), (1, 1, 0), (1, 0, 0)]) # edge_2dpoints = np.array([(0, 0), (0, 0), (0, 0), (0, 0)]) # surface = Surface(None, edge_3dpoints, edge_2dpoints) # # projected_image, _ = self.camera.project_surface(surface) # self.assertIsNone(projected_image) # # def testSurfaceProjection_backParallelSurface_shouldNotProject(self): # edge_3dpoints = np.array([(1, 1, 1), (0, 1, 1), (0, 1, 0), (1, 1, 0)]) # edge_2dpoints = np.array([(0, 0), (0, 0), (0, 0), (0, 0)]) # surface = Surface(None, edge_3dpoints, edge_2dpoints) # # projected_image, _ = self.camera.project_surface(surface) # self.assertIsNone(projected_image) # # def testProjectSurfaceDepth(self): # image = cv2.imread('../static/cube/front.png', cv2.CV_LOAD_IMAGE_COLOR) # edge_3dpoints = np.array([(-5, 0, 5), (5, 0, 5), (5, 0, -5), (-5, 0, -5)]) # edge_2dpoints = np.array([(0, 0), (200, 0), (200, 200), (0, 200)]) # surface = Surface(image, edge_3dpoints, edge_2dpoints) # # projected_image, image_depth = self.camera.project_surface(surface) # # expected_image_depth = np.array([[np.inf, np.inf, np.inf, np.inf, np.inf], # [np.inf, 12.24744871, 11.30388331, 11.30388331, np.inf], # [np.inf, 11.30388331, 10.27402334, 10.27402334, np.inf], # [np.inf, 11.30388331, 10.27402334, 10.27402334, np.inf], # [np.inf, np.inf, np.inf, np.inf, np.inf]]) # # self.assertTrue(np.allclose(image_depth, expected_image_depth)) # front coordinate # edge_3dpoints = np.array([(-SIZE/2, 0, SIZE/2), (SIZE/2, 0, SIZE/2), (SIZE/2, 0, -SIZE/2), (-SIZE/2, 0, 
-SIZE/2)]) # left surface | 2.778455 | 3 |
system/input-output/file/csv-file.py | tonylixu/devops | 0 | 6619671 | <reponame>tonylixu/devops<filename>system/input-output/file/csv-file.py
'''
Write and read csv file
'''
import csv
def write_to_csv(v):
offset = 0
size = len(v)
lines = 50
with open('data.csv', 'w') as f:
csvout = csv.writer(f)
csvout.writerows(v)
def read_from_csv():
with open('data.csv', 'r') as f:
csvin = csv.reader(f)
for row in csvin:
print row
def read_from_csv_dict():
with open('data.csv', 'rt') as f:
csvin = csv.DictReader(f, fieldnames=['first', 'last'])
for row in csvin:
print row
if __name__ == '__main__':
villains = [
['Doctor', 'No'],
['Rosa', 'Klebb'],
['Mister', 'Big'],
['Auric', 'Goldfinger'],
['Ernst', 'Blofeld']
]
write_to_csv(villains)
read_from_csv()
read_from_csv_dict() | '''
Write and read csv file
'''
import csv
def write_to_csv(v):
offset = 0
size = len(v)
lines = 50
with open('data.csv', 'w') as f:
csvout = csv.writer(f)
csvout.writerows(v)
def read_from_csv():
with open('data.csv', 'r') as f:
csvin = csv.reader(f)
for row in csvin:
print row
def read_from_csv_dict():
with open('data.csv', 'rt') as f:
csvin = csv.DictReader(f, fieldnames=['first', 'last'])
for row in csvin:
print row
if __name__ == '__main__':
villains = [
['Doctor', 'No'],
['Rosa', 'Klebb'],
['Mister', 'Big'],
['Auric', 'Goldfinger'],
['Ernst', 'Blofeld']
]
write_to_csv(villains)
read_from_csv()
read_from_csv_dict() | en | 0.966812 | Write and read csv file | 3.830121 | 4 |
torchOnVideo/denoising/vnlnet/utils.py | torchOnVideo/torchOnVideo | 2 | 6619672 | import torch
import torch.nn as nn
import math
from skimage.measure.simple_metrics import compare_psnr
def weights_init_kaiming(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
nn.init.kaiming_normal(m.weight.data, a=0, mode='fan_in')
elif classname.find('Linear') != -1:
nn.init.kaiming_normal(m.weight.data, a=0, mode='fan_in')
elif classname.find('BatchNorm') != -1:
m.weight.data.normal_(mean=0, std=math.sqrt(2./9./64.)).clamp_(-0.025,0.025)
nn.init.constant(m.bias.data, 0.0)
def batch_PSNR(img, imclean, data_range):
Img = img.data.cpu().numpy().astype(np.float32)
Iclean = imclean.data.cpu().numpy().astype(np.float32)
PSNR = 0
for i in range(Img.shape[0]):
PSNR += compare_psnr(Iclean[i,:,:,:], Img[i,:,:,:], data_range=data_range)
return (PSNR/Img.shape[0]) | import torch
import torch.nn as nn
import math
from skimage.measure.simple_metrics import compare_psnr
def weights_init_kaiming(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
nn.init.kaiming_normal(m.weight.data, a=0, mode='fan_in')
elif classname.find('Linear') != -1:
nn.init.kaiming_normal(m.weight.data, a=0, mode='fan_in')
elif classname.find('BatchNorm') != -1:
m.weight.data.normal_(mean=0, std=math.sqrt(2./9./64.)).clamp_(-0.025,0.025)
nn.init.constant(m.bias.data, 0.0)
def batch_PSNR(img, imclean, data_range):
Img = img.data.cpu().numpy().astype(np.float32)
Iclean = imclean.data.cpu().numpy().astype(np.float32)
PSNR = 0
for i in range(Img.shape[0]):
PSNR += compare_psnr(Iclean[i,:,:,:], Img[i,:,:,:], data_range=data_range)
return (PSNR/Img.shape[0]) | none | 1 | 2.068807 | 2 | |
blt_net/cascademv2/core/model/losses.py | AlexD123123/BLT-net | 1 | 6619673 | <filename>blt_net/cascademv2/core/model/losses.py
from keras import backend as K
# if K.image_dim_ordering() == 'tf':
if K.common.image_dim_ordering() == 'tf':
import tensorflow as tf
epsilon = 1e-4
def regr_loss(y_true, y_pred):
    """Smooth-L1 (Huber) localisation loss averaged over positive anchors.

    ``y_true[:, :, 0]`` is the positive-anchor mask and ``y_true[:, :, 1:]``
    holds the regression targets matching ``y_pred``'s last dimension.
    """
    positives = y_true[:, :, 0]
    n_positive = tf.reduce_sum(positives)
    absolute_loss = tf.abs(y_true[:,:,1:] - y_pred)
    square_loss = 0.5 * (y_true[:,:,1:] - y_pred) ** 2
    # Smooth L1: quadratic below |x| = 1, linear (shifted by 0.5) above —
    # continuous at the switch point.
    l1_loss = tf.where(tf.less(absolute_loss, 1.0), square_loss, absolute_loss - 0.5)
    localization_loss = tf.to_float(tf.reduce_sum(l1_loss, axis=-1))
    # Mask to positives only; max(1, n_positive) guards division by zero
    # when a batch contains no positive anchors.
    loc_loss = tf.reduce_sum(localization_loss * positives, axis=-1)/ tf.maximum(1.0, n_positive)
    return loc_loss
def cls_loss(y_true, y_pred):
    """Focal binary classification loss averaged over assigned anchors.

    ``y_true[:, :, 0]``/``y_true[:, :, 1]`` are the positive/negative anchor
    masks; anchors in neither mask are ignored.
    """
    # Focal-loss hyper-parameters (gamma focusing exponent, alpha balance).
    gamma_param = 2.0
    alpha_param = 0.75
    positives = y_true[:, :, 0]
    negatives = y_true[:, :, 1]
    # Anchors assigned to either class; everything else contributes zero.
    valid = positives + negatives
    # NOTE(review): Keras 2's K.binary_crossentropy signature is
    # (target, output) — here y_pred is passed first; confirm the intended
    # argument order against the Keras version this repo pins.
    classification_loss = valid * K.binary_crossentropy(y_pred[:, :, 0], positives)
    # Focal weight: down-weights easy examples; note the asymmetric alpha
    # ((1 - alpha) on foreground, alpha on background).
    foreground_alpha = positives * tf.constant(1-alpha_param)
    background_alpha = negatives * tf.constant(alpha_param)
    foreground_weight = foreground_alpha * (tf.constant(1.0) - y_pred[:, :, 0]) ** tf.constant(gamma_param)
    background_weight = background_alpha * y_pred[:, :, 0] ** tf.constant(gamma_param)
    focal_weight = foreground_weight + background_weight
    # Normalise by the number of positive anchors (at least 1).
    assigned_boxes = tf.reduce_sum(positives)
    class_loss = tf.reduce_sum(classification_loss * focal_weight, axis=-1) / tf.maximum(1.0, assigned_boxes)
    return class_loss
| <filename>blt_net/cascademv2/core/model/losses.py
from keras import backend as K
# if K.image_dim_ordering() == 'tf':
if K.common.image_dim_ordering() == 'tf':
import tensorflow as tf
epsilon = 1e-4
def regr_loss(y_true, y_pred):
positives = y_true[:, :, 0]
n_positive = tf.reduce_sum(positives)
absolute_loss = tf.abs(y_true[:,:,1:] - y_pred)
square_loss = 0.5 * (y_true[:,:,1:] - y_pred) ** 2
l1_loss = tf.where(tf.less(absolute_loss, 1.0), square_loss, absolute_loss - 0.5)
localization_loss = tf.to_float(tf.reduce_sum(l1_loss, axis=-1))
loc_loss = tf.reduce_sum(localization_loss * positives, axis=-1)/ tf.maximum(1.0, n_positive)
return loc_loss
def cls_loss(y_true, y_pred):
#new values
gamma_param = 2.0
alpha_param = 0.75
positives = y_true[:, :, 0]
negatives = y_true[:, :, 1]
valid = positives + negatives
classification_loss = valid * K.binary_crossentropy(y_pred[:, :, 0], positives)
# firstly compute the focal weight
foreground_alpha = positives * tf.constant(1-alpha_param)
background_alpha = negatives * tf.constant(alpha_param)
foreground_weight = foreground_alpha * (tf.constant(1.0) - y_pred[:, :, 0]) ** tf.constant(gamma_param)
background_weight = background_alpha * y_pred[:, :, 0] ** tf.constant(gamma_param)
focal_weight = foreground_weight + background_weight
assigned_boxes = tf.reduce_sum(positives)
class_loss = tf.reduce_sum(classification_loss * focal_weight, axis=-1) / tf.maximum(1.0, assigned_boxes)
return class_loss
| en | 0.39041 | # if K.image_dim_ordering() == 'tf': #new values # firstly compute the focal weight | 2.128049 | 2 |
hackerspace/EventMarketing/images.py | y0av/HackspaceOS | 0 | 6619674 | <gh_stars>0
from PIL import Image, ImageFont, ImageDraw
from hackerspace.EventMarketing.image_functions.background_image import add_background_image
from hackerspace.EventMarketing.image_functions.add_logo import add_logo
from hackerspace.EventMarketing.image_functions.text import add_soon_at_space
class EventImage():
    """Compose a square promotional image for ``event``.

    A 500x500 transparent RGBA canvas is created and then handed through
    helper functions that add a background image, the logo and a
    "soon at <space>" text; further elements are still TODO below.
    """
    def __init__(self, event):
        # Event whose details will be rendered onto the image.
        self.event = event
        # Layout variant selector; its semantics are defined by the helper
        # functions — TODO confirm.
        self.layout = 1
        self.int_width_px = 500
        self.int_height_px = 500
        # --- create the canvas and run the composition pipeline ---
        self.image = Image.new(
            'RGBA', (self.int_width_px, self.int_height_px), (255, 255, 255, 0))
        # NOTE(review): rebinding the local name ``self`` has no effect on
        # the constructed instance unless these helpers mutate it in place
        # and return it — confirm their contract.
        self = add_background_image(self)
        self = add_logo(self)
        self = add_soon_at_space(self)
        # TODO: add background block
        # TODO: add event name
        # TODO: add event date and time
| from PIL import Image, ImageFont, ImageDraw
from hackerspace.EventMarketing.image_functions.background_image import add_background_image
from hackerspace.EventMarketing.image_functions.add_logo import add_logo
from hackerspace.EventMarketing.image_functions.text import add_soon_at_space
class EventImage():
def __init__(self, event):
self.event = event
self.layout = 1
self.int_width_px = 500
self.int_height_px = 500
#########################
self.image = Image.new(
'RGBA', (self.int_width_px, self.int_height_px), (255, 255, 255, 0))
self = add_background_image(self)
self = add_logo(self)
self = add_soon_at_space(self)
# add background block
# add event name
# add event date and time | en | 0.232575 | ######################### # add background block # add event name # add event date and time | 2.897148 | 3 |
chokozainerrl/show_result.py | chokozainer/chokozainerrl | 0 | 6619675 | <gh_stars>0
import pandas as pd
import glob
import os
def table(filestr):
    """Load the most recently modified file matching the glob pattern
    ``filestr`` as a tab-separated DataFrame.

    Raises:
        FileNotFoundError: when no file matches ``filestr`` (instead of
            the opaque IndexError the bare ``[-1]`` used to raise).
    """
    score_files = glob.glob(filestr)
    if not score_files:
        raise FileNotFoundError("no score file matches pattern: %s" % filestr)
    # Newest file by modification time ends up last after the sort.
    score_files.sort(key=os.path.getmtime)
    score_file = score_files[-1]
    df = pd.read_csv(score_file, delimiter='\t')
    return df
def graph(filestr, strx, stry):
    """Plot column ``stry`` against column ``strx`` from the newest file
    matching the glob pattern ``filestr`` (tab-separated)."""
    matches = glob.glob(filestr)
    matches.sort(key=os.path.getmtime)
    newest = matches[-1]
    frame = pd.read_csv(newest, delimiter='\t')
    frame.plot(x=strx, y=stry)
import glob
import os
def table(filestr):
score_files = glob.glob(filestr)
score_files.sort(key=os.path.getmtime)
score_file = score_files[-1]
df = pd.read_csv(score_file, delimiter='\t' )
return df
def graph(filestr,strx,stry):
score_files = glob.glob(filestr)
score_files.sort(key=os.path.getmtime)
score_file = score_files[-1]
df = pd.read_csv(score_file, delimiter='\t' )
df.plot(x=strx,y=stry) | none | 1 | 2.762264 | 3 | |
project/accounts/urls.py | dukeofdylan/COP4710_Project | 0 | 6619676 | <gh_stars>0
"""cop4710 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import path
from accounts.views import SignupView, LoginView, LogoutView, ProfileView, redirect_to_default_user_profile
# Account routes; all views come from ``accounts.views``.
urlpatterns = [
    # Registration form / handler.
    path("signup/", SignupView.as_view(), name="accounts_signup"),
    # Session login.
    path("login/", LoginView.as_view(), name="accounts_login"),
    # Session logout.
    path("logout/", LogoutView.as_view(), name="accounts_logout"),
    # Profile page for a given key.  NOTE(review): ``<pk>`` has no path
    # converter, so it matches any single path segment as a string — the
    # fixed routes above only win because they are listed first.
    path("<pk>/", ProfileView.as_view(), name="accounts_view"),
    # Bare prefix — presumably redirects to the current user's profile
    # (see the view name); confirm in ``accounts.views``.
    path("", redirect_to_default_user_profile),
]
| """cop4710 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import path
from accounts.views import SignupView, LoginView, LogoutView, ProfileView, redirect_to_default_user_profile
urlpatterns = [
path("signup/", SignupView.as_view(), name="accounts_signup"),
path("login/", LoginView.as_view(), name="accounts_login"),
path("logout/", LogoutView.as_view(), name="accounts_logout"),
path("<pk>/", ProfileView.as_view(), name="accounts_view"),
path("", redirect_to_default_user_profile),
] | en | 0.565614 | cop4710 URL Configuration The `urlpatterns` list routes URLs to views. For more information please see: https://docs.djangoproject.com/en/3.1/topics/http/urls/ Examples: Function views 1. Add an import: from my_app import views 2. Add a URL to urlpatterns: path('', views.home, name='home') Class-based views 1. Add an import: from other_app.views import Home 2. Add a URL to urlpatterns: path('', Home.as_view(), name='home') Including another URLconf 1. Import the include() function: from django.urls import include, path 2. Add a URL to urlpatterns: path('blog/', include('blog.urls')) | 2.822984 | 3 |
bigfastapi/models/auth_models.py | Psami-wondah/bigfastapi | 0 | 6619677 | import datetime as _dt
from sqlite3 import Timestamp
import sqlalchemy as _sql
import sqlalchemy.orm as _orm
import passlib.hash as _hash
from sqlalchemy.schema import Column
from sqlalchemy.types import String, Integer, Enum, DateTime, Boolean, ARRAY, Text
from sqlalchemy import ForeignKey
from uuid import UUID, uuid4
from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method
from sqlalchemy.sql import func
from fastapi_utils.guid_type import GUID, GUID_DEFAULT_SQLITE
from ..utils.utils import generate_short_id
import bigfastapi.db.database as _database
class VerificationCode(_database.Base):
    """One-time verification code issued to a user."""
    __tablename__ = "verification_codes"
    # The default must be a callable: ``uuid4().hex`` was evaluated once at
    # import time, so every row shared the same primary-key default.
    id = Column(String(255), primary_key=True, index=True, default=lambda: uuid4().hex)
    user_id = Column(String(255), ForeignKey("users.id"))
    code = Column(String(255), index=True, unique=True)
    date_created = Column(DateTime, default=_dt.datetime.utcnow)
class PasswordResetCode(_database.Base):
    """One-time password-reset code issued to a user."""
    __tablename__ = "password_reset_codes"
    # The default must be a callable: ``uuid4().hex`` was evaluated once at
    # import time, so every row shared the same primary-key default.
    id = Column(String(255), primary_key=True, index=True, default=lambda: uuid4().hex)
    user_id = Column(String(255), ForeignKey("users.id"))
    code = Column(String(255), index=True, unique=True)
    date_created = Column(DateTime, default=_dt.datetime.utcnow)
class Token(_database.Base):
    """Authentication token belonging to a user."""
    __tablename__ = "tokens"
    # The default must be a callable: ``uuid4().hex`` was evaluated once at
    # import time, so every row shared the same primary-key default.
    id = Column(String(255), primary_key=True, index=True, default=lambda: uuid4().hex)
    user_id = Column(String(255), ForeignKey("users.id"))
    token = Column(String(255), index=True)
    date_created = Column(DateTime, default=_dt.datetime.utcnow)
class VerificationToken(_database.Base):
    """Account-verification token belonging to a user."""
    __tablename__ = "verification_tokens"
    # The default must be a callable: ``uuid4().hex`` was evaluated once at
    # import time, so every row shared the same primary-key default.
    id = Column(String(255), primary_key=True, index=True, default=lambda: uuid4().hex)
    user_id = Column(String(255), ForeignKey("users.id"))
    token = Column(String(255), index=True)
    date_created = Column(DateTime, default=_dt.datetime.utcnow)
class PasswordResetToken(_database.Base):
    """Password-reset token belonging to a user."""
    __tablename__ = "password_reset_tokens"
    # The default must be a callable: ``uuid4().hex`` was evaluated once at
    # import time, so every row shared the same primary-key default.
    id = Column(String(255), primary_key=True, index=True, default=lambda: uuid4().hex)
    user_id = Column(String(255), ForeignKey("users.id"))
    token = Column(String(255), index=True)
    date_created = Column(DateTime, default=_dt.datetime.utcnow)
from sqlite3 import Timestamp
import sqlalchemy as _sql
import sqlalchemy.orm as _orm
import passlib.hash as _hash
from sqlalchemy.schema import Column
from sqlalchemy.types import String, Integer, Enum, DateTime, Boolean, ARRAY, Text
from sqlalchemy import ForeignKey
from uuid import UUID, uuid4
from sqlalchemy.ext.hybrid import hybrid_property, hybrid_method
from sqlalchemy.sql import func
from fastapi_utils.guid_type import GUID, GUID_DEFAULT_SQLITE
from ..utils.utils import generate_short_id
import bigfastapi.db.database as _database
class VerificationCode(_database.Base):
__tablename__ = "verification_codes"
id = Column(String(255), primary_key=True, index=True, default=uuid4().hex)
user_id = Column(String(255), ForeignKey("users.id"))
code = Column(String(255), index=True, unique=True)
date_created = Column(DateTime, default=_dt.datetime.utcnow)
class PasswordResetCode(_database.Base):
__tablename__ = "password_reset_codes"
id = Column(String(255), primary_key=True, index=True, default=uuid4().hex)
user_id = Column(String(255), ForeignKey("users.id"))
code = Column(String(255), index=True, unique=True)
date_created = Column(DateTime, default=_dt.datetime.utcnow)
class Token(_database.Base):
__tablename__ = "tokens"
id = Column(String(255), primary_key=True, index=True, default=uuid4().hex)
user_id = Column(String(255), ForeignKey("users.id"))
token = Column(String(255), index=True)
date_created = Column(DateTime, default=_dt.datetime.utcnow)
class VerificationToken(_database.Base):
    """Token mailed to a user to verify their account."""
    __tablename__ = "verification_tokens"

    # Callable default — `uuid4().hex` alone runs once at import time and
    # would give every row the same primary key.
    id = Column(String(255), primary_key=True, index=True, default=lambda: uuid4().hex)
    user_id = Column(String(255), ForeignKey("users.id"))
    token = Column(String(255), index=True)
    date_created = Column(DateTime, default=_dt.datetime.utcnow)
class PasswordResetToken(_database.Base):
    """One-time token issued to a user to reset their password."""
    __tablename__ = "password_reset_tokens"

    # Callable default — `uuid4().hex` alone runs once at import time and
    # would give every row the same primary key.
    id = Column(String(255), primary_key=True, index=True, default=lambda: uuid4().hex)
    user_id = Column(String(255), ForeignKey("users.id"))
    token = Column(String(255), index=True)
date_created = Column(DateTime, default=_dt.datetime.utcnow) | none | 1 | 2.193592 | 2 | |
mobilenet_finetunev2.py | zheng-ningxin/amc-models | 0 | 6619678 | from models.mobilenet_v2 import MobileNetV2
import argparse
import os
import json
import torch
import torch.nn as nn
import torch.cuda as cuda
from torch.optim.lr_scheduler import StepLR, MultiStepLR, CosineAnnealingLR
from torchvision import datasets, transforms
from nni.compression.torch.utils.shape_dependency import ChannelDependency
from nni.compression.torch import L1FilterPruner, Constrained_L1FilterPruner
from nni.compression.torch import L2FilterPruner, Constrained_L2FilterPruner
from nni.compression.torch import ActivationMeanRankFilterPruner, ConstrainedActivationMeanRankFilterPruner
from nni.compression.torch import ModelSpeedup
from nni.compression.torch.utils.counter import count_flops_params
from utils import measure_model, AverageMeter, progress_bar, accuracy, process_state_dict
class LabelSmoothingLoss(nn.Module):
    """Cross entropy against a label-smoothed target distribution.

    The true class receives probability ``1 - smoothing`` and the remaining
    mass is spread evenly over the other ``classes - 1`` classes.  With
    ``smoothing=0`` this is ordinary cross entropy (mean reduction).
    """

    def __init__(self, classes, smoothing=0.0, dim=-1):
        super(LabelSmoothingLoss, self).__init__()
        self.confidence = 1.0 - smoothing  # mass kept on the true class
        self.smoothing = smoothing
        self.cls = classes
        self.dim = dim

    def forward(self, pred, target):
        """Return the smoothed loss.

        ``pred`` holds raw logits of shape (batch, classes); ``target`` holds
        integer class indices of shape (batch,).
        """
        pred = pred.log_softmax(dim=self.dim)
        with torch.no_grad():
            # Build the smoothed one-hot target without tracking gradients.
            true_dist = torch.zeros_like(pred)
            true_dist.fill_(self.smoothing / (self.cls - 1))
            true_dist.scatter_(1, target.data.unsqueeze(1), self.confidence)
        return torch.mean(torch.sum(-true_dist * pred, dim=self.dim))
def imagenet_dataset(args):
    """Build ImageNet train/val DataLoaders and a dummy input tensor.

    Expects ``args.data_dir`` to contain ``train/`` and ``val/`` ImageFolder
    trees and ``args.batch_size`` for both loaders.  Returns
    ``(train_loader, val_loader, dummy_input)`` where ``dummy_input`` is a
    single all-ones (1, 3, 224, 224) tensor used for shape tracing.
    """
    # Worker/pinning options only make sense when feeding a GPU.
    kwargs = {'num_workers': 10, 'pin_memory': True} if torch.cuda.is_available() else {}
    # Standard ImageNet channel statistics.
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    # Training: random crop + flip augmentation.
    train_loader = torch.utils.data.DataLoader(
        datasets.ImageFolder(os.path.join(args.data_dir, 'train'),
                             transform=transforms.Compose([
                                 transforms.RandomResizedCrop(224),
                                 transforms.RandomHorizontalFlip(),
                                 transforms.ToTensor(),
                                 normalize,
                             ])),
        batch_size=args.batch_size, shuffle=True, **kwargs)
    # Validation: deterministic resize + center crop.
    val_loader = torch.utils.data.DataLoader(
        datasets.ImageFolder(os.path.join(args.data_dir, 'val'),
                             transform=transforms.Compose([
                                 transforms.Resize(256),
                                 transforms.CenterCrop(224),
                                 transforms.ToTensor(),
                                 normalize,
                             ])),
        batch_size=args.batch_size, shuffle=True, **kwargs)
    dummy_input = torch.ones(1, 3, 224, 224)
    return train_loader, val_loader, dummy_input
def parse_args():
    """Define and parse the command line options for the pruning benchmark."""
    cli = argparse.ArgumentParser()
    cli.add_argument('--data-dir', type=str, default='/mnt/imagenet/raw_jpeg/2012/',
                     help='dataset directory')
    cli.add_argument('--batch-size', type=int, default=64,
                     help='input batch size for training (default: 64)')
    cli.add_argument('--checkpoint', type=str,
                     default='.checkpoints/torch/mobilenetv2_imagenet_71.814.pth.tar',
                     help='The path of the checkpoint to load')
    cli.add_argument('--sparsity', type=str, default='mobilenetv2_config.json',
                     help='path of the sparsity config file')
    cli.add_argument('--log-interval', type=int, default=200,
                     help='how many batches to wait before logging training status')
    cli.add_argument('--finetune_epochs', type=int, default=6,
                     help='the number of finetune epochs after pruning')
    cli.add_argument('--lr', type=float, default=0.001,
                     help='the learning rate of model')
    cli.add_argument('--lr_decay', choices=['multistep', 'cos', 'step'],
                     default='multistep', help='lr decay scheduler type')
    cli.add_argument('--label-smothing', type=float, default=None,
                     help='label smothing')
    cli.add_argument('--weight-decay', type=float, default=5e-4,
                     help='weight decay')
    cli.add_argument('--type', choices=['l1', 'l2', 'activation'],
                     default='l1', help='the pruning algo type')
    cli.add_argument('--para', action='store_true',
                     help='if use multiple gpus')
    return cli.parse_args()
def train(args, model, device, train_loader, criterion, optimizer, epoch, callback=None):
    """Run one training epoch, logging the running mean loss periodically."""
    model.train()
    running_loss = 0.0
    for step, (inputs, labels) in enumerate(train_loader):
        inputs, labels = inputs.to(device), labels.to(device)
        optimizer.zero_grad()
        loss = criterion(model(inputs), labels)
        running_loss += loss.item()
        loss.backward()
        # callback should be inserted between loss.backward() and optimizer.step()
        if callback:
            callback()
        optimizer.step()
        if step % args.log_interval == 0:
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                epoch, step * len(inputs), len(train_loader.dataset),
                100. * step / len(train_loader), running_loss / (step + 1)))
def test(model, device, criterion, val_loader):
model.eval()
test_loss = 0
correct = 0
with torch.no_grad():
for data, target in val_loader:
data, target = data.to(device), target.to(device)
output = model(data)
# sum up batch loss
test_loss += criterion(output, target).item()
# get the index of the max log-probability
pred = output.argmax(dim=1, keepdim=True)
correct += pred.eq(target.view_as(pred)).sum().item()
test_loss /= len(val_loader.dataset)
accuracy = correct / len(val_loader.dataset)
print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.2f}%)\n'.format(
test_loss, correct, len(val_loader.dataset), 100. * accuracy))
return accuracy
def get_data(args):
    """Return (train_loader, val_loader, dummy_input); thin indirection over
    imagenet_dataset so the data source can be swapped in one place."""
    return imagenet_dataset(args)
if __name__ == '__main__':
    # Iteratively prune MobileNetV2 with a constraint-aware one-shot pruner:
    # the sparsity config is split into `segmentation` buckets and each bucket
    # is pruned, sped up, and finetuned in turn.
    print("Benchmark the constraint-aware one shot pruner.")
    args = parse_args()
    torch.manual_seed(0)
    Model = MobileNetV2
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    train_loader, val_loader, dummy_input = get_data(args)
    net = Model(1000, profile='normal').to(device)
    if args.checkpoint:
        checkpoint = torch.load(args.checkpoint)
        net.load_state_dict(process_state_dict(checkpoint['state_dict']))
    # acc = test(net, device, torch.nn.CrossEntropyLoss(), val_loader)
    # print('Before pruning: %f' % (acc))
    with open(args.sparsity, 'r') as jf:
        cfglist = json.load(jf)
    # Select the pruner implementation.
    # NOTE(review): the first two branches are `if`/`if` rather than
    # `if`/`elif`; harmless because the choices are exclusive, but
    # inconsistent with the `elif` used for 'activation'.
    if args.type == 'l1':
        Pruner = Constrained_L1FilterPruner
    if args.type == 'l2':
        Pruner = Constrained_L2FilterPruner
    elif args.type == 'activation':
        Pruner = ConstrainedActivationMeanRankFilterPruner
    segmentation = 10  # number of pruning rounds (config buckets)
    # Layers whose output channels must be pruned together.
    channel_depen = ChannelDependency(net, dummy_input.to(device))
    c_dsets = channel_depen.dependency_sets
    dset_map = {}  # layer name -> its dependency set
    cfg_map = {}   # layer name -> its sparsity config entry
    print("Dependency sets")
    for dset in c_dsets:
        print(dset)
        for layer in dset:
            dset_map[layer] = dset
    for cfg in cfglist:
        name = cfg['op_names'][0]
        cfg_map[name] = cfg
    print(cfg_map)
    # Distribute the configs over `segmentation` buckets, always keeping the
    # members of one dependency set in the same bucket.
    cfg_bukets = []
    for i in range(segmentation):
        cfg_bukets.append([])
    visited = set()
    for cfg in cfglist:
        layer = cfg['op_names'][0]
        if layer in visited:
            continue
        # find the buckets with the least layers
        minimum_layer = len(cfg_bukets[0])
        index = 0
        for i in range(segmentation):
            if len(cfg_bukets[i]) < minimum_layer:
                minimum_layer = len(cfg_bukets[i])
                index = i
        for name in dset_map[layer]:
            if len(dset_map[layer])> 1:
                print('####################################dependency sets:', dset_map[layer])
            if name in cfg_map:
                print(name,':%f'%cfg_map[name]['sparsity'])
                cfg_bukets[index].append(cfg_map[name])
            visited.add(name)
    # print(cfg_bukets)
    print(cfglist)
    # One prune + speedup + finetune round per bucket.  The loop variable
    # deliberately reuses (and shadows) the name `cfglist`.
    # NOTE(review): finetuning is placed inside the bucket loop (iterative
    # prune-then-finetune) -- confirm against the original formatting.
    for cfglist in cfg_bukets:
        acc = test(net, device, torch.nn.CrossEntropyLoss(), val_loader)
        print('In a new iteration, Currently, ACC:', acc)
        print(cfglist)
        if len(cfglist) == 0:
            continue
        pruner = Pruner(net, cfglist, dummy_input.to(device))
        if isinstance(pruner, ConstrainedActivationMeanRankFilterPruner):
            # need to inference before the compress function
            for data, label in train_loader:
                data = data.to(device)
                net(data)
                break
        pruner.compress()
        mask_path = './mask_%f_%d_%s' % (args.lr, args.finetune_epochs, args.lr_decay)
        weight_path = './_ck_%f_%d_%s.pth' % (args.lr, args.finetune_epochs, args.lr_decay)
        pruner.export_model(weight_path, mask_path)
        # Unwrap and physically shrink the network according to the masks.
        pruner._unwrap_model()
        ms = ModelSpeedup(net, dummy_input.to(device), mask_path)
        ms.speedup_model()
        print('Model speedup finished')
        # Finetune the pruned network before the next pruning round.
        optimizer = torch.optim.SGD(net.parameters(), lr=args.lr,
                                    momentum=0.9,
                                    weight_decay=args.weight_decay)
        scheduler = None
        if args.lr_decay == 'multistep':
            scheduler = MultiStepLR(
                optimizer, milestones=[int(args.finetune_epochs*0.25), int(args.finetune_epochs*0.5), int(args.finetune_epochs*0.75)], gamma=0.1)
        elif args.lr_decay == 'cos':
            scheduler = CosineAnnealingLR(optimizer, T_max=args.finetune_epochs)
        elif args.lr_decay == 'step':
            scheduler = StepLR(optimizer, step_size=1, gamma=0.1)
        criterion = torch.nn.CrossEntropyLoss()
        if args.label_smothing:
            criterion = LabelSmoothingLoss(1000, args.label_smothing)
        acc = test(net, device, criterion, val_loader)
        print('After pruning: %f' % (acc))
        for epoch in range(args.finetune_epochs):
            train(args, net, device, train_loader,
                  criterion, optimizer, epoch)
            if scheduler:
                scheduler.step()
            acc = test(net, device, criterion, val_loader)
            print('Learning rate: ', scheduler.get_last_lr())
            print('Finetune Epoch %d, acc of original pruner %f'%(epoch, acc))
        acc = test(net, device, criterion, val_loader)
        print('After finetuning: %f' % (acc))
        flops, weights = count_flops_params(net, dummy_input.size())
        print('Flops:', flops)
| from models.mobilenet_v2 import MobileNetV2
import argparse
import os
import json
import torch
import torch.nn as nn
import torch.cuda as cuda
from torch.optim.lr_scheduler import StepLR, MultiStepLR, CosineAnnealingLR
from torchvision import datasets, transforms
from nni.compression.torch.utils.shape_dependency import ChannelDependency
from nni.compression.torch import L1FilterPruner, Constrained_L1FilterPruner
from nni.compression.torch import L2FilterPruner, Constrained_L2FilterPruner
from nni.compression.torch import ActivationMeanRankFilterPruner, ConstrainedActivationMeanRankFilterPruner
from nni.compression.torch import ModelSpeedup
from nni.compression.torch.utils.counter import count_flops_params
from utils import measure_model, AverageMeter, progress_bar, accuracy, process_state_dict
class LabelSmoothingLoss(nn.Module):
    """Cross entropy against a label-smoothed target distribution.

    The true class receives probability ``1 - smoothing`` and the remaining
    mass is spread evenly over the other ``classes - 1`` classes.  With
    ``smoothing=0`` this is ordinary cross entropy (mean reduction).
    """

    def __init__(self, classes, smoothing=0.0, dim=-1):
        super(LabelSmoothingLoss, self).__init__()
        self.confidence = 1.0 - smoothing  # mass kept on the true class
        self.smoothing = smoothing
        self.cls = classes
        self.dim = dim

    def forward(self, pred, target):
        """Return the smoothed loss.

        ``pred`` holds raw logits of shape (batch, classes); ``target`` holds
        integer class indices of shape (batch,).
        """
        pred = pred.log_softmax(dim=self.dim)
        with torch.no_grad():
            # Build the smoothed one-hot target without tracking gradients.
            true_dist = torch.zeros_like(pred)
            true_dist.fill_(self.smoothing / (self.cls - 1))
            true_dist.scatter_(1, target.data.unsqueeze(1), self.confidence)
        return torch.mean(torch.sum(-true_dist * pred, dim=self.dim))
def imagenet_dataset(args):
    """Build ImageNet train/val DataLoaders and a dummy input tensor.

    Expects ``args.data_dir`` to contain ``train/`` and ``val/`` ImageFolder
    trees and ``args.batch_size`` for both loaders.  Returns
    ``(train_loader, val_loader, dummy_input)`` where ``dummy_input`` is a
    single all-ones (1, 3, 224, 224) tensor used for shape tracing.
    """
    # Worker/pinning options only make sense when feeding a GPU.
    kwargs = {'num_workers': 10, 'pin_memory': True} if torch.cuda.is_available() else {}
    # Standard ImageNet channel statistics.
    normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                     std=[0.229, 0.224, 0.225])
    # Training: random crop + flip augmentation.
    train_loader = torch.utils.data.DataLoader(
        datasets.ImageFolder(os.path.join(args.data_dir, 'train'),
                             transform=transforms.Compose([
                                 transforms.RandomResizedCrop(224),
                                 transforms.RandomHorizontalFlip(),
                                 transforms.ToTensor(),
                                 normalize,
                             ])),
        batch_size=args.batch_size, shuffle=True, **kwargs)
    # Validation: deterministic resize + center crop.
    val_loader = torch.utils.data.DataLoader(
        datasets.ImageFolder(os.path.join(args.data_dir, 'val'),
                             transform=transforms.Compose([
                                 transforms.Resize(256),
                                 transforms.CenterCrop(224),
                                 transforms.ToTensor(),
                                 normalize,
                             ])),
        batch_size=args.batch_size, shuffle=True, **kwargs)
    dummy_input = torch.ones(1, 3, 224, 224)
    return train_loader, val_loader, dummy_input
def parse_args():
    """Define and parse the command line options for the pruning benchmark."""
    cli = argparse.ArgumentParser()
    cli.add_argument('--data-dir', type=str, default='/mnt/imagenet/raw_jpeg/2012/',
                     help='dataset directory')
    cli.add_argument('--batch-size', type=int, default=64,
                     help='input batch size for training (default: 64)')
    cli.add_argument('--checkpoint', type=str,
                     default='.checkpoints/torch/mobilenetv2_imagenet_71.814.pth.tar',
                     help='The path of the checkpoint to load')
    cli.add_argument('--sparsity', type=str, default='mobilenetv2_config.json',
                     help='path of the sparsity config file')
    cli.add_argument('--log-interval', type=int, default=200,
                     help='how many batches to wait before logging training status')
    cli.add_argument('--finetune_epochs', type=int, default=6,
                     help='the number of finetune epochs after pruning')
    cli.add_argument('--lr', type=float, default=0.001,
                     help='the learning rate of model')
    cli.add_argument('--lr_decay', choices=['multistep', 'cos', 'step'],
                     default='multistep', help='lr decay scheduler type')
    cli.add_argument('--label-smothing', type=float, default=None,
                     help='label smothing')
    cli.add_argument('--weight-decay', type=float, default=5e-4,
                     help='weight decay')
    cli.add_argument('--type', choices=['l1', 'l2', 'activation'],
                     default='l1', help='the pruning algo type')
    cli.add_argument('--para', action='store_true',
                     help='if use multiple gpus')
    return cli.parse_args()
def train(args, model, device, train_loader, criterion, optimizer, epoch, callback=None):
    """Run one training epoch, logging the running mean loss periodically."""
    model.train()
    running_loss = 0.0
    for step, (inputs, labels) in enumerate(train_loader):
        inputs, labels = inputs.to(device), labels.to(device)
        optimizer.zero_grad()
        loss = criterion(model(inputs), labels)
        running_loss += loss.item()
        loss.backward()
        # callback should be inserted between loss.backward() and optimizer.step()
        if callback:
            callback()
        optimizer.step()
        if step % args.log_interval == 0:
            print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
                epoch, step * len(inputs), len(train_loader.dataset),
                100. * step / len(train_loader), running_loss / (step + 1)))
def test(model, device, criterion, val_loader):
model.eval()
test_loss = 0
correct = 0
with torch.no_grad():
for data, target in val_loader:
data, target = data.to(device), target.to(device)
output = model(data)
# sum up batch loss
test_loss += criterion(output, target).item()
# get the index of the max log-probability
pred = output.argmax(dim=1, keepdim=True)
correct += pred.eq(target.view_as(pred)).sum().item()
test_loss /= len(val_loader.dataset)
accuracy = correct / len(val_loader.dataset)
print('\nTest set: Average loss: {:.4f}, Accuracy: {}/{} ({:.2f}%)\n'.format(
test_loss, correct, len(val_loader.dataset), 100. * accuracy))
return accuracy
def get_data(args):
    """Return (train_loader, val_loader, dummy_input); thin indirection over
    imagenet_dataset so the data source can be swapped in one place."""
    return imagenet_dataset(args)
if __name__ == '__main__':
    # Iteratively prune MobileNetV2 with a constraint-aware one-shot pruner:
    # the sparsity config is split into `segmentation` buckets and each bucket
    # is pruned, sped up, and finetuned in turn.
    print("Benchmark the constraint-aware one shot pruner.")
    args = parse_args()
    torch.manual_seed(0)
    Model = MobileNetV2
    device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
    train_loader, val_loader, dummy_input = get_data(args)
    net = Model(1000, profile='normal').to(device)
    if args.checkpoint:
        checkpoint = torch.load(args.checkpoint)
        net.load_state_dict(process_state_dict(checkpoint['state_dict']))
    # acc = test(net, device, torch.nn.CrossEntropyLoss(), val_loader)
    # print('Before pruning: %f' % (acc))
    with open(args.sparsity, 'r') as jf:
        cfglist = json.load(jf)
    # Select the pruner implementation.
    # NOTE(review): the first two branches are `if`/`if` rather than
    # `if`/`elif`; harmless because the choices are exclusive, but
    # inconsistent with the `elif` used for 'activation'.
    if args.type == 'l1':
        Pruner = Constrained_L1FilterPruner
    if args.type == 'l2':
        Pruner = Constrained_L2FilterPruner
    elif args.type == 'activation':
        Pruner = ConstrainedActivationMeanRankFilterPruner
    segmentation = 10  # number of pruning rounds (config buckets)
    # Layers whose output channels must be pruned together.
    channel_depen = ChannelDependency(net, dummy_input.to(device))
    c_dsets = channel_depen.dependency_sets
    dset_map = {}  # layer name -> its dependency set
    cfg_map = {}   # layer name -> its sparsity config entry
    print("Dependency sets")
    for dset in c_dsets:
        print(dset)
        for layer in dset:
            dset_map[layer] = dset
    for cfg in cfglist:
        name = cfg['op_names'][0]
        cfg_map[name] = cfg
    print(cfg_map)
    # Distribute the configs over `segmentation` buckets, always keeping the
    # members of one dependency set in the same bucket.
    cfg_bukets = []
    for i in range(segmentation):
        cfg_bukets.append([])
    visited = set()
    for cfg in cfglist:
        layer = cfg['op_names'][0]
        if layer in visited:
            continue
        # find the buckets with the least layers
        minimum_layer = len(cfg_bukets[0])
        index = 0
        for i in range(segmentation):
            if len(cfg_bukets[i]) < minimum_layer:
                minimum_layer = len(cfg_bukets[i])
                index = i
        for name in dset_map[layer]:
            if len(dset_map[layer])> 1:
                print('####################################dependency sets:', dset_map[layer])
            if name in cfg_map:
                print(name,':%f'%cfg_map[name]['sparsity'])
                cfg_bukets[index].append(cfg_map[name])
            visited.add(name)
    # print(cfg_bukets)
    print(cfglist)
    # One prune + speedup + finetune round per bucket.  The loop variable
    # deliberately reuses (and shadows) the name `cfglist`.
    # NOTE(review): finetuning is placed inside the bucket loop (iterative
    # prune-then-finetune) -- confirm against the original formatting.
    for cfglist in cfg_bukets:
        acc = test(net, device, torch.nn.CrossEntropyLoss(), val_loader)
        print('In a new iteration, Currently, ACC:', acc)
        print(cfglist)
        if len(cfglist) == 0:
            continue
        pruner = Pruner(net, cfglist, dummy_input.to(device))
        if isinstance(pruner, ConstrainedActivationMeanRankFilterPruner):
            # need to inference before the compress function
            for data, label in train_loader:
                data = data.to(device)
                net(data)
                break
        pruner.compress()
        mask_path = './mask_%f_%d_%s' % (args.lr, args.finetune_epochs, args.lr_decay)
        weight_path = './_ck_%f_%d_%s.pth' % (args.lr, args.finetune_epochs, args.lr_decay)
        pruner.export_model(weight_path, mask_path)
        # Unwrap and physically shrink the network according to the masks.
        pruner._unwrap_model()
        ms = ModelSpeedup(net, dummy_input.to(device), mask_path)
        ms.speedup_model()
        print('Model speedup finished')
        # Finetune the pruned network before the next pruning round.
        optimizer = torch.optim.SGD(net.parameters(), lr=args.lr,
                                    momentum=0.9,
                                    weight_decay=args.weight_decay)
        scheduler = None
        if args.lr_decay == 'multistep':
            scheduler = MultiStepLR(
                optimizer, milestones=[int(args.finetune_epochs*0.25), int(args.finetune_epochs*0.5), int(args.finetune_epochs*0.75)], gamma=0.1)
        elif args.lr_decay == 'cos':
            scheduler = CosineAnnealingLR(optimizer, T_max=args.finetune_epochs)
        elif args.lr_decay == 'step':
            scheduler = StepLR(optimizer, step_size=1, gamma=0.1)
        criterion = torch.nn.CrossEntropyLoss()
        if args.label_smothing:
            criterion = LabelSmoothingLoss(1000, args.label_smothing)
        acc = test(net, device, criterion, val_loader)
        print('After pruning: %f' % (acc))
        for epoch in range(args.finetune_epochs):
            train(args, net, device, train_loader,
                  criterion, optimizer, epoch)
            if scheduler:
                scheduler.step()
            acc = test(net, device, criterion, val_loader)
            print('Learning rate: ', scheduler.get_last_lr())
            print('Finetune Epoch %d, acc of original pruner %f'%(epoch, acc))
        acc = test(net, device, criterion, val_loader)
        print('After finetuning: %f' % (acc))
        flops, weights = count_flops_params(net, dummy_input.size())
        print('Flops:', flops)
| en | 0.543999 | # true_dist = pred.data.clone() # callback should be inserted between loss.backward() and optimizer.step() # sum up batch loss # get the index of the max log-probability # acc = test(net, device, torch.nn.CrossEntropyLoss(), val_loader) # print('Before pruning: %f' % (acc)) # find the buckets with the least layers ###################################dependency sets:', dset_map[layer]) # print(cfg_bukets) # need to inference before the compress function | 1.93488 | 2 |
nagiosapi/logic.py | stvnjacobs/nagiosapi | 2 | 6619679 | import os
from flask import abort, jsonify
def object_list_from_status_dat():
    """Parse the Nagios status.dat file into a list of objects.

    Each block such as::

        hoststatus {
        host_name=web01
        }

    becomes ``{"hoststatus": {"host_name": "web01"}}``.  The file path is
    read from the NAGIOS_STATUS_PATH environment variable, falling back to
    the stock Nagios location.
    """
    path = os.getenv("NAGIOS_STATUS_PATH",
                     default="/usr/local/nagios/var/status.dat")
    skip_characters = "\n", "#"  # skip comments and empty lines
    objects = []
    with open(path, "r") as f:
        lines = f.readlines()
    for line in lines:
        # Guard against a completely empty final line (no trailing newline),
        # which would make line[0] raise IndexError.
        if not line or line[0] in skip_characters:
            continue
        current = len(objects) - 1
        if "{" in line:  # the start of a block includes an open brace
            name = line[:-2].strip()  # the string before the curly
            objects.append({})
        elif "}" in line:  # the end of a block - so, this object is complete
            objects[current] = {name: objects[current]}
        else:  # must be a property within a block
            # Split on the FIRST '=' only; the old replace()-based extraction
            # corrupted values that themselves contained "<key>=".
            key, _, raw_value = line.partition("=")
            objects[current][key.strip()] = raw_value.strip()
    return objects
def status_json_or_404(endpoint=None, field=None, **kwargs):
    """Return matching status objects as a JSON response, or abort with 404.

    :param endpoint: object type to select, e.g. "hoststatus" or "servicestatus"
    :param field: if given, project each match down to this single field's value
    :param kwargs: attribute filters; entries whose value is None are ignored
    """
    # Drop filters that were passed as None (treated as "no constraint").
    filters = {k: v for k, v in kwargs.items() if v is not None}
    objects = object_list_from_status_dat()
    if endpoint:
        objects = list(
            filter(lambda o: endpoint in o, objects)
        )
    if filters:
        # list() forces each filter immediately, so the lambda's k/v are
        # evaluated before the loop variables move on (no late binding).
        # NOTE(review): filtering and `field` projection index o[endpoint];
        # this assumes callers always pass `endpoint` alongside them -- confirm.
        for k, v in filters.items():
            objects = list(
                filter(lambda o: k in o[endpoint] and o[endpoint][k] == v,
                       objects)
            )
    if field:
        objects = list(
            map(lambda o: o[endpoint][field], objects)
        )
    if not objects:
        abort(404)
    if len(objects) == 1:
        # A single match is returned as a bare object rather than a list.
        objects = objects[0]
    return jsonify(objects)
| import os
from flask import abort, jsonify
def object_list_from_status_dat():
    """Parse the Nagios status.dat file into a list of objects.

    Each block such as::

        hoststatus {
        host_name=web01
        }

    becomes ``{"hoststatus": {"host_name": "web01"}}``.  The file path is
    read from the NAGIOS_STATUS_PATH environment variable, falling back to
    the stock Nagios location.
    """
    path = os.getenv("NAGIOS_STATUS_PATH",
                     default="/usr/local/nagios/var/status.dat")
    skip_characters = "\n", "#"  # skip comments and empty lines
    objects = []
    with open(path, "r") as f:
        lines = f.readlines()
    for line in lines:
        # Guard against a completely empty final line (no trailing newline),
        # which would make line[0] raise IndexError.
        if not line or line[0] in skip_characters:
            continue
        current = len(objects) - 1
        if "{" in line:  # the start of a block includes an open brace
            name = line[:-2].strip()  # the string before the curly
            objects.append({})
        elif "}" in line:  # the end of a block - so, this object is complete
            objects[current] = {name: objects[current]}
        else:  # must be a property within a block
            # Split on the FIRST '=' only; the old replace()-based extraction
            # corrupted values that themselves contained "<key>=".
            key, _, raw_value = line.partition("=")
            objects[current][key.strip()] = raw_value.strip()
    return objects
def status_json_or_404(endpoint=None, field=None, **kwargs):
    """Return matching status objects as a JSON response, or abort with 404.

    :param endpoint: object type to select, e.g. "hoststatus" or "servicestatus"
    :param field: if given, project each match down to this single field's value
    :param kwargs: attribute filters; entries whose value is None are ignored
    """
    # Drop filters that were passed as None (treated as "no constraint").
    filters = {k: v for k, v in kwargs.items() if v is not None}
    objects = object_list_from_status_dat()
    if endpoint:
        objects = list(
            filter(lambda o: endpoint in o, objects)
        )
    if filters:
        # list() forces each filter immediately, so the lambda's k/v are
        # evaluated before the loop variables move on (no late binding).
        # NOTE(review): filtering and `field` projection index o[endpoint];
        # this assumes callers always pass `endpoint` alongside them -- confirm.
        for k, v in filters.items():
            objects = list(
                filter(lambda o: k in o[endpoint] and o[endpoint][k] == v,
                       objects)
            )
    if field:
        objects = list(
            map(lambda o: o[endpoint][field], objects)
        )
    if not objects:
        abort(404)
    if len(objects) == 1:
        # A single match is returned as a bare object rather than a list.
        objects = objects[0]
    return jsonify(objects)
| en | 0.86048 | # skip comments and empty lines # the start of a block includes an open brace # the string before the curly # the end of a block - so, this object is complete # must be a property within a block | 2.714833 | 3 |
Visualization/GraffitiOverTime/scripts/understand_graffiti_data.py | dhandeo/OpenDataDayAlbany2013 | 1 | 6619680 | import json
# Python 2 script: inspect the structure of the NYC graffiti Socrata export
# and list its column names.
# NOTE(review): `file` shadows the Python 2 builtin and is never closed --
# consider a `with` block if this script is ever touched again.
file = open("../server/static/data/nyc-graffiti.js")
info = json.load(file)
print info.keys()
print info["meta"].keys()
print 'Format \n######'
# Print each column's positional index next to its fieldName.
count = 0
for acolumn in info["meta"]["view"]["columns"]:
    print count, acolumn["fieldName"]
    count = count + 1
print ""
| import json
# Python 2 script: inspect the structure of the NYC graffiti Socrata export
# and list its column names.
# NOTE(review): `file` shadows the Python 2 builtin and is never closed --
# consider a `with` block if this script is ever touched again.
file = open("../server/static/data/nyc-graffiti.js")
info = json.load(file)
print info.keys()
print info["meta"].keys()
print 'Format \n######'
# Print each column's positional index next to its fieldName.
count = 0
for acolumn in info["meta"]["view"]["columns"]:
    print count, acolumn["fieldName"]
    count = count + 1
print ""
| none | 1 | 3.100466 | 3 | |
Data Analyst in Python/Step 5 - Probability and Statistics/4. Conditional Probability/1. Conditional Probability Fundamentals.py | MyArist/Dataquest | 8 | 6619681 | ## 2. Brief Recap ##
# One roll of a fair six-sided die: each face has probability 1/6.
p_2 = 1/6
p_odd = 3/6
p_2_or_4 = 2/6

## 3. Updating Probabilities ##
# New evidence restricts the sample space to four equally likely outcomes.
p_3 = 1/4
p_6 = 0/4
p_odd = 2/4
p_even = 2/4

## 4. Conditional Probability ##
# Conditioning event reduces the sample space to three months.
p_december = 1/3
p_31 = 2/3
p_summer = 0/3
p_ends_r = 1/3

## 5. Conditional Probability Formula ##
# P(A|B) = card(A and B) / card(B)
card_b = 21
card_a_and_b = 9
p_a_given_b = card_a_and_b / card_b

## 6. Example Walkthough ##
# P(negative test | no HIV): 6 negative results among 30 non-HIV individuals.
p_negative_given_non_hiv = 6/30
print(p_negative_given_non_hiv)

## 7. Probability Formula ##
# P(plan | browser) = users on that plan with the browser / browser total.
p_premium_given_chrome = 158/2762
p_basic_given_safari = 274/1288
p_free_given_firefox = 2103/2285
more_likely_premium = 'Safari' | ## 2. Brief Recap ##
# One roll of a fair six-sided die: each face has probability 1/6.
p_2 = 1/6
p_odd = 3/6
p_2_or_4 = 2/6

## 3. Updating Probabilities ##
# New evidence restricts the sample space to four equally likely outcomes.
p_3 = 1/4
p_6 = 0/4
p_odd = 2/4
p_even = 2/4

## 4. Conditional Probability ##
# Conditioning event reduces the sample space to three months.
p_december = 1/3
p_31 = 2/3
p_summer = 0/3
p_ends_r = 1/3

## 5. Conditional Probability Formula ##
# P(A|B) = card(A and B) / card(B)
card_b = 21
card_a_and_b = 9
p_a_given_b = card_a_and_b / card_b

## 6. Example Walkthough ##
# P(negative test | no HIV): 6 negative results among 30 non-HIV individuals.
p_negative_given_non_hiv = 6/30
print(p_negative_given_non_hiv)

## 7. Probability Formula ##
# P(plan | browser) = users on that plan with the browser / browser total.
p_premium_given_chrome = 158/2762
p_basic_given_safari = 274/1288
p_free_given_firefox = 2103/2285
more_likely_premium = 'Safari' | en | 0.430207 | ## 2. Brief Recap ## ## 3. Updating Probabilities ## ## 4. Conditional Probability ## ## 5. Conditional Probability Formula ## ## 6. Example Walkthough ## ## 7. Probability Formula ## | 2.452079 | 2 |
sentinel/discovery/layers/domain/service/generalisation/interface/swarm.py | alhassane/sentinel | 0 | 6619682 | # pylint: skip-file
"""Implement Service interface to to manage docker service"""
from zope.interface import Interface
from typing import Union
class SwarmInterface(Interface):
    """Interface to manage a docker swarm service."""
    # NOTE(review): zope.interface method declarations conventionally omit
    # `self`; declaring it here makes implementations appear to require an
    # extra positional argument under interface verification -- confirm
    # against the concrete implementers before changing the contract.
    def get_services_from_id(self, id):
        """Get services from a swarm service ID."""
    def get_containers_from_filters(self, filters: dict) -> Union[list, None]:
        """Get all containers of a service matching `filters`, or None."""
| # pylint: skip-file
"""Implement Service interface to to manage docker service"""
from zope.interface import Interface
from typing import Union
class SwarmInterface(Interface):
    """Interface to manage a docker swarm service."""
    # NOTE(review): zope.interface method declarations conventionally omit
    # `self`; declaring it here makes implementations appear to require an
    # extra positional argument under interface verification -- confirm
    # against the concrete implementers before changing the contract.
    def get_services_from_id(self, id):
        """Get services from a swarm service ID."""
    def get_containers_from_filters(self, filters: dict) -> Union[list, None]:
        """Get all containers of a service matching `filters`, or None."""
| en | 0.819776 | # pylint: skip-file Implement Service interface to to manage docker service Interface to manage docker service Get services from a swarm service ID Get all containers of a service from filters | 2.396176 | 2 |
IA/ASD/Sem 1 (Flavia)/Lab 2/3.py | worthl3ss/random-small | 1 | 6619683 | <reponame>worthl3ss/random-small
# Read an integer and print its distinct digits in ascending order,
# formatted like a set literal (a trailing '\b' erases the last comma).
n=int(input())
l=[]  # distinct digits collected so far
# NOTE(review): n == 0 never enters the loop (digit 0 is lost), and for a
# negative n, `n //= 10` floor-divides toward -1 and never reaches 0, so
# the loop does not terminate -- confirm inputs are positive integers.
while n!=0:
    if n%10 not in l:
        l.append(n%10)
    n//=10
l.sort()
print("{",end='')
for i in l:
    print(i,end=',')
print('\b}') | n=int(input())
l=[]  # distinct digits collected so far
# NOTE(review): n == 0 never enters the loop (digit 0 is lost), and for a
# negative n, `n //= 10` floor-divides toward -1 and never reaches 0, so
# the loop does not terminate -- confirm inputs are positive integers.
while n!=0:
    if n%10 not in l:
        l.append(n%10)
    n//=10
l.sort()
# Print the digits set-style; the final '\b' (printed by the next line of
# the script) erases the trailing comma.
print("{",end='')
for i in l:
    print(i,end=',')
print('\b}') | none | 1 | 3.365974 | 3 | |
apps/site/api/renderers/kml_renderer.py | LocalGround/localground | 9 | 6619684 | from StringIO import StringIO
from rest_framework import renderers
class KMLRenderer(renderers.BaseRenderer):
    """
    Renderer which serializes Local Ground data to KML.
    """
    media_type = 'application/vnd.google-earth.kml+xml'
    format = 'kml'
    level_sep = '.'
    headers = None

    def render(self, data, media_type=None, renderer_context=None):
        """
        Renders serialized *data* into KML.
        """
        # The previous implementation round-tripped the document through a
        # StringIO buffer for no effect; return the string directly.
        return self.build_kml(data)

    def build_kml(self, raw_data):
        """
        Returns a well-formatted KML string.

        Accepts a project (with photo/audio/marker children), a paginated
        list (under "results"), or a single record, and emits one
        <Placemark> per record that has point coordinates.
        """
        kml = KML()
        dataset = None
        if 'overlay_type' in raw_data and \
                raw_data['overlay_type'] == 'project':
            # instance of complex type: projects
            dataset = raw_data['children']['photos']['data'] + \
                raw_data['children']['audio']['data'] + \
                raw_data['children']['markers']['data']
        elif 'results' in raw_data:
            # list of simple type: photos, audio, or markers
            dataset = raw_data.get('results')
        else:
            # instance of simple type: photos, audio, or markers
            dataset = [raw_data]
        for data in dataset:
            # Records without point coordinates cannot be placed on a map.
            if (not data.get('geometry') or
                    not data.get('geometry').get('coordinates')):
                continue
            name = KML.as_node('name', [data.get('name')])
            cdata = None
            if 'file_path_orig' in data:
                cdata = KML.wrap_cdata(
                    data['overlay_type'], data['file_path_orig'])
            description = KML.as_node(
                'description', [cdata, data.get('caption')])
            coord = KML.get_coord(data['geometry'])
            point = KML.as_node('Point', [coord])
            placemark = KML.as_node('Placemark', [name, description, point])
            kml.append(placemark)
        return kml.get_kml()
class KML():
    """
    Simplified KML encoder with limited features for the Local Ground
    data export API.
    """
    prolog = '<?xml version="1.0" encoding="UTF-8"?>'
    namespace = 'http://www.opengis.net/kml/2.2'
    root = '<kml xmlns="{}"><Folder>'.format(namespace)
    closing_root = '</Folder></kml>'
    kml = ''

    def __init__(self):
        self.kml = '{}{}'.format(self.prolog, self.root)

    @staticmethod
    def as_node(tag, content=(), attriutes='', self_closing=False):
        """Build an XML element.

        `content` is an iterable of child strings; falsy entries are
        skipped.  The default is a tuple rather than the original mutable
        `[]` (shared-mutable-default pitfall).  `attriutes` (sic -- name
        kept for backward compatibility) is a raw attribute string.
        """
        if self_closing:
            return '<{}{} />'.format(tag, attriutes)
        opening = '<{}{}>'.format(tag, attriutes)
        closing = '</{}>'.format(tag)
        concat = ''.join([elem for elem in content if elem])
        return '{}{}{}'.format(opening, concat, closing)

    @staticmethod
    def wrap_cdata(datatype, url, url_msg='Link to attachment'):
        """Wrap an HTML preview of the attachment in a CDATA section.

        Photos get an inline <img>; markers get no preview; every other
        type gets a plain download link.
        """
        if datatype == 'photo':
            html = '<img src="{}" /><br />'.format(url)
        elif datatype != 'marker':
            html = '<a href="{}">{}</a><br />'.format(url, url_msg)
        else:
            html = ''
        return '<![CDATA[{}]]>'.format(html)

    @staticmethod
    def get_coord(geom):
        """Format a GeoJSON-style point geometry as a <coordinates> node."""
        return '<coordinates>{},{},0</coordinates>'.format(
            geom['coordinates'][0], geom['coordinates'][1])

    def append(self, new_node):
        """Append a serialized node to the document body."""
        self.kml = '{}{}'.format(self.kml, new_node)
        return self.get_kml()

    def get_kml(self):
        """Return the complete document with the closing tags attached."""
        return '{}{}'.format(self.kml, self.closing_root)
| from StringIO import StringIO
from rest_framework import renderers
class KMLRenderer(renderers.BaseRenderer):
    """
    Renderer which serializes Local Ground data to KML.
    """
    media_type = 'application/vnd.google-earth.kml+xml'
    format = 'kml'
    level_sep = '.'
    headers = None

    def render(self, data, media_type=None, renderer_context=None):
        """
        Renders serialized *data* into KML.
        """
        # The previous implementation round-tripped the document through a
        # StringIO buffer for no effect; return the string directly.
        return self.build_kml(data)

    def build_kml(self, raw_data):
        """
        Returns a well-formatted KML string.

        Accepts a project (with photo/audio/marker children), a paginated
        list (under "results"), or a single record, and emits one
        <Placemark> per record that has point coordinates.
        """
        kml = KML()
        dataset = None
        if 'overlay_type' in raw_data and \
                raw_data['overlay_type'] == 'project':
            # instance of complex type: projects
            dataset = raw_data['children']['photos']['data'] + \
                raw_data['children']['audio']['data'] + \
                raw_data['children']['markers']['data']
        elif 'results' in raw_data:
            # list of simple type: photos, audio, or markers
            dataset = raw_data.get('results')
        else:
            # instance of simple type: photos, audio, or markers
            dataset = [raw_data]
        for data in dataset:
            # Records without point coordinates cannot be placed on a map.
            if (not data.get('geometry') or
                    not data.get('geometry').get('coordinates')):
                continue
            name = KML.as_node('name', [data.get('name')])
            cdata = None
            if 'file_path_orig' in data:
                cdata = KML.wrap_cdata(
                    data['overlay_type'], data['file_path_orig'])
            description = KML.as_node(
                'description', [cdata, data.get('caption')])
            coord = KML.get_coord(data['geometry'])
            point = KML.as_node('Point', [coord])
            placemark = KML.as_node('Placemark', [name, description, point])
            kml.append(placemark)
        return kml.get_kml()
class KML():
    """
    Simplified KML encoder with limited features for the Local Ground
    data export API.
    """
    prolog = '<?xml version="1.0" encoding="UTF-8"?>'
    namespace = 'http://www.opengis.net/kml/2.2'
    root = '<kml xmlns="{}"><Folder>'.format(namespace)
    closing_root = '</Folder></kml>'
    kml = ''

    def __init__(self):
        self.kml = '{}{}'.format(self.prolog, self.root)

    @staticmethod
    def as_node(tag, content=(), attriutes='', self_closing=False):
        """Build an XML element.

        `content` is an iterable of child strings; falsy entries are
        skipped.  The default is a tuple rather than the original mutable
        `[]` (shared-mutable-default pitfall).  `attriutes` (sic -- name
        kept for backward compatibility) is a raw attribute string.
        """
        if self_closing:
            return '<{}{} />'.format(tag, attriutes)
        opening = '<{}{}>'.format(tag, attriutes)
        closing = '</{}>'.format(tag)
        concat = ''.join([elem for elem in content if elem])
        return '{}{}{}'.format(opening, concat, closing)

    @staticmethod
    def wrap_cdata(datatype, url, url_msg='Link to attachment'):
        """Wrap an HTML preview of the attachment in a CDATA section.

        Photos get an inline <img>; markers get no preview; every other
        type gets a plain download link.
        """
        if datatype == 'photo':
            html = '<img src="{}" /><br />'.format(url)
        elif datatype != 'marker':
            html = '<a href="{}">{}</a><br />'.format(url, url_msg)
        else:
            html = ''
        return '<![CDATA[{}]]>'.format(html)

    @staticmethod
    def get_coord(geom):
        """Format a GeoJSON-style point geometry as a <coordinates> node."""
        return '<coordinates>{},{},0</coordinates>'.format(
            geom['coordinates'][0], geom['coordinates'][1])

    def append(self, new_node):
        """Append a serialized node to the document body."""
        self.kml = '{}{}'.format(self.kml, new_node)
        return self.get_kml()

    def get_kml(self):
        """Return the complete document with the closing tags attached."""
        return '{}{}'.format(self.kml, self.closing_root)
| en | 0.715979 | Renderer which serializes to KML using the existing XML renderer Renders serialized *data* into KML. Returns a well-formatted KML string # instance of complex type: projects # list of simple type: photos, audio, or markers # instance of simple type: photos, audio, or markers Simplified KML encoder with limited features for Local Ground data export API | 2.62316 | 3 |
python_src/1_3.py | gleisonsdm/QIF_LIB | 0 | 6619685 | import pandas as pd
import sys
from tabulate import tabulate
def getPrior(filename):
    """Load the prior distribution: the first ';'-separated row of *filename*.

    Empty cells are interpreted as probability 0.0.
    """
    frame = pd.read_csv(filename, delimiter=";", header=None, nrows=1)
    return frame.fillna(0.0)
def printPrior(filename):
    """Print the prior as a markdown table with P(E_i)-style column headers."""
    table = getPrior(filename)
    table = table.add_prefix("P(E_").add_suffix(")")
    print(table.to_markdown())
def main():
    """CLI entry point: expects exactly one argument, the prior CSV path."""
    if len(sys.argv) != 2:
        print("File not found")
        return
    print('--------- Output ----------')
    printPrior(sys.argv[1])
    print('---------------------------')
main()
| import pandas as pd
import sys
from tabulate import tabulate
def getPrior(filename):
data = pd.read_csv(filename, delimiter=";", header=None, nrows=1)
data = data.fillna(0.0)
return data
def printPrior(filename):
prior = getPrior(filename)
prior = prior.add_prefix("P(E_")
prior = prior.add_suffix(")")
print(prior.to_markdown())
def main():
if len(sys.argv) == 2:
print('--------- Output ----------')
printPrior(sys.argv[1])
print('---------------------------')
else:
print("File not found")
main()
| none | 1 | 3.101907 | 3 | |
src/pyLox/pyLox.py | ronsh909/Pylox-Interpreter | 2 | 6619686 | '''
The module serves as the interpreter for the Lox language.
There are two modes to run the interpreter in:
1. REPL - by running the interpreter without an argument, you can enter statements, which will be executed, and expressions which will be evaluated
and displayed to the user on the terminal.
2. Source - by passing a source file as a command line argument, the interpreter will execute all statements and expressions in the source file.
The interpreter has 4 stages:
1. The scanner scans the source file and creates a list of tokens based on the input.
2. The parser parses the tokens into statements and expressions.
3. The resolver performs semantic analysis on the statements and expressions, such as resolving variable - tracking down to which declaration
a variable refers to.
4. The interpreter executes the statements and expressions.
'''
import sys
import argparse
from run_mode import RunMode as mode
from error_handler import ErrorHandler
from scanner import Scanner
from Lox_parser import Parser
from interpreter import Interpreter
from resolver import Resolver
class Lox:
    """Driver wiring together the scanner, parser, resolver and interpreter."""

    def __init__(self):
        self.error_handler = ErrorHandler()
        self.interpreter = Interpreter(self.error_handler)

    def run_file(self, path: str):
        """Execute a Lox source file; exit if any error was reported."""
        with open(path, "r") as f:
            source = "".join(f.readlines())
        self.run(source, mode.FILE)
        if self.error_handler.had_error or self.error_handler.had_runtime_error:
            sys.exit()

    def run_prompt(self):
        """Interactive REPL loop; error flags reset after every line."""
        try:
            while True:
                self.run(input(">"), mode.REPL)
                self.error_handler.had_error = False
                self.error_handler.had_runtime_error = False
        except KeyboardInterrupt:
            print("\nKeyboard interrupt.")

    def run(self, source: str, mode):
        """Run the four passes (scan, parse, resolve, interpret) on *source*.

        Stops after the first pass that reports an error.
        """
        tokens = Scanner(self.error_handler, source).scan_tokens()
        statements = Parser(tokens, self.error_handler).parse()
        if self.error_handler.had_error:
            return
        Resolver(self.interpreter, self.error_handler).resolve_list(statements)
        if self.error_handler.had_error:
            return
        self.interpreter.interpret(statements, mode)
if __name__ == "__main__":
Lox = Lox()
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument("script", nargs='?', type=str , default=None,
help="The path to the source file to be interpreted."+
" Path needs to be encapsulated with quotation marks.")
args = arg_parser.parse_args()
if args.script is not None:
Lox.run_file(args.script)
else:
Lox.run_prompt()
| '''
The module serves as the interpreter for the Lox language.
There are two modes to run the interpreter in:
1. REPL - by running the interpreter without an argument, you can enter statements, which will be executed, and expressions which will be evaluated
and displayed to the user on the terminal.
2. Source - by passing a source file as a command line argument, the interpreter will execute all statements and expressions in the source file.
The interpreter has 4 stages:
1. The scanner scans the source file and creates a list of tokens based on the input.
2. The parser parses the tokens into statements and expressions.
3. The resolver performs semantic analysis on the statements and expressions, such as resolving variable - tracking down to which declaration
a variable refers to.
4. The interpreter executes the statements and expressions.
'''
import sys
import argparse
from run_mode import RunMode as mode
from error_handler import ErrorHandler
from scanner import Scanner
from Lox_parser import Parser
from interpreter import Interpreter
from resolver import Resolver
class Lox:
def __init__(self):
self.error_handler = ErrorHandler()
self.interpreter = Interpreter(self.error_handler)
# Runs the interpreter with a source file.
def run_file(self, path: str):
with open(path, "r") as f:
self.run("".join(f.readlines()), mode.FILE)
if self.error_handler.had_error or self.error_handler.had_runtime_error:
sys.exit()
# Runs the interpreter in REPL mode.
def run_prompt(self):
try:
while True:
self.run(input(">"), mode.REPL)
self.error_handler.had_error = False
self.error_handler.had_runtime_error = False
except KeyboardInterrupt:
print ("\nKeyboard interrupt.")
# This functions performs the 4 passes: scanning, parsing, resolving & binding, and interpreting.
def run(self, source: str, mode):
scanner = Scanner(self.error_handler, source)
tokens = scanner.scan_tokens()
parser = Parser(tokens, self.error_handler)
statements = parser.parse()
if self.error_handler.had_error == True:
return
resolver = Resolver(self.interpreter, self.error_handler)
resolver.resolve_list(statements)
if self.error_handler.had_error == True:
return
self.interpreter.interpret(statements, mode)
if __name__ == "__main__":
Lox = Lox()
arg_parser = argparse.ArgumentParser()
arg_parser.add_argument("script", nargs='?', type=str , default=None,
help="The path to the source file to be interpreted."+
" Path needs to be encapsulated with quotation marks.")
args = arg_parser.parse_args()
if args.script is not None:
Lox.run_file(args.script)
else:
Lox.run_prompt()
| en | 0.867247 | The module serves as the interpreter for the Lox language.
There are two modes to run the interpreter in:
1. REPL - by running the interpreter without an argument, you can enter statements, which will be executed, and expressions which will be evaluated
and displayed to the user on the terminal.
2. Source - by passing a source file as a command line argument, the interpreter will execute all statements and expressions in the source file.
The interpreter has 4 stages:
1. The scanner scans the source file and creates a list of tokens based on the input.
2. The parser parses the tokens into statements and expressions.
3. The resolver performs semantic analysis on the statements and expressions, such as resolving variable - tracking down to which declaration
a variable refers to.
4. The interpreter executes the statements and expressions. # Runs the interpreter with a source file. # Runs the interpreter in REPL mode. # This functions performs the 4 passes: scanning, parsing, resolving & binding, and interpreting. | 3.737644 | 4 |
tests/test_parametric_components/test_ToroidalFieldCoilTripleArc.py | PullRequest-Agent/paramak | 0 | 6619687 |
import unittest
import paramak
import pytest
class test_ToroidalFieldCoilTripleArc(unittest.TestCase):

    def test_ToroidalFieldCoilTripleArc_creation_with_inner_leg(self):
        """creates a tf coil with inner leg using the ToroidalFieldCoilTripleArc
        parametric component and checks that a cadquery solid is created"""
        test_shape = paramak.ToroidalFieldCoilTripleArc(
            R1=100,
            h=100,
            radii=(100, 200),
            coverages=(10, 60),
            thickness=10,
            distance=50,
            number_of_coils=1,
            vertical_displacement=10,
            with_inner_leg=True
        )

        assert test_shape.solid is not None
        # loose sanity lower bound; the exact volume is not pinned here
        assert test_shape.volume > 1000

        assert test_shape.inner_leg_connection_points is not None
        # the connection points must be usable to extrude an inner-leg shape
        test_inner_leg = paramak.ExtrudeStraightShape(
            points=test_shape.inner_leg_connection_points, distance=0.5
        )
        assert test_inner_leg.solid is not None

    def test_ToroidalFieldCoilTripleArc_creation_no_inner_leg(self):
        """creates a tf coil with no inner leg using the ToroidalFieldCoilTripleArc
        parametric component and checks that a cadquery solid is created"""
        test_shape_1 = paramak.ToroidalFieldCoilTripleArc(
            R1=100, h=100, radii=(100, 200), coverages=(10, 60), thickness=10,
            distance=50, number_of_coils=1, vertical_displacement=10,
            with_inner_leg=True
        )
        test_volume_1 = test_shape_1.volume

        test_inner_leg = paramak.ExtrudeStraightShape(
            points=test_shape_1.inner_leg_connection_points, distance=50
        )
        inner_leg_volume = test_inner_leg.volume

        test_shape_2 = paramak.ToroidalFieldCoilTripleArc(
            R1=100, h=100, radii=(100, 200), coverages=(10, 60), thickness=10,
            distance=50, number_of_coils=1, vertical_displacement=10,
            with_inner_leg=False
        )
        assert test_shape_2.solid is not None
        # coil without the leg should equal coil-with-leg minus the leg volume
        assert test_shape_2.volume == pytest.approx(
            test_volume_1 - inner_leg_volume, rel=0.01)

    def test_ToroidalFieldCoilTripleArc_relative_volume(self):
        """creates tf coil shapes with different numbers of tf coils and checks that
        their relative volumes are correct"""
        test_shape_1 = paramak.ToroidalFieldCoilTripleArc(
            R1=100, h=100, radii=(100, 200), coverages=(10, 60), thickness=10,
            distance=50, number_of_coils=1, vertical_displacement=10,
            with_inner_leg=True
        )
        test_volume_1 = test_shape_1.volume

        # eight coils should scale the volume linearly (8x one coil)
        test_shape_2 = paramak.ToroidalFieldCoilTripleArc(
            R1=100, h=100, radii=(100, 200), coverages=(10, 60), thickness=10,
            distance=50, number_of_coils=8, vertical_displacement=10,
            with_inner_leg=True
        )

        assert test_shape_2.volume == pytest.approx(
            test_volume_1 * 8, rel=0.01)

    def test_ToroidalFieldCoilTripleArc_rotation_angle(self):
        """Creates tf coils with rotation_angles < 360 in different workplanes
        and checks that the correct cuts are performed and their volumes are
        correct."""
        test_shape = paramak.ToroidalFieldCoilTripleArc(
            R1=150,
            h=200,
            radii=(50, 50),
            coverages=(70, 70),
            thickness=50,
            distance=50,
            number_of_coils=8,
        )

        # halving the rotation angle should halve the volume (XZ workplane)
        test_shape.rotation_angle = 360
        test_shape.workplane = "XZ"
        test_volume = test_shape.volume
        test_shape.rotation_angle = 180
        assert test_shape.volume == pytest.approx(test_volume * 0.5, rel=0.01)

        # same check in the YZ workplane
        test_shape.rotation_angle = 360
        test_shape.workplane = "YZ"
        test_volume = test_shape.volume
        test_shape.rotation_angle = 180
        assert test_shape.volume == pytest.approx(test_volume * 0.5, rel=0.01)

        # this test will remain commented until workplane issue #308 is resolved
        # currently causes terminal to crash due to large number of unions
        # test_shape.rotation_angle = 360
        # test_shape.workplane = "XY"
        # test_volume = test_shape.volume
        # test_shape.rotation_angle = 180
        # assert test_shape.volume == pytest.approx(test_volume * 0.5)
|
import unittest
import paramak
import pytest
class test_ToroidalFieldCoilTripleArc(unittest.TestCase):
def test_ToroidalFieldCoilTripleArc_creation_with_inner_leg(self):
"""creates a tf coil with inner leg using the ToroidalFieldCoilTripleArc
parametric component and checks that a cadquery solid is created"""
test_shape = paramak.ToroidalFieldCoilTripleArc(
R1=100,
h=100,
radii=(100, 200),
coverages=(10, 60),
thickness=10,
distance=50,
number_of_coils=1,
vertical_displacement=10,
with_inner_leg=True
)
assert test_shape.solid is not None
assert test_shape.volume > 1000
assert test_shape.inner_leg_connection_points is not None
test_inner_leg = paramak.ExtrudeStraightShape(
points=test_shape.inner_leg_connection_points, distance=0.5
)
assert test_inner_leg.solid is not None
def test_ToroidalFieldCoilTripleArc_creation_no_inner_leg(self):
"""creates a tf coil with no inner leg using the ToroidalFieldCoilRectangle
parametric component and checks that a cadquery solid is created"""
test_shape_1 = paramak.ToroidalFieldCoilTripleArc(
R1=100, h=100, radii=(100, 200), coverages=(10, 60), thickness=10,
distance=50, number_of_coils=1, vertical_displacement=10,
with_inner_leg=True
)
test_volume_1 = test_shape_1.volume
test_inner_leg = paramak.ExtrudeStraightShape(
points=test_shape_1.inner_leg_connection_points, distance=50
)
inner_leg_volume = test_inner_leg.volume
test_shape_2 = paramak.ToroidalFieldCoilTripleArc(
R1=100, h=100, radii=(100, 200), coverages=(10, 60), thickness=10,
distance=50, number_of_coils=1, vertical_displacement=10,
with_inner_leg=False
)
assert test_shape_2.solid is not None
assert test_shape_2.volume == pytest.approx(
test_volume_1 - inner_leg_volume, rel=0.01)
def test_ToroidalFieldCoilTripleArc_relative_volume(self):
"""creates tf coil shapes with different numbers of tf coils and checks that
their relative volumes are correct"""
test_shape_1 = paramak.ToroidalFieldCoilTripleArc(
R1=100, h=100, radii=(100, 200), coverages=(10, 60), thickness=10,
distance=50, number_of_coils=1, vertical_displacement=10,
with_inner_leg=True
)
test_volume_1 = test_shape_1.volume
test_shape_2 = paramak.ToroidalFieldCoilTripleArc(
R1=100, h=100, radii=(100, 200), coverages=(10, 60), thickness=10,
distance=50, number_of_coils=8, vertical_displacement=10,
with_inner_leg=True
)
assert test_shape_2.volume == pytest.approx(
test_volume_1 * 8, rel=0.01)
def test_ToroidalFieldCoilTripleArc_rotation_angle(self):
"""Creates tf coils with rotation_angles < 360 in different workplanes
and checks that the correct cuts are performed and their volumes are
correct."""
test_shape = paramak.ToroidalFieldCoilTripleArc(
R1=150,
h=200,
radii=(50, 50),
coverages=(70, 70),
thickness=50,
distance=50,
number_of_coils=8,
)
test_shape.rotation_angle = 360
test_shape.workplane = "XZ"
test_volume = test_shape.volume
test_shape.rotation_angle = 180
assert test_shape.volume == pytest.approx(test_volume * 0.5, rel=0.01)
test_shape.rotation_angle = 360
test_shape.workplane = "YZ"
test_volume = test_shape.volume
test_shape.rotation_angle = 180
assert test_shape.volume == pytest.approx(test_volume * 0.5, rel=0.01)
# this test will remain commented until workplane issue #308 is resolved
# currently causes terminal to crash due to large number of unions
# test_shape.rotation_angle = 360
# test_shape.workplane = "XY"
# test_volume = test_shape.volume
# test_shape.rotation_angle = 180
# assert test_shape.volume == pytest.approx(test_volume * 0.5)
| en | 0.898619 | creates a tf coil with inner leg using the ToroidalFieldCoilTripleArc parametric component and checks that a cadquery solid is created creates a tf coil with no inner leg using the ToroidalFieldCoilRectangle parametric component and checks that a cadquery solid is created creates tf coil shapes with different numbers of tf coils and checks that their relative volumes are correct Creates tf coils with rotation_angles < 360 in different workplanes and checks that the correct cuts are performed and their volumes are correct. # this test will remain commented until workplane issue #308 is resolved # currently causes terminal to crash due to large number of unions # test_shape.rotation_angle = 360 # test_shape.workplane = "XY" # test_volume = test_shape.volume # test_shape.rotation_angle = 180 # assert test_shape.volume == pytest.approx(test_volume * 0.5) | 2.542714 | 3 |
templates and misc/cloudflare-cleaner.py | gonzaleztroyano/ASIR2-IAW-SCRIPT | 0 | 6619688 | #!/bin/env python3
import CloudFlare
import os
import sys
# def main():
# cf = CloudFlare.CloudFlare(token='<KEY>')
# zones = cf.zones.get()
# for zone in zones:
# zone_id = zone['id']
# zone_name = zone['name']
# print("zone_id=%s zone_name=%s" % (zone_id, zone_name))
def main():
    """Delete every DNS record named <dns_record>.<zone> from Cloudflare.

    Usage: example_delete_zone_entry.py <zone> <dns_record>
    """
    try:
        zone_name = sys.argv[1]
        dns_name = sys.argv[2]
    except IndexError:
        exit('usage: example_delete_zone_entry.py zone dns_record')

    # NOTE(review): 'TOKEN' is a placeholder -- a real API token must be
    # substituted (ideally read from the environment) before this runs.
    cf = CloudFlare.CloudFlare(token='TOKEN')

    # Resolve the zone name to its Cloudflare zone id.
    try:
        params = {'name':zone_name}
        zones = cf.zones.get(params=params)
    except CloudFlare.exceptions.CloudFlareAPIError as e:
        # presumably CloudFlareAPIError supports %d/%s conversion for its
        # code and message -- verify against the library's docs
        exit('/zones %d %s - api call failed' % (e, e))
    except Exception as e:
        exit('/zones.get - %s - api call failed' % (e))

    if len(zones) == 0:
        exit('/zones.get - %s - zone not found' % (zone_name))
    if len(zones) != 1:
        exit('/zones.get - %s - api call returned %d items' % (zone_name, len(zones)))

    zone = zones[0]
    zone_id = zone['id']
    zone_name = zone['name']
    print('ZONE:', zone_id, zone_name)

    # Fetch every record matching the fully-qualified record name.
    try:
        params = {'name':dns_name + '.' + zone_name}
        dns_records = cf.zones.dns_records.get(zone_id, params=params)
    except CloudFlare.exceptions.CloudFlareAPIError as e:
        exit('/zones/dns_records %s - %d %s - api call failed' % (dns_name, e, e))

    # Delete each matching record; report if none matched.
    found = False
    for dns_record in dns_records:
        dns_record_id = dns_record['id']
        dns_record_name = dns_record['name']
        dns_record_type = dns_record['type']
        dns_record_value = dns_record['content']
        print('DNS RECORD:', dns_record_id, dns_record_name, dns_record_type, dns_record_value)
        try:
            dns_record = cf.zones.dns_records.delete(zone_id, dns_record_id)
            print('DELETED')
        except CloudFlare.exceptions.CloudFlareAPIError as e:
            exit('/zones.dns_records.delete %s - %d %s - api call failed' % (dns_name, e, e))
        found = True
    if not found:
        print('RECORD NOT FOUND')
    exit(0)
if __name__ == '__main__':
main()
######
"""
for sub in {blog.blueskynepal,blog.mau123,blog.mau124,blog.ma,blog.miguelangel}
do
python3 cloudflare-cleaner.py villablanca.me $sub
done
REGEX for grep: (^[^;]+?(?=.villablanca.me))
https://regex101.com/r/cqc8al/1
""" | #!/bin/env python3
import CloudFlare
import os
import sys
# def main():
# cf = CloudFlare.CloudFlare(token='<KEY>')
# zones = cf.zones.get()
# for zone in zones:
# zone_id = zone['id']
# zone_name = zone['name']
# print("zone_id=%s zone_name=%s" % (zone_id, zone_name))
def main():
try:
zone_name = sys.argv[1]
dns_name = sys.argv[2]
except IndexError:
exit('usage: example_delete_zone_entry.py zone dns_record')
cf = CloudFlare.CloudFlare(token='TOKEN')
try:
params = {'name':zone_name}
zones = cf.zones.get(params=params)
except CloudFlare.exceptions.CloudFlareAPIError as e:
exit('/zones %d %s - api call failed' % (e, e))
except Exception as e:
exit('/zones.get - %s - api call failed' % (e))
if len(zones) == 0:
exit('/zones.get - %s - zone not found' % (zone_name))
if len(zones) != 1:
exit('/zones.get - %s - api call returned %d items' % (zone_name, len(zones)))
zone = zones[0]
zone_id = zone['id']
zone_name = zone['name']
print('ZONE:', zone_id, zone_name)
try:
params = {'name':dns_name + '.' + zone_name}
dns_records = cf.zones.dns_records.get(zone_id, params=params)
except CloudFlare.exceptions.CloudFlareAPIError as e:
exit('/zones/dns_records %s - %d %s - api call failed' % (dns_name, e, e))
found = False
for dns_record in dns_records:
dns_record_id = dns_record['id']
dns_record_name = dns_record['name']
dns_record_type = dns_record['type']
dns_record_value = dns_record['content']
print('DNS RECORD:', dns_record_id, dns_record_name, dns_record_type, dns_record_value)
try:
dns_record = cf.zones.dns_records.delete(zone_id, dns_record_id)
print('DELETED')
except CloudFlare.exceptions.CloudFlareAPIError as e:
exit('/zones.dns_records.delete %s - %d %s - api call failed' % (dns_name, e, e))
found = True
if not found:
print('RECORD NOT FOUND')
exit(0)
if __name__ == '__main__':
main()
######
"""
for sub in {blog.blueskynepal,blog.mau123,blog.mau124,blog.ma,blog.miguelangel}
do
python3 cloudflare-cleaner.py villablanca.me $sub
done
REGEX for grep: (^[^;]+?(?=.villablanca.me))
https://regex101.com/r/cqc8al/1
""" | en | 0.402995 | #!/bin/env python3 # def main(): # cf = CloudFlare.CloudFlare(token='<KEY>') # zones = cf.zones.get() # for zone in zones: # zone_id = zone['id'] # zone_name = zone['name'] # print("zone_id=%s zone_name=%s" % (zone_id, zone_name)) ###### for sub in {blog.blueskynepal,blog.mau123,blog.mau124,blog.ma,blog.miguelangel} do python3 cloudflare-cleaner.py villablanca.me $sub done REGEX for grep: (^[^;]+?(?=.villablanca.me)) https://regex101.com/r/cqc8al/1 | 2.708556 | 3 |
src/main/nspawn/setup/__init__.py | Andrei-Pozolotin/nspawn | 15 | 6619689 | """
Setup DSL
"""
# invoke engine
# NOTE: importing this module has a side effect -- invoke_main() runs the
# nspawn engine entry point for the calling 'setup.py' script before the
# DSL names below are exposed.
import nspawn.app.engine.invoke
nspawn.app.engine.invoke.invoke_main('setup.py')
# publish setup dsl
from nspawn import tool as TOOL
from nspawn.setuper.syntax import *
# Explicit public API: the TOOL alias plus the DSL verbs re-exported from
# the star-import above.
__all__ = [
    'TOOL',
    'IMAGE',
    'MACHINE',
    'WITH',
    'EXEC',
    'COPY',
    'CAST',
    'RUN',
    'SH',
]
| """
Setup DSL
"""
# invoke engine
import nspawn.app.engine.invoke
nspawn.app.engine.invoke.invoke_main('setup.py')
# publish setup dsl
from nspawn import tool as TOOL
from nspawn.setuper.syntax import *
__all__ = [
'TOOL',
'IMAGE',
'MACHINE',
'WITH',
'EXEC',
'COPY',
'CAST',
'RUN',
'SH',
]
| en | 0.519579 | Setup DSL # invoke engine # publish setup dsl | 1.340116 | 1 |
api/tests/test_views.py | jwbaldwin/novu-note | 1 | 6619690 | <reponame>jwbaldwin/novu-note
from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework.test import APIClient
from rest_framework import status
from django.urls import reverse
from ..models import Note
class ViewTestCase(TestCase):
    """Test suite for the api views."""

    def setUp(self):
        """Define the test client and other test variables."""
        # Initialize client and force it to use authentication
        self.user = User.objects.create(username="nerd")
        self.client = APIClient()
        self.client.force_authenticate(user=self.user)

        # Create a JSON POST request so every test starts with one note
        self.url = reverse('create')
        self.sample_note_data = {
            'text': 'A new idea!',
            'category_tags': ['django', 'testing'],
            'creator': self.user.id
        }
        self.response = self.client.post(
            self.url, self.sample_note_data, format='json')

    def test_api_can_create_a_note(self):
        """POST: Test the api has note creation capability."""
        self.assertEqual(self.response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(Note.objects.count(), 1)
        # Fetch the note once instead of issuing one query per assertion
        note = Note.objects.get()
        self.assertEqual(note.text, self.sample_note_data.get('text'))
        self.assertEqual(
            note.category_tags, self.sample_note_data.get('category_tags'))

    def test_api_can_get_a_note(self):
        """GET: Test the api can get a given note."""
        note = Note.objects.get()
        response = self.client.get(
            reverse('details', kwargs={'pk': note.id}),
            format="json"
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertContains(response, note)

    def test_api_can_update_note(self):
        """PUT: Test the api can update a given note."""
        note = Note.objects.get()
        modified_note = {
            'text': 'A modified new idea!',
            'category_tags': ['rest', 'test'],
            'creator': self.user.id
        }
        response = self.client.put(
            reverse('details', kwargs={'pk': note.id}),
            modified_note,
            format='json'
        )
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        self.assertEqual(
            Note.objects.get().category_tags,
            modified_note.get('category_tags'))

    def test_api_can_delete_note(self):
        """DELETE: Test the api can delete a note."""
        note = Note.objects.get()
        response = self.client.delete(
            reverse('details', kwargs={'pk': note.id}),
            format='json',
            follow=True)
        # assertEquals is a deprecated alias (removed in Python 3.12);
        # use assertEqual.
        self.assertEqual(response.status_code, status.HTTP_204_NO_CONTENT)
        self.assertEqual(Note.objects.count(), 0)
| from django.test import TestCase
from django.contrib.auth.models import User
from rest_framework.test import APIClient
from rest_framework import status
from django.urls import reverse
from ..models import Note
class ViewTestCase(TestCase):
"""Test suite for the api views."""
def setUp(self):
"""Define the test client and other test variables."""
# Initialize client and force it to use authentication
self.user = User.objects.create(username="nerd")
self.client = APIClient()
self.client.force_authenticate(user=self.user)
# Create a JSON POST request
self.url = reverse('create')
self.sample_note_data = {
'text': 'A new idea!',
'category_tags': ['django', 'testing'],
'creator': self.user.id
}
self.response = self.client.post(
self.url, self.sample_note_data, format='json')
def test_api_can_create_a_note(self):
"""POST: Test the api has note creation capability."""
self.assertEqual(self.response.status_code, status.HTTP_201_CREATED)
self.assertEqual(Note.objects.count(), 1)
self.assertEqual(Note.objects.get().text, self.sample_note_data.get('text'))
self.assertEqual(Note.objects.get().category_tags, self.sample_note_data.get('category_tags'))
def test_api_can_get_a_note(self):
"""GET: Test the api can get a given note."""
note = Note.objects.get()
response = self.client.get(
reverse('details',
kwargs= { 'pk': note.id }),
format="json"
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertContains(response, note)
def test_api_can_update_note(self):
"""PUT: Test the api can update a given note."""
note = Note.objects.get()
modified_note = {
'text': 'A modified new idea!',
'category_tags': ['rest', 'test'],
'creator': self.user.id
}
response = self.client.put(
reverse('details',
kwargs= { 'pk': note.id }),
modified_note,
format='json'
)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertEqual(Note.objects.get().category_tags, modified_note.get('category_tags'))
def test_api_can_delete_note(self):
"""DELETE: Test the api can delete a note."""
note = Note.objects.get()
response = self.client.delete(
reverse('details',
kwargs={ 'pk': note.id }),
format='json',
follow=True)
self.assertEquals(response.status_code, status.HTTP_204_NO_CONTENT)
self.assertEqual(Note.objects.count(), 0) | en | 0.757715 | Test suite for the api views. Define the test client and other test variables. # Initialize client and force it to use authentication # Create a JSON POST request POST: Test the api has note creation capability. GET: Test the api can get a given note. PUT: Test the api can update a given note. DELETE: Test the api can delete a note. | 2.687098 | 3 |
amadeus/travel/predictions/_flight_delay.py | akshitsingla/amadeus-python | 125 | 6619691 | from amadeus.client.decorator import Decorator
class FlightDelay(Decorator, object):
    def get(self, **params):
        '''
        Forecast the probability that a given flight will be delayed.

        .. code-block:: python

            amadeus.travel.predictions.flight_delay.get(
                originLocationCode='NCE', destinationLocationCode='IST',
                departureDate='2020-08-01', departureTime='18:20:00',
                arrivalDate='2020-08-01', arrivalTime='22:15:00',
                aircraftCode='321', carrierCode='TK',
                flightNumber='1816', duration='PT31H10M')

        :param originLocationCode: IATA City/Airport code the flight
            departs from, e.g. ``"NYC"`` for New York
        :param destinationLocationCode: IATA City/Airport code the flight
            arrives at, e.g. ``"MAD"`` for Madrid
        :param departureDate: departure date, ``YYYY-MM-DD``
        :param departureTime: local departure time, ``HH:MM:SS``
        :param arrivalDate: arrival date, ``YYYY-MM-DD``
        :param arrivalTime: local arrival time, ``HH:MM:SS``
        :param aircraftCode: IATA aircraft code
        :param carrierCode: airline / carrier code
        :param flightNumber: flight number as assigned by the carrier
        :param duration: flight duration in ``PnYnMnDTnHnMnS`` format,
            e.g. ``PT2H10M``

        :rtype: amadeus.Response
        :raises amadeus.ResponseError: if the request could not be completed
        '''
        endpoint = '/v1/travel/predictions/flight-delay'
        return self.client.get(endpoint, **params)
| from amadeus.client.decorator import Decorator
class FlightDelay(Decorator, object):
def get(self, **params):
'''
Forecast the chances for a flight to be delayed
.. code-block:: python
amadeus.travel.predictions.flight_delay.get(originLocationCode='NCE',
destinationLocationCode='IST',
departureDate='2020-08-01',
departureTime='18:20:00',
arrivalDate='2020-08-01',
arrivalTime='22:15:00',
aircraftCode='321',
carrierCode='TK',
flightNumber='1816',
duration='PT31H10M')
:param originLocationCode: the City/Airport IATA code from which
the flight will depart. ``"NYC"``, for example for New York
:param destinationLocationCode: the City/Airport IATA code to which
the flight is going. ``"MAD"``, for example for Madrid
:param departureDate: the date on which the traveler departs
from the origin, in `YYYY-MM-DD` format
:param departureTime: local time on which to fly out,
in `HH:MM:SS` format
:param arrivalDate: the date on which the traveler arrives
to the destination, in `YYYY-MM-DD` format
:param arrivalTime: local time on which the traveler arrives
to the destination, in `HH:MM:SS` format
:param aircraftCode: IATA aircraft code
:param carrierCode: airline / carrier code
:param flightNumber: flight number as assigned by the carrier
:param duration: flight duration,
in `PnYnMnDTnHnMnS` format e.g. PT2H10M
:rtype: amadeus.Response
:raises amadeus.ResponseError: if the request could not be completed
'''
return self.client.get('/v1/travel/predictions/flight-delay', **params)
| en | 0.742713 | Forecast the chances for a flight to be delayed .. code-block:: python amadeus.travel.predictions.flight_delay.get(originLocationCode='NCE', destinationLocationCode='IST', departureDate='2020-08-01', departureTime='18:20:00', arrivalDate='2020-08-01', arrivalTime='22:15:00', aircraftCode='321', carrierCode='TK', flightNumber='1816', duration='PT31H10M') :param originLocationCode: the City/Airport IATA code from which the flight will depart. ``"NYC"``, for example for New York :param destinationLocationCode: the City/Airport IATA code to which the flight is going. ``"MAD"``, for example for Madrid :param departureDate: the date on which the traveler departs from the origin, in `YYYY-MM-DD` format :param departureTime: local time on which to fly out, in `HH:MM:SS` format :param arrivalDate: the date on which the traveler arrives to the destination, in `YYYY-MM-DD` format :param arrivalTime: local time on which the traveler arrives to the destination, in `HH:MM:SS` format :param aircraftCode: IATA aircraft code :param carrierCode: airline / carrier code :param flightNumber: flight number as assigned by the carrier :param duration: flight duration, in `PnYnMnDTnHnMnS` format e.g. PT2H10M :rtype: amadeus.Response :raises amadeus.ResponseError: if the request could not be completed | 2.983261 | 3 |
nlp_gym/envs/quality_estimation/observation.py | lipucky/nlp-gym | 0 | 6619692 | <filename>nlp_gym/envs/quality_estimation/observation.py
from dataclasses import dataclass
from typing import List
from nlp_gym.envs.common.observation import BaseObservation
@dataclass
class Observation(BaseObservation):
    """Observation for quality estimation: a source sentence together with
    the tentative translation produced so far."""
    src_sentence: str
    tent_translated_sentence: List[str]
    time_step: int
    total_steps: int

    @classmethod
    def build(cls, src_sentence: str, tent_translated_sentence: List,
              time_step: int, total_steps: int):
        """Alternate constructor mirroring the dataclass __init__."""
        return Observation(src_sentence, tent_translated_sentence,
                           time_step, total_steps)

    def get_updated_observation(self, action: str) -> 'Observation':
        """Append *action* to the tentative translation in place; return self."""
        self.tent_translated_sentence.append(action)
        return self
| <filename>nlp_gym/envs/quality_estimation/observation.py
from dataclasses import dataclass
from typing import List
from nlp_gym.envs.common.observation import BaseObservation
@dataclass(init=True)
class Observation(BaseObservation):
src_sentence: str
tent_translated_sentence: List[str]
time_step: int
total_steps: int
@classmethod
def build(cls, src_sentence: str, tent_translated_sentence: List,
time_step: int, total_steps: int):
observation = Observation(src_sentence, tent_translated_sentence, time_step, total_steps)
return observation
def get_updated_observation(self, action: str) -> 'Observation':
self.tent_translated_sentence.append(action)
return self
| none | 1 | 2.642687 | 3 | |
locuspocus/Fasta.py | jonahcullen/LocusPocus | 0 | 6619693 | <reponame>jonahcullen/LocusPocus
from collections import defaultdict
import logging
import re
import numpy as np
from minus80 import Freezable
from minus80.RawFile import RawFile
import reprlib
import pprint
from functools import lru_cache
from locuspocus import Chromosome
class Fasta(Freezable):
    '''
    A pythonic interface to a FASTA file. This interface
    allows convenient slicing into contigs (chromosomes).

    >>> from locuspocus import Fasta
    >>> x = Fasta.from_file('example.fa')
    '''
    # Class-level logger shared by all Fasta instances, configured once
    # at class-definition time.
    log = logging.getLogger(__name__)
    handler = logging.StreamHandler()
    formatter = logging.Formatter(
        '%(asctime)s %(name)-12s %(levelname)-8s %(message)s'
    )
    handler.setFormatter(formatter)
    # Avoid attaching duplicate handlers if this module is imported again.
    if not len(log.handlers):
        log.addHandler(handler)
    log.setLevel(logging.INFO)
def __init__(self,name,parent=None):
'''
Load a Fasta object from the Minus80.
Parameters
----------
name : str
The name of the frozen object
Returns
-------
A Fasta object
'''
super().__init__(name,parent=parent)
# Load up from the database
self._initialize_tables()
def _initialize_tables(self):
'''
Initialize the tables for the FASTA class
NOTE: internal method
'''
cur = self._db.cursor()
cur.execute('''
CREATE TABLE IF NOT EXISTS added_order (
aorder INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT
);
''')
cur.execute('''
CREATE TABLE IF NOT EXISTS nicknames (
nickname TEXT,
chrom TEXT,
PRIMARY KEY(nickname,chrom),
FOREIGN KEY(chrom) REFERENCES chroms(chrom)
)
''')
cur.execute('''
CREATE TABLE IF NOT EXISTS attributes (
chrom TEXT,
attribute TEXT,
PRIMARY KEY(chrom,attribute),
FOREIGN KEY(chrom) REFERENCES chroms(chrom)
)
''')
def add_chrom(self,chrom,cur=None,force=False):
'''
Add a chromosome to the Fasta object.
Parameters
----------
name : str
The name of the chromosome
'''
self.log.info(f'Adding {chrom.name}')
# Check for duplicates
if chrom.name in self:
if not force:
raise ValueError(f'{chrom.name} already in FASTA')
else:
if cur is None:
cur = self._db.cursor()
cur.execute(
'''
INSERT OR REPLACE INTO added_order
(name)
VALUES (?)
''',(chrom.name,)
)
for x in chrom._attrs:
self._add_attribute(chrom.name,x)
seqarray = np.array(chrom.seq)
self._bcolz_array(chrom.name,seqarray)
self.cache_clear()
def chrom_names(self):
'''
Returns an iterable of chromosome names
Parameters
----------
None
Returns
-------
An iterable of chromosome names in added order
'''
return (x for (x,) in self._db.cursor().execute('''
SELECT name FROM added_order ORDER BY aorder
'''))
def cache_clear(self):
self.__getitem__.cache_clear()
@classmethod
def from_file(cls,name,fasta_file,force=False,parent=None):
'''
Create a Fasta object from a file.
'''
self = cls(name,parent=parent)
with RawFile(fasta_file) as IN, self._db as db:
cur = db.cursor()
cur_chrom = None
seqs = []
name, attrs = None,None
for line in IN:
line = line.strip()
if line.startswith('>'):
# Finish the last chromosome before adding a new one
if len(seqs) > 0:
cur_chrom = Chromosome(name,seqs,*attrs)
self.add_chrom(cur_chrom,cur=cur,force=force)
seqs = []
name,*attrs = line.lstrip('>').split()
else:
seqs += line
#cur_chrom.seq = np.append(cur_chrom.seq,list(line))
# Add the last chromosome
cur_chrom = Chromosome(name,seqs,*attrs)
self.add_chrom(cur_chrom,cur=cur,force=force)
return self
def __iter__(self):
'''
Iterate over chromosome objects
'''
chroms = self._db.cursor().execute('SELECT name FROM added_order ORDER BY aorder')
for (chrom,) in chroms:
yield self[chrom]
def __len__(self):
'''
Returns the number of chroms in the Fasta
'''
return self._db.cursor().execute('''
SELECT COUNT(*) FROM added_order
''').fetchone()[0]
def __contains__(self,obj):
'''
Returns boolean indicating if a named
contig (chromosome) is in the fasta.
'''
if isinstance(obj,Chromosome):
obj = obj.name
cur = self._db.cursor()
# Check if in chrom names
in_added = cur.execute('''
SELECT COUNT(*) FROM added_order
WHERE name = ?
''',(obj,)).fetchone()[0]
if in_added == 1:
return True
# Check if in aliases
in_alias = cur.execute('''
SELECT COUNT(*) FROM nicknames
WHERE nickname = ?
''',(obj,)).fetchone()[0]
if in_alias == 1:
return True
# Otherise its not here
return False
@lru_cache(maxsize=128)
def __getitem__(self,chrom_name):
if chrom_name not in self:
raise ValueError(f'{chrom_name} not in {self._m80_name}')
try:
seq_array = self._bcolz_array(chrom_name)
except Exception as e:
chrom_name = self._get_nickname(chrom_name)
seq_array = self._bcolz_array(chrom_name)
finally:
attrs = [x[0] for x in self._db.cursor().execute('''
SELECT attribute FROM attributes
WHERE chrom = ?
ORDER BY rowid -- This preserves the ordering of attrs
''',(chrom_name,))]
return Chromosome(chrom_name,seq_array,*attrs)
def to_fasta(self,filename,line_length=70):
'''
Print the chromosomes to a file in FASTA format
Paramaters
----------
filename : str
The output filename
line_length : int (default: 70)
The number of nucleotides per line
Returns
-------
None
'''
with open(filename,'w') as OUT:
for chrom_name in self.chrom_names():
print(f'Printing out {chrom_name}')
chrom = self[chrom_name]
#easy_id = ids[chrom_name]
start_length = len(chrom)
#if easy_id == 'chrUn':
# easy_id = easy_id + '_' + chrom_name
print(f'>{chrom_name} {"|".join(chrom._attrs)}',file=OUT)
printed_length = 0
for i in range(0,len(chrom),70):
sequence = chrom.seq[i:i+70]
print(''.join(sequence),file=OUT)
printed_length += len(sequence)
if printed_length != start_length:
raise ValueError('Chromosome was truncated during printing')
return None
def _add_attribute(self,chrom_name,attr,cur=None):
'''
Add an attribute the the Fasta object.
Attributes describe chromosomes and
often follow the '>' token in the FASTA file.
Parameters
----------
chrom_name : str
The name of the chromosome you are adding
an attribute to
attr : str
the attribute you are adding
'''
if cur is None:
cur = self._db.cursor()
cur.execute(
'''
INSERT INTO attributes
(chrom,attribute)
VALUES (?,?)
''',
(chrom_name,attr)
)
self.cache_clear()
def _add_nickname(self,chrom,nickname,cur=None):
'''
Add a nickname for a chromosome
Parameters
----------
chrom : str
The chromosome you want to nickname
nickname : str
The alternative name for the chromosome
'''
if cur is None:
cur = self._db.cursor()
cur.execute(
'''
INSERT OR REPLACE INTO nicknames
(nickname,chrom)
VALUES (?,?)
''',
(nickname,chrom)
)
def _get_nickname(self,nickname):
'''
Get a chromosomem name by nickname
'''
return self._db.cursor().execute('''
SELECT chrom FROM nicknames
WHERE nickname = ?
''',(nickname,)).fetchone()[0]
def __repr__(self): #pragma: nocover
return pprint.saferepr(
reprlib.repr(list(self))
)
| from collections import defaultdict
import logging
import re
import numpy as np
from minus80 import Freezable
from minus80.RawFile import RawFile
import reprlib
import pprint
from functools import lru_cache
from locuspocus import Chromosome
class Fasta(Freezable):
'''
A pythonic interface to a FASTA file. This interface
allows convenient slicing into contigs (chromosomes).
>>> from locuspocus import Fasta
>>> x = Fasta.from_file('example.fa')
'''
log = logging.getLogger(__name__)
handler = logging.StreamHandler()
formatter = logging.Formatter(
'%(asctime)s %(name)-12s %(levelname)-8s %(message)s'
)
handler.setFormatter(formatter)
if not len(log.handlers):
log.addHandler(handler)
log.setLevel(logging.INFO)
def __init__(self,name,parent=None):
'''
Load a Fasta object from the Minus80.
Parameters
----------
name : str
The name of the frozen object
Returns
-------
A Fasta object
'''
super().__init__(name,parent=parent)
# Load up from the database
self._initialize_tables()
def _initialize_tables(self):
'''
Initialize the tables for the FASTA class
NOTE: internal method
'''
cur = self._db.cursor()
cur.execute('''
CREATE TABLE IF NOT EXISTS added_order (
aorder INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT
);
''')
cur.execute('''
CREATE TABLE IF NOT EXISTS nicknames (
nickname TEXT,
chrom TEXT,
PRIMARY KEY(nickname,chrom),
FOREIGN KEY(chrom) REFERENCES chroms(chrom)
)
''')
cur.execute('''
CREATE TABLE IF NOT EXISTS attributes (
chrom TEXT,
attribute TEXT,
PRIMARY KEY(chrom,attribute),
FOREIGN KEY(chrom) REFERENCES chroms(chrom)
)
''')
def add_chrom(self,chrom,cur=None,force=False):
'''
Add a chromosome to the Fasta object.
Parameters
----------
name : str
The name of the chromosome
'''
self.log.info(f'Adding {chrom.name}')
# Check for duplicates
if chrom.name in self:
if not force:
raise ValueError(f'{chrom.name} already in FASTA')
else:
if cur is None:
cur = self._db.cursor()
cur.execute(
'''
INSERT OR REPLACE INTO added_order
(name)
VALUES (?)
''',(chrom.name,)
)
for x in chrom._attrs:
self._add_attribute(chrom.name,x)
seqarray = np.array(chrom.seq)
self._bcolz_array(chrom.name,seqarray)
self.cache_clear()
def chrom_names(self):
'''
Returns an iterable of chromosome names
Parameters
----------
None
Returns
-------
An iterable of chromosome names in added order
'''
return (x for (x,) in self._db.cursor().execute('''
SELECT name FROM added_order ORDER BY aorder
'''))
def cache_clear(self):
self.__getitem__.cache_clear()
@classmethod
def from_file(cls,name,fasta_file,force=False,parent=None):
'''
Create a Fasta object from a file.
'''
self = cls(name,parent=parent)
with RawFile(fasta_file) as IN, self._db as db:
cur = db.cursor()
cur_chrom = None
seqs = []
name, attrs = None,None
for line in IN:
line = line.strip()
if line.startswith('>'):
# Finish the last chromosome before adding a new one
if len(seqs) > 0:
cur_chrom = Chromosome(name,seqs,*attrs)
self.add_chrom(cur_chrom,cur=cur,force=force)
seqs = []
name,*attrs = line.lstrip('>').split()
else:
seqs += line
#cur_chrom.seq = np.append(cur_chrom.seq,list(line))
# Add the last chromosome
cur_chrom = Chromosome(name,seqs,*attrs)
self.add_chrom(cur_chrom,cur=cur,force=force)
return self
def __iter__(self):
'''
Iterate over chromosome objects
'''
chroms = self._db.cursor().execute('SELECT name FROM added_order ORDER BY aorder')
for (chrom,) in chroms:
yield self[chrom]
def __len__(self):
'''
Returns the number of chroms in the Fasta
'''
return self._db.cursor().execute('''
SELECT COUNT(*) FROM added_order
''').fetchone()[0]
def __contains__(self,obj):
'''
Returns boolean indicating if a named
contig (chromosome) is in the fasta.
'''
if isinstance(obj,Chromosome):
obj = obj.name
cur = self._db.cursor()
# Check if in chrom names
in_added = cur.execute('''
SELECT COUNT(*) FROM added_order
WHERE name = ?
''',(obj,)).fetchone()[0]
if in_added == 1:
return True
# Check if in aliases
in_alias = cur.execute('''
SELECT COUNT(*) FROM nicknames
WHERE nickname = ?
''',(obj,)).fetchone()[0]
if in_alias == 1:
return True
# Otherise its not here
return False
@lru_cache(maxsize=128)
def __getitem__(self,chrom_name):
if chrom_name not in self:
raise ValueError(f'{chrom_name} not in {self._m80_name}')
try:
seq_array = self._bcolz_array(chrom_name)
except Exception as e:
chrom_name = self._get_nickname(chrom_name)
seq_array = self._bcolz_array(chrom_name)
finally:
attrs = [x[0] for x in self._db.cursor().execute('''
SELECT attribute FROM attributes
WHERE chrom = ?
ORDER BY rowid -- This preserves the ordering of attrs
''',(chrom_name,))]
return Chromosome(chrom_name,seq_array,*attrs)
def to_fasta(self,filename,line_length=70):
'''
Print the chromosomes to a file in FASTA format
Paramaters
----------
filename : str
The output filename
line_length : int (default: 70)
The number of nucleotides per line
Returns
-------
None
'''
with open(filename,'w') as OUT:
for chrom_name in self.chrom_names():
print(f'Printing out {chrom_name}')
chrom = self[chrom_name]
#easy_id = ids[chrom_name]
start_length = len(chrom)
#if easy_id == 'chrUn':
# easy_id = easy_id + '_' + chrom_name
print(f'>{chrom_name} {"|".join(chrom._attrs)}',file=OUT)
printed_length = 0
for i in range(0,len(chrom),70):
sequence = chrom.seq[i:i+70]
print(''.join(sequence),file=OUT)
printed_length += len(sequence)
if printed_length != start_length:
raise ValueError('Chromosome was truncated during printing')
return None
def _add_attribute(self,chrom_name,attr,cur=None):
'''
Add an attribute the the Fasta object.
Attributes describe chromosomes and
often follow the '>' token in the FASTA file.
Parameters
----------
chrom_name : str
The name of the chromosome you are adding
an attribute to
attr : str
the attribute you are adding
'''
if cur is None:
cur = self._db.cursor()
cur.execute(
'''
INSERT INTO attributes
(chrom,attribute)
VALUES (?,?)
''',
(chrom_name,attr)
)
self.cache_clear()
def _add_nickname(self,chrom,nickname,cur=None):
'''
Add a nickname for a chromosome
Parameters
----------
chrom : str
The chromosome you want to nickname
nickname : str
The alternative name for the chromosome
'''
if cur is None:
cur = self._db.cursor()
cur.execute(
'''
INSERT OR REPLACE INTO nicknames
(nickname,chrom)
VALUES (?,?)
''',
(nickname,chrom)
)
def _get_nickname(self,nickname):
'''
Get a chromosomem name by nickname
'''
return self._db.cursor().execute('''
SELECT chrom FROM nicknames
WHERE nickname = ?
''',(nickname,)).fetchone()[0]
def __repr__(self): #pragma: nocover
return pprint.saferepr(
reprlib.repr(list(self))
) | en | 0.510397 | A pythonic interface to a FASTA file. This interface allows convenient slicing into contigs (chromosomes). >>> from locuspocus import Fasta >>> x = Fasta.from_file('example.fa') Load a Fasta object from the Minus80. Parameters ---------- name : str The name of the frozen object Returns ------- A Fasta object # Load up from the database Initialize the tables for the FASTA class NOTE: internal method CREATE TABLE IF NOT EXISTS added_order ( aorder INTEGER PRIMARY KEY AUTOINCREMENT, name TEXT ); CREATE TABLE IF NOT EXISTS nicknames ( nickname TEXT, chrom TEXT, PRIMARY KEY(nickname,chrom), FOREIGN KEY(chrom) REFERENCES chroms(chrom) ) CREATE TABLE IF NOT EXISTS attributes ( chrom TEXT, attribute TEXT, PRIMARY KEY(chrom,attribute), FOREIGN KEY(chrom) REFERENCES chroms(chrom) ) Add a chromosome to the Fasta object. Parameters ---------- name : str The name of the chromosome # Check for duplicates INSERT OR REPLACE INTO added_order (name) VALUES (?) Returns an iterable of chromosome names Parameters ---------- None Returns ------- An iterable of chromosome names in added order SELECT name FROM added_order ORDER BY aorder Create a Fasta object from a file. # Finish the last chromosome before adding a new one #cur_chrom.seq = np.append(cur_chrom.seq,list(line)) # Add the last chromosome Iterate over chromosome objects Returns the number of chroms in the Fasta SELECT COUNT(*) FROM added_order Returns boolean indicating if a named contig (chromosome) is in the fasta. # Check if in chrom names SELECT COUNT(*) FROM added_order WHERE name = ? # Check if in aliases SELECT COUNT(*) FROM nicknames WHERE nickname = ? # Otherise its not here SELECT attribute FROM attributes WHERE chrom = ? 
ORDER BY rowid -- This preserves the ordering of attrs Print the chromosomes to a file in FASTA format Paramaters ---------- filename : str The output filename line_length : int (default: 70) The number of nucleotides per line Returns ------- None #easy_id = ids[chrom_name] #if easy_id == 'chrUn': # easy_id = easy_id + '_' + chrom_name Add an attribute the the Fasta object. Attributes describe chromosomes and often follow the '>' token in the FASTA file. Parameters ---------- chrom_name : str The name of the chromosome you are adding an attribute to attr : str the attribute you are adding INSERT INTO attributes (chrom,attribute) VALUES (?,?) Add a nickname for a chromosome Parameters ---------- chrom : str The chromosome you want to nickname nickname : str The alternative name for the chromosome INSERT OR REPLACE INTO nicknames (nickname,chrom) VALUES (?,?) Get a chromosomem name by nickname SELECT chrom FROM nicknames WHERE nickname = ? #pragma: nocover | 2.330978 | 2 |
siri/siri/siri_common_services_v2_0.py | grutts/siri | 0 | 6619694 | from dataclasses import dataclass, field
from enum import Enum
from typing import List, Optional
from xsdata.models.datatype import XmlDateTime, XmlDuration
from siri.siri.siri_request_error_conditions_v2_0 import (
CapabilityNotSupportedError,
OtherError,
ServiceDeliveryErrorConditionStructure,
ServiceNotAvailableError,
UnknownSubscriberError,
UnknownSubscriptionError,
)
from siri.siri.siri_requests_v2_0 import (
AbstractNotificationStructure,
AuthenticatedRequestStructure,
ConsumerRequestEndpointStructure,
ConsumerResponseEndpointStructure,
ProducerRequestEndpointStructure,
RequestStructure,
ResponseEndpointStructure,
ResponseStructure,
)
from siri.siri_utility.siri_utility_v1_1 import (
EmptyType,
Extensions,
)
__NAMESPACE__ = "http://www.siri.org.uk/siri"
@dataclass
class DataNameSpacesStructure:
"""
Name spaces.
:ivar stop_point_name_space: Name space for STOP POINTs.
:ivar line_name_space: Name space for LINE names and DIRECTIONss.
:ivar product_category_name_space: Name space for Product
Categories.
:ivar service_feature_name_space: Name space for service features.
:ivar vehicle_feature_name_space: Name space for VEHICLE features.
"""
stop_point_name_space: Optional[str] = field(
default=None,
metadata={
"name": "StopPointNameSpace",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
line_name_space: Optional[str] = field(
default=None,
metadata={
"name": "LineNameSpace",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
product_category_name_space: Optional[str] = field(
default=None,
metadata={
"name": "ProductCategoryNameSpace",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
service_feature_name_space: Optional[str] = field(
default=None,
metadata={
"name": "ServiceFeatureNameSpace",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
vehicle_feature_name_space: Optional[str] = field(
default=None,
metadata={
"name": "VehicleFeatureNameSpace",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
@dataclass
class DataSupplyRequestBodyStructure:
"""Type for Body of Data Supply Request.
Used in WSDL.
:ivar notification_ref: Reference to a specific notification message
for which data is to be fetched. Can be used to distinguish
between notfcatiosn for the same service and subscriber but for
different filters.If none specified,
:ivar all_data: Whether to return all data, or just new updates
since the last delivery. Default false, i.e. just updates.
"""
notification_ref: Optional[str] = field(
default=None,
metadata={
"name": "NotificationRef",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
all_data: Optional[bool] = field(
default=None,
metadata={
"name": "AllData",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
class DeliveryMethodEnumeration(Enum):
"""
Delivery Method: Fetched or Direct Delivery.
"""
DIRECT = "direct"
FETCHED = "fetched"
class PredictorsEnumeration(Enum):
"""
Allowed values for predictors.
"""
AVMS_ONLY = "avmsOnly"
ANYONE = "anyone"
@dataclass
class CheckStatusRequestStructure(RequestStructure):
"""
Type for check status request.
"""
extensions: Optional[Extensions] = field(
default=None,
metadata={
"name": "Extensions",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
@dataclass
class CheckStatusResponseBodyStructure:
"""Type for Body of Service Status Check Response.
Used in WSDL. Same as CheckStatusResponseStructure, but without
extension to be consistent with the other operation definition.
:ivar status:
:ivar data_ready: Whether data delivery is ready to be fetched SIRI
v 2.0
:ivar error_condition: Description of any error or warning condition
that applies to the status check.
:ivar valid_until: End of data horizon of the data producer.
:ivar shortest_possible_cycle: Minimum interval at which updates can
be sent.
:ivar service_started_time: Time at which current instantiation of
service started.
"""
status: Optional[bool] = field(
default=None,
metadata={
"name": "Status",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
data_ready: Optional[bool] = field(
default=None,
metadata={
"name": "DataReady",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
error_condition: Optional["CheckStatusResponseBodyStructure.ErrorCondition"] = field(
default=None,
metadata={
"name": "ErrorCondition",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
valid_until: Optional[XmlDateTime] = field(
default=None,
metadata={
"name": "ValidUntil",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
shortest_possible_cycle: Optional[XmlDuration] = field(
default=None,
metadata={
"name": "ShortestPossibleCycle",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
service_started_time: Optional[XmlDateTime] = field(
default=None,
metadata={
"name": "ServiceStartedTime",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
@dataclass
class ErrorCondition:
"""
:ivar service_not_available_error:
:ivar other_error:
:ivar description: Text description of error.
"""
service_not_available_error: Optional[ServiceNotAvailableError] = field(
default=None,
metadata={
"name": "ServiceNotAvailableError",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
other_error: Optional[OtherError] = field(
default=None,
metadata={
"name": "OtherError",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
description: Optional[str] = field(
default=None,
metadata={
"name": "Description",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
@dataclass
class CheckStatusResponseStructure(ResponseStructure):
"""
Type for Service Status Check Response.
:ivar producer_ref: Unique identifier of Producer - Participant
reference.
:ivar address: Endpoint Address to which acknowledgements to confirm
delivery are to be sent.
:ivar response_message_identifier: An arbitrary unique reference
associated with the response which may be used to reference it.
:ivar request_message_ref: Reference to an arbitrary unique
identifier associated with the request which gave rise to this
response.
:ivar delegator_address: Address of original Consumer, ie requesting
system to which delegating response is to be returned. +SIRI
2.0
:ivar delegator_ref: Identifier of delegating system that originated
message. +SIRI 2.0
:ivar status:
:ivar data_ready: Whether data delivery is ready to be fetched SIRI
v 2.0
:ivar error_condition: Description of any error or warning condition
that applies to the status check.
:ivar valid_until: End of data horizon of the data producer.
:ivar shortest_possible_cycle: Minimum interval at which updates can
be sent.
:ivar service_started_time: Time at which current instantiation of
service started.
:ivar extensions:
"""
producer_ref: Optional[str] = field(
default=None,
metadata={
"name": "ProducerRef",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
address: Optional[str] = field(
default=None,
metadata={
"name": "Address",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
response_message_identifier: Optional[str] = field(
default=None,
metadata={
"name": "ResponseMessageIdentifier",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
request_message_ref: Optional[str] = field(
default=None,
metadata={
"name": "RequestMessageRef",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
delegator_address: Optional[str] = field(
default=None,
metadata={
"name": "DelegatorAddress",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
delegator_ref: Optional[str] = field(
default=None,
metadata={
"name": "DelegatorRef",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
status: Optional[bool] = field(
default=None,
metadata={
"name": "Status",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
data_ready: Optional[bool] = field(
default=None,
metadata={
"name": "DataReady",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
error_condition: Optional["CheckStatusResponseStructure.ErrorCondition"] = field(
default=None,
metadata={
"name": "ErrorCondition",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
valid_until: Optional[XmlDateTime] = field(
default=None,
metadata={
"name": "ValidUntil",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
shortest_possible_cycle: Optional[XmlDuration] = field(
default=None,
metadata={
"name": "ShortestPossibleCycle",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
service_started_time: Optional[XmlDateTime] = field(
default=None,
metadata={
"name": "ServiceStartedTime",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
extensions: Optional[Extensions] = field(
default=None,
metadata={
"name": "Extensions",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
@dataclass
class ErrorCondition:
"""
:ivar service_not_available_error:
:ivar other_error:
:ivar description: Text description of error.
"""
service_not_available_error: Optional[ServiceNotAvailableError] = field(
default=None,
metadata={
"name": "ServiceNotAvailableError",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
other_error: Optional[OtherError] = field(
default=None,
metadata={
"name": "OtherError",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
description: Optional[str] = field(
default=None,
metadata={
"name": "Description",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
@dataclass
class DataReadyRequestStructure(AbstractNotificationStructure):
"""Type for Request from Producer to Consumer to notify that data update is
ready to fetch.
Answered with a DataReadyResponse.
"""
@dataclass
class DataReadyResponseStructure(ConsumerResponseEndpointStructure):
"""
Type for Data ready Acknowledgement Response.
:ivar status:
:ivar error_condition: Description of any error or warning condition
as to why Consumer cannot fetch data.
"""
status: Optional[bool] = field(
default=None,
metadata={
"name": "Status",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
error_condition: Optional["DataReadyResponseStructure.ErrorCondition"] = field(
default=None,
metadata={
"name": "ErrorCondition",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
@dataclass
class ErrorCondition:
"""
:ivar unknown_subscription_error:
:ivar other_error:
:ivar description: Text description of error.
"""
unknown_subscription_error: Optional[UnknownSubscriptionError] = field(
default=None,
metadata={
"name": "UnknownSubscriptionError",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
other_error: Optional[OtherError] = field(
default=None,
metadata={
"name": "OtherError",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
description: Optional[str] = field(
default=None,
metadata={
"name": "Description",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
@dataclass
class DataReceivedResponseStructure(ConsumerResponseEndpointStructure):
"""
Type for Data received Acknowledgement Response.
:ivar status:
:ivar error_condition: Description of any error or warning
condition.
"""
status: bool = field(
default=True,
metadata={
"name": "Status",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
"required": True,
},
)
error_condition: Optional["DataReceivedResponseStructure.ErrorCondition"] = field(
default=None,
metadata={
"name": "ErrorCondition",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
@dataclass
class ErrorCondition:
"""
:ivar unknown_subscription_error:
:ivar other_error:
:ivar description: Text description of error.
"""
unknown_subscription_error: Optional[UnknownSubscriptionError] = field(
default=None,
metadata={
"name": "UnknownSubscriptionError",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
other_error: Optional[OtherError] = field(
default=None,
metadata={
"name": "OtherError",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
description: Optional[str] = field(
default=None,
metadata={
"name": "Description",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
@dataclass
class DataSupplyRequestStructure(ConsumerRequestEndpointStructure):
"""
Type for Data supply Request.
:ivar notification_ref: Reference to a specific notification message
for which data is to be fetched. Can be used to distinguish
between notfcatiosn for the same service and subscriber but for
different filters.If none specified,
:ivar all_data: Whether to return all data, or just new updates
since the last delivery. Default false, i.e. just updates.
"""
notification_ref: Optional[str] = field(
default=None,
metadata={
"name": "NotificationRef",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
all_data: Optional[bool] = field(
default=None,
metadata={
"name": "AllData",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
@dataclass
class HeartbeatNotificationStructure(ProducerRequestEndpointStructure):
"""
Type for Service Heartbeat Notification.
:ivar status:
:ivar data_ready: Whether data delivery is ready to be fetched SIRI
v 2.0
:ivar error_condition: Description of any error or warning condition
that applies to the status check.
:ivar valid_until: End of data horizon of the data producer.
:ivar shortest_possible_cycle: Minimum interval at which updates can
be sent.
:ivar service_started_time: Time at which current instantiation of
service started.
:ivar extensions:
"""
status: Optional[bool] = field(
default=None,
metadata={
"name": "Status",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
data_ready: Optional[bool] = field(
default=None,
metadata={
"name": "DataReady",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
error_condition: Optional["HeartbeatNotificationStructure.ErrorCondition"] = field(
default=None,
metadata={
"name": "ErrorCondition",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
valid_until: Optional[XmlDateTime] = field(
default=None,
metadata={
"name": "ValidUntil",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
shortest_possible_cycle: Optional[XmlDuration] = field(
default=None,
metadata={
"name": "ShortestPossibleCycle",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
service_started_time: Optional[XmlDateTime] = field(
default=None,
metadata={
"name": "ServiceStartedTime",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
extensions: Optional[Extensions] = field(
default=None,
metadata={
"name": "Extensions",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
@dataclass
class ErrorCondition:
"""
:ivar service_not_available_error:
:ivar other_error:
:ivar description: Text description of error.
"""
service_not_available_error: Optional[ServiceNotAvailableError] = field(
default=None,
metadata={
"name": "ServiceNotAvailableError",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
other_error: Optional[OtherError] = field(
default=None,
metadata={
"name": "OtherError",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
description: Optional[str] = field(
default=None,
metadata={
"name": "Description",
"type": "Element",
"namespace": "http://www.siri.org.uk/siri",
},
)
@dataclass
class ServiceRequestContextStructure:
    """Configurable context for requests.
    Intended primarily as a documentation mechanism.
    :ivar check_status_address: Address to which CheckStatus requests
        are to be sent.
    :ivar subscribe_address: Address to which subscription requests are
        to be sent.
    :ivar manage_subscription_address: Address to which subscription
        requests are to be sent. If absent, same as SubscribeAddress.
    :ivar get_data_address: Address to which requests are to return
        data.
    :ivar status_response_address: Address to which CheckStatus
        responses and heartbeats are to be sent. If absent, same as
        SubscriberAddress.
    :ivar subscriber_address: Address to which subscription responses
        are to be sent.
    :ivar notify_address: Address to which notifications are to
        be sent. If absent, same as SubscriberAddress.
    :ivar consumer_address: Address to which data is to be sent. If
        absent, same as NotifyAddress.
    :ivar data_name_spaces: Default name space used to scope data
        identifiers.
    :ivar language: Preferred language in which to return text values.
    :ivar wgs_decimal_degrees: Geospatial coordinates are given as WGS
        84 latitude and longitude, decimal degrees of arc.
    :ivar gml_coordinate_format: Name of GML Coordinate format used for
        Geospatial points in responses.
    :ivar distance_units: Units for Distance Type. Default is metres.
        +SIRI v2.0
    :ivar velocity_units: Units for Velocity Type. Default is metres per
        second. +SIRI v2.0
    :ivar data_horizon: Maximum data horizon for requests.
    :ivar request_timeout: Timeout for requests. [Should this be
        separate for each type?]
    :ivar delivery_method: Whether Delivery is fetched or retrieved.
    :ivar multipart_despatch: Whether multi-part delivery is allowed,
        i.e. the breaking up of updates into more than one delivery
        messages with a MoreData flag,
    :ivar confirm_delivery: Whether Consumers should issue an
        acknowledgement on successful receipt of a delivery. Default is
        'false'.
    :ivar maximimum_number_of_subscriptions: Maximum Number of
        subscriptions that can be sustained by the subscriber. If absent
        no limit.
    :ivar allowed_predictors: Who may make a prediction.
    :ivar prediction_function: Name of prediction method used.
    """
    check_status_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "CheckStatusAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscribe_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscribeAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    manage_subscription_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "ManageSubscriptionAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    get_data_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "GetDataAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    status_response_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "StatusResponseAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscriber_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriberAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    notify_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "NotifyAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    consumer_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "ConsumerAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    data_name_spaces: Optional[DataNameSpacesStructure] = field(
        default=None,
        metadata={
            "name": "DataNameSpaces",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    language: Optional[str] = field(
        default=None,
        metadata={
            "name": "Language",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    wgs_decimal_degrees: Optional[EmptyType] = field(
        default=None,
        metadata={
            "name": "WgsDecimalDegrees",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    gml_coordinate_format: Optional[str] = field(
        default=None,
        metadata={
            "name": "GmlCoordinateFormat",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    distance_units: Optional[str] = field(
        default=None,
        metadata={
            "name": "DistanceUnits",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    velocity_units: Optional[str] = field(
        default=None,
        metadata={
            "name": "VelocityUnits",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    data_horizon: Optional[XmlDuration] = field(
        default=None,
        metadata={
            "name": "DataHorizon",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    request_timeout: Optional[XmlDuration] = field(
        default=None,
        metadata={
            "name": "RequestTimeout",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    delivery_method: Optional[DeliveryMethodEnumeration] = field(
        default=None,
        metadata={
            "name": "DeliveryMethod",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    multipart_despatch: Optional[bool] = field(
        default=None,
        metadata={
            "name": "MultipartDespatch",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    confirm_delivery: Optional[bool] = field(
        default=None,
        metadata={
            "name": "ConfirmDelivery",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    maximimum_number_of_subscriptions: Optional[int] = field(
        default=None,
        metadata={
            "name": "MaximimumNumberOfSubscriptions",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    allowed_predictors: Optional[PredictorsEnumeration] = field(
        default=None,
        metadata={
            "name": "AllowedPredictors",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    prediction_function: Optional[str] = field(
        default=None,
        metadata={
            "name": "PredictionFunction",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class StatusResponseStructure(ResponseStructure):
    """
    Type for Response Status.
    :ivar request_message_ref: Arbitrary unique reference to the request
        which gave rise to this message.
    :ivar subscriber_ref: Unique identifier of Subscriber - reference to
        a Participant.
    :ivar subscription_filter_ref: Unique identifier of Subscription
        filter to which this subscription is assigned. If there is only
        a single filter, then can be omitted.
    :ivar subscription_ref: Reference to a service subscription: unique
        within Service and Subscriber.
    :ivar status:
    :ivar error_condition: Description of any error or warning
        condition.
    :ivar valid_until: End of data horizon of the data producer.
    :ivar shortest_possible_cycle: Minimum interval at which updates can
        be sent.
    """
    request_message_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "RequestMessageRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscriber_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriberRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscription_filter_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriptionFilterRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscription_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriptionRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    status: Optional[bool] = field(
        default=None,
        metadata={
            "name": "Status",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    error_condition: Optional[ServiceDeliveryErrorConditionStructure] = field(
        default=None,
        metadata={
            "name": "ErrorCondition",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    valid_until: Optional[XmlDateTime] = field(
        default=None,
        metadata={
            "name": "ValidUntil",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    shortest_possible_cycle: Optional[XmlDuration] = field(
        default=None,
        metadata={
            "name": "ShortestPossibleCycle",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class TerminateSubscriptionRequestBodyStructure:
    """Type for Body of Terminate Subscription Request content.
    Used in WSDL.
    :ivar subscriber_ref: Participant identifier of Subscriber.
        Subscription ref will be unique within this.
    :ivar all: Terminate all subscriptions for the requestor.
    :ivar subscription_ref: Terminate the subscription identified by the
        reference.
    """
    subscriber_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriberRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    all: Optional[EmptyType] = field(
        default=None,
        metadata={
            "name": "All",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscription_ref: List[str] = field(
        default_factory=list,
        metadata={
            "name": "SubscriptionRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class TerminateSubscriptionRequestStructure(AuthenticatedRequestStructure):
    """
    Type for request to terminate a subscription or subscriptions.
    :ivar address: Address to which response is to be sent. This may
        also be determined from RequestorRef and preconfigured data.
    :ivar requestor_ref:
    :ivar message_identifier: Arbitrary unique identifier that can be
        used to reference this message in subsequent interactions.
    :ivar delegator_address: Address of original Consumer, ie requesting
        system to which delegating response is to be returned. +SIRI
        2.0
    :ivar delegator_ref: Identifier of delegating system that originated
        message. +SIRI 2.0
    :ivar subscriber_ref: Participant identifier of Subscriber.
        Subscription ref will be unique within this.
    :ivar all: Terminate all subscriptions for the requestor.
    :ivar subscription_ref: Terminate the subscription identified by the
        reference.
    :ivar extensions:
    """
    address: Optional[str] = field(
        default=None,
        metadata={
            "name": "Address",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    requestor_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "RequestorRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
            "required": True,
        },
    )
    message_identifier: Optional[str] = field(
        default=None,
        metadata={
            "name": "MessageIdentifier",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    delegator_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "DelegatorAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    delegator_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "DelegatorRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscriber_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriberRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    all: Optional[EmptyType] = field(
        default=None,
        metadata={
            "name": "All",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscription_ref: List[str] = field(
        default_factory=list,
        metadata={
            "name": "SubscriptionRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    extensions: Optional[Extensions] = field(
        default=None,
        metadata={
            "name": "Extensions",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class TerminationResponseStatusStructure:
    """
    Type for Status of termination response.
    :ivar response_timestamp:
    :ivar request_message_ref: Arbitrary unique reference to the request
        which gave rise to this message.
    :ivar subscriber_ref: Unique identifier of Subscriber - reference to
        a Participant.
    :ivar subscription_filter_ref: Unique identifier of Subscription
        filter to which this subscription is assigned. If there is only
        a single filter, then can be omitted.
    :ivar subscription_ref: Reference to a service subscription: unique
        within Service and Subscriber.
    :ivar status:
    :ivar error_condition: Description of any error or warning
        condition.
    """
    response_timestamp: Optional[XmlDateTime] = field(
        default=None,
        metadata={
            "name": "ResponseTimestamp",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    request_message_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "RequestMessageRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscriber_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriberRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscription_filter_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriptionFilterRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscription_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriptionRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    status: bool = field(
        default=True,
        metadata={
            "name": "Status",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
            "required": True,
        },
    )
    error_condition: Optional["TerminationResponseStatusStructure.ErrorCondition"] = field(
        default=None,
        metadata={
            "name": "ErrorCondition",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    @dataclass
    class ErrorCondition:
        """
        :ivar capability_not_supported_error:
        :ivar unknown_subscriber_error:
        :ivar unknown_subscription_error:
        :ivar other_error:
        :ivar description: Text description of error.
        """
        capability_not_supported_error: Optional[CapabilityNotSupportedError] = field(
            default=None,
            metadata={
                "name": "CapabilityNotSupportedError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        unknown_subscriber_error: Optional[UnknownSubscriberError] = field(
            default=None,
            metadata={
                "name": "UnknownSubscriberError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        unknown_subscription_error: Optional[UnknownSubscriptionError] = field(
            default=None,
            metadata={
                "name": "UnknownSubscriptionError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        other_error: Optional[OtherError] = field(
            default=None,
            metadata={
                "name": "OtherError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        description: Optional[str] = field(
            default=None,
            metadata={
                "name": "Description",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
@dataclass
class CheckStatusRequest(CheckStatusRequestStructure):
    """Request from Consumer to Producer to check whether the service is working.
    Answered with a CheckStatusResponse.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class CheckStatusResponse(CheckStatusResponseStructure):
    """Response from Producer to Consumer to inform whether the service is
    working.
    Answers a CheckStatusRequest.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class ContextualisedRequestStructure:
    """
    Type for General SIRI Request.
    :ivar service_request_context: General request properties -
        typically configured rather than repeated on request.
    :ivar request_timestamp:
    :ivar account_id: Account Identifier. May be used to attribute
        requests to a particular application provider and authentication
        key. The account may be common to all users of an application,
        or to an individual user. Note that to identify an individual
        user the RequestorRef can be used with an anonymised token.
        +SIRI v2.0
    :ivar account_key: Authentication key for request. May be used to
        authenticate requests from a particular account. +SIRI v2.0
    :ivar address: Address to which response is to be sent. This may
        also be determined from RequestorRef and preconfigured data.
    :ivar requestor_ref:
    :ivar message_identifier: Arbitrary unique identifier that can be
        used to reference this message in subsequent interactions.
    :ivar delegator_address: Address of original Consumer, ie requesting
        system to which delegating response is to be returned. +SIRI
        2.0
    :ivar delegator_ref: Identifier of delegating system that originated
        message. +SIRI 2.0
    """
    service_request_context: Optional[ServiceRequestContextStructure] = field(
        default=None,
        metadata={
            "name": "ServiceRequestContext",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    request_timestamp: Optional[XmlDateTime] = field(
        default=None,
        metadata={
            "name": "RequestTimestamp",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
            "required": True,
        },
    )
    account_id: Optional[str] = field(
        default=None,
        metadata={
            "name": "AccountId",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    account_key: Optional[str] = field(
        default=None,
        metadata={
            "name": "AccountKey",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    address: Optional[str] = field(
        default=None,
        metadata={
            "name": "Address",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    requestor_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "RequestorRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
            "required": True,
        },
    )
    message_identifier: Optional[str] = field(
        default=None,
        metadata={
            "name": "MessageIdentifier",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    delegator_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "DelegatorAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    delegator_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "DelegatorRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class DataReadyAcknowledgement(DataReadyResponseStructure):
    """
    Response from Consumer to Producer acknowledging that a
    DataReadyRequest has been received.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class DataReadyNotification(DataReadyRequestStructure):
    """Request from Producer to Consumer to notify that a data update is ready
    to fetch.
    Answered with a DataReadyResponse.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class DataReceivedAcknowledgement(DataReceivedResponseStructure):
    """Response from Consumer to Producer to acknowledge that data has been
    received.
    Used as an optional extra step if reliable delivery is needed. Answers
    a ServiceDelivery.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class DataSupplyRequest(DataSupplyRequestStructure):
    """Request from Consumer to Producer to fetch an update previously notified
    by a DataReady message.
    Answered with a ServiceDelivery.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class HeartbeatNotification(HeartbeatNotificationStructure):
    """
    Notification from Producer to Consumer to indicate that the service is
    running normally.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class ResponseStatus(StatusResponseStructure):
    """Contains information about the processing of an individual service subscription - either success info or an error condition. (VDV Acknowledgement)."""
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class TerminateSubscriptionRequest(TerminateSubscriptionRequestStructure):
    """Request from Subscriber to Subscription Manager to terminate a
    subscription.
    Answered with a TerminateSubscriptionResponse.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class TerminateSubscriptionResponseStructure(ResponseEndpointStructure):
    """
    Type for Response to a request to terminate a subscription or
    subscriptions.
    :ivar termination_response_status: Status of each subscription
        termination response.
    """
    termination_response_status: List[TerminationResponseStatusStructure] = field(
        default_factory=list,
        metadata={
            "name": "TerminationResponseStatus",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class SubscriptionResponseBodyStructure:
    """Type for Body of Subscription Response.
    Used in WSDL.
    :ivar response_status:
    :ivar subscription_manager_address: Endpoint address of subscription
        manager if different from that of the Producer or known default.
    :ivar service_started_time: Time at which service providing the
        subscription was last started. Can be used to detect restarts.
        If absent, unknown.
    """
    response_status: List[ResponseStatus] = field(
        default_factory=list,
        metadata={
            "name": "ResponseStatus",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
            "min_occurs": 1,
        },
    )
    subscription_manager_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriptionManagerAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    service_started_time: Optional[XmlDateTime] = field(
        default=None,
        metadata={
            "name": "ServiceStartedTime",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class SubscriptionResponseStructure(ResponseEndpointStructure):
    """
    Type for Subscription Response.
    :ivar response_status:
    :ivar subscription_manager_address: Endpoint address of subscription
        manager if different from that of the Producer or known default.
    :ivar service_started_time: Time at which service providing the
        subscription was last started. Can be used to detect restarts.
        If absent, unknown.
    :ivar extensions:
    """
    response_status: List[ResponseStatus] = field(
        default_factory=list,
        metadata={
            "name": "ResponseStatus",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
            "min_occurs": 1,
        },
    )
    subscription_manager_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriptionManagerAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    service_started_time: Optional[XmlDateTime] = field(
        default=None,
        metadata={
            "name": "ServiceStartedTime",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    extensions: Optional[Extensions] = field(
        default=None,
        metadata={
            "name": "Extensions",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class TerminateSubscriptionResponse(TerminateSubscriptionResponseStructure):
    """Response from Subscription Manager to Subscriber reporting the status
    of the termination of a subscription or subscriptions.
    Answers a TerminateSubscriptionRequest.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class SubscriptionResponse(SubscriptionResponseStructure):
    """Response from Producer to Consumer to inform whether subscriptions have
    been created.
    Answers a previous SubscriptionRequest.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
from dataclasses import dataclass, field
from enum import Enum
from typing import List, Optional
from xsdata.models.datatype import XmlDateTime, XmlDuration
from siri.siri.siri_request_error_conditions_v2_0 import (
CapabilityNotSupportedError,
OtherError,
ServiceDeliveryErrorConditionStructure,
ServiceNotAvailableError,
UnknownSubscriberError,
UnknownSubscriptionError,
)
from siri.siri.siri_requests_v2_0 import (
AbstractNotificationStructure,
AuthenticatedRequestStructure,
ConsumerRequestEndpointStructure,
ConsumerResponseEndpointStructure,
ProducerRequestEndpointStructure,
RequestStructure,
ResponseEndpointStructure,
ResponseStructure,
)
from siri.siri_utility.siri_utility_v1_1 import (
EmptyType,
Extensions,
)
# Default XML namespace for all SIRI elements generated from this schema.
__NAMESPACE__ = "http://www.siri.org.uk/siri"
@dataclass
class DataNameSpacesStructure:
    """
    Name spaces.
    :ivar stop_point_name_space: Name space for STOP POINTs.
    :ivar line_name_space: Name space for LINE names and DIRECTIONs.
    :ivar product_category_name_space: Name space for Product
        Categories.
    :ivar service_feature_name_space: Name space for service features.
    :ivar vehicle_feature_name_space: Name space for VEHICLE features.
    """
    stop_point_name_space: Optional[str] = field(
        default=None,
        metadata={
            "name": "StopPointNameSpace",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    line_name_space: Optional[str] = field(
        default=None,
        metadata={
            "name": "LineNameSpace",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    product_category_name_space: Optional[str] = field(
        default=None,
        metadata={
            "name": "ProductCategoryNameSpace",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    service_feature_name_space: Optional[str] = field(
        default=None,
        metadata={
            "name": "ServiceFeatureNameSpace",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    vehicle_feature_name_space: Optional[str] = field(
        default=None,
        metadata={
            "name": "VehicleFeatureNameSpace",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class DataSupplyRequestBodyStructure:
    """Type for Body of Data Supply Request.
    Used in WSDL.
    :ivar notification_ref: Reference to a specific notification message
        for which data is to be fetched. Can be used to distinguish
        between notifications for the same service and subscriber but
        for different filters. If none specified,
    :ivar all_data: Whether to return all data, or just new updates
        since the last delivery. Default false, i.e. just updates.
    """
    notification_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "NotificationRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    all_data: Optional[bool] = field(
        default=None,
        metadata={
            "name": "AllData",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
class DeliveryMethodEnumeration(Enum):
    """
    Delivery Method: Fetched or Direct Delivery.
    """
    DIRECT = "direct"
    FETCHED = "fetched"
class PredictorsEnumeration(Enum):
    """
    Allowed values for predictors, i.e. who may make a prediction.
    """
    AVMS_ONLY = "avmsOnly"
    ANYONE = "anyone"
@dataclass
class CheckStatusRequestStructure(RequestStructure):
    """
    Type for check status request.
    :ivar extensions: Placeholder for user extension content.
    """
    extensions: Optional[Extensions] = field(
        default=None,
        metadata={
            "name": "Extensions",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class CheckStatusResponseBodyStructure:
    """Type for Body of Service Status Check Response.
    Used in WSDL. Same as CheckStatusResponseStructure, but without
    extension to be consistent with the other operation definition.
    :ivar status:
    :ivar data_ready: Whether data delivery is ready to be fetched SIRI
        v 2.0
    :ivar error_condition: Description of any error or warning condition
        that applies to the status check.
    :ivar valid_until: End of data horizon of the data producer.
    :ivar shortest_possible_cycle: Minimum interval at which updates can
        be sent.
    :ivar service_started_time: Time at which current instantiation of
        service started.
    """
    status: Optional[bool] = field(
        default=None,
        metadata={
            "name": "Status",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    data_ready: Optional[bool] = field(
        default=None,
        metadata={
            "name": "DataReady",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    error_condition: Optional["CheckStatusResponseBodyStructure.ErrorCondition"] = field(
        default=None,
        metadata={
            "name": "ErrorCondition",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    valid_until: Optional[XmlDateTime] = field(
        default=None,
        metadata={
            "name": "ValidUntil",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    shortest_possible_cycle: Optional[XmlDuration] = field(
        default=None,
        metadata={
            "name": "ShortestPossibleCycle",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    service_started_time: Optional[XmlDateTime] = field(
        default=None,
        metadata={
            "name": "ServiceStartedTime",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    @dataclass
    class ErrorCondition:
        """
        :ivar service_not_available_error:
        :ivar other_error:
        :ivar description: Text description of error.
        """
        service_not_available_error: Optional[ServiceNotAvailableError] = field(
            default=None,
            metadata={
                "name": "ServiceNotAvailableError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        other_error: Optional[OtherError] = field(
            default=None,
            metadata={
                "name": "OtherError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        description: Optional[str] = field(
            default=None,
            metadata={
                "name": "Description",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
@dataclass
class CheckStatusResponseStructure(ResponseStructure):
    """
    Type for Service Status Check Response.
    :ivar producer_ref: Unique identifier of Producer - Participant
        reference.
    :ivar address: Endpoint Address to which acknowledgements to confirm
        delivery are to be sent.
    :ivar response_message_identifier: An arbitrary unique reference
        associated with the response which may be used to reference it.
    :ivar request_message_ref: Reference to an arbitrary unique
        identifier associated with the request which gave rise to this
        response.
    :ivar delegator_address: Address of original Consumer, ie requesting
        system to which delegating response is to be returned. +SIRI
        2.0
    :ivar delegator_ref: Identifier of delegating system that originated
        message. +SIRI 2.0
    :ivar status:
    :ivar data_ready: Whether data delivery is ready to be fetched SIRI
        v 2.0
    :ivar error_condition: Description of any error or warning condition
        that applies to the status check.
    :ivar valid_until: End of data horizon of the data producer.
    :ivar shortest_possible_cycle: Minimum interval at which updates can
        be sent.
    :ivar service_started_time: Time at which current instantiation of
        service started.
    :ivar extensions:
    """
    producer_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "ProducerRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    address: Optional[str] = field(
        default=None,
        metadata={
            "name": "Address",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    response_message_identifier: Optional[str] = field(
        default=None,
        metadata={
            "name": "ResponseMessageIdentifier",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    request_message_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "RequestMessageRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    delegator_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "DelegatorAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    delegator_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "DelegatorRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    status: Optional[bool] = field(
        default=None,
        metadata={
            "name": "Status",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    data_ready: Optional[bool] = field(
        default=None,
        metadata={
            "name": "DataReady",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    error_condition: Optional["CheckStatusResponseStructure.ErrorCondition"] = field(
        default=None,
        metadata={
            "name": "ErrorCondition",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    valid_until: Optional[XmlDateTime] = field(
        default=None,
        metadata={
            "name": "ValidUntil",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    shortest_possible_cycle: Optional[XmlDuration] = field(
        default=None,
        metadata={
            "name": "ShortestPossibleCycle",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    service_started_time: Optional[XmlDateTime] = field(
        default=None,
        metadata={
            "name": "ServiceStartedTime",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    extensions: Optional[Extensions] = field(
        default=None,
        metadata={
            "name": "Extensions",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    @dataclass
    class ErrorCondition:
        """
        :ivar service_not_available_error:
        :ivar other_error:
        :ivar description: Text description of error.
        """
        service_not_available_error: Optional[ServiceNotAvailableError] = field(
            default=None,
            metadata={
                "name": "ServiceNotAvailableError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        other_error: Optional[OtherError] = field(
            default=None,
            metadata={
                "name": "OtherError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        description: Optional[str] = field(
            default=None,
            metadata={
                "name": "Description",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
@dataclass
class DataReadyRequestStructure(AbstractNotificationStructure):
    """Type for Request from Producer to Consumer to notify that a data update
    is ready to fetch.
    Answered with a DataReadyResponse.
    """
@dataclass
class DataReadyResponseStructure(ConsumerResponseEndpointStructure):
    """
    Type for Data ready Acknowledgement Response.
    :ivar status:
    :ivar error_condition: Description of any error or warning condition
        as to why Consumer cannot fetch data.
    """
    status: Optional[bool] = field(
        default=None,
        metadata={
            "name": "Status",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    error_condition: Optional["DataReadyResponseStructure.ErrorCondition"] = field(
        default=None,
        metadata={
            "name": "ErrorCondition",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    @dataclass
    class ErrorCondition:
        """
        :ivar unknown_subscription_error:
        :ivar other_error:
        :ivar description: Text description of error.
        """
        unknown_subscription_error: Optional[UnknownSubscriptionError] = field(
            default=None,
            metadata={
                "name": "UnknownSubscriptionError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        other_error: Optional[OtherError] = field(
            default=None,
            metadata={
                "name": "OtherError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        description: Optional[str] = field(
            default=None,
            metadata={
                "name": "Description",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
@dataclass
class DataReceivedResponseStructure(ConsumerResponseEndpointStructure):
    """
    Type for Data received Acknowledgement Response.

    :ivar status: Whether the delivery was received successfully.
    :ivar error_condition: Description of any error or warning
        condition.
    """
    status: bool = field(
        default=True,
        metadata={
            "name": "Status",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
            "required": True,
        },
    )
    error_condition: Optional["DataReceivedResponseStructure.ErrorCondition"] = field(
        default=None,
        metadata={
            "name": "ErrorCondition",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    @dataclass
    class ErrorCondition:
        """Error condition raised while acknowledging receipt of a delivery.

        :ivar unknown_subscription_error:
        :ivar other_error:
        :ivar description: Text description of error.
        """
        unknown_subscription_error: Optional[UnknownSubscriptionError] = field(
            default=None,
            metadata={
                "name": "UnknownSubscriptionError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        other_error: Optional[OtherError] = field(
            default=None,
            metadata={
                "name": "OtherError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        description: Optional[str] = field(
            default=None,
            metadata={
                "name": "Description",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
@dataclass
class DataSupplyRequestStructure(ConsumerRequestEndpointStructure):
    """
    Type for Data supply Request.

    :ivar notification_ref: Reference to a specific notification message
        for which data is to be fetched. Can be used to distinguish
        between notifications for the same service and subscriber but
        for different filters. If none specified,
    :ivar all_data: Whether to return all data, or just new updates
        since the last delivery. Default false, i.e. just updates.
    """
    notification_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "NotificationRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    all_data: Optional[bool] = field(
        default=None,
        metadata={
            "name": "AllData",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class HeartbeatNotificationStructure(ProducerRequestEndpointStructure):
    """
    Type for Service Heartbeat Notification.

    :ivar status: Whether the service is running normally.
    :ivar data_ready: Whether data delivery is ready to be fetched. SIRI
        v2.0
    :ivar error_condition: Description of any error or warning condition
        that applies to the status check.
    :ivar valid_until: End of data horizon of the data producer.
    :ivar shortest_possible_cycle: Minimum interval at which updates can
        be sent.
    :ivar service_started_time: Time at which current instantiation of
        service started.
    :ivar extensions:
    """
    status: Optional[bool] = field(
        default=None,
        metadata={
            "name": "Status",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    data_ready: Optional[bool] = field(
        default=None,
        metadata={
            "name": "DataReady",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    error_condition: Optional["HeartbeatNotificationStructure.ErrorCondition"] = field(
        default=None,
        metadata={
            "name": "ErrorCondition",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    valid_until: Optional[XmlDateTime] = field(
        default=None,
        metadata={
            "name": "ValidUntil",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    shortest_possible_cycle: Optional[XmlDuration] = field(
        default=None,
        metadata={
            "name": "ShortestPossibleCycle",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    service_started_time: Optional[XmlDateTime] = field(
        default=None,
        metadata={
            "name": "ServiceStartedTime",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    extensions: Optional[Extensions] = field(
        default=None,
        metadata={
            "name": "Extensions",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    @dataclass
    class ErrorCondition:
        """Error condition explaining an abnormal service status.

        :ivar service_not_available_error:
        :ivar other_error:
        :ivar description: Text description of error.
        """
        service_not_available_error: Optional[ServiceNotAvailableError] = field(
            default=None,
            metadata={
                "name": "ServiceNotAvailableError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        other_error: Optional[OtherError] = field(
            default=None,
            metadata={
                "name": "OtherError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        description: Optional[str] = field(
            default=None,
            metadata={
                "name": "Description",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
@dataclass
class ServiceRequestContextStructure:
    """Configurable context for requests.

    Intended primarily as a documentation mechanism.

    :ivar check_status_address: Address to which CheckStatus requests
        are to be sent.
    :ivar subscribe_address: Address to which subscription requests are
        to be sent.
    :ivar manage_subscription_address: Address to which subscription
        requests are to be sent. If absent, same as SubscribeAddress.
    :ivar get_data_address: Address to which requests are to return
        data.
    :ivar status_response_address: Address to which CheckStatus
        responses and heartbeats are to be sent. If absent, same as
        SubscriberAddress.
    :ivar subscriber_address: Address to which subscription responses
        are to be sent.
    :ivar notify_address: Address to which notifications requests are to
        be sent. If absent, same as SubscriberAddress.
    :ivar consumer_address: Address to which data is to be sent. If
        absent, same as NotifyAddress.
    :ivar data_name_spaces: Default name space used to scope data
        identifiers.
    :ivar language: Preferred language in which to return text values.
    :ivar wgs_decimal_degrees: Geospatial coordinates are given as WGS
        84 latitude and longitude, decimal degrees of arc.
    :ivar gml_coordinate_format: Name of GML Coordinate format used for
        Geospatial points in responses.
    :ivar distance_units: Units for Distance Type. Default is metres.
        +SIRI v2.0
    :ivar velocity_units: Units for Velocity Type. Default is metres per
        second. +SIRI v2.0
    :ivar data_horizon: Maximum data horizon for requests.
    :ivar request_timeout: Timeout for requests. [Should this be
        separate for each type?]
    :ivar delivery_method: Whether Delivery is fetched or retrieved.
    :ivar multipart_despatch: Whether multi-part delivery is allowed,
        i.e. the breaking up of updates into more than one delivery
        messages with a MoreData flag,
    :ivar confirm_delivery: Whether Consumers should issue an
        acknowledgement on successful receipt of a delivery. Default is
        'false'.
    :ivar maximimum_number_of_subscriptions: Maximum number of
        subscriptions that can be sustained by the subscriber. If absent
        no limit.
    :ivar allowed_predictors: Who may make a prediction.
    :ivar prediction_function: Name of prediction method used.
    """
    check_status_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "CheckStatusAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscribe_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscribeAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    manage_subscription_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "ManageSubscriptionAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    get_data_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "GetDataAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    status_response_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "StatusResponseAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscriber_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriberAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    notify_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "NotifyAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    consumer_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "ConsumerAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    data_name_spaces: Optional[DataNameSpacesStructure] = field(
        default=None,
        metadata={
            "name": "DataNameSpaces",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    language: Optional[str] = field(
        default=None,
        metadata={
            "name": "Language",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    wgs_decimal_degrees: Optional[EmptyType] = field(
        default=None,
        metadata={
            "name": "WgsDecimalDegrees",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    gml_coordinate_format: Optional[str] = field(
        default=None,
        metadata={
            "name": "GmlCoordinateFormat",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    distance_units: Optional[str] = field(
        default=None,
        metadata={
            "name": "DistanceUnits",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    velocity_units: Optional[str] = field(
        default=None,
        metadata={
            "name": "VelocityUnits",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    data_horizon: Optional[XmlDuration] = field(
        default=None,
        metadata={
            "name": "DataHorizon",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    request_timeout: Optional[XmlDuration] = field(
        default=None,
        metadata={
            "name": "RequestTimeout",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    delivery_method: Optional[DeliveryMethodEnumeration] = field(
        default=None,
        metadata={
            "name": "DeliveryMethod",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    multipart_despatch: Optional[bool] = field(
        default=None,
        metadata={
            "name": "MultipartDespatch",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    confirm_delivery: Optional[bool] = field(
        default=None,
        metadata={
            "name": "ConfirmDelivery",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    maximimum_number_of_subscriptions: Optional[int] = field(
        default=None,
        metadata={
            "name": "MaximimumNumberOfSubscriptions",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    allowed_predictors: Optional[PredictorsEnumeration] = field(
        default=None,
        metadata={
            "name": "AllowedPredictors",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    prediction_function: Optional[str] = field(
        default=None,
        metadata={
            "name": "PredictionFunction",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class StatusResponseStructure(ResponseStructure):
    """
    Type for Response Status.

    :ivar request_message_ref: Arbitrary unique reference to the request
        which gave rise to this message.
    :ivar subscriber_ref: Unique identifier of Subscriber - reference to
        a Participant.
    :ivar subscription_filter_ref: Unique identifier of Subscription
        filter to which this subscription is assigned. If there is only
        a single filter, then can be omitted.
    :ivar subscription_ref: Reference to a service subscription: unique
        within Service and Subscriber.
    :ivar status: Whether the request was processed successfully.
    :ivar error_condition: Description of any error or warning
        condition.
    :ivar valid_until: End of data horizon of the data producer.
    :ivar shortest_possible_cycle: Minimum interval at which updates can
        be sent.
    """
    request_message_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "RequestMessageRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscriber_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriberRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscription_filter_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriptionFilterRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscription_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriptionRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    status: Optional[bool] = field(
        default=None,
        metadata={
            "name": "Status",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    error_condition: Optional[ServiceDeliveryErrorConditionStructure] = field(
        default=None,
        metadata={
            "name": "ErrorCondition",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    valid_until: Optional[XmlDateTime] = field(
        default=None,
        metadata={
            "name": "ValidUntil",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    shortest_possible_cycle: Optional[XmlDuration] = field(
        default=None,
        metadata={
            "name": "ShortestPossibleCycle",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class TerminateSubscriptionRequestBodyStructure:
    """Type for Body of Terminate Subscription Request content.

    Used in WSDL.

    :ivar subscriber_ref: Participant identifier of Subscriber.
        Subscription ref will be unique within this.
    :ivar all: Terminate all subscriptions for the requestor.
    :ivar subscription_ref: Terminate the subscription identified by the
        reference.
    """
    subscriber_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriberRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    all: Optional[EmptyType] = field(
        default=None,
        metadata={
            "name": "All",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscription_ref: List[str] = field(
        default_factory=list,
        metadata={
            "name": "SubscriptionRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class TerminateSubscriptionRequestStructure(AuthenticatedRequestStructure):
    """
    Type for request to terminate a subscription or subscriptions.

    :ivar address: Address to which response is to be sent. This may
        also be determined from RequestorRef and preconfigured data.
    :ivar requestor_ref:
    :ivar message_identifier: Arbitrary unique identifier that can be
        used to reference this message in subsequent interactions.
    :ivar delegator_address: Address of original Consumer, ie requesting
        system to which delegating response is to be returned. +SIRI
        2.0
    :ivar delegator_ref: Identifier of delegating system that originated
        message. +SIRI 2.0
    :ivar subscriber_ref: Participant identifier of Subscriber.
        Subscription ref will be unique within this.
    :ivar all: Terminate all subscriptions for the requestor.
    :ivar subscription_ref: Terminate the subscription identified by the
        reference.
    :ivar extensions:
    """
    address: Optional[str] = field(
        default=None,
        metadata={
            "name": "Address",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    requestor_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "RequestorRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
            "required": True,
        },
    )
    message_identifier: Optional[str] = field(
        default=None,
        metadata={
            "name": "MessageIdentifier",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    delegator_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "DelegatorAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    delegator_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "DelegatorRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscriber_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriberRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    all: Optional[EmptyType] = field(
        default=None,
        metadata={
            "name": "All",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscription_ref: List[str] = field(
        default_factory=list,
        metadata={
            "name": "SubscriptionRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    extensions: Optional[Extensions] = field(
        default=None,
        metadata={
            "name": "Extensions",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class TerminationResponseStatusStructure:
    """
    Type for Status of termination response.

    :ivar response_timestamp: Time at which this response was created.
    :ivar request_message_ref: Arbitrary unique reference to the request
        which gave rise to this message.
    :ivar subscriber_ref: Unique identifier of Subscriber - reference to
        a Participant.
    :ivar subscription_filter_ref: Unique identifier of Subscription
        filter to which this subscription is assigned. If there is only
        a single filter, then can be omitted.
    :ivar subscription_ref: Reference to a service subscription: unique
        within Service and Subscriber.
    :ivar status: Whether the termination was successful.
    :ivar error_condition: Description of any error or warning
        condition.
    """
    response_timestamp: Optional[XmlDateTime] = field(
        default=None,
        metadata={
            "name": "ResponseTimestamp",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    request_message_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "RequestMessageRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscriber_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriberRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscription_filter_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriptionFilterRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    subscription_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriptionRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    status: bool = field(
        default=True,
        metadata={
            "name": "Status",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
            "required": True,
        },
    )
    error_condition: Optional["TerminationResponseStatusStructure.ErrorCondition"] = field(
        default=None,
        metadata={
            "name": "ErrorCondition",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    @dataclass
    class ErrorCondition:
        """Error condition explaining why the termination failed.

        :ivar capability_not_supported_error:
        :ivar unknown_subscriber_error:
        :ivar unknown_subscription_error:
        :ivar other_error:
        :ivar description: Text description of error.
        """
        capability_not_supported_error: Optional[CapabilityNotSupportedError] = field(
            default=None,
            metadata={
                "name": "CapabilityNotSupportedError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        unknown_subscriber_error: Optional[UnknownSubscriberError] = field(
            default=None,
            metadata={
                "name": "UnknownSubscriberError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        unknown_subscription_error: Optional[UnknownSubscriptionError] = field(
            default=None,
            metadata={
                "name": "UnknownSubscriptionError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        other_error: Optional[OtherError] = field(
            default=None,
            metadata={
                "name": "OtherError",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
        description: Optional[str] = field(
            default=None,
            metadata={
                "name": "Description",
                "type": "Element",
                "namespace": "http://www.siri.org.uk/siri",
            },
        )
@dataclass
class CheckStatusRequest(CheckStatusRequestStructure):
    """Request from Consumer to Producer to check whether the service is
    working.

    Answered with a CheckStatusResponse.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class CheckStatusResponse(CheckStatusResponseStructure):
    """Response from Producer to Consumer to inform whether the service is
    working.

    Answers a CheckStatusRequest.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class ContextualisedRequestStructure:
    """
    Type for General SIRI Request.

    :ivar service_request_context: General request properties -
        typically configured rather than repeated on request.
    :ivar request_timestamp: Time at which the request was created.
    :ivar account_id: Account Identifier. May be used to attribute
        requests to a particular application provider and authentication
        key. The account may be common to all users of an application,
        or to an individual user. Note that to identify an individual
        user the RequestorRef can be used with an anonymised token. .
        +SIRI v2.0
    :ivar account_key: Authentication key for request. May be used to
        authenticate requests from a particular account. +SIRI v2.0
    :ivar address: Address to which response is to be sent. This may
        also be determined from RequestorRef and preconfigured data.
    :ivar requestor_ref:
    :ivar message_identifier: Arbitrary unique identifier that can be
        used to reference this message in subsequent interactions.
    :ivar delegator_address: Address of original Consumer, ie requesting
        system to which delegating response is to be returned. +SIRI
        2.0
    :ivar delegator_ref: Identifier of delegating system that originated
        message. +SIRI 2.0
    """
    service_request_context: Optional[ServiceRequestContextStructure] = field(
        default=None,
        metadata={
            "name": "ServiceRequestContext",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    request_timestamp: Optional[XmlDateTime] = field(
        default=None,
        metadata={
            "name": "RequestTimestamp",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
            "required": True,
        },
    )
    account_id: Optional[str] = field(
        default=None,
        metadata={
            "name": "AccountId",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    account_key: Optional[str] = field(
        default=None,
        metadata={
            "name": "AccountKey",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    address: Optional[str] = field(
        default=None,
        metadata={
            "name": "Address",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    requestor_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "RequestorRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
            "required": True,
        },
    )
    message_identifier: Optional[str] = field(
        default=None,
        metadata={
            "name": "MessageIdentifier",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    delegator_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "DelegatorAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    delegator_ref: Optional[str] = field(
        default=None,
        metadata={
            "name": "DelegatorRef",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class DataReadyAcknowledgement(DataReadyResponseStructure):
    """
    Response from Consumer to Producer to acknowledge to Producer that a
    DataReadyRequest has been received.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class DataReadyNotification(DataReadyRequestStructure):
    """Request from Producer to Consumer to notify that a data update is ready
    to fetch.

    Answered with a DataReadyResponse.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class DataReceivedAcknowledgement(DataReceivedResponseStructure):
    """Response from Consumer to Producer to acknowledge that data has been
    received.

    Used as optional extra step if reliable delivery is needed. Answers
    a ServiceDelivery.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class DataSupplyRequest(DataSupplyRequestStructure):
    """Request from Consumer to Producer to fetch an update previously
    notified by a DataReady message.

    Answered with a ServiceDelivery.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class HeartbeatNotification(HeartbeatNotificationStructure):
    """
    Notification from Producer to Consumer to indicate that the service is
    running normally.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class ResponseStatus(StatusResponseStructure):
    """Contains information about the processing of an individual service subscription - either success info or an error condition. (VDV Acknowledgement)."""
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class TerminateSubscriptionRequest(TerminateSubscriptionRequestStructure):
    """Request from Subscriber to Subscription Manager to terminate a
    subscription.

    Answered with a TerminateSubscriptionResponse.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class TerminateSubscriptionResponseStructure(ResponseEndpointStructure):
    """
    Type for Response to a request to terminate a subscription or
    subscriptions.

    :ivar termination_response_status: Status of each subscription
        termination response.
    """
    termination_response_status: List[TerminationResponseStatusStructure] = field(
        default_factory=list,
        metadata={
            "name": "TerminationResponseStatus",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class SubscriptionResponseBodyStructure:
    """Type for Body of Subscription Response.

    Used in WSDL.

    :ivar response_status: Status of each requested subscription; at
        least one entry is required.
    :ivar subscription_manager_address: Endpoint address of subscription
        manager if different from that of the Producer or known default.
    :ivar service_started_time: Time at which service providing the
        subscription was last started. Can be used to detect restarts.
        If absent, unknown.
    """
    response_status: List[ResponseStatus] = field(
        default_factory=list,
        metadata={
            "name": "ResponseStatus",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
            "min_occurs": 1,
        },
    )
    subscription_manager_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriptionManagerAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    service_started_time: Optional[XmlDateTime] = field(
        default=None,
        metadata={
            "name": "ServiceStartedTime",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class SubscriptionResponseStructure(ResponseEndpointStructure):
    """
    Type for Subscription Response.

    :ivar response_status: Status of each requested subscription; at
        least one entry is required.
    :ivar subscription_manager_address: Endpoint address of subscription
        manager if different from that of the Producer or known default.
    :ivar service_started_time: Time at which service providing the
        subscription was last started. Can be used to detect restarts.
        If absent, unknown.
    :ivar extensions:
    """
    response_status: List[ResponseStatus] = field(
        default_factory=list,
        metadata={
            "name": "ResponseStatus",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
            "min_occurs": 1,
        },
    )
    subscription_manager_address: Optional[str] = field(
        default=None,
        metadata={
            "name": "SubscriptionManagerAddress",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    service_started_time: Optional[XmlDateTime] = field(
        default=None,
        metadata={
            "name": "ServiceStartedTime",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
    extensions: Optional[Extensions] = field(
        default=None,
        metadata={
            "name": "Extensions",
            "type": "Element",
            "namespace": "http://www.siri.org.uk/siri",
        },
    )
@dataclass
class TerminateSubscriptionResponse(TerminateSubscriptionResponseStructure):
    """Response from Subscription Manager to Subscriber confirming the
    termination of one or more subscriptions.

    Answers a TerminateSubscriptionRequest.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
@dataclass
class SubscriptionResponse(SubscriptionResponseStructure):
    """Response from Producer to Consumer to inform whether subscriptions have
    been created.

    Answers a previous SubscriptionRequest.
    """
    class Meta:
        namespace = "http://www.siri.org.uk/siri"
| en | 0.805163 | Name spaces. :ivar stop_point_name_space: Name space for STOP POINTs. :ivar line_name_space: Name space for LINE names and DIRECTIONss. :ivar product_category_name_space: Name space for Product Categories. :ivar service_feature_name_space: Name space for service features. :ivar vehicle_feature_name_space: Name space for VEHICLE features. Type for Body of Data Supply Request. Used in WSDL. :ivar notification_ref: Reference to a specific notification message for which data is to be fetched. Can be used to distinguish between notfcatiosn for the same service and subscriber but for different filters.If none specified, :ivar all_data: Whether to return all data, or just new updates since the last delivery. Default false, i.e. just updates. Delivery Method: Fetched or Direct Delivery. Allowed values for predictors. Type for check status request. Type for Body of Service Status Check Response. Used in WSDL. Same as CheckStatusResponseStructure, but without extension to be consistent with the other operation definition. :ivar status: :ivar data_ready: Whether data delivery is ready to be fetched SIRI v 2.0 :ivar error_condition: Description of any error or warning condition that applies to the status check. :ivar valid_until: End of data horizon of the data producer. :ivar shortest_possible_cycle: Minimum interval at which updates can be sent. :ivar service_started_time: Time at which current instantiation of service started. :ivar service_not_available_error: :ivar other_error: :ivar description: Text description of error. Type for Service Status Check Response. :ivar producer_ref: Unique identifier of Producer - Participant reference. :ivar address: Endpoint Address to which acknowledgements to confirm delivery are to be sent. :ivar response_message_identifier: An arbitrary unique reference associated with the response which may be used to reference it. 
:ivar request_message_ref: Reference to an arbitrary unique identifier associated with the request which gave rise to this response. :ivar delegator_address: Address of original Consumer, ie requesting system to which delegating response is to be returned. +SIRI 2.0 :ivar delegator_ref: Identifier of delegating system that originated message. +SIRI 2.0 :ivar status: :ivar data_ready: Whether data delivery is ready to be fetched SIRI v 2.0 :ivar error_condition: Description of any error or warning condition that applies to the status check. :ivar valid_until: End of data horizon of the data producer. :ivar shortest_possible_cycle: Minimum interval at which updates can be sent. :ivar service_started_time: Time at which current instantiation of service started. :ivar extensions: :ivar service_not_available_error: :ivar other_error: :ivar description: Text description of error. Type for Request from Producer to Consumer to notify that data update is ready to fetch. Answered with a DataReadyResponse. Type for Data ready Acknowledgement Response. :ivar status: :ivar error_condition: Description of any error or warning condition as to why Consumer cannot fetch data. :ivar unknown_subscription_error: :ivar other_error: :ivar description: Text description of error. Type for Data received Acknowledgement Response. :ivar status: :ivar error_condition: Description of any error or warning condition. :ivar unknown_subscription_error: :ivar other_error: :ivar description: Text description of error. Type for Data supply Request. :ivar notification_ref: Reference to a specific notification message for which data is to be fetched. Can be used to distinguish between notfcatiosn for the same service and subscriber but for different filters.If none specified, :ivar all_data: Whether to return all data, or just new updates since the last delivery. Default false, i.e. just updates. Type for Service Heartbeat Notification. 
:ivar status: :ivar data_ready: Whether data delivery is ready to be fetched SIRI v 2.0 :ivar error_condition: Description of any error or warning condition that applies to the status check. :ivar valid_until: End of data horizon of the data producer. :ivar shortest_possible_cycle: Minimum interval at which updates can be sent. :ivar service_started_time: Time at which current instantiation of service started. :ivar extensions: :ivar service_not_available_error: :ivar other_error: :ivar description: Text description of error. Configurable context for requests. Intended Primarily as a documentation mechanism. :ivar check_status_address: Address to which CheckStatus requests are to be sent. :ivar subscribe_address: Address to which subscription requests are to be sent. :ivar manage_subscription_address: Address to which subscription requests are to be sent. If absent, same as SubscribeAddress. :ivar get_data_address: Address to which requests are to return data. :ivar status_response_address: Address to which CheckStatus responses and heartbeats are to be sent. If absent, same as SubscriberAddress. :ivar subscriber_address: Address to which subscription responses are to be sent. :ivar notify_address: Address to which notifcations requests are to be sent. If absent, same as SubscriberAddress. :ivar consumer_address: Address to which data is to be sent. If absent, same as NotifyAddress. :ivar data_name_spaces: Default names pace used to scope data identifiers. :ivar language: Preferred language in which to return text values. :ivar wgs_decimal_degrees: Geospatial coordinates are given as Wgs 84 Latiude and longitude, decimial degrees of arc. :ivar gml_coordinate_format: Name of GML Coordinate format used for Geospatial points in responses. :ivar distance_units: Units for Distance Type. Default is metres. +SIRI v2.0 :ivar velocity_units: Units for Distance Type. Default is metres per second. +SIRI v2.0 :ivar data_horizon: Maximum data horizon for requests. 
:ivar request_timeout: Timeout for requests. [Should this be separate for each type?] :ivar delivery_method: Whether Delivery is fetched or retrieved. :ivar multipart_despatch: Whether multi-part delivery is allowed, i.e. the breaking up of updates into more than one delivery messages with a MoreData flag, :ivar confirm_delivery: Whether Consumers should issue an acknowledgement on successful receipt of a delivery. Default is ' false'. :ivar maximimum_number_of_subscriptions: Maximum Number of subscriptions that can be sustained by the subscriber. If absent no limit. :ivar allowed_predictors: Who may make a prediction. :ivar prediction_function: Name of prediction method used. Type for Response Status. :ivar request_message_ref: Arbitrary unique reference to the request which gave rise to this message. :ivar subscriber_ref: Unique identifier of Subscriber - reference to a Participant. :ivar subscription_filter_ref: Unique identifier of Subscription filter to which this subscription is assigned. If there is onlya single filter, then can be omitted. :ivar subscription_ref: Reference to a service subscription: unique within Service and Subscriber. :ivar status: :ivar error_condition: Description of any error or warning condition. :ivar valid_until: End of data horizon of the data producer. :ivar shortest_possible_cycle: Minimum interval at which updates can be sent. Type for Body of Terminate Subscription Request content. Used in WSDL. :ivar subscriber_ref: Participant identifier of Subscriber. Subscription ref will be unique within this. :ivar all: Terminate all subscriptions for the requestor. :ivar subscription_ref: Terminate the subscription identfiied by the reference. Type for request to terminate a subscription or subscriptions. :ivar address: Address to which response is to be sent. This may also be determined from RequestorRef and preconfigured data. 
:ivar requestor_ref: :ivar message_identifier: Arbitrary unique identifier that can be used to reference this message. n subsequent interactions. :ivar delegator_address: Address of original Consumer, ie requesting system to which delegating response is to be returned. +SIRI 2.0 :ivar delegator_ref: Identifier of delegating system that originated message. +SIRI 2.0 :ivar subscriber_ref: Participant identifier of Subscriber. Subscription ref will be unique within this. :ivar all: Terminate all subscriptions for the requestor. :ivar subscription_ref: Terminate the subscription identfiied by the reference. :ivar extensions: Type for Status of termination response. :ivar response_timestamp: :ivar request_message_ref: Arbitrary unique reference to the request which gave rise to this message. :ivar subscriber_ref: Unique identifier of Subscriber - reference to a Participant. :ivar subscription_filter_ref: Unique identifier of Subscription filter to which this subscription is assigned. If there is onlya single filter, then can be omitted. :ivar subscription_ref: Reference to a service subscription: unique within Service and Subscriber. :ivar status: :ivar error_condition: Description of any error or warning condition. :ivar capability_not_supported_error: :ivar unknown_subscriber_error: :ivar unknown_subscription_error: :ivar other_error: :ivar description: Text description of error. Request from Consumer to Producer to check whether services is working. Answers a CheckStatusRequest. Response from Producer to Consumer to inform whether services is working. Answers a CheckStatusRequest. Type for General SIRI Request. :ivar service_request_context: General request properties - typically configured rather than repeated on request. :ivar request_timestamp: :ivar account_id: Account Identifier. May be used to attribute requests to a particular application provider and authentication key. The account may be common to all users of an application, or to an individual user. 
Note that to identify an individual user the RequestorRef can be used with an anonymised token. . +SIRI v2.0 :ivar account_key: Authentication key for request. May be used to authenticate requests from a particular account. +SIRI v2.0 :ivar address: Address to which response is to be sent. This may also be determined from RequestorRef and preconfigured data. :ivar requestor_ref: :ivar message_identifier: Arbitrary unique identifier that can be used to reference this message. n subsequent interactions. :ivar delegator_address: Address of original Consumer, ie requesting system to which delegating response is to be returned. +SIRI 2.0 :ivar delegator_ref: Identifier of delegating system that originated message. +SIRI 2.0 Response from Consumer to Producer to acknowledge to Producer that a DataReadyRequest has been received. Request from Producer to Consumer to notify that data update is ready to fetch. Answered with a DataReadyResponse. Response from Consumer to Producer to acknowledge that data hase been received. Used as optioanl extra step if reliable delivery is needed. Answers a ServiceDelivery. Request from Consumer to Producer to fetch update previously notified by a Data ready message. Answered with a Service Delivery. Notification from Producer to Consumer to indicate that the service is running normally. Contains infromation about the processing of a an individual service subscription - either success info or an error condition. (VDV Acknowledgement). Request from Subscriber to Subscription Manager to terminate a subscription. Answered with a TerminateSubscriptionResponse. Type for Response to a request to terminate a subscription or subscriptions. :ivar termination_response_status: Status of each subscription termnination response. Type for Body of Subscription Response. Used in WSDL. :ivar response_status: :ivar subscription_manager_address: Endpoint address of subscription manager if different from that of the Producer or known default. 
:ivar service_started_time: Time at which service providing the subscription was last started. Can be used to detect restarts. If absent, unknown. Type for Subscription Response. :ivar response_status: :ivar subscription_manager_address: Endpoint address of subscription manager if different from that of the Producer or known default. :ivar service_started_time: Time at which service providing the subscription was last started. Can be used to detect restarts. If absent, unknown. :ivar extensions: Request from Subscriber to Subscription Manager to terminate a subscription. Answered with a TerminateSubscriptionResponse. Response from Producer to Consumer to inform whether subscriptions have been created. Answers a previous SubscriptionRequest. | 2.306487 | 2 |
app/config.py | GShamian/flask-training-blog | 0 | 6619695 | <reponame>GShamian/flask-training-blog<gh_stars>0
class Configuration(object):
    """Flask application configuration object (passed to ``app.config.from_object``)."""

    # WARNING: debug mode must be disabled in production deployments.
    DEBUG = True
    # Disable the modification-tracking signal machinery (saves memory/CPU).
    SQLALCHEMY_TRACK_MODIFICATIONS = False
    # NOTE(review): database credentials are hard-coded in the URI — move them
    # to environment variables or a secrets store before deploying.
    SQLALCHEMY_DATABASE_URI = 'mysql+mysqlconnector://root:120505Aa!@localhost/test1'
    # Used by Flask for session signing; must be kept secret.
    SECRET_KEY = 'something very secret ;)'
    # Flask-Security settings
    SECURITY_PASSWORD_SALT = '<PASSWORD>'
    SECURITY_PASSWORD_HASH = '<PASSWORD>'
| class Configuration(object):
DEBUG = True
SQLALCHEMY_TRACK_MODIFICATIONS = False
SQLALCHEMY_DATABASE_URI = 'mysql+mysqlconnector://root:120505Aa!@localhost/test1'
SECRET_KEY = 'something very secret ;)'
#Flask-Security
SECURITY_PASSWORD_SALT = '<PASSWORD>'
SECURITY_PASSWORD_HASH = '<PASSWORD>' | en | 0.539201 | #Flask-Security | 1.859941 | 2 |
src/pyscaffold/extensions/no_pyproject.py | Bing1012/3 | 1,040 | 6619696 | """
Extension that omits the creation of file `pyproject.toml`.
Since the isolated builds with PEP517/PEP518 are not completely backward compatible,
this extension provides an escape hatch for people that want to maintain the legacy
behaviour.
.. warning::
This extension is **transitional** and will be removed in future versions of
PyScaffold.
Once support for isolated builds stabilises, the Python
community will likely move towards using them more exclusively.
"""
from typing import List
from .. import structure
from ..actions import Action, ActionParams, ScaffoldOpts, Structure
from . import Extension
class NoPyProject(Extension):
    """Do not include a pyproject.toml file in the project root, and thus avoid isolated
    builds as defined in PEP517/518 [not recommended]
    """

    # CLI flag name: ``--no-pyproject``.
    name = "no_pyproject"

    def activate(self, actions: List[Action]) -> List[Action]:
        # Register the option early (before defaults are computed), then remove
        # the file only after the full project structure has been defined.
        actions = self.register(actions, ensure_option, before="get_default_options")
        return self.register(actions, remove_files, after="define_structure")
def ensure_option(struct: Structure, opts: ScaffoldOpts) -> ActionParams:
    """Make option available in non-CLI calls (used by other parts of PyScaffold)."""
    updated_opts = dict(opts)
    updated_opts.update(pyproject=False, isolated_build=False)
    return struct, updated_opts
def remove_files(struct: Structure, opts: ScaffoldOpts) -> ActionParams:
    """Drop ``pyproject.toml`` from the generated project structure."""
    return structure.reject(struct, "pyproject.toml"), opts
| """
Extension that omits the creation of file `pyproject.toml`.
Since the isolated builds with PEP517/PEP518 are not completely backward compatible,
this extension provides an escape hatch for people that want to maintain the legacy
behaviour.
.. warning::
This extension is **transitional** and will be removed in future versions of
PyScaffold.
Once support for isolated builds stabilises, the Python
community will likely move towards using them more exclusively.
"""
from typing import List
from .. import structure
from ..actions import Action, ActionParams, ScaffoldOpts, Structure
from . import Extension
class NoPyProject(Extension):
"""Do not include a pyproject.toml file in the project root, and thus avoid isolated
builds as defined in PEP517/518 [not recommended]
"""
name = "no_pyproject"
def activate(self, actions: List[Action]) -> List[Action]:
actions = self.register(actions, ensure_option, before="get_default_options")
return self.register(actions, remove_files, after="define_structure")
def ensure_option(struct: Structure, opts: ScaffoldOpts) -> ActionParams:
"""Make option available in non-CLI calls (used by other parts of PyScaffold)"""
return struct, {**opts, "pyproject": False, "isolated_build": False}
def remove_files(struct: Structure, opts: ScaffoldOpts) -> ActionParams:
struct = structure.reject(struct, "pyproject.toml")
return struct, opts
| en | 0.915141 | Extension that omits the creation of file `pyproject.toml`. Since the isolated builds with PEP517/PEP518 are not completely backward compatible, this extension provides an escape hatch for people that want to maintain the legacy behaviour. .. warning:: This extension is **transitional** and will be removed in future versions of PyScaffold. Once support for isolated builds stabilises, the Python community will likely move towards using them more exclusively. Do not include a pyproject.toml file in the project root, and thus avoid isolated builds as defined in PEP517/518 [not recommended] Make option available in non-CLI calls (used by other parts of PyScaffold) | 2.27298 | 2 |
network.py | mehularora8/UNO | 0 | 6619697 | import socket
from multiprocessing.connection import Client
class Network():
    """Client-side connection helper.

    Wraps a :class:`multiprocessing.connection.Client` connected to a game
    server on the local machine (port 5555) and exchanges picklable objects
    with it.
    """

    def __init__(self):
        # self.client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Resolve this machine's own address — server is assumed to run locally.
        self.server = socket.gethostbyname(socket.gethostname())
        self.port = 5555
        self.client = Client((self.server, self.port))
        self.addr = (self.server, self.port)
        # which player number are we?
        # The server's first message assigns our player number.
        self.playerNumber = self.connect()

    def getPlayerNumber(self):
        """Return the player number assigned by the server at connect time."""
        return self.playerNumber

    def connect(self):
        """Receive the initial handshake message (the player number).

        Returns None if the receive fails.
        NOTE(review): ``str(e)`` discards the exception text — presumably meant
        to be printed/logged; confirm and fix.
        """
        try:
            # self.client.connect(self.addr)
            return self.client.recv()
        except socket.error as e:
            str(e)
            print("Could not connect")

    def send(self, data, typeOfData):
        """
        Param: type- What type of data are you sending? "C" for command or "M" for move
        """
        # NOTE(review): both branches return None implicitly on error or when
        # sending the "move" command — callers must handle a None reply.
        if typeOfData == "C":
            # Sending a command and not a move
            try:
                self.client.send(data)
                # When the command is move, we have to send the Card object as well.
                # Don't return immediately.
                if data != "move":
                    receivedData = self.client.recv()
                    return receivedData
            except socket.error as e:
                print(e)
        elif typeOfData == "M":
            # Sending a move and not a command
            try:
                self.client.send(data)
                receivedData = self.client.recv()
                return receivedData
            except socket.error as e:
                print(e)
print(e) | import socket
from multiprocessing.connection import Client
class Network():
def __init__(self):
# self.client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.server = socket.gethostbyname(socket.gethostname())
self.port = 5555
self.client = Client((self.server, self.port))
self.addr = (self.server, self.port)
# which player number are we?
self.playerNumber = self.connect()
def getPlayerNumber(self):
return self.playerNumber
def connect(self):
try:
# self.client.connect(self.addr)
return self.client.recv()
except socket.error as e:
str(e)
print("Could not connect")
def send(self, data, typeOfData):
"""
Param: type- What type of data are you sending? "C" for command or "M" for move
"""
if typeOfData == "C":
# Sending a command and not a move
try:
self.client.send(data)
# When the command is move, we have to send the Card object as well.
# Don't return immediately.
if data != "move":
receivedData = self.client.recv()
return receivedData
except socket.error as e:
print(e)
elif typeOfData == "M":
# Sending a move and not a command
try:
self.client.send(data)
receivedData = self.client.recv()
return receivedData
except socket.error as e:
print(e) | en | 0.900858 | # self.client = socket.socket(socket.AF_INET, socket.SOCK_STREAM) # which player number are we? # self.client.connect(self.addr) Param: type- What type of data are you sending? "C" for command or "M" for move # Sending a command and not a move # When the command is move, we have to send the Card object as well. # Don't return immediately. # Sending a move and not a command | 3.282053 | 3 |
src/python/WMComponent/DBS3Buffer/Oracle/DBSBufferFiles/HeritageLFNChild.py | khurtado/WMCore | 21 | 6619698 | #!/usr/bin/env python
"""
_HeritageLFNChild_
Oracle implementation of DBSBufferFiles.HeritageLFNChild
"""
from WMComponent.DBS3Buffer.MySQL.DBSBufferFiles.HeritageLFNChild import \
HeritageLFNChild as MySQLHeritageLFNChild
class HeritageLFNChild(MySQLHeritageLFNChild):
    """Oracle variant that reuses the MySQL implementation unchanged.

    Presumably the MySQL SQL is portable to Oracle — confirm against the
    MySQL DAO before relying on dialect-specific behaviour.
    """
    pass
| #!/usr/bin/env python
"""
_HeritageLFNChild_
Oracle implementation of DBSBufferFiles.HeritageLFNChild
"""
from WMComponent.DBS3Buffer.MySQL.DBSBufferFiles.HeritageLFNChild import \
HeritageLFNChild as MySQLHeritageLFNChild
class HeritageLFNChild(MySQLHeritageLFNChild):
pass
| en | 0.373217 | #!/usr/bin/env python _HeritageLFNChild_ Oracle implementation of DBSBufferFiles.HeritageLFNChild | 1.298066 | 1 |
mdevaluate/coordinates.py | lheyer/mdevaluate | 0 | 6619699 | <reponame>lheyer/mdevaluate
from functools import partial, lru_cache, wraps
from copy import copy
from .logging import logger
import numpy as np
from scipy.spatial import cKDTree, KDTree
from .atoms import AtomSubset
from .pbc import whole, nojump, pbc_diff
from .utils import mask2indices, singledispatchmethod
from .checksum import checksum
class UnknownCoordinatesMode(Exception):
    """Raised when a PBC mode is not one of CoordinateFrame._known_modes."""
    pass
def rotate_axis(coords, axis):
    """
    Rotate a set of coordinates so that ``axis`` is mapped onto the z-axis.

    Uses the Rodrigues rotation formula about the axis perpendicular to both
    ``axis`` and z.

    Args:
        coords: Array of shape (3,) or (N, 3) with cartesian coordinates.
        axis: Direction (3-vector) that should end up along z; need not be normed.

    Returns:
        The rotated coordinates, same shape as ``coords``.

    Note:
        If ``axis`` is exactly antiparallel to z, the rotation axis is
        undefined (zero cross product) and the result contains NaNs.
    """
    axis = np.array(axis) / np.linalg.norm(axis)
    zaxis = np.array([0, 0, 1])
    if (axis == zaxis).sum() == 3:
        # Already aligned with z: nothing to do.
        return coords
    rotation_axis = np.cross(axis, zaxis)
    rotation_axis = rotation_axis / np.linalg.norm(rotation_axis)

    theta = np.arccos(axis @ zaxis / np.linalg.norm(axis))

    # Rodrigues formula: R = cos(t) I + (1 - cos(t)) u u^T + sin(t) [u]_x
    ux, uy, uz = rotation_axis
    cross_matrix = np.array([
        [0, -uz, uy],
        [uz, 0, -ux],
        [-uy, ux, 0]
    ])
    rotation_matrix = np.cos(theta) * np.identity(len(axis)) \
        + (1 - np.cos(theta)) * rotation_axis.reshape(-1, 1) @ rotation_axis.reshape(1, -1) \
        + np.sin(theta) * cross_matrix

    if len(coords.shape) == 2:
        # Vectorized: (R @ x_i) for every row is coords @ R.T — replaces the
        # former per-row Python loop with a single matmul.
        rotated = coords @ rotation_matrix.T
    else:
        rotated = rotation_matrix @ coords
    return rotated
def spherical_radius(frame, origin=None):
    """
    Transform a frame of cartesian coordinates into the spherical radius.

    Args:
        frame: Coordinates of shape (..., 3).
        origin (opt.): Reference point; defaults to the center of the
            frame's box (requires ``frame.box``).
    """
    if origin is None:
        origin = frame.box.diagonal() / 2
    offset = frame - origin
    return ((offset ** 2).sum(axis=-1)) ** 0.5
def polar_coordinates(x, y):
    """Convert cartesian coordinates (x, y) to polar (radius, phi)."""
    phi = np.arctan2(y, x)
    radius = (x ** 2 + y ** 2) ** 0.5
    return radius, phi
def spherical_coordinates(x, y, z):
    """
    Convert cartesian to spherical coordinates.

    Returns:
        (radius, phi, theta) — radial distance, azimuthal angle in the x-y
        plane, and polar angle measured from the z-axis.

    Bug fix: theta was previously computed as ``arccos(z / sqrt(x² + y²))``,
    which is not the polar angle (its argument can leave [-1, 1] and divides
    by zero on the z-axis). The polar angle is ``arccos(z / radius)``.
    """
    radius = (x ** 2 + y ** 2 + z ** 2) ** 0.5
    phi = np.arctan2(y, x)
    theta = np.arccos(z / radius)
    return radius, phi, theta
def radial_selector(frame, coordinates, rmin, rmax):
    """
    Return indices of all atoms whose cylindrical radius (in the x-y plane)
    lies in the interval [rmin, rmax].
    """
    positions = coordinates[frame.step]
    radius, _ = polar_coordinates(positions[:, 0], positions[:, 1])
    within = (radius >= rmin) & (radius <= rmax)
    return mask2indices(within)
def spatial_selector(frame, transform, rmin, rmax):
    """
    Select a subset of atoms whose transformed coordinate lies between rmin and rmax.

    Coordinates are filtered by the condition::

        rmin <= transform(frame) <= rmax

    Args:
        frame: The coordinates of the actual trajectory
        transform:
            A function that maps the frame's coordinates onto a
            one-dimensional spatial coordinate (e.g. radius).
        rmin: Minimum value of the radius
        rmax: Maximum value of the radius
    """
    values = transform(frame)
    within = (rmin <= values) & (values <= rmax)
    return mask2indices(within)
class CoordinateFrame(np.ndarray):
    """A single trajectory frame: an ndarray of positions plus trajectory metadata.

    Instances carry a back-reference to their :class:`Coordinates` object
    (``self.coordinates``), their frame index (``self.step``) and the PBC mode
    (``self.mode``), which survive slicing via ``__array_finalize__``.
    """

    # Valid values for ``mode``; checked by Coordinates.mode setter.
    _known_modes = ('pbc', 'whole', 'nojump')

    @property
    def box(self):
        # Simulation box of this frame, read from the underlying reader.
        return np.array(self.coordinates.frames[self.step].box)

    @property
    def volume(self):
        # Product of the diagonal entries; assumes a rectangular box
        # (off-diagonal components ignored) — TODO confirm for triclinic boxes.
        return self.box.diagonal().cumprod()[-1]

    @property
    def time(self):
        return self.coordinates.frames[self.step].time

    @property
    def masses(self):
        # Masses of the selected atom subset only.
        return self.coordinates.atoms.masses[self.coordinates.atom_subset.selection]

    @property
    def charges(self):
        # Charges of the selected atom subset only.
        return self.coordinates.atoms.charges[self.coordinates.atom_subset.selection]

    @property
    def residue_ids(self):
        return self.coordinates.atom_subset.residue_ids

    @property
    def residue_names(self):
        return self.coordinates.atom_subset.residue_names

    @property
    def atom_names(self):
        return self.coordinates.atom_subset.atom_names

    @property
    def indices(self):
        return self.coordinates.atom_subset.indices

    @property
    def selection(self):
        return self.coordinates.atom_subset.selection

    @property
    def whole(self):
        # Frame with molecules made whole across periodic boundaries.
        frame = whole(self)
        frame.mode = 'whole'
        return frame

    @property
    def pbc(self):
        # Frame wrapped back into the primary box (elementwise modulo).
        frame = self % self.box.diagonal()
        frame.mode = 'pbc'
        return frame

    @property
    def nojump(self):
        # Frame with periodic jumps removed; no-op if already in nojump mode.
        if self.mode != 'nojump':
            frame = nojump(self)
            frame.mode = 'nojump'
            return frame
        else:
            return self

    def __new__(subtype, shape, dtype=float, buffer=None, offset=0, strides=None, order=None,
                coordinates=None, step=None, box=None, mode=None):
        # NOTE(review): the ``order`` and ``box`` parameters are accepted but
        # never used — confirm whether they can be removed.
        obj = np.ndarray.__new__(subtype, shape, dtype, buffer, offset, strides)

        obj.coordinates = coordinates
        obj.step = step
        obj.mode = mode
        return obj

    def __array_finalize__(self, obj):
        # Propagate metadata to views/slices created from this frame.
        if obj is None:
            return

        self.coordinates = getattr(obj, 'coordinates', None)
        self.step = getattr(obj, 'step', None)
        self.mode = getattr(obj, 'mode', None)
class Coordinates:
    """
    Coordinates represent trajectory data, which is used for evaluation functions.

    Atoms may be selected by specifying an atom_subset or an atom_filter.
    """

    def get_mode(self, mode):
        # Return a sliced copy of this object with the given PBC mode applied.
        if self.atom_subset is not None:
            return Coordinates(frames=self.frames, atom_subset=self.atom_subset, mode=mode)[self._slice]
        else:
            return Coordinates(frames=self.frames, atom_filter=self.atom_filter, mode=mode)[self._slice]

    @property
    def pbc(self):
        return self.get_mode('pbc')

    @property
    def whole(self):
        return self.get_mode('whole')

    @property
    def nojump(self):
        return self.get_mode('nojump')

    @property
    def mode(self):
        return self._mode

    @mode.setter
    def mode(self, val):
        # Deprecated mutation path; prefer the pbc/whole/nojump properties,
        # which return copies instead of mutating in place.
        if val in CoordinateFrame._known_modes:
            logger.warn('Changing the Coordinates mode directly is deprecated. Use Coordinates.%s instead, which returns a copy.', val)
            self._mode = val
        else:
            raise UnknownCoordinatesMode('No such mode: {}'.format(val))

    def __init__(self, frames, atom_filter=None, atom_subset: AtomSubset=None, mode=None):
        """
        Args:
            frames: The trajectory reader
            atom_filter (opt.): A mask which selects a subset of the system
            atom_subset (opt.): A AtomSubset that selects a subset of the system
            mode (opt.): PBC mode of the Coordinates, can be pbc, whole or nojump.

        Note:
            The caching in Coordinates is deprecated, use the CachedReader or the function open
            from the reader module instead.
        """
        self._mode = mode
        self.frames = frames
        self._slice = slice(None)
        assert atom_filter is None or atom_subset is None, "Cannot use both: subset and filter"

        if atom_filter is not None:
            self.atom_filter = atom_filter
            self.atom_subset = None
        elif atom_subset is not None:
            self.atom_filter = atom_subset.selection
            self.atom_subset = atom_subset
            self.atoms = atom_subset.atoms
        else:
            # Default: select every atom of the first frame.
            self.atom_filter = np.ones(shape=(len(frames[0].coordinates),), dtype=bool)
            self.atom_subset = None

    def get_frame(self, fnr):
        """Returns the fnr-th frame."""
        try:
            if self.atom_filter is not None:
                frame = self.frames[fnr].positions[self.atom_filter].view(CoordinateFrame)
            else:
                frame = self.frames.__getitem__(fnr).positions.view(CoordinateFrame)
            frame.coordinates = self
            frame.step = fnr
            # Apply the PBC mode (pbc/whole/nojump property of the frame).
            if self.mode is not None:
                frame = getattr(frame, self.mode)
        except EOFError:
            # Translate reader EOF into the sequence-protocol signal so that
            # iteration terminates cleanly.
            raise IndexError

        return frame

    def clear_cache(self):
        """Clears the frame cache, if it is enabled."""
        if hasattr(self.get_frame, 'clear_cache'):
            self.get_frame.clear_cache()

    def __iter__(self):
        # Respect the active slice when iterating over frame indices.
        for i in range(len(self))[self._slice]:
            yield self[i]

    @singledispatchmethod
    def __getitem__(self, item):
        # Integer (default) case: fetch a single frame.
        return self.get_frame(item)

    @__getitem__.register(slice)
    def _(self, item):
        # Slice case: return a shallow copy restricted to the slice.
        sliced = copy(self)
        sliced._slice = item
        return sliced

    def __len__(self):
        return len(self.frames)

    def __checksum__(self):
        return checksum(self.frames, self.atom_filter, self._slice, self.mode)

    def __repr__(self):
        return "Coordinates <{}>: {}".format(self.frames.filename, self.atom_subset)

    @wraps(AtomSubset.subset)
    def subset(self, **kwargs):
        # Narrow the atom selection; delegates to AtomSubset.subset.
        return Coordinates(self.frames, atom_subset=self.atom_subset.subset(**kwargs), mode=self._mode)

    @property
    def description(self):
        return self.atom_subset.description

    @description.setter
    def description(self, desc):
        self.atom_subset.description = desc
class MeanCoordinates(Coordinates):
    """Coordinates wrapper that averages each frame with the following
    ``mean - 1`` frames (a sliding-window mean over the trajectory)."""

    def __init__(self, frames, atom_filter=None, mean=1):
        super().__init__(frames, atom_filter)
        self.mean = mean
        assert mean >= 1, "Mean must be positive"

    def __getitem__(self, item):
        # Average frames item .. item + mean - 1.
        frame = super().__getitem__(item)
        for i in range(item + 1, item + self.mean):
            frame += super().__getitem__(i)

        return frame / self.mean

    def __len__(self):
        # Number of complete averaging windows.
        # Bug fix: the original method was ``def len(self): return
        # len(super() - self.mean + 1)``, which always raised TypeError
        # (``super() - int`` is undefined and the result is not sized).
        return super().__len__() - self.mean + 1

    # Backwards-compatible alias for the (previously broken) public ``len`` method.
    len = __len__
class CoordinatesMap:
    """Lazily apply a transformation function to every frame of a Coordinates
    object, mimicking the Coordinates interface (iteration, indexing, subset,
    PBC-mode properties)."""

    def __init__(self, coordinates, function):
        self.coordinates = coordinates
        self.frames = self.coordinates.frames
        self.atom_subset = self.coordinates.atom_subset
        self.function = function
        # For functools.partial objects the human-readable name lives on .func.
        if isinstance(function, partial):
            self._description = self.function.func.__name__
        else:
            self._description = self.function.__name__

    def __iter__(self):
        for frame in self.coordinates:
            step = frame.step
            frame = self.function(frame)
            # Re-wrap plain arrays so metadata (coordinates, step) is preserved.
            if not isinstance(frame, CoordinateFrame):
                frame = frame.view(CoordinateFrame)
            frame.coordinates = self
            frame.step = step
            yield frame

    def __getitem__(self, item):
        if isinstance(item, slice):
            # Slicing returns a new map over the sliced coordinates.
            return self.__class__(self.coordinates[item], self.function)
        else:
            frame = self.function(self.coordinates.__getitem__(item))
            if not isinstance(frame, CoordinateFrame):
                frame = frame.view(CoordinateFrame)
            frame.coordinates = self
            frame.step = item
            return frame

    def __len__(self):
        return len(self.coordinates.frames)

    def __checksum__(self):
        return checksum(self.coordinates, self.function)

    @wraps(Coordinates.subset)
    def subset(self, **kwargs):
        return CoordinatesMap(self.coordinates.subset(**kwargs), self.function)

    @property
    def description(self):
        # Combined description: "<function>_<underlying coordinates>".
        return '{}_{}'.format(self._description, self.coordinates.description)

    @description.setter
    def description(self, desc):
        self._description = desc

    @property
    def nojump(self):
        return CoordinatesMap(self.coordinates.nojump, self.function)

    @property
    def whole(self):
        return CoordinatesMap(self.coordinates.whole, self.function)

    @property
    def pbc(self):
        return CoordinatesMap(self.coordinates.pbc, self.function)
class CoordinatesFilter:
    """Wrap a Coordinates-like sequence and restrict every frame to a fixed
    atom subset given by ``atom_filter`` (mask or index list)."""

    @property
    def atom_subset(self):
        # Placeholder for interface compatibility; returns None.
        pass

    def __init__(self, coordinates, atom_filter):
        self.coordinates = coordinates
        self.atom_filter = atom_filter

    def __getitem__(self, item):
        if isinstance(item, slice):
            # Slicing keeps the filter and narrows the underlying frames.
            filtered = copy(self)
            filtered.coordinates = self.coordinates[item]
            return filtered
        frame = self.coordinates[item]
        return frame[self.atom_filter]
class CoordinatesKDTree:
    """
    A KDTree of coordinates frames. The KDtrees are cached by a :func:`functools.lru_cache`.
    Uses :class:`scipy.spatial.cKDTree` by default, since it's significantly faster.
    Make sure to use scipy 0.17 or later or switch to the normal KDTree, since cKDTree has
    a memory leak in earlier versions.
    """

    def clear_cache(self):
        """Clear the LRU cache."""
        self._get_tree_at_index.cache_clear()

    @property
    def cache_info(self):
        """Return info about the state of the cache."""
        return self._get_tree_at_index.cache_info()

    def _get_tree_at_index(self, index):
        # Build a KDTree for the selected atoms of the given frame.
        # Rebound per instance in __init__ with an lru_cache wrapper, so the
        # cache is per-instance (avoids the shared-cache-on-method pitfall).
        frame = self.frames[index]
        return self.kdtree(frame[self.selector(frame)])

    def __init__(self, frames, selector=None, boxsize=None, maxcache=128, ckdtree=True):
        """
        Args:
            frames: Trajectory of the simulation, can be Coordinates object or reader
            selector: Selector function that selects a subset of each frame
            boxsize (opt.): Periodic box size passed through to the KDTree.
            maxcache: Maxsize of the :func:`~functools.lru_cache`
            ckdtree: Use :class:`~scipy.spatial.cKDTree` or :class:`~scipy.spatial.KDTree` if False
        """
        if selector is not None:
            self.selector = selector
        else:
            # Default: select every atom (full-frame slice).
            self.selector = lambda x: slice(None)
        self.frames = frames
        self.kdtree = cKDTree if ckdtree else KDTree
        if boxsize is not None:
            self.kdtree = partial(self.kdtree, boxsize=boxsize)
        self._get_tree_at_index = lru_cache(maxsize=maxcache)(self._get_tree_at_index)

    def __getitem__(self, index):
        return self._get_tree_at_index(index)

    def __checksum__(self):
        return checksum(self.selector, self.frames)

    def __eq__(self, other):
        # NOTE(review): falls back to identity comparison (object.__eq__);
        # presumably intentional so trees compare by instance — confirm.
        return super().__eq__(other)
def map_coordinates(func):
    """Decorator: turn a frame-transforming function into one that accepts a
    Coordinates object and returns a lazy :class:`CoordinatesMap` over it.

    Keyword arguments of the call are bound into the mapped function.
    """
    @wraps(func)
    def wrapper(coordinates, **kwargs):
        return CoordinatesMap(coordinates, partial(func, **kwargs))
    return wrapper
@map_coordinates
def centers_of_mass(c, *, masses=None):
"""
A- 1
B- 2
A- 1
C 3
A-
B-
A-
C
A-
B-
A-
C
Example:
rd = XTCReader('t.xtc')
coordinates = Coordinates(rd)
com = centers_of_mass(coordinates, (1.0, 2.0, 1.0, 3.0))
"""
# At first, regroup our array
number_of_masses = len(masses)
number_of_coordinates, number_of_dimensions = c.shape
number_of_new_coordinates = number_of_coordinates // number_of_masses
grouped_masses = c.reshape(number_of_new_coordinates, number_of_masses, number_of_dimensions)
return np.average(grouped_masses, axis=1, weights=masses)
@map_coordinates
def pore_coordinates(coordinates, origin, sym_axis='z'):
"""
Map coordinates of a pore simulation so the pore has cylindrical symmetry.
Args:
coordinates: Coordinates of the simulation
origin: Origin of the pore which will be the coordinates origin after mapping
sym_axis (opt.): Symmtery axis of the pore, may be a literal direction
'x', 'y' or 'z' or an array of shape (3,)
"""
if sym_axis in ('x', 'y', 'z'):
rot_axis = np.zeros(shape=(3,))
rot_axis[['x', 'y', 'z'].index(sym_axis)] = 1
else:
rot_axis = sym_axis
return rotate_axis(coordinates - origin, rot_axis)
@map_coordinates
def vectors(coordinates, atoms_a, atoms_b, normed=False, box=None):
"""
Compute the vectors between the atoms of two subsets.
Args:
coordinates: The Coordinates object the atoms will be taken from
atoms_a: Mask or indices of the first atom subset
atoms_b: Mask or indices of the second atom subset
normed (opt.): If the vectors should be normed
box (opt.): If not None, the vectors are calcualte with PBC
The defintion of atoms_a/b can be any possible subript of a numpy array.
They can, for example, be given as a masking array of bool values with the
same length as the frames of the coordinates. Or they can be a list of
indices selecting the atoms of these indices from each frame.
It is possible to compute the mean of several atoms before calculating the vectors,
by using a two-dimensional list of indices. The following code computes the vectors
between atoms 0, 3, 6 and the mean coordinate of atoms 1, 4, 7 and 2, 5, 8::
>>> inds_a = [0, 3, 6]
>>> inds_b = [[1, 4, 7], [2, 5, 8]]
>>> vectors(coords, inds_a, inds_b)
array([
coords[0] - (coords[1] + coords[2])/2,
coords[3] - (coords[4] + coords[5])/2,
coords[6] - (coords[7] + coords[8])/2,
])
"""
coords_a = coordinates[atoms_a]
if len(coords_a.shape) > 2:
coords_a = coords_a.mean(axis=0)
coords_b = coordinates[atoms_b]
if len(coords_b.shape) > 2:
coords_b = coords_b.mean(axis=0)
vectors = pbc_diff(coords_a, coords_b, box=box)
norm = np.linalg.norm(vectors, axis=-1).reshape(-1, 1) if normed else 1
return vectors / norm
| from functools import partial, lru_cache, wraps
from copy import copy
from .logging import logger
import numpy as np
from scipy.spatial import cKDTree, KDTree
from .atoms import AtomSubset
from .pbc import whole, nojump, pbc_diff
from .utils import mask2indices, singledispatchmethod
from .checksum import checksum
class UnknownCoordinatesMode(Exception):
pass
def rotate_axis(coords, axis):
"""
Rotate a set of coordinates to a given axis.
"""
axis = np.array(axis) / np.linalg.norm(axis)
zaxis = np.array([0, 0, 1])
if (axis == zaxis).sum() == 3:
return coords
rotation_axis = np.cross(axis, zaxis)
rotation_axis = rotation_axis / np.linalg.norm(rotation_axis)
theta = np.arccos(axis @ zaxis / np.linalg.norm(axis))
# return theta/pi, rotation_axis
ux, uy, uz = rotation_axis
cross_matrix = np.array([
[0, -uz, uy],
[uz, 0, -ux],
[-uy, ux, 0]
])
rotation_matrix = np.cos(theta) * np.identity(len(axis)) \
+ (1 - np.cos(theta)) * rotation_axis.reshape(-1, 1) @ rotation_axis.reshape(1, -1) \
+ np.sin(theta) * cross_matrix
if len(coords.shape) == 2:
rotated = np.array([rotation_matrix @ xyz for xyz in coords])
else:
rotated = rotation_matrix @ coords
return rotated
def spherical_radius(frame, origin=None):
"""
Transform a frame of cartesian coordinates into the sperical radius.
If origin=None the center of the box is taken as the coordinates origin.
"""
if origin is None:
origin = frame.box.diagonal() / 2
return ((frame - origin)**2).sum(axis=-1)**0.5
def polar_coordinates(x, y):
"""Convert cartesian to polar coordinates."""
radius = (x**2 + y**2)**0.5
phi = np.arctan2(y, x)
return radius, phi
def spherical_coordinates(x, y, z):
"""Convert cartesian to spherical coordinates."""
xy, phi = polar_coordinates(x, y)
theta = np.arccos(z / xy)
radius = (x**2 + y**2 + z**2)**0.5
return radius, phi, theta
def radial_selector(frame, coordinates, rmin, rmax):
"""
Return a selection of all atoms with radius in the interval [rmin, rmax].
"""
crd = coordinates[frame.step]
rad, _ = polar_coordinates(crd[:, 0], crd[:, 1])
selector = (rad >= rmin) & (rad <= rmax)
return mask2indices(selector)
def spatial_selector(frame, transform, rmin, rmax):
"""
Select a subset of atoms which have a radius between rmin and rmax.
Coordinates are filtered by the condition::
rmin <= transform(frame) <= rmax
Args:
frame: The coordinates of the actual trajectory
transform:
A function that transforms the coordinates of the frames into
the one-dimensional spatial coordinate (e.g. radius).
rmin: Minimum value of the radius
rmax: Maximum value of the radius
"""
r = transform(frame)
selector = (rmin <= r) & (rmax >= r)
return mask2indices(selector)
class CoordinateFrame(np.ndarray):
_known_modes = ('pbc', 'whole', 'nojump')
@property
def box(self):
return np.array(self.coordinates.frames[self.step].box)
@property
def volume(self):
return self.box.diagonal().cumprod()[-1]
@property
def time(self):
return self.coordinates.frames[self.step].time
@property
def masses(self):
return self.coordinates.atoms.masses[self.coordinates.atom_subset.selection]
@property
def charges(self):
return self.coordinates.atoms.charges[self.coordinates.atom_subset.selection]
@property
def residue_ids(self):
return self.coordinates.atom_subset.residue_ids
@property
def residue_names(self):
return self.coordinates.atom_subset.residue_names
@property
def atom_names(self):
return self.coordinates.atom_subset.atom_names
@property
def indices(self):
return self.coordinates.atom_subset.indices
@property
def selection(self):
return self.coordinates.atom_subset.selection
@property
def whole(self):
frame = whole(self)
frame.mode = 'whole'
return frame
@property
def pbc(self):
frame = self % self.box.diagonal()
frame.mode = 'pbc'
return frame
@property
def nojump(self):
if self.mode != 'nojump':
frame = nojump(self)
frame.mode = 'nojump'
return frame
else:
return self
def __new__(subtype, shape, dtype=float, buffer=None, offset=0, strides=None, order=None,
coordinates=None, step=None, box=None, mode=None):
obj = np.ndarray.__new__(subtype, shape, dtype, buffer, offset, strides)
obj.coordinates = coordinates
obj.step = step
obj.mode = mode
return obj
def __array_finalize__(self, obj):
if obj is None:
return
self.coordinates = getattr(obj, 'coordinates', None)
self.step = getattr(obj, 'step', None)
self.mode = getattr(obj, 'mode', None)
class Coordinates:
"""
Coordinates represent trajectory data, which is used for evaluation functions.
Atoms may be selected by specifing a atom_subset or a atom_filter.
"""
def get_mode(self, mode):
if self.atom_subset is not None:
return Coordinates(frames=self.frames, atom_subset=self.atom_subset, mode=mode)[self._slice]
else:
return Coordinates(frames=self.frames, atom_filter=self.atom_filter, mode=mode)[self._slice]
@property
def pbc(self):
return self.get_mode('pbc')
@property
def whole(self):
return self.get_mode('whole')
@property
def nojump(self):
return self.get_mode('nojump')
@property
def mode(self):
return self._mode
@mode.setter
def mode(self, val):
if val in CoordinateFrame._known_modes:
logger.warn('Changing the Coordinates mode directly is deprecated. Use Coordinates.%s instead, which returns a copy.', val)
self._mode = val
else:
raise UnknownCoordinatesMode('No such mode: {}'.format(val))
def __init__(self, frames, atom_filter=None, atom_subset: AtomSubset=None, mode=None):
"""
Args:
frames: The trajectory reader
atom_filter (opt.): A mask which selects a subset of the system
atom_subset (opt.): A AtomSubset that selects a subset of the system
mode (opt.): PBC mode of the Coordinates, can be pbc, whole or nojump.
Note:
The caching in Coordinates is deprecated, use the CachedReader or the function open
from the reader module instead.
"""
self._mode = mode
self.frames = frames
self._slice = slice(None)
assert atom_filter is None or atom_subset is None, "Cannot use both: subset and filter"
if atom_filter is not None:
self.atom_filter = atom_filter
self.atom_subset = None
elif atom_subset is not None:
self.atom_filter = atom_subset.selection
self.atom_subset = atom_subset
self.atoms = atom_subset.atoms
else:
self.atom_filter = np.ones(shape=(len(frames[0].coordinates),), dtype=bool)
self.atom_subset = None
def get_frame(self, fnr):
"""Returns the fnr-th frame."""
try:
if self.atom_filter is not None:
frame = self.frames[fnr].positions[self.atom_filter].view(CoordinateFrame)
else:
frame = self.frames.__getitem__(fnr).positions.view(CoordinateFrame)
frame.coordinates = self
frame.step = fnr
if self.mode is not None:
frame = getattr(frame, self.mode)
except EOFError:
raise IndexError
return frame
def clear_cache(self):
"""Clears the frame cache, if it is enabled."""
if hasattr(self.get_frame, 'clear_cache'):
self.get_frame.clear_cache()
def __iter__(self):
for i in range(len(self))[self._slice]:
yield self[i]
@singledispatchmethod
def __getitem__(self, item):
return self.get_frame(item)
@__getitem__.register(slice)
def _(self, item):
sliced = copy(self)
sliced._slice = item
return sliced
def __len__(self):
return len(self.frames)
def __checksum__(self):
return checksum(self.frames, self.atom_filter, self._slice, self.mode)
def __repr__(self):
return "Coordinates <{}>: {}".format(self.frames.filename, self.atom_subset)
@wraps(AtomSubset.subset)
def subset(self, **kwargs):
return Coordinates(self.frames, atom_subset=self.atom_subset.subset(**kwargs), mode=self._mode)
@property
def description(self):
return self.atom_subset.description
@description.setter
def description(self, desc):
self.atom_subset.description = desc
class MeanCoordinates(Coordinates):
def __init__(self, frames, atom_filter=None, mean=1):
super().__init__(frames, atom_filter)
self.mean = mean
assert mean >= 1, "Mean must be positive"
def __getitem__(self, item):
frame = super().__getitem__(item)
for i in range(item + 1, item + self.mean):
frame += super().__getitem__(i)
return frame / self.mean
def len(self):
return len(super() - self.mean + 1)
class CoordinatesMap:
def __init__(self, coordinates, function):
self.coordinates = coordinates
self.frames = self.coordinates.frames
self.atom_subset = self.coordinates.atom_subset
self.function = function
if isinstance(function, partial):
self._description = self.function.func.__name__
else:
self._description = self.function.__name__
def __iter__(self):
for frame in self.coordinates:
step = frame.step
frame = self.function(frame)
if not isinstance(frame, CoordinateFrame):
frame = frame.view(CoordinateFrame)
frame.coordinates = self
frame.step = step
yield frame
def __getitem__(self, item):
if isinstance(item, slice):
return self.__class__(self.coordinates[item], self.function)
else:
frame = self.function(self.coordinates.__getitem__(item))
if not isinstance(frame, CoordinateFrame):
frame = frame.view(CoordinateFrame)
frame.coordinates = self
frame.step = item
return frame
def __len__(self):
return len(self.coordinates.frames)
def __checksum__(self):
return checksum(self.coordinates, self.function)
@wraps(Coordinates.subset)
def subset(self, **kwargs):
return CoordinatesMap(self.coordinates.subset(**kwargs), self.function)
@property
def description(self):
return '{}_{}'.format(self._description, self.coordinates.description)
@description.setter
def description(self, desc):
self._description = desc
@property
def nojump(self):
return CoordinatesMap(self.coordinates.nojump, self.function)
@property
def whole(self):
return CoordinatesMap(self.coordinates.whole, self.function)
@property
def pbc(self):
return CoordinatesMap(self.coordinates.pbc, self.function)
class CoordinatesFilter:
@property
def atom_subset(self):
pass
def __init__(self, coordinates, atom_filter):
self.coordinates = coordinates
self.atom_filter = atom_filter
def __getitem__(self, item):
if isinstance(item, slice):
sliced = copy(self)
sliced.coordinates = self.coordinates[item]
return sliced
else:
frame = self.coordinates[item]
return frame[self.atom_filter]
class CoordinatesKDTree:
"""
A KDTree of coordinates frames. The KDtrees are cached by a :func:`functools.lru_cache`.
Uses :class:`scipy.spatial.cKDTree` by default, since it's significantly faster.
Make sure to use scipy 0.17 or later or switch to the normal KDTree, since cKDTree has
a memory leak in earlier versions.
"""
def clear_cache(self):
"""Clear the LRU cache."""
self._get_tree_at_index.cache_clear()
@property
def cache_info(self):
"""Return info about the state of the cache."""
return self._get_tree_at_index.cache_info()
def _get_tree_at_index(self, index):
frame = self.frames[index]
return self.kdtree(frame[self.selector(frame)])
def __init__(self, frames, selector=None, boxsize=None, maxcache=128, ckdtree=True):
"""
Args:
frames: Trajectory of the simulation, can be Coordinates object or reader
selector: Selector function that selects a subset of each frame
maxcache: Maxsize of the :func:`~functools.lru_cache`
ckdtree: Use :class:`~scipy.spatial.cKDTree` or :class:`~scipy.spatial.KDTree` if False
"""
if selector is not None:
self.selector = selector
else:
self.selector = lambda x: slice(None)
self.frames = frames
self.kdtree = cKDTree if ckdtree else KDTree
if boxsize is not None:
self.kdtree = partial(self.kdtree, boxsize=boxsize)
self._get_tree_at_index = lru_cache(maxsize=maxcache)(self._get_tree_at_index)
def __getitem__(self, index):
return self._get_tree_at_index(index)
def __checksum__(self):
return checksum(self.selector, self.frames)
def __eq__(self, other):
return super().__eq__(other)
def map_coordinates(func):
@wraps(func)
def wrapped(coordinates, **kwargs):
return CoordinatesMap(coordinates, partial(func, **kwargs))
return wrapped
@map_coordinates
def centers_of_mass(c, *, masses=None):
"""
A- 1
B- 2
A- 1
C 3
A-
B-
A-
C
A-
B-
A-
C
Example:
rd = XTCReader('t.xtc')
coordinates = Coordinates(rd)
com = centers_of_mass(coordinates, (1.0, 2.0, 1.0, 3.0))
"""
# At first, regroup our array
number_of_masses = len(masses)
number_of_coordinates, number_of_dimensions = c.shape
number_of_new_coordinates = number_of_coordinates // number_of_masses
grouped_masses = c.reshape(number_of_new_coordinates, number_of_masses, number_of_dimensions)
return np.average(grouped_masses, axis=1, weights=masses)
@map_coordinates
def pore_coordinates(coordinates, origin, sym_axis='z'):
"""
Map coordinates of a pore simulation so the pore has cylindrical symmetry.
Args:
coordinates: Coordinates of the simulation
origin: Origin of the pore which will be the coordinates origin after mapping
sym_axis (opt.): Symmtery axis of the pore, may be a literal direction
'x', 'y' or 'z' or an array of shape (3,)
"""
if sym_axis in ('x', 'y', 'z'):
rot_axis = np.zeros(shape=(3,))
rot_axis[['x', 'y', 'z'].index(sym_axis)] = 1
else:
rot_axis = sym_axis
return rotate_axis(coordinates - origin, rot_axis)
@map_coordinates
def vectors(coordinates, atoms_a, atoms_b, normed=False, box=None):
"""
Compute the vectors between the atoms of two subsets.
Args:
coordinates: The Coordinates object the atoms will be taken from
atoms_a: Mask or indices of the first atom subset
atoms_b: Mask or indices of the second atom subset
normed (opt.): If the vectors should be normed
box (opt.): If not None, the vectors are calcualte with PBC
The defintion of atoms_a/b can be any possible subript of a numpy array.
They can, for example, be given as a masking array of bool values with the
same length as the frames of the coordinates. Or they can be a list of
indices selecting the atoms of these indices from each frame.
It is possible to compute the mean of several atoms before calculating the vectors,
by using a two-dimensional list of indices. The following code computes the vectors
between atoms 0, 3, 6 and the mean coordinate of atoms 1, 4, 7 and 2, 5, 8::
>>> inds_a = [0, 3, 6]
>>> inds_b = [[1, 4, 7], [2, 5, 8]]
>>> vectors(coords, inds_a, inds_b)
array([
coords[0] - (coords[1] + coords[2])/2,
coords[3] - (coords[4] + coords[5])/2,
coords[6] - (coords[7] + coords[8])/2,
])
"""
coords_a = coordinates[atoms_a]
if len(coords_a.shape) > 2:
coords_a = coords_a.mean(axis=0)
coords_b = coordinates[atoms_b]
if len(coords_b.shape) > 2:
coords_b = coords_b.mean(axis=0)
vectors = pbc_diff(coords_a, coords_b, box=box)
norm = np.linalg.norm(vectors, axis=-1).reshape(-1, 1) if normed else 1
return vectors / norm | en | 0.718825 | Rotate a set of coordinates to a given axis. # return theta/pi, rotation_axis Transform a frame of cartesian coordinates into the sperical radius. If origin=None the center of the box is taken as the coordinates origin. Convert cartesian to polar coordinates. Convert cartesian to spherical coordinates. Return a selection of all atoms with radius in the interval [rmin, rmax]. Select a subset of atoms which have a radius between rmin and rmax. Coordinates are filtered by the condition:: rmin <= transform(frame) <= rmax Args: frame: The coordinates of the actual trajectory transform: A function that transforms the coordinates of the frames into the one-dimensional spatial coordinate (e.g. radius). rmin: Minimum value of the radius rmax: Maximum value of the radius Coordinates represent trajectory data, which is used for evaluation functions. Atoms may be selected by specifing a atom_subset or a atom_filter. Args: frames: The trajectory reader atom_filter (opt.): A mask which selects a subset of the system atom_subset (opt.): A AtomSubset that selects a subset of the system mode (opt.): PBC mode of the Coordinates, can be pbc, whole or nojump. Note: The caching in Coordinates is deprecated, use the CachedReader or the function open from the reader module instead. Returns the fnr-th frame. Clears the frame cache, if it is enabled. A KDTree of coordinates frames. The KDtrees are cached by a :func:`functools.lru_cache`. Uses :class:`scipy.spatial.cKDTree` by default, since it's significantly faster. Make sure to use scipy 0.17 or later or switch to the normal KDTree, since cKDTree has a memory leak in earlier versions. Clear the LRU cache. Return info about the state of the cache. 
Args: frames: Trajectory of the simulation, can be Coordinates object or reader selector: Selector function that selects a subset of each frame maxcache: Maxsize of the :func:`~functools.lru_cache` ckdtree: Use :class:`~scipy.spatial.cKDTree` or :class:`~scipy.spatial.KDTree` if False A- 1 B- 2 A- 1 C 3 A- B- A- C A- B- A- C Example: rd = XTCReader('t.xtc') coordinates = Coordinates(rd) com = centers_of_mass(coordinates, (1.0, 2.0, 1.0, 3.0)) # At first, regroup our array Map coordinates of a pore simulation so the pore has cylindrical symmetry. Args: coordinates: Coordinates of the simulation origin: Origin of the pore which will be the coordinates origin after mapping sym_axis (opt.): Symmtery axis of the pore, may be a literal direction 'x', 'y' or 'z' or an array of shape (3,) Compute the vectors between the atoms of two subsets. Args: coordinates: The Coordinates object the atoms will be taken from atoms_a: Mask or indices of the first atom subset atoms_b: Mask or indices of the second atom subset normed (opt.): If the vectors should be normed box (opt.): If not None, the vectors are calcualte with PBC The defintion of atoms_a/b can be any possible subript of a numpy array. They can, for example, be given as a masking array of bool values with the same length as the frames of the coordinates. Or they can be a list of indices selecting the atoms of these indices from each frame. It is possible to compute the mean of several atoms before calculating the vectors, by using a two-dimensional list of indices. The following code computes the vectors between atoms 0, 3, 6 and the mean coordinate of atoms 1, 4, 7 and 2, 5, 8:: >>> inds_a = [0, 3, 6] >>> inds_b = [[1, 4, 7], [2, 5, 8]] >>> vectors(coords, inds_a, inds_b) array([ coords[0] - (coords[1] + coords[2])/2, coords[3] - (coords[4] + coords[5])/2, coords[6] - (coords[7] + coords[8])/2, ]) | 2.155132 | 2 |
src/passthrough_node.py | jskinn/unrealcv-ros | 5 | 6619700 | <filename>src/passthrough_node.py
#!/usr/bin/env python
from __future__ import print_function
import os
import threading
import numpy as np
import rospy
import std_msgs.msg
import geometry_msgs.msg
import cv_bridge
import cv2 as opencv
import unrealcv
import unreal_coordinates as ue_coords
import unrealcv_ros.srv as services
# Helper messages to create UnrealCV message URIs
def make_vget_camera_image(camera_id, view_mode, filename=None):
    """Build the UnrealCV command that captures an image from a camera.

    Args:
        camera_id: Id of the camera to capture from.
        view_mode: Render mode to capture ('lit', 'depth', ...).
        filename (opt.): Output filename; when omitted the simulator
            chooses its own output path.
    Returns:
        The command string to send to the UnrealCV server.
    """
    command = "vget /camera/{0}/{1}".format(camera_id, view_mode)
    if filename is not None:
        command += " {0}".format(filename)
    return command
def make_vget_camera_location(camera_id):
    """Build the UnrealCV command that queries a camera's location."""
    return "vget /camera/%s/location" % (camera_id,)
def make_vget_camera_rotation(camera_id):
    """Build the UnrealCV command that queries a camera's rotation."""
    return "vget /camera/%s/rotation" % (camera_id,)
def make_vget_viewmode():
    """Build the UnrealCV command that queries the current view mode."""
    command = "vget /viewmode"
    return command
def make_vset_move_camera(camera_id, x, y, z):
    """Build the UnrealCV command that moves a camera (with collision) to a ROS-frame point.

    The (x, y, z) point is converted from the ROS frame to Unreal
    coordinates before being embedded in the command.
    """
    ue_location, _ = ue_coords.transform_to_unreal((x, y, z), None)
    return "vset /camera/{0}/moveto {1} {2} {3}".format(
        camera_id, ue_location[0], ue_location[1], ue_location[2])
def make_vset_camera_location(camera_id, x, y, z):
    """Build the UnrealCV command that teleports a camera to a ROS-frame point.

    Unlike 'moveto', this ignores collision. The point is converted to
    Unreal coordinates before formatting.
    """
    ue_location, _ = ue_coords.transform_to_unreal((x, y, z), None)
    return "vset /camera/{0}/location {1} {2} {3}".format(
        camera_id, ue_location[0], ue_location[1], ue_location[2])
def make_vset_camera_rotation(camera_id, w, x, y, z):
    """Build the UnrealCV command that orients a camera from a ROS-frame quaternion.

    The quaternion is converted to Unreal euler angles; UnrealCV expects
    the arguments in pitch, yaw, roll order.
    """
    _, (roll, pitch, yaw) = ue_coords.transform_to_unreal(None, (w, x, y, z))
    return "vset /camera/{0}/rotation {1} {2} {3}".format(camera_id, pitch, yaw, roll)
def make_vset_viewmode(viewmode):
    """Build the UnrealCV command that changes the current view mode."""
    return "vset /viewmode %s" % (viewmode,)
def make_vget_object_color(object_name):
    """Build the UnrealCV command that queries an object's annotation color."""
    return "vget /object/%s/color" % (object_name,)
def make_vset_object_color(object_name, r, g, b):
    """Build the UnrealCV command that sets an object's annotation color (RGB)."""
    return "vset /object/{0}/color {1} {2} {3}".format(object_name, r, g, b)
def make_vget_object_location(object_name):
    """Build the UnrealCV command that queries an object's location."""
    return "vget /object/%s/location" % (object_name,)
def make_vget_object_rotation(object_name):
    """Build the UnrealCV command that queries an object's rotation."""
    return "vget /object/%s/rotation" % (object_name,)
def make_vset_object_location(object_name, x, y, z):
    """Build the UnrealCV command that moves an object to a ROS-frame point.

    The point is converted to Unreal coordinates before formatting.
    """
    ue_location, _ = ue_coords.transform_to_unreal((x, y, z), None)
    return "vset /object/{0}/location {1} {2} {3}".format(
        object_name, ue_location[0], ue_location[1], ue_location[2])
def make_vset_object_rotation(object_name, w, x, y, z):
    """Build the UnrealCV command that orients an object from a ROS-frame quaternion.

    The quaternion is converted to Unreal euler angles; UnrealCV expects
    the arguments in pitch, yaw, roll order.
    """
    _, (roll, pitch, yaw) = ue_coords.transform_to_unreal(None, (w, x, y, z))
    return "vset /object/{0}/rotation {1} {2} {3}".format(object_name, pitch, yaw, roll)
class UnrealCVPassthrough(object):
    """
    A ROS node for the unrealcv API.
    The goal of this node is to exactly mirror the UnrealCV API, documented here:
    http://unrealcv.org/reference/commands.html
    This is based on the source code; at time of writing, the documentation above is incomplete.

    All traffic to the UnrealCV client is serialized through a lock, since the
    service handlers may be called concurrently but share one connection.
    """
    # These are the valid view modes for the cameras.
    view_modes = ['lit', 'depth', 'normal', 'object_mask', 'wireframe']

    def __init__(self, config):
        """
        Connect to a running UnrealCV simulator.

        Args:
            config: dict that may contain 'endpoint' (a (host, port) pair)
                and/or individual 'hostname' and 'port' keys. The individual
                keys take precedence over 'endpoint'.
        Raises:
            RuntimeError: If the simulator cannot be reached.
        """
        host = unrealcv.HOST
        port = unrealcv.PORT
        if 'endpoint' in config:
            host, port = config['endpoint']
        if 'port' in config:
            port = config['port']
        if 'hostname' in config:
            host = config['hostname']
        self.opencv_bridge = cv_bridge.CvBridge()
        self._client_lock = threading.Lock()
        self._client = unrealcv.Client(endpoint=(host, port))
        self._client.connect()
        if not self._client.isconnected():
            raise RuntimeError("Could not connect to unrealcv simulator, is it running?")
        # Declared ROS services, kept so they can be shut down later.
        self._services = []

    def create_services(self):
        """Declare all ROS services exposed by this node, mirroring the UnrealCV API."""
        print("Starting services...")
        # Camera control services
        self._services.append(rospy.Service('get_camera_view', services.GetCameraImage, self.handle_get_camera_image))
        self._services.append(rospy.Service('get_camera_view_with_filename', services.GetCameraImageWithFilename,
                                            self.handle_get_camera_image))
        self._services.append(rospy.Service('get_camera_location', services.GetCameraLocation,
                                            self.handle_get_camera_location))
        self._services.append(rospy.Service('get_camera_rotation', services.GetCameraRotation,
                                            self.handle_get_camera_rotation))
        self._services.append(rospy.Service('get_viewmode', services.GetViewmode, self.handle_get_viewmode))
        self._services.append(rospy.Service('move_camera', services.MoveCamera, self.handle_move_camera))
        self._services.append(rospy.Service('set_camera_location', services.SetCameraLocation,
                                            self.handle_set_camera_location))
        self._services.append(rospy.Service('set_camera_rotation', services.SetCameraRotation,
                                            self.handle_set_camera_rotation))
        self._services.append(rospy.Service('set_viewmode', services.SetViewmode, self.handle_set_viewmode))
        # object control services
        self._services.append(rospy.Service('get_object_color', services.GetObjectColor, self.handle_get_object_color))
        self._services.append(rospy.Service('set_object_color', services.SetObjectColor, self.handle_set_object_color))
        self._services.append(rospy.Service('get_object_location', services.GetObjectLocation,
                                            self.handle_get_object_location))
        self._services.append(rospy.Service('get_object_rotation', services.GetObjectRotation,
                                            self.handle_get_object_rotation))
        self._services.append(rospy.Service('set_object_location', services.SetObjectLocation,
                                            self.handle_set_object_location))
        self._services.append(rospy.Service('set_object_rotation', services.SetObjectRotation,
                                            self.handle_set_object_rotation))

    def shutdown_services(self, reason=''):
        """Shut down every declared service and disconnect from the simulator."""
        for service in self._services:
            service.shutdown(reason)
        self._client.disconnect()

    # Helpers and locking
    def request_client(self, request):
        """Send one request to the UnrealCV server, serialized by the client lock.

        Args:
            request: The UnrealCV command string to send.
        Returns:
            The server's response.
        """
        # BUG FIX: use 'with' so the lock is released even if request() raises;
        # the previous acquire/release pair would deadlock all later requests.
        with self._client_lock:
            return self._client.request(request)

    def get_camera_image(self, camera_id, location, rotation):
        """Pose a camera (in Unreal coordinates) and capture a 'lit' image.

        All three commands are issued under a single lock acquisition so that
        no other request can move the camera between the pose update and the capture.

        Args:
            camera_id: Id of the camera to pose and capture from.
            location: (x, y, z) already in Unreal coordinates.
            rotation: (roll, pitch, yaw) already in Unreal coordinates.
        Returns:
            The filename the simulator wrote the captured image to.
        """
        roll, pitch, yaw = rotation
        # BUG FIX: 'with' guarantees the lock is released on error (see request_client).
        with self._client_lock:
            self._client.request("vset /camera/{0}/location {1} {2} {3}".format(camera_id, location[0],
                                                                                location[1], location[2]))
            self._client.request("vset /camera/{0}/rotation {1} {2} {3}".format(camera_id, pitch, yaw, roll))
            image_filename = self._client.request("vget /camera/{0}/lit".format(camera_id))
        return image_filename

    # Service Handlers
    def handle_get_camera_image(self, request):
        """Service handler: capture a camera image and return it as a ROS image message.

        The simulator writes the capture to disk; the file is read back with
        OpenCV and deleted. If the file is missing, an empty image is returned.
        """
        # Parse the request arguments
        filename = None
        if hasattr(request, 'filename'):
            filename = request.filename
        view_mode = 'lit'
        if hasattr(request, 'view_mode') and request.view_mode in self.view_modes:
            view_mode = request.view_mode
        unrealcv_message = make_vget_camera_image(request.camera_id, view_mode, filename)
        image_filename = self.request_client(unrealcv_message)
        if os.path.isfile(image_filename):
            image_mat = opencv.imread(image_filename)
            os.remove(image_filename)
        else:
            # Capture failed; return an empty image rather than raising.
            image_mat = np.matrix([[]])
        return self.opencv_bridge.cv2_to_imgmsg(image_mat, encoding='passthrough')

    def handle_get_camera_location(self, request):
        """Service handler: return a camera's location as a ROS Point."""
        message = make_vget_camera_location(request.camera_id)
        location = self.request_client(message)
        location, _ = ue_coords.transform_from_unreal(location, None)
        return geometry_msgs.msg.Point(x=location[0], y=location[1], z=location[2])

    def handle_get_camera_rotation(self, request):
        """Service handler: return a camera's orientation as a ROS Quaternion."""
        # BUG FIX: previously queried the camera *location* here, returning
        # position data where a rotation was expected.
        message = make_vget_camera_rotation(request.camera_id)
        rotation = self.request_client(message)
        _, rotation = ue_coords.transform_from_unreal(None, rotation)
        return geometry_msgs.msg.Quaternion(w=rotation[0], x=rotation[1], y=rotation[2], z=rotation[3])

    def handle_get_viewmode(self, request):
        """Service handler: return the simulator's current view mode."""
        return self.request_client(make_vget_viewmode())

    def handle_move_camera(self, request):
        """Service handler: move a camera (with collision) to the requested point."""
        message = make_vset_move_camera(request.camera_id, request.location.x, request.location.y, request.location.z)
        return self.request_client(message)

    def handle_set_camera_location(self, request):
        """Service handler: teleport a camera to the requested point (ignores collision)."""
        message = make_vset_camera_location(request.camera_id, request.location.x,
                                            request.location.y, request.location.z)
        return self.request_client(message)

    def handle_set_camera_rotation(self, request):
        """Service handler: set a camera's orientation from the requested quaternion."""
        message = make_vset_camera_rotation(request.camera_id, request.rotation.w, request.rotation.x,
                                            request.rotation.y, request.rotation.z)
        return self.request_client(message)

    def handle_set_viewmode(self, request):
        """Service handler: change the view mode; unknown modes fall back to 'lit'."""
        view_mode = 'lit'
        if hasattr(request, 'view_mode') and request.view_mode in self.view_modes:
            view_mode = request.view_mode
        return self.request_client(make_vset_viewmode(view_mode))

    def handle_get_object_color(self, request):
        """Service handler: return an object's annotation color as a ROS ColorRGBA."""
        color = self.request_client(make_vget_object_color(request.object_name))
        # The server answers with four space-separated integers: "R G B A".
        r, g, b, a = map(int, color.split())
        return std_msgs.msg.ColorRGBA(r=r, g=g, b=b, a=a)

    def handle_set_object_color(self, request):
        """Service handler: set an object's annotation color (alpha is ignored)."""
        message = make_vset_object_color(request.object_name, request.color.r, request.color.g, request.color.b)
        return self.request_client(message)

    def handle_get_object_location(self, request):
        """Service handler: return an object's location as a ROS Point."""
        message = make_vget_object_location(request.object_name)
        location = self.request_client(message)
        location, _ = ue_coords.transform_from_unreal(location, None)
        return geometry_msgs.msg.Point(x=location[0], y=location[1], z=location[2])

    def handle_get_object_rotation(self, request):
        """Service handler: return an object's orientation as a ROS Quaternion."""
        message = make_vget_object_rotation(request.object_name)
        rotation = self.request_client(message)
        _, rotation = ue_coords.transform_from_unreal(None, rotation)
        return geometry_msgs.msg.Quaternion(w=rotation[0], x=rotation[1], y=rotation[2], z=rotation[3])

    def handle_set_object_location(self, request):
        """Service handler: move an object to the requested point."""
        message = make_vset_object_location(request.object_name, request.location.x,
                                            request.location.y, request.location.z)
        return self.request_client(message)

    def handle_set_object_rotation(self, request):
        """Service handler: set an object's orientation from the requested quaternion."""
        message = make_vset_object_rotation(request.object_name, request.rotation.w, request.rotation.x,
                                            request.rotation.y, request.rotation.z)
        return self.request_client(message)
def main():
    """Initialize the ROS node, start the UnrealCV bridge services, and spin until shutdown."""
    rospy.init_node('unrealcv_ros')
    bridge = UnrealCVPassthrough(config={})  # TODO: Get config from somewhere
    bridge.create_services()
    print("Ready!")
    try:
        rospy.spin()
    except KeyboardInterrupt:
        print('Shutting Down...')
    bridge.shutdown_services("Finished")


if __name__ == '__main__':
    main()
| <filename>src/passthrough_node.py
#!/usr/bin/env python
from __future__ import print_function
import os
import threading
import numpy as np
import rospy
import std_msgs.msg
import geometry_msgs.msg
import cv_bridge
import cv2 as opencv
import unrealcv
import unreal_coordinates as ue_coords
import unrealcv_ros.srv as services
# Helper messages to create UnrealCV message URIs
def make_vget_camera_image(camera_id, view_mode, filename=None):
    """Build the UnrealCV command that captures an image from a camera.

    Args:
        camera_id: Id of the camera to capture from.
        view_mode: Render mode to capture ('lit', 'depth', ...).
        filename (opt.): Output filename; when omitted the simulator
            chooses its own output path.
    Returns:
        The command string to send to the UnrealCV server.
    """
    command = "vget /camera/{0}/{1}".format(camera_id, view_mode)
    if filename is not None:
        command += " {0}".format(filename)
    return command
def make_vget_camera_location(camera_id):
    """Build the UnrealCV command that queries a camera's location."""
    return "vget /camera/%s/location" % (camera_id,)
def make_vget_camera_rotation(camera_id):
    """Build the UnrealCV command that queries a camera's rotation."""
    return "vget /camera/%s/rotation" % (camera_id,)
def make_vget_viewmode():
    """Build the UnrealCV command that queries the current view mode."""
    command = "vget /viewmode"
    return command
def make_vset_move_camera(camera_id, x, y, z):
    """Build the UnrealCV command that moves a camera (with collision) to a ROS-frame point.

    The (x, y, z) point is converted from the ROS frame to Unreal
    coordinates before being embedded in the command.
    """
    ue_location, _ = ue_coords.transform_to_unreal((x, y, z), None)
    return "vset /camera/{0}/moveto {1} {2} {3}".format(
        camera_id, ue_location[0], ue_location[1], ue_location[2])
def make_vset_camera_location(camera_id, x, y, z):
    """Build the UnrealCV command that teleports a camera to a ROS-frame point.

    Unlike 'moveto', this ignores collision. The point is converted to
    Unreal coordinates before formatting.
    """
    ue_location, _ = ue_coords.transform_to_unreal((x, y, z), None)
    return "vset /camera/{0}/location {1} {2} {3}".format(
        camera_id, ue_location[0], ue_location[1], ue_location[2])
def make_vset_camera_rotation(camera_id, w, x, y, z):
    """Build the UnrealCV command that orients a camera from a ROS-frame quaternion.

    The quaternion is converted to Unreal euler angles; UnrealCV expects
    the arguments in pitch, yaw, roll order.
    """
    _, (roll, pitch, yaw) = ue_coords.transform_to_unreal(None, (w, x, y, z))
    return "vset /camera/{0}/rotation {1} {2} {3}".format(camera_id, pitch, yaw, roll)
def make_vset_viewmode(viewmode):
    """Build the UnrealCV command that changes the current view mode."""
    return "vset /viewmode %s" % (viewmode,)
def make_vget_object_color(object_name):
    """Build the UnrealCV command that queries an object's annotation color."""
    return "vget /object/%s/color" % (object_name,)
def make_vset_object_color(object_name, r, g, b):
return "vset /object/{0}/color {1} {2} {3}".format(object_name, r, g, b)
def make_vget_object_location(object_name):
return "vget /object/{0}/location".format(object_name)
def make_vget_object_rotation(object_name):
return "vget /object/{0}/rotation".format(object_name)
def make_vset_object_location(object_name, x, y, z):
location, _ = ue_coords.transform_to_unreal((x, y, z), None)
return "vset /object/{0}/location {1} {2} {3}".format(object_name, location[0], location[1], location[2])
def make_vset_object_rotation(object_name, w, x, y, z):
_, rotation = ue_coords.transform_to_unreal(None, (w, x, y, z))
roll, pitch, yaw = rotation
return "vset /object/{0}/rotation {1} {2} {3}".format(object_name, pitch, yaw, roll)
class UnrealCVPassthrough(object):
"""
A ROS node for the unrealcv API.
The goal of this node is to exactly mirror the UnrealCV API, documented here:
http://unrealcv.org/reference/commands.html
This is based on the source code, at time of writing, the documentation above is incomplete.
"""
# These are the valid view modes for the cameras.
view_modes = ['lit', 'depth', 'normal', 'object_mask', 'wireframe']
def __init__(self, config):
host = unrealcv.HOST
port = unrealcv.PORT
if 'endpoint' in config:
host, port = config['endpoint']
if 'port' in config:
port = config['port']
if 'hostname' in config:
host = config['hostname']
self.opencv_bridge = cv_bridge.CvBridge()
self._client_lock = threading.Lock()
self._client = unrealcv.Client(endpoint=(host, port))
self._client.connect()
if not self._client.isconnected():
raise RuntimeError("Could not connect to unrealcv simulator, is it running?")
# Store the declare services
self._services = []
def create_services(self):
print("Starting services...")
# Camera control services
self._services.append(rospy.Service('get_camera_view', services.GetCameraImage, self.handle_get_camera_image))
self._services.append(rospy.Service('get_camera_view_with_filename', services.GetCameraImageWithFilename,
self.handle_get_camera_image))
self._services.append(rospy.Service('get_camera_location', services.GetCameraLocation,
self.handle_get_camera_location))
self._services.append(rospy.Service('get_camera_rotation', services.GetCameraRotation,
self.handle_get_camera_rotation))
self._services.append(rospy.Service('get_viewmode', services.GetViewmode, self.handle_get_viewmode))
self._services.append(rospy.Service('move_camera', services.MoveCamera, self.handle_move_camera))
self._services.append(rospy.Service('set_camera_location', services.SetCameraLocation,
self.handle_set_camera_location))
self._services.append(rospy.Service('set_camera_rotation', services.SetCameraRotation,
self.handle_set_camera_rotation))
self._services.append(rospy.Service('set_viewmode', services.SetViewmode, self.handle_set_viewmode))
# object control services
self._services.append(rospy.Service('get_object_color', services.GetObjectColor, self.handle_get_object_color))
self._services.append(rospy.Service('set_object_color', services.SetObjectColor, self.handle_set_object_color))
self._services.append(rospy.Service('get_object_location', services.GetObjectLocation,
self.handle_get_object_location))
self._services.append(rospy.Service('get_object_rotation', services.GetObjectRotation,
self.handle_get_object_rotation))
self._services.append(rospy.Service('set_object_location', services.SetObjectLocation,
self.handle_set_object_location))
self._services.append(rospy.Service('set_object_rotation', services.SetObjectRotation,
self.handle_set_object_rotation))
def shutdown_services(self, reason=''):
for service in self._services:
service.shutdown(reason)
self._client.disconnect()
# Helpers and locking
def request_client(self, request):
self._client_lock.acquire()
result = self._client.request(request)
self._client_lock.release()
return result
def get_camera_image(self, camera_id, location, rotation):
roll, pitch, yaw = rotation
self._client_lock.acquire()
self._client.request("vset /camera/{0}/location {1} {2} {3}".format(camera_id, location[0],
location[1], location[2]))
self._client.request("vset /camera/{0}/rotation {1} {2} {3}".format(camera_id, pitch, yaw, roll))
image_filename = self._client.request("vget /camera/{0}/lit".format(camera_id))
self._client_lock.release()
return image_filename
# Service Handlers
def handle_get_camera_image(self, request):
# Parse the request arguments
filename = None
if hasattr(request, 'filename'):
filename = request.filename
view_mode = 'lit'
if hasattr(request, 'view_mode') and request.view_mode in self.view_modes:
view_mode = request.view_mode
unrealcv_message = make_vget_camera_image(request.camera_id, view_mode, filename)
image_filename = self.request_client(unrealcv_message)
if os.path.isfile(image_filename):
image_mat = opencv.imread(image_filename)
os.remove(image_filename)
else:
image_mat = np.matrix([[]])
return self.opencv_bridge.cv2_to_imgmsg(image_mat, encoding='passthrough')
def handle_get_camera_location(self, request):
message = make_vget_camera_location(request.camera_id)
location = self.request_client(message)
location, _ = ue_coords.transform_from_unreal(location, None)
return geometry_msgs.msg.Point(x=location[0], y=location[1], z=location[2])
def handle_get_camera_rotation(self, request):
message = make_vget_camera_location(request.camera_id)
rotation = self.request_client(message)
_, rotation = ue_coords.transform_from_unreal(None, rotation)
return geometry_msgs.msg.Quaternion(w=rotation[0], x=rotation[1], y=rotation[2], z=rotation[3])
def handle_get_viewmode(self, request):
return self.request_client(make_vget_viewmode())
def handle_move_camera(self, request):
message = make_vset_move_camera(request.camera_id, request.location.x, request.location.y, request.location.z)
return self.request_client(message)
def handle_set_camera_location(self, request):
message = make_vset_camera_location(request.camera_id, request.location.x,
request.location.y, request.location.z)
return self.request_client(message)
def handle_set_camera_rotation(self, request):
message = make_vset_camera_rotation(request.camera_id, request.rotation.w, request.rotation.x,
request.rotation.y, request.rotation.z)
return self.request_client(message)
def handle_set_viewmode(self, request):
view_mode = 'lit'
if hasattr(request, 'view_mode') and request.view_mode in self.view_modes:
view_mode = request.view_mode
return self.request_client(make_vset_viewmode(view_mode))
def handle_get_object_color(self, request):
color = self.request_client(make_vget_object_color(request.object_name))
r, g, b, a = map(int, color.split())
return std_msgs.msg.ColorRGBA(r=r, g=g, b=b, a=a)
def handle_set_object_color(self, request):
message = make_vset_object_color(request.object_name, request.color.r, request.color.g, request.color.b)
return self.request_client(message)
def handle_get_object_location(self, request):
message = make_vget_object_location(request.object_name)
location = self.request_client(message)
location, _ = ue_coords.transform_from_unreal(location, None)
return geometry_msgs.msg.Point(x=location[0], y=location[1], z=location[2])
def handle_get_object_rotation(self, request):
message = make_vget_object_rotation(request.object_name)
rotation = self.request_client(message)
_, rotation = ue_coords.transform_from_unreal(None, rotation)
return geometry_msgs.msg.Quaternion(w=rotation[0], x=rotation[1], y=rotation[2], z=rotation[3])
def handle_set_object_location(self, request):
message = make_vset_object_location(request.object_name, request.location.x,
request.location.y, request.location.z)
return self.request_client(message)
def handle_set_object_rotation(self, request):
message = make_vset_object_rotation(request.object_name, request.rotation.w, request.rotation.x,
request.rotation.y, request.rotation.z)
return self.request_client(message)
def main():
rospy.init_node('unrealcv_ros')
unrealcv_bridge = UnrealCVPassthrough(config={}) # TODO: Get config from somewhere
unrealcv_bridge.create_services()
print("Ready!")
try:
rospy.spin()
except KeyboardInterrupt:
print('Shutting Down...')
unrealcv_bridge.shutdown_services("Finished")
if __name__ == '__main__':
main()
| en | 0.711917 | #!/usr/bin/env python # Helper messages to create UnrealCV message URIs A ROS node for the unrealcv API. The goal of this node is to exactly mirror the UnrealCV API, documented here: http://unrealcv.org/reference/commands.html This is based on the source code, at time of writing, the documentation above is incomplete. # These are the valid view modes for the cameras. # Store the declare services # Camera control services # object control services # Helpers and locking # Service Handlers # Parse the request arguments # TODO: Get config from somewhere | 2.303475 | 2 |
Code/grafo/get_dep_data/get_data_interface.py | loribeiro/MeuPolitico | 0 | 6619701 | <reponame>loribeiro/MeuPolitico<filename>Code/grafo/get_dep_data/get_data_interface.py<gh_stars>0
from .deputados import Deputado, get_deputados, get_lista_ids_deputados, get_despesas_deputados, get_frentes_politicas, get_orgaos
from .referencias import get_lista_temas, get_dict_temas
from .votacoes import Votacao, Voto, get_votacoes_proposicao, get_voto_deputados
from .proposicoes import Proposicao, get_proposicoes
def lista_temas_proposicoes():
return get_lista_temas()
def dict_temas_proposicoes():
return get_dict_temas()
def proposicoes_do_tema(tema, data_inicio, data_fim):
"""
Retorna uma lista de objetos do tipo Proposicao
Parametros:
-tema: nome do tema escolhido (consulte a lista com a funcao lista_temas_proposicoes())
-data_inicio e data_fim: formato yyyy-mm-dd
"""
id_tema = get_dict_temas()[tema]
cont = 1
lista_proposicoes = []
while True:
resultado = get_proposicoes(kwargs={"codTema": id_tema,"itens":"100","pagina":cont,"dataInicio":data_inicio,"dataFim":data_fim})
print("Iteracao: ",cont)
print("len: ",len(resultado))
if len(resultado) == 0:
break
lista_proposicoes = lista_proposicoes + resultado
cont = cont+1
return lista_proposicoes
def votacoes_por_proposicao(id_proposicao, data_inicio, data_fim):
cont=1
lista_votacoes = []
while True:
print("andando ",cont)
resultado = get_votacoes_proposicao(kwargs={"dataInicio":data_inicio, "dataFim":data_fim, "idProposicao":id_proposicao,"pagina":cont})
if len(resultado) == 0:
break
lista_votacoes = lista_votacoes + resultado
cont = cont+1
print("retornando")
return lista_votacoes
def voto_deputados_proposicao(id_proposicao):
"""
Retorna uma lista de objetos do tipo Voto
"""
return get_voto_deputados(id_proposicao=id_proposicao)
def lista_ids_deputados():
return get_lista_ids_deputados()
def dicionario_deputados():
return get_deputados()
def despesas_deputado(id_deputado, ano):
cont = 1
lista_despesas = []
while True:
print("requisitando", cont)
resultado = get_despesas_deputados(id_deputado,kwargs={"ano":ano,"pagina":cont, "itens":100})
#lista_resposta = []
#a = proposicoes_do_tema("Defesa e Segurança", "2019-01-01","2019-12-31")
#for ele in a:
# lista_resposta.append(ele.ementa)
#print(lista_resposta) | from .deputados import Deputado, get_deputados, get_lista_ids_deputados, get_despesas_deputados, get_frentes_politicas, get_orgaos
from .referencias import get_lista_temas, get_dict_temas
from .votacoes import Votacao, Voto, get_votacoes_proposicao, get_voto_deputados
from .proposicoes import Proposicao, get_proposicoes
def lista_temas_proposicoes():
return get_lista_temas()
def dict_temas_proposicoes():
return get_dict_temas()
def proposicoes_do_tema(tema, data_inicio, data_fim):
"""
Retorna uma lista de objetos do tipo Proposicao
Parametros:
-tema: nome do tema escolhido (consulte a lista com a funcao lista_temas_proposicoes())
-data_inicio e data_fim: formato yyyy-mm-dd
"""
id_tema = get_dict_temas()[tema]
cont = 1
lista_proposicoes = []
while True:
resultado = get_proposicoes(kwargs={"codTema": id_tema,"itens":"100","pagina":cont,"dataInicio":data_inicio,"dataFim":data_fim})
print("Iteracao: ",cont)
print("len: ",len(resultado))
if len(resultado) == 0:
break
lista_proposicoes = lista_proposicoes + resultado
cont = cont+1
return lista_proposicoes
def votacoes_por_proposicao(id_proposicao, data_inicio, data_fim):
cont=1
lista_votacoes = []
while True:
print("andando ",cont)
resultado = get_votacoes_proposicao(kwargs={"dataInicio":data_inicio, "dataFim":data_fim, "idProposicao":id_proposicao,"pagina":cont})
if len(resultado) == 0:
break
lista_votacoes = lista_votacoes + resultado
cont = cont+1
print("retornando")
return lista_votacoes
def voto_deputados_proposicao(id_proposicao):
"""
Retorna uma lista de objetos do tipo Voto
"""
return get_voto_deputados(id_proposicao=id_proposicao)
def lista_ids_deputados():
return get_lista_ids_deputados()
def dicionario_deputados():
return get_deputados()
def despesas_deputado(id_deputado, ano):
cont = 1
lista_despesas = []
while True:
print("requisitando", cont)
resultado = get_despesas_deputados(id_deputado,kwargs={"ano":ano,"pagina":cont, "itens":100})
#lista_resposta = []
#a = proposicoes_do_tema("Defesa e Segurança", "2019-01-01","2019-12-31")
#for ele in a:
# lista_resposta.append(ele.ementa)
#print(lista_resposta) | pt | 0.838464 | Retorna uma lista de objetos do tipo Proposicao Parametros: -tema: nome do tema escolhido (consulte a lista com a funcao lista_temas_proposicoes()) -data_inicio e data_fim: formato yyyy-mm-dd Retorna uma lista de objetos do tipo Voto #lista_resposta = [] #a = proposicoes_do_tema("Defesa e Segurança", "2019-01-01","2019-12-31") #for ele in a: # lista_resposta.append(ele.ementa) #print(lista_resposta) | 3.037262 | 3 |
helper_code/save_video.py | BrancoLab/escape-analysis | 0 | 6619702 | '''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
-----------# Display a saved video --------------------------------
'''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
import cv2
import numpy as np
# ------------------------------------------
# Select video file name and folder location
# ------------------------------------------
file_locs = ['D:\\Dropbox (UCL - SWC)\\DAQ\\upstairs_rig\\20FEB05_wall left\\CA8461\\']
file_locs = ['D:\Dropbox (UCL - SWC)\\DAQ\\upstairs_rig\\19JUN26_walldownup\\CA7170\\']
video_file_names = ['cam1.avi']
save_fps = 30
color = [False]
save_name = 'whiteout_'
# more options
show_video = False
display_frame_rate = 1000
start_frame = 0 #(26*60+8)*30#0
end_frame = 18.5*60*30 #(26*60+25)*30 #np.inf
# loop across all videos
for vid_num in range(len(file_locs)):
# get the file location
file_loc = file_locs[vid_num]
video_file_name = video_file_names[vid_num]
save_file_location = file_loc + save_name + video_file_name
# set up video writer
vid = cv2.VideoCapture(file_loc + video_file_name)
ret, frame = vid.read()
fourcc_data = cv2.VideoWriter_fourcc(*"XVID") # LJPG for lossless, XVID for compressed
data_video = cv2.VideoWriter(save_file_location, fourcc_data, save_fps, (frame.shape[1], frame.shape[0]), color[vid_num])
# modify the frame in some way - make mask to cover dark areas
vid.set(cv2.CAP_PROP_POS_FRAMES, 200)
ret, frame = vid.read()
top_limit = 240
modified_top = frame[:top_limit, :, 0]
modified_top_mask = modified_top < 40
# ---------------------------
# Play video and save video
# ---------------------------
vid.set(cv2.CAP_PROP_POS_FRAMES,start_frame)
frame_num = vid.get(cv2.CAP_PROP_POS_FRAMES)
end_frame = min(end_frame,vid.get(cv2.CAP_PROP_FRAME_COUNT))
while True:
ret, frame = vid.read() # read the frame
frame_num = vid.get(cv2.CAP_PROP_POS_FRAMES)
if ret:
if not color[vid_num]:
frame = frame[:,:,0]
# modify the frame in some way
# modified_top = frame[:top_limit, :]
# modified_top[modified_top_mask] = 180
# frame[:top_limit, :] = modified_top
# write the new video
data_video.write(frame)
# display the video
if show_video:
cv2.imshow('movie',frame)
if cv2.waitKey(int(1000/display_frame_rate)) & 0xFF == ord('q'): break
# end the video
if frame_num >= end_frame: break
# if can't read the video
else: break
# close the video files
vid.release()
data_video.release()
# Display number of last frame
print('Stopped at frame ' + str(frame_num))
| '''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
-----------# Display a saved video --------------------------------
'''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''''
import cv2
import numpy as np
# ------------------------------------------
# Select video file name and folder location
# ------------------------------------------
file_locs = ['D:\\Dropbox (UCL - SWC)\\DAQ\\upstairs_rig\\20FEB05_wall left\\CA8461\\']
file_locs = ['D:\Dropbox (UCL - SWC)\\DAQ\\upstairs_rig\\19JUN26_walldownup\\CA7170\\']
video_file_names = ['cam1.avi']
save_fps = 30
color = [False]
save_name = 'whiteout_'
# more options
show_video = False
display_frame_rate = 1000
start_frame = 0 #(26*60+8)*30#0
end_frame = 18.5*60*30 #(26*60+25)*30 #np.inf
# loop across all videos
for vid_num in range(len(file_locs)):
# get the file location
file_loc = file_locs[vid_num]
video_file_name = video_file_names[vid_num]
save_file_location = file_loc + save_name + video_file_name
# set up video writer
vid = cv2.VideoCapture(file_loc + video_file_name)
ret, frame = vid.read()
fourcc_data = cv2.VideoWriter_fourcc(*"XVID") # LJPG for lossless, XVID for compressed
data_video = cv2.VideoWriter(save_file_location, fourcc_data, save_fps, (frame.shape[1], frame.shape[0]), color[vid_num])
# modify the frame in some way - make mask to cover dark areas
vid.set(cv2.CAP_PROP_POS_FRAMES, 200)
ret, frame = vid.read()
top_limit = 240
modified_top = frame[:top_limit, :, 0]
modified_top_mask = modified_top < 40
# ---------------------------
# Play video and save video
# ---------------------------
vid.set(cv2.CAP_PROP_POS_FRAMES,start_frame)
frame_num = vid.get(cv2.CAP_PROP_POS_FRAMES)
end_frame = min(end_frame,vid.get(cv2.CAP_PROP_FRAME_COUNT))
while True:
ret, frame = vid.read() # read the frame
frame_num = vid.get(cv2.CAP_PROP_POS_FRAMES)
if ret:
if not color[vid_num]:
frame = frame[:,:,0]
# modify the frame in some way
# modified_top = frame[:top_limit, :]
# modified_top[modified_top_mask] = 180
# frame[:top_limit, :] = modified_top
# write the new video
data_video.write(frame)
# display the video
if show_video:
cv2.imshow('movie',frame)
if cv2.waitKey(int(1000/display_frame_rate)) & 0xFF == ord('q'): break
# end the video
if frame_num >= end_frame: break
# if can't read the video
else: break
# close the video files
vid.release()
data_video.release()
# Display number of last frame
print('Stopped at frame ' + str(frame_num))
| en | 0.532187 | -----------# Display a saved video -------------------------------- # ------------------------------------------ # Select video file name and folder location # ------------------------------------------ # more options #(26*60+8)*30#0 #(26*60+25)*30 #np.inf # loop across all videos # get the file location # set up video writer # LJPG for lossless, XVID for compressed # modify the frame in some way - make mask to cover dark areas # --------------------------- # Play video and save video # --------------------------- # read the frame # modify the frame in some way # modified_top = frame[:top_limit, :] # modified_top[modified_top_mask] = 180 # frame[:top_limit, :] = modified_top # write the new video # display the video # end the video # if can't read the video # close the video files # Display number of last frame | 2.002397 | 2 |
files/systemd/install.py | avinetworks/ansible-role-avise | 3 | 6619703 | <gh_stars>1-10
#!/usr/bin/python3
############################################################################
# ------------------------------------------------------------------------
# Copyright 2021 VMware, Inc. All rights reserved. VMware Confidential
# ------------------------------------------------------------------------
#############################################################################
"""
===================================
Avihost service migration Tool
===================================
"""
import traceback
import os
import logging
import sys
import time
import subprocess
import shlex
from avi.infrastructure.avi_logging import get_root_logger
log = logging.getLogger(__name__)
def print_info(msg):
"""
Wrapper function to prints the msg into stdout and log-file.
"""
msg = 'SEUC:: [' + time.ctime() + ']' + str(msg) + '::SEUC'
print(msg)
log.info(msg)
def print_error(msg):
"""
Wrapper function to prints the msg into stderr and log-file.
"""
msg = 'SEUC:: [' + time.ctime() + ']' + str(msg) + '::SEUC'
print(msg)
log.error(msg)
def copy_avihost_service_to_hostroot():
"""
SCP and Copy the latest avi_host service files from controller to /hostroot
"""
print_info("Copying new avihost service from controller to /hostroot")
try:
parent_folder = os.path.dirname(os.path.realpath(__file__))
host_files = {
'avihost.service' : '/hostroot/etc/systemd/system/',
'avihost_service_script.sh' : '/hostroot/etc/systemd/system/',
'avi_host_server.py' : '/hostroot/usr/sbin/'
}
replace_host_files = False
for host_file, local_folder in host_files.items():
remote_host_file = os.path.join(parent_folder,host_file)
local_host_file = os.path.join(local_folder, host_file)
if os.path.exists(remote_host_file):
print_info("Copied latest: %s" % remote_host_file)
cmd = 'sha512sum %s' %(remote_host_file)
latest_avi_host_md5 = subprocess.check_output(shlex.split(cmd))
if not isinstance(latest_avi_host_md5, str):
latest_avi_host_md5 = latest_avi_host_md5.decode(sys.stdout.encoding)
latest_avi_host_md5 = latest_avi_host_md5.split(' ')[0]
cmd = 'sha512sum %s' %(local_host_file)
current_avi_host_md5 = None
try:
current_avi_host_md5 = subprocess.check_output(shlex.split(cmd))
if not isinstance(current_avi_host_md5, str):
current_avi_host_md5 = current_avi_host_md5.decode(sys.stdout.encoding)
current_avi_host_md5 = current_avi_host_md5.split(' ')[0]
except Exception as e:
pass
print_info("Receive avihost checksum from controller: %s and current is: %s" % (latest_avi_host_md5, current_avi_host_md5))
if latest_avi_host_md5 and current_avi_host_md5 and current_avi_host_md5 == latest_avi_host_md5:
print_info("No differences detected in file %s, controller checksum: %s and current checksum is: %s" % (host_file, latest_avi_host_md5, current_avi_host_md5))
continue
else:
print_info("Migration needed, differences detected in file %s, controller checksum: %s and current checksum is: %s" % (host_file, latest_avi_host_md5, current_avi_host_md5))
replace_host_files = True
break
if replace_host_files:
#download aviservice files and replace it on hostroot
print_info("Copying latest avihost files from controller.")
for host_file, local_folder in host_files.items():
remote_tmp_file = os.path.join(parent_folder, host_file)
local_tmp_file = os.path.join(local_folder, host_file)
cmd = 'cp %s %s' % (remote_tmp_file, local_tmp_file)
move_out = subprocess.check_output(shlex.split(cmd))
msg = 'move %s to %s completed - done, out: %s' %(remote_tmp_file, local_tmp_file, move_out)
print_info(msg)
else:
print_info("Migration of avihost service not needed")
return 2
except subprocess.CalledProcessError as e:
msg = 'Failed to replace avihost service files, error exception:%s' % str(e)
print_error(msg)
return 1
print_info("Successfully replaced avihost service files.")
return 0
if __name__ == '__main__':
exitCode = 0
try:
exitCode = copy_avihost_service_to_hostroot()
except Exception as e:
traceback.print_exc()
print_error('Failed to migrate avihost service files, error exception:%s' % str(e))
sys.exit(1)
sys.exit(exitCode)
| #!/usr/bin/python3
############################################################################
# ------------------------------------------------------------------------
# Copyright 2021 VMware, Inc. All rights reserved. VMware Confidential
# ------------------------------------------------------------------------
#############################################################################
"""
===================================
Avihost service migration Tool
===================================
"""
import traceback
import os
import logging
import sys
import time
import subprocess
import shlex
from avi.infrastructure.avi_logging import get_root_logger
log = logging.getLogger(__name__)
def print_info(msg):
"""
Wrapper function to prints the msg into stdout and log-file.
"""
msg = 'SEUC:: [' + time.ctime() + ']' + str(msg) + '::SEUC'
print(msg)
log.info(msg)
def print_error(msg):
"""
Wrapper function to prints the msg into stderr and log-file.
"""
msg = 'SEUC:: [' + time.ctime() + ']' + str(msg) + '::SEUC'
print(msg)
log.error(msg)
def copy_avihost_service_to_hostroot():
"""
SCP and Copy the latest avi_host service files from controller to /hostroot
"""
print_info("Copying new avihost service from controller to /hostroot")
try:
parent_folder = os.path.dirname(os.path.realpath(__file__))
host_files = {
'avihost.service' : '/hostroot/etc/systemd/system/',
'avihost_service_script.sh' : '/hostroot/etc/systemd/system/',
'avi_host_server.py' : '/hostroot/usr/sbin/'
}
replace_host_files = False
for host_file, local_folder in host_files.items():
remote_host_file = os.path.join(parent_folder,host_file)
local_host_file = os.path.join(local_folder, host_file)
if os.path.exists(remote_host_file):
print_info("Copied latest: %s" % remote_host_file)
cmd = 'sha512sum %s' %(remote_host_file)
latest_avi_host_md5 = subprocess.check_output(shlex.split(cmd))
if not isinstance(latest_avi_host_md5, str):
latest_avi_host_md5 = latest_avi_host_md5.decode(sys.stdout.encoding)
latest_avi_host_md5 = latest_avi_host_md5.split(' ')[0]
cmd = 'sha512sum %s' %(local_host_file)
current_avi_host_md5 = None
try:
current_avi_host_md5 = subprocess.check_output(shlex.split(cmd))
if not isinstance(current_avi_host_md5, str):
current_avi_host_md5 = current_avi_host_md5.decode(sys.stdout.encoding)
current_avi_host_md5 = current_avi_host_md5.split(' ')[0]
except Exception as e:
pass
print_info("Receive avihost checksum from controller: %s and current is: %s" % (latest_avi_host_md5, current_avi_host_md5))
if latest_avi_host_md5 and current_avi_host_md5 and current_avi_host_md5 == latest_avi_host_md5:
print_info("No differences detected in file %s, controller checksum: %s and current checksum is: %s" % (host_file, latest_avi_host_md5, current_avi_host_md5))
continue
else:
print_info("Migration needed, differences detected in file %s, controller checksum: %s and current checksum is: %s" % (host_file, latest_avi_host_md5, current_avi_host_md5))
replace_host_files = True
break
if replace_host_files:
#download aviservice files and replace it on hostroot
print_info("Copying latest avihost files from controller.")
for host_file, local_folder in host_files.items():
remote_tmp_file = os.path.join(parent_folder, host_file)
local_tmp_file = os.path.join(local_folder, host_file)
cmd = 'cp %s %s' % (remote_tmp_file, local_tmp_file)
move_out = subprocess.check_output(shlex.split(cmd))
msg = 'move %s to %s completed - done, out: %s' %(remote_tmp_file, local_tmp_file, move_out)
print_info(msg)
else:
print_info("Migration of avihost service not needed")
return 2
except subprocess.CalledProcessError as e:
msg = 'Failed to replace avihost service files, error exception:%s' % str(e)
print_error(msg)
return 1
print_info("Successfully replaced avihost service files.")
return 0
if __name__ == '__main__':
exitCode = 0
try:
exitCode = copy_avihost_service_to_hostroot()
except Exception as e:
traceback.print_exc()
print_error('Failed to migrate avihost service files, error exception:%s' % str(e))
sys.exit(1)
sys.exit(exitCode) | en | 0.329433 | #!/usr/bin/python3 ############################################################################ # ------------------------------------------------------------------------ # Copyright 2021 VMware, Inc. All rights reserved. VMware Confidential # ------------------------------------------------------------------------ ############################################################################# =================================== Avihost service migration Tool =================================== Wrapper function to prints the msg into stdout and log-file. Wrapper function to prints the msg into stderr and log-file. SCP and Copy the latest avi_host service files from controller to /hostroot #download aviservice files and replace it on hostroot | 2.103298 | 2 |
content/migrations/0009_alter_tags_products.py | poya-kob/BiaBegard | 0 | 6619704 | # Generated by Django 3.2.6 on 2021-09-14 06:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('content', '0008_alter_productsgalleries_product'),
]
operations = [
migrations.AlterField(
model_name='tags',
name='products',
field=models.ManyToManyField(blank=True, related_name='tag', to='content.Products', verbose_name='محصولات'),
),
]
| # Generated by Django 3.2.6 on 2021-09-14 06:18
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('content', '0008_alter_productsgalleries_product'),
]
operations = [
migrations.AlterField(
model_name='tags',
name='products',
field=models.ManyToManyField(blank=True, related_name='tag', to='content.Products', verbose_name='محصولات'),
),
]
| en | 0.844703 | # Generated by Django 3.2.6 on 2021-09-14 06:18 | 1.46233 | 1 |
python/data_sutram/scraper/get_im_sat.py | SayanGhoshBDA/code-backup | 16 | 6619705 | """
code to get the satellite images from Zoom Earth
https://satellites.pro/
https://ecn.t3.tiles.virtualearth.net/tiles/a300101321312212320.jpeg?g=6856&mkt=en-US
"""
import urllib.request, urllib.parse, urllib.error
from bs4 import BeautifulSoup
import ssl
import wget
# Ignore SSL certificate errors
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
from random import randint
encrypt_ = ["t0","t1","t2","t3"]
tile_id = 300101321312212320
for i in range(100):
next_id = tile_id + i
final_url = "https://ecn."+encrypt_[randint(0, 3)]+".tiles.virtualearth.net/tiles/a"+str(next_id)+".jpeg?g=6856&mkt=en-US"
print(final_url)
try:
filename = wget.download(final_url)
except:
print("Blown off")
| """
code to get the satellite images from Zoom Earth
https://satellites.pro/
https://ecn.t3.tiles.virtualearth.net/tiles/a300101321312212320.jpeg?g=6856&mkt=en-US
"""
import urllib.request, urllib.parse, urllib.error
from bs4 import BeautifulSoup
import ssl
import wget
# Ignore SSL certificate errors
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE
from random import randint
encrypt_ = ["t0","t1","t2","t3"]
tile_id = 300101321312212320
for i in range(100):
next_id = tile_id + i
final_url = "https://ecn."+encrypt_[randint(0, 3)]+".tiles.virtualearth.net/tiles/a"+str(next_id)+".jpeg?g=6856&mkt=en-US"
print(final_url)
try:
filename = wget.download(final_url)
except:
print("Blown off")
| en | 0.352032 | code to get the satellite images from Zoom Earth https://satellites.pro/ https://ecn.t3.tiles.virtualearth.net/tiles/a300101321312212320.jpeg?g=6856&mkt=en-US # Ignore SSL certificate errors | 3.045294 | 3 |
weightGIS/weighting/Calculate/__init__.py | sbaker-dev/weightGIS | 0 | 6619706 | from weightGIS.weighting.Calculate.exposures import assigned_exposure
| from weightGIS.weighting.Calculate.exposures import assigned_exposure
| none | 1 | 1.146445 | 1 | |
unidad2/mst/prim.py | upc-projects/cc76 | 1 | 6619707 | import heapq as hq
import math
INF = float("inf")
print(math.inf, INF)
def prim(G):
n = len(G)
Known = [False]*n
Cost = [math.inf]*n
Path = [-1]*n
queue = []
s = 0
Cost[s] = 0
hq.heappush(queue, (0, s))
while len(queue) > 0:
_, u = hq.heappop(queue)
if not Known[u]:
Known[u] = True
for v, w in G[u]:
if not Known[v] and w < Cost[v]:
Cost[v] = w
Path[v] = u
hq.heappush(queue, (w, v))
return Path, Cost
G = []
with open("unidad2/mst/grafito.in") as f:
for line in f:
u = len(G)
G.append([])
nums = [int(x) for x in line.split(' ')]
for i in range(len(nums) // 2):
G[u].append((nums[i * 2], nums[i * 2 + 1]))
print(prim(G))
q = []
hq.heappush(q, 5)
hq.heappush(q, 9)
hq.heappush(q, 3)
hq.heappush(q, 1)
hq.heappush(q, 2)
hq.heappush(q, 8)
hq.heappush(q, 4)
hq.heappush(q, 6)
hq.heappush(q, 7)
while len(q) > 0:
print(hq.heappop(q))
q = []
hq.heappush(q, (5, "Sebastian"))
hq.heappush(q, (9, "Eduardo"))
hq.heappush(q, (3, "Daniel"))
hq.heappush(q, (1, "Adriana"))
hq.heappush(q, (2, "Rosario"))
hq.heappush(q, (8, "Fiscal"))
hq.heappush(q, (4, "otra cosa"))
hq.heappush(q, (6, 1999))
hq.heappush(q, (7, {"nombre": "Perez"}))
while len(q) > 0:
priority, elem = hq.heappop(q)
print(priority, elem)
| import heapq as hq
import math
INF = float("inf")
print(math.inf, INF)
def prim(G):
n = len(G)
Known = [False]*n
Cost = [math.inf]*n
Path = [-1]*n
queue = []
s = 0
Cost[s] = 0
hq.heappush(queue, (0, s))
while len(queue) > 0:
_, u = hq.heappop(queue)
if not Known[u]:
Known[u] = True
for v, w in G[u]:
if not Known[v] and w < Cost[v]:
Cost[v] = w
Path[v] = u
hq.heappush(queue, (w, v))
return Path, Cost
G = []
with open("unidad2/mst/grafito.in") as f:
for line in f:
u = len(G)
G.append([])
nums = [int(x) for x in line.split(' ')]
for i in range(len(nums) // 2):
G[u].append((nums[i * 2], nums[i * 2 + 1]))
print(prim(G))
q = []
hq.heappush(q, 5)
hq.heappush(q, 9)
hq.heappush(q, 3)
hq.heappush(q, 1)
hq.heappush(q, 2)
hq.heappush(q, 8)
hq.heappush(q, 4)
hq.heappush(q, 6)
hq.heappush(q, 7)
while len(q) > 0:
print(hq.heappop(q))
q = []
hq.heappush(q, (5, "Sebastian"))
hq.heappush(q, (9, "Eduardo"))
hq.heappush(q, (3, "Daniel"))
hq.heappush(q, (1, "Adriana"))
hq.heappush(q, (2, "Rosario"))
hq.heappush(q, (8, "Fiscal"))
hq.heappush(q, (4, "otra cosa"))
hq.heappush(q, (6, 1999))
hq.heappush(q, (7, {"nombre": "Perez"}))
while len(q) > 0:
priority, elem = hq.heappop(q)
print(priority, elem)
| none | 1 | 3.200774 | 3 | |
bcs-ui/backend/container_service/observability/metric/views/prometheus.py | kayinli/bk-bcs | 1 | 6619708 | <gh_stars>1-10
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 TH<NAME>, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging
import semantic_version
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from rest_framework import viewsets
from rest_framework.renderers import BrowsableAPIRenderer
from rest_framework.response import Response
from backend.bcs_web.audit_log import client as activity_client
from backend.components.bcs import k8s
from backend.utils.error_codes import error_codes
from backend.utils.renderers import BKAPIRenderer
logger = logging.getLogger(__name__)
class PrometheusUpdateViewSet(viewsets.ViewSet):
"""更新 Prometheus 相关"""
renderer_classes = (BKAPIRenderer, BrowsableAPIRenderer)
def _get_version(self, image):
version = image.rsplit(":", 1)[1]
if version.startswith("v"):
version = version[1:]
return version
def get(self, request, project_id, cluster_id):
"""是否需要更新 thano-sidecar 版本
Deprecated 已经统一升级到 v2.5.0 版本
"""
data = {"need_update": False, "update_tooltip": ""}
return Response(data)
def _activity_log(self, project_id, username, resource_name, description, status):
"""操作记录"""
client = activity_client.ContextActivityLogClient(
project_id=project_id, user=username, resource_type="metric", resource=resource_name
)
if status is True:
client.log_delete(activity_status="succeed", description=description)
else:
client.log_delete(activity_status="failed", description=description)
def update(self, request, project_id, cluster_id):
access_token = request.user.token.access_token
client = k8s.K8SClient(access_token, project_id, cluster_id, env=None)
resp = client.get_prometheus("thanos", "po-prometheus-operator-prometheus")
spec = resp.get("spec")
if not spec:
raise error_codes.APIError(_("Prometheus未安装, 请联系管理员解决"))
need_update = False
# 获取原来的值不变,覆盖更新
for container in spec["containers"]:
if container["name"] not in settings.PROMETHEUS_VERSIONS:
continue
image = settings.PROMETHEUS_VERSIONS[container["name"]]
if semantic_version.Version(self._get_version(image)) <= semantic_version.Version(
self._get_version(container["image"])
):
continue
need_update = True
container["image"] = image
if not need_update:
raise error_codes.APIError(_("已经最新版本, 不需要升级"))
patch_spec = {"spec": {"containers": spec["containers"]}}
resp = client.update_prometheus("thanos", "po-prometheus-operator-prometheus", patch_spec)
message = _("更新Metrics: 升级 thanos-sidecar 成功")
self._activity_log(project_id, request.user.username, "update thanos-sidecar", message, True)
return Response(resp)
| # -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2021 TH<NAME>, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
import logging
import semantic_version
from django.conf import settings
from django.utils.translation import ugettext_lazy as _
from rest_framework import viewsets
from rest_framework.renderers import BrowsableAPIRenderer
from rest_framework.response import Response
from backend.bcs_web.audit_log import client as activity_client
from backend.components.bcs import k8s
from backend.utils.error_codes import error_codes
from backend.utils.renderers import BKAPIRenderer
logger = logging.getLogger(__name__)
class PrometheusUpdateViewSet(viewsets.ViewSet):
"""更新 Prometheus 相关"""
renderer_classes = (BKAPIRenderer, BrowsableAPIRenderer)
def _get_version(self, image):
version = image.rsplit(":", 1)[1]
if version.startswith("v"):
version = version[1:]
return version
def get(self, request, project_id, cluster_id):
"""是否需要更新 thano-sidecar 版本
Deprecated 已经统一升级到 v2.5.0 版本
"""
data = {"need_update": False, "update_tooltip": ""}
return Response(data)
def _activity_log(self, project_id, username, resource_name, description, status):
"""操作记录"""
client = activity_client.ContextActivityLogClient(
project_id=project_id, user=username, resource_type="metric", resource=resource_name
)
if status is True:
client.log_delete(activity_status="succeed", description=description)
else:
client.log_delete(activity_status="failed", description=description)
def update(self, request, project_id, cluster_id):
access_token = request.user.token.access_token
client = k8s.K8SClient(access_token, project_id, cluster_id, env=None)
resp = client.get_prometheus("thanos", "po-prometheus-operator-prometheus")
spec = resp.get("spec")
if not spec:
raise error_codes.APIError(_("Prometheus未安装, 请联系管理员解决"))
need_update = False
# 获取原来的值不变,覆盖更新
for container in spec["containers"]:
if container["name"] not in settings.PROMETHEUS_VERSIONS:
continue
image = settings.PROMETHEUS_VERSIONS[container["name"]]
if semantic_version.Version(self._get_version(image)) <= semantic_version.Version(
self._get_version(container["image"])
):
continue
need_update = True
container["image"] = image
if not need_update:
raise error_codes.APIError(_("已经最新版本, 不需要升级"))
patch_spec = {"spec": {"containers": spec["containers"]}}
resp = client.update_prometheus("thanos", "po-prometheus-operator-prometheus", patch_spec)
message = _("更新Metrics: 升级 thanos-sidecar 成功")
self._activity_log(project_id, request.user.username, "update thanos-sidecar", message, True)
return Response(resp) | en | 0.767739 | # -*- coding: utf-8 -*- Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community Edition) available. Copyright (C) 2017-2021 TH<NAME>, a Tencent company. All rights reserved. Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://opensource.org/licenses/MIT Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 更新 Prometheus 相关 是否需要更新 thano-sidecar 版本 Deprecated 已经统一升级到 v2.5.0 版本 操作记录 # 获取原来的值不变,覆盖更新 | 1.627738 | 2 |
tests/test_basic.py | niwibe/cobrascript | 1 | 6619709 | # -*- coding: utf-8 -*-
import pytest
from cobra.base import compile
from .utils import norm
def test_basic_op_add():
assert compile("2 + 2") == "2 + 2;"
def test_basic_op_mul():
assert compile("2 * 2") == "2 * 2;"
def test_basic_op_sub():
assert compile("2 - 2") == "2 - 2;"
def test_basic_op_div():
assert compile("2 / 2") == "2 / 2;"
def test_basic_op_mod():
assert compile("2 % 2") == "2 % 2;"
def test_basic_op_bitwise_and():
assert compile("2 & 2") == "2 & 2;"
def test_basic_op_bitwise_or():
assert compile("2 | 2") == "2 | 2;"
def test_basic_op_bitwise_xor():
assert compile("2 ^ 2") == "2 ^ 2;"
def test_basic_op_bitwise_not():
assert compile("~2") == "~2;"
def test_basic_op_bitwise_shifts():
input = """
2 << 2
2 >> 2
"""
expected = """
2 << 2;
2 >> 2;
"""
assert compile(input) == norm(expected)
def test_basic_op_pow():
assert compile("3 ** 2") == "Math.pow(3, 2);"
def test_basic_op_mod():
assert compile("3 // 2") == "Math.floor(3 / 2);"
def test_logic_equal():
assert compile("2 == 2") == "2 === 2;"
def test_logic_not_equal():
assert compile("2 != 2") == "2 !== 2;"
def test_logic_gt():
assert compile("2 > 2") == "2 > 2;"
def test_logic_gte():
assert compile("2 >= 2") == "2 >= 2;"
def test_logic_lt():
assert compile("2 < 2") == "2 < 2;"
def test_logic_lte():
assert compile("2 <= 2") == "2 <= 2;"
def test_logic_is():
assert compile("2 is 2") == "2 === 2;"
def test_logic_not():
assert compile("not True") == "!true;"
def test_logic_expr():
assert compile("True and False") == "true && false;"
def test_logic_nested_expr():
assert compile("True and (False or True)") == "true && (false || true);"
def test_delete_expr():
input = """
del x
"""
expected = """
var x;
delete x;
"""
assert compile(input) == norm(expected)
def test_delete_expr_multiple():
input = """
del x, y
"""
expected = """
var x, y;
delete x;
delete y;
"""
assert compile(input) == norm(expected)
def test_list_expr():
input = """
[1, 2, 3]
"""
expected = """
[1,2,3];
"""
assert compile(input) == norm(expected)
def test_tuple_expr():
input = """
(1, 2, 3)
"""
expected = """
[1,2,3];
"""
assert compile(input) == norm(expected)
def test_unary_operators():
input = """
x = +1
y = -1
"""
expected = """
var x, y;
x = +1;
y = -1;
"""
assert compile(input) == norm(expected)
def test_simple_assignation():
input = "x = 2"
expected = """
var x;
x = 2;
"""
assert compile(input) == norm(expected)
def test_nested_operations():
input = "x = 2 * ((33 + 2.2) / 2)"
expected = """
var x;
x = 2 * ((33 + 2.2) / 2);
"""
assert compile(input) == norm(expected)
def test_none_assignation():
input = "x = None"
expected = """
var x;
x = null;
"""
assert compile(input) == norm(expected)
def test_boolean_assignation():
input = """
x = True
y = False
"""
expected = """
var x, y;
x = true;
y = false;
"""
assert compile(input) == norm(expected)
def test_simple_multiple_assignation():
input = "x = y = 2"
expected = """
var x, y;
x = y = 2;
"""
assert compile(input) == norm(expected)
def test_assignation_with_operation():
input = """
x = 0
x += 2
x -= 2
x /= 2
x *= 2
x %= 2
x **= 2
x //= 2
"""
expected = """
var x;
x = 0;
x += 2;
x -= 2;
x /= 2;
x *= 2;
x %= 2;
x = Math.pow(x, 2);
x = Math.floor(x / 2);
"""
assert compile(input) == norm(expected)
def test_simple_function_declaration():
input = """
def foo():
return 2
"""
expected = """
var foo;
foo = function() {
return 2;
};
"""
assert compile(input) == norm(expected)
def test_simple_function_declaration_with_args():
input = """
def foo(a, b):
return a + b
"""
expected = """
var foo;
foo = function(a, b) {
return a + b;
};
"""
assert compile(input) == norm(expected)
def test_nested_function():
input = """
def foo(a, b):
def bar():
return 2
return bar
"""
expected = """
var foo;
foo = function(a, b) {
var bar;
bar = function() {
return 2;
};
return bar;
};
"""
assert compile(input) == norm(expected)
def test_simple_function_call():
input = """
x = foo("Hello World")
"""
expected = """
var x;
x = foo("Hello World");
"""
assert compile(input) == norm(expected)
def test_simple_function_call_with_multiple_args():
input = """
x = foo("Hello World", 2, 2.3)
"""
expected = """
var x;
x = foo("Hello World", 2, 2.3);
"""
assert compile(input) == norm(expected)
def test_function_call_with_lambda_as_parameter():
input = """
x = jQuery(".span")
x.on("click", lambda e: e.preventDefault())
"""
expected = """
var x;
x = jQuery(".span");
x.on("click", function(e) {
e.preventDefault();
});
"""
assert compile(input) == norm(expected)
def test_assign_dict():
input = """
x = {"foo": 2, "bar": {"kk": 3}}
"""
expected = """
var x;
x = {
"foo": 2,
"bar": {
"kk": 3
}
};
"""
assert compile(input) == norm(expected)
def test_assign_dict_with_lists():
input = """
x = {"foo": 2, "bar": {"kk": [1, 2, 3]}}
"""
expected = """
var x;
x = {
"foo": 2,
"bar": {
"kk": [1,2,3]
}
};
"""
assert compile(input) == norm(expected)
def test_simple_if_statement():
input = """
def foo(a):
if a is None:
return None
return a + 2
"""
expected = """
var foo;
foo = function(a) {
if (a === null) {
return null;
}
return a + 2;
};
"""
assert compile(input) == norm(expected)
def test_simple_if_statement_with_else():
input = """
def foo(a):
if a is None:
return None
else:
return a + 2
"""
expected = """
var foo;
foo = function(a) {
if (a === null) {
return null;
} else {
return a + 2;
}
};
"""
assert compile(input) == norm(expected)
def test_simple_if_statement_with_elif():
input = """
def foo(a):
if a is None:
return None
elif a == 0:
return a + 2
"""
expected = """
var foo;
foo = function(a) {
if (a === null) {
return null;
} else if (a === 0) {
return a + 2;
}
};
"""
assert compile(input) == norm(expected)
def test_basic_for():
input = """
for item in [1,2,3,4,5]:
console.log(item)
"""
expected = """
var item, ref_0, ref_1;
for (ref_0 = 0, ref_1 = [1,2,3,4,5]; ref_0 < ref_1.length; ref_0++) {
item = ref_1[ref_0];
console.log(item);
}
"""
assert compile(input) == norm(expected)
def test_nested_for():
input = """
for item1 in [1,2,3,4,5]:
for item2 in [10, 20, 34]:
console.log(item1, item2)
"""
expected = """
var item1, item2, ref_0, ref_1, ref_2, ref_3;
for (ref_2 = 0, ref_3 = [1,2,3,4,5]; ref_2 < ref_3.length; ref_2++) {
item1 = ref_3[ref_2];
for (ref_0 = 0, ref_1 = [10,20,34]; ref_0 < ref_1.length; ref_0++) {
item2 = ref_1[ref_0];
console.log(item1, item2);
}
}
"""
compiled = compile(input)
assert compiled == norm(expected)
def test_basic_while():
input = """
while True:
console.log("test")
"""
expected = """
while (true) {
console.log("test");
}
"""
assert compile(input) == norm(expected)
def test_nested_while():
input = """
while True:
while True:
console.log("test")
"""
expected = """
while (true) {
while (true) {
console.log("test");
}
}
"""
compiled = compile(input)
assert compiled == norm(expected)
def test_while_else():
# FIXME: this seems generate inconsisten js?
input = """
while my_var:
console.log("test")
else:
console.log("test else")
"""
expected = """
var ref_0;
ref_0 = true;
while (my_var) {
ref_0 = false;
console.log("test");
}
if (ref_0) {
console.log("test else");
}
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_basic_list_comprehensions():
input = """
count = [num for num in [1, 2, 3, 4]]
"""
expected = """
var _i_0, _len_0, _results_0, _values_0, count;
count = (function() {
var _i_0, _len_0, _values_0, _results_0;
_values_0 = [1,2,3,4];
_results_0 = [];
for (_i_0 = 0, _len_0 = _values_0.length; _i_0 < _len_0; _i_0++) {
_results_0.push(_values_0[_i_0])
}
return _results_0;
})();
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_exceptions_raise():
input = """
raise "sample exception"
"""
expected = """
throw "sample exception";
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_conditional_list_comprehensions():
input = """
count = [num for num in [1, 2, 3, 4] if num != 4]
"""
expected = """
var _i_0, _len_0, _results_0, _values_0, count;
count = (function() {
var _i_0, _len_0, _values_0, _results_0;
_values_0 = [1,2,3,4];
_results_0 = [];
for (_i_0 = 0, _len_0 = _values_0.length; _i_0 < _len_0; _i_0++) {
if (num !== 4) {
_results_0.push(_values_0[_i_0])
}
}
return _results_0;
})();
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_multiple_conditional_list_comprehensions():
input = """
count = [num for num in [1, 2, 3, 4] if num != 4 if num != 3]
"""
expected = """
var _i_0, _len_0, _results_0, _values_0, count;
count = (function() {
var _i_0, _len_0, _values_0, _results_0;
_values_0 = [1,2,3,4];
_results_0 = [];
for (_i_0 = 0, _len_0 = _values_0.length; _i_0 < _len_0; _i_0++) {
if (num !== 4 && num !== 3) {
_results_0.push(_values_0[_i_0])
}
}
return _results_0;
})();
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_exceptions_try_except():
input = """
try:
do_some_thing()
except Error as e:
do_some_thing_other()
"""
expected = """
try {
do_some_thing();
} catch (e) {
do_some_thing_other();
}
"""
compiled = compile(input)
assert compiled == norm(expected)
def test_exceptions_try_finally():
input = """
try:
do_some_thing()
finally:
do_some_thing_other()
"""
expected = """
try {
do_some_thing();
} finally {
do_some_thing_other();
}
"""
compiled = compile(input)
assert compiled == norm(expected)
def test_global_import():
input = """
import _global as g
"""
expected = """
var g;
g = this;
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_global_import_and_try_overwrite():
input = """
import _global as g
g = 2
"""
with pytest.raises(RuntimeError):
compiled = compile(input)
def test_new_import():
input = """
import _new as new_instance
"""
expected = """
var new_instance;
new_instance = function() { var ___args_array = Array.apply(null, arguments); var ___clazz = ___args_array.slice(0, 1)[0]; return new (___clazz.bind.apply(___clazz, ___args_array))();};
"""
compiled = compile(input)
assert compiled == norm(expected)
def test_new_import_and_try_overwrite():
input = """
import _new as new
new = 2
"""
with pytest.raises(RuntimeError):
compiled = compile(input)
def test_auto_camel_case():
input = """
xx = foo_bar()
"""
expected = """
var xx;
xx = fooBar();
"""
compiled = compile(input, translate_options={"auto_camelcase": True})
assert compiled == norm(expected)
def test_module_as_closure():
input = """
xx = foo_bar()
"""
expected = """
(function() {
var xx;
xx = foo_bar();
}).call(this);
"""
compiled = compile(input, translate_options={"module_as_closure": True})
assert compiled == norm(expected)
def test_dict_access():
input = """
xx = foo["22"]
"""
expected = """
var xx;
xx = foo["22"];
"""
compiled = compile(input)
assert compiled == norm(expected)
def test_empty_return():
input = """
return
"""
expected = """
return;
"""
compiled = compile(input)
assert compiled == norm(expected)
def test_this_assignation():
input = """
def person(name):
this.name = name
"""
expected = """
var person;
person = function(name) {
this.name = name;
};
"""
compiled = compile(input)
assert compiled == norm(expected)
def test_basic_class():
input = """
class MyClass:
def __init__(x):
this.x = x
def foo():
return this.x
"""
expected = """
var MyClass, foo;
MyClass = (function() {
var classref_0;
classref_0 = function(x) {
this.x = x;
};
classref_0.prototype.foo = function() {
return this.x;
};
return classref_0;
})();
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_simple_decorator():
input = """
@decorator
def test(x, y):
console.log("test")
"""
expected = """
var test;
test = function(x, y) {
console.log("test");
};
test = decorator(test);
"""
compiled = compile(input)
assert compiled == norm(expected)
def test_multiple_decorators():
input = """
@decorator1
@decorator2
def test(x, y):
console.log("test")
"""
expected = """
var test;
test = function(x, y) {
console.log("test");
};
test = decorator2(test);
test = decorator1(test);
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_decorator_with_params():
input = """
@decorator("test-param")
def test(x, y):
console.log("test")
"""
expected = """
var test;
test = function(x, y) {
console.log("test");
};
test = decorator("test-param")(test);
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_break():
input = """
while True:
break
"""
expected = """
while (true) {
break;
}
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_continue():
input = """
while True:
continue
"""
expected = """
while (true) {
continue;
}
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_simple_slice():
input = """
testList[1:5]
"""
expected = """
testList.slice(1, 5);
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_partial_slices():
input = """
testList[1:]
testList[:5]
"""
expected = """
testList.slice(1);
testList.slice(0, 5);
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_negative_slices():
input = """
testList[1:-1]
"""
expected = """
testList.slice(1, -1);
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_multiple_assignation():
# FIXME: this seems generate inconsisten js?
input = """
x = a, b = 1, 2
"""
expected = """
var _ref_0, x;
x = _ref_0 = [1,2];
a = _ref_0[0];
b = _ref_0[1];
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
| # -*- coding: utf-8 -*-
import pytest
from cobra.base import compile
from .utils import norm
def test_basic_op_add():
    """Addition translates verbatim."""
    result = compile("2 + 2")
    assert result == "2 + 2;"


def test_basic_op_mul():
    """Multiplication translates verbatim."""
    result = compile("2 * 2")
    assert result == "2 * 2;"


def test_basic_op_sub():
    """Subtraction translates verbatim."""
    result = compile("2 - 2")
    assert result == "2 - 2;"


def test_basic_op_div():
    """Division translates verbatim."""
    result = compile("2 / 2")
    assert result == "2 / 2;"


def test_basic_op_mod():
    """Modulo translates verbatim."""
    result = compile("2 % 2")
    assert result == "2 % 2;"


def test_basic_op_bitwise_and():
    """Bitwise AND translates verbatim."""
    result = compile("2 & 2")
    assert result == "2 & 2;"


def test_basic_op_bitwise_or():
    """Bitwise OR translates verbatim."""
    result = compile("2 | 2")
    assert result == "2 | 2;"


def test_basic_op_bitwise_xor():
    """Bitwise XOR translates verbatim."""
    result = compile("2 ^ 2")
    assert result == "2 ^ 2;"


def test_basic_op_bitwise_not():
    """Bitwise NOT translates verbatim."""
    result = compile("~2")
    assert result == "~2;"
def test_basic_op_bitwise_shifts():
    """Left and right shift operators translate verbatim."""
    src = """
    2 << 2
    2 >> 2
    """
    want = """
    2 << 2;
    2 >> 2;
    """
    assert compile(src) == norm(want)
def test_basic_op_pow():
    """`**` is lowered to a Math.pow call."""
    assert compile("3 ** 2") == "Math.pow(3, 2);"


def test_basic_op_floordiv():
    """`//` is lowered to Math.floor over a plain division.

    Renamed from a second `test_basic_op_mod`: the duplicate name shadowed
    the real modulo test defined earlier in this module, so the `%` test
    was never collected or run by pytest.
    """
    assert compile("3 // 2") == "Math.floor(3 / 2);"
def test_logic_equal():
    """`==` becomes strict equality."""
    result = compile("2 == 2")
    assert result == "2 === 2;"


def test_logic_not_equal():
    """`!=` becomes strict inequality."""
    result = compile("2 != 2")
    assert result == "2 !== 2;"


def test_logic_gt():
    """`>` translates verbatim."""
    result = compile("2 > 2")
    assert result == "2 > 2;"


def test_logic_gte():
    """`>=` translates verbatim."""
    result = compile("2 >= 2")
    assert result == "2 >= 2;"


def test_logic_lt():
    """`<` translates verbatim."""
    result = compile("2 < 2")
    assert result == "2 < 2;"


def test_logic_lte():
    """`<=` translates verbatim."""
    result = compile("2 <= 2")
    assert result == "2 <= 2;"


def test_logic_is():
    """`is` becomes strict equality."""
    result = compile("2 is 2")
    assert result == "2 === 2;"


def test_logic_not():
    """`not` becomes the `!` prefix operator."""
    result = compile("not True")
    assert result == "!true;"


def test_logic_expr():
    """`and` becomes `&&`."""
    result = compile("True and False")
    assert result == "true && false;"


def test_logic_nested_expr():
    """Parenthesised boolean sub-expressions are preserved."""
    result = compile("True and (False or True)")
    assert result == "true && (false || true);"
def test_delete_expr():
    """`del` of a single name emits a JS delete statement."""
    src = """
    del x
    """
    want = """
    var x;
    delete x;
    """
    assert compile(src) == norm(want)


def test_delete_expr_multiple():
    """`del` with several targets emits one delete per name."""
    src = """
    del x, y
    """
    want = """
    var x, y;
    delete x;
    delete y;
    """
    assert compile(src) == norm(want)
def test_list_expr():
    """A list literal becomes a JS array literal."""
    src = """
    [1, 2, 3]
    """
    want = """
    [1,2,3];
    """
    assert compile(src) == norm(want)


def test_tuple_expr():
    """A tuple literal also becomes a JS array literal."""
    src = """
    (1, 2, 3)
    """
    want = """
    [1,2,3];
    """
    assert compile(src) == norm(want)
def test_unary_operators():
    """Unary plus and minus are preserved in the output."""
    src = """
    x = +1
    y = -1
    """
    want = """
    var x, y;
    x = +1;
    y = -1;
    """
    assert compile(src) == norm(want)


def test_simple_assignation():
    """An assignment also emits a `var` declaration for the target."""
    src = "x = 2"
    want = """
    var x;
    x = 2;
    """
    assert compile(src) == norm(want)
def test_nested_operations():
    """Nested parenthesised arithmetic keeps its grouping."""
    src = "x = 2 * ((33 + 2.2) / 2)"
    want = """
    var x;
    x = 2 * ((33 + 2.2) / 2);
    """
    assert compile(src) == norm(want)


def test_none_assignation():
    """`None` is translated to `null`."""
    src = "x = None"
    want = """
    var x;
    x = null;
    """
    assert compile(src) == norm(want)


def test_boolean_assignation():
    """`True`/`False` are translated to `true`/`false`."""
    src = """
    x = True
    y = False
    """
    want = """
    var x, y;
    x = true;
    y = false;
    """
    assert compile(src) == norm(want)


def test_simple_multiple_assignation():
    """A chained assignment declares every target once."""
    src = "x = y = 2"
    want = """
    var x, y;
    x = y = 2;
    """
    assert compile(src) == norm(want)
def test_assignation_with_operation():
    """Augmented assignments map to JS equivalents; `**=` and `//=`
    are rewritten through Math.pow / Math.floor."""
    src = """
    x = 0
    x += 2
    x -= 2
    x /= 2
    x *= 2
    x %= 2
    x **= 2
    x //= 2
    """
    want = """
    var x;
    x = 0;
    x += 2;
    x -= 2;
    x /= 2;
    x *= 2;
    x %= 2;
    x = Math.pow(x, 2);
    x = Math.floor(x / 2);
    """
    assert compile(src) == norm(want)
def test_simple_function_declaration():
    """A `def` becomes a function expression assigned to a var."""
    src = """
    def foo():
        return 2
    """
    want = """
    var foo;
    foo = function() {
        return 2;
    };
    """
    assert compile(src) == norm(want)


def test_simple_function_declaration_with_args():
    """Parameters are carried over into the function expression."""
    src = """
    def foo(a, b):
        return a + b
    """
    want = """
    var foo;
    foo = function(a, b) {
        return a + b;
    };
    """
    assert compile(src) == norm(want)


def test_nested_function():
    """Inner `def`s get their own var declaration in the outer scope."""
    src = """
    def foo(a, b):
        def bar():
            return 2
        return bar
    """
    want = """
    var foo;
    foo = function(a, b) {
        var bar;
        bar = function() {
            return 2;
        };
        return bar;
    };
    """
    assert compile(src) == norm(want)
def test_simple_function_call():
    """A call with a string argument translates verbatim."""
    src = """
    x = foo("Hello World")
    """
    want = """
    var x;
    x = foo("Hello World");
    """
    assert compile(src) == norm(want)


def test_simple_function_call_with_multiple_args():
    """Mixed argument types are preserved in order."""
    src = """
    x = foo("Hello World", 2, 2.3)
    """
    want = """
    var x;
    x = foo("Hello World", 2, 2.3);
    """
    assert compile(src) == norm(want)


def test_function_call_with_lambda_as_parameter():
    """A lambda argument becomes an inline anonymous function."""
    src = """
    x = jQuery(".span")
    x.on("click", lambda e: e.preventDefault())
    """
    want = """
    var x;
    x = jQuery(".span");
    x.on("click", function(e) {
        e.preventDefault();
    });
    """
    assert compile(src) == norm(want)
def test_assign_dict():
    """Nested dict literals become nested JS object literals."""
    src = """
    x = {"foo": 2, "bar": {"kk": 3}}
    """
    want = """
    var x;
    x = {
        "foo": 2,
        "bar": {
            "kk": 3
        }
    };
    """
    assert compile(src) == norm(want)


def test_assign_dict_with_lists():
    """Lists nested inside dicts become JS arrays."""
    src = """
    x = {"foo": 2, "bar": {"kk": [1, 2, 3]}}
    """
    want = """
    var x;
    x = {
        "foo": 2,
        "bar": {
            "kk": [1,2,3]
        }
    };
    """
    assert compile(src) == norm(want)
def test_simple_if_statement():
    """A bare `if` becomes a braced JS if block."""
    src = """
    def foo(a):
        if a is None:
            return None
        return a + 2
    """
    want = """
    var foo;
    foo = function(a) {
        if (a === null) {
            return null;
        }
        return a + 2;
    };
    """
    assert compile(src) == norm(want)


def test_simple_if_statement_with_else():
    """`if`/`else` maps to JS if/else."""
    src = """
    def foo(a):
        if a is None:
            return None
        else:
            return a + 2
    """
    want = """
    var foo;
    foo = function(a) {
        if (a === null) {
            return null;
        } else {
            return a + 2;
        }
    };
    """
    assert compile(src) == norm(want)


def test_simple_if_statement_with_elif():
    """`elif` maps to a chained `else if`."""
    src = """
    def foo(a):
        if a is None:
            return None
        elif a == 0:
            return a + 2
    """
    want = """
    var foo;
    foo = function(a) {
        if (a === null) {
            return null;
        } else if (a === 0) {
            return a + 2;
        }
    };
    """
    assert compile(src) == norm(want)
def test_basic_for():
    """A for-in over a list becomes an index-based JS for loop
    using compiler-generated `ref_N` temporaries."""
    src = """
    for item in [1,2,3,4,5]:
        console.log(item)
    """
    want = """
    var item, ref_0, ref_1;
    for (ref_0 = 0, ref_1 = [1,2,3,4,5]; ref_0 < ref_1.length; ref_0++) {
        item = ref_1[ref_0];
        console.log(item);
    }
    """
    assert compile(src) == norm(want)


def test_nested_for():
    """Nested for loops each get their own `ref_N` temporaries."""
    src = """
    for item1 in [1,2,3,4,5]:
        for item2 in [10, 20, 34]:
            console.log(item1, item2)
    """
    want = """
    var item1, item2, ref_0, ref_1, ref_2, ref_3;
    for (ref_2 = 0, ref_3 = [1,2,3,4,5]; ref_2 < ref_3.length; ref_2++) {
        item1 = ref_3[ref_2];
        for (ref_0 = 0, ref_1 = [10,20,34]; ref_0 < ref_1.length; ref_0++) {
            item2 = ref_1[ref_0];
            console.log(item1, item2);
        }
    }
    """
    assert compile(src) == norm(want)
def test_basic_while():
    """`while` maps directly to a JS while loop."""
    src = """
    while True:
        console.log("test")
    """
    want = """
    while (true) {
        console.log("test");
    }
    """
    assert compile(src) == norm(want)


def test_nested_while():
    """Nested while loops keep their nesting."""
    src = """
    while True:
        while True:
            console.log("test")
    """
    want = """
    while (true) {
        while (true) {
            console.log("test");
        }
    }
    """
    assert compile(src) == norm(want)
def test_while_else():
    """`while`/`else`: the else body runs only when the loop finished
    without a `break`, tracked through a generated `ref_0` flag."""
    # FIXME: this seems to generate inconsistent js?
    src = """
    while my_var:
        console.log("test")
    else:
        console.log("test else")
    """
    want = """
    var ref_0;
    ref_0 = true;
    while (my_var) {
        ref_0 = false;
        console.log("test");
    }
    if (ref_0) {
        console.log("test else");
    }
    """
    # Removed leftover debug print of the compiled output.
    assert compile(src) == norm(want)
def test_basic_list_comprehensions():
    """A list comprehension compiles to an immediately-invoked
    function that builds and returns a results array."""
    src = """
    count = [num for num in [1, 2, 3, 4]]
    """
    want = """
    var _i_0, _len_0, _results_0, _values_0, count;
    count = (function() {
        var _i_0, _len_0, _values_0, _results_0;
        _values_0 = [1,2,3,4];
        _results_0 = [];
        for (_i_0 = 0, _len_0 = _values_0.length; _i_0 < _len_0; _i_0++) {
            _results_0.push(_values_0[_i_0])
        }
        return _results_0;
    })();
    """
    # Removed leftover debug print of the compiled output.
    assert compile(src) == norm(want)
def test_exceptions_raise():
    """`raise` maps to a JS throw statement."""
    src = """
    raise "sample exception"
    """
    want = """
    throw "sample exception";
    """
    # Removed leftover debug print of the compiled output.
    assert compile(src) == norm(want)
def test_conditional_list_comprehensions():
    """A comprehension `if` clause becomes a guard inside the loop."""
    src = """
    count = [num for num in [1, 2, 3, 4] if num != 4]
    """
    want = """
    var _i_0, _len_0, _results_0, _values_0, count;
    count = (function() {
        var _i_0, _len_0, _values_0, _results_0;
        _values_0 = [1,2,3,4];
        _results_0 = [];
        for (_i_0 = 0, _len_0 = _values_0.length; _i_0 < _len_0; _i_0++) {
            if (num !== 4) {
                _results_0.push(_values_0[_i_0])
            }
        }
        return _results_0;
    })();
    """
    # Removed leftover debug print of the compiled output.
    assert compile(src) == norm(want)


def test_multiple_conditional_list_comprehensions():
    """Multiple `if` clauses are AND-ed into a single guard."""
    src = """
    count = [num for num in [1, 2, 3, 4] if num != 4 if num != 3]
    """
    want = """
    var _i_0, _len_0, _results_0, _values_0, count;
    count = (function() {
        var _i_0, _len_0, _values_0, _results_0;
        _values_0 = [1,2,3,4];
        _results_0 = [];
        for (_i_0 = 0, _len_0 = _values_0.length; _i_0 < _len_0; _i_0++) {
            if (num !== 4 && num !== 3) {
                _results_0.push(_values_0[_i_0])
            }
        }
        return _results_0;
    })();
    """
    # Removed leftover debug print of the compiled output.
    assert compile(src) == norm(want)
def test_exceptions_try_except():
input = """
try:
do_some_thing()
except Error as e:
do_some_thing_other()
"""
expected = """
try {
do_some_thing();
} catch (e) {
do_some_thing_other();
}
"""
compiled = compile(input)
assert compiled == norm(expected)
def test_exceptions_try_finally():
input = """
try:
do_some_thing()
finally:
do_some_thing_other()
"""
expected = """
try {
do_some_thing();
} finally {
do_some_thing_other();
}
"""
compiled = compile(input)
assert compiled == norm(expected)
def test_global_import():
input = """
import _global as g
"""
expected = """
var g;
g = this;
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_global_import_and_try_overwrite():
input = """
import _global as g
g = 2
"""
with pytest.raises(RuntimeError):
compiled = compile(input)
def test_new_import():
input = """
import _new as new_instance
"""
expected = """
var new_instance;
new_instance = function() { var ___args_array = Array.apply(null, arguments); var ___clazz = ___args_array.slice(0, 1)[0]; return new (___clazz.bind.apply(___clazz, ___args_array))();};
"""
compiled = compile(input)
assert compiled == norm(expected)
def test_new_import_and_try_overwrite():
input = """
import _new as new
new = 2
"""
with pytest.raises(RuntimeError):
compiled = compile(input)
def test_auto_camel_case():
input = """
xx = foo_bar()
"""
expected = """
var xx;
xx = fooBar();
"""
compiled = compile(input, translate_options={"auto_camelcase": True})
assert compiled == norm(expected)
def test_module_as_closure():
input = """
xx = foo_bar()
"""
expected = """
(function() {
var xx;
xx = foo_bar();
}).call(this);
"""
compiled = compile(input, translate_options={"module_as_closure": True})
assert compiled == norm(expected)
def test_dict_access():
input = """
xx = foo["22"]
"""
expected = """
var xx;
xx = foo["22"];
"""
compiled = compile(input)
assert compiled == norm(expected)
def test_empty_return():
input = """
return
"""
expected = """
return;
"""
compiled = compile(input)
assert compiled == norm(expected)
def test_this_assignation():
input = """
def person(name):
this.name = name
"""
expected = """
var person;
person = function(name) {
this.name = name;
};
"""
compiled = compile(input)
assert compiled == norm(expected)
def test_basic_class():
input = """
class MyClass:
def __init__(x):
this.x = x
def foo():
return this.x
"""
expected = """
var MyClass, foo;
MyClass = (function() {
var classref_0;
classref_0 = function(x) {
this.x = x;
};
classref_0.prototype.foo = function() {
return this.x;
};
return classref_0;
})();
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_simple_decorator():
input = """
@decorator
def test(x, y):
console.log("test")
"""
expected = """
var test;
test = function(x, y) {
console.log("test");
};
test = decorator(test);
"""
compiled = compile(input)
assert compiled == norm(expected)
def test_multiple_decorators():
input = """
@decorator1
@decorator2
def test(x, y):
console.log("test")
"""
expected = """
var test;
test = function(x, y) {
console.log("test");
};
test = decorator2(test);
test = decorator1(test);
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_decorator_with_params():
input = """
@decorator("test-param")
def test(x, y):
console.log("test")
"""
expected = """
var test;
test = function(x, y) {
console.log("test");
};
test = decorator("test-param")(test);
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_break():
input = """
while True:
break
"""
expected = """
while (true) {
break;
}
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_continue():
input = """
while True:
continue
"""
expected = """
while (true) {
continue;
}
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_simple_slice():
input = """
testList[1:5]
"""
expected = """
testList.slice(1, 5);
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_partial_slices():
input = """
testList[1:]
testList[:5]
"""
expected = """
testList.slice(1);
testList.slice(0, 5);
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_negative_slices():
input = """
testList[1:-1]
"""
expected = """
testList.slice(1, -1);
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
def test_multiple_assignation():
# FIXME: this seems generate inconsisten js?
input = """
x = a, b = 1, 2
"""
expected = """
var _ref_0, x;
x = _ref_0 = [1,2];
a = _ref_0[0];
b = _ref_0[1];
"""
compiled = compile(input)
print(compiled)
assert compiled == norm(expected)
| en | 0.240744 | # -*- coding: utf-8 -*- 2 << 2 2 >> 2 2 << 2; 2 >> 2; del x var x; delete x; del x, y var x, y; delete x; delete y; [1, 2, 3] [1,2,3]; (1, 2, 3) [1,2,3]; x = +1 y = -1 var x, y; x = +1; y = -1; var x; x = 2; var x; x = 2 * ((33 + 2.2) / 2); var x; x = null; x = True y = False var x, y; x = true; y = false; var x, y; x = y = 2; x = 0 x += 2 x -= 2 x /= 2 x *= 2 x %= 2 x **= 2 x //= 2 var x; x = 0; x += 2; x -= 2; x /= 2; x *= 2; x %= 2; x = Math.pow(x, 2); x = Math.floor(x / 2); def foo(): return 2 var foo; foo = function() { return 2; }; def foo(a, b): return a + b var foo; foo = function(a, b) { return a + b; }; def foo(a, b): def bar(): return 2 return bar var foo; foo = function(a, b) { var bar; bar = function() { return 2; }; return bar; }; x = foo("Hello World") var x; x = foo("Hello World"); x = foo("Hello World", 2, 2.3) var x; x = foo("Hello World", 2, 2.3); x = jQuery(".span") x.on("click", lambda e: e.preventDefault()) var x; x = jQuery(".span"); x.on("click", function(e) { e.preventDefault(); }); x = {"foo": 2, "bar": {"kk": 3}} var x; x = { "foo": 2, "bar": { "kk": 3 } }; x = {"foo": 2, "bar": {"kk": [1, 2, 3]}} var x; x = { "foo": 2, "bar": { "kk": [1,2,3] } }; def foo(a): if a is None: return None return a + 2 var foo; foo = function(a) { if (a === null) { return null; } return a + 2; }; def foo(a): if a is None: return None else: return a + 2 var foo; foo = function(a) { if (a === null) { return null; } else { return a + 2; } }; def foo(a): if a is None: return None elif a == 0: return a + 2 var foo; foo = function(a) { if (a === null) { return null; } else if (a === 0) { return a + 2; } }; for item in [1,2,3,4,5]: console.log(item) var item, ref_0, ref_1; for (ref_0 = 0, ref_1 = [1,2,3,4,5]; ref_0 < ref_1.length; ref_0++) { item = ref_1[ref_0]; console.log(item); } for item1 in [1,2,3,4,5]: for item2 in [10, 20, 34]: console.log(item1, item2) var item1, item2, ref_0, ref_1, ref_2, ref_3; for (ref_2 = 0, ref_3 = [1,2,3,4,5]; ref_2 < 
ref_3.length; ref_2++) { item1 = ref_3[ref_2]; for (ref_0 = 0, ref_1 = [10,20,34]; ref_0 < ref_1.length; ref_0++) { item2 = ref_1[ref_0]; console.log(item1, item2); } } while True: console.log("test") while (true) { console.log("test"); } while True: while True: console.log("test") while (true) { while (true) { console.log("test"); } } # FIXME: this seems generate inconsisten js? while my_var: console.log("test") else: console.log("test else") var ref_0; ref_0 = true; while (my_var) { ref_0 = false; console.log("test"); } if (ref_0) { console.log("test else"); } count = [num for num in [1, 2, 3, 4]] var _i_0, _len_0, _results_0, _values_0, count; count = (function() { var _i_0, _len_0, _values_0, _results_0; _values_0 = [1,2,3,4]; _results_0 = []; for (_i_0 = 0, _len_0 = _values_0.length; _i_0 < _len_0; _i_0++) { _results_0.push(_values_0[_i_0]) } return _results_0; })(); raise "sample exception" throw "sample exception"; count = [num for num in [1, 2, 3, 4] if num != 4] var _i_0, _len_0, _results_0, _values_0, count; count = (function() { var _i_0, _len_0, _values_0, _results_0; _values_0 = [1,2,3,4]; _results_0 = []; for (_i_0 = 0, _len_0 = _values_0.length; _i_0 < _len_0; _i_0++) { if (num !== 4) { _results_0.push(_values_0[_i_0]) } } return _results_0; })(); count = [num for num in [1, 2, 3, 4] if num != 4 if num != 3] var _i_0, _len_0, _results_0, _values_0, count; count = (function() { var _i_0, _len_0, _values_0, _results_0; _values_0 = [1,2,3,4]; _results_0 = []; for (_i_0 = 0, _len_0 = _values_0.length; _i_0 < _len_0; _i_0++) { if (num !== 4 && num !== 3) { _results_0.push(_values_0[_i_0]) } } return _results_0; })(); try: do_some_thing() except Error as e: do_some_thing_other() try { do_some_thing(); } catch (e) { do_some_thing_other(); } try: do_some_thing() finally: do_some_thing_other() try { do_some_thing(); } finally { do_some_thing_other(); } import _global as g var g; g = this; import _global as g g = 2 import _new as new_instance var new_instance; 
new_instance = function() { var ___args_array = Array.apply(null, arguments); var ___clazz = ___args_array.slice(0, 1)[0]; return new (___clazz.bind.apply(___clazz, ___args_array))();}; import _new as new new = 2 xx = foo_bar() var xx; xx = fooBar(); xx = foo_bar() (function() { var xx; xx = foo_bar(); }).call(this); xx = foo["22"] var xx; xx = foo["22"]; return return; def person(name): this.name = name var person; person = function(name) { this.name = name; }; class MyClass: def __init__(x): this.x = x def foo(): return this.x var MyClass, foo; MyClass = (function() { var classref_0; classref_0 = function(x) { this.x = x; }; classref_0.prototype.foo = function() { return this.x; }; return classref_0; })(); @decorator def test(x, y): console.log("test") var test; test = function(x, y) { console.log("test"); }; test = decorator(test); @decorator1 @decorator2 def test(x, y): console.log("test") var test; test = function(x, y) { console.log("test"); }; test = decorator2(test); test = decorator1(test); @decorator("test-param") def test(x, y): console.log("test") var test; test = function(x, y) { console.log("test"); }; test = decorator("test-param")(test); while True: break while (true) { break; } while True: continue while (true) { continue; } testList[1:5] testList.slice(1, 5); testList[1:] testList[:5] testList.slice(1); testList.slice(0, 5); testList[1:-1] testList.slice(1, -1); # FIXME: this seems generate inconsisten js? x = a, b = 1, 2 var _ref_0, x; x = _ref_0 = [1,2]; a = _ref_0[0]; b = _ref_0[1]; | 2.742483 | 3 |
normalizing_flows/models/losses.py | TanguyUrvoy/normalizing-flows | 15 | 6619710 | import tensorflow as tf
import tensorflow_probability as tfp
from tensorflow.keras.losses import binary_crossentropy
def wasserstein_loss(D, lam=10.):
"""
Implementation of the Wasserstein loss (Arjovsky et al. 2017) with
gradient penalty for Lipschitz constraint (Gulrajani et al. 2017).
Returns (D_loss, G_loss); loss functions for the discriminator and
generator respectively.
D : discriminator/critic function
x : "real" data
x_gen : "fake" data
lam : gradient penalty scalar; defaults to 10, as suggested by the authors
"""
lam = tf.constant(lam)
@tf.function
def D_loss(x_true, x_pred):
reduction_axes = [i for i in range(1, x_true.shape.rank)]
d_x = D(x_true)
d_xg = D(x_pred)
# compute Wasserstein distance
wloss = tf.math.reduce_mean(d_xg) - tf.math.reduce_mean(d_x)
# interpolate xs
eps = tf.random.uniform(tf.shape(x_true)[:1], minval=0.0, maxval=1.0)
eps = tf.reshape(eps, (-1,*[1]*(x_true.shape.rank-1)))
x_i = x_true + eps*(x_pred - x_true)
dD_dx = tf.gradients(D(x_i), x_i)[0]
grad_norm = tf.math.sqrt(tf.math.reduce_sum(dD_dx**2, axis=reduction_axes))
D_loss = wloss + lam*tf.math.reduce_mean((grad_norm - 1.0)**2)
return D_loss
@tf.function
def G_loss(x_true, x_pred):
return -tf.math.reduce_mean(D(x_pred))
return D_loss, G_loss
def bce_loss(D, from_logits=True):
"""
Implementation of traditional GAN discriminator loss with soft/noisy labels.
"""
@tf.function
def D_loss(x_true, x_pred):
pred_real = D(x_true)
pred_fake = D(x_pred)
target_real = tf.ones_like(pred_real)
target_real -= tf.random.normal(tf.shape(target_real), mean=0.1, stddev=0.02)
target_fake = tf.zeros_like(pred_fake)
target_fake += tf.random.normal(tf.shape(target_fake), mean=0.1, stddev=0.02)
loss_real = binary_crossentropy(target_real, pred_real, from_logits=from_logits)
loss_fake = binary_crossentropy(target_fake, pred_fake, from_logits=from_logits)
loss = (loss_real + loss_fake)*0.5
return tf.math.reduce_mean(loss)
@tf.function
def G_loss(_, x_pred):
# Use discriminator loss with labels inverted
pred_fake = D(x_pred)
target_fake = tf.ones_like(pred_fake)
target_fake -= tf.random.normal(tf.shape(target_fake), mean=0.1, stddev=0.02)
loss_fake = binary_crossentropy(target_fake, pred_fake, from_logits=from_logits)
return tf.math.reduce_mean(loss_fake)
return D_loss, G_loss
def spatial_mae(scale, c=1.0, stride=1):
"""
"Spatial" MAE auxiliary loss for generator. Penalizes outputs
which violate spatial average preservation between input and output.
c is an additional constant multiplied with the kernel.
"""
kernel = c*tf.ones((scale,scale,1,1)) / (scale**2.)
def _spatial_mse(x_in, y_pred):
x_avg = tf.nn.conv2d(x_in, kernel, strides=(stride, stride), padding='SAME')
y_avg = tf.nn.conv2d(y_pred, kernel, strides=(stride, stride), padding='SAME')
return tf.math.reduce_mean(tf.math.abs(y_avg - x_avg))
return _spatial_mse
def kl_divergence_normal(q, p, mu_q, mu_p, log_var_q, log_var_p, ldj=0.0):
logd_q = tf.math.reduce_sum(-0.5*(log_var_q + (q - mu_q)**2 / tf.math.exp(log_var_q)), axis=1)
logd_p = tf.math.reduce_sum(-0.5*(log_var_p + (p - mu_p)**2 / tf.math.exp(log_var_p)), axis=1)
kld = tf.math.reduce_mean(logd_q - logd_p - ldj)
return kld
| import tensorflow as tf
import tensorflow_probability as tfp
from tensorflow.keras.losses import binary_crossentropy
def wasserstein_loss(D, lam=10.):
"""
Implementation of the Wasserstein loss (Arjovsky et al. 2017) with
gradient penalty for Lipschitz constraint (Gulrajani et al. 2017).
Returns (D_loss, G_loss); loss functions for the discriminator and
generator respectively.
D : discriminator/critic function
x : "real" data
x_gen : "fake" data
lam : gradient penalty scalar; defaults to 10, as suggested by the authors
"""
lam = tf.constant(lam)
@tf.function
def D_loss(x_true, x_pred):
reduction_axes = [i for i in range(1, x_true.shape.rank)]
d_x = D(x_true)
d_xg = D(x_pred)
# compute Wasserstein distance
wloss = tf.math.reduce_mean(d_xg) - tf.math.reduce_mean(d_x)
# interpolate xs
eps = tf.random.uniform(tf.shape(x_true)[:1], minval=0.0, maxval=1.0)
eps = tf.reshape(eps, (-1,*[1]*(x_true.shape.rank-1)))
x_i = x_true + eps*(x_pred - x_true)
dD_dx = tf.gradients(D(x_i), x_i)[0]
grad_norm = tf.math.sqrt(tf.math.reduce_sum(dD_dx**2, axis=reduction_axes))
D_loss = wloss + lam*tf.math.reduce_mean((grad_norm - 1.0)**2)
return D_loss
@tf.function
def G_loss(x_true, x_pred):
return -tf.math.reduce_mean(D(x_pred))
return D_loss, G_loss
def bce_loss(D, from_logits=True):
"""
Implementation of traditional GAN discriminator loss with soft/noisy labels.
"""
@tf.function
def D_loss(x_true, x_pred):
pred_real = D(x_true)
pred_fake = D(x_pred)
target_real = tf.ones_like(pred_real)
target_real -= tf.random.normal(tf.shape(target_real), mean=0.1, stddev=0.02)
target_fake = tf.zeros_like(pred_fake)
target_fake += tf.random.normal(tf.shape(target_fake), mean=0.1, stddev=0.02)
loss_real = binary_crossentropy(target_real, pred_real, from_logits=from_logits)
loss_fake = binary_crossentropy(target_fake, pred_fake, from_logits=from_logits)
loss = (loss_real + loss_fake)*0.5
return tf.math.reduce_mean(loss)
@tf.function
def G_loss(_, x_pred):
# Use discriminator loss with labels inverted
pred_fake = D(x_pred)
target_fake = tf.ones_like(pred_fake)
target_fake -= tf.random.normal(tf.shape(target_fake), mean=0.1, stddev=0.02)
loss_fake = binary_crossentropy(target_fake, pred_fake, from_logits=from_logits)
return tf.math.reduce_mean(loss_fake)
return D_loss, G_loss
def spatial_mae(scale, c=1.0, stride=1):
"""
"Spatial" MAE auxiliary loss for generator. Penalizes outputs
which violate spatial average preservation between input and output.
c is an additional constant multiplied with the kernel.
"""
kernel = c*tf.ones((scale,scale,1,1)) / (scale**2.)
def _spatial_mse(x_in, y_pred):
x_avg = tf.nn.conv2d(x_in, kernel, strides=(stride, stride), padding='SAME')
y_avg = tf.nn.conv2d(y_pred, kernel, strides=(stride, stride), padding='SAME')
return tf.math.reduce_mean(tf.math.abs(y_avg - x_avg))
return _spatial_mse
def kl_divergence_normal(q, p, mu_q, mu_p, log_var_q, log_var_p, ldj=0.0):
logd_q = tf.math.reduce_sum(-0.5*(log_var_q + (q - mu_q)**2 / tf.math.exp(log_var_q)), axis=1)
logd_p = tf.math.reduce_sum(-0.5*(log_var_p + (p - mu_p)**2 / tf.math.exp(log_var_p)), axis=1)
kld = tf.math.reduce_mean(logd_q - logd_p - ldj)
return kld
| en | 0.811856 | Implementation of the Wasserstein loss (Arjovsky et al. 2017) with gradient penalty for Lipschitz constraint (Gulrajani et al. 2017). Returns (D_loss, G_loss); loss functions for the discriminator and generator respectively. D : discriminator/critic function x : "real" data x_gen : "fake" data lam : gradient penalty scalar; defaults to 10, as suggested by the authors # compute Wasserstein distance # interpolate xs Implementation of traditional GAN discriminator loss with soft/noisy labels. # Use discriminator loss with labels inverted "Spatial" MAE auxiliary loss for generator. Penalizes outputs which violate spatial average preservation between input and output. c is an additional constant multiplied with the kernel. | 2.628893 | 3 |
venv/lib/python3.8/site-packages/clikit/io/input_stream/standard_input_stream.py | GiulianaPola/select_repeats | 2 | 6619711 | <gh_stars>1-10
/home/runner/.cache/pip/pool/43/51/2e/b2a00048d641364aa3f0a950e11fb31338ed67aba116892abf504d86ce | /home/runner/.cache/pip/pool/43/51/2e/b2a00048d641364aa3f0a950e11fb31338ed67aba116892abf504d86ce | none | 1 | 0.772882 | 1 | |
inputs.py | pikulak/xoxo | 1 | 6619712 | <reponame>pikulak/xoxo
from abc import abstractmethod
class InputInterface:
@abstractmethod
def get_next_move():
raise NotImplementedError
class LocalPlayerInput(InputInterface):
def get_next_move(player):
prompt = "{}'s ({}) turn: ".format(player.name, player.marker)
try:
next_move = int(input(prompt))
return next_move
except ValueError:
return False | from abc import abstractmethod
class InputInterface:
@abstractmethod
def get_next_move():
raise NotImplementedError
class LocalPlayerInput(InputInterface):
def get_next_move(player):
prompt = "{}'s ({}) turn: ".format(player.name, player.marker)
try:
next_move = int(input(prompt))
return next_move
except ValueError:
return False | none | 1 | 3.447582 | 3 | |
tester/utils.py | KuipersT/CS2900-Lab-2 | 0 | 6619713 | <reponame>KuipersT/CS2900-Lab-2<gh_stars>0
"""
Utility functions for CS2900 Lab 2 checkpoint test suite.
Author: <NAME>
"""
def check_code(regex):
import inspect
import re
local_vars = str(inspect.currentframe().f_back.f_back.f_locals)
no_match = len(re.findall(regex, local_vars)) == 0
return no_match
def check_nrow_ncol():
return (check_code(r"nrow\s*=\s*5") and check_code(r"ncol\s*=\s*3"))
def check_rot_inv():
return not check_code(r"R1m\s*=\s*R1\.t[^b-m]{3}[^a-o, w-z]{2}o[se]{2}\(\)")
| """
Utility functions for CS2900 Lab 2 checkpoint test suite.
Author: <NAME>
"""
def check_code(regex):
import inspect
import re
local_vars = str(inspect.currentframe().f_back.f_back.f_locals)
no_match = len(re.findall(regex, local_vars)) == 0
return no_match
def check_nrow_ncol():
return (check_code(r"nrow\s*=\s*5") and check_code(r"ncol\s*=\s*3"))
def check_rot_inv():
return not check_code(r"R1m\s*=\s*R1\.t[^b-m]{3}[^a-o, w-z]{2}o[se]{2}\(\)") | en | 0.666431 | Utility functions for CS2900 Lab 2 checkpoint test suite. Author: <NAME> | 2.681731 | 3 |
data_types/sequence_type/list_type.py | aryanz-co-in/python-indentation-datatypes-tamil | 0 | 6619714 | <gh_stars>0
# list, tuple, range Sequence Types
# List
names = ["Ram", "Akbar", "Antony"]
# List will allow us to change item
names[0] = "Lakshmi"
print(names)
door_no = [201, 202, 203]
# 0 1 2
print(door_no[2])
| # list, tuple, range Sequence Types
# List
names = ["Ram", "Akbar", "Antony"]
# List will allow us to change item
names[0] = "Lakshmi"
print(names)
door_no = [201, 202, 203]
# 0 1 2
print(door_no[2]) | en | 0.762442 | # list, tuple, range Sequence Types # List # List will allow us to change item # 0 1 2 | 3.743843 | 4 |
bots/daemon_ball_possession.py | harishkrupo/cs7632-haxball-ai | 2 | 6619715 | """
* Copyright 2020 cs7632-haxball-ai team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
"""
from bots.btnode import BTNode
from replay import *
import numpy as np
class BallPossession(BTNode):
def parseArgs(self, args):
if len(args) > 0:
self.id = args[0]
# 0:Red and 1:Blue Look in replay.py
self.team = args[1]
self.is_mine = args[2]
def execute(self):
BTNode.execute(self)
gameworld = self.agent
if gameworld.player:
t = 90
player = gameworld.player
game = gameworld.game
pteam = player.team
bx = game.ball.x
by = game.ball.y
if self.is_mine:
px = player.disc.x
py = player.disc.y
xdiff = abs(px - bx)
ydiff = abs(py - by)
dist = np.sqrt(xdiff ** 2 + ydiff ** 2)
if pteam == self.team and dist < t:
print("OUR TEAM POSSESSION")
return True
else:
enemy_player = [
game_player
for game_player in game.players
if not game_player.team == player.team
][0]
px = enemy_player.disc.x
py = enemy_player.disc.y
xdiff = abs(px - bx)
ydiff = abs(py - by)
dist = np.sqrt(xdiff ** 2 + ydiff ** 2)
if pteam != self.team and dist < t:
print("ENEMY TEAM POSSESSION")
return True
return False
return None
| """
* Copyright 2020 cs7632-haxball-ai team
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
"""
from bots.btnode import BTNode
from replay import *
import numpy as np
class BallPossession(BTNode):
def parseArgs(self, args):
if len(args) > 0:
self.id = args[0]
# 0:Red and 1:Blue Look in replay.py
self.team = args[1]
self.is_mine = args[2]
def execute(self):
BTNode.execute(self)
gameworld = self.agent
if gameworld.player:
t = 90
player = gameworld.player
game = gameworld.game
pteam = player.team
bx = game.ball.x
by = game.ball.y
if self.is_mine:
px = player.disc.x
py = player.disc.y
xdiff = abs(px - bx)
ydiff = abs(py - by)
dist = np.sqrt(xdiff ** 2 + ydiff ** 2)
if pteam == self.team and dist < t:
print("OUR TEAM POSSESSION")
return True
else:
enemy_player = [
game_player
for game_player in game.players
if not game_player.team == player.team
][0]
px = enemy_player.disc.x
py = enemy_player.disc.y
xdiff = abs(px - bx)
ydiff = abs(py - by)
dist = np.sqrt(xdiff ** 2 + ydiff ** 2)
if pteam != self.team and dist < t:
print("ENEMY TEAM POSSESSION")
return True
return False
return None
| en | 0.825527 | * Copyright 2020 cs7632-haxball-ai team * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. # 0:Red and 1:Blue Look in replay.py | 2.847548 | 3 |
tests/st/ops/cpu/test_matmul.py | PowerOlive/mindspore | 5 | 6619716 | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P
np.random.seed(100)
class MatMulNet(nn.Cell):
def __init__(self, transpose_a=False, transpose_b=False):
super(MatMulNet, self).__init__()
self.matmul = P.MatMul(transpose_a, transpose_b)
def construct(self, x, y):
return self.matmul(x, y)
def judge_result_correct(result, expect):
assert result.dtype == expect.dtype
assert result.shape == expect.shape
assert np.allclose(result, expect)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
@pytest.mark.parametrize('dtype', [np.float16, np.float32, np.float64])
def test_matmul_no_transpose_vec(dtype):
"""
Feature: matrix & vec
Description: test cases for matmul between matrix and vector
Expectation: the result match to scipy
"""
a = np.arange(1 * 3).reshape((1, 3)).astype(dtype)
b = np.arange(3 * 5).reshape((3, 5)).astype(dtype)
context.set_context(mode=context.GRAPH_MODE, device_target='CPU')
net = MatMulNet()
output = net(Tensor(a), Tensor(b)).asnumpy()
expect = np.array([[25., 28., 31., 34., 37.]], dtype)
judge_result_correct(output, expect)
def np_matmul(a: np.ndarray, b: np.ndarray, trans_a: bool, trans_b: bool):
if trans_a:
a = a.T
if trans_b:
b = b.T
return np.matmul(a, b)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
@pytest.mark.parametrize('trans_a', [True, False])
@pytest.mark.parametrize('trans_b', [True, False])
@pytest.mark.parametrize('dtype', [np.float16, np.float32, np.float64])
def test_matmul_matrix(trans_a, trans_b, dtype):
"""
Feature: ALL To ALL
Description: test cases for matmul for all float types and transpose args combinations
Expectation: the result match to scipy
"""
m, k, n = 5, 3, 4
a = np.random.random((m, k)).astype(dtype)
b = np.random.random((k, n)).astype(dtype)
if trans_a:
a = a.T
if trans_b:
b = b.T
expect = np_matmul(a, b, trans_a, trans_b)
context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
net = MatMulNet(transpose_a=trans_a, transpose_b=trans_b)
output = net(Tensor(a), Tensor(b)).asnumpy()
judge_result_correct(output, expect)
| # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
from mindspore import Tensor
from mindspore.ops import operations as P
np.random.seed(100)
class MatMulNet(nn.Cell):
def __init__(self, transpose_a=False, transpose_b=False):
super(MatMulNet, self).__init__()
self.matmul = P.MatMul(transpose_a, transpose_b)
def construct(self, x, y):
return self.matmul(x, y)
def judge_result_correct(result, expect):
assert result.dtype == expect.dtype
assert result.shape == expect.shape
assert np.allclose(result, expect)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
@pytest.mark.parametrize('dtype', [np.float16, np.float32, np.float64])
def test_matmul_no_transpose_vec(dtype):
"""
Feature: matrix & vec
Description: test cases for matmul between matrix and vector
Expectation: the result match to scipy
"""
a = np.arange(1 * 3).reshape((1, 3)).astype(dtype)
b = np.arange(3 * 5).reshape((3, 5)).astype(dtype)
context.set_context(mode=context.GRAPH_MODE, device_target='CPU')
net = MatMulNet()
output = net(Tensor(a), Tensor(b)).asnumpy()
expect = np.array([[25., 28., 31., 34., 37.]], dtype)
judge_result_correct(output, expect)
def np_matmul(a: np.ndarray, b: np.ndarray, trans_a: bool, trans_b: bool):
if trans_a:
a = a.T
if trans_b:
b = b.T
return np.matmul(a, b)
@pytest.mark.level0
@pytest.mark.platform_x86_cpu
@pytest.mark.env_onecard
@pytest.mark.parametrize('trans_a', [True, False])
@pytest.mark.parametrize('trans_b', [True, False])
@pytest.mark.parametrize('dtype', [np.float16, np.float32, np.float64])
def test_matmul_matrix(trans_a, trans_b, dtype):
"""
Feature: ALL To ALL
Description: test cases for matmul for all float types and transpose args combinations
Expectation: the result match to scipy
"""
m, k, n = 5, 3, 4
a = np.random.random((m, k)).astype(dtype)
b = np.random.random((k, n)).astype(dtype)
if trans_a:
a = a.T
if trans_b:
b = b.T
expect = np_matmul(a, b, trans_a, trans_b)
context.set_context(mode=context.GRAPH_MODE, device_target="CPU")
net = MatMulNet(transpose_a=trans_a, transpose_b=trans_b)
output = net(Tensor(a), Tensor(b)).asnumpy()
judge_result_correct(output, expect)
| en | 0.784179 | # Copyright 2021 Huawei Technologies Co., Ltd # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================ Feature: matrix & vec Description: test cases for matmul between matrix and vector Expectation: the result match to scipy Feature: ALL To ALL Description: test cases for matmul for all float types and transpose args combinations Expectation: the result match to scipy | 2.527273 | 3 |
math checker/checker.py | AlexGogev/My-Projects | 0 | 6619717 | <filename>math checker/checker.py<gh_stars>0
with open("math.txt", "r") as completed:
text = completed.readlines()
ans = []
for i in text:
ans.append(i.strip("\n"))
completed = []
for i in ans:
completed.append(i.split('=')[1].lstrip().split(' ')[0])
with open("results.txt", "r") as results:
res = results.readlines()
results = []
for i in res:
results.append(i[:-2])
print(completed)
print(results)
n =0
correct = 0
wrong = 0
for i in completed:
if i == results[n]:
correct +=1
n +=1
else :
wrong +=1
n +=1
print(f'correct: {correct} \nwrong: {wrong}') | <filename>math checker/checker.py<gh_stars>0
with open("math.txt", "r") as completed:
text = completed.readlines()
ans = []
for i in text:
ans.append(i.strip("\n"))
completed = []
for i in ans:
completed.append(i.split('=')[1].lstrip().split(' ')[0])
with open("results.txt", "r") as results:
res = results.readlines()
results = []
for i in res:
results.append(i[:-2])
print(completed)
print(results)
n =0
correct = 0
wrong = 0
for i in completed:
if i == results[n]:
correct +=1
n +=1
else :
wrong +=1
n +=1
print(f'correct: {correct} \nwrong: {wrong}') | none | 1 | 3.290219 | 3 | |
ros/src/waypoint_updater/waypoint_updater.py | MikeBMW/CarND-Capstone-debug | 0 | 6619718 | <filename>ros/src/waypoint_updater/waypoint_updater.py
#!/usr/bin/env python
import rospy
from geometry_msgs.msg import PoseStamped
from styx_msgs.msg import Lane, Waypoint
from std_msgs.msg import Int32
import math
import numpy as np
from scipy.spatial import KDTree
'''
This node will publish waypoints from the car's current position to some `x` distance ahead.
As mentioned in the doc, you should ideally first implement a version which does not care
about traffic lights or obstacles.
Once you have created dbw_node, you will update this node to use the status of traffic lights too.
Please note that our simulator also provides the exact location of traffic lights and their
current status in `/vehicle/traffic_lights` message. You can use this message to build this node
as well as to verify your TL classifier.
'''
LOOKAHEAD_WPS = 200 # Number of waypoints we will publish. You can change this number
MAX_DECEL = .5
class WaypointUpdater(object):
    """ROS node that publishes the next LOOKAHEAD_WPS track waypoints ahead of
    the vehicle, tapering target velocities to zero when a red-light stop line
    is within the lookahead horizon.

    Subscribes:
        /current_pose     (geometry_msgs/PoseStamped) -- vehicle pose, map frame
        /base_waypoints   (styx_msgs/Lane)            -- full track list, sent once
        /traffic_waypoint (std_msgs/Int32)            -- next stop-line index, -1 if none
    Publishes:
        /final_waypoints  (styx_msgs/Lane)            -- waypoints for the controller
    """

    def __init__(self):
        rospy.init_node('waypoint_updater')

        # Full track description (styx_msgs/Lane); set once by waypoints_cb.
        self.base_lane = None
        # Waypoint index of the next red-light stop line (-1 -> no stop needed).
        self.stopline_wp_idx = -1
        # [x, y] per base waypoint, plus a KD-tree over them for fast
        # nearest-neighbour lookup.
        self.waypoints_2d = None
        self.waypoint_tree = None
        # Latest raw pose message (geometry_msgs/PoseStamped).
        self.pose_msg = None

        rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
        rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
        rospy.Subscriber('/traffic_waypoint', Int32, self.traffic_cb)

        self.final_waypoints_pub = rospy.Publisher('final_waypoints', Lane, queue_size=1)

        # Blocks until shutdown, publishing at 50 Hz.
        self.publisher_loop(50)

    def publisher_loop(self, frequency):
        """Publish final waypoints at `frequency` Hz until node shutdown.

        Bug fix: the loop previously gated only on `base_lane`, which the
        waypoints callback used to assign *before* building the KD-tree --
        an early pose message could then hit a None `waypoint_tree`.  The
        tree is now part of the readiness check.
        """
        rate = rospy.Rate(frequency)
        while not rospy.is_shutdown():
            if self.pose_msg and self.base_lane and self.waypoint_tree:
                self.publish_waypoints()
            rate.sleep()

    def pose_cb(self, msg):
        """Cache the latest vehicle pose (geometry_msgs/PoseStamped)."""
        self.pose_msg = msg

    def waypoints_cb(self, waypoint_msg):
        """Cache the full track waypoint list (styx_msgs/Lane).

        /base_waypoints is sent only once, so the 2D list and KD-tree are
        built a single time.  The tree is built *before* `base_lane` is
        assigned so no other code can observe a lane without a usable tree
        (the original assignment order was a startup race).
        """
        if not self.waypoints_2d:
            self.waypoints_2d = [[wp.pose.pose.position.x, wp.pose.pose.position.y]
                                 for wp in waypoint_msg.waypoints]
            self.waypoint_tree = KDTree(self.waypoints_2d)
        self.base_lane = waypoint_msg

    def publish_waypoints(self):
        """Publish the lane of upcoming waypoints on /final_waypoints."""
        self.final_waypoints_pub.publish(self.generate_lane())

    def generate_lane(self):
        """Build a styx_msgs/Lane of LOOKAHEAD_WPS waypoints ahead of the car,
        decelerating towards the stop line when a red light is in range."""
        lane = Lane()
        closest_idx = self.get_nearest_waypoint_idx()
        farthest_idx = closest_idx + LOOKAHEAD_WPS
        base_waypoints = self.base_lane.waypoints[closest_idx:farthest_idx]
        # No red light in sight (or it lies beyond our horizon): keep base speeds.
        if self.stopline_wp_idx == -1 or (self.stopline_wp_idx >= farthest_idx):
            lane.waypoints = base_waypoints
        else:
            lane.waypoints = self.decelerate_waypoints(base_waypoints, closest_idx)
        return lane

    def decelerate_waypoints(self, waypoints, closest_idx):
        """Return copies of `waypoints` with velocities tapering to 0 at the stop line.

        Copies are used because `waypoints` aliases the cached base list,
        which must keep its original velocities.
        """
        temp = []
        for i, wp in enumerate(waypoints):
            p = Waypoint()
            p.pose = wp.pose
            # Stop two waypoints early so the car's nose stays behind the line.
            stop_idx = max(self.stopline_wp_idx - closest_idx - 2, 0)
            dist = self.distance(waypoints, i, stop_idx)
            # v = sqrt(2*a*d): constant-deceleration profile -- the farther
            # from the line, the higher the allowed speed.
            vel = math.sqrt(2 * MAX_DECEL * dist)
            if vel < 1.:
                vel = 0.
            # Never exceed the originally planned speed.
            p.twist.twist.linear.x = min(vel, wp.twist.twist.linear.x)
            temp.append(p)
        return temp

    def get_nearest_waypoint_idx(self):
        """Return the index of the closest base waypoint *ahead of* the vehicle."""
        x = self.pose_msg.pose.position.x
        y = self.pose_msg.pose.position.y
        # KD-tree nearest-neighbour query; [1] is the index of the hit.
        closest_idx = self.waypoint_tree.query([x, y], 1)[1]

        closest_coord = np.array(self.waypoints_2d[closest_idx])
        prev_coord = np.array(self.waypoints_2d[closest_idx - 1])
        current_pos = np.array([x, y])

        # If the track direction (prev -> closest) and the vector from the car
        # to the closest waypoint agree (positive dot product), that waypoint
        # is still in front of the car; otherwise take the next one.
        wp_vec = closest_coord - prev_coord
        car_vec = closest_coord - current_pos
        dot_product = np.dot(wp_vec, car_vec)
        if dot_product < 0:
            closest_idx = (closest_idx + 1) % len(self.waypoints_2d)
        return closest_idx

    def traffic_cb(self, msg):
        """Store the stop-line waypoint index from the traffic-light detector."""
        self.stopline_wp_idx = msg.data

    def obstacle_cb(self, msg):
        # TODO: Callback for /obstacle_waypoint message. We will implement it later
        pass

    def get_waypoint_velocity(self, waypoint):
        """Planned longitudinal velocity stored in a single waypoint."""
        return waypoint.twist.twist.linear.x

    def set_waypoint_velocity(self, waypoints, waypoint, velocity):
        """Overwrite the planned velocity of waypoints[waypoint] in place."""
        waypoints[waypoint].twist.twist.linear.x = velocity

    def distance(self, waypoints, wp1, wp2):
        """Cumulative Euclidean path length from waypoint wp1 to wp2 (wp1 <= wp2)."""
        dist = 0
        dl = lambda a, b: math.sqrt((a.x-b.x)**2 + (a.y-b.y)**2 + (a.z-b.z)**2)
        for i in range(wp1, wp2+1):
            dist += dl(waypoints[wp1].pose.pose.position, waypoints[i].pose.pose.position)
            wp1 = i
        return dist
# Node entry point: construct the updater (its __init__ spins until shutdown).
if __name__ == '__main__':
    try:
        WaypointUpdater()
    except rospy.ROSInterruptException:
        # Raised by rospy when the node is interrupted (e.g. Ctrl-C) during startup.
        rospy.logerr('Could not start waypoint updater node.')
| <filename>ros/src/waypoint_updater/waypoint_updater.py
#!/usr/bin/env python
import rospy
from geometry_msgs.msg import PoseStamped
from styx_msgs.msg import Lane, Waypoint
from std_msgs.msg import Int32
import math
import numpy as np
from scipy.spatial import KDTree
'''
This node will publish waypoints from the car's current position to some `x` distance ahead.
As mentioned in the doc, you should ideally first implement a version which does not care
about traffic lights or obstacles.
Once you have created dbw_node, you will update this node to use the status of traffic lights too.
Please note that our simulator also provides the exact location of traffic lights and their
current status in `/vehicle/traffic_lights` message. You can use this message to build this node
as well as to verify your TL classifier.
'''
LOOKAHEAD_WPS = 200 # Number of waypoints we will publish. You can change this number
MAX_DECEL = .5
class WaypointUpdater(object):
def __init__(self):
rospy.init_node('waypoint_updater')
# reserve waypoint
self.base_lane = None
# get from traffic_waypoint
self.stopline_wp_idx = -1
# contains a list of (x,y) tuples for all waypoints
self.waypoints_2d = None
# KD tree of the x,y waypoints to increase lookup time
self.waypoint_tree = None
# stores the raw pose message
self.pose_msg = None
rospy.Subscriber('/current_pose', PoseStamped, self.pose_cb)
rospy.Subscriber('/base_waypoints', Lane, self.waypoints_cb)
rospy.Subscriber('/traffic_waypoint', Int32, self.traffic_cb)
self.final_waypoints_pub = rospy.Publisher('final_waypoints', Lane, queue_size=1)
self.publisher_loop(50)
def publisher_loop(self, frequency):
"""
Task: This method is called from the constructor and is responsible for calling the
publishers and their helpers repeatedly.
arguments:
-frequency: int, the frequency with which to call the publishers
returns: Nothing
"""
rate = rospy.Rate(frequency)
while not rospy.is_shutdown():
if self.pose_msg and self.base_lane:
self.publish_waypoints()
rate.sleep()
def pose_cb(self, msg):
"""
Task: Processes the messages which contain the current
position of the vehicle in map coordinates
arguments:
- msg: message type geometry_msgs/PoseStamped
returns: Nothing
ROS integration
===
Type: Callback
Topic: /current_pose
msg_type: geometry_msgs/PoseStamped
std_msgs/Header header
uint32 seq
time stamp
string frame_id
geometry_msgs/Pose pose
geometry_msgs/Point position
float64 x
float64 y
float64 z
geometry_msgs/Quaternion orientation
float64 x
float64 y
float64 z
float64 w
"""
self.pose_msg = msg
def waypoints_cb(self, waypoint_msg):
"""
Task: Processes the waypoints message which contains all of the track's waypoints in map coordinates.
Needs only to run once, because the waypoints are sent only once at the beginning.
arguments:
- waypoints: message type styx_msgs/Lane
returns: Nothing
ROS integration:
===
Type: Callback
Topic: /base_waypoints
msg_type: styx_msgs/Lane
std_msgs/Header header
uint32 seq
time stamp
string frame_id
styx_msgs/Waypoint[] waypoints
geometry_msgs/PoseStamped pose
std_msgs/Header header
uint32 seq
time stamp
string frame_id
geometry_msgs/Pose pose
geometry_msgs/Point position
float64 x
float64 y
float64 z
geometry_msgs/Quaternion orientation
float64 x
float64 y
float64 z
float64 w
geometry_msgs/TwistStamped twist
std_msgs/Header header
uint32 seq
time stamp
string frame_id
geometry_msgs/Twist twist
geometry_msgs/Vector3 linear
float64 x
float64 y
float64 z
geometry_msgs/Vector3 angular
float64 x
float64 y
float64 z
"""
self.base_lane = waypoint_msg
if not self.waypoints_2d:
self.waypoints_2d = [[waypoint.pose.pose.position.x, waypoint.pose.pose.position.y] for waypoint in waypoint_msg.waypoints]
self.waypoint_tree = KDTree(self.waypoints_2d)
def publish_waypoints(self):
"""
Task: Invokes the waypoint publisher and publishes the nearest waypoints to the
/final_waypoints topic.
arguments:
- closest_idx: int, the idx of the nearest waypoints in front of the car.
ROS integration:
===
Type: Publisher
Topic: /final_waypoints
msg_type: styx_msgs/Lane
std_msgs/Header header
uint32 seq
time stamp
string frame_id
styx_msgs/Waypoint[] waypoints
geometry_msgs/PoseStamped pose
std_msgs/Header header
uint32 seq
time stamp
string frame_id
geometry_msgs/Pose pose
geometry_msgs/Point position
float64 x
float64 y
float64 z
geometry_msgs/Quaternion orientation
float64 x
float64 y
float64 z
float64 w
geometry_msgs/TwistStamped twist
std_msgs/Header header
uint32 seq
time stamp
string frame_id
geometry_msgs/Twist twist
geometry_msgs/Vector3 linear
float64 x
float64 y
float64 z
geometry_msgs/Vector3 angular
float64 x
float64 y
float64 z
"""
final_lane = self.generate_lane()
self.final_waypoints_pub.publish(final_lane)
def generate_lane(self):
lane = Lane()
closest_idx = self.get_nearest_waypoint_idx()
farthest_idx = closest_idx + LOOKAHEAD_WPS
base_waypoints = self.base_lane.waypoints[closest_idx:farthest_idx]
# don't care about it, leave it alone
if self.stopline_wp_idx == -1 or (self.stopline_wp_idx >= farthest_idx):
lane.waypoints = base_waypoints
# brake action
else:
lane.waypoints = self.decelerate_waypoints(base_waypoints, closest_idx)
return lane
def decelerate_waypoints(self, waypoints, closest_idx):
# don't modify base waypoint directly, so use temp[]
temp = []
for i, wp in enumerate(waypoints):
p = Waypoint()
p.pose = wp.pose
# find the center of the car ,so use "-2"
stop_idx = max(self.stopline_wp_idx - closest_idx - 2, 0)
# figure out how far away to decelerate
dist = self.distance(waypoints, i, stop_idx)
# velocity falling down profile when brake, the larger distance the smaller brake
vel = math.sqrt(2 * MAX_DECEL * dist)
if vel <1.:
vel = 0.
p.twist.twist.linear.x = min(vel, wp.twist.twist.linear.x)
temp.append(p)
return temp
def get_nearest_waypoint_idx(self):
"""
Task: Finds the nearest waypoint according to the car's current position
and returns the index of that waypoint
returns: int, index of nearest waypoint in self.waypoints_2d
"""
x = self.pose_msg.pose.position.x
y = self.pose_msg.pose.position.y
# lookup the KDtree to find the nearest point and return its index
closest_idx = self.waypoint_tree.query([x, y], 1)[1]
closest_coord = np.array(self.waypoints_2d[closest_idx])
prev_coord = np.array(self.waypoints_2d[closest_idx - 1])
current_pos = np.array([x, y])
wp_vec = closest_coord - prev_coord
car_vec = closest_coord - current_pos
# calculate dot product between the two vectors
# to determine if closest point is ahead of car
# -> same heading if dot product is > 0
dot_product = np.dot(wp_vec, car_vec)
# if the closest point is not ahead of the vehicle, choose the next point
if dot_product < 0:
closest_idx = (closest_idx + 1) % len(self.waypoints_2d)
return closest_idx
def traffic_cb(self, msg):
self.stopline_wp_idx = msg.data
def obstacle_cb(self, msg):
# TODO: Callback for /obstacle_waypoint message. We will implement it later
pass
def get_waypoint_velocity(self, waypoint):
return waypoint.twist.twist.linear.x
def set_waypoint_velocity(self, waypoints, waypoint, velocity):
waypoints[waypoint].twist.twist.linear.x = velocity
def distance(self, waypoints, wp1, wp2):
dist = 0
dl = lambda a, b: math.sqrt((a.x-b.x)**2 + (a.y-b.y)**2 + (a.z-b.z)**2)
for i in range(wp1, wp2+1):
dist += dl(waypoints[wp1].pose.pose.position, waypoints[i].pose.pose.position)
wp1 = i
return dist
if __name__ == '__main__':
try:
WaypointUpdater()
except rospy.ROSInterruptException:
rospy.logerr('Could not start waypoint updater node.')
| en | 0.726946 | #!/usr/bin/env python This node will publish waypoints from the car's current position to some `x` distance ahead. As mentioned in the doc, you should ideally first implement a version which does not care about traffic lights or obstacles. Once you have created dbw_node, you will update this node to use the status of traffic lights too. Please note that our simulator also provides the exact location of traffic lights and their current status in `/vehicle/traffic_lights` message. You can use this message to build this node as well as to verify your TL classifier. # Number of waypoints we will publish. You can change this number # reserve waypoint # get from traffic_waypoint # contains a list of (x,y) tuples for all waypoints # KD tree of the x,y waypoints to increase lookup time # stores the raw pose message Task: This method is called from the constructor and is responsible for calling the publishers and their helpers repeatedly. arguments: -frequency: int, the frequency with which to call the publishers returns: Nothing Task: Processes the messages which contain the current position of the vehicle in map coordinates arguments: - msg: message type geometry_msgs/PoseStamped returns: Nothing ROS integration === Type: Callback Topic: /current_pose msg_type: geometry_msgs/PoseStamped std_msgs/Header header uint32 seq time stamp string frame_id geometry_msgs/Pose pose geometry_msgs/Point position float64 x float64 y float64 z geometry_msgs/Quaternion orientation float64 x float64 y float64 z float64 w Task: Processes the waypoints message which contains all of the track's waypoints in map coordinates. Needs only to run once, because the waypoints are sent only once at the beginning. 
arguments: - waypoints: message type styx_msgs/Lane returns: Nothing ROS integration: === Type: Callback Topic: /base_waypoints msg_type: styx_msgs/Lane std_msgs/Header header uint32 seq time stamp string frame_id styx_msgs/Waypoint[] waypoints geometry_msgs/PoseStamped pose std_msgs/Header header uint32 seq time stamp string frame_id geometry_msgs/Pose pose geometry_msgs/Point position float64 x float64 y float64 z geometry_msgs/Quaternion orientation float64 x float64 y float64 z float64 w geometry_msgs/TwistStamped twist std_msgs/Header header uint32 seq time stamp string frame_id geometry_msgs/Twist twist geometry_msgs/Vector3 linear float64 x float64 y float64 z geometry_msgs/Vector3 angular float64 x float64 y float64 z Task: Invokes the waypoint publisher and publishes the nearest waypoints to the /final_waypoints topic. arguments: - closest_idx: int, the idx of the nearest waypoints in front of the car. ROS integration: === Type: Publisher Topic: /final_waypoints msg_type: styx_msgs/Lane std_msgs/Header header uint32 seq time stamp string frame_id styx_msgs/Waypoint[] waypoints geometry_msgs/PoseStamped pose std_msgs/Header header uint32 seq time stamp string frame_id geometry_msgs/Pose pose geometry_msgs/Point position float64 x float64 y float64 z geometry_msgs/Quaternion orientation float64 x float64 y float64 z float64 w geometry_msgs/TwistStamped twist std_msgs/Header header uint32 seq time stamp string frame_id geometry_msgs/Twist twist geometry_msgs/Vector3 linear float64 x float64 y float64 z geometry_msgs/Vector3 angular float64 x float64 y float64 z # don't care about it, leave it alone # brake action # don't modify base waypoint directly, so use temp[] # find the center of the car ,so use "-2" # figure out how far away to decelerate # velocity falling down profile when brake, the larger distance the smaller brake Task: Finds the nearest waypoint according to the car's current position and returns the index of that waypoint returns: int, index of 
nearest waypoint in self.waypoints_2d # lookup the KDtree to find the nearest point and return its index # calculate dot product between the two vectors # to determine if closest point is ahead of car # -> same heading if dot product is > 0 # if the closest point is not ahead of the vehicle, choose the next point # TODO: Callback for /obstacle_waypoint message. We will implement it later | 2.756482 | 3 |
python_path/__init__.py | raghuprasadks/chatshoppienew | 1 | 6619719 | <filename>python_path/__init__.py
import sys, os
class PythonPath(object):
    """Context manager that temporarily prepends a directory to ``sys.path``.

    The segments in ``*path`` are joined; a relative result is resolved
    against ``relative_to`` (a directory, or a file -- typically
    ``__file__`` -- whose directory is used), falling back to the current
    working directory.

    Example::

        with PythonPath('vendor', relative_to=__file__):
            import bundled_module

    Changes vs. the original: ``relative_to`` is an explicit keyword-only
    parameter (the old ``**kwargs`` silently swallowed typoed keywords),
    and ``__exit__`` no longer raises ``ValueError`` -- which would mask an
    in-flight exception -- if the entry was already removed inside the block.
    """

    def __init__(self, *path, relative_to=None):
        joined = os.path.join(*path)
        if not os.path.isabs(joined):
            base = os.getcwd() if relative_to is None else relative_to
            # Allow passing a file; use its containing directory.
            if not os.path.isdir(base):
                base = os.path.dirname(base)
            joined = os.path.join(base, joined)
        # Normalise and resolve symlinks so insert and remove use one spelling.
        self.dir_path = os.path.realpath(joined)

    def __enter__(self):
        # Prepend so the managed directory wins over later sys.path entries.
        sys.path.insert(0, self.dir_path)
        return self.dir_path

    def __exit__(self, exc_type, exc_value, traceback):
        try:
            sys.path.remove(self.dir_path)
        except ValueError:
            # Entry was removed inside the block; nothing left to undo.
            pass
| <filename>python_path/__init__.py
import sys, os
class PythonPath(object):
def __init__(self, *path, **kwargs):
"""
path: defines
"""
relative_to = kwargs.get("relative_to", None)
path = os.path.join(*path)
if not os.path.isabs(path):
if relative_to is None:
base = os.getcwd()
else:
base = relative_to
base_is_file = not os.path.isdir(base)
if base_is_file:
base = os.path.dirname(base)
path = os.path.join(base, path)
self.dir_path = os.path.realpath(path)
def __enter__(self):
sys.path.insert(0, self.dir_path)
return self.dir_path
def __exit__(self, type, value, traceback):
sys.path.remove(self.dir_path)
| en | 0.573483 | path: defines | 3.494602 | 3 |
imagetagger/venv/lib/python3.7/base64.py | wrigsa/ImageTagger | 8 | 6619720 | <reponame>wrigsa/ImageTagger
/anaconda3/lib/python3.7/base64.py | /anaconda3/lib/python3.7/base64.py | none | 1 | 1.067781 | 1 | |
class_getitem.py | Kalpavrikshika/python_modules | 1 | 6619721 | class GetTest(object):
    def __init__(self):
        """Initialize the demo record exposed through __getitem__."""
        # Backing dict for dict-style access on the instance.
        self.info = {
            'name' : 'yasoob',
            'Country': 'Pakistan',
            'number' : 12345812
        }
    def __getitem__(self, i):
        """Delegate obj[i] to the backing dict; raises KeyError for unknown keys."""
        return self.info[i]
# Demonstrate dict-style indexing provided by __getitem__.
foo = GetTest()
print (foo['name'])
print (foo['number']) | class GetTest(object):
def __init__(self):
self.info = {
'name' : 'yasoob',
'Country': 'Pakistan',
'number' : 12345812
}
def __getitem__(self, i):
return self.info[i]
foo = GetTest()
print (foo['name'])
print (foo['number']) | none | 1 | 3.270578 | 3 | |
veripy/verify.py | AD1024/veripy | 15 | 6619722 | <filename>veripy/verify.py
import ast
import z3
import inspect
from typing import List, Tuple, TypeVar
from veripy.parser.syntax import *
from veripy.parser.parser import parse_assertion, parse_expr
from functools import wraps
from veripy.transformer import *
from functools import reduce
from veripy.prettyprint import pretty_print
from veripy import typecheck as tc
class VerificationStore:
    """Registry of verification scopes.

    Each scope name maps to:
      * ``'func_attrs'`` -- per-function metadata (input types, contracts, type)
      * ``'vf'``         -- list of ``(function_name, thunk)`` pairs; calling a
                            thunk runs that function's verification
    ``self.scope`` is a stack of scope names; ``self.scope[-1]`` is current.
    All operations are no-ops until :meth:`enable_verification` is called, so
    importing annotated code costs nothing by default.
    """

    def __init__(self):
        # scope name -> {'func_attrs': {...}, 'vf': [...]}
        self.store = dict()
        # Stack of scope names.
        self.scope = []
        # Master switch; verification bookkeeping is skipped while False.
        self.switch = False

    def enable_verification(self):
        """Turn on collection and checking of verification conditions."""
        self.switch = True

    def push(self, scope):
        """Open a new (unique) scope and make it current."""
        assert scope not in self.store
        self.scope.append(scope)
        self.store[scope] = {
            'func_attrs' : dict(),
            'vf' : []
        }

    def current_scope(self):
        """Name of the active scope, or None if no scope has been pushed."""
        if self.scope:
            return self.scope[-1]

    def push_verification(self, func_name, verification_func):
        """Queue `verification_func` to run when the current scope is verified."""
        if self.switch:
            if not self.scope:
                raise Exception('No Scope Defined')
            self.store[self.scope[-1]]['vf'].append((func_name, verification_func))

    def verify(self, scope, ignore_err):
        """Run every queued verification in `scope`.

        Failures are printed; they are re-raised unless `ignore_err` is true.
        """
        if self.switch and self.store:
            print(f'=> Verifying Scope `{scope}`')
            verifications = self.store[scope]
            for f_name, f in verifications['vf']:
                try:
                    f()
                except Exception as e:
                    print(f'Exception encountered while verifying {scope}::{f_name}')
                    if not ignore_err:
                        raise e
                    else:
                        print(e)
            print(f'=> End Of `{scope}`\n')

    def verify_all(self, ignore_err):
        """Verify every scope, newest first, popping each from the stack."""
        if self.switch:
            try:
                while self.scope:
                    self.verify(self.scope.pop(), ignore_err)
            except Exception as e:
                if not ignore_err:
                    raise e
                else:
                    print(e)

    def insert_func_attr(self, scope, fname, inputs=(), inputs_map=None, returns=tc.types.TANY, requires=[], ensures=[]):
        """Record signature/contract metadata for `fname` inside `scope`.

        `inputs_map` defaults to a fresh dict per call: the stored table
        aliases the argument, so the old shared ``{}`` default would have
        been aliased across every function registered without one.
        """
        if self.switch and self.store:
            self.store[scope]['func_attrs'][fname] = {
                'inputs' : inputs_map if inputs_map is not None else {},
                'ensures': ensures,
                'requires': requires,
                'returns' : returns,
                'func_type' : tc.types.TARROW(tc.types.TPROD(lambda i: i[1], inputs), returns)
            }

    def get_func_attr(self, fname):
        """Attributes of `fname` in the *current* scope, or None if unknown.

        Bug fix: the store is a dict keyed by scope name, so the current
        scope's entry is ``self.store[self.scope[-1]]`` -- the previous
        ``self.store[-1]`` indexed the dict with the key -1 and always
        raised KeyError.
        """
        if self.scope:
            return self.store[self.scope[-1]].get('func_attrs', dict()).get(fname)
        return None

    def current_func_attrs(self):
        """The whole func_attrs table of the current scope (None if no scope)."""
        if self.scope:
            return self.store[self.scope[-1]]['func_attrs']

    def get_func_attrs(self, scope, fname):
        """Attributes of `fname` in an explicit `scope` (requires a live stack)."""
        if self.scope:
            return self.store[scope]['func_attrs'][fname]
# Process-wide singleton holding all scopes and queued verifications.
STORE = VerificationStore()

def enable_verification():
    """Globally enable collection and execution of verification conditions."""
    STORE.enable_verification()

def scope(name : str):
    """Open a new verification scope named `name` and make it current."""
    STORE.push(name)

def do_verification(name : str, ignore_err : bool=True):
    """Run all queued verifications for scope `name`."""
    STORE.verify(name, ignore_err)

def verify_all(ignore_err : bool=True):
    """Verify every pushed scope, most recent first."""
    STORE.verify_all(ignore_err)

def invariant(inv):
    """Parse a loop-invariant assertion string into the expression AST."""
    return parse_assertion(inv)

def assume(C):
    """Runtime counterpart of an `assume`: fail fast when C is falsy."""
    if not C:
        raise RuntimeError('Assumption Violation')
def wp_seq(sigma, stmt, Q):
    """Weakest precondition of `s1; s2`: thread Q backwards through s2, then s1."""
    wp_mid, side_s2 = wp(sigma, stmt.s2, Q)
    wp_pre, side_s1 = wp(sigma, stmt.s1, wp_mid)
    return (wp_pre, side_s1.union(side_s2))
def wp_if(sigma, stmt, Q):
    """WP of `if cond: lb else: rb`.

    (cond ==> wp(lb, Q)) /\ (~cond ==> wp(rb, Q)); side conditions of the
    two branches are merged.
    """
    wp_then, side_then = wp(sigma, stmt.lb, Q)
    wp_else, side_else = wp(sigma, stmt.rb, Q)
    guard_then = BinOp(stmt.cond, BoolOps.Implies, wp_then)
    guard_else = BinOp(UnOp(BoolOps.Not, stmt.cond), BoolOps.Implies, wp_else)
    return (
        BinOp(guard_then, BoolOps.And, guard_else),
        side_then.union(side_else)
    )
def wp_while(stmt, Q, sigma=None):
    """WP of `while cond: body` with user-supplied invariants.

    Returns (I, C) where I is the conjunction of the loop invariants and C
    adds the two standard obligations:
      * I /\ cond  ==> wp(body, I)   (the invariant is inductive)
      * I /\ ~cond ==> Q             (the invariant establishes Q on exit)

    Bug fix: the recursive call was ``wp(s, combined_invars)`` -- two
    arguments to the three-argument ``wp`` -- so this function raised
    TypeError whenever invoked.  The typing environment is now accepted as
    a trailing optional `sigma` to stay compatible with two-argument calls.
    """
    cond = stmt.cond
    body = stmt.body
    invars = stmt.invariants
    combined_invars = Literal (VBool (True)) if not invars \
                        else reduce(lambda i1, i2: BinOp(i1, BoolOps.And, i2), invars)
    (p, c) = wp(sigma, body, combined_invars)
    return (combined_invars, c.union({
        BinOp(BinOp(combined_invars, BoolOps.And, cond), BoolOps.Implies, p),
        BinOp(BinOp(combined_invars, BoolOps.And, (UnOp(BoolOps.Not, cond))), BoolOps.Implies, Q)
    }))
def wp(sigma, stmt, Q):
    """Weakest precondition of `stmt` w.r.t. postcondition Q under typing env `sigma`.

    Returns (P, C): P is the weakest precondition; C is a set of side
    conditions that must hold independently (e.g. loop obligations).
    """
    return {
        Skip: lambda: (Q, set()),
        # assume e:  e ==> Q
        Assume: lambda: (BinOp(stmt.e, BoolOps.Implies, Q), set()),
        # x := e:  Q[e/x]
        Assign: lambda: (subst(stmt.var, stmt.expr, Q), set()),
        # assert e:  Q /\ e
        Assert: lambda: (BinOp(Q, BoolOps.And, stmt.e), set()),
        Seq: lambda: wp_seq(sigma, stmt, Q),
        If: lambda: wp_if(sigma, stmt, Q),
        # havoc x: quantify over a fresh x$0 of x's declared type
        Havoc: lambda: (Quantification(Var(stmt.var + '$0'), subst(stmt.var, Var(stmt.var + '$0'), Q), ty=sigma[stmt.var]), set())
        # NOTE(review): While is not dispatched here even though wp_while is
        # defined above -- loops currently fall through to the error branch.
    }.get(type(stmt), lambda: raise_exception(f'wp not implemented for {type(stmt)}'))()
def emit_smt(translator: Expr2Z3, solver, constraint : Expr, fail_msg : str):
    """Check that `constraint` is valid by asserting its negation in z3.

    A 'sat' answer means a counterexample exists, so an exception carrying
    the model and `fail_msg` is raised.

    Bug fix: the pushed solver frame is now popped in a ``finally`` block;
    previously ``solver.pop()`` was skipped when the violation exception was
    raised, leaving the negated constraint on the solver and corrupting
    every subsequent check on the same solver instance.
    """
    solver.push()
    try:
        const = translator.visit(UnOp(BoolOps.Not, constraint))
        solver.add(const)
        if str(solver.check()) == 'sat':
            model = solver.model()
            raise Exception(f'VerificationViolated on\n{const}\nModel: {model}\n{fail_msg}')
    finally:
        solver.pop()
def fold_constraints(constraints : List[str]):
    """Conjoin a list of assertion strings into one expression.

    Strings are parsed; already-parsed expressions are folded as-is.
    An empty list yields the trivially-true literal.
    """
    def as_expr(c):
        return parse_assertion(c) if isinstance(c, str) else c

    if len(constraints) >= 2:
        # Left-associative fold with logical And, parsing lazily.
        folded = as_expr(constraints[0])
        for c in constraints[1:]:
            folded = BinOp(folded, BoolOps.And, as_expr(c))
        return folded
    if len(constraints) == 1:
        return parse_assertion(constraints[0])
    return Literal(VBool(True))
def verify_func(func, scope, inputs, requires, ensures):
    """Statically verify `func` against its `requires`/`ensures` contracts.

    Pipeline: source text -> python AST -> veripy statement AST ->
    type check -> weakest precondition -> z3 validity checks.

    Raises if the precondition does not imply the computed wp, or if any
    side condition (e.g. a loop obligation) fails.
    """
    code = inspect.getsource(func)
    func_ast = ast.parse(code)
    # Translate the python AST into veripy's statement language.
    target_language_ast = StmtTranslator().visit(func_ast)
    func_attrs = STORE.get_func_attrs(scope, func.__name__)
    # Type-check the body; sigma maps variable names to their types.
    sigma = tc.type_check_stmt(func_attrs['inputs'], func_attrs, target_language_ast)
    user_precond = fold_constraints(requires)
    user_postcond = fold_constraints(ensures)
    # Contracts must be boolean-valued expressions.
    tc.type_check_expr(sigma, func_attrs, TBOOL, user_precond)
    tc.type_check_expr(sigma, func_attrs, TBOOL, user_postcond)
    (P, C) = wp(sigma, target_language_ast, user_postcond)
    # Main proof obligation: requires ==> wp(body, ensures).
    check_P = BinOp(user_precond, BoolOps.Implies, P)
    solver = z3.Solver()
    translator = Expr2Z3(declare_consts(sigma))
    emit_smt(translator, solver, check_P, f'Precondition does not imply wp at {func.__name__}')
    for c in C:
        emit_smt(translator, solver, c, f'Side condition violated at {func.__name__}')
    print(f'{func.__name__} Verified!')
def declare_consts(sigma : dict):
    """Declare one z3 constant per typed variable in `sigma`.

    Nested dict entries (function attribute tables) are skipped; only plain
    variable -> type bindings become constants.

    Bug fix: an unsupported type used to produce ``.get(ty)()`` on None --
    a cryptic ``TypeError: 'NoneType' object is not callable``.  It now
    raises an explicit NotImplementedError naming the type and variable.
    """
    consts = dict()
    for (name, ty) in sigma.items():
        if type(ty) != dict:
            factory = {
                tc.types.TINT: lambda: z3.Int(name),
                tc.types.TBOOL: lambda: z3.Bool(name)
            }.get(ty)
            if factory is None:
                raise NotImplementedError(f'No z3 sort mapping for type {ty!r} (variable {name!r})')
            consts[name] = factory()
    return consts
def parse_func_types(func, inputs=()):
    """Extract (param_types, name->type map, return_type) from `func`'s source.

    Type resolution per parameter: an annotation on the function wins,
    otherwise the (name, type) pairs in `inputs`, otherwise TANY.

    The mutable default argument (``inputs=[]``) was replaced with a tuple
    to rule out the shared-default pitfall; behaviour is unchanged since
    the argument is only read via ``dict(inputs)``.

    Raises:
        Exception: if the function has no return annotation -- the
        verifier requires one.
    """
    code = inspect.getsource(func)
    func_ast = ast.parse(code)
    func_def = func_ast.body[0]
    result = []
    provided = dict(inputs)
    for arg in func_def.args.args:
        if arg.annotation:
            result.append(tc.types.to_ast_type(arg.annotation))
        else:
            result.append(provided.get(arg.arg, tc.types.TANY))
        provided[arg.arg] = result[-1]
    if func_def.returns:
        ret_type = tc.types.to_ast_type(func_def.returns)
        return (result, provided, ret_type)
    else:
        raise Exception('Return annotation is required for verifying functions')
def verify(inputs: List[Tuple[str, tc.types.SUPPORTED]]=[], requires: List[str]=[], ensures: List[str]=[]):
    """Decorator registering the wrapped function for static verification.

    inputs:   optional (name, type) pairs for parameters lacking annotations
    requires: precondition assertion strings (conjoined)
    ensures:  postcondition assertion strings (conjoined)

    The decorated function's runtime behaviour is unchanged; the actual
    verification is queued on STORE and runs when the current scope is
    verified.
    """
    def verify_impl(func):
        @wraps(func)
        def caller(*args, **kargs):
            return func(*args, **kargs)
        # Record the function's type signature and contracts in the store ...
        types = parse_func_types(func, inputs=inputs)
        scope = STORE.current_scope()
        STORE.insert_func_attr(scope, func.__name__, types[0], types[1], types[2], requires, ensures)
        # ... and queue the verification thunk for this scope.
        STORE.push_verification(func.__name__, lambda: verify_func(func, scope, inputs, requires, ensures))
        return caller
    return verify_impl
import ast
import z3
import inspect
from typing import List, Tuple, TypeVar
from veripy.parser.syntax import *
from veripy.parser.parser import parse_assertion, parse_expr
from functools import wraps
from veripy.transformer import *
from functools import reduce
from veripy.prettyprint import pretty_print
from veripy import typecheck as tc
class VerificationStore:
def __init__(self):
self.store = dict()
self.scope = []
self.switch = False
def enable_verification(self):
self.switch = True
def push(self, scope):
assert scope not in self.store
self.scope.append(scope)
self.store[scope] = {
'func_attrs' : dict(),
'vf' : []
}
def current_scope(self):
if self.scope:
return self.scope[-1]
def push_verification(self, func_name, verification_func):
if self.switch:
if not self.scope:
raise Exception('No Scope Defined')
self.store[self.scope[-1]]['vf'].append((func_name, verification_func))
def verify(self, scope, ignore_err):
if self.switch and self.store:
print(f'=> Verifying Scope `{scope}`')
verifications = self.store[scope]
for f_name, f in verifications['vf']:
try:
f()
except Exception as e:
print(f'Exception encountered while verifying {scope}::{f_name}')
if not ignore_err:
raise e
else:
print(e)
print(f'=> End Of `{scope}`\n')
def verify_all(self, ignore_err):
if self.switch:
try:
while self.scope:
self.verify(self.scope.pop(), ignore_err)
except Exception as e:
if not ignore_err:
raise e
else:
print(e)
def insert_func_attr(self, scope, fname, inputs=[], inputs_map={}, returns=tc.types.TANY, requires=[], ensures=[]):
if self.switch and self.store:
self.store[scope]['func_attrs'][fname] = {
'inputs' : inputs_map,
'ensures': ensures,
'requires': requires,
'returns' : returns,
'func_type' : tc.types.TARROW(tc.types.TPROD(lambda i: i[1], inputs), returns)
}
def get_func_attr(self, fname):
if self.store:
return self.store[-1].get('func_attrs', dict()).get(fname)
return None
def current_func_attrs(self):
if self.scope:
return self.store[self.scope[-1]]['func_attrs']
def get_func_attrs(self, scope, fname):
if self.scope:
return self.store[scope]['func_attrs'][fname]
STORE = VerificationStore()
def enable_verification():
STORE.enable_verification()
def scope(name : str):
STORE.push(name)
def do_verification(name : str, ignore_err : bool=True):
STORE.verify(name, ignore_err)
def verify_all(ignore_err : bool=True):
STORE.verify_all(ignore_err)
def invariant(inv):
return parse_assertion(inv)
def assume(C):
if not C:
raise RuntimeError('Assumption Violation')
def wp_seq(sigma, stmt, Q):
(p2, c2) = wp(sigma, stmt.s2, Q)
(p1, c1) = wp(sigma, stmt.s1, p2)
return (p1, c1.union(c2))
def wp_if(sigma, stmt, Q):
(p1, c1) = wp(sigma, stmt.lb, Q)
(p2, c2) = wp(sigma, stmt.rb, Q)
return (
BinOp(
BinOp(stmt.cond, BoolOps.Implies, p1),
BoolOps.And,
BinOp(
UnOp(BoolOps.Not, stmt.cond), BoolOps.Implies, p2
)
),
c1.union(c2)
)
def wp_while(stmt, Q):
cond = stmt.cond
s = stmt.body
invars = stmt.invariants
combined_invars = Literal (VBool (True)) if not invars \
else reduce(lambda i1, i2: BinOp(i1, BoolOps.And, i2), invars)
(p, c) = wp(s, combined_invars)
return (combined_invars, c.union({
BinOp(BinOp(combined_invars, BoolOps.And, cond), BoolOps.Implies, p),
BinOp(BinOp(combined_invars, BoolOps.And, (UnOp(BoolOps.Not, cond))), BoolOps.Implies, Q)
}))
def wp(sigma, stmt, Q):
return {
Skip: lambda: (Q, set()),
Assume: lambda: (BinOp(stmt.e, BoolOps.Implies, Q), set()),
Assign: lambda: (subst(stmt.var, stmt.expr, Q), set()),
Assert: lambda: (BinOp(Q, BoolOps.And, stmt.e), set()),
Seq: lambda: wp_seq(sigma, stmt, Q),
If: lambda: wp_if(sigma, stmt, Q),
Havoc: lambda: (Quantification(Var(stmt.var + '$0'), subst(stmt.var, Var(stmt.var + '$0'), Q), ty=sigma[stmt.var]), set())
}.get(type(stmt), lambda: raise_exception(f'wp not implemented for {type(stmt)}'))()
def emit_smt(translator: Expr2Z3, solver, constraint : Expr, fail_msg : str):
solver.push()
const = translator.visit(UnOp(BoolOps.Not, constraint))
solver.add(const)
if str(solver.check()) == 'sat':
model = solver.model()
raise Exception(f'VerificationViolated on\n{const}\nModel: {model}\n{fail_msg}')
solver.pop()
def fold_constraints(constraints: List[str]):
    """Conjoin a list of assertion source strings into one expression.

    Empty list -> literal True; single item -> its parsed form; otherwise a
    left-folded chain of BoolOps.And nodes (strings parsed on first use).
    """
    def as_expr(item):
        return parse_assertion(item) if isinstance(item, str) else item

    if not constraints:
        return Literal(VBool(True))
    if len(constraints) == 1:
        return parse_assertion(constraints[0])
    return reduce(lambda acc, nxt: BinOp(as_expr(acc), BoolOps.And, as_expr(nxt)),
                  constraints)
def verify_func(func, scope, inputs, requires, ensures):
    """Verify one registered function against its pre/post-conditions.

    Re-parses the function's source into the target-language AST, type
    checks it together with the user-supplied requires/ensures clauses,
    computes the weakest precondition of the body, and discharges every
    obligation with Z3 (raising, via emit_smt, on failure).
    """
    code = inspect.getsource(func)
    func_ast = ast.parse(code)
    target_language_ast = StmtTranslator().visit(func_ast)
    func_attrs = STORE.get_func_attrs(scope, func.__name__)
    sigma = tc.type_check_stmt(func_attrs['inputs'], func_attrs, target_language_ast)
    user_precond = fold_constraints(requires)
    user_postcond = fold_constraints(ensures)
    # Both contract clauses must themselves be boolean-typed.
    tc.type_check_expr(sigma, func_attrs, TBOOL, user_precond)
    tc.type_check_expr(sigma, func_attrs, TBOOL, user_postcond)
    (P, C) = wp(sigma, target_language_ast, user_postcond)
    # Main obligation: the declared precondition implies the computed wp.
    check_P = BinOp(user_precond, BoolOps.Implies, P)
    solver = z3.Solver()
    translator = Expr2Z3(declare_consts(sigma))
    emit_smt(translator, solver, check_P, f'Precondition does not imply wp at {func.__name__}')
    for c in C:
        emit_smt(translator, solver, c, f'Side condition violated at {func.__name__}')
    print(f'{func.__name__} Verified!')
def declare_consts(sigma: dict):
    """Create a Z3 constant for each simple-typed variable in *sigma*.

    Nested dict entries (e.g. function attribute maps) are skipped; only
    TINT and TBOOL variables are supported.

    Raises:
        ValueError: for an unsupported variable type. (The original code
        died here with an opaque ``'NoneType' object is not callable``.)
    """
    consts = dict()
    for (name, ty) in sigma.items():
        if type(ty) == dict:
            continue
        if ty == tc.types.TINT:
            consts[name] = z3.Int(name)
        elif ty == tc.types.TBOOL:
            consts[name] = z3.Bool(name)
        else:
            raise ValueError(f'declare_consts: unsupported type {ty} for variable {name}')
    return consts
def parse_func_types(func, inputs=[]):
    """Extract parameter and return types for *func* from its annotations.

    Unannotated parameters fall back to the caller-supplied ``inputs``
    pairs, then to TANY. Returns a triple:
    (ordered parameter types, name -> type dict, return type).

    NOTE(review): the mutable default ``inputs=[]`` is shared across calls;
    it is only read here, but confirm no caller mutates it.

    Raises:
        Exception: if the function lacks a return annotation.
    """
    code = inspect.getsource(func)
    func_ast = ast.parse(code)
    func_def = func_ast.body[0]
    result = []
    provided = dict(inputs)
    for i in func_def.args.args:
        if i.annotation:
            result.append(tc.types.to_ast_type(i.annotation))
        else:
            # Fall back to an explicitly provided type, else "any".
            result.append(provided.get(i.arg, tc.types.TANY))
        provided[i.arg] = result[-1]
    if func_def.returns:
        ret_type = tc.types.to_ast_type(func_def.returns)
        return (result, provided, ret_type)
    else:
        raise Exception('Return annotation is required for verifying functions')
def verify(inputs: List[Tuple[str, tc.types.SUPPORTED]]=[], requires: List[str]=[], ensures: List[str]=[]):
    """Decorator registering a function for deferred verification.

    The wrapped function behaves exactly like the original at call time;
    its type information and the requires/ensures contracts are recorded in
    the global STORE, and a thunk running verify_func is queued for later
    execution (see do_verification / verify_all).
    """
    def verify_impl(func):
        @wraps(func)
        def caller(*args, **kargs):
            return func(*args, **kargs)
        types = parse_func_types(func, inputs=inputs)
        scope = STORE.current_scope()
        STORE.insert_func_attr(scope, func.__name__, types[0], types[1], types[2], requires, ensures)
        # Capture everything needed so verification can run after decoration.
        STORE.push_verification(func.__name__, lambda: verify_func(func, scope, inputs, requires, ensures))
        return caller
    return verify_impl
Connect4.py | jag567/Connect-4 | 0 | 6619723 | <reponame>jag567/Connect-4
import random
class Connect4:
    """Connect-4 game state loaded from a database.

    The board is 6 rows x 7 columns of ' ', 'X' or 'O'; row 0 is the top.
    Turn 0 belongs to 'X', turn 1 to 'O'.
    """
    def __init__(self, db, game_id):
        """Load turn, creation time, winner, board cells and player names
        for *game_id* from the given DB connection."""
        self.board = []
        self.players = {}
        self.winner = None
        # Start from an empty 6x7 grid; stored moves are overlaid below.
        for row in range(6):
            self.board.append([])
            for col in range(7):
                self.board[row].append(' ')
        curs = db.cursor()
        try:
            # NOTE(review): game_id is formatted straight into the SQL; this
            # assumes callers pass a trusted integer — prefer parameterized
            # queries if that cannot be guaranteed.
            curs.execute("select turn, created, winner from games where game_id = {};".format(game_id))
            game = curs.fetchone()
            self.turn = int(game[0])
            self.created = game[1]
            if game[2]:
                self.winner = game[2]
            curs.execute("select row, col, player from board where game_id = {};".format(game_id))
            for row, col, player in curs.fetchall():
                self.board[row][col] = player;
            curs.execute("select player, player_name from players where game_id = {};".format(game_id))
            for player, player_name in curs.fetchall():
                self.players[player] = player_name
        except:
            # Any DB/parse failure is surfaced uniformly to the caller.
            raise Exception("Unable to load game state")
        finally:
            curs.close()
    def get_board(self):
        """Return the 6x7 grid of ' ', 'X', 'O'."""
        return self.board
    def get_turn(self):
        """Return 0 ('X' to move) or 1 ('O' to move)."""
        return self.turn
    def get_players(self):
        """Return the mapping of player symbol -> player name."""
        return self.players
    def get_created(self):
        """Return the game's creation timestamp as loaded from the DB."""
        return self.created
    def get_curr_player(self):
        """Symbol of the player whose turn it is."""
        return ('X', 'O') [self.turn]
    def get_opponent(self):
        """Symbol of the player who is not to move."""
        return ('X', 'O') [(self.turn + 1) % 2]
    def get_winner(self):
        """Return the winning symbol, or None while the game is undecided."""
        return self.winner
    def next_turn(self):
        """Advance to the other player's turn and return the new turn index."""
        self.turn = (self.turn + 1) % 2
        return self.turn
    def make_move(self, move, player=None):
        """Drop *player*'s piece into column *move*; return the landing row.

        Defaults to the current player's symbol and updates self.winner if
        the move completes four in a row.
        NOTE(review): callers must ensure the column is not full — on a full
        column this overwrites the top cell (row 0).
        """
        # Find the lowest empty row: stop just above the first occupied cell.
        for row in range(len(self.board)):
            if row < len(self.board) - 1:
                if self.board[row + 1][move] != ' ':
                    break
        if not player:
            player = ('X', 'O') [self.turn]
        self.board[row][move] = player
        # Check all four directions; stop once a winner is recorded.
        if not self.winner:
            self.check_across(row, move, player)
        if not self.winner:
            self.check_down(row, move, player)
        if not self.winner:
            self.check_lr(row, move, player)
        if not self.winner:
            self.check_rl(row, move, player)
        return row
    def check_across(self, row, col, player):
        """Set winner if (row, col) completes 4-in-a-row horizontally."""
        count = 0
        co = col
        # Count contiguous pieces leftwards (including the placed one)...
        while co >= 0 and self.board[row][co] == player:
            count += 1
            co -= 1
        co = col + 1
        # ...then rightwards.
        while co < len(self.board[row]) and self.board[row][co] == player:
            count += 1
            co += 1
        if count >= 4:
            self.winner = player
    def check_down(self, row, col, player):
        """Set winner if (row, col) completes 4-in-a-row vertically."""
        count = 0
        ro = row
        while ro >= 0 and self.board[ro][col] == player:
            count += 1
            ro -= 1
        ro = row + 1
        while ro < len(self.board) and self.board[ro][col] == player:
            count += 1
            ro += 1
        if count >= 4:
            self.winner = player
    def check_lr(self, row, col, player):
        """Set winner on the top-left -> bottom-right diagonal."""
        count = 0
        ro = row
        co = col
        while min(ro, co) >= 0 and self.board[ro][co] == player:
            count += 1
            ro -= 1
            co -= 1
        ro = row + 1
        co = col + 1
        while ro < len(self.board) and co < len(self.board[ro]) and self.board[ro][co] == player:
            count += 1
            ro += 1
            co += 1
        if count >= 4:
            self.winner = player
    def check_rl(self, row, col, player):
        """Set winner on the bottom-left -> top-right diagonal."""
        count = 0
        ro = row
        co = col
        while ro >= 0 and co < len(self.board[ro]) and self.board[ro][co] == player:
            count += 1
            ro -= 1
            co += 1
        ro = row + 1
        co = col - 1
        while ro < len(self.board) and co >= 0 and self.board[ro][co] == player:
            count += 1
            ro += 1
            co -= 1
        if count >= 4:
            self.winner = player
    def select_move(self):
        """Pick a column for the current player via bounded look-ahead.

        Scores every column with score_moves, then chooses randomly among
        the best-scoring non-full columns; returns None if the board is full.
        """
        # 8**6 gives depth-proportional weights (score is divided by 8 per ply).
        score = 8 ** 6
        points = [0 for _ in range(len(self.board[0]))]
        turn = self.get_turn()
        self.score_moves(turn, points, score, None)
        best = None
        for move in range(len(points)):
            if not self.is_col_full(move):
                if best is None or best < points[move]:
                    best = points[move]
        moves = []
        for move in range(len(points)):
            if points[move] == best and not self.is_col_full(move):
                moves.append(move)
        if moves:
            return random.choice(moves)
        else:
            return None
    def score_moves(self, turn, points, score, pos):
        """Recursively score each first move by exploring the game tree.

        A win for the side to move in the real game (self.turn) adds *score*
        to the originating column; an opponent win subtracts it. Recursion
        stops when score/8 reaches zero. Every explored move is undone with
        remove_move.
        """
        if score < 1 or not self.more_moves():
            return
        for move in range(len(self.board[0])):
            if self.is_col_full(move):
                continue
            # pos tracks which top-level column this subtree belongs to.
            if pos is None:
                index = move
            else:
                index = pos
            self.make_move(move, ('X', 'O') [turn])
            if self.get_winner():
                if turn == self.turn:
                    points[index] += score
                else:
                    points[index] -= score
            else:
                self.score_moves((turn + 1) % 2, points, int(score / 8), index)
            self.remove_move(move)
    def remove_move(self, move):
        """Undo the topmost piece in column *move* and clear any winner flag."""
        for row in range(len(self.board)):
            if self.board[row][move] != ' ':
                break
        self.board[row][move] = ' '
        self.winner = None
    def more_moves(self):
        """True while at least one column still has room."""
        for col in self.board[0]:
            if col == ' ':
                return True
        return False
    def is_col_full(self, move):
        """True if column *move* has no empty cell at the top."""
        return self.board[0][move] != ' '
| import random
class Connect4:
def __init__(self, db, game_id):
self.board = []
self.players = {}
self.winner = None
for row in range(6):
self.board.append([])
for col in range(7):
self.board[row].append(' ')
curs = db.cursor()
try:
curs.execute("select turn, created, winner from games where game_id = {};".format(game_id))
game = curs.fetchone()
self.turn = int(game[0])
self.created = game[1]
if game[2]:
self.winner = game[2]
curs.execute("select row, col, player from board where game_id = {};".format(game_id))
for row, col, player in curs.fetchall():
self.board[row][col] = player;
curs.execute("select player, player_name from players where game_id = {};".format(game_id))
for player, player_name in curs.fetchall():
self.players[player] = player_name
except:
raise Exception("Unable to load game state")
finally:
curs.close()
def get_board(self):
return self.board
def get_turn(self):
return self.turn
def get_players(self):
return self.players
def get_created(self):
return self.created
def get_curr_player(self):
return ('X', 'O') [self.turn]
def get_opponent(self):
return ('X', 'O') [(self.turn + 1) % 2]
def get_winner(self):
return self.winner
def next_turn(self):
self.turn = (self.turn + 1) % 2
return self.turn
def make_move(self, move, player=None):
for row in range(len(self.board)):
if row < len(self.board) - 1:
if self.board[row + 1][move] != ' ':
break
if not player:
player = ('X', 'O') [self.turn]
self.board[row][move] = player
if not self.winner:
self.check_across(row, move, player)
if not self.winner:
self.check_down(row, move, player)
if not self.winner:
self.check_lr(row, move, player)
if not self.winner:
self.check_rl(row, move, player)
return row
def check_across(self, row, col, player):
count = 0
co = col
while co >= 0 and self.board[row][co] == player:
count += 1
co -= 1
co = col + 1
while co < len(self.board[row]) and self.board[row][co] == player:
count += 1
co += 1
if count >= 4:
self.winner = player
def check_down(self, row, col, player):
count = 0
ro = row
while ro >= 0 and self.board[ro][col] == player:
count += 1
ro -= 1
ro = row + 1
while ro < len(self.board) and self.board[ro][col] == player:
count += 1
ro += 1
if count >= 4:
self.winner = player
def check_lr(self, row, col, player):
count = 0
ro = row
co = col
while min(ro, co) >= 0 and self.board[ro][co] == player:
count += 1
ro -= 1
co -= 1
ro = row + 1
co = col + 1
while ro < len(self.board) and co < len(self.board[ro]) and self.board[ro][co] == player:
count += 1
ro += 1
co += 1
if count >= 4:
self.winner = player
def check_rl(self, row, col, player):
count = 0
ro = row
co = col
while ro >= 0 and co < len(self.board[ro]) and self.board[ro][co] == player:
count += 1
ro -= 1
co += 1
ro = row + 1
co = col - 1
while ro < len(self.board) and co >= 0 and self.board[ro][co] == player:
count += 1
ro += 1
co -= 1
if count >= 4:
self.winner = player
def select_move(self):
score = 8 ** 6
points = [0 for _ in range(len(self.board[0]))]
turn = self.get_turn()
self.score_moves(turn, points, score, None)
best = None
for move in range(len(points)):
if not self.is_col_full(move):
if best is None or best < points[move]:
best = points[move]
moves = []
for move in range(len(points)):
if points[move] == best and not self.is_col_full(move):
moves.append(move)
if moves:
return random.choice(moves)
else:
return None
def score_moves(self, turn, points, score, pos):
if score < 1 or not self.more_moves():
return
for move in range(len(self.board[0])):
if self.is_col_full(move):
continue
if pos is None:
index = move
else:
index = pos
self.make_move(move, ('X', 'O') [turn])
if self.get_winner():
if turn == self.turn:
points[index] += score
else:
points[index] -= score
else:
self.score_moves((turn + 1) % 2, points, int(score / 8), index)
self.remove_move(move)
def remove_move(self, move):
for row in range(len(self.board)):
if self.board[row][move] != ' ':
break
self.board[row][move] = ' '
self.winner = None
def more_moves(self):
for col in self.board[0]:
if col == ' ':
return True
return False
def is_col_full(self, move):
return self.board[0][move] != ' ' | none | 1 | 2.981476 | 3 | |
examples/sandbox/sandbox/element_buttons.py | salt-die/nurses_2 | 171 | 6619724 | <gh_stars>100-1000
from nurses_2.colors import ColorPair, Color, BLACK
from nurses_2.widgets.text_widget import TextWidget
from nurses_2.widgets.behaviors.button_behavior import ButtonBehavior
from .particles import Element
MENU_BACKGROUND_COLOR = Color(222, 224, 127) # Mustard
class ElementButton(ButtonBehavior, TextWidget):
    """
    Button which selects an element when pressed and updates the element display.
    """
    def __init__(self, pos, element):
        # The particle element this button selects on release.
        self.element = element
        super().__init__(
            size=(2, 4),
            pos=pos,
            default_color_pair=ColorPair.from_colors(BLACK, element.COLOR),
            always_release=True,
        )
        # Pressed-state colour: each channel blended 50% towards white.
        self.down_color = ColorPair.from_colors(
            BLACK,
            Color(*(127 + c // 2 for c in element.COLOR)),
        )
    def update_down(self):
        # Visual feedback while the button is held down.
        self.colors[:] = self.down_color
    def update_normal(self):
        # Restore the element's normal colour.
        self.colors[:] = self.default_color_pair
    def on_release(self):
        """Select this button's element and show its name in the display."""
        element = self.element
        # Widget tree: button -> ButtonContainer -> sandbox.
        sandbox = self.parent.parent
        sandbox.particle_type = element
        sandbox.display.add_text(f"{element.__name__:^{sandbox.display.width}}")
class ButtonContainer(TextWidget):
    """
    Container widget of `ElementButton`s.
    """
    def __init__(self):
        nelements = len(Element.all_elements)
        # Height: 3 rows per button (2-row button + 1-row gap) plus one trailing row.
        super().__init__(
            size=(3 * nelements + 1, 8),
            default_color_pair=ColorPair.from_colors(BLACK, MENU_BACKGROUND_COLOR),
        )
        for i, element in enumerate(Element.all_elements.values()):
            self.add_widget(ElementButton(pos=(3 * i + 1, 2), element=element))
    def on_click(self, mouse_event):
        # Report whether the click landed on the container.
        return self.collides_point(mouse_event.position)
| from nurses_2.colors import ColorPair, Color, BLACK
from nurses_2.widgets.text_widget import TextWidget
from nurses_2.widgets.behaviors.button_behavior import ButtonBehavior
from .particles import Element
MENU_BACKGROUND_COLOR = Color(222, 224, 127) # Mustard
class ElementButton(ButtonBehavior, TextWidget):
"""
Button which selects an element when pressed and updates the element display.
"""
def __init__(self, pos, element):
self.element = element
super().__init__(
size=(2, 4),
pos=pos,
default_color_pair=ColorPair.from_colors(BLACK, element.COLOR),
always_release=True,
)
self.down_color = ColorPair.from_colors(
BLACK,
Color(*(127 + c // 2 for c in element.COLOR)),
)
def update_down(self):
self.colors[:] = self.down_color
def update_normal(self):
self.colors[:] = self.default_color_pair
def on_release(self):
element = self.element
sandbox = self.parent.parent
sandbox.particle_type = element
sandbox.display.add_text(f"{element.__name__:^{sandbox.display.width}}")
class ButtonContainer(TextWidget):
"""
Container widget of `ElementButton`s.
"""
def __init__(self):
nelements = len(Element.all_elements)
super().__init__(
size=(3 * nelements + 1, 8),
default_color_pair=ColorPair.from_colors(BLACK, MENU_BACKGROUND_COLOR),
)
for i, element in enumerate(Element.all_elements.values()):
self.add_widget(ElementButton(pos=(3 * i + 1, 2), element=element))
def on_click(self, mouse_event):
return self.collides_point(mouse_event.position) | en | 0.57409 | # Mustard Button which selects an element when pressed and updates the element display. Container widget of `ElementButton`s. | 2.926749 | 3 |
django_modals/helper.py | jonesim/django-modals | 8 | 6619725 | <reponame>jonesim/django-modals
import json
from base64 import urlsafe_b64encode
from ajax_helpers.templatetags.ajax_helpers import button_javascript
from django.urls import reverse, resolve, NoReverseMatch
from django.template.loader import render_to_string
from crispy_forms.layout import HTML, Div
from django.utils.safestring import mark_safe
# Placeholder substituted into reversed URLs before the real slug is known.
DUMMY_SLUG = 'DUMMY-SLUG'

# Font-Awesome icon markup for the built-in modal button types.
modal_buttons = {
    'edit': '<i class="fas fa-edit"></i>',
    'add': '<i class="fas fa-plus-circle p-1"></i>',
    'delete': '<i class="fas fa-trash"></i>',
}
# Bootstrap progress-bar snippet; the two format slots take an element-id
# suffix and an extra CSS class suffix respectively.
progress_bar_html = '''
<div class="progress" style="margin-top: 5px;">
<div id='file_progress_bar{}' class="progress-bar{}" role="progressbar" aria-valuenow="0"
aria-valuemin="0" aria-valuemax="100" style="width: 0%">
</div>
</div>
'''


def progress_bar(progress_id=None, css=''):
    """Render the progress-bar snippet.

    *progress_id*, if given, is appended to the element id as '_<id>';
    *css*, if non-empty, is appended to the class list with a leading space.
    """
    id_suffix = '' if progress_id is None else '_' + str(progress_id)
    css_suffix = ' ' + css if css else ''
    return progress_bar_html.format(id_suffix, css_suffix)
def make_slug(*args, make_pk=False):
    """Concatenate *args* into a slug string.

    With make_pk=True, a 'pk-' prefix is added when the result contains no
    dash (i.e. it looks like a bare primary key).
    """
    joined = ''.join(str(part) for part in args)
    if not make_pk or '-' in joined:
        return joined
    return 'pk-' + joined
def show_modal(modal_name, *args, base64=False, datatable=False, href=False, button=None,
               button_classes='btn btn-primary mx-1', row=False, font_awesome=None):
    """Build the javascript (or an anchor tag) that opens the named modal.

    The URL is reversed with DUMMY_SLUG and the real slug substituted at the
    end. *args are joined into the slug, or *base64* data is JSON+base64
    encoded as the slug. datatable=True inserts '%ref%' (and '-row-%row%')
    placeholders for client-side substitution; href=True prefixes
    'javascript:'; *button* wraps everything in an <a> styled with
    *button_classes* (known names map to icons via modal_buttons).
    """
    try:
        # Prefer the slug-taking URL pattern; fall back to a slug-less one.
        javascript = f"django_modal.show_modal('{reverse(modal_name, args=[DUMMY_SLUG])}')"
    except NoReverseMatch:
        javascript = f"django_modal.show_modal('{reverse(modal_name)}')"
    if base64:
        slug = urlsafe_b64encode(json.dumps(base64).encode('utf8')).decode('ascii')
    else:
        slug = make_slug(*args)
    if datatable:
        if base64:
            slug = '%ref%'
        else:
            if slug:
                slug += '-'
            slug += 'pk-%ref%'
        if row:
            slug += '-row-%row%'
    if href:
        javascript = 'javascript:' + javascript
    if button is not None:
        # Known button names map to icons; anything else is used verbatim.
        button_text = modal_buttons.get(button, button)
        if font_awesome:
            button_text = f'<i class="{font_awesome}"></i> {button_text}'
        javascript = f'<a {css_classes(button_classes)} href="javascript:{javascript}">{button_text}</a>'
    if not slug:
        slug = '-'
    return javascript.replace(DUMMY_SLUG, slug)
def render_modal(template_name='django_modals/modal_base.html', **kwargs):
    """Render a modal template, appending the button group to its contents.

    If a request is supplied and no explicit modal_url, the request's full
    path becomes the modal URL.
    """
    if 'request' in kwargs and 'modal_url' not in kwargs:
        kwargs['modal_url'] = kwargs['request'].get_full_path()
    # Forward only the button-layout kwargs that were actually provided.
    button_kwargs = {a: kwargs[a] for a in ['button_group_class', 'button_container_class'] if a in kwargs}
    kwargs['contents'] = mark_safe(kwargs.get('contents', '') + modal_button_group(kwargs.get('modal_buttons', None),
                                                                                  **button_kwargs))
    return render_to_string(template_name, kwargs)
def css_classes(classes):
    """Format a class attribute (with leading space), or '' when empty."""
    if not classes:
        return ''
    return f' class="{classes}"'
def crispy_modal_link(modal_name, text, div=False, div_classes='', button_classes=''):
    """Crispy-forms layout object that opens *modal_name*, optionally
    wrapped in a Div with the given classes."""
    anchor = HTML(show_modal(modal_name, button=text, button_classes=button_classes))
    if not div:
        return anchor
    return Div(anchor, css_class=div_classes)
def modal_button(title, commands, css_class='btn-primary'):
    """Render a <button> that runs one or more django_modal commands.

    *commands* may be a function-name string, a single command dict, or a
    list of command dicts; it is normalised to a list before being JSON
    encoded into the onclick handler.
    """
    if type(commands) == str:
        params = [{'function': commands}]
    elif type(commands) == dict:
        params = [commands]
    else:
        params = commands
    return mark_safe(f'''<button onclick='django_modal.process_commands_lock({json.dumps(params)})'
class="btn {css_class}">{title}</button>''')
def modal_button_method(title, method_name, css_class='btn-primary', **kwargs):
    """Shortcut for modal_button that posts *method_name* back to the modal."""
    return modal_button(title, dict(function='post_modal', button=dict(button=method_name, **kwargs)), css_class)
def modal_button_group(buttons=None, button_container_class=None, button_group_class='btn-group'):
    """Wrap button markup in the standard form-buttons / button-group divs.

    *buttons* may be a pre-rendered string (used verbatim, even if empty)
    or an iterable of button strings (joined); falsy non-string input
    yields ''.
    """
    container = 'form-buttons'
    if button_container_class:
        container = container + ' ' + button_container_class
    if type(buttons) == str:
        content = buttons
    elif buttons:
        content = ''.join(buttons)
    else:
        return ''
    return f'<div class="{container}"><div class="{button_group_class}">{content}</div></div>'
def modal_delete_javascript(url_name, pk):
    """Return the ajax-helpers delete-button javascript for *pk*, with double
    quotes swapped to singles so it can sit inside an HTML attribute."""
    return mark_safe(button_javascript('delete', url_name=url_name, url_args=[pk]).replace('"', "'"))
def reverse_modal(modal_name, slug='-', base64=None):
    """Reverse a modal URL, trying the slug-taking route first.

    If *base64* is given it is JSON+base64 encoded and used as the slug.
    Falls back to the no-argument route only for the default '-' slug;
    otherwise NoReverseMatch is re-raised.
    """
    if base64:
        slug = urlsafe_b64encode(json.dumps(base64).encode('utf8')).decode('ascii')
    try:
        return reverse(modal_name, args=[slug])
    except NoReverseMatch:
        if slug == '-':
            return reverse(modal_name)
        else:
            raise NoReverseMatch
def ajax_modal_redirect(modal_name, slug='-', base64=None):
    """Command list that closes the current modal and opens *modal_name*."""
    target = reverse_modal(modal_name, slug=slug, base64=base64)
    return [
        {'function': 'close'},
        {'function': 'show_modal', 'modal': target},
    ]
def ajax_modal_replace(request, modal_name=None, modal_class=None, slug='-', ajax_function='overwrite_modal', **kwargs):
    """Re-render a modal view in-place and return the ajax command for it.

    Either *modal_class* is used directly, or *modal_name* is reversed and
    resolved through the URLconf. The request is mutated to a GET first.
    NOTE(review): request.method is set to lowercase 'get'; Django's dispatch
    lowercases before comparing, but 'GET' would be the conventional value —
    confirm nothing else inspects request.method directly.
    """
    request.method = 'get'
    if modal_class:
        view_class = modal_class
    else:
        request.path = reverse_modal(modal_name, slug)
        view_class = resolve(request.path).func.view_class
    return {'function': ajax_function, 'html': view_class.as_view()(request, slug=slug, **kwargs).rendered_content}
| import json
from base64 import urlsafe_b64encode
from ajax_helpers.templatetags.ajax_helpers import button_javascript
from django.urls import reverse, resolve, NoReverseMatch
from django.template.loader import render_to_string
from crispy_forms.layout import HTML, Div
from django.utils.safestring import mark_safe
DUMMY_SLUG = 'DUMMY-SLUG'
modal_buttons = {
'edit': '<i class="fas fa-edit"></i>',
'add': '<i class="fas fa-plus-circle p-1"></i>',
'delete': '<i class="fas fa-trash"></i>',
}
progress_bar_html = '''
<div class="progress" style="margin-top: 5px;">
<div id='file_progress_bar{}' class="progress-bar{}" role="progressbar" aria-valuenow="0"
aria-valuemin="0" aria-valuemax="100" style="width: 0%">
</div>
</div>
'''
def progress_bar(progress_id=None, css=''):
if progress_id is not None:
progress_id = '_' + str(progress_id)
else:
progress_id = ''
if css:
css = ' ' + css
return progress_bar_html.format(progress_id, css)
def make_slug(*args, make_pk=False):
slug = ''.join([str(a) for a in args])
if make_pk and '-' not in slug:
slug = 'pk-' + slug
return slug
def show_modal(modal_name, *args, base64=False, datatable=False, href=False, button=None,
button_classes='btn btn-primary mx-1', row=False, font_awesome=None):
try:
javascript = f"django_modal.show_modal('{reverse(modal_name, args=[DUMMY_SLUG])}')"
except NoReverseMatch:
javascript = f"django_modal.show_modal('{reverse(modal_name)}')"
if base64:
slug = urlsafe_b64encode(json.dumps(base64).encode('utf8')).decode('ascii')
else:
slug = make_slug(*args)
if datatable:
if base64:
slug = '%ref%'
else:
if slug:
slug += '-'
slug += 'pk-%ref%'
if row:
slug += '-row-%row%'
if href:
javascript = 'javascript:' + javascript
if button is not None:
button_text = modal_buttons.get(button, button)
if font_awesome:
button_text = f'<i class="{font_awesome}"></i> {button_text}'
javascript = f'<a {css_classes(button_classes)} href="javascript:{javascript}">{button_text}</a>'
if not slug:
slug = '-'
return javascript.replace(DUMMY_SLUG, slug)
def render_modal(template_name='django_modals/modal_base.html', **kwargs):
if 'request' in kwargs and 'modal_url' not in kwargs:
kwargs['modal_url'] = kwargs['request'].get_full_path()
button_kwargs = {a: kwargs[a] for a in ['button_group_class', 'button_container_class'] if a in kwargs}
kwargs['contents'] = mark_safe(kwargs.get('contents', '') + modal_button_group(kwargs.get('modal_buttons', None),
**button_kwargs))
return render_to_string(template_name, kwargs)
def css_classes(classes):
return f' class="{classes}"' if classes else ''
def crispy_modal_link(modal_name, text, div=False, div_classes='', button_classes=''):
link = HTML(show_modal(modal_name, button=text, button_classes=button_classes))
if div:
link = Div(link, css_class=div_classes)
return link
def modal_button(title, commands, css_class='btn-primary'):
if type(commands) == str:
params = [{'function': commands}]
elif type(commands) == dict:
params = [commands]
else:
params = commands
return mark_safe(f'''<button onclick='django_modal.process_commands_lock({json.dumps(params)})'
class="btn {css_class}">{title}</button>''')
def modal_button_method(title, method_name, css_class='btn-primary', **kwargs):
return modal_button(title, dict(function='post_modal', button=dict(button=method_name, **kwargs)), css_class)
def modal_button_group(buttons=None, button_container_class=None, button_group_class='btn-group'):
group_class = f'form-buttons{" " + button_container_class if button_container_class else ""}'
if type(buttons) == str:
return f'<div class="{group_class}"><div class="{button_group_class}">{buttons}</div></div>'
if buttons:
return (f'<div class="{group_class}">'
f'<div class="{button_group_class}">{"".join(buttons)}</div></div>')
return ''
def modal_delete_javascript(url_name, pk):
return mark_safe(button_javascript('delete', url_name=url_name, url_args=[pk]).replace('"', "'"))
def reverse_modal(modal_name, slug='-', base64=None):
if base64:
slug = urlsafe_b64encode(json.dumps(base64).encode('utf8')).decode('ascii')
try:
return reverse(modal_name, args=[slug])
except NoReverseMatch:
if slug == '-':
return reverse(modal_name)
else:
raise NoReverseMatch
def ajax_modal_redirect(modal_name, slug='-', base64=None):
return [{'function': 'close'}, {'function': 'show_modal', 'modal': reverse_modal(modal_name, slug=slug,
base64=base64)}]
def ajax_modal_replace(request, modal_name=None, modal_class=None, slug='-', ajax_function='overwrite_modal', **kwargs):
request.method = 'get'
if modal_class:
view_class = modal_class
else:
request.path = reverse_modal(modal_name, slug)
view_class = resolve(request.path).func.view_class
return {'function': ajax_function, 'html': view_class.as_view()(request, slug=slug, **kwargs).rendered_content} | en | 0.15135 | <div class="progress" style="margin-top: 5px;"> <div id='file_progress_bar{}' class="progress-bar{}" role="progressbar" aria-valuenow="0" aria-valuemin="0" aria-valuemax="100" style="width: 0%"> </div> </div> <button onclick='django_modal.process_commands_lock({json.dumps(params)})' class="btn {css_class}">{title}</button> | 2.021833 | 2 |
makecid.py | obsidiansystems/go-ipfs-swh-plugin | 1 | 6619726 | #!/usr/bin/env python3
# With nix, try:
# nix-shell -p 'python3.withPackages (s: with s; [ py-multicodec py-multihash py-cid ])'
from cid import make_cid
import sys
import multicodec
import multihash
if len(sys.argv) <= 1:
print(f"usage: {sys.argv[0]} [list of swhids or hashes]...")
print("Alternatively, copy the hash from the SWHID and paste f01781114 in front")
for arg in sys.argv[1:]:
if arg.startswith("swh:"):
splat = arg.split(";")[0].split(':')
ty = splat[2]
if ty in {'cnt', 'dir', 'rev', 'rel'}:
codec = 'git-raw'
elif ty == 'snp':
codec = 'swhid-1-snp'
sys.stderr.write(f"warning: {arg} encoding is not yet supported\n")
else:
sys.stderr.write(f"unknown SWHID object type: {ty}")
sys.exit(1)
arg = splat[3]
it = bytes.fromhex(arg)
hash = multihash.encode(it, multicodec.multicodec.NAME_TABLE['sha1'])
print(make_cid(1, codec, hash).encode(encoding='base16').decode('utf-8'))
| #!/usr/bin/env python3
# With nix, try:
# nix-shell -p 'python3.withPackages (s: with s; [ py-multicodec py-multihash py-cid ])'
from cid import make_cid
import sys
import multicodec
import multihash
if len(sys.argv) <= 1:
print(f"usage: {sys.argv[0]} [list of swhids or hashes]...")
print("Alternatively, copy the hash from the SWHID and paste f01781114 in front")
for arg in sys.argv[1:]:
if arg.startswith("swh:"):
splat = arg.split(";")[0].split(':')
ty = splat[2]
if ty in {'cnt', 'dir', 'rev', 'rel'}:
codec = 'git-raw'
elif ty == 'snp':
codec = 'swhid-1-snp'
sys.stderr.write(f"warning: {arg} encoding is not yet supported\n")
else:
sys.stderr.write(f"unknown SWHID object type: {ty}")
sys.exit(1)
arg = splat[3]
it = bytes.fromhex(arg)
hash = multihash.encode(it, multicodec.multicodec.NAME_TABLE['sha1'])
print(make_cid(1, codec, hash).encode(encoding='base16').decode('utf-8'))
| en | 0.438995 | #!/usr/bin/env python3 # With nix, try: # nix-shell -p 'python3.withPackages (s: with s; [ py-multicodec py-multihash py-cid ])' | 2.406893 | 2 |
test-ch05/netelements.py | kgkang/deep-learning-from-scratch | 0 | 6619727 | <reponame>kgkang/deep-learning-from-scratch<filename>test-ch05/netelements.py<gh_stars>0
import numpy as np
class Operation():
    """Graph node computed from input nodes.

    Construction registers the node in the process-wide default graph and
    links it as an output of each of its inputs.
    """
    def __init__(self, input_nodes=None):
        # Fix: original used a mutable default (`input_nodes=[]`).
        self.input_nodes = [] if input_nodes is None else input_nodes
        self.output_nodes = []
        _default_graph.operations.append(self)
        for node in self.input_nodes:
            node.output_nodes.append(self)
    def compute(self):
        """Overridden by concrete operations; evaluates the node's value."""
        pass


class add(Operation):
    """Addition node: x + y."""
    def __init__(self, x, y):
        super().__init__([x, y])
    def compute(self, x_var, y_var):
        self.inputs = [x_var, y_var]
        return x_var + y_var


class multiply(Operation):
    """Multiplication node: x * y."""
    def __init__(self, x, y):
        super().__init__([x, y])
    def compute(self, x_var, y_var):
        self.inputs = [x_var, y_var]
        return x_var * y_var


class matmul(Operation):
    """Matrix-product node (delegates to numpy's .dot)."""
    def __init__(self, x, y):
        super().__init__([x, y])
    def compute(self, x_var, y_var):
        self.inputs = [x_var, y_var]
        return x_var.dot(y_var)


class sigmoid(Operation):
    """Logistic sigmoid activation node: 1 / (1 + exp(-z))."""
    def __init__(self, z):
        super().__init__([z])
    def compute(self, z_val):
        return 1 / (1 + np.exp(-z_val))


class Placeholder():
    """Input node whose value is supplied through Session.run's feed_dict."""
    def __init__(self):
        self.output_nodes = []
        self.output = []
        _default_graph.placeholders.append(self)


class Variable():
    """Node holding a value fixed at construction time."""
    def __init__(self, initial_value=None):
        # Fix: `initial_value=[]` in the original shared one list object
        # across every Variable created without an explicit value.
        self.value = [] if initial_value is None else initial_value
        self.output_nodes = []
        self.output = []
        _default_graph.variables.append(self)


class Graph():
    """Container of operations/variables/placeholders; set_as_default makes
    it the graph that node constructors register into."""
    def __init__(self):
        self.operations = []
        self.variables = []
        self.placeholders = []
    def set_as_default(self):
        global _default_graph
        _default_graph = self


def traverse_postorder(operation):
    """
    PostOrder Traversal of Nodes. Basically makes sure computations are done
    in the correct order(Ax first, then Ax + b).
    """
    nodes_postorder = []
    def recurse(node):
        if isinstance(node, Operation):
            for input_node in node.input_nodes:
                recurse(input_node)
        nodes_postorder.append(node)
    recurse(operation)
    return nodes_postorder


class Session():
    """Executes a graph: resolves node values in post-order."""
    def run(self, operation, feed_dict=None):
        """Evaluate *operation* and return its value.

        feed_dict maps Placeholder nodes to their concrete values.
        """
        # Fix: avoid the shared mutable default (`feed_dict={}`).
        feed_dict = {} if feed_dict is None else feed_dict
        node_postorder = traverse_postorder(operation)
        for node in node_postorder:
            # isinstance instead of `type(node) == ...` (original idiom issue).
            if isinstance(node, Placeholder):
                node.output = feed_dict[node]
            elif isinstance(node, Variable):
                node.output = node.value
            else:
                # Operation: gather already-computed inputs, then evaluate.
                node.inputs = [input_node.output for input_node in node.input_nodes]
                node.output = node.compute(*node.inputs)
            # Normalise plain lists to numpy arrays for downstream math.
            if type(node.output) == list:
                node.output = np.array(node.output)
        return operation.output
| import numpy as np
class Operation():
def __init__(self, input_nodes=[]):
self.input_nodes = input_nodes
self.output_nodes = []
_default_graph.operations.append(self)
for node in input_nodes:
node.output_nodes.append(self)
def compute(self):
pass
class add(Operation):
def __init__(self,x,y):
super().__init__([x,y])
def compute(self, x_var, y_var):
self.inputs = [x_var, y_var]
return x_var + y_var
class multiply(Operation):
def __init__(self,x,y):
super().__init__([x,y])
def compute(self, x_var, y_var):
self.inputs = [x_var, y_var]
return x_var * y_var
class matmul(Operation):
def __init__(self,x,y):
super().__init__([x,y])
def compute(self, x_var, y_var):
self.inputs = [x_var, y_var]
return x_var.dot(y_var)
class sigmoid(Operation):
def __init__(self,z):
super().__init__([z])
def compute(self,z_val):
return 1 / (1 + np.exp(-z_val))
class Placeholder():
def __init__(self):
self.output_nodes = []
self.output = []
_default_graph.placeholders.append(self)
class Variable():
def __init__(self, initial_value = []):
self.value = initial_value
self.output_nodes = []
self.output = []
_default_graph.variables.append(self)
class Graph():
def __init__(self):
self.operations = []
self.variables = []
self.placeholders = []
def set_as_default(self):
global _default_graph
_default_graph = self
def traverse_postorder(operation):
"""
PostOrder Traversal of Nodes. Basically makes sure computations are done
in the correct order(Ax first, then Ax + b). Feel free to copy and
paste this.
"""
nodes_postorder = []
def recurse(node):
if isinstance(node, Operation):
for input_node in node.input_nodes:
recurse(input_node)
nodes_postorder.append(node)
recurse(operation)
return nodes_postorder
class Session():
def run(self, operation, feed_dict={}):
node_postorder = traverse_postorder(operation)
for node in node_postorder:
if type(node) == Placeholder:
node.output = feed_dict[node]
elif type(node) == Variable:
node.output = node.value
else:
# Operation
node.inputs = [input_node.output for input_node in node.input_nodes]
node.output = node.compute(*node.inputs)
if type(node.output) == list:
node.output = np.array(node.output)
return operation.output | en | 0.847676 | PostOrder Traversal of Nodes. Basically makes sure computations are done in the correct order(Ax first, then Ax + b). Feel free to copy and paste this. # Operation | 3.072603 | 3 |
lib/gamtools/radial_position.py | nroberts67/gamtools-dev | 8 | 6619728 | <filename>lib/gamtools/radial_position.py
"""
==========================
The radial position module
==========================
The radial position module contains functions for calculating chromatin
radial position from GAM :ref:`segregation tables <segregation_table>`.
"""
import numpy as np
from .segregation import open_segregation
def get_radial_position(segregation_data, no_blanks=False):
    """Get the radial position of each genomic window from a segregation table

    :param segregation_data: Segregation table generated by gamtools
    :returns: :class:`pandas.DataFrame` giving the radial position of each window
    """
    # Percentage genome coverage of each NP (column-wise detection frequency).
    coverage = segregation_data.mean() * 100

    def window_position(row):
        """Mean coverage of the NPs that are positive for this window."""
        positive_nps = row.values.astype(bool)
        return coverage[positive_nps].mean()

    positions = segregation_data.apply(window_position, axis=1)
    if no_blanks:
        # Windows detected in no NP produce NaN; drop them on request.
        positions = positions[~positions.isnull()]
    return positions
def radial_position_from_args(args):
"""Helper function to call get_radial_position from doit"""
segregation_data = open_segregation(args.segregation_file)
radial_position = get_radial_position(segregation_data,
args.no_blanks)
radial_position.to_csv(args.output_file, sep='\t')
| <filename>lib/gamtools/radial_position.py
"""
==========================
The radial position module
==========================
The radial position module contains functions for calculating chromatin
radial position from GAM :ref:`segregation tables <segregation_table>`.
"""
import numpy as np
from .segregation import open_segregation
def get_radial_position(segregation_data, no_blanks=False):
"""Get the radial position of each genomic window from a segregation table
:param segregation_data: Segregation table generated by gamtools
:returns: :class:`pandas.DataFrame` giving the radial position of each window
"""
# Get the percentage genome coverage for each NP
cov_per_np = 100 * segregation_data.mean()
def get_window_radial_pos(segregation_row):
"""Internal function that calculates radial position for each row"""
# Which NPs are positive for this window?
nps_with_window = segregation_row.values.astype(bool)
# Get the mean genome coverage of NPs positive for this window
return cov_per_np[nps_with_window].mean()
radial_position = segregation_data.apply(get_window_radial_pos, axis=1)
if no_blanks:
radial_position = radial_position[
np.logical_not(radial_position.isnull())]
return radial_position
def radial_position_from_args(args):
"""Helper function to call get_radial_position from doit"""
segregation_data = open_segregation(args.segregation_file)
radial_position = get_radial_position(segregation_data,
args.no_blanks)
radial_position.to_csv(args.output_file, sep='\t')
| en | 0.682396 | ========================== The radial position module ========================== The radial position module contains functions for calculating chromatin radial position from GAM :ref:`segregation tables <segregation_table>`. Get the radial position of each genomic window from a segregation table :param segregation_data: Segregation table generated by gamtools :returns: :class:`pandas.DataFrame` giving the radial position of each window # Get the percentage genome coverage for each NP Internal function that calculates radial position for each row # Which NPs are positive for this window? # Get the mean genome coverage of NPs positive for this window Helper function to call get_radial_position from doit | 2.657738 | 3 |
homeassistant/components/isy994/entity.py | stickpin/core | 0 | 6619729 | """Representation of ISYEntity Types."""
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import Dict
class ISYEntity(Entity):
"""Representation of an ISY994 device."""
_attrs = {}
_name: str = None
def __init__(self, node) -> None:
"""Initialize the insteon device."""
self._node = node
self._change_handler = None
self._control_handler = None
async def async_added_to_hass(self) -> None:
"""Subscribe to the node change events."""
self._change_handler = self._node.status.subscribe("changed", self.on_update)
if hasattr(self._node, "controlEvents"):
self._control_handler = self._node.controlEvents.subscribe(self.on_control)
def on_update(self, event: object) -> None:
"""Handle the update event from the ISY994 Node."""
self.schedule_update_ha_state()
def on_control(self, event: object) -> None:
"""Handle a control event from the ISY994 Node."""
self.hass.bus.fire(
"isy994_control", {"entity_id": self.entity_id, "control": event}
)
@property
def unique_id(self) -> str:
"""Get the unique identifier of the device."""
# pylint: disable=protected-access
if hasattr(self._node, "_id"):
return self._node._id
return None
@property
def name(self) -> str:
"""Get the name of the device."""
return self._name or str(self._node.name)
@property
def should_poll(self) -> bool:
"""No polling required since we're using the subscription."""
return False
@property
def value(self) -> int:
"""Get the current value of the device."""
# pylint: disable=protected-access
return self._node.status._val
def is_unknown(self) -> bool:
"""Get whether or not the value of this Entity's node is unknown.
PyISY reports unknown values as -inf
"""
return self.value == -1 * float("inf")
@property
def state(self):
"""Return the state of the ISY device."""
if self.is_unknown():
return None
return super().state
class ISYNodeEntity(ISYEntity):
"""Representation of a ISY Nodebase (Node/Group) entity."""
@property
def device_state_attributes(self) -> Dict:
"""Get the state attributes for the device."""
attr = {}
if hasattr(self._node, "aux_properties"):
for name, val in self._node.aux_properties.items():
attr[name] = f"{val.get('value')} {val.get('uom')}"
return attr
class ISYProgramEntity(ISYEntity):
"""Representation of an ISY994 program base."""
def __init__(self, name: str, status, actions=None) -> None:
"""Initialize the ISY994 program-based entity."""
super().__init__(status)
self._name = name
self._actions = actions
| """Representation of ISYEntity Types."""
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.typing import Dict
class ISYEntity(Entity):
"""Representation of an ISY994 device."""
_attrs = {}
_name: str = None
def __init__(self, node) -> None:
"""Initialize the insteon device."""
self._node = node
self._change_handler = None
self._control_handler = None
async def async_added_to_hass(self) -> None:
"""Subscribe to the node change events."""
self._change_handler = self._node.status.subscribe("changed", self.on_update)
if hasattr(self._node, "controlEvents"):
self._control_handler = self._node.controlEvents.subscribe(self.on_control)
def on_update(self, event: object) -> None:
"""Handle the update event from the ISY994 Node."""
self.schedule_update_ha_state()
def on_control(self, event: object) -> None:
"""Handle a control event from the ISY994 Node."""
self.hass.bus.fire(
"isy994_control", {"entity_id": self.entity_id, "control": event}
)
@property
def unique_id(self) -> str:
"""Get the unique identifier of the device."""
# pylint: disable=protected-access
if hasattr(self._node, "_id"):
return self._node._id
return None
@property
def name(self) -> str:
"""Get the name of the device."""
return self._name or str(self._node.name)
@property
def should_poll(self) -> bool:
"""No polling required since we're using the subscription."""
return False
@property
def value(self) -> int:
"""Get the current value of the device."""
# pylint: disable=protected-access
return self._node.status._val
def is_unknown(self) -> bool:
"""Get whether or not the value of this Entity's node is unknown.
PyISY reports unknown values as -inf
"""
return self.value == -1 * float("inf")
@property
def state(self):
"""Return the state of the ISY device."""
if self.is_unknown():
return None
return super().state
class ISYNodeEntity(ISYEntity):
"""Representation of a ISY Nodebase (Node/Group) entity."""
@property
def device_state_attributes(self) -> Dict:
"""Get the state attributes for the device."""
attr = {}
if hasattr(self._node, "aux_properties"):
for name, val in self._node.aux_properties.items():
attr[name] = f"{val.get('value')} {val.get('uom')}"
return attr
class ISYProgramEntity(ISYEntity):
"""Representation of an ISY994 program base."""
def __init__(self, name: str, status, actions=None) -> None:
"""Initialize the ISY994 program-based entity."""
super().__init__(status)
self._name = name
self._actions = actions
| en | 0.778138 | Representation of ISYEntity Types. Representation of an ISY994 device. Initialize the insteon device. Subscribe to the node change events. Handle the update event from the ISY994 Node. Handle a control event from the ISY994 Node. Get the unique identifier of the device. # pylint: disable=protected-access Get the name of the device. No polling required since we're using the subscription. Get the current value of the device. # pylint: disable=protected-access Get whether or not the value of this Entity's node is unknown. PyISY reports unknown values as -inf Return the state of the ISY device. Representation of a ISY Nodebase (Node/Group) entity. Get the state attributes for the device. Representation of an ISY994 program base. Initialize the ISY994 program-based entity. | 2.54135 | 3 |
code/abc024_c_01.py | KoyanagiHitoshi/AtCoder | 3 | 6619730 | N,D,K=map(int,input().split())
LR=[list(map(int,input().split())) for i in range(D)]
ST=[list(map(int,input().split())) for i in range(K)]
for s,t in ST:
ans=0
for l,r in LR:
ans+=1
if l<=t<=r and l<=s<=r:
print(ans)
break
if l<=s<=r:
if abs(t-r)<abs(t-l):s=r
else:s=l | N,D,K=map(int,input().split())
LR=[list(map(int,input().split())) for i in range(D)]
ST=[list(map(int,input().split())) for i in range(K)]
for s,t in ST:
ans=0
for l,r in LR:
ans+=1
if l<=t<=r and l<=s<=r:
print(ans)
break
if l<=s<=r:
if abs(t-r)<abs(t-l):s=r
else:s=l | none | 1 | 2.38544 | 2 | |
utils/task_graph/draw_thesis_task_graph.py | srgrr/TFM | 6 | 6619731 | <reponame>srgrr/TFM
#!/usr/bin/python3
import pandas as pd
FILE_PATH = "time_estimations.csv"
def main():
g = []
with open(FILE_PATH, "r") as f:
df = pd.read_csv(f, delimiter = ",", dtype = str)
for (i, line) in df.iterrows():
dep_line = line["Dependencies"].replace("no", "")
cur_node = {
"name": "%s (%s)\\n%s" % (line["Name"], line["Time"], line["Comment"]),
"cost": int(line["Time"]),
"edges": [x - 2 for x in list(map(int, dep_line.split()))]
}
g.append(cur_node)
print("digraph thesis_tasks {")
print("rankdir=LR;")
print("\tnode [shape = circle]; %s ;" % " ".join("\"%s\"" % x["name"] for x in g))
vis = [False] * len(g)
def dfs(i):
if vis[i]: return
vis[i] = True
for edge in g[i]["edges"]:
adj = g[edge]
print(
"\t\"%s\" -> \"%s\"" % (adj["name"], g[i]["name"])
)
dfs(edge)
for (i, v) in enumerate(g):
dfs(i)
print("}")
if __name__ == "__main__":
main()
| #!/usr/bin/python3
import pandas as pd
FILE_PATH = "time_estimations.csv"
def main():
g = []
with open(FILE_PATH, "r") as f:
df = pd.read_csv(f, delimiter = ",", dtype = str)
for (i, line) in df.iterrows():
dep_line = line["Dependencies"].replace("no", "")
cur_node = {
"name": "%s (%s)\\n%s" % (line["Name"], line["Time"], line["Comment"]),
"cost": int(line["Time"]),
"edges": [x - 2 for x in list(map(int, dep_line.split()))]
}
g.append(cur_node)
print("digraph thesis_tasks {")
print("rankdir=LR;")
print("\tnode [shape = circle]; %s ;" % " ".join("\"%s\"" % x["name"] for x in g))
vis = [False] * len(g)
def dfs(i):
if vis[i]: return
vis[i] = True
for edge in g[i]["edges"]:
adj = g[edge]
print(
"\t\"%s\" -> \"%s\"" % (adj["name"], g[i]["name"])
)
dfs(edge)
for (i, v) in enumerate(g):
dfs(i)
print("}")
if __name__ == "__main__":
main() | fr | 0.386793 | #!/usr/bin/python3 | 3.161433 | 3 |
now/bff/v1/routers/text.py | jina-ai/now | 6 | 6619732 | from typing import List
from docarray import Document, DocumentArray
from fastapi import APIRouter
from jina import Client
from now.bff.v1.models.text import (
NowTextIndexRequestModel,
NowTextResponseModel,
NowTextSearchRequestModel,
)
from now.bff.v1.routers.helper import process_query
router = APIRouter()
# Index
@router.post(
"/index",
summary='Add more data to the indexer',
)
def index(data: NowTextIndexRequestModel):
"""
Append the list of texts to the indexer.
"""
index_docs = DocumentArray()
for text in data.texts:
index_docs.append(Document(text=text))
c = Client(host=data.host, port=data.port)
c.post('/index', index_docs)
# Search
@router.post(
"/search",
response_model=List[NowTextResponseModel],
summary='Search text data via text or image as query',
)
def search(data: NowTextSearchRequestModel):
"""
Retrieve matching texts for a given text as query. Query should be `base64` encoded
using human-readable characters - `utf-8`.
"""
query_doc = process_query(data.text, data.image)
c = Client(host=data.host, port=data.port)
docs = c.post('/search', query_doc, parameters={"limit": data.limit})
return docs[0].matches.to_dict()
| from typing import List
from docarray import Document, DocumentArray
from fastapi import APIRouter
from jina import Client
from now.bff.v1.models.text import (
NowTextIndexRequestModel,
NowTextResponseModel,
NowTextSearchRequestModel,
)
from now.bff.v1.routers.helper import process_query
router = APIRouter()
# Index
@router.post(
"/index",
summary='Add more data to the indexer',
)
def index(data: NowTextIndexRequestModel):
"""
Append the list of texts to the indexer.
"""
index_docs = DocumentArray()
for text in data.texts:
index_docs.append(Document(text=text))
c = Client(host=data.host, port=data.port)
c.post('/index', index_docs)
# Search
@router.post(
"/search",
response_model=List[NowTextResponseModel],
summary='Search text data via text or image as query',
)
def search(data: NowTextSearchRequestModel):
"""
Retrieve matching texts for a given text as query. Query should be `base64` encoded
using human-readable characters - `utf-8`.
"""
query_doc = process_query(data.text, data.image)
c = Client(host=data.host, port=data.port)
docs = c.post('/search', query_doc, parameters={"limit": data.limit})
return docs[0].matches.to_dict()
| en | 0.752389 | # Index Append the list of texts to the indexer. # Search Retrieve matching texts for a given text as query. Query should be `base64` encoded using human-readable characters - `utf-8`. | 2.318284 | 2 |
tests/testsPhoenixReader.py | geobook2015/magPy | 1 | 6619733 | import os
import sys
sys.path.append(os.path.join("..", "core"))
sys.path.append(os.path.join("..", "inbuilt"))
sys.path.append(os.path.join("..", "utils"))
import numpy as np
import math
from datetime import datetime, timedelta
import struct
# import readers
from dataReaderPhoenix import DataReaderPhoenix
from dataReaderInternal import DataReaderInternal
# import writers
from dataWriterInternal import DataWriterInternal
# import inbuilt
from projectDefault import *
from projectViewTime import *
# import utils
from utilsProcess import *
from utilsIO import *
# graphing
import matplotlib.pyplot as plt
# def readCoil(coilFile):
# coilPath = os.path.join("..", "..", "Data", "riftVolc", "202", "COIL1547.CLC")
# coilFile = open(coilPath, "rb")
# print struct.unpack("20b", coilFile.read(20))
# print struct.unpack("12s", coilFile.read(12))
# print struct.unpack("12s", coilFile.read(12))
# print struct.unpack("12s", coilFile.read(12))
# print struct.unpack("8s", coilFile.read(8))
# print struct.unpack("12s", coilFile.read(12))
# print struct.unpack("d", coilFile.read(8))
# print struct.unpack("d", coilFile.read(8))
# print struct.unpack("7s", coilFile.read(7))
# print struct.unpack("18s", coilFile.read(18))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("d", coilFile.read(8))
# print struct.unpack("d", coilFile.read(8))
# print struct.unpack("d", coilFile.read(8))
# print struct.unpack("d", coilFile.read(8))
# print struct.unpack("d", coilFile.read(8))
# print struct.unpack("500s", coilFile.read(500))
# coilFile.close()
### test the data reader
dataPath = os.path.join("..", "..", "Data", "riftVolc", "202")
dataReader = DataReaderPhoenix(dataPath)
dataReader.printInfo()
dataReader.printDataFileList()
print dataReader.getSamplesRatesTS()
print dataReader.getNumberSamplesTS()
dataReader.printTableFile()
# startTime = "2017-04-07 23:00:00"
# endTime = "2017-04-08 01:00:00"
# data = dataReader.getUnscaledData(startTime, endTime)
# plt.figure()
# for idx, chan in enumerate(data.keys()):
# plt.subplot(dataReader.getNumChannels(), 1, idx+1)
# plt.title(chan)
# plt.plot(data[chan]-np.average(data[chan]))
# plt.show()
### now try and reformat
# outpath = os.path.join("..", "..", "Data", "riftVolc", "202_reformat")
# dataReader.reformat(outpath)
### create a project
# projectPath = (os.path.join("..", "..", "Data", "riftVolcProject"))
# projectMakeDefault(projectPath, "2017-04-07 06:00:00")
# proj = projectLoad(projectPath, "mtProj.prj")
### let's look at some time
# projectViewTime(proj, "2017-04-08 02:00:00", "2017-04-08 04:30:00", freqs=[15], save=True, chans=["Ex", "Ey", "Hx", "Hy", "Hz"])
# projectViewTime(proj, "2017-04-07 09:16:00", "2017-04-07 09:16:16", freqs=[150], save=True, chans=["Ex", "Ey", "Hx", "Hy", "Hz"])
# projectViewTime(proj, "2017-04-07 09:33:00", "2017-04-07 09:33:01", freqs=[2400], save=True, chans=["Ex", "Ey", "Hx", "Hy", "Hz"])
| import os
import sys
sys.path.append(os.path.join("..", "core"))
sys.path.append(os.path.join("..", "inbuilt"))
sys.path.append(os.path.join("..", "utils"))
import numpy as np
import math
from datetime import datetime, timedelta
import struct
# import readers
from dataReaderPhoenix import DataReaderPhoenix
from dataReaderInternal import DataReaderInternal
# import writers
from dataWriterInternal import DataWriterInternal
# import inbuilt
from projectDefault import *
from projectViewTime import *
# import utils
from utilsProcess import *
from utilsIO import *
# graphing
import matplotlib.pyplot as plt
# def readCoil(coilFile):
# coilPath = os.path.join("..", "..", "Data", "riftVolc", "202", "COIL1547.CLC")
# coilFile = open(coilPath, "rb")
# print struct.unpack("20b", coilFile.read(20))
# print struct.unpack("12s", coilFile.read(12))
# print struct.unpack("12s", coilFile.read(12))
# print struct.unpack("12s", coilFile.read(12))
# print struct.unpack("8s", coilFile.read(8))
# print struct.unpack("12s", coilFile.read(12))
# print struct.unpack("d", coilFile.read(8))
# print struct.unpack("d", coilFile.read(8))
# print struct.unpack("7s", coilFile.read(7))
# print struct.unpack("18s", coilFile.read(18))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("f", coilFile.read(4))
# print struct.unpack("d", coilFile.read(8))
# print struct.unpack("d", coilFile.read(8))
# print struct.unpack("d", coilFile.read(8))
# print struct.unpack("d", coilFile.read(8))
# print struct.unpack("d", coilFile.read(8))
# print struct.unpack("500s", coilFile.read(500))
# coilFile.close()
### test the data reader
dataPath = os.path.join("..", "..", "Data", "riftVolc", "202")
dataReader = DataReaderPhoenix(dataPath)
dataReader.printInfo()
dataReader.printDataFileList()
print dataReader.getSamplesRatesTS()
print dataReader.getNumberSamplesTS()
dataReader.printTableFile()
# startTime = "2017-04-07 23:00:00"
# endTime = "2017-04-08 01:00:00"
# data = dataReader.getUnscaledData(startTime, endTime)
# plt.figure()
# for idx, chan in enumerate(data.keys()):
# plt.subplot(dataReader.getNumChannels(), 1, idx+1)
# plt.title(chan)
# plt.plot(data[chan]-np.average(data[chan]))
# plt.show()
### now try and reformat
# outpath = os.path.join("..", "..", "Data", "riftVolc", "202_reformat")
# dataReader.reformat(outpath)
### create a project
# projectPath = (os.path.join("..", "..", "Data", "riftVolcProject"))
# projectMakeDefault(projectPath, "2017-04-07 06:00:00")
# proj = projectLoad(projectPath, "mtProj.prj")
### let's look at some time
# projectViewTime(proj, "2017-04-08 02:00:00", "2017-04-08 04:30:00", freqs=[15], save=True, chans=["Ex", "Ey", "Hx", "Hy", "Hz"])
# projectViewTime(proj, "2017-04-07 09:16:00", "2017-04-07 09:16:16", freqs=[150], save=True, chans=["Ex", "Ey", "Hx", "Hy", "Hz"])
# projectViewTime(proj, "2017-04-07 09:33:00", "2017-04-07 09:33:01", freqs=[2400], save=True, chans=["Ex", "Ey", "Hx", "Hy", "Hz"])
| en | 0.243121 | # import readers # import writers # import inbuilt # import utils # graphing # def readCoil(coilFile): # coilPath = os.path.join("..", "..", "Data", "riftVolc", "202", "COIL1547.CLC") # coilFile = open(coilPath, "rb") # print struct.unpack("20b", coilFile.read(20)) # print struct.unpack("12s", coilFile.read(12)) # print struct.unpack("12s", coilFile.read(12)) # print struct.unpack("12s", coilFile.read(12)) # print struct.unpack("8s", coilFile.read(8)) # print struct.unpack("12s", coilFile.read(12)) # print struct.unpack("d", coilFile.read(8)) # print struct.unpack("d", coilFile.read(8)) # print struct.unpack("7s", coilFile.read(7)) # print struct.unpack("18s", coilFile.read(18)) # print struct.unpack("f", coilFile.read(4)) # print struct.unpack("f", coilFile.read(4)) # print struct.unpack("f", coilFile.read(4)) # print struct.unpack("f", coilFile.read(4)) # print struct.unpack("f", coilFile.read(4)) # print struct.unpack("f", coilFile.read(4)) # print struct.unpack("f", coilFile.read(4)) # print struct.unpack("f", coilFile.read(4)) # print struct.unpack("f", coilFile.read(4)) # print struct.unpack("f", coilFile.read(4)) # print struct.unpack("f", coilFile.read(4)) # print struct.unpack("f", coilFile.read(4)) # print struct.unpack("f", coilFile.read(4)) # print struct.unpack("f", coilFile.read(4)) # print struct.unpack("d", coilFile.read(8)) # print struct.unpack("d", coilFile.read(8)) # print struct.unpack("d", coilFile.read(8)) # print struct.unpack("d", coilFile.read(8)) # print struct.unpack("d", coilFile.read(8)) # print struct.unpack("500s", coilFile.read(500)) # coilFile.close() ### test the data reader # startTime = "2017-04-07 23:00:00" # endTime = "2017-04-08 01:00:00" # data = dataReader.getUnscaledData(startTime, endTime) # plt.figure() # for idx, chan in enumerate(data.keys()): # plt.subplot(dataReader.getNumChannels(), 1, idx+1) # plt.title(chan) # plt.plot(data[chan]-np.average(data[chan])) # plt.show() ### now try and reformat # 
outpath = os.path.join("..", "..", "Data", "riftVolc", "202_reformat") # dataReader.reformat(outpath) ### create a project # projectPath = (os.path.join("..", "..", "Data", "riftVolcProject")) # projectMakeDefault(projectPath, "2017-04-07 06:00:00") # proj = projectLoad(projectPath, "mtProj.prj") ### let's look at some time # projectViewTime(proj, "2017-04-08 02:00:00", "2017-04-08 04:30:00", freqs=[15], save=True, chans=["Ex", "Ey", "Hx", "Hy", "Hz"]) # projectViewTime(proj, "2017-04-07 09:16:00", "2017-04-07 09:16:16", freqs=[150], save=True, chans=["Ex", "Ey", "Hx", "Hy", "Hz"]) # projectViewTime(proj, "2017-04-07 09:33:00", "2017-04-07 09:33:01", freqs=[2400], save=True, chans=["Ex", "Ey", "Hx", "Hy", "Hz"]) | 1.954651 | 2 |
decibel/utils/find_item_in_list.py | DaphneO/DECIBEL | 13 | 6619734 | <filename>decibel/utils/find_item_in_list.py
def find_item(list_containing_list, item):
"""
Find the index of the list that contains the item
:param list_containing_list: List of lists; one of them must contain the item
:param item: The item we are looking for
:return: Index of the item in the outer list
>>> find_item([[1,2,3],[4,5,6]],5)
1
"""
for _list in list_containing_list:
if item in _list:
return list_containing_list.index(_list)
return None
| <filename>decibel/utils/find_item_in_list.py
def find_item(list_containing_list, item):
"""
Find the index of the list that contains the item
:param list_containing_list: List of lists; one of them must contain the item
:param item: The item we are looking for
:return: Index of the item in the outer list
>>> find_item([[1,2,3],[4,5,6]],5)
1
"""
for _list in list_containing_list:
if item in _list:
return list_containing_list.index(_list)
return None
| en | 0.740186 | Find the index of the list that contains the item :param list_containing_list: List of lists; one of them must contain the item :param item: The item we are looking for :return: Index of the item in the outer list >>> find_item([[1,2,3],[4,5,6]],5) 1 | 3.913445 | 4 |
examples/0011-book-3-behavior-manifest-individuals.py | dnoneill/pyIIIFpres | 12 | 6619735 | <filename>examples/0011-book-3-behavior-manifest-individuals.py<gh_stars>10-100
# https://iiif.io/api/cookbook/recipe/0009-book-1/
from IIIFpres import iiifpapi3
iiifpapi3.BASE_URL = "https://iiif.io/api/cookbook/recipe/0011-book-3-behavior/"
manifest = iiifpapi3.Manifest()
manifest.set_id(extendbase_url="manifest-individuals.json")
manifest.add_label("ca","[Conoximent de las orines] Ihesus, Ihesus. En nom de Deu et dela beneyeta sa mare e de tots los angels i archangels e de tots los sants e santes de paradis yo micer Johannes comense aquest libre de reseptes en l’ayn Mi 466.")
manifest.add_behavior("individuals")
data = [('inside cover; 1r',
3375,
2250,
'https://iiif.io/api/image/3.0/example/reference/85a96c630f077e6ac6cb984f1b752bbf-0-21198-zz00022840-1-master',
'/full/max/0/default.jpg'),
('2v, 3r',
3375,
2250,
'https://iiif.io/api/image/3.0/example/reference/85a96c630f077e6ac6cb984f1b752bbf-1-21198-zz00022882-1-master',
'/full/max/0/default.jpg'),
('3v, 4r',
3375,
2250,
'https://iiif.io/api/image/3.0/example/reference/85a96c630f077e6ac6cb984f1b752bbf-2-21198-zz000228b3-1-master',
'/full/max/0/default.jpg'),
('4v, 5r',
3375,
2250,
'https://iiif.io/api/image/3.0/example/reference/85a96c630f077e6ac6cb984f1b752bbf-3-21198-zz000228d4-1-master',
'/full/max/0/default.jpg')]
for idx,d in enumerate(data):
idx+=1
canvas = manifest.add_canvas_to_items()
canvas.set_id(extendbase_url="canvas/v%s"%idx) # in this case we use the base url
canvas.set_height(d[2])
canvas.set_width(d[1])
canvas.add_label("en",d[0])
annopage = canvas.add_annotationpage_to_items()
annopage.set_id(extendbase_url="page/v%s/1" %idx)
annotation = annopage.add_annotation_to_items(target=canvas.id)
annotation.set_id(extendbase_url="annotation/v%s-image" %str(idx).zfill(4))
annotation.set_motivation("painting")
annotation.body.set_id("".join(d[3:]))
annotation.body.set_type("Image")
annotation.body.set_format("image/jpeg")
annotation.body.set_width(d[1])
annotation.body.set_height(d[2])
s = annotation.body.add_service()
s.set_id(d[3])
s.set_type("ImageService3")
s.set_profile("level1")
if __name__ == "__main__":
manifest.json_save("manifest.json") | <filename>examples/0011-book-3-behavior-manifest-individuals.py<gh_stars>10-100
# https://iiif.io/api/cookbook/recipe/0009-book-1/
from IIIFpres import iiifpapi3
iiifpapi3.BASE_URL = "https://iiif.io/api/cookbook/recipe/0011-book-3-behavior/"
manifest = iiifpapi3.Manifest()
manifest.set_id(extendbase_url="manifest-individuals.json")
manifest.add_label("ca","[Conoximent de las orines] Ihesus, Ihesus. En nom de Deu et dela beneyeta sa mare e de tots los angels i archangels e de tots los sants e santes de paradis yo micer Johannes comense aquest libre de reseptes en l’ayn Mi 466.")
manifest.add_behavior("individuals")
data = [('inside cover; 1r',
3375,
2250,
'https://iiif.io/api/image/3.0/example/reference/85a96c630f077e6ac6cb984f1b752bbf-0-21198-zz00022840-1-master',
'/full/max/0/default.jpg'),
('2v, 3r',
3375,
2250,
'https://iiif.io/api/image/3.0/example/reference/85a96c630f077e6ac6cb984f1b752bbf-1-21198-zz00022882-1-master',
'/full/max/0/default.jpg'),
('3v, 4r',
3375,
2250,
'https://iiif.io/api/image/3.0/example/reference/85a96c630f077e6ac6cb984f1b752bbf-2-21198-zz000228b3-1-master',
'/full/max/0/default.jpg'),
('4v, 5r',
3375,
2250,
'https://iiif.io/api/image/3.0/example/reference/85a96c630f077e6ac6cb984f1b752bbf-3-21198-zz000228d4-1-master',
'/full/max/0/default.jpg')]
for idx,d in enumerate(data):
idx+=1
canvas = manifest.add_canvas_to_items()
canvas.set_id(extendbase_url="canvas/v%s"%idx) # in this case we use the base url
canvas.set_height(d[2])
canvas.set_width(d[1])
canvas.add_label("en",d[0])
annopage = canvas.add_annotationpage_to_items()
annopage.set_id(extendbase_url="page/v%s/1" %idx)
annotation = annopage.add_annotation_to_items(target=canvas.id)
annotation.set_id(extendbase_url="annotation/v%s-image" %str(idx).zfill(4))
annotation.set_motivation("painting")
annotation.body.set_id("".join(d[3:]))
annotation.body.set_type("Image")
annotation.body.set_format("image/jpeg")
annotation.body.set_width(d[1])
annotation.body.set_height(d[2])
s = annotation.body.add_service()
s.set_id(d[3])
s.set_type("ImageService3")
s.set_profile("level1")
if __name__ == "__main__":
manifest.json_save("manifest.json") | en | 0.483669 | # https://iiif.io/api/cookbook/recipe/0009-book-1/ # in this case we use the base url | 1.726094 | 2 |
beacon/response/info.py | elixir-luxembourg/BH2021-beacon-2.x-omop | 0 | 6619736 | """
Info Endpoint.
Querying the info endpoint reveals information about this beacon and its existing datasets
and their associated metadata.
* ``/`` Beacon-v1
* ``/info`` Beacon-v1
* ``/info?model=GA4GH-ServiceInfo-v0.1`` GA4GH
* ``/service-info`` GA4GH
"""
import logging
import json
from aiohttp.web_request import Request
from beacon.db.datasets import get_datasets
from beacon.request import IncludeResultsetResponses, RequestParams, get_parameters
from beacon.response.info_response_schema import build_beacon_info_response
from beacon.utils.auth import resolve_token
from beacon.utils.stream import json_stream
from bson import json_util
LOG = logging.getLogger(__name__)
async def handler(request: Request):
LOG.info('Running a GET info request')
# Fetch datasets info
qparams: RequestParams = await get_parameters(request)
beacon_datasets = [ json.loads(json_util.dumps(r)) for r in get_datasets(None, qparams)]
all_datasets = [ r['_id'] for r in beacon_datasets]
access_token = request.headers.get('Authorization')
if access_token:
access_token = access_token[7:] # cut out 7 characters: len('Bearer ')
authorized_datasets, authenticated = await resolve_token(access_token, all_datasets)
if authenticated:
LOG.debug('all datasets: %s', all_datasets)
LOG.info('resolved datasets: %s', authorized_datasets)
response_converted = build_beacon_info_response(beacon_datasets,
qparams,
lambda x,y,z: x,
authorized_datasets if authenticated else [])
return await json_stream(request, response_converted)
| """
Info Endpoint.
Querying the info endpoint reveals information about this beacon and its existing datasets
and their associated metadata.
* ``/`` Beacon-v1
* ``/info`` Beacon-v1
* ``/info?model=GA4GH-ServiceInfo-v0.1`` GA4GH
* ``/service-info`` GA4GH
"""
import logging
import json
from aiohttp.web_request import Request
from beacon.db.datasets import get_datasets
from beacon.request import IncludeResultsetResponses, RequestParams, get_parameters
from beacon.response.info_response_schema import build_beacon_info_response
from beacon.utils.auth import resolve_token
from beacon.utils.stream import json_stream
from bson import json_util
LOG = logging.getLogger(__name__)
async def handler(request: Request):
LOG.info('Running a GET info request')
# Fetch datasets info
qparams: RequestParams = await get_parameters(request)
beacon_datasets = [ json.loads(json_util.dumps(r)) for r in get_datasets(None, qparams)]
all_datasets = [ r['_id'] for r in beacon_datasets]
access_token = request.headers.get('Authorization')
if access_token:
access_token = access_token[7:] # cut out 7 characters: len('Bearer ')
authorized_datasets, authenticated = await resolve_token(access_token, all_datasets)
if authenticated:
LOG.debug('all datasets: %s', all_datasets)
LOG.info('resolved datasets: %s', authorized_datasets)
response_converted = build_beacon_info_response(beacon_datasets,
qparams,
lambda x,y,z: x,
authorized_datasets if authenticated else [])
return await json_stream(request, response_converted)
| en | 0.718094 | Info Endpoint. Querying the info endpoint reveals information about this beacon and its existing datasets and their associated metadata. * ``/`` Beacon-v1 * ``/info`` Beacon-v1 * ``/info?model=GA4GH-ServiceInfo-v0.1`` GA4GH * ``/service-info`` GA4GH # Fetch datasets info # cut out 7 characters: len('Bearer ') | 2.699109 | 3 |
benchmark/scripts/build_bench.py | divenswu/proximabilin | 103 | 6619737 | <gh_stars>100-1000
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Proxima SE benchmark toolkits
#
from datetime import datetime
import json
import shutil
import sys
import logging
from concurrent.futures import as_completed
from concurrent.futures.thread import ThreadPoolExecutor
from optparse import OptionParser
from urllib.parse import urlparse
from google.protobuf.json_format import MessageToDict
from pyproximase import Client as SEClient
from pyproximase import *
from common.proxima_se_repo import MysqlRepoSource
from common.proxima_se_service import *
from urllib import parse
"""
Requirements:
1. Above Python3.6
Test Scenario:
------
Step 1: Prepared Repository for ProximaSE
Requirements:
i: table should have first column named by id with property auto_increment
ii: vector column should be prefixed by vector
iii: all columns treated as forward attributes except vector and id
Step 2: Clone source code of ProximaSE
cd (source of ProximaSE)
Step 3: Build ProximaSE
mkdir build; cd build; cmake ../; make -j
Step 4: Run Bench tools
cd benchmark; pip install -r requirements.txt; PYTHONPATH=$(pwd) python scripts/build_bench.py
------
"""
class BenchContext(object):
    """Holds all per-run benchmark settings and the on-disk layout.

    Derives the output/log/index/meta directories and the config files for
    both ProximaSE and the MySQL repository from the command-line options
    (*kwargs* is the optparse Values object returned by opt_parser())."""
    def __init__(self, output_dir, kwargs):
        self._output = os.path.realpath(output_dir)
        self._kwargs = kwargs
        # Fall back to <output>/logs and <output>/indices when not given.
        self._log_dir = kwargs.log_dir if kwargs.log_dir else os.path.join(self._output, 'logs')
        self._indices_dir = kwargs.index_directory if kwargs.index_directory else os.path.join(
            self._output, 'indices')
        self._meta_uri = kwargs.meta_uri
        self._meta_dir = None
        if self._meta_uri:
            # Only sqlite:// URIs carry a directory that we must create ourselves.
            uri = urlparse(self._meta_uri)
            if uri and uri.scheme.lower() == 'sqlite' and uri.path:
                self._meta_dir = os.path.dirname(uri.path)
                if self._meta_dir == "/":
                    logging.error("Can't create meta to root directory '/'")
                    self._meta_dir = None
        else:
            # Default meta store: a sqlite file under the output directory.
            self._meta_dir = os.path.join(self._output, 'meta')
            self._meta_uri = f'sqlite://{self._meta_dir}/meta.sqlite'
        self._repository = None
        self._table = kwargs.table
        self._dimension = int(kwargs.dimension)
        if kwargs.jdbc:
            # The repository name is the database name in the jdbc URI path.
            path = urlparse(kwargs.jdbc).path
            if path and len(path) != 0:
                self._repository = path.strip("/").rstrip("/")
        self._proxima_se_conf = None
        self._repository_conf = None
        self._report = None
    # --- simple accessors over the derived layout ---
    def output(self):
        return self._output
    def options(self):
        return self._kwargs
    def repository(self):
        return self._repository
    def table(self):
        return self._table
    def counts(self):
        # sys.maxsize effectively means "no limit".
        return int(self._kwargs.counts) if self._kwargs.counts else sys.maxsize
    def meta_dir(self):
        return self._meta_dir
    def indices_dir(self):
        return self._indices_dir
    def log_dir(self):
        return self._log_dir
    def proxima_se_log_dir(self):
        return os.path.join(self.log_dir(), "be")
    def repository_log_dir(self):
        return os.path.join(self.log_dir(), "repo")
    def timeout_in_seconds(self):
        return int(self._kwargs.timeout)
    def output_flush_interval(self):
        return float(self._kwargs.interval)
    def config_dir(self):
        return os.path.join(self._output, "conf")
    def proxima_se_config(self):
        """ProximaSE config as a plain dict (empty before init())."""
        if self._proxima_se_conf:
            return MessageToDict(self._proxima_se_conf)
        return {}
    def repository_config(self):
        return self._repository
    def proxima_se_config_file(self):
        return os.path.join(self.config_dir(), 'proxima_se.conf')
    def proxima_se_repo_config_file(self):
        return os.path.join(self.config_dir(), 'mysql_repo.conf')
    def proxima_se_admin_address(self):
        # The gRPC admin endpoint is always bound on localhost.
        return ['127.0.0.1', self._proxima_se_conf.common_config.grpc_listen_port]
    def max_docs_per_segment(self):
        return int(self._kwargs.max_docs_per_segment)
    def dimension(self):
        return self._dimension
    def report_file(self):
        return self._report
    def _init_bench_config(self):
        """Build and persist the ProximaSE and repository config files.

        Returns True when both configs were built and written."""
        self._proxima_se_conf = ProximaSE.build_config(log_directory=self.proxima_se_log_dir(),
                                                       grpc_port=int(self._kwargs.grpc_port),
                                                       http_port=int(self._kwargs.http_port),
                                                       index_build_threads=int(self._kwargs.index_build_threads),
                                                       index_build_qps=int(self._kwargs.index_build_qps),
                                                       index_directory=self.indices_dir(),
                                                       meta_uri=self._meta_uri)
        self._repository_conf = ProximaSEMysqlRepo.build_config(log_directory=self.repository_log_dir(),
                                                                index_port=int(self._kwargs.grpc_port),
                                                                repository_name=self._repository)
        if self._proxima_se_conf and self._repository_conf:
            ProximaSEService.write_config_file(self.proxima_se_config_file(), self._proxima_se_conf)
            ProximaSEService.write_file(self.proxima_se_repo_config_file(), self._repository_conf)
            return True
        return False
    @staticmethod
    def _create_directory(directory):
        # mkdir -p semantics: only create when missing.
        if not os.path.exists(directory):
            os.makedirs(directory)
    def _init_output_dir(self):
        """Create the whole on-disk layout (meta/indices/logs/conf)."""
        self._create_directory(self.output())
        if not self.meta_dir():
            return False
        logging.info(f'Create meta directory {self.meta_dir()}')
        self._create_directory(self.meta_dir())
        logging.info(f'Create indices directory {self.indices_dir()}')
        self._create_directory(self.indices_dir())
        logging.info(f'Create log directory {self.log_dir()}')
        self._create_directory(self.log_dir())
        self._create_directory(self.proxima_se_log_dir())
        self._create_directory(self.repository_log_dir())
        logging.info(f'Create config directory {self.config_dir()}')
        self._create_directory(self.config_dir())
        return True
    def _init_report(self):
        """Resolve the report path; defaults to <output>/report.json."""
        if self._kwargs.report:
            if os.path.exists(os.path.dirname(os.path.realpath(self._kwargs.report))):
                self._report = self._kwargs.report
            else:
                logging.error(f'Can not write report file: {self._kwargs.report}')
                return False
        else:
            self._report = os.path.join(self.output(), 'report.json')
        return True
    def init(self):
        """Prepare report path, directories and config files. True on success."""
        if not self._init_report():
            return False
        if not self._init_output_dir():
            logging.error(f'Initialize output directory({self.output()}) failed')
            return False
        if not self._init_bench_config():
            logging.error(f'Can not create bench configs')
            return False
        return True
    @staticmethod
    def _cleanup_dir(directory):
        if os.path.isdir(directory):
            shutil.rmtree(directory)
    def cleanup(self):
        """Remove all produced directories when --cleanup was given."""
        if self._kwargs.cleanup:
            self._cleanup_dir(self.output())
            self._cleanup_dir(self.meta_dir())
            self._cleanup_dir(self.indices_dir())
            self._cleanup_dir(self.log_dir())
            self._cleanup_dir(self.config_dir())
        return True
class ProximaSEBuilds(object):
    """Locates a ProximaSE build tree and the binaries it contains."""

    def __init__(self, roots):
        """Resolve the build root: explicit argument first, then the
        PROXIMA_SE_BUILD_ROOT environment variable, then ../build, and
        finally ../cmake-build-debug relative to the working directory."""
        if not roots:
            roots = os.getenv("PROXIMA_SE_BUILD_ROOT")
            if not roots:
                candidate = os.path.realpath(os.path.join(os.getcwd(), "..", "build"))
                if not os.path.isdir(candidate):
                    candidate = os.path.realpath(os.path.join(os.getcwd(), "..", "cmake-build-debug"))
                roots = candidate
        self._build_roots = roots
        logging.info(f'Locate Build Root of ProximaSE: {roots}')

    def _binary(self, name):
        # All binaries live under <build_root>/bin.
        return os.path.join(self._build_roots, 'bin', name)

    def proxima_se_binary(self):
        """Path of the proxima_se server binary."""
        return self._binary('proxima_se')

    def proxima_repo_binary(self):
        """Path of the mysql_repository binary."""
        return self._binary('mysql_repository')

    def is_valid(self):
        """True when the build root exists and both binaries are present."""
        return (os.path.isdir(self._build_roots)
                and os.path.isfile(self.proxima_se_binary())
                and os.path.isfile(self.proxima_repo_binary()))
def _timeout_monitor(stopped, timeout, interval):
logging.info(f'Timeout monitor started, sleep for {timeout} seconds')
slept = 0
while not stopped() and slept < timeout:
time.sleep(interval)
slept += interval
logging.info(f'Timeout monitor stopped')
if slept < timeout:
return {}
else:
return {"timeout": True}
def _service_stopped_monitor(stopped, interval, *args):
logging.info(f'Service monitor started, interval({interval})')
while not stopped():
services = map(lambda svc: svc.service_name(), filter(lambda svc: not svc.running(), args))
for service in services:
logging.info(f'{service} is not running')
return {"stopped": service}
time.sleep(interval)
logging.info(f'Service monitor stopped')
return {}
def _progress_monitor(stopped, callback, total, interval, notifier):
    """Poll *callback*() (processed-record count) every *interval* seconds
    until *total* records are processed or *stopped*() turns true.

    Reports via *notifier*(progress_pct, avg_qps, seconds, rt_qps) after
    every poll and returns a summary dict with the final figures."""
    logging.info(f'Start progress monitor, total({total})')
    start = time.monotonic()
    progress = 0.0
    # last == [processed, seconds] at the previous poll; the delta between
    # polls yields the real-time (incremental) QPS figure.
    last = [0, 0.0]
    processed = 0
    while not stopped():
        processed = callback()
        if processed < 0:
            # Negative means the stats query failed; give up monitoring.
            processed = 0
            logging.info("Can't get processed from callback")
            break
        progress = round(float(processed) / total * 100, 4)
        seconds = time.monotonic() - start
        incremental = [processed - last[0], seconds - last[1]]
        last = [processed, seconds]
        if progress > 100.00:
            # More rows arrived than expected (e.g. source kept growing).
            progress = 100.00
        print("Processed: %02.2f%%, QPS %d/S, RTQPS %d/S" % (
            progress, int(processed / seconds), int(incremental[0] / incremental[1])))
        notifier(progress, int(processed / seconds), round(seconds, 2), int(incremental[0] / incremental[1]))
        if processed >= total:
            # Emit a final 100% line/row before leaving the loop.
            print("Processed: 100%%, QPS %d/S, RTQPS %d/S" % (
                int(processed / seconds), int(incremental[0] / incremental[1])))
            notifier(100.00, int(processed / seconds), round(seconds, 2), int(incremental[0] / incremental[1]))
            progress = 100.00
            break
        else:
            time.sleep(interval)
    seconds = time.monotonic() - start
    logging.info("Progress monitor finished")
    return {"total": processed, "progress": progress, "seconds": int(seconds), 'qps': int(processed / seconds)}
class ProximaSEBuildBench(object):
    """End-to-end index-build benchmark driver.

    Boots ProximaSE and the MySQL repository, creates a collection that
    mirrors the source table, monitors indexing progress until all records
    are built (or a timeout / service crash occurs), then writes a JSON
    report with the per-progress QPS/CPU/memory figures."""
    # Columns excluded from the forward (attribute) fields.
    _filter_columns = ('id', 'vector')
    def __init__(self, output_dir, arg_options):
        self._builds = ProximaSEBuilds(arg_options.build_root)
        self._context = BenchContext(output_dir, arg_options)
        self._source = None
        self._repository = None
        self._proxima_se = None
        self._client = None
        self._last_progress = -1.0
        self._summary = {"report": self._context.output(),
                         "progress_header": ['Progress', 'QPS(AVG)', 'Seconds', 'QPS(RT)', 'IndexSize', 'CPU',
                                             'MEM(GB)', 'Time'],
                         "progress_table": []}
        self._pool = ThreadPoolExecutor(max_workers=5, thread_name_prefix='BenchMonitors')
        self._futures = []
    @staticmethod
    def human_number(num):
        """Format *num* with a suffix for readability.

        NOTE(review): 'T' is used for the thousands range here; 'K' would
        be the conventional suffix -- confirm intent before relying on it."""
        if num > 1000000000:
            return f'{round(float(num) / 1000000000, 2)}B'
        elif num > 1000000:
            return f'{round(float(num) / 1000000, 2)}M'
        elif num > 1000:
            return f'{round(float(num) / 1000, 2)}T'
        return str(num)
    @staticmethod
    def summary_report(progress, report_file):
        """Print selected rows (by index) of one report's progress table."""
        try:
            with open(report_file, 'r') as report_fd:
                report = json.load(report_fd)
                threads = report['config']['proxima_se']['indexConfig']['buildThreadCount']
                print("Threads", "Total", *report['progress_header'])
                for idx in progress:
                    if 0 <= idx < len(report['progress_table']):
                        print(threads,
                              ProximaSEBuildBench.human_number(
                                  int(report['total'] / 100 * report['progress_table'][idx][0])),
                              *report['progress_table'][idx])
                    else:
                        break
        except Exception as e:
            logging.error(e)
    @staticmethod
    def summary_reports(progress, interval, *reports):
        """Print progress rows for every report file, selected either by a
        fixed row *interval* or by an explicit comma-separated *progress*
        index list."""
        if interval != 0:
            items = [int(i) for i in range(0, 10000, interval)]
        elif progress:
            items = [int(p) for p in progress.split(',')]
        else:
            logging.error("Failed to get interested progress")
            return
        for report in reports:
            ProximaSEBuildBench.summary_report(items, report)
    def init(self):
        """Validate build tree, context and source table; construct the
        service wrappers. Returns True when everything is ready."""
        if not self._builds.is_valid():
            logging.error("Proxima SE build is invalid, lost binary of proxima_se or mysql_repository")
            return False
        if not self._context.init():
            logging.error("Init bench context failed")
            return False
        self._source = MysqlRepoSource(self._context.options().jdbc, self._context.options().table)
        if not self._source.is_valid():
            logging.error(
                f'Can not init repository with jdbc: {self._context.options().jdbc}, '
                f'table: {self._context.options().table}')
            return False
        self._repository = ProximaSEMysqlRepo(self._builds.proxima_repo_binary(),
                                              self._context.proxima_se_repo_config_file(),
                                              self._context.log_dir())
        self._proxima_se = ProximaSE(self._builds.proxima_se_binary(),
                                     self._context.proxima_se_config_file(),
                                     self._context.log_dir())
        self._host, self._port = self._context.proxima_se_admin_address()
        return self._repository.init() and self._proxima_se.init()
    def cleanup(self):
        """Cascade cleanup through both services and the bench context."""
        if self._proxima_se:
            self._proxima_se.cleanup()
        if self._repository:
            self._repository.cleanup()
        if self._context:
            self._context.cleanup()
        return True
    def _max_lsn(self):
        """Latest LSN of the collection, or -1 on any failure."""
        try:
            status, collection = self._client.describe_collection(self._context.table())
            if not status.ok():
                print(status)
                return -1
            logging.debug(collection)
            return collection.latest_lsn_context.lsn
        except Exception as e:
            logging.error("BRPC Exception")
            logging.error(e)
            return -1
    def _collection_docs(self):
        # Total indexed document count, or -1 when stats are unavailable.
        status, stats = self._client.stats_collection(self._context.table())
        if status.ok():
            return stats.total_doc_count
        else:
            logging.debug(status)
            return -1
    def _collection_index_size(self):
        # Total index file size, or -1 when stats are unavailable.
        status, stats = self._client.stats_collection(self._context.table())
        if status.ok():
            return stats.total_index_file_size
        else:
            logging.debug(status)
            return -1
    def _report_progress(self, progress, qps, seconds, rtqps):
        """Append a progress row; throttled to >0.1% progress increments."""
        if progress >= 100 or progress - self._last_progress > 0.1:
            stats = self._proxima_se.stats()
            # noinspection PyTypeChecker
            self._summary['progress_table'].append(
                [progress, qps, seconds, rtqps, self._collection_index_size(), stats.cpu(), stats.memory_in_gb(),
                 datetime.now().strftime("%Y%m%d_%H:%M:%S")])
            self._last_progress = progress
    def _start_monitors(self):
        """Launch timeout, service-liveness and progress monitor threads."""
        total = min(self._source.counts(), self._context.counts())
        return [self._pool.submit(_timeout_monitor, lambda: self.has_monitor_finished(),
                                  self._context.timeout_in_seconds(), 5),
                self._pool.submit(_service_stopped_monitor, lambda: self.has_monitor_finished(), 1,
                                  self._repository, self._proxima_se),
                self._pool.submit(_progress_monitor, lambda: self.has_monitor_finished(),
                                  lambda: self._collection_docs(), total, self._context.output_flush_interval(),
                                  lambda progress, qps, seconds, rtqps: self._report_progress(progress, qps, seconds,
                                                                                             rtqps))]
    def start(self):
        """Boot ProximaSE, create the collection, then start the repository
        and the monitor threads. Returns True on success."""
        if self._proxima_se.start():
            # Give the server time to come up before connecting.
            time.sleep(10)
            self._client = SEClient(host=self._host, port=self._port)
            self.sync_schema()
            time.sleep(1)
            if self._repository.start():
                self._futures = self._start_monitors()
                return True
        return False
    def stop(self):
        """Collect monitor results into the summary, then stop services."""
        for done in as_completed(self._futures):
            self._summary.update(done.result())
        self._pool.shutdown()
        return self._proxima_se.stop() and self._repository.stop()
    def _valid_schema(self):
        # The source table must expose both an 'id' and a 'vector' column.
        schema = self._source.schema()
        columns = [field[0] for field in schema]
        try:
            columns.index("id")
            columns.index("vector")
        except ValueError:
            return False
        return True
    def _build_repository_config(self):
        # Credentials are taken from the jdbc URI itself.
        o = parse.urlparse(self._source.jdbc_str())
        return DatabaseRepository(self._context.repository(), self._source.jdbc_str(),
                                  self._context.table(), o.username, o.password)
    def _build_forward_metas(self, schema):
        # Every column except id/vector becomes a forward attribute.
        forwards = filter(lambda field: not field[0] in self._filter_columns, schema)
        return [field[0] for field in forwards]
    def _build_column_metas(self):
        # Init column meta
        return [IndexColumnParam(self._filter_columns[1], self._context.dimension())]
    def _build_collection_meta(self, schema):
        # Init collection meta
        return CollectionConfig(self._context.table(),
                                self._build_column_metas(),
                                self._build_forward_metas(schema),
                                self._build_repository_config(),
                                self._context.max_docs_per_segment())
    def sync_schema(self):
        """Create the ProximaSE collection mirroring the source table."""
        if self._proxima_se.running() and self._valid_schema():
            meta = self._build_collection_meta(self._source.schema())
            status = self._client.create_collection(meta)
            assert status.ok()
            _, collection = self._client.describe_collection(meta.collection_name)
            self._summary['collection'] = str(collection)
            return True
        else:
            logging.error("Can't sync table to ProximaSE")
            return False
    def has_monitor_finished(self):
        """True once any monitor future completed (shared stop signal)."""
        logging.debug("Enter")
        for _ in filter(lambda future: future.done() or future.cancelled(), self._futures):
            return True
        logging.debug("Exist")
        return False
    def wait_finish(self):
        """Block until the first monitor finishes."""
        logging.debug("Enter")
        for _ in as_completed(self._futures):
            # self._summary.update(done.result())
            break
        logging.debug("Exist")
        return True
    def summary(self):
        """The summary dict with the effective configs attached."""
        temp = self._summary
        temp['config'] = {'proxima_se': self._context.proxima_se_config(),
                          'repository': self._context.repository_config()}
        return temp
    def output_report(self):
        """Write the summary as pretty-printed JSON to the report file."""
        with open(self._context.report_file(), 'w+') as out:
            json.dump(self.summary(), out, indent=4)
def opt_parser():
    """Build the OptionParser describing all benchmark command-line flags."""
    arg_parser = OptionParser()
    arg_parser.add_option('--build_root', dest='build_root', default=None,
                          help="The build directory of ProximaSE, default value: [ENV variable PROXIMA_SE_BUILD_ROOT "
                               "or '$(pwd)/../build']")
    arg_parser.add_option('--repo', dest='jdbc', default=None,
                          help='The source of repository, represented by jdbc string')
    arg_parser.add_option('-t', '--table', dest='table', default=None, help='Target table sync to ProximaSE')
    arg_parser.add_option('--counts', dest='counts', default=None,
                          help='The number of records will be sync to ProximaSE')
    arg_parser.add_option('--log_dir', dest='log_dir', default=None, help='Log directory, default is logs')
    arg_parser.add_option('--grpc_port', dest='grpc_port', default=GRPC_PORT,
                          help=f'Proxima SE GRPC service port, default {GRPC_PORT}')
    arg_parser.add_option('--http_port', dest='http_port', default=HTTP_PORT,
                          help=f'Proxima SE GRPC service port, default {HTTP_PORT}')
    arg_parser.add_option('--index_build_threads', dest='index_build_threads', default=10,
                          help='Index Agent build threads count, default is 10')
    arg_parser.add_option('--index_build_qps', dest='index_build_qps', default=1000000,
                          help='Threshold QPS of incremental records, default 1000000')
    arg_parser.add_option('--index_directory', dest='index_directory', default=None,
                          help="Index directory, where indices located, default is 'indices'")
    arg_parser.add_option('--max_docs_per_segment', dest='max_docs_per_segment', default=1000000,
                          help='Max records per segment, default 1000000')
    arg_parser.add_option('--dimension', dest='dimension', default=512,
                          help='Dimension of vector')
    arg_parser.add_option('--meta_uri', dest='meta_uri', default=None, help='URI of meta store, meta/meta.sqlite')
    arg_parser.add_option('-o', '--output_dir', dest='output', default=None,
                          help='Output directory, default random directory')
    arg_parser.add_option('--cleanup', dest='cleanup', action='store_true', default=False,
                          help='Cleanup all the outputs after finished')
    arg_parser.add_option('--timeout', dest='timeout', default=86400,
                          help='Timeout in seconds, default is 86400')
    arg_parser.add_option('--interval', dest='interval', default=5.0,
                          help='Progress flush interval, default is 5 seconds')
    arg_parser.add_option('--report', dest='report', default=None,
                          help='Report file, default write to [output]/report.json')
    arg_parser.add_option('--summary_progress', dest='summary', default=None,
                          help="Extract interested (approximate Progress, separated by ',') progress records from "
                               "reports")
    arg_parser.add_option('--summary_interval', dest='summary_interval', default=0,
                          help="Extract interested progress records from reports")
    return arg_parser
def handle_help_and_exit(arg_options, arg_parser, nargs):
    """Print usage and exit(0) when the tool was invoked without arguments
    (or with an explicit help flag); otherwise return normally.

    The original implementation relied on ``arg_options.help`` raising
    AttributeError (optparse's built-in help option stores no attribute)
    to skip an otherwise-unconditional ``quit()`` placed inside the try
    block -- exception-driven control flow that would exit the program on
    any code path where the attribute happened to exist. This version
    makes the intent explicit and uses sys.exit instead of the REPL-only
    quit() helper."""
    if nargs == 1 or getattr(arg_options, 'help', False):
        arg_parser.print_help()
        sys.exit(0)
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO,
                        format="%(levelname)s %(asctime)s %(filename)s:%(lineno)d:%(funcName)s \t%(message)s")
    parser = opt_parser()
    (options, args) = parser.parse_args()
    handle_help_and_exit(options, parser, len(sys.argv))
    # Summary-only mode: just extract progress rows from existing reports.
    if options.summary or int(options.summary_interval) != 0:
        ProximaSEBuildBench.summary_reports(options.summary, int(options.summary_interval), *args)
        exit(0)
    logging.info(f'Arguments: {options}')
    # Default to a throwaway temp path when no output directory is given.
    output = options.output if options.output else tempfile.mktemp()
    logging.info(f'Run tools with output directory: {output}')
    code = 0
    task = ProximaSEBuildBench(output, options)
    if task.init():
        logging.info("Init bench tools succeed")
        task.start()
        task.wait_finish()
        task.stop()
        task.output_report()
    else:
        logging.error("Failed to init bench tools")
        code = 1
    summary = task.summary()
    # Drop the bulky per-progress table before logging the summary.
    del summary['progress_header']
    del summary['progress_table']
    logging.info(task.summary())
    task.cleanup()
    exit(code)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Proxima SE benchmark toolkits
#
from datetime import datetime
import json
import shutil
import sys
import logging
from concurrent.futures import as_completed
from concurrent.futures.thread import ThreadPoolExecutor
from optparse import OptionParser
from urllib.parse import urlparse
from google.protobuf.json_format import MessageToDict
from pyproximase import Client as SEClient
from pyproximase import *
from common.proxima_se_repo import MysqlRepoSource
from common.proxima_se_service import *
from urllib import parse
"""
Requirements:
1. Above Python3.6
Test Scenario:
------
Step 1: Prepared Repository for ProximaSE
Requirements:
i: table should have first column named by id with property auto_increment
ii: vector column should be prefixed by vector
iii: all columns treated as forward attributes except vector and id
Step 2: Clone source code of ProximaSE
cd (source of ProximaSE)
Step 3: Build ProximaSE
mkdir build; cd build; cmake ../; make -j
Step 4: Run Bench tools
cd benchmark; pip install -r requirements.txt; PYTHONPATH=$(pwd) python scripts/build_bench.py
------
"""
class BenchContext(object):
    """Holds all per-run benchmark settings and the on-disk layout.

    Derives the output/log/index/meta directories and the config files for
    both ProximaSE and the MySQL repository from the command-line options
    (*kwargs* is the optparse Values object returned by opt_parser())."""
    def __init__(self, output_dir, kwargs):
        self._output = os.path.realpath(output_dir)
        self._kwargs = kwargs
        # Fall back to <output>/logs and <output>/indices when not given.
        self._log_dir = kwargs.log_dir if kwargs.log_dir else os.path.join(self._output, 'logs')
        self._indices_dir = kwargs.index_directory if kwargs.index_directory else os.path.join(
            self._output, 'indices')
        self._meta_uri = kwargs.meta_uri
        self._meta_dir = None
        if self._meta_uri:
            # Only sqlite:// URIs carry a directory that we must create ourselves.
            uri = urlparse(self._meta_uri)
            if uri and uri.scheme.lower() == 'sqlite' and uri.path:
                self._meta_dir = os.path.dirname(uri.path)
                if self._meta_dir == "/":
                    logging.error("Can't create meta to root directory '/'")
                    self._meta_dir = None
        else:
            # Default meta store: a sqlite file under the output directory.
            self._meta_dir = os.path.join(self._output, 'meta')
            self._meta_uri = f'sqlite://{self._meta_dir}/meta.sqlite'
        self._repository = None
        self._table = kwargs.table
        self._dimension = int(kwargs.dimension)
        if kwargs.jdbc:
            # The repository name is the database name in the jdbc URI path.
            path = urlparse(kwargs.jdbc).path
            if path and len(path) != 0:
                self._repository = path.strip("/").rstrip("/")
        self._proxima_se_conf = None
        self._repository_conf = None
        self._report = None
    # --- simple accessors over the derived layout ---
    def output(self):
        return self._output
    def options(self):
        return self._kwargs
    def repository(self):
        return self._repository
    def table(self):
        return self._table
    def counts(self):
        # sys.maxsize effectively means "no limit".
        return int(self._kwargs.counts) if self._kwargs.counts else sys.maxsize
    def meta_dir(self):
        return self._meta_dir
    def indices_dir(self):
        return self._indices_dir
    def log_dir(self):
        return self._log_dir
    def proxima_se_log_dir(self):
        return os.path.join(self.log_dir(), "be")
    def repository_log_dir(self):
        return os.path.join(self.log_dir(), "repo")
    def timeout_in_seconds(self):
        return int(self._kwargs.timeout)
    def output_flush_interval(self):
        return float(self._kwargs.interval)
    def config_dir(self):
        return os.path.join(self._output, "conf")
    def proxima_se_config(self):
        """ProximaSE config as a plain dict (empty before init())."""
        if self._proxima_se_conf:
            return MessageToDict(self._proxima_se_conf)
        return {}
    def repository_config(self):
        return self._repository
    def proxima_se_config_file(self):
        return os.path.join(self.config_dir(), 'proxima_se.conf')
    def proxima_se_repo_config_file(self):
        return os.path.join(self.config_dir(), 'mysql_repo.conf')
    def proxima_se_admin_address(self):
        # The gRPC admin endpoint is always bound on localhost.
        return ['127.0.0.1', self._proxima_se_conf.common_config.grpc_listen_port]
    def max_docs_per_segment(self):
        return int(self._kwargs.max_docs_per_segment)
    def dimension(self):
        return self._dimension
    def report_file(self):
        return self._report
    def _init_bench_config(self):
        """Build and persist the ProximaSE and repository config files.

        Returns True when both configs were built and written."""
        self._proxima_se_conf = ProximaSE.build_config(log_directory=self.proxima_se_log_dir(),
                                                       grpc_port=int(self._kwargs.grpc_port),
                                                       http_port=int(self._kwargs.http_port),
                                                       index_build_threads=int(self._kwargs.index_build_threads),
                                                       index_build_qps=int(self._kwargs.index_build_qps),
                                                       index_directory=self.indices_dir(),
                                                       meta_uri=self._meta_uri)
        self._repository_conf = ProximaSEMysqlRepo.build_config(log_directory=self.repository_log_dir(),
                                                                index_port=int(self._kwargs.grpc_port),
                                                                repository_name=self._repository)
        if self._proxima_se_conf and self._repository_conf:
            ProximaSEService.write_config_file(self.proxima_se_config_file(), self._proxima_se_conf)
            ProximaSEService.write_file(self.proxima_se_repo_config_file(), self._repository_conf)
            return True
        return False
    @staticmethod
    def _create_directory(directory):
        # mkdir -p semantics: only create when missing.
        if not os.path.exists(directory):
            os.makedirs(directory)
    def _init_output_dir(self):
        """Create the whole on-disk layout (meta/indices/logs/conf)."""
        self._create_directory(self.output())
        if not self.meta_dir():
            return False
        logging.info(f'Create meta directory {self.meta_dir()}')
        self._create_directory(self.meta_dir())
        logging.info(f'Create indices directory {self.indices_dir()}')
        self._create_directory(self.indices_dir())
        logging.info(f'Create log directory {self.log_dir()}')
        self._create_directory(self.log_dir())
        self._create_directory(self.proxima_se_log_dir())
        self._create_directory(self.repository_log_dir())
        logging.info(f'Create config directory {self.config_dir()}')
        self._create_directory(self.config_dir())
        return True
    def _init_report(self):
        """Resolve the report path; defaults to <output>/report.json."""
        if self._kwargs.report:
            if os.path.exists(os.path.dirname(os.path.realpath(self._kwargs.report))):
                self._report = self._kwargs.report
            else:
                logging.error(f'Can not write report file: {self._kwargs.report}')
                return False
        else:
            self._report = os.path.join(self.output(), 'report.json')
        return True
    def init(self):
        """Prepare report path, directories and config files. True on success."""
        if not self._init_report():
            return False
        if not self._init_output_dir():
            logging.error(f'Initialize output directory({self.output()}) failed')
            return False
        if not self._init_bench_config():
            logging.error(f'Can not create bench configs')
            return False
        return True
    @staticmethod
    def _cleanup_dir(directory):
        if os.path.isdir(directory):
            shutil.rmtree(directory)
    def cleanup(self):
        """Remove all produced directories when --cleanup was given."""
        if self._kwargs.cleanup:
            self._cleanup_dir(self.output())
            self._cleanup_dir(self.meta_dir())
            self._cleanup_dir(self.indices_dir())
            self._cleanup_dir(self.log_dir())
            self._cleanup_dir(self.config_dir())
        return True
class ProximaSEBuilds(object):
    """Locates a ProximaSE build tree and the binaries it contains."""

    def __init__(self, roots):
        """Resolve the build root: explicit argument first, then the
        PROXIMA_SE_BUILD_ROOT environment variable, then ../build, and
        finally ../cmake-build-debug relative to the working directory."""
        if not roots:
            roots = os.getenv("PROXIMA_SE_BUILD_ROOT")
            if not roots:
                candidate = os.path.realpath(os.path.join(os.getcwd(), "..", "build"))
                if not os.path.isdir(candidate):
                    candidate = os.path.realpath(os.path.join(os.getcwd(), "..", "cmake-build-debug"))
                roots = candidate
        self._build_roots = roots
        logging.info(f'Locate Build Root of ProximaSE: {roots}')

    def _binary(self, name):
        # All binaries live under <build_root>/bin.
        return os.path.join(self._build_roots, 'bin', name)

    def proxima_se_binary(self):
        """Path of the proxima_se server binary."""
        return self._binary('proxima_se')

    def proxima_repo_binary(self):
        """Path of the mysql_repository binary."""
        return self._binary('mysql_repository')

    def is_valid(self):
        """True when the build root exists and both binaries are present."""
        return (os.path.isdir(self._build_roots)
                and os.path.isfile(self.proxima_se_binary())
                and os.path.isfile(self.proxima_repo_binary()))
def _timeout_monitor(stopped, timeout, interval):
logging.info(f'Timeout monitor started, sleep for {timeout} seconds')
slept = 0
while not stopped() and slept < timeout:
time.sleep(interval)
slept += interval
logging.info(f'Timeout monitor stopped')
if slept < timeout:
return {}
else:
return {"timeout": True}
def _service_stopped_monitor(stopped, interval, *args):
logging.info(f'Service monitor started, interval({interval})')
while not stopped():
services = map(lambda svc: svc.service_name(), filter(lambda svc: not svc.running(), args))
for service in services:
logging.info(f'{service} is not running')
return {"stopped": service}
time.sleep(interval)
logging.info(f'Service monitor stopped')
return {}
def _progress_monitor(stopped, callback, total, interval, notifier):
    """Poll *callback*() (processed-record count) every *interval* seconds
    until *total* records are processed or *stopped*() turns true.

    Reports via *notifier*(progress_pct, avg_qps, seconds, rt_qps) after
    every poll and returns a summary dict with the final figures."""
    logging.info(f'Start progress monitor, total({total})')
    start = time.monotonic()
    progress = 0.0
    # last == [processed, seconds] at the previous poll; the delta between
    # polls yields the real-time (incremental) QPS figure.
    last = [0, 0.0]
    processed = 0
    while not stopped():
        processed = callback()
        if processed < 0:
            # Negative means the stats query failed; give up monitoring.
            processed = 0
            logging.info("Can't get processed from callback")
            break
        progress = round(float(processed) / total * 100, 4)
        seconds = time.monotonic() - start
        incremental = [processed - last[0], seconds - last[1]]
        last = [processed, seconds]
        if progress > 100.00:
            # More rows arrived than expected (e.g. source kept growing).
            progress = 100.00
        print("Processed: %02.2f%%, QPS %d/S, RTQPS %d/S" % (
            progress, int(processed / seconds), int(incremental[0] / incremental[1])))
        notifier(progress, int(processed / seconds), round(seconds, 2), int(incremental[0] / incremental[1]))
        if processed >= total:
            # Emit a final 100% line/row before leaving the loop.
            print("Processed: 100%%, QPS %d/S, RTQPS %d/S" % (
                int(processed / seconds), int(incremental[0] / incremental[1])))
            notifier(100.00, int(processed / seconds), round(seconds, 2), int(incremental[0] / incremental[1]))
            progress = 100.00
            break
        else:
            time.sleep(interval)
    seconds = time.monotonic() - start
    logging.info("Progress monitor finished")
    return {"total": processed, "progress": progress, "seconds": int(seconds), 'qps': int(processed / seconds)}
class ProximaSEBuildBench(object):
_filter_columns = ('id', 'vector')
def __init__(self, output_dir, arg_options):
self._builds = ProximaSEBuilds(arg_options.build_root)
self._context = BenchContext(output_dir, arg_options)
self._source = None
self._repository = None
self._proxima_se = None
self._client = None
self._last_progress = -1.0
self._summary = {"report": self._context.output(),
"progress_header": ['Progress', 'QPS(AVG)', 'Seconds', 'QPS(RT)', 'IndexSize', 'CPU',
'MEM(GB)', 'Time'],
"progress_table": []}
self._pool = ThreadPoolExecutor(max_workers=5, thread_name_prefix='BenchMonitors')
self._futures = []
@staticmethod
def human_number(num):
if num > 1000000000:
return f'{round(float(num) / 1000000000, 2)}B'
elif num > 1000000:
return f'{round(float(num) / 1000000, 2)}M'
elif num > 1000:
return f'{round(float(num) / 1000, 2)}T'
return str(num)
@staticmethod
def summary_report(progress, report_file):
try:
with open(report_file, 'r') as report_fd:
report = json.load(report_fd)
threads = report['config']['proxima_se']['indexConfig']['buildThreadCount']
print("Threads", "Total", *report['progress_header'])
for idx in progress:
if 0 <= idx < len(report['progress_table']):
print(threads,
ProximaSEBuildBench.human_number(
int(report['total'] / 100 * report['progress_table'][idx][0])),
*report['progress_table'][idx])
else:
break
except Exception as e:
logging.error(e)
@staticmethod
def summary_reports(progress, interval, *reports):
if interval != 0:
items = [int(i) for i in range(0, 10000, interval)]
elif progress:
items = [int(p) for p in progress.split(',')]
else:
logging.error("Failed to get interested progress")
return
for report in reports:
ProximaSEBuildBench.summary_report(items, report)
def init(self):
if not self._builds.is_valid():
logging.error("Proxima SE build is invalid, lost binary of proxima_se or mysql_repository")
return False
if not self._context.init():
logging.error("Init bench context failed")
return False
self._source = MysqlRepoSource(self._context.options().jdbc, self._context.options().table)
if not self._source.is_valid():
logging.error(
f'Can not init repository with jdbc: {self._context.options().jdbc}, '
f'table: {self._context.options().table}')
return False
self._repository = ProximaSEMysqlRepo(self._builds.proxima_repo_binary(),
self._context.proxima_se_repo_config_file(),
self._context.log_dir())
self._proxima_se = ProximaSE(self._builds.proxima_se_binary(),
self._context.proxima_se_config_file(),
self._context.log_dir())
self._host, self._port = self._context.proxima_se_admin_address()
return self._repository.init() and self._proxima_se.init()
def cleanup(self):
if self._proxima_se:
self._proxima_se.cleanup()
if self._repository:
self._repository.cleanup()
if self._context:
self._context.cleanup()
return True
def _max_lsn(self):
try:
status, collection = self._client.describe_collection(self._context.table())
if not status.ok():
print(status)
return -1
logging.debug(collection)
return collection.latest_lsn_context.lsn
except Exception as e:
logging.error("BRPC Exception")
logging.error(e)
return -1
def _collection_docs(self):
status, stats = self._client.stats_collection(self._context.table())
if status.ok():
return stats.total_doc_count
else:
logging.debug(status)
return -1
def _collection_index_size(self):
status, stats = self._client.stats_collection(self._context.table())
if status.ok():
return stats.total_index_file_size
else:
logging.debug(status)
return -1
def _report_progress(self, progress, qps, seconds, rtqps):
if progress >= 100 or progress - self._last_progress > 0.1:
stats = self._proxima_se.stats()
# noinspection PyTypeChecker
self._summary['progress_table'].append(
[progress, qps, seconds, rtqps, self._collection_index_size(), stats.cpu(), stats.memory_in_gb(),
datetime.now().strftime("%Y%m%d_%H:%M:%S")])
self._last_progress = progress
def _start_monitors(self):
total = min(self._source.counts(), self._context.counts())
return [self._pool.submit(_timeout_monitor, lambda: self.has_monitor_finished(),
self._context.timeout_in_seconds(), 5),
self._pool.submit(_service_stopped_monitor, lambda: self.has_monitor_finished(), 1,
self._repository, self._proxima_se),
self._pool.submit(_progress_monitor, lambda: self.has_monitor_finished(),
lambda: self._collection_docs(), total, self._context.output_flush_interval(),
lambda progress, qps, seconds, rtqps: self._report_progress(progress, qps, seconds,
rtqps))]
def start(self):
if self._proxima_se.start():
time.sleep(10)
self._client = SEClient(host=self._host, port=self._port)
self.sync_schema()
time.sleep(1)
if self._repository.start():
self._futures = self._start_monitors()
return True
return False
def stop(self):
for done in as_completed(self._futures):
self._summary.update(done.result())
self._pool.shutdown()
return self._proxima_se.stop() and self._repository.stop()
def _valid_schema(self):
schema = self._source.schema()
columns = [field[0] for field in schema]
try:
columns.index("id")
columns.index("vector")
except ValueError:
return False
return True
def _build_repository_config(self):
o = parse.urlparse(self._source.jdbc_str())
return DatabaseRepository(self._context.repository(), self._source.jdbc_str(),
self._context.table(), o.username, o.password)
def _build_forward_metas(self, schema):
forwards = filter(lambda field: not field[0] in self._filter_columns, schema)
return [field[0] for field in forwards]
def _build_column_metas(self):
# Init column meta
return [IndexColumnParam(self._filter_columns[1], self._context.dimension())]
def _build_collection_meta(self, schema):
# Init collection meta
return CollectionConfig(self._context.table(),
self._build_column_metas(),
self._build_forward_metas(schema),
self._build_repository_config(),
self._context.max_docs_per_segment())
def sync_schema(self):
if self._proxima_se.running() and self._valid_schema():
meta = self._build_collection_meta(self._source.schema())
status = self._client.create_collection(meta)
assert status.ok()
_, collection = self._client.describe_collection(meta.collection_name)
self._summary['collection'] = str(collection)
return True
else:
logging.error("Can't sync table to ProximaSE")
return False
def has_monitor_finished(self):
logging.debug("Enter")
for _ in filter(lambda future: future.done() or future.cancelled(), self._futures):
return True
logging.debug("Exist")
return False
def wait_finish(self):
logging.debug("Enter")
for _ in as_completed(self._futures):
# self._summary.update(done.result())
break
logging.debug("Exist")
return True
def summary(self):
temp = self._summary
temp['config'] = {'proxima_se': self._context.proxima_se_config(),
'repository': self._context.repository_config()}
return temp
def output_report(self):
with open(self._context.report_file(), 'w+') as out:
json.dump(self.summary(), out, indent=4)
def opt_parser():
arg_parser = OptionParser()
arg_parser.add_option('--build_root', dest='build_root', default=None,
help="The build directory of ProximaSE, default value: [ENV variable PROXIMA_SE_BUILD_ROOT "
"or '$(pwd)/../build']")
arg_parser.add_option('--repo', dest='jdbc', default=None,
help='The source of repository, represented by jdbc string')
arg_parser.add_option('-t', '--table', dest='table', default=None, help='Target table sync to ProximaSE')
arg_parser.add_option('--counts', dest='counts', default=None,
help='The number of records will be sync to ProximaSE')
arg_parser.add_option('--log_dir', dest='log_dir', default=None, help='Log directory, default is logs')
arg_parser.add_option('--grpc_port', dest='grpc_port', default=GRPC_PORT,
help=f'Proxima SE GRPC service port, default {GRPC_PORT}')
arg_parser.add_option('--http_port', dest='http_port', default=HTTP_PORT,
help=f'Proxima SE GRPC service port, default {HTTP_PORT}')
arg_parser.add_option('--index_build_threads', dest='index_build_threads', default=10,
help='Index Agent build threads count, default is 10')
arg_parser.add_option('--index_build_qps', dest='index_build_qps', default=1000000,
help='Threshold QPS of incremental records, default 1000000')
arg_parser.add_option('--index_directory', dest='index_directory', default=None,
help="Index directory, where indices located, default is 'indices'")
arg_parser.add_option('--max_docs_per_segment', dest='max_docs_per_segment', default=1000000,
help='Max records per segment, default 1000000')
arg_parser.add_option('--dimension', dest='dimension', default=512,
help='Dimension of vector')
arg_parser.add_option('--meta_uri', dest='meta_uri', default=None, help='URI of meta store, meta/meta.sqlite')
arg_parser.add_option('-o', '--output_dir', dest='output', default=None,
help='Output directory, default random directory')
arg_parser.add_option('--cleanup', dest='cleanup', action='store_true', default=False,
help='Cleanup all the outputs after finished')
arg_parser.add_option('--timeout', dest='timeout', default=86400,
help='Timeout in seconds, default is 86400')
arg_parser.add_option('--interval', dest='interval', default=5.0,
help='Progress flush interval, default is 5 seconds')
arg_parser.add_option('--report', dest='report', default=None,
help='Report file, default write to [output]/report.json')
arg_parser.add_option('--summary_progress', dest='summary', default=None,
help="Extract interested (approximate Progress, separated by ',') progress records from "
"reports")
arg_parser.add_option('--summary_interval', dest='summary_interval', default=0,
help="Extract interested progress records from reports")
return arg_parser
def handle_help_and_exit(arg_options, arg_parser, nargs):
try:
arg_parser.print_help() if nargs == 1 or arg_options.help else None
quit()
except AttributeError:
pass
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO,
format="%(levelname)s %(asctime)s %(filename)s:%(lineno)d:%(funcName)s \t%(message)s")
parser = opt_parser()
(options, args) = parser.parse_args()
handle_help_and_exit(options, parser, len(sys.argv))
if options.summary or int(options.summary_interval) != 0:
ProximaSEBuildBench.summary_reports(options.summary, int(options.summary_interval), *args)
exit(0)
logging.info(f'Arguments: {options}')
output = options.output if options.output else tempfile.mktemp()
logging.info(f'Run tools with output directory: {output}')
code = 0
task = ProximaSEBuildBench(output, options)
if task.init():
logging.info("Init bench tools succeed")
task.start()
task.wait_finish()
task.stop()
task.output_report()
else:
logging.error("Failed to init bench tools")
code = 1
summary = task.summary()
del summary['progress_header']
del summary['progress_table']
logging.info(task.summary())
task.cleanup()
exit(code) | en | 0.725329 | #!/usr/bin/env python # -*- coding: utf-8 -*- # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # # Proxima SE benchmark toolkits # Requirements: 1. Above Python3.6 Test Scenario: ------ Step 1: Prepared Repository for ProximaSE Requirements: i: table should have first column named by id with property auto_increment ii: vector column should be prefixed by vector iii: all columns treated as forward attributes except vector and id Step 2: Clone source code of ProximaSE cd (source of ProximaSE) Step 3: Build ProximaSE mkdir build; cd build; cmake ../; make -j Step 4: Run Bench tools cd benchmark; pip install -r requirements.txt; PYTHONPATH=$(pwd) python scripts/build_bench.py ------ # noinspection PyTypeChecker # Init column meta # Init collection meta # self._summary.update(done.result()) | 1.947261 | 2 |
footprints/ui/ui_checkout_form.py | enwawerueli/footprints | 1 | 6619738 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'forms/checkout_form.ui',
# licensing of 'forms/checkout_form.ui' applies.
#
# Created: Tue Jan 29 22:11:15 2019
# by: pyside2-uic running on PySide2 5.11.2
#
# WARNING! All changes made in this file will be lost!
from PySide2 import QtCore, QtGui, QtWidgets
class Ui_CheckoutForm(object):
def setupUi(self, CheckoutForm):
CheckoutForm.setObjectName("CheckoutForm")
CheckoutForm.resize(393, 286)
CheckoutForm.setSizeGripEnabled(True)
CheckoutForm.setModal(True)
self.verticalLayout_2 = QtWidgets.QVBoxLayout(CheckoutForm)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.groupBox = QtWidgets.QGroupBox(CheckoutForm)
self.groupBox.setObjectName("groupBox")
self.verticalLayout = QtWidgets.QVBoxLayout(self.groupBox)
self.verticalLayout.setObjectName("verticalLayout")
self.gridLayout = QtWidgets.QGridLayout()
self.gridLayout.setObjectName("gridLayout")
self.label = QtWidgets.QLabel(self.groupBox)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.label_2 = QtWidgets.QLabel(self.groupBox)
self.label_2.setObjectName("label_2")
self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
self.discount_le = QtWidgets.QLineEdit(self.groupBox)
self.discount_le.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.discount_le.setObjectName("discount_le")
self.gridLayout.addWidget(self.discount_le, 1, 1, 1, 2)
self.absolute_rb = QtWidgets.QRadioButton(self.groupBox)
self.absolute_rb.setChecked(True)
self.absolute_rb.setObjectName("absolute_rb")
self.gridLayout.addWidget(self.absolute_rb, 2, 1, 1, 1)
self.percent_rb = QtWidgets.QRadioButton(self.groupBox)
self.percent_rb.setChecked(False)
self.percent_rb.setObjectName("percent_rb")
self.gridLayout.addWidget(self.percent_rb, 2, 2, 1, 1)
self.label_5 = QtWidgets.QLabel(self.groupBox)
self.label_5.setObjectName("label_5")
self.gridLayout.addWidget(self.label_5, 3, 0, 1, 1)
self.label_3 = QtWidgets.QLabel(self.groupBox)
self.label_3.setObjectName("label_3")
self.gridLayout.addWidget(self.label_3, 4, 0, 1, 1)
self.cash_le = QtWidgets.QLineEdit(self.groupBox)
self.cash_le.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.cash_le.setObjectName("cash_le")
self.gridLayout.addWidget(self.cash_le, 4, 1, 1, 2)
self.label_4 = QtWidgets.QLabel(self.groupBox)
self.label_4.setObjectName("label_4")
self.gridLayout.addWidget(self.label_4, 5, 0, 1, 1)
self.total_lb = QtWidgets.QLabel(self.groupBox)
font = QtGui.QFont()
font.setWeight(50)
font.setBold(False)
self.total_lb.setFont(font)
self.total_lb.setFrameShape(QtWidgets.QFrame.NoFrame)
self.total_lb.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.total_lb.setObjectName("total_lb")
self.gridLayout.addWidget(self.total_lb, 0, 1, 1, 2)
self.amount_due_lb = QtWidgets.QLabel(self.groupBox)
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.amount_due_lb.setFont(font)
self.amount_due_lb.setFrameShape(QtWidgets.QFrame.NoFrame)
self.amount_due_lb.setFrameShadow(QtWidgets.QFrame.Plain)
self.amount_due_lb.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.amount_due_lb.setObjectName("amount_due_lb")
self.gridLayout.addWidget(self.amount_due_lb, 3, 1, 1, 2)
self.change_lb = QtWidgets.QLabel(self.groupBox)
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.change_lb.setFont(font)
self.change_lb.setFrameShape(QtWidgets.QFrame.NoFrame)
self.change_lb.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.change_lb.setObjectName("change_lb")
self.gridLayout.addWidget(self.change_lb, 5, 1, 1, 2)
self.verticalLayout.addLayout(self.gridLayout)
self.verticalLayout_2.addWidget(self.groupBox)
self.buttonBox = QtWidgets.QDialogButtonBox(CheckoutForm)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout_2.addWidget(self.buttonBox)
self.label_2.setBuddy(self.discount_le)
self.label_3.setBuddy(self.cash_le)
self.retranslateUi(CheckoutForm)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("accepted()"), CheckoutForm.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("rejected()"), CheckoutForm.reject)
QtCore.QMetaObject.connectSlotsByName(CheckoutForm)
CheckoutForm.setTabOrder(self.discount_le, self.cash_le)
CheckoutForm.setTabOrder(self.cash_le, self.buttonBox)
def retranslateUi(self, CheckoutForm):
CheckoutForm.setWindowTitle(QtWidgets.QApplication.translate("CheckoutForm", "Dialog", None, -1))
self.groupBox.setTitle(QtWidgets.QApplication.translate("CheckoutForm", "Checkout", None, -1))
self.label.setText(QtWidgets.QApplication.translate("CheckoutForm", "Total Amount :", None, -1))
self.label_2.setText(QtWidgets.QApplication.translate("CheckoutForm", "&Discount :", None, -1))
self.discount_le.setPlaceholderText(QtWidgets.QApplication.translate("CheckoutForm", "0.00", None, -1))
self.absolute_rb.setText(QtWidgets.QApplication.translate("CheckoutForm", "A&bsolute", None, -1))
self.percent_rb.setText(QtWidgets.QApplication.translate("CheckoutForm", "P&ercent", None, -1))
self.label_5.setText(QtWidgets.QApplication.translate("CheckoutForm", "Amount Due :", None, -1))
self.label_3.setText(QtWidgets.QApplication.translate("CheckoutForm", "C&ash Tendered :", None, -1))
self.cash_le.setPlaceholderText(QtWidgets.QApplication.translate("CheckoutForm", "0.00", None, -1))
self.label_4.setText(QtWidgets.QApplication.translate("CheckoutForm", "Change Due :", None, -1))
self.total_lb.setText(QtWidgets.QApplication.translate("CheckoutForm", "0.00", None, -1))
self.amount_due_lb.setText(QtWidgets.QApplication.translate("CheckoutForm", "0.00", None, -1))
self.change_lb.setText(QtWidgets.QApplication.translate("CheckoutForm", "0.00", None, -1))
| # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'forms/checkout_form.ui',
# licensing of 'forms/checkout_form.ui' applies.
#
# Created: Tue Jan 29 22:11:15 2019
# by: pyside2-uic running on PySide2 5.11.2
#
# WARNING! All changes made in this file will be lost!
from PySide2 import QtCore, QtGui, QtWidgets
class Ui_CheckoutForm(object):
def setupUi(self, CheckoutForm):
CheckoutForm.setObjectName("CheckoutForm")
CheckoutForm.resize(393, 286)
CheckoutForm.setSizeGripEnabled(True)
CheckoutForm.setModal(True)
self.verticalLayout_2 = QtWidgets.QVBoxLayout(CheckoutForm)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.groupBox = QtWidgets.QGroupBox(CheckoutForm)
self.groupBox.setObjectName("groupBox")
self.verticalLayout = QtWidgets.QVBoxLayout(self.groupBox)
self.verticalLayout.setObjectName("verticalLayout")
self.gridLayout = QtWidgets.QGridLayout()
self.gridLayout.setObjectName("gridLayout")
self.label = QtWidgets.QLabel(self.groupBox)
self.label.setObjectName("label")
self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
self.label_2 = QtWidgets.QLabel(self.groupBox)
self.label_2.setObjectName("label_2")
self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
self.discount_le = QtWidgets.QLineEdit(self.groupBox)
self.discount_le.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.discount_le.setObjectName("discount_le")
self.gridLayout.addWidget(self.discount_le, 1, 1, 1, 2)
self.absolute_rb = QtWidgets.QRadioButton(self.groupBox)
self.absolute_rb.setChecked(True)
self.absolute_rb.setObjectName("absolute_rb")
self.gridLayout.addWidget(self.absolute_rb, 2, 1, 1, 1)
self.percent_rb = QtWidgets.QRadioButton(self.groupBox)
self.percent_rb.setChecked(False)
self.percent_rb.setObjectName("percent_rb")
self.gridLayout.addWidget(self.percent_rb, 2, 2, 1, 1)
self.label_5 = QtWidgets.QLabel(self.groupBox)
self.label_5.setObjectName("label_5")
self.gridLayout.addWidget(self.label_5, 3, 0, 1, 1)
self.label_3 = QtWidgets.QLabel(self.groupBox)
self.label_3.setObjectName("label_3")
self.gridLayout.addWidget(self.label_3, 4, 0, 1, 1)
self.cash_le = QtWidgets.QLineEdit(self.groupBox)
self.cash_le.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.cash_le.setObjectName("cash_le")
self.gridLayout.addWidget(self.cash_le, 4, 1, 1, 2)
self.label_4 = QtWidgets.QLabel(self.groupBox)
self.label_4.setObjectName("label_4")
self.gridLayout.addWidget(self.label_4, 5, 0, 1, 1)
self.total_lb = QtWidgets.QLabel(self.groupBox)
font = QtGui.QFont()
font.setWeight(50)
font.setBold(False)
self.total_lb.setFont(font)
self.total_lb.setFrameShape(QtWidgets.QFrame.NoFrame)
self.total_lb.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.total_lb.setObjectName("total_lb")
self.gridLayout.addWidget(self.total_lb, 0, 1, 1, 2)
self.amount_due_lb = QtWidgets.QLabel(self.groupBox)
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.amount_due_lb.setFont(font)
self.amount_due_lb.setFrameShape(QtWidgets.QFrame.NoFrame)
self.amount_due_lb.setFrameShadow(QtWidgets.QFrame.Plain)
self.amount_due_lb.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.amount_due_lb.setObjectName("amount_due_lb")
self.gridLayout.addWidget(self.amount_due_lb, 3, 1, 1, 2)
self.change_lb = QtWidgets.QLabel(self.groupBox)
font = QtGui.QFont()
font.setWeight(75)
font.setBold(True)
self.change_lb.setFont(font)
self.change_lb.setFrameShape(QtWidgets.QFrame.NoFrame)
self.change_lb.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter)
self.change_lb.setObjectName("change_lb")
self.gridLayout.addWidget(self.change_lb, 5, 1, 1, 2)
self.verticalLayout.addLayout(self.gridLayout)
self.verticalLayout_2.addWidget(self.groupBox)
self.buttonBox = QtWidgets.QDialogButtonBox(CheckoutForm)
self.buttonBox.setOrientation(QtCore.Qt.Horizontal)
self.buttonBox.setStandardButtons(QtWidgets.QDialogButtonBox.Cancel|QtWidgets.QDialogButtonBox.Ok)
self.buttonBox.setObjectName("buttonBox")
self.verticalLayout_2.addWidget(self.buttonBox)
self.label_2.setBuddy(self.discount_le)
self.label_3.setBuddy(self.cash_le)
self.retranslateUi(CheckoutForm)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("accepted()"), CheckoutForm.accept)
QtCore.QObject.connect(self.buttonBox, QtCore.SIGNAL("rejected()"), CheckoutForm.reject)
QtCore.QMetaObject.connectSlotsByName(CheckoutForm)
CheckoutForm.setTabOrder(self.discount_le, self.cash_le)
CheckoutForm.setTabOrder(self.cash_le, self.buttonBox)
def retranslateUi(self, CheckoutForm):
CheckoutForm.setWindowTitle(QtWidgets.QApplication.translate("CheckoutForm", "Dialog", None, -1))
self.groupBox.setTitle(QtWidgets.QApplication.translate("CheckoutForm", "Checkout", None, -1))
self.label.setText(QtWidgets.QApplication.translate("CheckoutForm", "Total Amount :", None, -1))
self.label_2.setText(QtWidgets.QApplication.translate("CheckoutForm", "&Discount :", None, -1))
self.discount_le.setPlaceholderText(QtWidgets.QApplication.translate("CheckoutForm", "0.00", None, -1))
self.absolute_rb.setText(QtWidgets.QApplication.translate("CheckoutForm", "A&bsolute", None, -1))
self.percent_rb.setText(QtWidgets.QApplication.translate("CheckoutForm", "P&ercent", None, -1))
self.label_5.setText(QtWidgets.QApplication.translate("CheckoutForm", "Amount Due :", None, -1))
self.label_3.setText(QtWidgets.QApplication.translate("CheckoutForm", "C&ash Tendered :", None, -1))
self.cash_le.setPlaceholderText(QtWidgets.QApplication.translate("CheckoutForm", "0.00", None, -1))
self.label_4.setText(QtWidgets.QApplication.translate("CheckoutForm", "Change Due :", None, -1))
self.total_lb.setText(QtWidgets.QApplication.translate("CheckoutForm", "0.00", None, -1))
self.amount_due_lb.setText(QtWidgets.QApplication.translate("CheckoutForm", "0.00", None, -1))
self.change_lb.setText(QtWidgets.QApplication.translate("CheckoutForm", "0.00", None, -1))
| en | 0.757697 | # -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'forms/checkout_form.ui', # licensing of 'forms/checkout_form.ui' applies. # # Created: Tue Jan 29 22:11:15 2019 # by: pyside2-uic running on PySide2 5.11.2 # # WARNING! All changes made in this file will be lost! | 1.599681 | 2 |
Prophet/Legacy/__init__.py | zen0bit/menu-maker | 0 | 6619739 | <filename>Prophet/Legacy/__init__.py
import sys
import os
import glob
import re
from Keywords import Keyword as Kw, Set as KwS
from Prophet.Categories import *
from Prophet import msg
import Prophet
class App(Prophet.App):
"""A simple application that may have different executables"""
pref = 30
def __new__(cls):
try:
self = cls.__inst__
if not self:
raise Prophet.NotSet
except AttributeError:
self = cls.__inst__ = object.__new__(cls)
try:
self.__setup__()
except Prophet.NotSet:
cls.__inst__ = None
raise
return self
def setKeywords(self):
super(App, self).setKeywords()
self.keywords |= KwS(Legacy)
def setExename(self):
# Obtain known executable names for the application
try:
exes = self.exes
except AttributeError:
# If none were explicitly specified, use self class name
exes = [self.__class__.__name__]
prefixes = self.getPrefixes()
paths = self.getPaths()
valids = []
for x in exes:
for pfx, bps in prefixes.binpaths.items():
try:
exe = bps.which(x)
if self.valid(pfx, exe):
valids.append((pfx, exe))
except Prophet.NotFound:
pass
try:
self.prefix, self.exename = self.select(valids)
except Prophet.NotFound:
raise Prophet.NotSet
def valid(self, pfx, exe):
return True
def select(self, valids):
if len(valids):
return valids[0]
else:
raise Prophet.NotFound
class ConsoleApp(Prophet.App):
"""Mixin class for the console application that must be run in terminal"""
def setKeywords(self):
super(ConsoleApp, self).setKeywords()
self.keywords |= KwS(ConsoleOnly)
class X11App(Prophet.App):
"""Mixin class for the X11 GUI application"""
class ZeroG(App):
"""Application installed by the ZeroG LaunchAnywhere system.
This is usually a commercial Java application"""
registry = "~/.com.zerog.registry.xml"
def getPrefixes(self):
prefixes = super(ZeroG, self).getPrefixes()
try:
zerog = open(os.path.expanduser(self.registry), "r")
pattern = re.compile(
".*<product.*name=\"(%s)\".*location=\"(.*)\".*last_modified=\"(.*)\".*>.*" %
self.magic)
found = []
for x in zerog:
rx = pattern.match(x)
if rx:
found.append((rx.group(3), rx.group(1), rx.group(2)))
zerog.close()
found.sort()
found.reverse()
prefixes = Prophet.PrefixSet([x[2] for x in found]) + prefixes
except IOError:
pass
return prefixes
class DropIn(App):
maxDepth = 3
dropRoots = ["~"]
class StopDescention(Exception):
def __init__(self, value):
self.value = value
def getPrefixes(self):
prefixes = super(DropIn, self).getPrefixes()
try:
for r in self.dropRoots:
self.descend(os.path.expanduser(r), 1)
except DropIn.StopDescention as e:
prefixes += e.value
return prefixes
def descend(self, path, depth):
self.relevant(path)
if depth > self.maxDepth:
return
try:
for x in os.listdir(path):
dir = os.path.join(path, x)
if not x.startswith(".") and os.path.isdir(dir):
self.descend(dir, depth + 1)
except OSError:
pass
__legacy__ = ["Development", "Editor", "Emulator",
"Multimedia", "Graphics", "Network", "Shell"]
entries = [] # List of all legacy entries found
def _register(module, this=True):
"""Import and store the specified module along with all its submodules"""
try:
names = module.__legacy__
except AttributeError:
names = []
if this:
for k, v in module.__dict__.items():
if not k.startswith("_") and isinstance(
v,
type) and issubclass(
v,
Prophet.App):
entries.append(v)
for x in names:
name = module.__name__ + "." + x
try:
__import__(name)
except ImportError:
raise ImportError("No module named " + name)
imp = sys.modules[name]
_register(imp)
def setup():
_register(sys.modules[__name__], this=False)
def scan():
result = []
msg(" legacy...", newline=False)
for x in entries:
try:
result.append(x())
except Prophet.NotSet:
pass
msg(" %d apps found" % len(result))
return result
| <filename>Prophet/Legacy/__init__.py
import sys
import os
import glob
import re
from Keywords import Keyword as Kw, Set as KwS
from Prophet.Categories import *
from Prophet import msg
import Prophet
class App(Prophet.App):
"""A simple application that may have different executables"""
pref = 30
def __new__(cls):
try:
self = cls.__inst__
if not self:
raise Prophet.NotSet
except AttributeError:
self = cls.__inst__ = object.__new__(cls)
try:
self.__setup__()
except Prophet.NotSet:
cls.__inst__ = None
raise
return self
def setKeywords(self):
super(App, self).setKeywords()
self.keywords |= KwS(Legacy)
def setExename(self):
# Obtain known executable names for the application
try:
exes = self.exes
except AttributeError:
# If none were explicitly specified, use self class name
exes = [self.__class__.__name__]
prefixes = self.getPrefixes()
paths = self.getPaths()
valids = []
for x in exes:
for pfx, bps in prefixes.binpaths.items():
try:
exe = bps.which(x)
if self.valid(pfx, exe):
valids.append((pfx, exe))
except Prophet.NotFound:
pass
try:
self.prefix, self.exename = self.select(valids)
except Prophet.NotFound:
raise Prophet.NotSet
def valid(self, pfx, exe):
return True
def select(self, valids):
if len(valids):
return valids[0]
else:
raise Prophet.NotFound
class ConsoleApp(Prophet.App):
"""Mixin class for the console application that must be run in terminal"""
def setKeywords(self):
super(ConsoleApp, self).setKeywords()
self.keywords |= KwS(ConsoleOnly)
class X11App(Prophet.App):
"""Mixin class for the X11 GUI application"""
class ZeroG(App):
"""Application installed by the ZeroG LaunchAnywhere system.
This is usually a commercial Java application"""
registry = "~/.com.zerog.registry.xml"
def getPrefixes(self):
prefixes = super(ZeroG, self).getPrefixes()
try:
zerog = open(os.path.expanduser(self.registry), "r")
pattern = re.compile(
".*<product.*name=\"(%s)\".*location=\"(.*)\".*last_modified=\"(.*)\".*>.*" %
self.magic)
found = []
for x in zerog:
rx = pattern.match(x)
if rx:
found.append((rx.group(3), rx.group(1), rx.group(2)))
zerog.close()
found.sort()
found.reverse()
prefixes = Prophet.PrefixSet([x[2] for x in found]) + prefixes
except IOError:
pass
return prefixes
class DropIn(App):
maxDepth = 3
dropRoots = ["~"]
class StopDescention(Exception):
def __init__(self, value):
self.value = value
def getPrefixes(self):
prefixes = super(DropIn, self).getPrefixes()
try:
for r in self.dropRoots:
self.descend(os.path.expanduser(r), 1)
except DropIn.StopDescention as e:
prefixes += e.value
return prefixes
def descend(self, path, depth):
self.relevant(path)
if depth > self.maxDepth:
return
try:
for x in os.listdir(path):
dir = os.path.join(path, x)
if not x.startswith(".") and os.path.isdir(dir):
self.descend(dir, depth + 1)
except OSError:
pass
__legacy__ = ["Development", "Editor", "Emulator",
"Multimedia", "Graphics", "Network", "Shell"]
entries = [] # List of all legacy entries found
def _register(module, this=True):
"""Import and store the specified module along with all its submodules"""
try:
names = module.__legacy__
except AttributeError:
names = []
if this:
for k, v in module.__dict__.items():
if not k.startswith("_") and isinstance(
v,
type) and issubclass(
v,
Prophet.App):
entries.append(v)
for x in names:
name = module.__name__ + "." + x
try:
__import__(name)
except ImportError:
raise ImportError("No module named " + name)
imp = sys.modules[name]
_register(imp)
def setup():
_register(sys.modules[__name__], this=False)
def scan():
result = []
msg(" legacy...", newline=False)
for x in entries:
try:
result.append(x())
except Prophet.NotSet:
pass
msg(" %d apps found" % len(result))
return result
| en | 0.917069 | A simple application that may have different executables # Obtain known executable names for the application # If none were explicitly specified, use self class name Mixin class for the console application that must be run in terminal Mixin class for the X11 GUI application Application installed by the ZeroG LaunchAnywhere system. This is usually a commercial Java application # List of all legacy entries found Import and store the specified module along with all its submodules | 2.284033 | 2 |
common/utility/folder.py | KGMSFT/integral-human-pose | 472 | 6619740 | <reponame>KGMSFT/integral-human-pose<filename>common/utility/folder.py
import os
def make_folder(folder_name):
if not os.path.exists(folder_name):
os.makedirs(folder_name)
def change_tuple_element(t, dim, val):
l = list(t)
l[dim] = val
return tuple(l) | import os
def make_folder(folder_name):
if not os.path.exists(folder_name):
os.makedirs(folder_name)
def change_tuple_element(t, dim, val):
l = list(t)
l[dim] = val
return tuple(l) | none | 1 | 3.072796 | 3 | |
awsimple/mock.py | jamesabel/awsimple | 21 | 6619741 | import os
from tobool import to_bool
use_moto_mock_env_var = "AWSIMPLE_USE_MOTO_MOCK"
def is_mock() -> bool:
return to_bool(os.environ.get(use_moto_mock_env_var, "0"))
| import os
from tobool import to_bool
use_moto_mock_env_var = "AWSIMPLE_USE_MOTO_MOCK"
def is_mock() -> bool:
return to_bool(os.environ.get(use_moto_mock_env_var, "0"))
| none | 1 | 1.908693 | 2 | |
bin/other/manga_site.py | Yokotes/MangaObserver | 1 | 6619742 | <gh_stars>1-10
# =========================================================================
# Copyright 2020 <NAME>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#==========================================================================
class MangaSite():
def __init__(self, name, xpaths, test_link = ''):
self.name = name
self.xpaths = xpaths
self.optimized = False
self.test_link = test_link | # =========================================================================
# Copyright 2020 <NAME>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#==========================================================================
class MangaSite():
def __init__(self, name, xpaths, test_link = ''):
self.name = name
self.xpaths = xpaths
self.optimized = False
self.test_link = test_link | en | 0.769182 | # ========================================================================= # Copyright 2020 <NAME> # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # http://www.apache.org/licenses/LICENSE-2.0 # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. #========================================================================== | 2.057409 | 2 |
trello_helper/models.py | mte-unicamp/captacao-talento | 0 | 6619743 | # Python std library
import os
import time
# Third Party
import requests as rq
# Django
from django.db import models
# Project
from bot.models import Company, Contractor, PostSeller, Reminder, Seller
from globalvars.models import Global
class Helper(models.Model):
    """Static helpers that wrap the Trello v1 REST API.

    Credentials are read from the TRELLO_KEY / TRELLO_TOKEN environment
    variables on every call; every method returns the raw ``requests``
    Response object for the caller to inspect.
    """
    # Base URL shared by every Trello API endpoint.
    api_url = "https://api.trello.com/1"
    @staticmethod
    def generic_request(ext_obj, obj_id, nested_obj):
        # Build "<api>/<ext_obj>/<obj_id>/<nested_obj>" plus the auth query
        # parameters common to every request.
        url = "{}/{}/{}/{}".format(Helper.api_url, ext_obj, obj_id, nested_obj)
        querystring = {
            "key": os.environ["TRELLO_KEY"],
            "token": os.environ["TRELLO_TOKEN"],
        }
        time.sleep(0.05)  # this avoids making too many requests
        return url, querystring
    @staticmethod
    def get_nested_objs(ext_obj, obj_id, nested_obj=""):
        # GET an object, or one of its nested collections when nested_obj
        # is given (e.g. a board's "cards" or "labels").
        url, querystring = Helper.generic_request(ext_obj, obj_id, nested_obj)
        return rq.get(url, params=querystring)
    @staticmethod
    def post_card(name, list_id):
        # Create a new card called *name* on the list *list_id*.
        url = f"{Helper.api_url}/cards"
        querystring = {
            "name": name,
            "idList": list_id,
            "key": os.environ["TRELLO_KEY"],
            "token": os.environ["TRELLO_TOKEN"],
        }
        return rq.post(url, params=querystring)
    @staticmethod
    def post_list(name, board_id):
        # Create a new list called *name* on the board *board_id*.
        url = f"{Helper.api_url}/lists"
        querystring = {
            "name": name,
            "idBoard": board_id,
            "key": os.environ["TRELLO_KEY"],
            "token": os.environ["TRELLO_TOKEN"],
        }
        return rq.post(url, params=querystring)
    @staticmethod
    def put_card_in_list(card_id, list_id):
        # Move an existing card onto the list *list_id*.
        url = f"{Helper.api_url}/cards/{card_id}"
        querystring = {
            "idList": list_id,
            "key": os.environ["TRELLO_KEY"],
            "token": os.environ["TRELLO_TOKEN"],
        }
        return rq.put(url, params=querystring)
    @staticmethod
    def post_label(card_id, label_id):
        # Attach the label *label_id* to the card *card_id*.
        url, querystring = Helper.generic_request("cards", card_id, "idLabels")
        querystring.update({"value": label_id})
        return rq.post(url, params=querystring)
    @staticmethod
    def delete_label(card_id, label_id):
        # Detach the label *label_id* from the card *card_id*.
        url, querystring = Helper.generic_request("cards", card_id, "idLabels")
        url = "{}/{}".format(url, label_id)
        return rq.delete(url, params=querystring)
class Updater(models.Model):
    """Keeps Company records and their Trello cards in sync.

    All methods are static; they read/write the Django models (Company,
    Reminder, ...) and use Helper for the Trello REST calls.
    """
    @staticmethod
    def set_last_activity(company):
        """Refresh *company* from its Trello card.

        Advances company.seller_stage when a manual stage label moved the
        company to an allowed next stage, registers activity via
        company.update(), and tracks the card's comment count. A card with
        no manual label gets the "first contact" label re-applied.
        """
        # current stage -> stages a manual label may legally move it into
        progress_graph = {
            Global.FIRS: [Global.NANS, Global.INTE, Global.NEGO, Global.REJE, Global.CLOS],
            Global.NANS: [Global.INTE, Global.NEGO, Global.REJE, Global.CLOS],
            Global.INTE: [Global.NANS, Global.NEGO, Global.REJE, Global.CLOS],
            Global.NEGO: [Global.NANS, Global.REJE, Global.CLOS],
        }
        stage = company.seller_stage
        if stage not in progress_graph:
            return
        try:
            card = Helper.get_nested_objs("cards", company.card_id).json()
        except Exception:
            # Request or JSON decoding failed -- most likely the card was
            # removed from the board; skip this company.
            # (Was a bare `except:`, which also swallowed KeyboardInterrupt.)
            print("{} não presente no quadro!".format(company.name))
            return
        labels = card["labels"]
        labels_names = [
            i["name"] for i in labels if i["name"] in Global.MANUAL_LABEL_NAMES.keys()
        ]
        if not labels_names:
            # No manual stage label at all: re-apply "first contact" so the
            # card always shows a stage.
            all_labels = Helper.get_nested_objs(
                "boards", os.environ["SALES_BOARD_ID"], "labels"
            ).json()
            reverse_manual_label_names = {k: v for v, k in Global.MANUAL_LABEL_NAMES.items()}
            label_id = None  # guard against a board missing the FIRS label
            for l in all_labels:
                if l["name"] == reverse_manual_label_names[Global.FIRS]:
                    label_id = l["id"]
                    break
            if label_id is not None:
                Helper.post_label(card["id"], label_id)
        for l in labels_names:
            if Global.MANUAL_LABEL_NAMES[l] in progress_graph[stage]:
                company.update()
                if Global.MANUAL_LABEL_NAMES[l] != Global.CLOS:
                    company.seller_stage = Global.MANUAL_LABEL_NAMES[l]
                    company.save()
                break
        if card["badges"]["comments"] > company.comments_number:
            # New comments on the card count as fresh activity.
            company.update()
            company.comments_number = card["badges"]["comments"]
            company.save()
    @staticmethod
    def label_update(board_id):
        """Make every tracked card carry exactly its Company's automatic
        status label (updated / attention / urgent)."""
        cards = Helper.get_nested_objs("boards", board_id, "cards").json()
        labels = Helper.get_nested_objs("boards", board_id, "labels").json()
        # Map automatic label name -> Trello label id.
        auto_ids = {
            l["name"]: l["id"] for l in labels if l["name"] in Global.AUTO_LABEL_NAMES
        }
        for c in cards:
            try:
                company = Company.objects.get(card_id=c["id"])
            except Company.DoesNotExist:
                continue  # card is not tracked by the app
            card_labels = Helper.get_nested_objs("cards", c["id"], "labels").json()
            right_label = company.status_label
            right_id = auto_ids.get(right_label)
            if right_id is None:
                # Board is missing that label (or status_label is unexpected);
                # previously this raised NameError on an unbound right_id.
                continue
            found = False
            for cl in card_labels:
                if cl["name"] in Global.AUTO_LABEL_NAMES:
                    if cl["name"] != right_label:
                        Helper.delete_label(c["id"], cl["id"])  # wrong auto label
                    else:
                        found = True
            if not found:
                Helper.post_label(c["id"], right_id)
    @staticmethod
    def assign_new_hunter(company, hunter):
        """Hand *company* over to *hunter* and move its Trello card.

        hunter must be a Seller, Contractor or PostSeller; PostSellers have
        no Trello list, so the card is only moved for the other two.
        Raises RuntimeError when Trello refuses to move the card.
        """
        if type(hunter) != PostSeller:
            r = Helper.put_card_in_list(company.card_id, hunter.list_id)
            if r.status_code != 200:
                # Was `raise r`: a requests.Response is not an exception and
                # the raise itself failed with TypeError. Raise a real error.
                raise RuntimeError(
                    "Could not move card {} (HTTP {})".format(company.card_id, r.status_code)
                )
        Reminder.new_company_reminder(company, hunter)
        if type(hunter) == Seller:
            company.seller = hunter
        elif type(hunter) == Contractor:
            company.closedcom.contractor = hunter
        elif type(hunter) == PostSeller:
            company.closedcom.postseller = hunter
        else:
            raise TypeError("Hunter must be Seller, Contractor or PostSeller")
        company.save()
        company.update()
| # Python std library
import os
import time
# Third Party
import requests as rq
# Django
from django.db import models
# Project
from bot.models import Company, Contractor, PostSeller, Reminder, Seller
from globalvars.models import Global
class Helper(models.Model):
    """Static convenience wrappers around the Trello v1 REST API."""
    api_url = "https://api.trello.com/1"
    @staticmethod
    def generic_request(ext_obj, obj_id, nested_obj):
        """Compose the endpoint URL and the shared auth query parameters."""
        endpoint = f"{Helper.api_url}/{ext_obj}/{obj_id}/{nested_obj}"
        credentials = {
            "key": os.environ["TRELLO_KEY"],
            "token": os.environ["TRELLO_TOKEN"],
        }
        time.sleep(0.05)  # brief pause so we stay under Trello's rate limit
        return endpoint, credentials
    @staticmethod
    def get_nested_objs(ext_obj, obj_id, nested_obj=""):
        """GET an object or, when *nested_obj* is set, a nested collection."""
        endpoint, params = Helper.generic_request(ext_obj, obj_id, nested_obj)
        return rq.get(endpoint, params=params)
    @staticmethod
    def post_card(name, list_id):
        """Create a card called *name* on the list *list_id*."""
        params = {
            "name": name,
            "idList": list_id,
            "key": os.environ["TRELLO_KEY"],
            "token": os.environ["TRELLO_TOKEN"],
        }
        return rq.post(f"{Helper.api_url}/cards", params=params)
    @staticmethod
    def post_list(name, board_id):
        """Create a list called *name* on the board *board_id*."""
        params = {
            "name": name,
            "idBoard": board_id,
            "key": os.environ["TRELLO_KEY"],
            "token": os.environ["TRELLO_TOKEN"],
        }
        return rq.post(f"{Helper.api_url}/lists", params=params)
    @staticmethod
    def put_card_in_list(card_id, list_id):
        """Move the card *card_id* onto the list *list_id*."""
        params = {
            "idList": list_id,
            "key": os.environ["TRELLO_KEY"],
            "token": os.environ["TRELLO_TOKEN"],
        }
        return rq.put(f"{Helper.api_url}/cards/{card_id}", params=params)
    @staticmethod
    def post_label(card_id, label_id):
        """Attach the label *label_id* to the card *card_id*."""
        endpoint, params = Helper.generic_request("cards", card_id, "idLabels")
        params["value"] = label_id
        return rq.post(endpoint, params=params)
    @staticmethod
    def delete_label(card_id, label_id):
        """Detach the label *label_id* from the card *card_id*."""
        endpoint, params = Helper.generic_request("cards", card_id, "idLabels")
        return rq.delete(f"{endpoint}/{label_id}", params=params)
class Updater(models.Model):
    """Keeps Company records and their Trello cards in sync."""
    @staticmethod
    def set_last_activity(company):
        """Advance company.seller_stage from the card's manual labels and
        register activity (label moves / new comments) via company.update()."""
        # current stage -> stages a manual label is allowed to move it into
        progress_graph = {
            Global.FIRS: [Global.NANS, Global.INTE, Global.NEGO, Global.REJE, Global.CLOS],
            Global.NANS: [Global.INTE, Global.NEGO, Global.REJE, Global.CLOS],
            Global.INTE: [Global.NANS, Global.NEGO, Global.REJE, Global.CLOS],
            Global.NEGO: [Global.NANS, Global.REJE, Global.CLOS],
        }
        stage = company.seller_stage
        if stage in progress_graph.keys():
            try:
                card = Helper.get_nested_objs("cards", company.card_id).json()
            except:
                # NOTE(review): bare except also swallows KeyboardInterrupt;
                # presumably meant to catch request/JSON failures only.
                print("{} não presente no quadro!".format(company.name))
                return
            labels = card["labels"]
            labels_names = [
                i["name"] for i in labels if i["name"] in Global.MANUAL_LABEL_NAMES.keys()
            ]
            if not labels_names:
                # Card has no manual stage label: re-apply "first contact".
                all_labels = Helper.get_nested_objs(
                    "boards", os.environ["SALES_BOARD_ID"], "labels"
                ).json()
                reverse_manual_label_names = {k: v for v, k in Global.MANUAL_LABEL_NAMES.items()}
                # NOTE(review): label_id stays unbound if the board lacks the
                # FIRS label -- Helper.post_label would raise UnboundLocalError.
                for l in all_labels:
                    if l["name"] == reverse_manual_label_names[Global.FIRS]:
                        label_id = l["id"]
                        break
                Helper.post_label(card["id"], label_id)
            for l in labels_names:
                if Global.MANUAL_LABEL_NAMES[l] in progress_graph[stage]:
                    company.update()
                    if Global.MANUAL_LABEL_NAMES[l] != Global.CLOS:
                        company.seller_stage = Global.MANUAL_LABEL_NAMES[l]
                        company.save()
                    break
            if card["badges"]["comments"] > company.comments_number:
                # New comments on the card count as fresh activity.
                company.update()
                company.comments_number = card["badges"]["comments"]
                company.save()
    @staticmethod
    def label_update(board_id):
        """Ensure each tracked card carries exactly its Company's automatic
        status label (updated / attention / urgent)."""
        cards = Helper.get_nested_objs("boards", board_id, "cards").json()
        labels = Helper.get_nested_objs("boards", board_id, "labels").json()
        for l in labels:
            if l["name"] == Global.AUTO_LABEL_NAMES[0]:
                upd_id = l["id"] # id for updated label
            elif l["name"] == Global.AUTO_LABEL_NAMES[1]:
                att_id = l["id"] # id for attention label
            elif l["name"] == Global.AUTO_LABEL_NAMES[2]:
                urg_id = l["id"] # id for urgent label
        for c in cards:
            try:
                company = Company.objects.get(card_id=c["id"])
            except Company.DoesNotExist:
                # Card is not tracked by the app; ignore it.
                continue
            card_labels = Helper.get_nested_objs("cards", c["id"], "labels").json()
            right_label = company.status_label
            # NOTE(review): right_id stays unbound if status_label is not one
            # of the three AUTO_LABEL_NAMES -- confirm that cannot happen.
            if right_label == Global.AUTO_LABEL_NAMES[0]:
                right_id = upd_id
            elif right_label == Global.AUTO_LABEL_NAMES[1]:
                right_id = att_id
            elif right_label == Global.AUTO_LABEL_NAMES[2]:
                right_id = urg_id
            found = False
            for cl in card_labels:
                if cl["name"] in Global.AUTO_LABEL_NAMES:
                    if cl["name"] != right_label:
                        # Wrong automatic label: remove it from the card.
                        Helper.delete_label(c["id"], cl["id"])
                    else:
                        found = True
            if not found:
                Helper.post_label(c["id"], right_id)
    @staticmethod
    def assign_new_hunter(company, hunter):
        """Hand *company* to *hunter* (Seller/Contractor/PostSeller), moving
        its Trello card onto the hunter's list when one exists."""
        if type(hunter) != PostSeller:
            r = Helper.put_card_in_list(company.card_id, hunter.list_id)
            if r.status_code != 200:
                # NOTE(review): r is a requests.Response, not an exception --
                # this raise itself fails with TypeError; confirm intent.
                raise r
        Reminder.new_company_reminder(company, hunter)
        if type(hunter) == Seller:
            company.seller = hunter
        elif type(hunter) == Contractor:
            company.closedcom.contractor = hunter
        elif type(hunter) == PostSeller:
            company.closedcom.postseller = hunter
        else:
            raise TypeError("Hunter must be Seller, Contractor or PostSeller")
        company.save()
        company.update()
| en | 0.719162 | # Python std library # Third Party # Django # Project docstring for Helper # this avoids making too many requests docstring for Updater # id for updated label # id for attention label # id for urgent label | 2.310045 | 2 |
leetcode/bisectionMethod/hIndex.py | BennyJane/algorithm_mad | 0 | 6619744 | <filename>leetcode/bisectionMethod/hIndex.py
from typing import List
# LeetCode 275. H-Index II (citations sorted ascending)
class Solution:
    """Three solutions to LeetCode 275 (H-Index II).

    citations is sorted ascending; the h-index is the largest h such that
    at least h papers have >= h citations each.
    """
    def hIndex(self, citations: List[int]) -> int:
        """O(n) scan from the most-cited paper backwards.

        Key insight: the number of papers already seen (the suffix length)
        is a lower bound for h once citations[i] >= that count.
        """
        n = len(citations)
        count = 0  # papers seen so far; all of them have >= citations[i] cites
        ans = 0
        for i in range(n - 1, -1, -1):
            count += 1
            if citations[i] >= count:
                ans = max(ans, count)
        return ans
    def hIndex2(self, citations: List[int]) -> int:
        """O(log n) binary search tracking the best valid h seen.

        The previous version computed mid but only moved left/right by one
        per iteration, degrading the "binary search" to O(n); here mid
        itself is discarded on every step.
        """
        n = len(citations)
        left, right = 0, n - 1
        ans = 0
        while left <= right:
            mid = (left + right) // 2
            if citations[mid] >= n - mid:
                # Every paper from mid onwards has >= n - mid citations.
                ans = max(ans, n - mid)
                right = mid - 1  # try a longer suffix (larger h)
            else:
                left = mid + 1
        return ans
    def hIndex3(self, citations: List[int]) -> int:
        """O(log n): find the leftmost index whose suffix is a valid h."""
        n = len(citations)
        left = 0
        right = n - 1
        while left <= right:
            mid = left + (right - left) // 2
            if citations[mid] >= n - mid:
                right = mid - 1
            else:
                left = mid + 1
        return n - left
| <filename>leetcode/bisectionMethod/hIndex.py
from typing import List
# LeetCode 275. H-Index II (citations sorted ascending)
class Solution:
    """LeetCode 275 (H-Index II): three approaches over sorted citations."""
    def hIndex(self, citations: List[int]) -> int:
        """Linear scan from the most-cited paper backwards."""
        best = 0
        for seen, value in enumerate(reversed(citations), start=1):
            # *seen* papers so far all have at least *value* citations.
            if value >= seen:
                best = max(best, seen)
        return best
    def hIndex2(self, citations: List[int]) -> int:
        """Bisection-flavoured scan keeping the best suffix length found."""
        total = len(citations)
        lo, hi = 0, total - 1
        best = 0
        while lo <= hi:
            probe = (lo + hi + 1) // 2
            tail = total - probe  # papers from probe to the end
            if citations[probe] >= tail:
                best = max(best, tail)
                hi -= 1
            else:
                lo += 1
        return best
    def hIndex3(self, citations: List[int]) -> int:
        """Binary search for the leftmost index whose suffix is a valid h."""
        size = len(citations)
        lo, hi = 0, size - 1
        while lo <= hi:
            middle = (lo + hi) // 2
            if size - middle <= citations[middle]:
                hi = middle - 1
            else:
                lo = middle + 1
        return size - lo
| zh | 0.678671 | # 275. H 指数 II # 关键点:右侧长度决定了h的下限 # 记录已经遍历数组长度 | 3.663458 | 4 |
warehouse.py | JoshuaMcroberts/DeliveryDilemmaLite | 0 | 6619745 | from libraries import *
from game import N_game
def warehouse(game = N_game()):
    """Main warehouse hub: describe the room and dispatch to sub-areas.

    Loops until the game ends or game.basic_game_func (the shared
    hint/quit handler) tells us to stop.
    """
    loop = True
    while loop:
        if(game.game_over == False):
            # Place the player marker on the warehouse tile of the map.
            game.game_map.pre = game.game_map.player_enter((2,1),game.game_map.pre)
            clear_screen()
            print("")
            print_tab(pr_colour("l_blue","-- WAREHOUSE --")+"\n")
            print_tab("The warehouse is a cavernous open space with concrete floors painted a pale blue colour.")
            print_tab("Red lines clearly mark out walk ways from fork lift drive paths. The warehouse appears to")
            print_tab("have been broken down into sections. To the front of the warehouse there are two plastic ")
            print_tab("sheeting, covered holes in the wall. The space behind them is clear, however after that on")
            print_tab("the wall can be found the word " + pr_colour("l_blue", "Sorting Area") + ". Looking to the opposite side of the room")
            print_tab("you can see six smaller gaps in the wall covered by the same plastic sheeting as the others.")
            print_tab("The wall beside this area reads " + pr_colour("l_blue", "Loading Bay") + ". Next to you there is a desk that has been")
            print_tab("labelled " + pr_colour("l_blue", "Parcel Repair") + ". This seems to be were damaged parcels go when they need fixing. ")
            print_tab("The last feature of the warehouse is a window surrounded " + pr_colour("l_blue", "Office") + " in the near right hand corner. ")
            var = san_input()
            # Navigation IF
            if var == "sortingarea":
                shelves(game)
            elif var == "parcelrepair":
                damaged_parcel_area(game)
            elif var == "loadingbay":
                loading_bay(game)
            elif var == "office":
                office(game)
            else:
                # Unrecognised input: the shared handler shows hints /
                # handles quitting and returns whether to keep looping.
                hint = "Look around for Uncle Jock's Parcel"
                loop = game.basic_game_func(var, hint)
        else:
            loop = False
def shelves(game = N_game()):
    """Sorting Area: let the player inspect parcels 1-4 on the shelves."""
    loop = True
    while loop:
        game.game_map.pre = game.game_map.player_enter((2,0),game.game_map.pre)
        clear_screen()
        print("")
        print_tab(pr_colour("l_blue","-- SORTING AREA --") + "\n")
        print_tab("The sorting area is broken down into postcodes and forwarding piles. Some to be shipped to ")
        print_tab("other distribution centres and others to be delivered to the local area. In the forwarding ")
        print_tab("section there are a number of parcels to be sent however only four of them match the size of ")
        print_tab("the parcel you are looking for. Have a look around at the parcels. You may need a " + pr_colour("l_blue","Hint") + " to ")
        print_tab("start your search.")
        var, num = item_input()
        # item_input splits a trailing number off the command; when there was
        # no number, num is not an int and the raw text is re-sanitised whole.
        if str(type(num)) != "<class 'int'>":
            var = san_text(var + str(num))
        if var == "parcel" and num < 5 and num > 0:
            boxes(1 ,num, game)
        else:
            hint = "Type: Parcel 1 "
            loop = game.basic_game_func(var, hint)
def damaged_parcel_area(game = N_game()):
    """Parcel Repair desk: two damaged parcels the player can inspect."""
    loop = True
    while loop:
        game.game_map.pre = game.game_map.player_enter((2,2),game.game_map.pre)
        clear_screen()
        print("")
        print_tab(pr_colour("l_blue","-- PARCEL REPAIR STATION --") + "\n")
        print_tab("On the desk sits two parcels that seem a little worst for wear. The " + pr_colour("l_blue", "Parcel 1") + " seems to have")
        print_tab("been dropped as one of the corners has the characteristic signs of landing face first. ")
        print_tab(pr_colour("l_blue", "Parcel 2") + " seems to have been crashed by another parcel significantly heavier then if could ")
        print_tab("withstand. All around its side are the wrinkles in the cardboard formed when it buckled")
        print_tab("under the weight which also seems to have caused the corners to spilt.")
        var = san_input()
        if var == "parcel1":
            clear_screen()
            print("")
            print_tab(pr_colour("l_blue","-- PARCEL 1 --") + "\n")
            print_tab("The address label on the parcel reads:\n")
            print_tab("\t┌────────────────────┐")
            print_tab("\t│ <NAME> │")
            print_tab("\t│ New Chester Road │")
            print_tab("\t│ Ellesmere Port │")
            print_tab("\t│ Cheshire │")
            print_tab("\t│ CH66 1QW │")
            print_tab("\t│ United Kingdom │")
            print_tab("\t└────────────────────┘\n")
            print_tab("Not Uncle Jock's Parcel, Lets keep looking")
            pause()
            # Record parcel 1 as inspected, then see if the office is free.
            game.set_boxes(1)
            office_empty(game)
        elif var == "parcel2":
            clear_screen()
            print("")
            print_tab(pr_colour("l_blue","-- PARCEL 2 --") + "\n")
            print_tab("The address on this label appears to be ripped:\n")
            print_tab("\t ─────────┐")
            print_tab("\t _\Roberts │")
            print_tab("\t /raney Road │")
            print_tab("\t __\derry │")
            print_tab("\t /rn │")
            print_tab("\t /JG │")
            print_tab("\t /ern Ireland │")
            print_tab("\t ─────────────────┘\n")
            print_tab("Not Uncle Jock's Parcel, Lets keep looking")
            pause()
            # Record parcel 2 as inspected, then see if the office is free.
            game.set_boxes(2)
            office_empty(game)
        else:
            hint = "Don't lick icy lamp posts"
            loop = game.basic_game_func(var, hint)
def loading_bay(game = N_game()):
    """Loading Bay: entry point to the Roller Cage with more parcels."""
    loop = True
    while loop:
        game.game_map.pre = game.game_map.player_enter((1,3),game.game_map.pre)
        clear_screen()
        print("")
        print_tab(pr_colour("l_blue","-- LOADING BAY --") + "\n")
        print_tab("The loading bay has a fairly simple layout. A wheeled cage trolley can be easily wheeled from")
        print_tab("the sorting area to the smaller entrances which then allows for easy loading of the delivery")
        print_tab("vans when they are getting ready for their delivery runs. There is a single " + pr_colour("l_blue", "Roller Cage"))
        print_tab("sitting off to the side of one of the loading areas.")
        var = san_input()
        if var == "rollercage":
            rollercage(game)
        else:
            hint = "Don't lick icy lamp posts"
            loop = game.basic_game_func(var, hint)
def rollercage(game = N_game()):
    """Roller Cage: let the player inspect parcels 1-3 stacked inside it."""
    loop = True
    while loop:
        clear_screen()
        print("")
        print_tab(pr_colour("l_blue","-- ROLLER CAGE --") + "\n")
        print_tab("Three parcel lie in an almost tower like structure in the bottom of the Roller Cage. Most of ")
        print_tab("the labels are obscured. You can take a closer look at each parcel to see its shipping label.")
        print_tab("You may need a " + pr_colour("l_blue","Hint") + " to start your search.")
        var, num = item_input()
        # item_input splits a trailing number off the command; when there was
        # no number, num is not an int and the raw text is re-sanitised whole.
        if str(type(num)) != "<class 'int'>":
            var = san_text(var + str(num))
        if var == "parcel" and num <4 and num > 0:
            boxes( 2 ,num, game)
        else:
            hint = "Type: Parcel 1 "
            loop = game.basic_game_func(var, hint)
def office(game = N_game()):
    """Warehouse office: blocked while the worker is present, otherwise
    gives access to the parcel-tracking Computer."""
    loop = True
    while loop:
        if(game.game_over == False):
            game.game_map.pre = game.game_map.player_enter((0,3),game.game_map.pre)
            clear_screen()
            print("")
            print_tab(pr_colour("l_blue","-- WAREHOUSE OFFICE --") + "\n")
            if game.worker == True:
                # The worker only leaves once enough parcels have been
                # inspected -- see office_empty().
                print_tab("As you get closer to the office you see there is someone inside it. They would recognise ")
                print_tab("instantly that you weren't supposed to be here. You best search elsewhere until they leave. ")
                pause()
                loop = False
            else:
                print_tab("You enter the office and find cluttered space. On a table in the back of the room semi-ordered ")
                print_tab("stacks of paper climb the wall. Three of the four sides of the boxy room have glass windows ")
                print_tab("that span the length of the side. The bottom edges of the window frames are coated with a thin")
                print_tab("layer of dust which appears to have been disturbed in places where people have lent against it.")
                print_tab("On a table that faces into the warehouse sits a " + pr_colour("l_blue","Computer") + " with it password and username handily")
                print_tab("stored on a post-it note stuck to the top left-hand corner of the screen. ")
                var = san_input()
                if var == "computer":
                    computer(game)
                else:
                    hint = "Don't lick icy lamp posts"
                    loop = game.basic_game_func(var, hint)
        else:
            loop = False
def computer(game = N_game()):
    """Office computer sequence: reveal the parcel and vehicle records,
    set the final objective (find the delivery van) and end the chapter
    by setting game.game_over = True once the player confirms."""
    clear_screen()
    print("")
    print_tab(pr_colour("l_blue","-- COMPUTER --")+"\n")
    print_tab("You unlock the computer to find a parcel management system loaded on the screen. On the ")
    print_tab("display different numbers show how many parcels will be shipped to each of the surrounding ")
    print_tab("towns.")
    s_pause()
    print_tab("You select the search function and enter the tracking number of Uncle Jocks parcel.")
    s_pause()
    print_tab("An incorrect value error appears on the screen and then blinks out.")
    s_pause()
    print_tab("You try entering the parcel ID number and immediately an item record opens up.")
    s_pause()
    clear_screen()
    print("")
    print_tab(pr_colour("l_blue","-- PARCEL RECORD --") + "\n")
    print_tab("┌──────────────────────────────────────────────────────────────┐")
    print_tab("│ Parcel Number: B42 8472 3189 6439 10 │")
    print_tab("│ │")
    print_tab("│ Tracking Number: A2K6U9-2893-G2GU96 │")
    print_tab("│ │")
    print_tab("│ Delivery Address: Jock Thistlewaite Angus MacTavish III │")
    print_tab("│ 3 Pennyworth Rd │")
    print_tab("│ Aderfeldy │")
    print_tab("│ Perthshire │")
    print_tab("│ BXA2XW │")
    print_tab("│ │")
    print_tab("│ Delivery Date: Tomorrow - 24/12/2021 │")
    print_tab("│ │")
    print_tab("│ Current Location: In Vehicle for delivery │")
    print_tab("└──────────────────────────────────────────────────────────────┘")
    pause()
    clear_screen()
    print("")
    print_tab(pr_colour("l_blue","-- COMPUTER --")+"\n")
    print_tab("After skimming over the details you realise that the parcel in no longer in the warehouse ")
    print_tab("but instead in a vehicle waiting to be delivered.")
    s_pause()
    print_tab("You select the Current Location field and a vehicle record opens.")
    s_pause()
    clear_screen()
    print("")
    print_tab(pr_colour("l_blue","-- VEHICLE RECORD --") + "\n")
    print_tab("┌───────────────────────────────┐")
    print_tab("│ Vehicle ID: 00001372 │")
    # print_tab("│ │")
    print_tab("│ Driver Name: Sidney │")
    print_tab("│ Miles: 100,263 │")
    print_tab("│ │")
    print_tab("│ Serviced Last: 30/09/2021 │")
    print_tab("│ MOT due: 22/01/2022 │")
    print_tab("│ │")
    print_tab("│ REG: " + game.unformated_plate + " │")
    print_tab("└───────────────────────────────┘")
    pause()
    clear_screen()
    print("")
    print_tab(pr_colour("l_blue","-- COMPUTER --")+"\n")
    print_tab("You now have the vehicle information. "+ game.player_name +" it is up to you! ")
    s_pause()
    # The number plate becomes the real-world objective for the player.
    game.set_new_ob("Find Uncle Jock's Parcel in a Vehicle with REG: " + game.number_plate )
    s_pause()
    loop = True
    while loop:
        clear_screen()
        print("")
        print_tab(pr_colour("l_blue","-- COMPUTER --")+"\n")
        print_tab("Did you find Uncle Jock's parcel in the delivery vehicle? Type YES to continue.")
        var = san_input()
        if var == "yes":
            loop = False
        elif var == "hint":
            print("")
            hint = "Call the game maker if you can't find the"
            print("\tHint -", end="")
            print_tab(hint)
            pause()
        else:
            print("")
            print_tab("Incorrect entry try again")
            pause()
    # Chapter complete: signal every calling loop to unwind.
    game.game_over = True
def boxes( opt , num, game = N_game() ):
    """Show the shipping label of one inspected parcel and record the find.

    opt -- search area: 1 = Sorting Area (parcels 1-4), 2 = Roller Cage
           (parcels 1-3); callers validate *num* against those ranges.
    num -- parcel number within the area.

    Marks the parcel as inspected via game.set_boxes and, after the last
    parcel of an area, calls office_empty so the office worker can leave.
    Refactored from seven copy-pasted branches into one data table plus a
    single render path; printed output is unchanged.
    NOTE(review): the `game = N_game()` default is a shared mutable default
    kept for consistency with the rest of the file; callers always pass game.
    """
    # (opt, num) -> (label body lines, game.set_boxes index, last parcel of area?)
    parcels = {
        (1, 1): (["│ <NAME> │", "│ 25 Terrace Rd │", "│ Aberystwyth │",
                  "│ Dyfed │", "│ SY23 1NP │", "│ United Kingdom │"], 3, False),
        (1, 2): (["│ <NAME> │", "│ 8 Lynwood Close │", "│ Ashton-under-Lyne │",
                  "│ Tameside │", "│ OL7 9SS │", "│ United Kingdom │"], 4, False),
        (1, 3): (["│ <NAME> │", "│ College Green │", "│ Bristol │",
                  "│ City of Bristol │", "│ BS1 5TA │", "│ United Kingdom │"], 5, False),
        (1, 4): (["│ Bethany Hunt │", "│ 56 Hambro Hill │", "│ Rayleigh │",
                  "│ Essex │", "│ SS6 8BW │", "│ United Kingdom │"], 6, True),
        (2, 1): (["│ <NAME> │", "│ 27 Manor Way │", "│ Borehamwood │",
                  "│ Hertfordshire │", "│ WD6 1QJ │", "│ United Kingdom │"], 7, False),
        (2, 2): (["│ Yvonne Price │", "│ 15-16 High St │", "│ Swansea │",
                  "│ Glamorgan │", "│ SA1 1LF │", "│ United Kingdom │"], 8, False),
        (2, 3): (["│ <NAME> │", "│ 14 St Thomas Rd │", "│ Brentwood │",
                  "│ Essex │", "│ CM14 4DB │", "│ United Kingdom │"], 9, True),
    }
    entry = parcels.get((opt, num))
    if entry is None:
        return  # out-of-range parcel: the original silently did nothing too
    label_lines, box_index, last_in_area = entry
    clear_screen()
    print("")
    print_tab(pr_colour("l_blue","-- PARCEL "+ str(num) +" --") + "\n")
    print_tab("The address label on the parcel reads:\n")
    print_tab("\t┌────────────────────┐")
    for line in label_lines:
        print_tab("\t" + line)
    print_tab("\t└────────────────────┘\n")
    print_tab("Not Uncle Jock's Parcel, Lets keep looking")
    pause()
    game.set_boxes(box_index)
    if last_in_area:
        office_empty(game)
def office_empty(game = N_game()):
    """Once enough parcels have been checked, send the office worker away
    and point the player at the now-empty office."""
    if game.check_boxes() != True:
        return
    clear_screen()
    print("")
    print_tab(pr_colour("l_blue","-- SEARCHING --") + "\n")
    print_tab("As you set down the parcel you are looking at you glance across the warehouse to the office.")
    print_tab("You notice the worker that was in the office has left it and is heading out the door to the ")
    print_tab("main building. Now is your chance to have a look inside.")
    game.set_new_ob("Search the Office")
    game.worker = False
    pause()
if __name__ == "__main__":
game = N_game()
game.set_num_plate(" KLZ 9890 ")
computer(game)
# warehouse(game) | from libraries import *
from game import N_game
def warehouse(game = N_game()):
    """Main warehouse hub: describe the room and route the player to the
    sorting area, parcel repair desk, loading bay or office."""
    loop = True
    while loop:
        if(game.game_over == False):
            game.game_map.pre = game.game_map.player_enter((2,1),game.game_map.pre)
            clear_screen()
            print("")
            print_tab(pr_colour("l_blue","-- WAREHOUSE --")+"\n")
            print_tab("The warehouse is a cavernous open space with concrete floors painted a pale blue colour.")
            print_tab("Red lines clearly mark out walk ways from fork lift drive paths. The warehouse appears to")
            print_tab("have been broken down into sections. To the front of the warehouse there are two plastic ")
            print_tab("sheeting, covered holes in the wall. The space behind them is clear, however after that on")
            print_tab("the wall can be found the word " + pr_colour("l_blue", "Sorting Area") + ". Looking to the opposite side of the room")
            print_tab("you can see six smaller gaps in the wall covered by the same plastic sheeting as the others.")
            print_tab("The wall beside this area reads " + pr_colour("l_blue", "Loading Bay") + ". Next to you there is a desk that has been")
            print_tab("labelled " + pr_colour("l_blue", "Parcel Repair") + ". This seems to be were damaged parcels go when they need fixing. ")
            print_tab("The last feature of the warehouse is a window surrounded " + pr_colour("l_blue", "Office") + " in the near right hand corner. ")
            var = san_input()
            # Navigation IF
            if var == "sortingarea":
                shelves(game)
            elif var == "parcelrepair":
                damaged_parcel_area(game)
            elif var == "loadingbay":
                loading_bay(game)
            elif var == "office":
                office(game)
            else:
                hint = "Look around for Uncle Jock's Parcel"
                loop = game.basic_game_func(var, hint)
        else:
            loop = False
def shelves(game = N_game()):
    """Sorting Area: let the player inspect parcels 1-4 on the shelves."""
    loop = True
    while loop:
        game.game_map.pre = game.game_map.player_enter((2,0),game.game_map.pre)
        clear_screen()
        print("")
        print_tab(pr_colour("l_blue","-- SORTING AREA --") + "\n")
        print_tab("The sorting area is broken down into postcodes and forwarding piles. Some to be shipped to ")
        print_tab("other distribution centres and others to be delivered to the local area. In the forwarding ")
        print_tab("section there are a number of parcels to be sent however only four of them match the size of ")
        print_tab("the parcel you are looking for. Have a look around at the parcels. You may need a " + pr_colour("l_blue","Hint") + " to ")
        print_tab("start your search.")
        var, num = item_input()
        # No trailing number given: num is not an int, re-sanitise the text.
        if str(type(num)) != "<class 'int'>":
            var = san_text(var + str(num))
        if var == "parcel" and num < 5 and num > 0:
            boxes(1 ,num, game)
        else:
            hint = "Type: Parcel 1 "
            loop = game.basic_game_func(var, hint)
def damaged_parcel_area(game = N_game()):
    """Parcel Repair desk: two damaged parcels the player can inspect."""
    loop = True
    while loop:
        game.game_map.pre = game.game_map.player_enter((2,2),game.game_map.pre)
        clear_screen()
        print("")
        print_tab(pr_colour("l_blue","-- PARCEL REPAIR STATION --") + "\n")
        print_tab("On the desk sits two parcels that seem a little worst for wear. The " + pr_colour("l_blue", "Parcel 1") + " seems to have")
        print_tab("been dropped as one of the corners has the characteristic signs of landing face first. ")
        print_tab(pr_colour("l_blue", "Parcel 2") + " seems to have been crashed by another parcel significantly heavier then if could ")
        print_tab("withstand. All around its side are the wrinkles in the cardboard formed when it buckled")
        print_tab("under the weight which also seems to have caused the corners to spilt.")
        var = san_input()
        if var == "parcel1":
            clear_screen()
            print("")
            print_tab(pr_colour("l_blue","-- PARCEL 1 --") + "\n")
            print_tab("The address label on the parcel reads:\n")
            print_tab("\t┌────────────────────┐")
            print_tab("\t│ <NAME> │")
            print_tab("\t│ New Chester Road │")
            print_tab("\t│ Ellesmere Port │")
            print_tab("\t│ Cheshire │")
            print_tab("\t│ CH66 1QW │")
            print_tab("\t│ United Kingdom │")
            print_tab("\t└────────────────────┘\n")
            print_tab("Not Uncle Jock's Parcel, Lets keep looking")
            pause()
            # Record parcel 1 as inspected, then see if the office is free.
            game.set_boxes(1)
            office_empty(game)
        elif var == "parcel2":
            clear_screen()
            print("")
            print_tab(pr_colour("l_blue","-- PARCEL 2 --") + "\n")
            print_tab("The address on this label appears to be ripped:\n")
            print_tab("\t ─────────┐")
            print_tab("\t _\Roberts │")
            print_tab("\t /raney Road │")
            print_tab("\t __\derry │")
            print_tab("\t /rn │")
            print_tab("\t /JG │")
            print_tab("\t /ern Ireland │")
            print_tab("\t ─────────────────┘\n")
            print_tab("Not Uncle Jock's Parcel, Lets keep looking")
            pause()
            # Record parcel 2 as inspected, then see if the office is free.
            game.set_boxes(2)
            office_empty(game)
        else:
            hint = "Don't lick icy lamp posts"
            loop = game.basic_game_func(var, hint)
def loading_bay(game = N_game()):
    """Loading Bay: entry point to the Roller Cage with more parcels."""
    loop = True
    while loop:
        game.game_map.pre = game.game_map.player_enter((1,3),game.game_map.pre)
        clear_screen()
        print("")
        print_tab(pr_colour("l_blue","-- LOADING BAY --") + "\n")
        print_tab("The loading bay has a fairly simple layout. A wheeled cage trolley can be easily wheeled from")
        print_tab("the sorting area to the smaller entrances which then allows for easy loading of the delivery")
        print_tab("vans when they are getting ready for their delivery runs. There is a single " + pr_colour("l_blue", "Roller Cage"))
        print_tab("sitting off to the side of one of the loading areas.")
        var = san_input()
        if var == "rollercage":
            rollercage(game)
        else:
            hint = "Don't lick icy lamp posts"
            loop = game.basic_game_func(var, hint)
def rollercage(game = N_game()):
    """Roller Cage: let the player inspect parcels 1-3 stacked inside it."""
    loop = True
    while loop:
        clear_screen()
        print("")
        print_tab(pr_colour("l_blue","-- ROLLER CAGE --") + "\n")
        print_tab("Three parcel lie in an almost tower like structure in the bottom of the Roller Cage. Most of ")
        print_tab("the labels are obscured. You can take a closer look at each parcel to see its shipping label.")
        print_tab("You may need a " + pr_colour("l_blue","Hint") + " to start your search.")
        var, num = item_input()
        # No trailing number given: num is not an int, re-sanitise the text.
        if str(type(num)) != "<class 'int'>":
            var = san_text(var + str(num))
        if var == "parcel" and num <4 and num > 0:
            boxes( 2 ,num, game)
        else:
            hint = "Type: Parcel 1 "
            loop = game.basic_game_func(var, hint)
def office(game = N_game()):
loop = True
while loop:
if(game.game_over == False):
game.game_map.pre = game.game_map.player_enter((0,3),game.game_map.pre)
clear_screen()
print("")
print_tab(pr_colour("l_blue","-- WAREHOUSE OFFICE --") + "\n")
if game.worker == True:
print_tab("As you get closer to the office you see there is someone inside it. They would recognise ")
print_tab("instantly that you weren't supposed to be here. You best search elsewhere until they leave. ")
pause()
loop = False
else:
print_tab("You enter the office and find cluttered space. On a table in the back of the room semi-ordered ")
print_tab("stacks of paper climb the wall. Three of the four sides of the boxy room have glass windows ")
print_tab("that span the length of the side. The bottom edges of the window frames are coated with a thin")
print_tab("layer of dust which appears to have been disturbed in places where people have lent against it.")
print_tab("On a table that faces into the warehouse sits a " + pr_colour("l_blue","Computer") + " with it password and username handily")
print_tab("stored on a post-it note stuck to the top left-hand corner of the screen. ")
var = san_input()
if var == "computer":
computer(game)
else:
hint = "Don't lick icy lamp posts"
loop = game.basic_game_func(var, hint)
else:
loop = False
def computer(game = N_game()):
clear_screen()
print("")
print_tab(pr_colour("l_blue","-- COMPUTER --")+"\n")
print_tab("You unlock the computer to find a parcel management system loaded on the screen. On the ")
print_tab("display different numbers show how many parcels will be shipped to each of the surrounding ")
print_tab("towns.")
s_pause()
print_tab("You select the search function and enter the tracking number of Uncle Jocks parcel.")
s_pause()
print_tab("An incorrect value error appears on the screen and then blinks out.")
s_pause()
print_tab("You try entering the parcel ID number and immediately an item record opens up.")
s_pause()
clear_screen()
print("")
print_tab(pr_colour("l_blue","-- PARCEL RECORD --") + "\n")
print_tab("┌──────────────────────────────────────────────────────────────┐")
print_tab("│ Parcel Number: B42 8472 3189 6439 10 │")
print_tab("│ │")
print_tab("│ Tracking Number: A2K6U9-2893-G2GU96 │")
print_tab("│ │")
print_tab("│ Delivery Address: Jock Thistlewaite Angus MacTavish III │")
print_tab("│ 3 Pennyworth Rd │")
print_tab("│ Aderfeldy │")
print_tab("│ Perthshire │")
print_tab("│ BXA2XW │")
print_tab("│ │")
print_tab("│ Delivery Date: Tomorrow - 24/12/2021 │")
print_tab("│ │")
print_tab("│ Current Location: In Vehicle for delivery │")
print_tab("└──────────────────────────────────────────────────────────────┘")
pause()
clear_screen()
print("")
print_tab(pr_colour("l_blue","-- COMPUTER --")+"\n")
print_tab("After skimming over the details you realise that the parcel in no longer in the warehouse ")
print_tab("but instead in a vehicle waiting to be delivered.")
s_pause()
print_tab("You select the Current Location field and a vehicle record opens.")
s_pause()
clear_screen()
print("")
print_tab(pr_colour("l_blue","-- VEHICLE RECORD --") + "\n")
print_tab("┌───────────────────────────────┐")
print_tab("│ Vehicle ID: 00001372 │")
# print_tab("│ │")
print_tab("│ Driver Name: Sidney │")
print_tab("│ Miles: 100,263 │")
print_tab("│ │")
print_tab("│ Serviced Last: 30/09/2021 │")
print_tab("│ MOT due: 22/01/2022 │")
print_tab("│ │")
print_tab("│ REG: " + game.unformated_plate + " │")
print_tab("└───────────────────────────────┘")
pause()
clear_screen()
print("")
print_tab(pr_colour("l_blue","-- COMPUTER --")+"\n")
print_tab("You now have the vehicle information. "+ game.player_name +" it is up to you! ")
s_pause()
game.set_new_ob("Find Uncle Jock's Parcel in a Vehicle with REG: " + game.number_plate )
s_pause()
loop = True
while loop:
clear_screen()
print("")
print_tab(pr_colour("l_blue","-- COMPUTER --")+"\n")
print_tab("Did you find Uncle Jock's parcel in the delivery vehicle? Type YES to continue.")
var = san_input()
if var == "yes":
loop = False
elif var == "hint":
print("")
hint = "Call the game maker if you can't find the"
print("\tHint -", end="")
print_tab(hint)
pause()
else:
print("")
print_tab("Incorrect entry try again")
pause()
game.game_over = True
def boxes( opt , num, game = N_game() ):
if opt == 1:
if num == 1 :
clear_screen()
print("")
print_tab(pr_colour("l_blue","-- PARCEL "+ str(num) +" --") + "\n")
print_tab("The address label on the parcel reads:\n")
print_tab("\t┌────────────────────┐")
print_tab("\t│ <NAME> │")
print_tab("\t│ 25 Terrace Rd │")
print_tab("\t│ Aberystwyth │")
print_tab("\t│ Dyfed │")
print_tab("\t│ SY23 1NP │")
print_tab("\t│ United Kingdom │")
print_tab("\t└────────────────────┘\n")
print_tab("Not Uncle Jock's Parcel, Lets keep looking")
pause()
game.set_boxes(3)
elif num == 2:
clear_screen()
print("")
print_tab(pr_colour("l_blue","-- PARCEL "+ str(num) +" --") + "\n")
print_tab("The address label on the parcel reads:\n")
print_tab("\t┌────────────────────┐")
print_tab("\t│ <NAME> │")
print_tab("\t│ 8 Lynwood Close │")
print_tab("\t│ Ashton-under-Lyne │")
print_tab("\t│ Tameside │")
print_tab("\t│ OL7 9SS │")
print_tab("\t│ United Kingdom │")
print_tab("\t└────────────────────┘\n")
print_tab("Not Uncle Jock's Parcel, Lets keep looking")
pause()
game.set_boxes(4)
elif num == 3:
clear_screen()
print("")
print_tab(pr_colour("l_blue","-- PARCEL "+ str(num) +" --") + "\n")
print_tab("The address label on the parcel reads:\n")
print_tab("\t┌────────────────────┐")
print_tab("\t│ <NAME> │")
print_tab("\t│ College Green │")
print_tab("\t│ Bristol │")
print_tab("\t│ City of Bristol │")
print_tab("\t│ BS1 5TA │")
print_tab("\t│ United Kingdom │")
print_tab("\t└────────────────────┘\n")
print_tab("Not Uncle Jock's Parcel, Lets keep looking")
pause()
game.set_boxes(5)
elif num == 4:
clear_screen()
print("")
print_tab(pr_colour("l_blue","-- PARCEL "+ str(num) +" --") + "\n")
print_tab("The address label on the parcel reads:\n")
print_tab("\t┌────────────────────┐")
print_tab("\t│ Bethany Hunt │")
print_tab("\t│ 56 Hambro Hill │")
print_tab("\t│ Rayleigh │")
print_tab("\t│ Essex │")
print_tab("\t│ SS6 8BW │")
print_tab("\t│ United Kingdom │")
print_tab("\t└────────────────────┘\n")
print_tab("Not Uncle Jock's Parcel, Lets keep looking")
pause()
game.set_boxes(6)
office_empty(game)
else:
if num == 1 :
clear_screen()
print("")
print_tab(pr_colour("l_blue","-- PARCEL "+ str(num) +" --") + "\n")
print_tab("The address label on the parcel reads:\n")
print_tab("\t┌────────────────────┐")
print_tab("\t│ <NAME> │")
print_tab("\t│ 27 Manor Way │")
print_tab("\t│ Borehamwood │")
print_tab("\t│ Hertfordshire │")
print_tab("\t│ WD6 1QJ │")
print_tab("\t│ United Kingdom │")
print_tab("\t└────────────────────┘\n")
print_tab("Not Uncle Jock's Parcel, Lets keep looking")
pause()
game.set_boxes(7)
elif num == 2:
clear_screen()
print("")
print_tab(pr_colour("l_blue","-- PARCEL "+ str(num) +" --") + "\n")
print_tab("The address label on the parcel reads:\n")
print_tab("\t┌────────────────────┐")
print_tab("\t│ Yvonne Price │")
print_tab("\t│ 15-16 High St │")
print_tab("\t│ Swansea │")
print_tab("\t│ Glamorgan │")
print_tab("\t│ SA1 1LF │")
print_tab("\t│ United Kingdom │")
print_tab("\t└────────────────────┘\n")
print_tab("Not Uncle Jock's Parcel, Lets keep looking")
pause()
game.set_boxes(8)
elif num == 3:
clear_screen()
print("")
print_tab(pr_colour("l_blue","-- PARCEL "+ str(num) +" --") + "\n")
print_tab("The address label on the parcel reads:\n")
print_tab("\t┌────────────────────┐")
print_tab("\t│ <NAME> │")
print_tab("\t│ 14 St Thomas Rd │")
print_tab("\t│ Brentwood │")
print_tab("\t│ Essex │")
print_tab("\t│ CM14 4DB │")
print_tab("\t│ United Kingdom │")
print_tab("\t└────────────────────┘\n")
print_tab("Not Uncle Jock's Parcel, Lets keep looking")
pause()
game.set_boxes(9)
office_empty(game)
def office_empty(game = N_game()):
empty = game.check_boxes()
if empty == True:
clear_screen()
print("")
print_tab(pr_colour("l_blue","-- SEARCHING --") + "\n")
print_tab("As you set down the parcel you are looking at you glance across the warehouse to the office.")
print_tab("You notice the worker that was in the office has left it and is heading out the door to the ")
print_tab("main building. Now is your chance to have a look inside.")
game.set_new_ob("Search the Office")
game.worker = False
pause()
if __name__ == "__main__":
game = N_game()
game.set_num_plate(" KLZ 9890 ")
computer(game)
# warehouse(game) | en | 0.254091 | # Navigation IF # print_tab("│ │") # warehouse(game) | 3.914368 | 4 |
qnas/serializers.py | easymean/ModusDRF | 0 | 6619746 | from rest_framework import serializers
from .models import PlaceQuestion, PlaceReply
from places.serializers import RelationPlaceSerializer
from users.serializers import RelationUserSerializer
class QuestionSerializer(serializers.ModelSerializer):
place = RelationPlaceSerializer(read_only=True)
user = RelationUserSerializer(read_only=True)
class Meta:
model = PlaceQuestion
exclude = ["is_active"]
read_only_fields = ["place", "user", "pk", "created", "updated"]
def create(self, validated_data):
request = self.context.get("request")
place = self.context.get("place")
question = PlaceQuestion.objects.create(
**validated_data, user=request.user, place=place
)
return question
class ReplySerializer(serializers.ModelSerializer):
class Meta:
model = PlaceReply
exclude = ["is_active"]
read_only_fields = ["pk", "question", "host", "created", "updated"]
def create(self, validated_data):
request = self.context.get("request")
question = self.context.get("question")
reply = PlaceReply.objects.create(
**validated_data, host=request.user, question=question
)
return reply
| from rest_framework import serializers
from .models import PlaceQuestion, PlaceReply
from places.serializers import RelationPlaceSerializer
from users.serializers import RelationUserSerializer
class QuestionSerializer(serializers.ModelSerializer):
place = RelationPlaceSerializer(read_only=True)
user = RelationUserSerializer(read_only=True)
class Meta:
model = PlaceQuestion
exclude = ["is_active"]
read_only_fields = ["place", "user", "pk", "created", "updated"]
def create(self, validated_data):
request = self.context.get("request")
place = self.context.get("place")
question = PlaceQuestion.objects.create(
**validated_data, user=request.user, place=place
)
return question
class ReplySerializer(serializers.ModelSerializer):
class Meta:
model = PlaceReply
exclude = ["is_active"]
read_only_fields = ["pk", "question", "host", "created", "updated"]
def create(self, validated_data):
request = self.context.get("request")
question = self.context.get("question")
reply = PlaceReply.objects.create(
**validated_data, host=request.user, question=question
)
return reply
| none | 1 | 2.280166 | 2 | |
tb_rest_client/api/api_ce/entity_view_controller_api.py | samson0v/python_tb_rest_client | 30 | 6619747 | # coding: utf-8
"""
ThingsBoard REST API
ThingsBoard open-source IoT platform REST API documentation. # noqa: E501
OpenAPI spec version: 3.3.3-SNAPSHOT
Contact: <EMAIL>
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tb_rest_client.api_client import ApiClient
class EntityViewControllerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def assign_entity_view_to_customer_using_post(self, customer_id, entity_view_id, **kwargs): # noqa: E501
"""Assign Entity View to customer (assignEntityViewToCustomer) # noqa: E501
Creates assignment of the Entity View to customer. Customer will be able to query Entity View afterwards. Available for users with 'TENANT_ADMIN' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.assign_entity_view_to_customer_using_post(customer_id, entity_view_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str customer_id: A string value representing the customer id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: EntityView
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.assign_entity_view_to_customer_using_post_with_http_info(customer_id, entity_view_id, **kwargs) # noqa: E501
else:
(data) = self.assign_entity_view_to_customer_using_post_with_http_info(customer_id, entity_view_id, **kwargs) # noqa: E501
return data
def assign_entity_view_to_customer_using_post_with_http_info(self, customer_id, entity_view_id, **kwargs): # noqa: E501
"""Assign Entity View to customer (assignEntityViewToCustomer) # noqa: E501
Creates assignment of the Entity View to customer. Customer will be able to query Entity View afterwards. Available for users with 'TENANT_ADMIN' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.assign_entity_view_to_customer_using_post_with_http_info(customer_id, entity_view_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str customer_id: A string value representing the customer id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: EntityView
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['customer_id', 'entity_view_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method assign_entity_view_to_customer_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'customer_id' is set
if ('customer_id' not in params or
params['customer_id'] is None):
raise ValueError("Missing the required parameter `customer_id` when calling `assign_entity_view_to_customer_using_post`") # noqa: E501
# verify the required parameter 'entity_view_id' is set
if ('entity_view_id' not in params or
params['entity_view_id'] is None):
raise ValueError("Missing the required parameter `entity_view_id` when calling `assign_entity_view_to_customer_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'customer_id' in params:
path_params['customerId'] = params['customer_id'] # noqa: E501
if 'entity_view_id' in params:
path_params['entityViewId'] = params['entity_view_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/customer/{customerId}/entityView/{entityViewId}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityView', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def assign_entity_view_to_edge_using_post(self, edge_id, entity_view_id, **kwargs): # noqa: E501
"""Assign entity view to edge (assignEntityViewToEdge) # noqa: E501
Creates assignment of an existing entity view to an instance of The Edge. Assignment works in async way - first, notification event pushed to edge service queue on platform. Second, remote edge service will receive a copy of assignment entity view (Edge will receive this instantly, if it's currently connected, or once it's going to be connected to platform). Third, once entity view will be delivered to edge service, it's going to be available for usage on remote edge instance. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.assign_entity_view_to_edge_using_post(edge_id, entity_view_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str edge_id: edgeId (required)
:param str entity_view_id: entityViewId (required)
:return: EntityView
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.assign_entity_view_to_edge_using_post_with_http_info(edge_id, entity_view_id, **kwargs) # noqa: E501
else:
(data) = self.assign_entity_view_to_edge_using_post_with_http_info(edge_id, entity_view_id, **kwargs) # noqa: E501
return data
def assign_entity_view_to_edge_using_post_with_http_info(self, edge_id, entity_view_id, **kwargs): # noqa: E501
"""Assign entity view to edge (assignEntityViewToEdge) # noqa: E501
Creates assignment of an existing entity view to an instance of The Edge. Assignment works in async way - first, notification event pushed to edge service queue on platform. Second, remote edge service will receive a copy of assignment entity view (Edge will receive this instantly, if it's currently connected, or once it's going to be connected to platform). Third, once entity view will be delivered to edge service, it's going to be available for usage on remote edge instance. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.assign_entity_view_to_edge_using_post_with_http_info(edge_id, entity_view_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str edge_id: edgeId (required)
:param str entity_view_id: entityViewId (required)
:return: EntityView
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['edge_id', 'entity_view_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method assign_entity_view_to_edge_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'edge_id' is set
if ('edge_id' not in params or
params['edge_id'] is None):
raise ValueError("Missing the required parameter `edge_id` when calling `assign_entity_view_to_edge_using_post`") # noqa: E501
# verify the required parameter 'entity_view_id' is set
if ('entity_view_id' not in params or
params['entity_view_id'] is None):
raise ValueError("Missing the required parameter `entity_view_id` when calling `assign_entity_view_to_edge_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'edge_id' in params:
path_params['edgeId'] = params['edge_id'] # noqa: E501
if 'entity_view_id' in params:
path_params['entityViewId'] = params['entity_view_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/edge/{edgeId}/entityView/{entityViewId}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityView', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def assign_entity_view_to_public_customer_using_post(self, entity_view_id, **kwargs): # noqa: E501
"""Make entity view publicly available (assignEntityViewToPublicCustomer) # noqa: E501
Entity View will be available for non-authorized (not logged-in) users. This is useful to create dashboards that you plan to share/embed on a publicly available website. However, users that are logged-in and belong to different tenant will not be able to access the entity view. Available for users with 'TENANT_ADMIN' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.assign_entity_view_to_public_customer_using_post(entity_view_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: EntityView
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.assign_entity_view_to_public_customer_using_post_with_http_info(entity_view_id, **kwargs) # noqa: E501
else:
(data) = self.assign_entity_view_to_public_customer_using_post_with_http_info(entity_view_id, **kwargs) # noqa: E501
return data
def assign_entity_view_to_public_customer_using_post_with_http_info(self, entity_view_id, **kwargs): # noqa: E501
"""Make entity view publicly available (assignEntityViewToPublicCustomer) # noqa: E501
Entity View will be available for non-authorized (not logged-in) users. This is useful to create dashboards that you plan to share/embed on a publicly available website. However, users that are logged-in and belong to different tenant will not be able to access the entity view. Available for users with 'TENANT_ADMIN' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.assign_entity_view_to_public_customer_using_post_with_http_info(entity_view_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: EntityView
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['entity_view_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method assign_entity_view_to_public_customer_using_post" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'entity_view_id' is set
if ('entity_view_id' not in params or
params['entity_view_id'] is None):
raise ValueError("Missing the required parameter `entity_view_id` when calling `assign_entity_view_to_public_customer_using_post`") # noqa: E501
collection_formats = {}
path_params = {}
if 'entity_view_id' in params:
path_params['entityViewId'] = params['entity_view_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/customer/public/entityView/{entityViewId}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='EntityView', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_entity_view_using_delete(self, entity_view_id, **kwargs): # noqa: E501
"""Delete entity view (deleteEntityView) # noqa: E501
Delete the EntityView object based on the provided entity view id. Available for users with 'TENANT_ADMIN' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_entity_view_using_delete(entity_view_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_entity_view_using_delete_with_http_info(entity_view_id, **kwargs) # noqa: E501
else:
(data) = self.delete_entity_view_using_delete_with_http_info(entity_view_id, **kwargs) # noqa: E501
return data
def delete_entity_view_using_delete_with_http_info(self, entity_view_id, **kwargs): # noqa: E501
"""Delete entity view (deleteEntityView) # noqa: E501
Delete the EntityView object based on the provided entity view id. Available for users with 'TENANT_ADMIN' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_entity_view_using_delete_with_http_info(entity_view_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['entity_view_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_entity_view_using_delete" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'entity_view_id' is set
if ('entity_view_id' not in params or
params['entity_view_id'] is None):
raise ValueError("Missing the required parameter `entity_view_id` when calling `delete_entity_view_using_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'entity_view_id' in params:
path_params['entityViewId'] = params['entity_view_id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityView/{entityViewId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def find_by_query_using_post4(self, **kwargs): # noqa: E501
"""Find related entity views (findByQuery) # noqa: E501
Returns all entity views that are related to the specific entity. The entity id, relation type, entity view types, depth of the search, and other query parameters defined using complex 'EntityViewSearchQuery' object. See 'Model' tab of the Parameters for more info. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_by_query_using_post4(async_req=True)
>>> result = thread.get()
:param async_req bool
:param EntityViewSearchQuery body:
:return: list[EntityView]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.find_by_query_using_post4_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.find_by_query_using_post4_with_http_info(**kwargs) # noqa: E501
return data
def find_by_query_using_post4_with_http_info(self, **kwargs): # noqa: E501
"""Find related entity views (findByQuery) # noqa: E501
Returns all entity views that are related to the specific entity. The entity id, relation type, entity view types, depth of the search, and other query parameters defined using complex 'EntityViewSearchQuery' object. See 'Model' tab of the Parameters for more info. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_by_query_using_post4_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param EntityViewSearchQuery body:
:return: list[EntityView]
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method find_by_query_using_post4" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['X-Authorization'] # noqa: E501
return self.api_client.call_api(
'/api/entityViews', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[EntityView]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_customer_entity_view_infos_using_get(self, customer_id, page_size, page, **kwargs):  # noqa: E501
    """Get Customer Entity View info (getCustomerEntityViewInfos)  # noqa: E501

    Returns a page of Entity View info objects assigned to customer. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customer_entity_view_infos_using_get(customer_id, page_size, page, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str customer_id: A string value representing the customer id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :param int page_size: Maximum amount of entities in a one page (required)
    :param int page: Sequence number of page starting from 0 (required)
    :param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ```
    :param str text_search: The case insensitive 'startsWith' filter based on the entity view name.
    :param str sort_property: Property of entity to sort by
    :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
    :return: PageDataEntityViewInfo
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper always unwraps the payload from the
    # (data, status, headers) tuple produced by the *_with_http_info variant.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: hand the request thread straight back.
        return self.get_customer_entity_view_infos_using_get_with_http_info(customer_id, page_size, page, **kwargs)  # noqa: E501
    # Synchronous mode: the delegate already returns the deserialized data.
    return self.get_customer_entity_view_infos_using_get_with_http_info(customer_id, page_size, page, **kwargs)  # noqa: E501
def get_customer_entity_view_infos_using_get_with_http_info(self, customer_id, page_size, page, **kwargs):  # noqa: E501
    """Get Customer Entity View info (getCustomerEntityViewInfos)  # noqa: E501

    Returns a page of Entity View info objects assigned to customer. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customer_entity_view_infos_using_get_with_http_info(customer_id, page_size, page, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str customer_id: A string value representing the customer id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :param int page_size: Maximum amount of entities in a one page (required)
    :param int page: Sequence number of page starting from 0 (required)
    :param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ```
    :param str text_search: The case insensitive 'startsWith' filter based on the entity view name.
    :param str sort_property: Property of entity to sort by
    :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
    :return: PageDataEntityViewInfo
             If the method is called asynchronously,
             returns the request thread.
    """
    # Keyword arguments this endpoint accepts, plus the generic transport
    # options shared by every generated API method.
    all_params = ['customer_id', 'page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Merge **kwargs into params, failing fast on any unknown argument.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_customer_entity_view_infos_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'customer_id' is set
    if ('customer_id' not in params or
            params['customer_id'] is None):
        raise ValueError("Missing the required parameter `customer_id` when calling `get_customer_entity_view_infos_using_get`")  # noqa: E501
    # verify the required parameter 'page_size' is set
    if ('page_size' not in params or
            params['page_size'] is None):
        raise ValueError("Missing the required parameter `page_size` when calling `get_customer_entity_view_infos_using_get`")  # noqa: E501
    # verify the required parameter 'page' is set
    if ('page' not in params or
            params['page'] is None):
        raise ValueError("Missing the required parameter `page` when calling `get_customer_entity_view_infos_using_get`")  # noqa: E501

    # No csv/multi style collection parameters on this endpoint.
    collection_formats = {}

    # Path placeholder {customerId} substitution.
    path_params = {}
    if 'customer_id' in params:
        path_params['customerId'] = params['customer_id']  # noqa: E501

    # Only send query parameters the caller actually supplied.
    query_params = []
    if 'page_size' in params:
        query_params.append(('pageSize', params['page_size']))  # noqa: E501
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'type' in params:
        query_params.append(('type', params['type']))  # noqa: E501
    if 'text_search' in params:
        query_params.append(('textSearch', params['text_search']))  # noqa: E501
    if 'sort_property' in params:
        query_params.append(('sortProperty', params['sort_property']))  # noqa: E501
    if 'sort_order' in params:
        query_params.append(('sortOrder', params['sort_order']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    # NOTE(review): the RFC 6570 query suffix '{?page,pageSize,...}' is part
    # of the generated path string; presumably stripped by api_client before
    # the request is issued — confirm against the ApiClient implementation.
    # Delegate the actual HTTP call and deserialization to the shared client.
    return self.api_client.call_api(
        '/api/customer/{customerId}/entityViewInfos{?page,pageSize,sortOrder,sortProperty,textSearch,type}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageDataEntityViewInfo',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_customer_entity_views_using_get(self, customer_id, page_size, page, **kwargs):  # noqa: E501
    """Get Customer Entity Views (getCustomerEntityViews)  # noqa: E501

    Returns a page of Entity View objects assigned to customer. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customer_entity_views_using_get(customer_id, page_size, page, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str customer_id: A string value representing the customer id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :param int page_size: Maximum amount of entities in a one page (required)
    :param int page: Sequence number of page starting from 0 (required)
    :param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ```
    :param str text_search: The case insensitive 'startsWith' filter based on the entity view name.
    :param str sort_property: Property of entity to sort by
    :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
    :return: PageDataEntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper always want only the deserialized payload,
    # never the full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: return the request thread unchanged.
        return self.get_customer_entity_views_using_get_with_http_info(customer_id, page_size, page, **kwargs)  # noqa: E501
    # Synchronous mode: the delegate already returns the data directly.
    return self.get_customer_entity_views_using_get_with_http_info(customer_id, page_size, page, **kwargs)  # noqa: E501
def get_customer_entity_views_using_get_with_http_info(self, customer_id, page_size, page, **kwargs):  # noqa: E501
    """Get Customer Entity Views (getCustomerEntityViews)  # noqa: E501

    Returns a page of Entity View objects assigned to customer. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_customer_entity_views_using_get_with_http_info(customer_id, page_size, page, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str customer_id: A string value representing the customer id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :param int page_size: Maximum amount of entities in a one page (required)
    :param int page: Sequence number of page starting from 0 (required)
    :param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ```
    :param str text_search: The case insensitive 'startsWith' filter based on the entity view name.
    :param str sort_property: Property of entity to sort by
    :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
    :return: PageDataEntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted endpoint arguments plus the generic transport options.
    all_params = ['customer_id', 'page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Merge **kwargs into params, failing fast on any unknown argument.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_customer_entity_views_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'customer_id' is set
    if ('customer_id' not in params or
            params['customer_id'] is None):
        raise ValueError("Missing the required parameter `customer_id` when calling `get_customer_entity_views_using_get`")  # noqa: E501
    # verify the required parameter 'page_size' is set
    if ('page_size' not in params or
            params['page_size'] is None):
        raise ValueError("Missing the required parameter `page_size` when calling `get_customer_entity_views_using_get`")  # noqa: E501
    # verify the required parameter 'page' is set
    if ('page' not in params or
            params['page'] is None):
        raise ValueError("Missing the required parameter `page` when calling `get_customer_entity_views_using_get`")  # noqa: E501

    # No csv/multi style collection parameters on this endpoint.
    collection_formats = {}

    # Path placeholder {customerId} substitution.
    path_params = {}
    if 'customer_id' in params:
        path_params['customerId'] = params['customer_id']  # noqa: E501

    # Only send query parameters the caller actually supplied.
    query_params = []
    if 'page_size' in params:
        query_params.append(('pageSize', params['page_size']))  # noqa: E501
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'type' in params:
        query_params.append(('type', params['type']))  # noqa: E501
    if 'text_search' in params:
        query_params.append(('textSearch', params['text_search']))  # noqa: E501
    if 'sort_property' in params:
        query_params.append(('sortProperty', params['sort_property']))  # noqa: E501
    if 'sort_order' in params:
        query_params.append(('sortOrder', params['sort_order']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    # NOTE(review): the '{?...}' suffix in the path is an RFC 6570 query
    # template; presumably handled by api_client — confirm.
    # Delegate the actual HTTP call and deserialization to the shared client.
    return self.api_client.call_api(
        '/api/customer/{customerId}/entityViews{?page,pageSize,sortOrder,sortProperty,textSearch,type}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageDataEntityView',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_edge_entity_views_using_get(self, edge_id, page, page_size, **kwargs):  # noqa: E501
    """getEdgeEntityViews  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_edge_entity_views_using_get(edge_id, page, page_size, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str edge_id: edgeId (required)
    :param str page: page (required)
    :param str page_size: pageSize (required)
    :param str type: type
    :param str text_search: textSearch
    :param str sort_property: sortProperty
    :param str sort_order: sortOrder
    :param int start_time: startTime
    :param int end_time: endTime
    :return: PageDataEntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the payload from the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: return the request thread unchanged.
        return self.get_edge_entity_views_using_get_with_http_info(edge_id, page, page_size, **kwargs)  # noqa: E501
    # Synchronous mode: the delegate already returns the data directly.
    return self.get_edge_entity_views_using_get_with_http_info(edge_id, page, page_size, **kwargs)  # noqa: E501
def get_edge_entity_views_using_get_with_http_info(self, edge_id, page, page_size, **kwargs):  # noqa: E501
    """getEdgeEntityViews  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_edge_entity_views_using_get_with_http_info(edge_id, page, page_size, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str edge_id: edgeId (required)
    :param str page: page (required)
    :param str page_size: pageSize (required)
    :param str type: type
    :param str text_search: textSearch
    :param str sort_property: sortProperty
    :param str sort_order: sortOrder
    :param int start_time: startTime
    :param int end_time: endTime
    :return: PageDataEntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted endpoint arguments plus the generic transport options.
    all_params = ['edge_id', 'page', 'page_size', 'type', 'text_search', 'sort_property', 'sort_order', 'start_time', 'end_time']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Merge **kwargs into params, failing fast on any unknown argument.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_edge_entity_views_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'edge_id' is set
    if ('edge_id' not in params or
            params['edge_id'] is None):
        raise ValueError("Missing the required parameter `edge_id` when calling `get_edge_entity_views_using_get`")  # noqa: E501
    # verify the required parameter 'page' is set
    if ('page' not in params or
            params['page'] is None):
        raise ValueError("Missing the required parameter `page` when calling `get_edge_entity_views_using_get`")  # noqa: E501
    # verify the required parameter 'page_size' is set
    if ('page_size' not in params or
            params['page_size'] is None):
        raise ValueError("Missing the required parameter `page_size` when calling `get_edge_entity_views_using_get`")  # noqa: E501

    # No csv/multi style collection parameters on this endpoint.
    collection_formats = {}

    # Path placeholder {edgeId} substitution.
    path_params = {}
    if 'edge_id' in params:
        path_params['edgeId'] = params['edge_id']  # noqa: E501

    # Only send query parameters the caller actually supplied; startTime and
    # endTime bound the time window for this edge query.
    query_params = []
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'page_size' in params:
        query_params.append(('pageSize', params['page_size']))  # noqa: E501
    if 'type' in params:
        query_params.append(('type', params['type']))  # noqa: E501
    if 'text_search' in params:
        query_params.append(('textSearch', params['text_search']))  # noqa: E501
    if 'sort_property' in params:
        query_params.append(('sortProperty', params['sort_property']))  # noqa: E501
    if 'sort_order' in params:
        query_params.append(('sortOrder', params['sort_order']))  # noqa: E501
    if 'start_time' in params:
        query_params.append(('startTime', params['start_time']))  # noqa: E501
    if 'end_time' in params:
        query_params.append(('endTime', params['end_time']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    # NOTE(review): the '{?...}' suffix in the path is an RFC 6570 query
    # template; presumably handled by api_client — confirm.
    # Delegate the actual HTTP call and deserialization to the shared client.
    return self.api_client.call_api(
        '/api/edge/{edgeId}/entityViews{?endTime,page,pageSize,sortOrder,sortProperty,startTime,textSearch,type}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageDataEntityView',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_entity_view_by_id_using_get(self, entity_view_id, **kwargs):  # noqa: E501
    """Get entity view (getEntityViewById)  # noqa: E501

    Fetch the EntityView object based on the provided entity view id. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. See the 'Model' tab for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_entity_view_by_id_using_get(entity_view_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the payload from the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: return the request thread unchanged.
        return self.get_entity_view_by_id_using_get_with_http_info(entity_view_id, **kwargs)  # noqa: E501
    # Synchronous mode: the delegate already returns the data directly.
    return self.get_entity_view_by_id_using_get_with_http_info(entity_view_id, **kwargs)  # noqa: E501
def get_entity_view_by_id_using_get_with_http_info(self, entity_view_id, **kwargs):  # noqa: E501
    """Get entity view (getEntityViewById)  # noqa: E501

    Fetch the EntityView object based on the provided entity view id. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. See the 'Model' tab for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_entity_view_by_id_using_get_with_http_info(entity_view_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted endpoint arguments plus the generic transport options.
    all_params = ['entity_view_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Merge **kwargs into params, failing fast on any unknown argument.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_entity_view_by_id_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'entity_view_id' is set
    if ('entity_view_id' not in params or
            params['entity_view_id'] is None):
        raise ValueError("Missing the required parameter `entity_view_id` when calling `get_entity_view_by_id_using_get`")  # noqa: E501

    # No csv/multi style collection parameters on this endpoint.
    collection_formats = {}

    # Path placeholder {entityViewId} substitution; no query parameters.
    path_params = {}
    if 'entity_view_id' in params:
        path_params['entityViewId'] = params['entity_view_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    # Delegate the actual HTTP call and deserialization to the shared client.
    return self.api_client.call_api(
        '/api/entityView/{entityViewId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='EntityView',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_entity_view_info_by_id_using_get(self, entity_view_id, **kwargs):  # noqa: E501
    """Get Entity View info (getEntityViewInfoById)  # noqa: E501

    Fetch the Entity View info object based on the provided Entity View Id. Entity Views Info extends the Entity View with customer title and 'is public' flag. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. See the 'Model' tab for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_entity_view_info_by_id_using_get(entity_view_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :return: EntityViewInfo
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the payload from the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: return the request thread unchanged.
        return self.get_entity_view_info_by_id_using_get_with_http_info(entity_view_id, **kwargs)  # noqa: E501
    # Synchronous mode: the delegate already returns the data directly.
    return self.get_entity_view_info_by_id_using_get_with_http_info(entity_view_id, **kwargs)  # noqa: E501
def get_entity_view_info_by_id_using_get_with_http_info(self, entity_view_id, **kwargs):  # noqa: E501
    """Get Entity View info (getEntityViewInfoById)  # noqa: E501

    Fetch the Entity View info object based on the provided Entity View Id. Entity Views Info extends the Entity View with customer title and 'is public' flag. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. See the 'Model' tab for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_entity_view_info_by_id_using_get_with_http_info(entity_view_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :return: EntityViewInfo
             If the method is called asynchronously,
             returns the request thread.
    """
    # Accepted endpoint arguments plus the generic transport options.
    all_params = ['entity_view_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Merge **kwargs into params, failing fast on any unknown argument.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_entity_view_info_by_id_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'entity_view_id' is set
    if ('entity_view_id' not in params or
            params['entity_view_id'] is None):
        raise ValueError("Missing the required parameter `entity_view_id` when calling `get_entity_view_info_by_id_using_get`")  # noqa: E501

    # No csv/multi style collection parameters on this endpoint.
    collection_formats = {}

    # Path placeholder {entityViewId} substitution; no query parameters.
    path_params = {}
    if 'entity_view_id' in params:
        path_params['entityViewId'] = params['entity_view_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    # Delegate the actual HTTP call and deserialization to the shared client.
    return self.api_client.call_api(
        '/api/entityView/info/{entityViewId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='EntityViewInfo',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_entity_view_types_using_get(self, **kwargs):  # noqa: E501
    """Get Entity View Types (getEntityViewTypes)  # noqa: E501

    Returns a set of unique entity view types based on entity views that are either owned by the tenant or assigned to the customer which user is performing the request. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_entity_view_types_using_get(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[EntitySubtype]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always unwrap the payload from the (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: return the request thread unchanged.
        return self.get_entity_view_types_using_get_with_http_info(**kwargs)  # noqa: E501
    # Synchronous mode: the delegate already returns the data directly.
    return self.get_entity_view_types_using_get_with_http_info(**kwargs)  # noqa: E501
def get_entity_view_types_using_get_with_http_info(self, **kwargs):  # noqa: E501
    """Get Entity View Types (getEntityViewTypes)  # noqa: E501

    Returns a set of unique entity view types based on entity views that are either owned by the tenant or assigned to the customer which user is performing the request. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_entity_view_types_using_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[EntitySubtype]
             If the method is called asynchronously,
             returns the request thread.
    """
    # This endpoint takes no parameters of its own — only the generic
    # transport options shared by every generated method.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Merge **kwargs into params, failing fast on any unknown argument.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_entity_view_types_using_get" % key
            )
        params[key] = val
    del params['kwargs']

    # No path, query, header, form, or body parameters for this endpoint.
    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    # Delegate the actual HTTP call and deserialization to the shared client.
    return self.api_client.call_api(
        '/api/entityView/types', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[EntitySubtype]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_tenant_entity_view_infos_using_get(self, page_size, page, **kwargs):  # noqa: E501
    """Get Tenant Entity Views (getTenantEntityViews)

    High-level wrapper that fetches one page of entity-view infos owned
    by the tenant and returns just the deserialized payload.
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead:

    >>> thread = api.get_tenant_entity_view_infos_using_get(page_size, page, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page_size: Maximum amount of entities in a one page (required)
    :param int page: Sequence number of page starting from 0 (required)
    :param str type: Entity view type filter, e.g. 'Concrete Mixer'
    :param str text_search: The case insensitive 'startsWith' filter based on the entity view name.
    :param str sort_property: Property of entity to sort by
    :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
    :return: PageDataEntityViewInfo
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the low-level call for the payload only (no status/headers
    # tuple).  It already returns the request thread in async mode, so a
    # single unconditional call covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.get_tenant_entity_view_infos_using_get_with_http_info(
        page_size, page, **kwargs)
def get_tenant_entity_view_infos_using_get_with_http_info(self, page_size, page, **kwargs):  # noqa: E501
    """Get Tenant Entity Views (getTenantEntityViews) # noqa: E501
    Returns a page of entity views info owned by tenant. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' authority. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_tenant_entity_view_infos_using_get_with_http_info(page_size, page, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param int page_size: Maximum amount of entities in a one page (required)
    :param int page: Sequence number of page starting from 0 (required)
    :param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ```
    :param str text_search: The case insensitive 'startsWith' filter based on the entity view name.
    :param str sort_property: Property of entity to sort by
    :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
    :return: PageDataEntityViewInfo
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument this endpoint accepts, plus the
    # framework-level control kwargs shared by all generated methods.
    all_params = ['page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments early; accepted ones are folded
    # into `params` so they can be looked up uniformly below.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_tenant_entity_view_infos_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'page_size' is set
    if ('page_size' not in params or
            params['page_size'] is None):
        raise ValueError("Missing the required parameter `page_size` when calling `get_tenant_entity_view_infos_using_get`")  # noqa: E501
    # verify the required parameter 'page' is set
    if ('page' not in params or
            params['page'] is None):
        raise ValueError("Missing the required parameter `page` when calling `get_tenant_entity_view_infos_using_get`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    # Map the snake_case Python kwargs onto the camelCase HTTP query
    # parameter names expected by the REST API; optional filters are
    # only sent when the caller supplied them.
    query_params = []
    if 'page_size' in params:
        query_params.append(('pageSize', params['page_size']))  # noqa: E501
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'type' in params:
        query_params.append(('type', params['type']))  # noqa: E501
    if 'text_search' in params:
        query_params.append(('textSearch', params['text_search']))  # noqa: E501
    if 'sort_property' in params:
        query_params.append(('sortProperty', params['sort_property']))  # noqa: E501
    if 'sort_order' in params:
        query_params.append(('sortOrder', params['sort_order']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: the JWT token travels in the
    # `X-Authorization` header.
    auth_settings = ['X-Authorization']  # noqa: E501

    # NOTE(review): the path embeds an RFC 6570 query template
    # ('{?page,pageSize,...}'); presumably ApiClient strips it and
    # builds the query string from `query_params` — confirm against
    # tb_rest_client.api_client.ApiClient.call_api.
    return self.api_client.call_api(
        '/api/tenant/entityViewInfos{?page,pageSize,sortOrder,sortProperty,textSearch,type}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageDataEntityViewInfo',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_tenant_entity_view_using_get(self, entity_view_name, **kwargs):  # noqa: E501
    """Get Entity View by name (getTenantEntityView)

    High-level wrapper that looks up an Entity View of the current
    tenant by its name and returns just the deserialized payload.
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead:

    >>> thread = api.get_tenant_entity_view_using_get(entity_view_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str entity_view_name: Entity View name (required)
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the low-level call for the payload only (no status/headers
    # tuple).  It already returns the request thread in async mode, so a
    # single unconditional call covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.get_tenant_entity_view_using_get_with_http_info(
        entity_view_name, **kwargs)
def get_tenant_entity_view_using_get_with_http_info(self, entity_view_name, **kwargs):  # noqa: E501
    """Get Entity View by name (getTenantEntityView) # noqa: E501
    Fetch the Entity View object based on the tenant id and entity view name. Available for users with 'TENANT_ADMIN' authority. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_tenant_entity_view_using_get_with_http_info(entity_view_name, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str entity_view_name: Entity View name (required)
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # The single endpoint parameter, plus the framework-level control
    # kwargs shared by all generated methods.
    all_params = ['entity_view_name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments early; accepted ones are folded
    # into `params` so they can be looked up uniformly below.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_tenant_entity_view_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'entity_view_name' is set
    if ('entity_view_name' not in params or
            params['entity_view_name'] is None):
        raise ValueError("Missing the required parameter `entity_view_name` when calling `get_tenant_entity_view_using_get`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    # The name is sent as the `entityViewName` query parameter.
    query_params = []
    if 'entity_view_name' in params:
        query_params.append(('entityViewName', params['entity_view_name']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: the JWT token travels in the
    # `X-Authorization` header.
    auth_settings = ['X-Authorization']  # noqa: E501

    # NOTE(review): the path embeds an RFC 6570 query template
    # ('{?entityViewName}'); presumably ApiClient strips it and builds
    # the query string from `query_params` — confirm against ApiClient.call_api.
    return self.api_client.call_api(
        '/api/tenant/entityViews{?entityViewName}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='EntityView',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_tenant_entity_views_using_get(self, page_size, page, **kwargs):  # noqa: E501
    """Get Tenant Entity Views (getTenantEntityViews)

    High-level wrapper that fetches one page of entity views owned by
    the tenant and returns just the deserialized payload.  Synchronous
    by default; pass ``async_req=True`` to receive the request thread
    instead:

    >>> thread = api.get_tenant_entity_views_using_get(page_size, page, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page_size: Maximum amount of entities in a one page (required)
    :param int page: Sequence number of page starting from 0 (required)
    :param str type: Entity view type filter, e.g. 'Concrete Mixer'
    :param str text_search: The case insensitive 'startsWith' filter based on the entity view name.
    :param str sort_property: Property of entity to sort by
    :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
    :return: PageDataEntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the low-level call for the payload only (no status/headers
    # tuple).  It already returns the request thread in async mode, so a
    # single unconditional call covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.get_tenant_entity_views_using_get_with_http_info(
        page_size, page, **kwargs)
def get_tenant_entity_views_using_get_with_http_info(self, page_size, page, **kwargs):  # noqa: E501
    """Get Tenant Entity Views (getTenantEntityViews) # noqa: E501
    Returns a page of entity views owned by tenant. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' authority. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_tenant_entity_views_using_get_with_http_info(page_size, page, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param int page_size: Maximum amount of entities in a one page (required)
    :param int page: Sequence number of page starting from 0 (required)
    :param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ```
    :param str text_search: The case insensitive 'startsWith' filter based on the entity view name.
    :param str sort_property: Property of entity to sort by
    :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
    :return: PageDataEntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # Every keyword argument this endpoint accepts, plus the
    # framework-level control kwargs shared by all generated methods.
    all_params = ['page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments early; accepted ones are folded
    # into `params` so they can be looked up uniformly below.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_tenant_entity_views_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'page_size' is set
    if ('page_size' not in params or
            params['page_size'] is None):
        raise ValueError("Missing the required parameter `page_size` when calling `get_tenant_entity_views_using_get`")  # noqa: E501
    # verify the required parameter 'page' is set
    if ('page' not in params or
            params['page'] is None):
        raise ValueError("Missing the required parameter `page` when calling `get_tenant_entity_views_using_get`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    # Map the snake_case Python kwargs onto the camelCase HTTP query
    # parameter names expected by the REST API; optional filters are
    # only sent when the caller supplied them.
    query_params = []
    if 'page_size' in params:
        query_params.append(('pageSize', params['page_size']))  # noqa: E501
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'type' in params:
        query_params.append(('type', params['type']))  # noqa: E501
    if 'text_search' in params:
        query_params.append(('textSearch', params['text_search']))  # noqa: E501
    if 'sort_property' in params:
        query_params.append(('sortProperty', params['sort_property']))  # noqa: E501
    if 'sort_order' in params:
        query_params.append(('sortOrder', params['sort_order']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: the JWT token travels in the
    # `X-Authorization` header.
    auth_settings = ['X-Authorization']  # noqa: E501

    # NOTE(review): the path embeds an RFC 6570 query template
    # ('{?page,pageSize,...}'); presumably ApiClient strips it and
    # builds the query string from `query_params` — confirm against
    # ApiClient.call_api.
    return self.api_client.call_api(
        '/api/tenant/entityViews{?page,pageSize,sortOrder,sortProperty,textSearch,type}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageDataEntityView',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def save_entity_view_using_post(self, **kwargs):  # noqa: E501
    """Save or update entity view (saveEntityView)

    High-level wrapper that creates or updates an Entity View and
    returns just the deserialized payload.  Synchronous by default;
    pass ``async_req=True`` to receive the request thread instead:

    >>> thread = api.save_entity_view_using_post(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param EntityView body:
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the low-level call for the payload only (no status/headers
    # tuple).  It already returns the request thread in async mode, so a
    # single unconditional call covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.save_entity_view_using_post_with_http_info(**kwargs)
def save_entity_view_using_post_with_http_info(self, **kwargs):  # noqa: E501
    """Save or update entity view (saveEntityView) # noqa: E501
    Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. See the 'Model' tab for more details. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.save_entity_view_using_post_with_http_info(async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param EntityView body:
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # The optional request body, plus the framework-level control
    # kwargs shared by all generated methods.
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments early; accepted ones are folded
    # into `params` so they can be looked up uniformly below.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method save_entity_view_using_post" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # The EntityView object is serialized as the JSON request body.
    body_params = None
    if 'body' in params:
        body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting: the JWT token travels in the
    # `X-Authorization` header.
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/entityView', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='EntityView',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def unassign_entity_view_from_customer_using_delete(self, entity_view_id, **kwargs):  # noqa: E501
    """Unassign Entity View from customer (unassignEntityViewFromCustomer)

    High-level wrapper that clears the Entity View's customer
    assignment and returns just the deserialized payload.  Synchronous
    by default; pass ``async_req=True`` to receive the request thread
    instead:

    >>> thread = api.unassign_entity_view_from_customer_using_delete(entity_view_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the low-level call for the payload only (no status/headers
    # tuple).  It already returns the request thread in async mode, so a
    # single unconditional call covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.unassign_entity_view_from_customer_using_delete_with_http_info(
        entity_view_id, **kwargs)
def unassign_entity_view_from_customer_using_delete_with_http_info(self, entity_view_id, **kwargs):  # noqa: E501
    """Unassign Entity View from customer (unassignEntityViewFromCustomer) # noqa: E501
    Clears assignment of the Entity View to customer. Customer will not be able to query Entity View afterwards. Available for users with 'TENANT_ADMIN' authority. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.unassign_entity_view_from_customer_using_delete_with_http_info(entity_view_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # The single endpoint parameter, plus the framework-level control
    # kwargs shared by all generated methods.
    all_params = ['entity_view_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments early; accepted ones are folded
    # into `params` so they can be looked up uniformly below.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method unassign_entity_view_from_customer_using_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'entity_view_id' is set
    if ('entity_view_id' not in params or
            params['entity_view_id'] is None):
        raise ValueError("Missing the required parameter `entity_view_id` when calling `unassign_entity_view_from_customer_using_delete`")  # noqa: E501

    collection_formats = {}

    # The id is substituted into the `{entityViewId}` path segment.
    path_params = {}
    if 'entity_view_id' in params:
        path_params['entityViewId'] = params['entity_view_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: the JWT token travels in the
    # `X-Authorization` header.
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/customer/entityView/{entityViewId}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='EntityView',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def unassign_entity_view_from_edge_using_delete(self, edge_id, entity_view_id, **kwargs):  # noqa: E501
    """Unassign entity view from edge (unassignEntityViewFromEdge)

    High-level wrapper that removes the entity view's edge assignment
    (delivered asynchronously to the edge via its event queue) and
    returns just the deserialized payload.  Synchronous by default;
    pass ``async_req=True`` to receive the request thread instead:

    >>> thread = api.unassign_entity_view_from_edge_using_delete(edge_id, entity_view_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str edge_id: edgeId (required)
    :param str entity_view_id: entityViewId (required)
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the low-level call for the payload only (no status/headers
    # tuple).  It already returns the request thread in async mode, so a
    # single unconditional call covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.unassign_entity_view_from_edge_using_delete_with_http_info(
        edge_id, entity_view_id, **kwargs)
def unassign_entity_view_from_edge_using_delete_with_http_info(self, edge_id, entity_view_id, **kwargs):  # noqa: E501
    """Unassign entity view from edge (unassignEntityViewFromEdge) # noqa: E501
    Clears assignment of the entity view to the edge. Unassignment works in async way - first, 'unassign' notification event pushed to edge queue on platform. Second, remote edge service will receive an 'unassign' command to remove entity view (Edge will receive this instantly, if it's currently connected, or once it's going to be connected to platform). Third, once 'unassign' command will be delivered to edge service, it's going to remove entity view locally. # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.unassign_entity_view_from_edge_using_delete_with_http_info(edge_id, entity_view_id, async_req=True)
    >>> result = thread.get()
    :param async_req bool
    :param str edge_id: edgeId (required)
    :param str entity_view_id: entityViewId (required)
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # The two endpoint parameters, plus the framework-level control
    # kwargs shared by all generated methods.
    all_params = ['edge_id', 'entity_view_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = locals()
    # Reject unknown keyword arguments early; accepted ones are folded
    # into `params` so they can be looked up uniformly below.
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method unassign_entity_view_from_edge_using_delete" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'edge_id' is set
    if ('edge_id' not in params or
            params['edge_id'] is None):
        raise ValueError("Missing the required parameter `edge_id` when calling `unassign_entity_view_from_edge_using_delete`")  # noqa: E501
    # verify the required parameter 'entity_view_id' is set
    if ('entity_view_id' not in params or
            params['entity_view_id'] is None):
        raise ValueError("Missing the required parameter `entity_view_id` when calling `unassign_entity_view_from_edge_using_delete`")  # noqa: E501

    collection_formats = {}

    # Both ids are substituted into their respective path segments.
    path_params = {}
    if 'edge_id' in params:
        path_params['edgeId'] = params['edge_id']  # noqa: E501
    if 'entity_view_id' in params:
        path_params['entityViewId'] = params['entity_view_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting: the JWT token travels in the
    # `X-Authorization` header.
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/edge/{edgeId}/entityView/{entityViewId}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='EntityView',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| # coding: utf-8
"""
ThingsBoard REST API
ThingsBoard open-source IoT platform REST API documentation. # noqa: E501
OpenAPI spec version: 3.3.3-SNAPSHOT
Contact: <EMAIL>
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from tb_rest_client.api_client import ApiClient
class EntityViewControllerApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
    """Create the controller, defaulting to a fresh ApiClient when none is given."""
    # Fall back to a default ApiClient only when the caller supplied nothing.
    self.api_client = ApiClient() if api_client is None else api_client
def assign_entity_view_to_customer_using_post(self, customer_id, entity_view_id, **kwargs):  # noqa: E501
    """Assign Entity View to customer (assignEntityViewToCustomer)

    High-level wrapper that assigns the Entity View to a customer so
    the customer can query it, returning just the deserialized payload.
    Synchronous by default; pass ``async_req=True`` to receive the
    request thread instead:

    >>> thread = api.assign_entity_view_to_customer_using_post(customer_id, entity_view_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str customer_id: A string value representing the customer id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask the low-level call for the payload only (no status/headers
    # tuple).  It already returns the request thread in async mode, so a
    # single unconditional call covers both modes.
    kwargs['_return_http_data_only'] = True
    return self.assign_entity_view_to_customer_using_post_with_http_info(
        customer_id, entity_view_id, **kwargs)
    def assign_entity_view_to_customer_using_post_with_http_info(self, customer_id, entity_view_id, **kwargs):  # noqa: E501
        """Assign Entity View to customer (assignEntityViewToCustomer)  # noqa: E501

        Detailed variant: validates parameters and issues
        POST /api/customer/{customerId}/entityView/{entityViewId}.
        Creates assignment of the Entity View to customer. Available for users
        with 'TENANT_ADMIN' authority.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.assign_entity_view_to_customer_using_post_with_http_info(customer_id, entity_view_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str customer_id: A string value representing the customer id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
        :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
        :return: EntityView
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['customer_id', 'entity_view_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() snapshot — the local names defined above (and the
        # positional parameters) are looked up through this dict below, so
        # they must not be renamed.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method assign_entity_view_to_customer_using_post" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'customer_id' is set
        if ('customer_id' not in params or
                params['customer_id'] is None):
            raise ValueError("Missing the required parameter `customer_id` when calling `assign_entity_view_to_customer_using_post`")  # noqa: E501
        # verify the required parameter 'entity_view_id' is set
        if ('entity_view_id' not in params or
                params['entity_view_id'] is None):
            raise ValueError("Missing the required parameter `entity_view_id` when calling `assign_entity_view_to_customer_using_post`")  # noqa: E501

        collection_formats = {}

        # Substitutions for the {customerId}/{entityViewId} path template.
        path_params = {}
        if 'customer_id' in params:
            path_params['customerId'] = params['customer_id']  # noqa: E501
        if 'entity_view_id' in params:
            path_params['entityViewId'] = params['entity_view_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['X-Authorization']  # noqa: E501

        return self.api_client.call_api(
            '/api/customer/{customerId}/entityView/{entityViewId}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='EntityView',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def assign_entity_view_to_edge_using_post(self, edge_id, entity_view_id, **kwargs): # noqa: E501
"""Assign entity view to edge (assignEntityViewToEdge) # noqa: E501
Creates assignment of an existing entity view to an instance of The Edge. Assignment works in async way - first, notification event pushed to edge service queue on platform. Second, remote edge service will receive a copy of assignment entity view (Edge will receive this instantly, if it's currently connected, or once it's going to be connected to platform). Third, once entity view will be delivered to edge service, it's going to be available for usage on remote edge instance. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.assign_entity_view_to_edge_using_post(edge_id, entity_view_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str edge_id: edgeId (required)
:param str entity_view_id: entityViewId (required)
:return: EntityView
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.assign_entity_view_to_edge_using_post_with_http_info(edge_id, entity_view_id, **kwargs) # noqa: E501
else:
(data) = self.assign_entity_view_to_edge_using_post_with_http_info(edge_id, entity_view_id, **kwargs) # noqa: E501
return data
    def assign_entity_view_to_edge_using_post_with_http_info(self, edge_id, entity_view_id, **kwargs):  # noqa: E501
        """Assign entity view to edge (assignEntityViewToEdge)  # noqa: E501

        Detailed variant: validates parameters and issues
        POST /api/edge/{edgeId}/entityView/{entityViewId}.
        Assignment works asynchronously on the platform side (event queue to
        the edge service); see the wrapper method for details.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.assign_entity_view_to_edge_using_post_with_http_info(edge_id, entity_view_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str edge_id: edgeId (required)
        :param str entity_view_id: entityViewId (required)
        :return: EntityView
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['edge_id', 'entity_view_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() snapshot — local names above double as lookup keys;
        # do not rename.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method assign_entity_view_to_edge_using_post" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'edge_id' is set
        if ('edge_id' not in params or
                params['edge_id'] is None):
            raise ValueError("Missing the required parameter `edge_id` when calling `assign_entity_view_to_edge_using_post`")  # noqa: E501
        # verify the required parameter 'entity_view_id' is set
        if ('entity_view_id' not in params or
                params['entity_view_id'] is None):
            raise ValueError("Missing the required parameter `entity_view_id` when calling `assign_entity_view_to_edge_using_post`")  # noqa: E501

        collection_formats = {}

        # Substitutions for the {edgeId}/{entityViewId} path template.
        path_params = {}
        if 'edge_id' in params:
            path_params['edgeId'] = params['edge_id']  # noqa: E501
        if 'entity_view_id' in params:
            path_params['entityViewId'] = params['entity_view_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['X-Authorization']  # noqa: E501

        return self.api_client.call_api(
            '/api/edge/{edgeId}/entityView/{entityViewId}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='EntityView',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def assign_entity_view_to_public_customer_using_post(self, entity_view_id, **kwargs): # noqa: E501
"""Make entity view publicly available (assignEntityViewToPublicCustomer) # noqa: E501
Entity View will be available for non-authorized (not logged-in) users. This is useful to create dashboards that you plan to share/embed on a publicly available website. However, users that are logged-in and belong to different tenant will not be able to access the entity view. Available for users with 'TENANT_ADMIN' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.assign_entity_view_to_public_customer_using_post(entity_view_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: EntityView
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.assign_entity_view_to_public_customer_using_post_with_http_info(entity_view_id, **kwargs) # noqa: E501
else:
(data) = self.assign_entity_view_to_public_customer_using_post_with_http_info(entity_view_id, **kwargs) # noqa: E501
return data
    def assign_entity_view_to_public_customer_using_post_with_http_info(self, entity_view_id, **kwargs):  # noqa: E501
        """Make entity view publicly available (assignEntityViewToPublicCustomer)  # noqa: E501

        Detailed variant: validates parameters and issues
        POST /api/customer/public/entityView/{entityViewId}.
        Available for users with 'TENANT_ADMIN' authority.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.assign_entity_view_to_public_customer_using_post_with_http_info(entity_view_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
        :return: EntityView
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['entity_view_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() snapshot — local names above double as lookup keys;
        # do not rename.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method assign_entity_view_to_public_customer_using_post" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'entity_view_id' is set
        if ('entity_view_id' not in params or
                params['entity_view_id'] is None):
            raise ValueError("Missing the required parameter `entity_view_id` when calling `assign_entity_view_to_public_customer_using_post`")  # noqa: E501

        collection_formats = {}

        # Substitution for the {entityViewId} path template.
        path_params = {}
        if 'entity_view_id' in params:
            path_params['entityViewId'] = params['entity_view_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['X-Authorization']  # noqa: E501

        return self.api_client.call_api(
            '/api/customer/public/entityView/{entityViewId}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='EntityView',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_entity_view_using_delete(self, entity_view_id, **kwargs): # noqa: E501
"""Delete entity view (deleteEntityView) # noqa: E501
Delete the EntityView object based on the provided entity view id. Available for users with 'TENANT_ADMIN' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_entity_view_using_delete(entity_view_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_entity_view_using_delete_with_http_info(entity_view_id, **kwargs) # noqa: E501
else:
(data) = self.delete_entity_view_using_delete_with_http_info(entity_view_id, **kwargs) # noqa: E501
return data
    def delete_entity_view_using_delete_with_http_info(self, entity_view_id, **kwargs):  # noqa: E501
        """Delete entity view (deleteEntityView)  # noqa: E501

        Detailed variant: validates parameters and issues
        DELETE /api/entityView/{entityViewId}.
        Available for users with 'TENANT_ADMIN' authority.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.delete_entity_view_using_delete_with_http_info(entity_view_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
        :return: None
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['entity_view_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() snapshot — local names above double as lookup keys;
        # do not rename.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_entity_view_using_delete" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'entity_view_id' is set
        if ('entity_view_id' not in params or
                params['entity_view_id'] is None):
            raise ValueError("Missing the required parameter `entity_view_id` when calling `delete_entity_view_using_delete`")  # noqa: E501

        collection_formats = {}

        # Substitution for the {entityViewId} path template.
        path_params = {}
        if 'entity_view_id' in params:
            path_params['entityViewId'] = params['entity_view_id']  # noqa: E501

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['X-Authorization']  # noqa: E501

        # response_type=None: a successful DELETE has no body to deserialize.
        return self.api_client.call_api(
            '/api/entityView/{entityViewId}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def find_by_query_using_post4(self, **kwargs): # noqa: E501
"""Find related entity views (findByQuery) # noqa: E501
Returns all entity views that are related to the specific entity. The entity id, relation type, entity view types, depth of the search, and other query parameters defined using complex 'EntityViewSearchQuery' object. See 'Model' tab of the Parameters for more info. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.find_by_query_using_post4(async_req=True)
>>> result = thread.get()
:param async_req bool
:param EntityViewSearchQuery body:
:return: list[EntityView]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.find_by_query_using_post4_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.find_by_query_using_post4_with_http_info(**kwargs) # noqa: E501
return data
    def find_by_query_using_post4_with_http_info(self, **kwargs):  # noqa: E501
        """Find related entity views (findByQuery)  # noqa: E501

        Detailed variant: issues POST /api/entityViews with the optional
        'EntityViewSearchQuery' request body. Available for users with
        'TENANT_ADMIN' or 'CUSTOMER_USER' authority.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.find_by_query_using_post4_with_http_info(async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param EntityViewSearchQuery body:
        :return: list[EntityView]
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['body']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() snapshot — local names above double as lookup keys;
        # do not rename.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method find_by_query_using_post4" % key
                )
            params[key] = val
        del params['kwargs']

        collection_formats = {}

        path_params = {}

        query_params = []

        header_params = {}

        form_params = []
        local_var_files = {}

        # Serialize the search query, when provided, as the JSON request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['X-Authorization']  # noqa: E501

        return self.api_client.call_api(
            '/api/entityViews', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='list[EntityView]',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_customer_entity_view_infos_using_get(self, customer_id, page_size, page, **kwargs): # noqa: E501
"""Get Customer Entity View info (getCustomerEntityViewInfos) # noqa: E501
Returns a page of Entity View info objects assigned to customer. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_customer_entity_view_infos_using_get(customer_id, page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str customer_id: A string value representing the customer id. For example, '784f394c-42b6-435a-<PASSWORD>ff<PASSWORD>' (required)
:param int page_size: Maximum amount of entities in a one page (required)
:param int page: Sequence number of page starting from 0 (required)
:param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ```
:param str text_search: The case insensitive 'startsWith' filter based on the entity view name.
:param str sort_property: Property of entity to sort by
:param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
:return: PageDataEntityViewInfo
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_customer_entity_view_infos_using_get_with_http_info(customer_id, page_size, page, **kwargs) # noqa: E501
else:
(data) = self.get_customer_entity_view_infos_using_get_with_http_info(customer_id, page_size, page, **kwargs) # noqa: E501
return data
    def get_customer_entity_view_infos_using_get_with_http_info(self, customer_id, page_size, page, **kwargs):  # noqa: E501
        """Get Customer Entity View info (getCustomerEntityViewInfos)  # noqa: E501

        Detailed variant: validates parameters and issues
        GET /api/customer/{customerId}/entityViewInfos with pagination,
        filtering and sorting query parameters. Available for users with
        'TENANT_ADMIN' or 'CUSTOMER_USER' authority.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_customer_entity_view_infos_using_get_with_http_info(customer_id, page_size, page, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str customer_id: A string value representing the customer id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
        :param int page_size: Maximum amount of entities in a one page (required)
        :param int page: Sequence number of page starting from 0 (required)
        :param str type: Entity view type filter, e.g. 'Concrete Mixer'
        :param str text_search: The case insensitive 'startsWith' filter based on the entity view name.
        :param str sort_property: Property of entity to sort by
        :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
        :return: PageDataEntityViewInfo
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['customer_id', 'page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() snapshot — local names above double as lookup keys;
        # do not rename.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_customer_entity_view_infos_using_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'customer_id' is set
        if ('customer_id' not in params or
                params['customer_id'] is None):
            raise ValueError("Missing the required parameter `customer_id` when calling `get_customer_entity_view_infos_using_get`")  # noqa: E501
        # verify the required parameter 'page_size' is set
        if ('page_size' not in params or
                params['page_size'] is None):
            raise ValueError("Missing the required parameter `page_size` when calling `get_customer_entity_view_infos_using_get`")  # noqa: E501
        # verify the required parameter 'page' is set
        if ('page' not in params or
                params['page'] is None):
            raise ValueError("Missing the required parameter `page` when calling `get_customer_entity_view_infos_using_get`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'customer_id' in params:
            path_params['customerId'] = params['customer_id']  # noqa: E501

        # Query string: only the parameters actually supplied are sent.
        query_params = []
        if 'page_size' in params:
            query_params.append(('pageSize', params['page_size']))  # noqa: E501
        if 'page' in params:
            query_params.append(('page', params['page']))  # noqa: E501
        if 'type' in params:
            query_params.append(('type', params['type']))  # noqa: E501
        if 'text_search' in params:
            query_params.append(('textSearch', params['text_search']))  # noqa: E501
        if 'sort_property' in params:
            query_params.append(('sortProperty', params['sort_property']))  # noqa: E501
        if 'sort_order' in params:
            query_params.append(('sortOrder', params['sort_order']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['X-Authorization']  # noqa: E501

        # NOTE(review): the path keeps an RFC 6570 query-expansion suffix
        # '{?page,...}' while query_params are passed separately; presumably
        # ApiClient strips/ignores that suffix — confirm against api_client.
        return self.api_client.call_api(
            '/api/customer/{customerId}/entityViewInfos{?page,pageSize,sortOrder,sortProperty,textSearch,type}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='PageDataEntityViewInfo',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_customer_entity_views_using_get(self, customer_id, page_size, page, **kwargs): # noqa: E501
"""Get Customer Entity Views (getCustomerEntityViews) # noqa: E501
Returns a page of Entity View objects assigned to customer. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_customer_entity_views_using_get(customer_id, page_size, page, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str customer_id: A string value representing the customer id. For example, '784f394c-42b6-435a-<PASSWORD>-<PASSWORD>ff<PASSWORD>' (required)
:param int page_size: Maximum amount of entities in a one page (required)
:param int page: Sequence number of page starting from 0 (required)
:param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ```
:param str text_search: The case insensitive 'startsWith' filter based on the entity view name.
:param str sort_property: Property of entity to sort by
:param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
:return: PageDataEntityView
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_customer_entity_views_using_get_with_http_info(customer_id, page_size, page, **kwargs) # noqa: E501
else:
(data) = self.get_customer_entity_views_using_get_with_http_info(customer_id, page_size, page, **kwargs) # noqa: E501
return data
    def get_customer_entity_views_using_get_with_http_info(self, customer_id, page_size, page, **kwargs):  # noqa: E501
        """Get Customer Entity Views (getCustomerEntityViews)  # noqa: E501

        Detailed variant: validates parameters and issues
        GET /api/customer/{customerId}/entityViews with pagination,
        filtering and sorting query parameters. Available for users with
        'TENANT_ADMIN' or 'CUSTOMER_USER' authority.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True

        >>> thread = api.get_customer_entity_views_using_get_with_http_info(customer_id, page_size, page, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str customer_id: A string value representing the customer id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
        :param int page_size: Maximum amount of entities in a one page (required)
        :param int page: Sequence number of page starting from 0 (required)
        :param str type: Entity view type filter, e.g. 'Concrete Mixer'
        :param str text_search: The case insensitive 'startsWith' filter based on the entity view name.
        :param str sort_property: Property of entity to sort by
        :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
        :return: PageDataEntityView
                 If the method is called asynchronously,
                 returns the request thread.
        """
        all_params = ['customer_id', 'page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # NOTE: locals() snapshot — local names above double as lookup keys;
        # do not rename.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_customer_entity_views_using_get" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'customer_id' is set
        if ('customer_id' not in params or
                params['customer_id'] is None):
            raise ValueError("Missing the required parameter `customer_id` when calling `get_customer_entity_views_using_get`")  # noqa: E501
        # verify the required parameter 'page_size' is set
        if ('page_size' not in params or
                params['page_size'] is None):
            raise ValueError("Missing the required parameter `page_size` when calling `get_customer_entity_views_using_get`")  # noqa: E501
        # verify the required parameter 'page' is set
        if ('page' not in params or
                params['page'] is None):
            raise ValueError("Missing the required parameter `page` when calling `get_customer_entity_views_using_get`")  # noqa: E501

        collection_formats = {}

        path_params = {}
        if 'customer_id' in params:
            path_params['customerId'] = params['customer_id']  # noqa: E501

        # Query string: only the parameters actually supplied are sent.
        query_params = []
        if 'page_size' in params:
            query_params.append(('pageSize', params['page_size']))  # noqa: E501
        if 'page' in params:
            query_params.append(('page', params['page']))  # noqa: E501
        if 'type' in params:
            query_params.append(('type', params['type']))  # noqa: E501
        if 'text_search' in params:
            query_params.append(('textSearch', params['text_search']))  # noqa: E501
        if 'sort_property' in params:
            query_params.append(('sortProperty', params['sort_property']))  # noqa: E501
        if 'sort_order' in params:
            query_params.append(('sortOrder', params['sort_order']))  # noqa: E501

        header_params = {}

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = ['X-Authorization']  # noqa: E501

        # NOTE(review): the path keeps an RFC 6570 query-expansion suffix
        # '{?page,...}' while query_params are passed separately; presumably
        # ApiClient strips/ignores that suffix — confirm against api_client.
        return self.api_client.call_api(
            '/api/customer/{customerId}/entityViews{?page,pageSize,sortOrder,sortProperty,textSearch,type}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='PageDataEntityView',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_edge_entity_views_using_get(self, edge_id, page, page_size, **kwargs):  # noqa: E501
    """getEdgeEntityViews  # noqa: E501

    Fetch a page of entity views related to the specified edge.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_edge_entity_views_using_get(edge_id, page, page_size, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str edge_id: edgeId (required)
    :param str page: page (required)
    :param str page_size: pageSize (required)
    :param str type: type
    :param str text_search: textSearch
    :param str sort_property: sortProperty
    :param str sort_order: sortOrder
    :param int start_time: startTime
    :param int end_time: endTime
    :return: PageDataEntityView
            If the method is called asynchronously,
            returns the request thread.
    """
    # With _return_http_data_only forced on, the *_with_http_info variant
    # already yields exactly what callers expect here: the response data in
    # the synchronous case, or the request thread when async_req=True.
    # Both original branches performed the identical call, so a single
    # pass-through delegation is sufficient.
    kwargs['_return_http_data_only'] = True
    return self.get_edge_entity_views_using_get_with_http_info(
        edge_id, page, page_size, **kwargs)  # noqa: E501
def get_edge_entity_views_using_get_with_http_info(self, edge_id, page, page_size, **kwargs):  # noqa: E501
    """getEdgeEntityViews  # noqa: E501

    Builds and dispatches the GET request for entity views assigned to an
    edge, returning the full HTTP response info.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_edge_entity_views_using_get_with_http_info(edge_id, page, page_size, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str edge_id: edgeId (required)
    :param str page: page (required)
    :param str page_size: pageSize (required)
    :param str type: type
    :param str text_search: textSearch
    :param str sort_property: sortProperty
    :param str sort_order: sortOrder
    :param int start_time: startTime
    :param int end_time: endTime
    :return: PageDataEntityView
            If the method is called asynchronously,
            returns the request thread.
    """
    # Names accepted for this endpoint; any other key in **kwargs is rejected.
    all_params = ['edge_id', 'page', 'page_size', 'type', 'text_search', 'sort_property', 'sort_order', 'start_time', 'end_time']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current locals (declared args + all_params + kwargs) into a
    # dict, then fold validated keyword arguments in and drop the raw kwargs
    # entry. NOTE: this relies on locals() being taken at exactly this point.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_edge_entity_views_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'edge_id' is set
    if ('edge_id' not in params or
            params['edge_id'] is None):
        raise ValueError("Missing the required parameter `edge_id` when calling `get_edge_entity_views_using_get`")  # noqa: E501
    # verify the required parameter 'page' is set
    if ('page' not in params or
            params['page'] is None):
        raise ValueError("Missing the required parameter `page` when calling `get_edge_entity_views_using_get`")  # noqa: E501
    # verify the required parameter 'page_size' is set
    if ('page_size' not in params or
            params['page_size'] is None):
        raise ValueError("Missing the required parameter `page_size` when calling `get_edge_entity_views_using_get`")  # noqa: E501

    collection_formats = {}

    # Map snake_case parameters onto the camelCase path/query names the
    # server expects.
    path_params = {}
    if 'edge_id' in params:
        path_params['edgeId'] = params['edge_id']  # noqa: E501

    query_params = []
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'page_size' in params:
        query_params.append(('pageSize', params['page_size']))  # noqa: E501
    if 'type' in params:
        query_params.append(('type', params['type']))  # noqa: E501
    if 'text_search' in params:
        query_params.append(('textSearch', params['text_search']))  # noqa: E501
    if 'sort_property' in params:
        query_params.append(('sortProperty', params['sort_property']))  # noqa: E501
    if 'sort_order' in params:
        query_params.append(('sortOrder', params['sort_order']))  # noqa: E501
    if 'start_time' in params:
        query_params.append(('startTime', params['start_time']))  # noqa: E501
    if 'end_time' in params:
        query_params.append(('endTime', params['end_time']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    # Delegate the actual HTTP call (and optional async dispatch) to the
    # shared API client.
    return self.api_client.call_api(
        '/api/edge/{edgeId}/entityViews{?endTime,page,pageSize,sortOrder,sortProperty,startTime,textSearch,type}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageDataEntityView',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_entity_view_by_id_using_get(self, entity_view_id, **kwargs):  # noqa: E501
    """Get entity view (getEntityViewById)  # noqa: E501

    Fetch the EntityView object based on the provided entity view id.
    Entity Views limit the degree of exposure of the Device or Asset
    telemetry and attributes to the Customers. Every Entity View references
    exactly one entity (device or asset) and defines telemetry and
    attribute keys that will be visible to the assigned Customer.
    Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_entity_view_by_id_using_get(entity_view_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :return: EntityView
            If the method is called asynchronously,
            returns the request thread.
    """
    # Synchronous and async_req=True paths both delegate to the same
    # *_with_http_info call, so its result is returned directly.
    kwargs['_return_http_data_only'] = True
    return self.get_entity_view_by_id_using_get_with_http_info(
        entity_view_id, **kwargs)  # noqa: E501
def get_entity_view_by_id_using_get_with_http_info(self, entity_view_id, **kwargs):  # noqa: E501
    """Get entity view (getEntityViewById)  # noqa: E501

    Builds and dispatches the GET request for a single entity view by id,
    returning the full HTTP response info.

    Fetch the EntityView object based on the provided entity view id. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. See the 'Model' tab for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_entity_view_by_id_using_get_with_http_info(entity_view_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :return: EntityView
            If the method is called asynchronously,
            returns the request thread.
    """
    # Names accepted for this endpoint; any other key in **kwargs is rejected.
    all_params = ['entity_view_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current locals into a dict, then fold validated keyword
    # arguments in and drop the raw kwargs entry. NOTE: relies on locals()
    # being taken at exactly this point.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_entity_view_by_id_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'entity_view_id' is set
    if ('entity_view_id' not in params or
            params['entity_view_id'] is None):
        raise ValueError("Missing the required parameter `entity_view_id` when calling `get_entity_view_by_id_using_get`")  # noqa: E501

    collection_formats = {}

    # The id is interpolated into the URL path as camelCase 'entityViewId'.
    path_params = {}
    if 'entity_view_id' in params:
        path_params['entityViewId'] = params['entity_view_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    # Delegate the actual HTTP call (and optional async dispatch) to the
    # shared API client.
    return self.api_client.call_api(
        '/api/entityView/{entityViewId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='EntityView',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_entity_view_info_by_id_using_get(self, entity_view_id, **kwargs):  # noqa: E501
    """Get Entity View info (getEntityViewInfoById)  # noqa: E501

    Fetch the Entity View info object based on the provided Entity View Id.
    Entity View Info extends the Entity View with customer title and
    'is public' flag. Available for users with 'TENANT_ADMIN' or
    'CUSTOMER_USER' authority.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_entity_view_info_by_id_using_get(entity_view_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :return: EntityViewInfo
            If the method is called asynchronously,
            returns the request thread.
    """
    # Synchronous and async_req=True paths both delegate to the same
    # *_with_http_info call, so its result is returned directly.
    kwargs['_return_http_data_only'] = True
    return self.get_entity_view_info_by_id_using_get_with_http_info(
        entity_view_id, **kwargs)  # noqa: E501
def get_entity_view_info_by_id_using_get_with_http_info(self, entity_view_id, **kwargs):  # noqa: E501
    """Get Entity View info (getEntityViewInfoById)  # noqa: E501

    Builds and dispatches the GET request for a single entity view info
    object by id, returning the full HTTP response info.

    Fetch the Entity View info object based on the provided Entity View Id. Entity Views Info extends the Entity View with customer title and 'is public' flag. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. See the 'Model' tab for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_entity_view_info_by_id_using_get_with_http_info(entity_view_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :return: EntityViewInfo
            If the method is called asynchronously,
            returns the request thread.
    """
    # Names accepted for this endpoint; any other key in **kwargs is rejected.
    all_params = ['entity_view_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current locals into a dict, then fold validated keyword
    # arguments in and drop the raw kwargs entry. NOTE: relies on locals()
    # being taken at exactly this point.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_entity_view_info_by_id_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'entity_view_id' is set
    if ('entity_view_id' not in params or
            params['entity_view_id'] is None):
        raise ValueError("Missing the required parameter `entity_view_id` when calling `get_entity_view_info_by_id_using_get`")  # noqa: E501

    collection_formats = {}

    # The id is interpolated into the URL path as camelCase 'entityViewId'.
    path_params = {}
    if 'entity_view_id' in params:
        path_params['entityViewId'] = params['entity_view_id']  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    # Delegate the actual HTTP call (and optional async dispatch) to the
    # shared API client.
    return self.api_client.call_api(
        '/api/entityView/info/{entityViewId}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='EntityViewInfo',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_entity_view_types_using_get(self, **kwargs):  # noqa: E501
    """Get Entity View Types (getEntityViewTypes)  # noqa: E501

    Returns a set of unique entity view types based on entity views that
    are either owned by the tenant or assigned to the customer which user
    is performing the request. Available for users with 'TENANT_ADMIN' or
    'CUSTOMER_USER' authority.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_entity_view_types_using_get(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[EntitySubtype]
            If the method is called asynchronously,
            returns the request thread.
    """
    # Synchronous and async_req=True paths both delegate to the same
    # *_with_http_info call, so its result is returned directly.
    kwargs['_return_http_data_only'] = True
    return self.get_entity_view_types_using_get_with_http_info(**kwargs)  # noqa: E501
def get_entity_view_types_using_get_with_http_info(self, **kwargs):  # noqa: E501
    """Get Entity View Types (getEntityViewTypes)  # noqa: E501

    Builds and dispatches the GET request for the set of unique entity view
    types, returning the full HTTP response info.

    Returns a set of unique entity view types based on entity views that are either owned by the tenant or assigned to the customer which user is performing the request. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_entity_view_types_using_get_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :return: list[EntitySubtype]
            If the method is called asynchronously,
            returns the request thread.
    """
    # This endpoint takes no API parameters; only the framework-level
    # keyword arguments are accepted.
    all_params = []  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current locals into a dict, then fold validated keyword
    # arguments in and drop the raw kwargs entry. NOTE: relies on locals()
    # being taken at exactly this point.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_entity_view_types_using_get" % key
            )
        params[key] = val
    del params['kwargs']

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    # Delegate the actual HTTP call (and optional async dispatch) to the
    # shared API client.
    return self.api_client.call_api(
        '/api/entityView/types', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[EntitySubtype]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_tenant_entity_view_infos_using_get(self, page_size, page, **kwargs):  # noqa: E501
    """Get Tenant Entity Views (getTenantEntityViews)  # noqa: E501

    Returns a page of entity views info owned by tenant. The result is
    wrapped with a PageData object that allows iterating over the result
    set using pagination. Available for users with 'TENANT_ADMIN'
    authority.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_tenant_entity_view_infos_using_get(page_size, page, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page_size: Maximum amount of entities in a one page (required)
    :param int page: Sequence number of page starting from 0 (required)
    :param str type: Entity View Filter. Allows to filter entity views based on their type and the 'starts with' expression over their name.
    :param str text_search: The case insensitive 'startsWith' filter based on the entity view name.
    :param str sort_property: Property of entity to sort by
    :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
    :return: PageDataEntityViewInfo
            If the method is called asynchronously,
            returns the request thread.
    """
    # Synchronous and async_req=True paths both delegate to the same
    # *_with_http_info call, so its result is returned directly.
    kwargs['_return_http_data_only'] = True
    return self.get_tenant_entity_view_infos_using_get_with_http_info(
        page_size, page, **kwargs)  # noqa: E501
def get_tenant_entity_view_infos_using_get_with_http_info(self, page_size, page, **kwargs):  # noqa: E501
    """Get Tenant Entity Views (getTenantEntityViews)  # noqa: E501

    Builds and dispatches the paginated GET request for tenant-owned entity
    view infos, returning the full HTTP response info.

    Returns a page of entity views info owned by tenant. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' authority.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_tenant_entity_view_infos_using_get_with_http_info(page_size, page, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page_size: Maximum amount of entities in a one page (required)
    :param int page: Sequence number of page starting from 0 (required)
    :param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ```
    :param str text_search: The case insensitive 'startsWith' filter based on the entity view name.
    :param str sort_property: Property of entity to sort by
    :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
    :return: PageDataEntityViewInfo
            If the method is called asynchronously,
            returns the request thread.
    """
    # Names accepted for this endpoint; any other key in **kwargs is rejected.
    all_params = ['page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current locals into a dict, then fold validated keyword
    # arguments in and drop the raw kwargs entry. NOTE: relies on locals()
    # being taken at exactly this point.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_tenant_entity_view_infos_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'page_size' is set
    if ('page_size' not in params or
            params['page_size'] is None):
        raise ValueError("Missing the required parameter `page_size` when calling `get_tenant_entity_view_infos_using_get`")  # noqa: E501
    # verify the required parameter 'page' is set
    if ('page' not in params or
            params['page'] is None):
        raise ValueError("Missing the required parameter `page` when calling `get_tenant_entity_view_infos_using_get`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    # Map snake_case parameters onto the camelCase query names the server
    # expects.
    query_params = []
    if 'page_size' in params:
        query_params.append(('pageSize', params['page_size']))  # noqa: E501
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'type' in params:
        query_params.append(('type', params['type']))  # noqa: E501
    if 'text_search' in params:
        query_params.append(('textSearch', params['text_search']))  # noqa: E501
    if 'sort_property' in params:
        query_params.append(('sortProperty', params['sort_property']))  # noqa: E501
    if 'sort_order' in params:
        query_params.append(('sortOrder', params['sort_order']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    # Delegate the actual HTTP call (and optional async dispatch) to the
    # shared API client.
    return self.api_client.call_api(
        '/api/tenant/entityViewInfos{?page,pageSize,sortOrder,sortProperty,textSearch,type}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageDataEntityViewInfo',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_tenant_entity_view_using_get(self, entity_view_name, **kwargs):  # noqa: E501
    """Get Entity View by name (getTenantEntityView)  # noqa: E501

    Fetch the Entity View object based on the tenant id and entity view
    name. Available for users with 'TENANT_ADMIN' authority.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_tenant_entity_view_using_get(entity_view_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str entity_view_name: Entity View name (required)
    :return: EntityView
            If the method is called asynchronously,
            returns the request thread.
    """
    # Synchronous and async_req=True paths both delegate to the same
    # *_with_http_info call, so its result is returned directly.
    kwargs['_return_http_data_only'] = True
    return self.get_tenant_entity_view_using_get_with_http_info(
        entity_view_name, **kwargs)  # noqa: E501
def get_tenant_entity_view_using_get_with_http_info(self, entity_view_name, **kwargs):  # noqa: E501
    """Get Entity View by name (getTenantEntityView)  # noqa: E501

    Builds and dispatches the GET request for a tenant entity view looked
    up by name, returning the full HTTP response info.

    Fetch the Entity View object based on the tenant id and entity view name. Available for users with 'TENANT_ADMIN' authority.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_tenant_entity_view_using_get_with_http_info(entity_view_name, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str entity_view_name: Entity View name (required)
    :return: EntityView
            If the method is called asynchronously,
            returns the request thread.
    """
    # Names accepted for this endpoint; any other key in **kwargs is rejected.
    all_params = ['entity_view_name']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current locals into a dict, then fold validated keyword
    # arguments in and drop the raw kwargs entry. NOTE: relies on locals()
    # being taken at exactly this point.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_tenant_entity_view_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'entity_view_name' is set
    if ('entity_view_name' not in params or
            params['entity_view_name'] is None):
        raise ValueError("Missing the required parameter `entity_view_name` when calling `get_tenant_entity_view_using_get`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    # The name is sent as the camelCase 'entityViewName' query parameter.
    query_params = []
    if 'entity_view_name' in params:
        query_params.append(('entityViewName', params['entity_view_name']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    # Delegate the actual HTTP call (and optional async dispatch) to the
    # shared API client.
    return self.api_client.call_api(
        '/api/tenant/entityViews{?entityViewName}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='EntityView',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def get_tenant_entity_views_using_get(self, page_size, page, **kwargs):  # noqa: E501
    """Get Tenant Entity Views (getTenantEntityViews)  # noqa: E501

    Returns a page of entity views owned by tenant. The result is wrapped
    with a PageData object that allows iterating over the result set using
    pagination. Available for users with 'TENANT_ADMIN' authority.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_tenant_entity_views_using_get(page_size, page, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page_size: Maximum amount of entities in a one page (required)
    :param int page: Sequence number of page starting from 0 (required)
    :param str type: Entity View Filter. Allows to filter entity views based on their type and the 'starts with' expression over their name.
    :param str text_search: The case insensitive 'startsWith' filter based on the entity view name.
    :param str sort_property: Property of entity to sort by
    :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
    :return: PageDataEntityView
            If the method is called asynchronously,
            returns the request thread.
    """
    # Synchronous and async_req=True paths both delegate to the same
    # *_with_http_info call, so its result is returned directly.
    kwargs['_return_http_data_only'] = True
    return self.get_tenant_entity_views_using_get_with_http_info(
        page_size, page, **kwargs)  # noqa: E501
def get_tenant_entity_views_using_get_with_http_info(self, page_size, page, **kwargs):  # noqa: E501
    """Get Tenant Entity Views (getTenantEntityViews)  # noqa: E501

    Builds and dispatches the paginated GET request for tenant-owned entity
    views, returning the full HTTP response info.

    Returns a page of entity views owned by tenant. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' authority.  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True

    >>> thread = api.get_tenant_entity_views_using_get_with_http_info(page_size, page, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param int page_size: Maximum amount of entities in a one page (required)
    :param int page: Sequence number of page starting from 0 (required)
    :param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ```
    :param str text_search: The case insensitive 'startsWith' filter based on the entity view name.
    :param str sort_property: Property of entity to sort by
    :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING)
    :return: PageDataEntityView
            If the method is called asynchronously,
            returns the request thread.
    """
    # Names accepted for this endpoint; any other key in **kwargs is rejected.
    all_params = ['page_size', 'page', 'type', 'text_search', 'sort_property', 'sort_order']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    # Snapshot the current locals into a dict, then fold validated keyword
    # arguments in and drop the raw kwargs entry. NOTE: relies on locals()
    # being taken at exactly this point.
    params = locals()
    for key, val in six.iteritems(params['kwargs']):
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_tenant_entity_views_using_get" % key
            )
        params[key] = val
    del params['kwargs']
    # verify the required parameter 'page_size' is set
    if ('page_size' not in params or
            params['page_size'] is None):
        raise ValueError("Missing the required parameter `page_size` when calling `get_tenant_entity_views_using_get`")  # noqa: E501
    # verify the required parameter 'page' is set
    if ('page' not in params or
            params['page'] is None):
        raise ValueError("Missing the required parameter `page` when calling `get_tenant_entity_views_using_get`")  # noqa: E501

    collection_formats = {}

    path_params = {}

    # Map snake_case parameters onto the camelCase query names the server
    # expects.
    query_params = []
    if 'page_size' in params:
        query_params.append(('pageSize', params['page_size']))  # noqa: E501
    if 'page' in params:
        query_params.append(('page', params['page']))  # noqa: E501
    if 'type' in params:
        query_params.append(('type', params['type']))  # noqa: E501
    if 'text_search' in params:
        query_params.append(('textSearch', params['text_search']))  # noqa: E501
    if 'sort_property' in params:
        query_params.append(('sortProperty', params['sort_property']))  # noqa: E501
    if 'sort_order' in params:
        query_params.append(('sortOrder', params['sort_order']))  # noqa: E501

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    # Delegate the actual HTTP call (and optional async dispatch) to the
    # shared API client.
    return self.api_client.call_api(
        '/api/tenant/entityViews{?page,pageSize,sortOrder,sortProperty,textSearch,type}', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='PageDataEntityView',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def save_entity_view_using_post(self, **kwargs):  # noqa: E501
    """Save or update entity view (saveEntityView)  # noqa: E501

    Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. See the 'Model' tab for more details.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.save_entity_view_using_post(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param EntityView body:
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # This convenience wrapper only ever wants the response payload, never
    # the full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Whether the call is synchronous (returns the data directly) or
    # asynchronous (returns the request thread), the detailed variant
    # already produces exactly what we must hand back.
    return self.save_entity_view_using_post_with_http_info(**kwargs)  # noqa: E501
def save_entity_view_using_post_with_http_info(self, **kwargs):  # noqa: E501
    """Save or update entity view (saveEntityView)  # noqa: E501

    Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. See the 'Model' tab for more details.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.save_entity_view_using_post_with_http_info(async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param EntityView body:
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied
    """
    # Validate keyword arguments explicitly instead of relying on the
    # fragile ``params = locals()`` idiom, which silently captured every
    # local defined before it (``self``, ``all_params``, ...).  Native
    # ``dict.items()`` also removes the third-party ``six`` dependency.
    all_params = ['body']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = {}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method save_entity_view_using_post" % key
            )
        params[key] = val

    collection_formats = {}

    path_params = {}

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    # Request body: the EntityView object, if the caller supplied one.
    body_params = params.get('body')

    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/entityView', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='EntityView',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def unassign_entity_view_from_customer_using_delete(self, entity_view_id, **kwargs):  # noqa: E501
    """Unassign Entity View from customer (unassignEntityViewFromCustomer)  # noqa: E501

    Clears assignment of the Entity View to customer. Customer will not be able to query Entity View afterwards. Available for users with 'TENANT_ADMIN' authority.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.unassign_entity_view_from_customer_using_delete(entity_view_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # Ask for the payload only; the HTTP status/header tuple is an
    # implementation detail of the detailed variant.
    kwargs['_return_http_data_only'] = True
    # Synchronous callers get the data, asynchronous callers get the
    # request thread — in both cases exactly what the detailed variant
    # returns, so forward its result unchanged.
    return self.unassign_entity_view_from_customer_using_delete_with_http_info(entity_view_id, **kwargs)  # noqa: E501
def unassign_entity_view_from_customer_using_delete_with_http_info(self, entity_view_id, **kwargs):  # noqa: E501
    """Unassign Entity View from customer (unassignEntityViewFromCustomer)  # noqa: E501

    Clears assignment of the Entity View to customer. Customer will not be able to query Entity View afterwards. Available for users with 'TENANT_ADMIN' authority.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.unassign_entity_view_from_customer_using_delete_with_http_info(entity_view_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required)
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied
    :raises ValueError: if `entity_view_id` is None
    """
    # Validate keyword arguments explicitly instead of relying on the
    # fragile ``params = locals()`` idiom (which captured ``self`` and
    # ``all_params`` too) and the third-party ``six`` shim.
    all_params = ['entity_view_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = {'entity_view_id': entity_view_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method unassign_entity_view_from_customer_using_delete" % key
            )
        params[key] = val
    # verify the required parameter 'entity_view_id' is set
    if params.get('entity_view_id') is None:
        raise ValueError("Missing the required parameter `entity_view_id` when calling `unassign_entity_view_from_customer_using_delete`")  # noqa: E501

    collection_formats = {}

    # The entity view id is interpolated into the URL template below.
    path_params = {'entityViewId': params['entity_view_id']}  # noqa: E501

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/customer/entityView/{entityViewId}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='EntityView',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def unassign_entity_view_from_edge_using_delete(self, edge_id, entity_view_id, **kwargs):  # noqa: E501
    """Unassign entity view from edge (unassignEntityViewFromEdge)  # noqa: E501

    Clears assignment of the entity view to the edge. Unassignment works in async way - first, 'unassign' notification event pushed to edge queue on platform. Second, remote edge service will receive an 'unassign' command to remove entity view (Edge will receive this instantly, if it's currently connected, or once it's going to be connected to platform). Third, once 'unassign' command will be delivered to edge service, it's going to remove entity view locally.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.unassign_entity_view_from_edge_using_delete(edge_id, entity_view_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str edge_id: edgeId (required)
    :param str entity_view_id: entityViewId (required)
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    """
    # Request the payload only; callers of this wrapper never see the raw
    # HTTP response tuple.
    kwargs['_return_http_data_only'] = True
    # The detailed variant already returns the data (sync) or the request
    # thread (async), so its result is forwarded as-is in both modes.
    return self.unassign_entity_view_from_edge_using_delete_with_http_info(edge_id, entity_view_id, **kwargs)  # noqa: E501
def unassign_entity_view_from_edge_using_delete_with_http_info(self, edge_id, entity_view_id, **kwargs):  # noqa: E501
    """Unassign entity view from edge (unassignEntityViewFromEdge)  # noqa: E501

    Clears assignment of the entity view to the edge. Unassignment works in async way - first, 'unassign' notification event pushed to edge queue on platform. Second, remote edge service will receive an 'unassign' command to remove entity view (Edge will receive this instantly, if it's currently connected, or once it's going to be connected to platform). Third, once 'unassign' command will be delivered to edge service, it's going to remove entity view locally.  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.unassign_entity_view_from_edge_using_delete_with_http_info(edge_id, entity_view_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str edge_id: edgeId (required)
    :param str entity_view_id: entityViewId (required)
    :return: EntityView
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied
    :raises ValueError: if `edge_id` or `entity_view_id` is None
    """
    # Validate keyword arguments explicitly instead of relying on the
    # fragile ``params = locals()`` idiom (which captured ``self`` and
    # ``all_params`` too) and the third-party ``six`` shim.
    all_params = ['edge_id', 'entity_view_id']  # noqa: E501
    all_params.append('async_req')
    all_params.append('_return_http_data_only')
    all_params.append('_preload_content')
    all_params.append('_request_timeout')

    params = {'edge_id': edge_id, 'entity_view_id': entity_view_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method unassign_entity_view_from_edge_using_delete" % key
            )
        params[key] = val
    # verify the required parameter 'edge_id' is set
    if params.get('edge_id') is None:
        raise ValueError("Missing the required parameter `edge_id` when calling `unassign_entity_view_from_edge_using_delete`")  # noqa: E501
    # verify the required parameter 'entity_view_id' is set
    if params.get('entity_view_id') is None:
        raise ValueError("Missing the required parameter `entity_view_id` when calling `unassign_entity_view_from_edge_using_delete`")  # noqa: E501

    collection_formats = {}

    # Both ids are interpolated into the URL template below.
    path_params = {
        'edgeId': params['edge_id'],  # noqa: E501
        'entityViewId': params['entity_view_id'],  # noqa: E501
    }

    query_params = []

    header_params = {}

    form_params = []
    local_var_files = {}

    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501

    # Authentication setting
    auth_settings = ['X-Authorization']  # noqa: E501

    return self.api_client.call_api(
        '/api/edge/{edgeId}/entityView/{entityViewId}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='EntityView',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| en | 0.679774 | # coding: utf-8 ThingsBoard REST API ThingsBoard open-source IoT platform REST API documentation. # noqa: E501 OpenAPI spec version: 3.3.3-SNAPSHOT Contact: <EMAIL> Generated by: https://github.com/swagger-api/swagger-codegen.git # noqa: F401 # python 2 and python 3 compatibility library NOTE: This class is auto generated by the swagger code generator program. Do not edit the class manually. Ref: https://github.com/swagger-api/swagger-codegen # noqa: E501 Assign Entity View to customer (assignEntityViewToCustomer) # noqa: E501 Creates assignment of the Entity View to customer. Customer will be able to query Entity View afterwards. Available for users with 'TENANT_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.assign_entity_view_to_customer_using_post(customer_id, entity_view_id, async_req=True) >>> result = thread.get() :param async_req bool :param str customer_id: A string value representing the customer id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: EntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # noqa: E501 # noqa: E501 Assign Entity View to customer (assignEntityViewToCustomer) # noqa: E501 Creates assignment of the Entity View to customer. Customer will be able to query Entity View afterwards. Available for users with 'TENANT_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.assign_entity_view_to_customer_using_post_with_http_info(customer_id, entity_view_id, async_req=True) >>> result = thread.get() :param async_req bool :param str customer_id: A string value representing the customer id. 
For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: EntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # verify the required parameter 'customer_id' is set # noqa: E501 # verify the required parameter 'entity_view_id' is set # noqa: E501 # noqa: E501 # noqa: E501 # HTTP header `Accept` # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 # noqa: E501 Assign entity view to edge (assignEntityViewToEdge) # noqa: E501 Creates assignment of an existing entity view to an instance of The Edge. Assignment works in async way - first, notification event pushed to edge service queue on platform. Second, remote edge service will receive a copy of assignment entity view (Edge will receive this instantly, if it's currently connected, or once it's going to be connected to platform). Third, once entity view will be delivered to edge service, it's going to be available for usage on remote edge instance. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.assign_entity_view_to_edge_using_post(edge_id, entity_view_id, async_req=True) >>> result = thread.get() :param async_req bool :param str edge_id: edgeId (required) :param str entity_view_id: entityViewId (required) :return: EntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # noqa: E501 # noqa: E501 Assign entity view to edge (assignEntityViewToEdge) # noqa: E501 Creates assignment of an existing entity view to an instance of The Edge. Assignment works in async way - first, notification event pushed to edge service queue on platform. 
Second, remote edge service will receive a copy of assignment entity view (Edge will receive this instantly, if it's currently connected, or once it's going to be connected to platform). Third, once entity view will be delivered to edge service, it's going to be available for usage on remote edge instance. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.assign_entity_view_to_edge_using_post_with_http_info(edge_id, entity_view_id, async_req=True) >>> result = thread.get() :param async_req bool :param str edge_id: edgeId (required) :param str entity_view_id: entityViewId (required) :return: EntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # verify the required parameter 'edge_id' is set # noqa: E501 # verify the required parameter 'entity_view_id' is set # noqa: E501 # noqa: E501 # noqa: E501 # HTTP header `Accept` # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 # noqa: E501 Make entity view publicly available (assignEntityViewToPublicCustomer) # noqa: E501 Entity View will be available for non-authorized (not logged-in) users. This is useful to create dashboards that you plan to share/embed on a publicly available website. However, users that are logged-in and belong to different tenant will not be able to access the entity view. Available for users with 'TENANT_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.assign_entity_view_to_public_customer_using_post(entity_view_id, async_req=True) >>> result = thread.get() :param async_req bool :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: EntityView If the method is called asynchronously, returns the request thread. 
# noqa: E501 # noqa: E501 # noqa: E501 Make entity view publicly available (assignEntityViewToPublicCustomer) # noqa: E501 Entity View will be available for non-authorized (not logged-in) users. This is useful to create dashboards that you plan to share/embed on a publicly available website. However, users that are logged-in and belong to different tenant will not be able to access the entity view. Available for users with 'TENANT_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.assign_entity_view_to_public_customer_using_post_with_http_info(entity_view_id, async_req=True) >>> result = thread.get() :param async_req bool :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: EntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # verify the required parameter 'entity_view_id' is set # noqa: E501 # noqa: E501 # HTTP header `Accept` # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 # noqa: E501 Delete entity view (deleteEntityView) # noqa: E501 Delete the EntityView object based on the provided entity view id. Available for users with 'TENANT_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_entity_view_using_delete(entity_view_id, async_req=True) >>> result = thread.get() :param async_req bool :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: None If the method is called asynchronously, returns the request thread. # noqa: E501 # noqa: E501 # noqa: E501 Delete entity view (deleteEntityView) # noqa: E501 Delete the EntityView object based on the provided entity view id. 
Available for users with 'TENANT_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.delete_entity_view_using_delete_with_http_info(entity_view_id, async_req=True) >>> result = thread.get() :param async_req bool :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: None If the method is called asynchronously, returns the request thread. # noqa: E501 # verify the required parameter 'entity_view_id' is set # noqa: E501 # noqa: E501 # HTTP header `Accept` # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 # noqa: E501 Find related entity views (findByQuery) # noqa: E501 Returns all entity views that are related to the specific entity. The entity id, relation type, entity view types, depth of the search, and other query parameters defined using complex 'EntityViewSearchQuery' object. See 'Model' tab of the Parameters for more info. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.find_by_query_using_post4(async_req=True) >>> result = thread.get() :param async_req bool :param EntityViewSearchQuery body: :return: list[EntityView] If the method is called asynchronously, returns the request thread. # noqa: E501 # noqa: E501 # noqa: E501 Find related entity views (findByQuery) # noqa: E501 Returns all entity views that are related to the specific entity. The entity id, relation type, entity view types, depth of the search, and other query parameters defined using complex 'EntityViewSearchQuery' object. See 'Model' tab of the Parameters for more info. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. 
# noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.find_by_query_using_post4_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param EntityViewSearchQuery body: :return: list[EntityView] If the method is called asynchronously, returns the request thread. # noqa: E501 # HTTP header `Accept` # noqa: E501 # HTTP header `Content-Type` # noqa: E501 # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 # noqa: E501 Get Customer Entity View info (getCustomerEntityViewInfos) # noqa: E501 Returns a page of Entity View info objects assigned to customer. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_customer_entity_view_infos_using_get(customer_id, page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str customer_id: A string value representing the customer id. 
For example, '784f394c-42b6-435a-<PASSWORD>ff<PASSWORD>' (required) :param int page_size: Maximum amount of entities in a one page (required) :param int page: Sequence number of page starting from 0 (required) :param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ``` :param str text_search: The case insensitive 'startsWith' filter based on the entity view name. :param str sort_property: Property of entity to sort by :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING) :return: PageDataEntityViewInfo If the method is called asynchronously, returns the request thread. # noqa: E501 # noqa: E501 # noqa: E501 Get Customer Entity View info (getCustomerEntityViewInfos) # noqa: E501 Returns a page of Entity View info objects assigned to customer. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_customer_entity_view_infos_using_get_with_http_info(customer_id, page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str customer_id: A string value representing the customer id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :param int page_size: Maximum amount of entities in a one page (required) :param int page: Sequence number of page starting from 0 (required) :param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ``` :param str text_search: The case insensitive 'startsWith' filter based on the entity view name. :param str sort_property: Property of entity to sort by :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING) :return: PageDataEntityViewInfo If the method is called asynchronously, returns the request thread. # noqa: E501 # verify the required parameter 'customer_id' is set # noqa: E501 # verify the required parameter 'page_size' is set # noqa: E501 # verify the required parameter 'page' is set # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # HTTP header `Accept` # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 # noqa: E501 Get Customer Entity Views (getCustomerEntityViews) # noqa: E501 Returns a page of Entity View objects assigned to customer. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. 
# noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_customer_entity_views_using_get(customer_id, page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str customer_id: A string value representing the customer id. For example, '784f394c-42b6-435a-<PASSWORD>-<PASSWORD>ff<PASSWORD>' (required) :param int page_size: Maximum amount of entities in a one page (required) :param int page: Sequence number of page starting from 0 (required) :param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ``` :param str text_search: The case insensitive 'startsWith' filter based on the entity view name. :param str sort_property: Property of entity to sort by :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING) :return: PageDataEntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # noqa: E501 # noqa: E501 Get Customer Entity Views (getCustomerEntityViews) # noqa: E501 Returns a page of Entity View objects assigned to customer. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_customer_entity_views_using_get_with_http_info(customer_id, page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param str customer_id: A string value representing the customer id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :param int page_size: Maximum amount of entities in a one page (required) :param int page: Sequence number of page starting from 0 (required) :param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ``` :param str text_search: The case insensitive 'startsWith' filter based on the entity view name. :param str sort_property: Property of entity to sort by :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING) :return: PageDataEntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # verify the required parameter 'customer_id' is set # noqa: E501 # verify the required parameter 'page_size' is set # noqa: E501 # verify the required parameter 'page' is set # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # HTTP header `Accept` # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 # noqa: E501 getEdgeEntityViews # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_edge_entity_views_using_get(edge_id, page, page_size, async_req=True) >>> result = thread.get() :param async_req bool :param str edge_id: edgeId (required) :param str page: page (required) :param str page_size: pageSize (required) :param str type: type :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :param int start_time: startTime :param int end_time: endTime :return: PageDataEntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # noqa: E501 # noqa: E501 getEdgeEntityViews # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_edge_entity_views_using_get_with_http_info(edge_id, page, page_size, async_req=True) >>> result = thread.get() :param async_req bool :param str edge_id: edgeId (required) :param str page: page (required) :param str page_size: pageSize (required) :param str type: type :param str text_search: textSearch :param str sort_property: sortProperty :param str sort_order: sortOrder :param int start_time: startTime :param int end_time: endTime :return: PageDataEntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # verify the required parameter 'edge_id' is set # noqa: E501 # verify the required parameter 'page' is set # noqa: E501 # verify the required parameter 'page_size' is set # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # HTTP header `Accept` # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 # noqa: E501 Get entity view (getEntityViewById) # noqa: E501 Fetch the EntityView object based on the provided entity view id. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. 
Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. See the 'Model' tab for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_entity_view_by_id_using_get(entity_view_id, async_req=True) >>> result = thread.get() :param async_req bool :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: EntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # noqa: E501 # noqa: E501 Get entity view (getEntityViewById) # noqa: E501 Fetch the EntityView object based on the provided entity view id. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. See the 'Model' tab for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_entity_view_by_id_using_get_with_http_info(entity_view_id, async_req=True) >>> result = thread.get() :param async_req bool :param str entity_view_id: A string value representing the entity view id. 
For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: EntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # verify the required parameter 'entity_view_id' is set # noqa: E501 # noqa: E501 # HTTP header `Accept` # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 # noqa: E501 Get Entity View info (getEntityViewInfoById) # noqa: E501 Fetch the Entity View info object based on the provided Entity View Id. Entity Views Info extends the Entity View with customer title and 'is public' flag. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. See the 'Model' tab for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_entity_view_info_by_id_using_get(entity_view_id, async_req=True) >>> result = thread.get() :param async_req bool :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: EntityViewInfo If the method is called asynchronously, returns the request thread. # noqa: E501 # noqa: E501 # noqa: E501 Get Entity View info (getEntityViewInfoById) # noqa: E501 Fetch the Entity View info object based on the provided Entity View Id. Entity Views Info extends the Entity View with customer title and 'is public' flag. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. 
Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. See the 'Model' tab for more details. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_entity_view_info_by_id_using_get_with_http_info(entity_view_id, async_req=True) >>> result = thread.get() :param async_req bool :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: EntityViewInfo If the method is called asynchronously, returns the request thread. # noqa: E501 # verify the required parameter 'entity_view_id' is set # noqa: E501 # noqa: E501 # HTTP header `Accept` # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 # noqa: E501 Get Entity View Types (getEntityViewTypes) # noqa: E501 Returns a set of unique entity view types based on entity views that are either owned by the tenant or assigned to the customer which user is performing the request. Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_entity_view_types_using_get(async_req=True) >>> result = thread.get() :param async_req bool :return: list[EntitySubtype] If the method is called asynchronously, returns the request thread. # noqa: E501 # noqa: E501 # noqa: E501 Get Entity View Types (getEntityViewTypes) # noqa: E501 Returns a set of unique entity view types based on entity views that are either owned by the tenant or assigned to the customer which user is performing the request. 
Available for users with 'TENANT_ADMIN' or 'CUSTOMER_USER' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_entity_view_types_using_get_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :return: list[EntitySubtype] If the method is called asynchronously, returns the request thread. # noqa: E501 # HTTP header `Accept` # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 # noqa: E501 Get Tenant Entity Views (getTenantEntityViews) # noqa: E501 Returns a page of entity views info owned by tenant. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_tenant_entity_view_infos_using_get(page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param int page_size: Maximum amount of entities in a one page (required) :param int page: Sequence number of page starting from 0 (required) :param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. 
For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ``` :param str text_search: The case insensitive 'startsWith' filter based on the entity view name. :param str sort_property: Property of entity to sort by :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING) :return: PageDataEntityViewInfo If the method is called asynchronously, returns the request thread. # noqa: E501 # noqa: E501 # noqa: E501 Get Tenant Entity Views (getTenantEntityViews) # noqa: E501 Returns a page of entity views info owned by tenant. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_tenant_entity_view_infos_using_get_with_http_info(page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param int page_size: Maximum amount of entities in a one page (required) :param int page: Sequence number of page starting from 0 (required) :param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. 
For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ``` :param str text_search: The case insensitive 'startsWith' filter based on the entity view name. :param str sort_property: Property of entity to sort by :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING) :return: PageDataEntityViewInfo If the method is called asynchronously, returns the request thread. # noqa: E501 # verify the required parameter 'page_size' is set # noqa: E501 # verify the required parameter 'page' is set # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # HTTP header `Accept` # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 # noqa: E501 Get Entity View by name (getTenantEntityView) # noqa: E501 Fetch the Entity View object based on the tenant id and entity view name. Available for users with 'TENANT_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_tenant_entity_view_using_get(entity_view_name, async_req=True) >>> result = thread.get() :param async_req bool :param str entity_view_name: Entity View name (required) :return: EntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # noqa: E501 # noqa: E501 Get Entity View by name (getTenantEntityView) # noqa: E501 Fetch the Entity View object based on the tenant id and entity view name. Available for users with 'TENANT_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_tenant_entity_view_using_get_with_http_info(entity_view_name, async_req=True) >>> result = thread.get() :param async_req bool :param str entity_view_name: Entity View name (required) :return: EntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # verify the required parameter 'entity_view_name' is set # noqa: E501 # noqa: E501 # HTTP header `Accept` # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 # noqa: E501 Get Tenant Entity Views (getTenantEntityViews) # noqa: E501 Returns a page of entity views owned by tenant. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_tenant_entity_views_using_get(page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param int page_size: Maximum amount of entities in a one page (required) :param int page: Sequence number of page starting from 0 (required) :param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. 
For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ``` :param str text_search: The case insensitive 'startsWith' filter based on the entity view name. :param str sort_property: Property of entity to sort by :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING) :return: PageDataEntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # noqa: E501 # noqa: E501 Get Tenant Entity Views (getTenantEntityViews) # noqa: E501 Returns a page of entity views owned by tenant. Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. You can specify parameters to filter the results. The result is wrapped with PageData object that allows you to iterate over result set using pagination. See the 'Model' tab of the Response Class for more details. Available for users with 'TENANT_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_tenant_entity_views_using_get_with_http_info(page_size, page, async_req=True) >>> result = thread.get() :param async_req bool :param int page_size: Maximum amount of entities in a one page (required) :param int page: Sequence number of page starting from 0 (required) :param str type: ## Entity View Filter Allows to filter entity views based on their type and the **'starts with'** expression over their name. 
For example, this entity filter selects all 'Concrete Mixer' entity views which name starts with 'CAT': ```json { \"type\": \"entityViewType\", \"entityViewType\": \"Concrete Mixer\", \"entityViewNameFilter\": \"CAT\" } ``` :param str text_search: The case insensitive 'startsWith' filter based on the entity view name. :param str sort_property: Property of entity to sort by :param str sort_order: Sort order. ASC (ASCENDING) or DESC (DESCENDING) :return: PageDataEntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # verify the required parameter 'page_size' is set # noqa: E501 # verify the required parameter 'page' is set # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # noqa: E501 # HTTP header `Accept` # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 # noqa: E501 Save or update entity view (saveEntityView) # noqa: E501 Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. See the 'Model' tab for more details. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.save_entity_view_using_post(async_req=True) >>> result = thread.get() :param async_req bool :param EntityView body: :return: EntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # noqa: E501 # noqa: E501 Save or update entity view (saveEntityView) # noqa: E501 Entity Views limit the degree of exposure of the Device or Asset telemetry and attributes to the Customers. 
Every Entity View references exactly one entity (device or asset) and defines telemetry and attribute keys that will be visible to the assigned Customer. As a Tenant Administrator you are able to create multiple EVs per Device or Asset and assign them to different Customers. See the 'Model' tab for more details. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.save_entity_view_using_post_with_http_info(async_req=True) >>> result = thread.get() :param async_req bool :param EntityView body: :return: EntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # HTTP header `Accept` # noqa: E501 # HTTP header `Content-Type` # noqa: E501 # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 # noqa: E501 Unassign Entity View from customer (unassignEntityViewFromCustomer) # noqa: E501 Clears assignment of the Entity View to customer. Customer will not be able to query Entity View afterwards. Available for users with 'TENANT_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.unassign_entity_view_from_customer_using_delete(entity_view_id, async_req=True) >>> result = thread.get() :param async_req bool :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: EntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # noqa: E501 # noqa: E501 Unassign Entity View from customer (unassignEntityViewFromCustomer) # noqa: E501 Clears assignment of the Entity View to customer. Customer will not be able to query Entity View afterwards. Available for users with 'TENANT_ADMIN' authority. # noqa: E501 This method makes a synchronous HTTP request by default. 
To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.unassign_entity_view_from_customer_using_delete_with_http_info(entity_view_id, async_req=True) >>> result = thread.get() :param async_req bool :param str entity_view_id: A string value representing the entity view id. For example, '784f394c-42b6-435a-983c-b7beff2784f9' (required) :return: EntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # verify the required parameter 'entity_view_id' is set # noqa: E501 # noqa: E501 # HTTP header `Accept` # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 # noqa: E501 Unassign entity view from edge (unassignEntityViewFromEdge) # noqa: E501 Clears assignment of the entity view to the edge. Unassignment works in async way - first, 'unassign' notification event pushed to edge queue on platform. Second, remote edge service will receive an 'unassign' command to remove entity view (Edge will receive this instantly, if it's currently connected, or once it's going to be connected to platform). Third, once 'unassign' command will be delivered to edge service, it's going to remove entity view locally. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.unassign_entity_view_from_edge_using_delete(edge_id, entity_view_id, async_req=True) >>> result = thread.get() :param async_req bool :param str edge_id: edgeId (required) :param str entity_view_id: entityViewId (required) :return: EntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # noqa: E501 # noqa: E501 Unassign entity view from edge (unassignEntityViewFromEdge) # noqa: E501 Clears assignment of the entity view to the edge. Unassignment works in async way - first, 'unassign' notification event pushed to edge queue on platform. 
Second, remote edge service will receive an 'unassign' command to remove entity view (Edge will receive this instantly, if it's currently connected, or once it's going to be connected to platform). Third, once 'unassign' command will be delivered to edge service, it's going to remove entity view locally. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.unassign_entity_view_from_edge_using_delete_with_http_info(edge_id, entity_view_id, async_req=True) >>> result = thread.get() :param async_req bool :param str edge_id: edgeId (required) :param str entity_view_id: entityViewId (required) :return: EntityView If the method is called asynchronously, returns the request thread. # noqa: E501 # verify the required parameter 'edge_id' is set # noqa: E501 # verify the required parameter 'entity_view_id' is set # noqa: E501 # noqa: E501 # noqa: E501 # HTTP header `Accept` # noqa: E501 # Authentication setting # noqa: E501 # noqa: E501 | 1.870198 | 2 |
CAMOnion/widgets/positionwidget.py | JonRob812/CAMOnion | 0 | 6619748 | <gh_stars>0
from CAMOnion.ui.camo_position_widget_ui import Ui_Form
from CAMOnion.ui.camo_position_frame_ui import Ui_Frame
from CAMOnion.core.widget_tools import get_combo_data
from PyQt5.QtWidgets import QFrame, QWidget
from PyQt5 import QtCore as qc
class PositionWidget(QFrame, Ui_Frame, QWidget):
    """Frame that lets the user choose the current origin and the active setup.

    Wraps the generated ``Ui_Frame`` layout and re-broadcasts the two combo-box
    selections as Qt signals carrying the selected item's attached data.
    """

    # Emitted with the data attached to the newly selected origin combo item.
    change_current_origin = qc.pyqtSignal(object)
    # Emitted with the data attached to the newly selected setup combo item.
    change_active_setup = qc.pyqtSignal(object)

    def __init__(self):
        super().__init__()
        # Builds the widgets declared in Ui_Frame (origin_combo, active_setup_combo).
        self.setupUi(self)
        # Forward combo selection changes as this widget's own signals.
        self.origin_combo.currentIndexChanged.connect(self.emit_change_origin)
        self.active_setup_combo.currentIndexChanged.connect(self.emit_change_active_setup)

    def emit_change_origin(self):
        # NOTE(review): get_combo_data presumably returns the current item's
        # userData -- confirm against CAMOnion.core.widget_tools.
        self.change_current_origin.emit(get_combo_data(self.origin_combo))

    def emit_change_active_setup(self):
        self.change_active_setup.emit(get_combo_data(self.active_setup_combo))
| from CAMOnion.ui.camo_position_widget_ui import Ui_Form
from CAMOnion.ui.camo_position_frame_ui import Ui_Frame
from CAMOnion.core.widget_tools import get_combo_data
from PyQt5.QtWidgets import QFrame, QWidget
from PyQt5 import QtCore as qc
class PositionWidget(QFrame, Ui_Frame, QWidget):
    """Origin/setup selection frame.

    Hosts the generated ``Ui_Frame`` layout and exposes the two combo-box
    selections as Qt signals whose payload is the selected item's data.
    """

    # Fired with the data of the origin combo's newly selected item.
    change_current_origin = qc.pyqtSignal(object)
    # Fired with the data of the active-setup combo's newly selected item.
    change_active_setup = qc.pyqtSignal(object)

    def __init__(self):
        super().__init__()
        # Instantiates Ui_Frame's child widgets, including both combo boxes.
        self.setupUi(self)
        # Relay index changes on either combo as this widget's own signals.
        self.origin_combo.currentIndexChanged.connect(self.emit_change_origin)
        self.active_setup_combo.currentIndexChanged.connect(self.emit_change_active_setup)

    def emit_change_origin(self):
        # NOTE(review): assumes get_combo_data returns the current item's
        # attached data -- verify in CAMOnion.core.widget_tools.
        self.change_current_origin.emit(get_combo_data(self.origin_combo))

    def emit_change_active_setup(self):
        self.change_active_setup.emit(get_combo_data(self.active_setup_combo))
model.py | rasorensen90/Udacity-Deep-RL-nano-degree-Projects | 0 | 6619749 | <filename>model.py
import torch
import torch.nn as nn
import torch.nn.functional as F
class QNetwork(nn.Module):
    """Actor (Policy) Model: maps environment states to per-action values.

    Supports three architectures selected by ``modeltype``:

    * ``'dqn'`` / ``'double_dqn'`` -- a plain MLP head (the double-DQN
      difference lives in the training loop, not the network shape).
    * ``'dueling_dqn'`` -- shared trunk followed by separate advantage and
      value streams, combined as Q(s,a) = V(s) + A(s,a) - mean_a A(s,a).
    """

    SUPPORTED_TYPES = ('dqn', 'double_dqn', 'dueling_dqn')

    def __init__(self, state_size, action_size, seed, modeltype='dqn'):
        """Initialize parameters and build model.

        Params
        ======
            state_size (int): Dimension of each state
            action_size (int): Dimension of each action
            seed (int): Random seed
            modeltype (str): One of ``SUPPORTED_TYPES``

        Raises
        ======
            ValueError: for an unknown ``modeltype``. (Previously this only
            printed a warning and left a half-initialized network whose
            ``forward`` returned ``None``.)
        """
        super(QNetwork, self).__init__()
        if modeltype not in self.SUPPORTED_TYPES:
            raise ValueError(
                f"Unknown model type {modeltype!r}; expected one of {self.SUPPORTED_TYPES}"
            )
        self.seed = torch.manual_seed(seed)
        self.modeltype = modeltype
        fc1_units = 128
        fc2_units = 32
        # Shared feature trunk; created first in every branch so that seeded
        # weight initialization matches the original layer-creation order.
        self.fc1 = nn.Linear(state_size, fc1_units)
        self.fc2 = nn.Linear(fc1_units, fc2_units)
        if modeltype == 'dueling_dqn':
            # Advantage stream A(s, a) and state-value stream V(s).
            self.fc_act1 = nn.Linear(fc2_units, fc2_units)
            self.fc_act2 = nn.Linear(fc2_units, action_size)
            self.fc_val1 = nn.Linear(fc2_units, fc2_units)
            self.fc_val2 = nn.Linear(fc2_units, 1)
        else:
            self.fc3 = nn.Linear(fc2_units, action_size)

    def forward(self, state):
        """Build a network that maps state -> action values."""
        x = F.relu(self.fc1(state))
        x = F.relu(self.fc2(x))
        if self.modeltype == 'dueling_dqn':
            advantage = self.fc_act2(F.relu(self.fc_act1(x)))
            value = self.fc_val2(F.relu(self.fc_val1(x)))
            # Q(s,a) = V(s) + A(s,a) - mean_a A(s,a); mean with keepdim
            # broadcasts over the action dimension (Wang et al., 2016).
            return value + advantage - advantage.mean(dim=1, keepdim=True)
        return self.fc3(x)
| <filename>model.py
import torch
import torch.nn as nn
import torch.nn.functional as F
class QNetwork(nn.Module):
    """Actor (Policy) Model."""

    def __init__(self, state_size, action_size, seed, modeltype='dqn'):
        """Initialize parameters and build model.

        Params
        ======
            state_size (int): Dimension of each state
            action_size (int): Dimension of each action
            seed (int): Random seed
            modeltype (str): Type of DQN ['dqn', 'double_dqn', 'dueling_dqn']
        """
        super(QNetwork, self).__init__()
        self.seed = torch.manual_seed(seed)
        self.modeltype = modeltype
        hidden_a = 128
        hidden_b = 32
        # Layer creation order matters: each nn.Linear draws from the seeded
        # RNG, so it must match across equivalent implementations.
        if modeltype == 'dqn' or modeltype == 'double_dqn':
            self.fc1 = nn.Linear(state_size, hidden_a)
            self.fc2 = nn.Linear(hidden_a, hidden_b)
            self.fc3 = nn.Linear(hidden_b, action_size)
        elif modeltype == 'dueling_dqn':
            self.fc1 = nn.Linear(state_size, hidden_a)
            self.fc2 = nn.Linear(hidden_a, hidden_b)
            # Separate advantage and state-value streams.
            self.fc_act1 = nn.Linear(hidden_b, hidden_b)
            self.fc_act2 = nn.Linear(hidden_b, action_size)
            self.fc_val1 = nn.Linear(hidden_b, hidden_b)
            self.fc_val2 = nn.Linear(hidden_b, 1)
        else:
            print('Unknown model type')

    def forward(self, state):
        """Build a network that maps state -> action values."""
        kind = self.modeltype
        if kind == 'dueling_dqn':
            features = F.relu(self.fc2(F.relu(self.fc1(state))))
            adv = self.fc_act2(F.relu(self.fc_act1(features)))
            val = self.fc_val2(F.relu(self.fc_val1(features)))
            # Q(s,a) = V(s) + A(s,a) - mean_a A(s,a); broadcasting replaces
            # the explicit expand_as without changing the result.
            return val + adv - adv.mean(1).unsqueeze(1)
        if kind == 'dqn' or kind == 'double_dqn':
            hidden = F.relu(self.fc2(F.relu(self.fc1(state))))
            return self.fc3(hidden)
        print('Unknown model type')
| en | 0.539038 | Actor (Policy) Model. Initialize parameters and build model. Params ====== state_size (int): Dimension of each state action_size (int): Dimension of each action seed (int): Random seed modeltype (str): Type of DQN ['dqn', 'double_dqn', 'dueling_dqn'] Build a network that maps state -> action values. | 2.944413 | 3 |
DailyCodingProblem/Problem_2.py | byarmis/DailyCodingProblem | 0 | 6619750 | #!/usr/bin/env python3
'''
Given an array of integers, return a new array such that each element at index i of the new array is the product of all the numbers in the original array except the one at i.
For example, if our input was [1, 2, 3, 4, 5], the expected output would be [120, 60, 40, 30, 24]. If our input was [3, 2, 1], the expected output would be [2, 3, 6].
Follow-up: what if you can't use division?
'''
def with_division(list_in):
    """Return a list whose i-th entry is the product of all elements except list_in[i].

    Uses division: the total product is computed once, then divided by each
    element, so results are floats (they compare equal to the integer answers).

    Fix over the original: an empty input now returns [] instead of raising
    IndexError from ``list_in[0]``.

    Raises:
        ZeroDivisionError: if any element is zero (inherent to this approach;
        see without_division for a division-free version).
    """
    if not list_in:
        return []
    product = 1
    for value in list_in:
        product *= value
    return [product / value for value in list_in]
def without_division(list_in):
    """Return a list whose i-th entry is the product of all elements except list_in[i].

    Division-free O(n) implementation using prefix and suffix products:
    after the first pass output[i] holds the product of everything left of i;
    the second pass multiplies in the product of everything right of i.

    Fixes over the original: replaces the accidental O(n^2) double loop, and
    drops the confusing ``int(list_in[(i+1) % len] != 0)`` initializer -- the
    empty product is 1, so a single-element input yields [1].
    """
    n = len(list_in)
    output = [1] * n
    left = 1
    for i in range(n):
        output[i] = left
        left *= list_in[i]
    right = 1
    for i in range(n - 1, -1, -1):
        output[i] *= right
        right *= list_in[i]
    return output
# Smoke tests from the problem statement. Note with_division returns floats
# (true division), which compare equal to these integer literals.
assert with_division([1, 2, 3, 4, 5]) == [120, 60, 40, 30, 24]
assert with_division([3, 2, 1]) == [2, 3, 6]
assert without_division([1, 2, 3, 4, 5]) == [120, 60, 40, 30, 24]
assert without_division([3, 2, 1]) == [2, 3, 6]
| #!/usr/bin/env python3
'''
Given an array of integers, return a new array such that each element at index i of the new array is the product of all the numbers in the original array except the one at i.
For example, if our input was [1, 2, 3, 4, 5], the expected output would be [120, 60, 40, 30, 24]. If our input was [3, 2, 1], the expected output would be [2, 3, 6].
Follow-up: what if you can't use division?
'''
def with_division(list_in):
    """Return a list where entry i is the product of all elements except list_in[i].

    Computes the total product starting from the first element (so an empty
    input raises IndexError, as before), then divides by each element; results
    are therefore floats, and a zero element raises ZeroDivisionError.
    """
    total = list_in[0]
    for idx in range(1, len(list_in)):
        total *= list_in[idx]
    result = []
    for value in list_in:
        result.append(total / value)
    return result
def without_division(list_in):
    """Return a list where entry i is the product of all elements except list_in[i].

    Division-free O(n^2) version: for each position, multiply together every
    other element. The accumulator starts at int(next_element != 0), which
    equals 1 except when the following element is zero -- in that case the
    zero would force the product to 0 anyway, so results are unchanged.
    """
    size = len(list_in)
    results = []
    for pos in range(size):
        acc = int(list_in[(pos + 1) % size] != 0)
        for other in range(size):
            if other != pos:
                acc *= list_in[other]
        results.append(acc)
    return results
# Smoke tests covering both problem-statement examples for each approach.
# with_division yields floats that compare equal to these integers.
assert with_division([1, 2, 3, 4, 5]) == [120, 60, 40, 30, 24]
assert with_division([3, 2, 1]) == [2, 3, 6]
assert without_division([1, 2, 3, 4, 5]) == [120, 60, 40, 30, 24]
assert without_division([3, 2, 1]) == [2, 3, 6]
| en | 0.875701 | #!/usr/bin/env python3 Given an array of integers, return a new array such that each element at index i of the new array is the product of all the numbers in the original array except the one at i. For example, if our input was [1, 2, 3, 4, 5], the expected output would be [120, 60, 40, 30, 24]. If our input was [3, 2, 1], the expected output would be [2, 3, 6]. Follow-up: what if you can't use division? | 4.106238 | 4 |