hexsha stringlengths 40 40 | size int64 2 1.02M | ext stringclasses 10
values | lang stringclasses 1
value | max_stars_repo_path stringlengths 4 245 | max_stars_repo_name stringlengths 6 130 | max_stars_repo_head_hexsha stringlengths 40 40 | max_stars_repo_licenses listlengths 1 10 | max_stars_count int64 1 191k ⌀ | max_stars_repo_stars_event_min_datetime stringlengths 24 24 ⌀ | max_stars_repo_stars_event_max_datetime stringlengths 24 24 ⌀ | max_issues_repo_path stringlengths 4 245 | max_issues_repo_name stringlengths 6 130 | max_issues_repo_head_hexsha stringlengths 40 40 | max_issues_repo_licenses listlengths 1 10 | max_issues_count int64 1 67k ⌀ | max_issues_repo_issues_event_min_datetime stringlengths 24 24 ⌀ | max_issues_repo_issues_event_max_datetime stringlengths 24 24 ⌀ | max_forks_repo_path stringlengths 4 245 | max_forks_repo_name stringlengths 6 130 | max_forks_repo_head_hexsha stringlengths 40 40 | max_forks_repo_licenses listlengths 1 10 | max_forks_count int64 1 105k ⌀ | max_forks_repo_forks_event_min_datetime stringlengths 24 24 ⌀ | max_forks_repo_forks_event_max_datetime stringlengths 24 24 ⌀ | content stringlengths 2 1.02M | avg_line_length float64 1 417k | max_line_length int64 1 987k | alphanum_fraction float64 0 1 | content_no_comment stringlengths 0 1.01M | is_comment_constant_removed bool 1
class | is_sharp_comment_removed bool 1
class |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f72fd038fc632f6e1fa32dff24a488528fb8fed5 | 230 | py | Python | xga/relations/clusters/Mλ.py | DavidT3/XGA | cde51c3f29f98b5f1e981fb6d327c04072b0ba38 | [
"BSD-3-Clause"
] | 12 | 2020-05-16T09:45:45.000Z | 2022-02-14T14:41:46.000Z | xga/relations/clusters/Mλ.py | DavidT3/XGA | cde51c3f29f98b5f1e981fb6d327c04072b0ba38 | [
"BSD-3-Clause"
] | 684 | 2020-05-28T08:52:09.000Z | 2022-03-31T10:56:24.000Z | xga/relations/clusters/Mλ.py | DavidT3/XGA | cde51c3f29f98b5f1e981fb6d327c04072b0ba38 | [
"BSD-3-Clause"
] | 2 | 2022-02-04T10:55:55.000Z | 2022-02-04T11:30:56.000Z | # This code is a part of XMM: Generate and Analyse (XGA), a module designed for the XMM Cluster Survey (XCS).
# Last modified by David J Turner (david.turner@sussex.ac.uk) 11/12/2020, 16:41. Copyright (c) David J Turner
| 20.909091 | 110 | 0.704348 | true | true | |
f72fd0a975c56ec2d4a2ead5794352e000898434 | 699 | py | Python | test_app.py | john-lock/chatter | 46c0c61f7e5798478a3630aadbfc47d281189edd | [
"MIT"
] | null | null | null | test_app.py | john-lock/chatter | 46c0c61f7e5798478a3630aadbfc47d281189edd | [
"MIT"
] | 2 | 2019-09-17T18:47:31.000Z | 2019-09-17T18:47:34.000Z | test_app.py | john-lock/chatter | 46c0c61f7e5798478a3630aadbfc47d281189edd | [
"MIT"
] | null | null | null | import pytest
import app
@pytest.fixture
def client():
    """Yield a Flask test client for the chatter app with TESTING enabled."""
    # TESTING makes Flask propagate errors to the test runner.
    app.app.config['TESTING'] = True
    test_client = app.app.test_client()
    yield test_client
def test_client_page(client):
    """The landing page should serve both the instructions and the chat form."""
    response = client.get('/')
    lead_banner = b'<p class="lead">A Pusher-powered chat application built using Flask</p>'
    email_field = b'<input type="email" class="form-control" id="email" placeholder="Email Address*" required>'
    # Main page (instructions)
    assert lead_banner in response.data
    # Chat window
    assert email_field in response.data
def test_adminpage(client):
    """The admin page should render its chat-window selection prompt."""
    response = client.get('/admin')
    # Admin page (0 connected clients)
    assert b'Select a chat window to show and sent messages to' in response.data
# TODO: Selenium script with clients interacting with the admin
| 25.888889 | 115 | 0.69671 | import pytest
import app
@pytest.fixture
def client():
    # Flask test client for the chatter app; TESTING propagates errors.
    app.app.config['TESTING'] = True
    client = app.app.test_client()
    yield client
def test_client_page(client):
    # Landing page must contain both the lead banner and the email input.
    rv = client.get('/')
    assert b'<p class="lead">A Pusher-powered chat application built using Flask</p>' in rv.data
    assert b'<input type="email" class="form-control" id="email" placeholder="Email Address*" required>' in rv.data
def test_adminpage(client):
    # Admin page shows the chat-window selection prompt.
    rv = client.get('/admin')
    assert b'Select a chat window to show and sent messages to' in rv.data
| true | true |
f72fd0f50afdb4c7cb225054bd39d9412b196c9c | 1,314 | py | Python | aries_cloudagent/protocols/discovery/v1_0/handlers/tests/test_query_handler.py | msembinelli/aries-cloudagent-python | a5a29dab30238f52dcfb6645aab115d01720a5c7 | [
"Apache-2.0"
] | 1 | 2020-11-30T05:47:54.000Z | 2020-11-30T05:47:54.000Z | aries_cloudagent/protocols/discovery/v1_0/handlers/tests/test_query_handler.py | msembinelli/aries-cloudagent-python | a5a29dab30238f52dcfb6645aab115d01720a5c7 | [
"Apache-2.0"
] | 1 | 2020-06-16T20:20:55.000Z | 2020-06-16T20:20:55.000Z | aries_cloudagent/protocols/discovery/v1_0/handlers/tests/test_query_handler.py | msembinelli/aries-cloudagent-python | a5a29dab30238f52dcfb6645aab115d01720a5c7 | [
"Apache-2.0"
] | 2 | 2020-02-18T20:34:01.000Z | 2021-03-12T16:18:30.000Z | import pytest
from aries_cloudagent.core.protocol_registry import ProtocolRegistry
from aries_cloudagent.messaging.base_handler import HandlerException
from aries_cloudagent.messaging.request_context import RequestContext
from aries_cloudagent.messaging.responder import MockResponder
from ...handlers.query_handler import QueryHandler
from ...messages.disclose import Disclose
from ...messages.query import Query
# Synthetic protocol family/type registered in the fixture below.
TEST_MESSAGE_FAMILY = "TEST_FAMILY"
TEST_MESSAGE_TYPE = TEST_MESSAGE_FAMILY + "/MESSAGE"
@pytest.fixture()
def request_context() -> RequestContext:
    """Request context whose injector carries a ProtocolRegistry with one type."""
    ctx = RequestContext()
    registry = ProtocolRegistry()
    registry.register_message_types({TEST_MESSAGE_TYPE: object()})
    ctx.injector.bind_instance(ProtocolRegistry, registry)
    yield ctx
class TestQueryHandler:
    """Tests for the discover-features QueryHandler."""
    @pytest.mark.asyncio
    async def test_query_all(self, request_context):
        # A wildcard query must produce exactly one Disclose reply listing the
        # registered protocol family, with no explicit routing target.
        request_context.message = Query(query="*")
        handler = QueryHandler()
        responder = MockResponder()
        await handler.handle(request_context, responder)
        messages = responder.messages
        assert len(messages) == 1
        result, target = messages[0]
        assert isinstance(result, Disclose) and result.protocols
        assert result.protocols[0]["pid"] == TEST_MESSAGE_FAMILY
        assert not target
| 34.578947 | 69 | 0.758752 | import pytest
from aries_cloudagent.core.protocol_registry import ProtocolRegistry
from aries_cloudagent.messaging.base_handler import HandlerException
from aries_cloudagent.messaging.request_context import RequestContext
from aries_cloudagent.messaging.responder import MockResponder
from ...handlers.query_handler import QueryHandler
from ...messages.disclose import Disclose
from ...messages.query import Query
# Synthetic protocol family/type registered in the fixture below.
TEST_MESSAGE_FAMILY = "TEST_FAMILY"
TEST_MESSAGE_TYPE = TEST_MESSAGE_FAMILY + "/MESSAGE"
@pytest.fixture()
def request_context() -> RequestContext:
    # Request context whose injector carries a ProtocolRegistry with one type.
    ctx = RequestContext()
    registry = ProtocolRegistry()
    registry.register_message_types({TEST_MESSAGE_TYPE: object()})
    ctx.injector.bind_instance(ProtocolRegistry, registry)
    yield ctx
class TestQueryHandler:
    @pytest.mark.asyncio
    async def test_query_all(self, request_context):
        # Wildcard query -> one Disclose reply, no routing target.
        request_context.message = Query(query="*")
        handler = QueryHandler()
        responder = MockResponder()
        await handler.handle(request_context, responder)
        messages = responder.messages
        assert len(messages) == 1
        result, target = messages[0]
        assert isinstance(result, Disclose) and result.protocols
        assert result.protocols[0]["pid"] == TEST_MESSAGE_FAMILY
        assert not target
| true | true |
f72fd1f63d52cbb7ac69ac0d3b60be8df77af67c | 4,536 | py | Python | benchmark/startQiskit_Class3343.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | benchmark/startQiskit_Class3343.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | benchmark/startQiskit_Class3343.py | UCLA-SEAL/QDiff | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | [
"BSD-3-Clause"
] | null | null | null | # qubit number=4
# total number=49
import cirq
import qiskit
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
def bitwise_xor(s: str, t: str) -> str:
    """XOR two bit strings position-wise; the resulting string is reversed."""
    bits = [str(int(s[i]) ^ int(t[i])) for i in range(len(s))]
    return ''.join(bits[::-1])
def bitwise_dot(s: str, t: str) -> str:
    """Dot product of two bit strings modulo 2, returned as '0' or '1'."""
    total = sum(int(s[i]) * int(t[i]) for i in range(len(s)))
    return str(total % 2)
def build_oracle(n: int, f) -> QuantumCircuit:
    """Build the oracle O_f as an (n+1)-qubit circuit.

    For every n-bit string r with f(r) == "1", a multi-controlled Toffoli
    ('noancilla' mode) flips the target qubit; X gates temporarily map the
    '0' bits of r onto the control pattern and are undone afterwards.
    """
    controls = QuantumRegister(n, "ofc")
    target = QuantumRegister(1, "oft")
    oracle = QuantumCircuit(controls, target, name="Of")
    for i in range(2 ** n):
        pattern = np.binary_repr(i, n)
        if f(pattern) != "1":
            continue
        zero_positions = [j for j in range(n) if pattern[j] == "0"]
        for j in zero_positions:
            oracle.x(controls[j])
        oracle.mct(controls, target[0], None, mode='noancilla')
        for j in zero_positions:
            oracle.x(controls[j])
    return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
    """Assemble the generated n-qubit benchmark circuit around the oracle for f.

    The gate sequence (with its machine-generated '# number=' tags) is
    order-critical and must be kept exactly as emitted.
    """
    # circuit begin
    input_qubit = QuantumRegister(n,"qc")
    classical = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classical)
    prog.cx(input_qubit[0],input_qubit[3]) # number=13
    prog.cx(input_qubit[0],input_qubit[3]) # number=17
    prog.x(input_qubit[3]) # number=18
    prog.cx(input_qubit[0],input_qubit[3]) # number=19
    prog.cx(input_qubit[0],input_qubit[3]) # number=15
    prog.h(input_qubit[1]) # number=2
    prog.h(input_qubit[2]) # number=3
    prog.h(input_qubit[3]) # number=4
    prog.y(input_qubit[3]) # number=12
    prog.h(input_qubit[0]) # number=5
    # Oracle acts on the first n-1 qubits plus the last qubit as target.
    oracle = build_oracle(n-1, f)
    prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
    prog.h(input_qubit[1]) # number=6
    prog.h(input_qubit[2]) # number=7
    prog.h(input_qubit[3]) # number=37
    prog.cz(input_qubit[0],input_qubit[3]) # number=38
    prog.h(input_qubit[3]) # number=39
    prog.cx(input_qubit[0],input_qubit[3]) # number=40
    prog.x(input_qubit[3]) # number=41
    prog.h(input_qubit[3]) # number=43
    prog.cz(input_qubit[0],input_qubit[3]) # number=44
    prog.h(input_qubit[3]) # number=45
    prog.h(input_qubit[3]) # number=30
    prog.cz(input_qubit[0],input_qubit[3]) # number=31
    prog.h(input_qubit[3]) # number=32
    prog.h(input_qubit[0]) # number=33
    prog.cz(input_qubit[3],input_qubit[0]) # number=34
    prog.rx(0.33300882128051834,input_qubit[2]) # number=36
    prog.h(input_qubit[0]) # number=35
    prog.cx(input_qubit[3],input_qubit[0]) # number=23
    prog.cx(input_qubit[3],input_qubit[0]) # number=46
    prog.z(input_qubit[3]) # number=47
    prog.cx(input_qubit[3],input_qubit[0]) # number=48
    prog.cx(input_qubit[3],input_qubit[0]) # number=25
    prog.cx(input_qubit[3],input_qubit[0]) # number=22
    prog.h(input_qubit[3]) # number=8
    prog.h(input_qubit[0]) # number=9
    prog.y(input_qubit[2]) # number=10
    prog.y(input_qubit[2]) # number=11
    # circuit end
    return prog
if __name__ == '__main__':
    # Build f(rep) = (a . rep) XOR b over bit strings.
    a = "111"
    b = "0"
    f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
    prog = make_circuit(4,f)
    # Exact simulation: get the full statevector rather than sampled counts.
    backend = BasicAer.get_backend('statevector_simulator')
    # NOTE(review): sample_shot is assigned but never used below.
    sample_shot =8000
    info = execute(prog, backend=backend).result().get_statevector()
    qubits = round(log2(len(info)))
    # Collapse amplitudes into per-basis-state probabilities, rounded to 3 dp.
    info = {
        np.binary_repr(i, qubits): round((info[i]*(info[i].conjugate())).real,3)
        for i in range(2 ** qubits)
    }
    # Transpile against a fake device to record the compiled circuit.
    backend = FakeVigo()
    circuit1 = transpile(prog,backend,optimization_level=2)
    writefile = open("../data/startQiskit_Class3343.csv","w")
    print(info,file=writefile)
    print("results end", file=writefile)
    print(circuit1.__len__(),file=writefile)
    print(circuit1,file=writefile)
    writefile.close()
| 35.4375 | 140 | 0.650573 |
import cirq
import qiskit
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2
import numpy as np
import networkx as nx
# Comment-stripped duplicate of the benchmark above (dataset's
# content_no_comment column); code kept byte-for-byte.
def bitwise_xor(s: str, t: str) -> str:
    # Character-wise XOR of two bit strings; the result is reversed.
    length = len(s)
    res = []
    for i in range(length):
        res.append(str(int(s[i]) ^ int(t[i])))
    return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
    # Dot product of two bit strings modulo 2, as '0'/'1'.
    length = len(s)
    res = 0
    for i in range(length):
        res += int(s[i]) * int(t[i])
    return str(res % 2)
def build_oracle(n: int, f) -> QuantumCircuit:
    # Multi-controlled-Toffoli oracle: flips the target for every input r
    # with f(r) == "1"; X gates map the '0' bits of r onto the controls.
    controls = QuantumRegister(n, "ofc")
    target = QuantumRegister(1, "oft")
    oracle = QuantumCircuit(controls, target, name="Of")
    for i in range(2 ** n):
        rep = np.binary_repr(i, n)
        if f(rep) == "1":
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
            oracle.mct(controls, target[0], None, mode='noancilla')
            for j in range(n):
                if rep[j] == "0":
                    oracle.x(controls[j])
    return oracle
def make_circuit(n:int,f) -> QuantumCircuit:
    # Generated benchmark circuit; the gate order is significant.
    input_qubit = QuantumRegister(n,"qc")
    classical = ClassicalRegister(n, "qm")
    prog = QuantumCircuit(input_qubit, classical)
    prog.cx(input_qubit[0],input_qubit[3])
    prog.cx(input_qubit[0],input_qubit[3])
    prog.x(input_qubit[3])
    prog.cx(input_qubit[0],input_qubit[3])
    prog.cx(input_qubit[0],input_qubit[3])
    prog.h(input_qubit[1])
    prog.h(input_qubit[2])
    prog.h(input_qubit[3])
    prog.y(input_qubit[3])
    prog.h(input_qubit[0])
    oracle = build_oracle(n-1, f)
    prog.append(oracle.to_gate(),[input_qubit[i] for i in range(n-1)]+[input_qubit[n-1]])
    prog.h(input_qubit[1])
    prog.h(input_qubit[2])
    prog.h(input_qubit[3])
    prog.cz(input_qubit[0],input_qubit[3])
    prog.h(input_qubit[3])
    prog.cx(input_qubit[0],input_qubit[3])
    prog.x(input_qubit[3])
    prog.h(input_qubit[3])
    prog.cz(input_qubit[0],input_qubit[3])
    prog.h(input_qubit[3])
    prog.h(input_qubit[3])
    prog.cz(input_qubit[0],input_qubit[3])
    prog.h(input_qubit[3])
    prog.h(input_qubit[0])
    prog.cz(input_qubit[3],input_qubit[0])
    prog.rx(0.33300882128051834,input_qubit[2])
    prog.h(input_qubit[0])
    prog.cx(input_qubit[3],input_qubit[0])
    prog.cx(input_qubit[3],input_qubit[0])
    prog.z(input_qubit[3])
    prog.cx(input_qubit[3],input_qubit[0])
    prog.cx(input_qubit[3],input_qubit[0])
    prog.cx(input_qubit[3],input_qubit[0])
    prog.h(input_qubit[3])
    prog.h(input_qubit[0])
    prog.y(input_qubit[2])
    prog.y(input_qubit[2])
    return prog
if __name__ == '__main__':
    a = "111"
    b = "0"
    f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
    prog = make_circuit(4,f)
    backend = BasicAer.get_backend('statevector_simulator')
    # NOTE(review): sample_shot is assigned but never used below.
    sample_shot =8000
    info = execute(prog, backend=backend).result().get_statevector()
    qubits = round(log2(len(info)))
    info = {
        np.binary_repr(i, qubits): round((info[i]*(info[i].conjugate())).real,3)
        for i in range(2 ** qubits)
    }
    backend = FakeVigo()
    circuit1 = transpile(prog,backend,optimization_level=2)
    writefile = open("../data/startQiskit_Class3343.csv","w")
    print(info,file=writefile)
    print("results end", file=writefile)
    print(circuit1.__len__(),file=writefile)
    print(circuit1,file=writefile)
    writefile.close()
| true | true |
f72fd32beb09f4059eb8836278eae50e6d7228a6 | 2,650 | py | Python | purly/py/setup.py | rmorshea/purly | 0d07d6d7636fd81d9c1c14e2df6a32fc28b325f7 | [
"MIT"
] | 2 | 2018-08-18T05:39:24.000Z | 2018-08-21T19:02:16.000Z | purly/py/setup.py | rmorshea/purly | 0d07d6d7636fd81d9c1c14e2df6a32fc28b325f7 | [
"MIT"
] | 2 | 2018-07-27T07:14:19.000Z | 2018-07-27T07:17:06.000Z | purly/py/setup.py | rmorshea/purly | 0d07d6d7636fd81d9c1c14e2df6a32fc28b325f7 | [
"MIT"
] | null | null | null | from __future__ import print_function
import os
import sys
import shutil
from glob import glob
from setuptools import find_packages
from distutils.core import setup
# the name of the project
name = "purly"
# basic paths used to gather files
here = os.path.abspath(os.path.dirname(__file__))
root = os.path.join(here, name)
#-----------------------------------------------------------------------------
# Python Version Check
#-----------------------------------------------------------------------------
# NOTE(review): this deliberately pins *exactly* Python 3.6 (3.7+ is also
# rejected) -- confirm newer interpreters are really unsupported.
if sys.version_info < (3,6) or sys.version_info >= (3, 7):
    error = "ERROR: %s requires Python version 3.6." % name
    print(error, file=sys.stderr)
    sys.exit(1)
#-----------------------------------------------------------------------------
# requirements
#-----------------------------------------------------------------------------
# Runtime dependencies installed alongside the package.
requirements = [
    'sanic',
    'sanic_cors',
    'asyncio',
    'websocket-client',
    'websockets==5.0',
    'spectate>=0.2.1',
]
#-----------------------------------------------------------------------------
# Library Version
#-----------------------------------------------------------------------------
# Read __version__ out of purly/__init__.py without importing the package.
with open(os.path.join(root, '__init__.py')) as f:
    for line in f.read().split("\n"):
        if line.startswith("__version__ = "):
            # NOTE(review): eval of the right-hand side of the assignment;
            # ast.literal_eval would be safer if the file is ever untrusted.
            version = eval(line.split("=", 1)[1])
            break
    else:
        print("No version found in purly/__init__.py")
        sys.exit(1)
#-----------------------------------------------------------------------------
# Library Description
#-----------------------------------------------------------------------------
# README.md doubles as the long description shown on PyPI.
with open(os.path.join(here, 'README.md')) as f:
    long_description = f.read()
#-----------------------------------------------------------------------------
# Install It
#-----------------------------------------------------------------------------
if __name__ == '__main__':
    setup(
        name=name,
        version=version,
        packages=find_packages(),
        include_package_data=True,
        description="Control the web with Python",
        long_description=long_description,
        long_description_content_type='text/markdown',
        author="Ryan Morshead",
        author_email="ryan.morshead@gmail.com",
        url="https://github.com/rmorshea/purly",
        license='MIT',
        platforms="Linux, Mac OS X, Windows",
        keywords=["interactive", "widgets", "DOM", "synchronization", "React"],
        install_requires=requirements,
        classifiers=[
            'Intended Audience :: Developers',
            'Programming Language :: Python :: 3.6',
        ],
    )
| 31.547619 | 79 | 0.442642 | from __future__ import print_function
import os
import sys
import shutil
from glob import glob
from setuptools import find_packages
from distutils.core import setup
# Comment-stripped duplicate of setup.py above (dataset's content_no_comment
# column); code kept byte-for-byte.
name = "purly"
here = os.path.abspath(os.path.dirname(__file__))
root = os.path.join(here, name)
# NOTE(review): pins *exactly* Python 3.6 (3.7+ is also rejected).
if sys.version_info < (3,6) or sys.version_info >= (3, 7):
    error = "ERROR: %s requires Python version 3.6." % name
    print(error, file=sys.stderr)
    sys.exit(1)
requirements = [
    'sanic',
    'sanic_cors',
    'asyncio',
    'websocket-client',
    'websockets==5.0',
    'spectate>=0.2.1',
]
# Read __version__ out of purly/__init__.py without importing the package.
with open(os.path.join(root, '__init__.py')) as f:
    for line in f.read().split("\n"):
        if line.startswith("__version__ = "):
            version = eval(line.split("=", 1)[1])
            break
    else:
        print("No version found in purly/__init__.py")
        sys.exit(1)
with open(os.path.join(here, 'README.md')) as f:
    long_description = f.read()
if __name__ == '__main__':
    setup(
        name=name,
        version=version,
        packages=find_packages(),
        include_package_data=True,
        description="Control the web with Python",
        long_description=long_description,
        long_description_content_type='text/markdown',
        author="Ryan Morshead",
        author_email="ryan.morshead@gmail.com",
        url="https://github.com/rmorshea/purly",
        license='MIT',
        platforms="Linux, Mac OS X, Windows",
        keywords=["interactive", "widgets", "DOM", "synchronization", "React"],
        install_requires=requirements,
        classifiers=[
            'Intended Audience :: Developers',
            'Programming Language :: Python :: 3.6',
        ],
    )
| true | true |
f72fd342ba9c1c26e0b221251203aed9effad1f6 | 18,549 | py | Python | plugin.video.SportsDevil/lib/utils/drench.py | akuala/REPO.KUALA | ea9a157025530d2ce8fa0d88431c46c5352e89d4 | [
"Apache-2.0"
] | 2 | 2018-11-02T19:55:30.000Z | 2020-08-14T02:22:20.000Z | plugin.video.SportsDevil/lib/utils/drench.py | akuala/REPO.KUALA | ea9a157025530d2ce8fa0d88431c46c5352e89d4 | [
"Apache-2.0"
] | null | null | null | plugin.video.SportsDevil/lib/utils/drench.py | akuala/REPO.KUALA | ea9a157025530d2ce8fa0d88431c46c5352e89d4 | [
"Apache-2.0"
] | 3 | 2019-12-17T20:47:00.000Z | 2021-02-11T19:03:59.000Z | """
JavaScript encryption module ver. 2.0 by Daniel Rench
Based on existing code:
Copyright (c) 2003 by Andre Mueller.
Init of blowfish constants with a function (init/backup errors)
Copyright (c) 2003 by Rainer Wollmann
This Object is open source. You can redistribute it and/or modify
it under the terms of the Universal General Public License (UGPL).
http://www.ugpl.de/
"""
import math as Math
class blowfish:
def __init__(self,k):
if len(k) is 0:
raise '0 length key'
self.bf_P = self.Fbf_P()
self.bf_S0 = self.Fbf_S0()
self.bf_S1 = self.Fbf_S1()
self.bf_S2 = self.Fbf_S2()
self.bf_S3 = self.Fbf_S3()
self.key = k
j = 0
i = 0
while i < 18:
d = ((ord(self.key[j % len(self.key)]) * 256 + ord(self.key[(j + 1) % len(self.key)])) * 256 + ord(self.key[(j + 2) % len(self.key)])) * 256 + ord(self.key[(j + 3) % len(self.key)])
self.bf_P[i] = self.xor(self.bf_P[i], d)
j = (j + 4) % len(self.key)
i+=1
self.key = self.escape(self.key)
self.xl_par = 0x00000000
self.xr_par = 0x00000000
i = 0
while i < 18:
self.encipher()
self.bf_P[i] = self.xl_par
self.bf_P[i + 1] = self.xr_par
i += 2
j = 0
while j < 256:
self.encipher()
self.bf_S0[j] = self.xl_par
self.bf_S0[j + 1] = self.xr_par
j += 2
j = 0
while j < 256:
self.encipher()
self.bf_S1[j] = self.xl_par
self.bf_S1[j + 1] = self.xr_par
j += 2
j = 0
while j < 256:
self.encipher()
self.bf_S2[j] = self.xl_par
self.bf_S2[j + 1] = self.xr_par
j += 2
j = 0
while j < 256:
self.encipher()
self.bf_S3[j] = self.xl_par
self.bf_S3[j + 1] = self.xr_par
j += 2
def unescape(self,t):
r = ''
i = 0
l = len(t)
while i < l:
t1 = ord(t[i])
i+=1
t2 = ord(t[i])
if t1 < 58:
t1 -= 48
else:
if t1 > 96:
t1 -= 87
else:
t1 -= 55
if t2 < 58:
t2 -= 48
else:
if t2 > 96:
t2 -= 87
else:
t2 -= 55
r += chr(t1 * 16 + t2)
i+=1
return r
def escape(self,t):
r = ''
i = 0
l = len(t)
while i < l:
c = ord(t[i])
t1 = int(Math.floor(c / 16))
t2 = c % 16
if t1 < 10:
t1 += 48
else:
t1 += 55
if t2 < 10:
t2 += 48
else:
t2 += 55
r += chr(t1) + chr(t2)
i+=1
return r
def wordbyte0(self,w):
return int(Math.floor(Math.floor(Math.floor(w / 256) / 256) / 256) % 256)
def wordbyte1(self,w):
return int(Math.floor(Math.floor(w / 256) / 256) % 256)
def wordbyte2(self,w):
return int(Math.floor(w / 256) % 256)
def wordbyte3(self,w):
return w % 256
def xor(self,w1, w2):
r = w1 ^ w2
if r < 0:
r = 0xffffffff + 1 + r
return r
    def Fbf_P(self):
        # Initial 18-entry P-array subkey constants (standard Blowfish values).
        return [0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, 0x9216d5d9, 0x8979fb1b]
    def Fbf_S0(self):
        # Initial 256-entry S-box 0 constants (standard Blowfish values).
        return [0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60, 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, 0x10fa3d98, 0xfd2183b8, 
        0x4afcb56c, 0x2dd1d35b, 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, 0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a]
    def Fbf_S1(self):
        # Initial 256-entry S-box 1 constants (standard Blowfish values).
        return [0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3, 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, 0x40685a32, 0x3c2ab4b3, 
        0x319ee9d5, 0xc021b8f7, 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7]
    def Fbf_S2(self):
        # Initial 256-entry S-box 2 constants (standard Blowfish values).
        return [0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, 0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504, 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, 0xd1fd8346, 0xf6381fb0, 
        0x7745ae04, 0xd736fccc, 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2, 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0]
    def Fbf_S3(self):
        # Initial 256-entry S-box 3 constants (standard Blowfish values).
        return [0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59, 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, 0x0339c32a, 0xc6913667, 
        0x8df9317c, 0xe0b12b4f, 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d, 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6]
def encrypt(self,t):
t = self.escape(t)
i = 0
l = len(t) % 16
while i < l:
t += '0'
i+=1
r = ''
i = 0
l = len(t)
while i < l:
self.xr_par = self.wordunescape(t[i:i+8])
self.xl_par = self.wordunescape(t[i+8:i+16])
self.encipher()
r += self.wordescape(self.xr_par) + self.wordescape(self.xl_par)
i += 16
return r
def decrypt(self,t):
i = 0
l = len(t) % 16
while i < l:
t += '0'
i+=1
r = ''
i = 0
l = len(t)
while i < l:
self.xr_par = self.wordunescape(t[i:i+8])
self.xl_par = self.wordunescape(t[i+8:i+16])
self.decipher()
r += self.wordescape(self.xr_par) + self.wordescape(self.xl_par)
i += 16
return self.unescape(r).replace('\x00', '')
def wordescape(self,w):
r = ''
m = [self.wordbyte0(w), self.wordbyte1(w), self.wordbyte2(w), self.wordbyte3(w)]
i = 3
while i is not -1:
t1 = int(Math.floor(m[i] / 16))
t2 = m[i] % 16
if t1 < 10:
t1 += 48
else:
t1 += 55
if t2 < 10:
t2 += 48
else:
t2 += 55
r += chr(t1) + chr(t2)
i-=1
return r
def wordunescape(self,t):
r = 0
i = 6
while i is not -2:
t1 = ord(t[i])
t2 = ord(t[i+1])
if t1 < 58:
t1 -= 48
else:
t1 -= 55
if t2 < 58:
t2 -= 48
else:
t2 -= 55
r = r * 256 + t1 * 16 + t2
i -= 2
return r
def round(self, a, b, n):
t = self
return t.xor(a, t.xor(t.xor(t.bf_S0[t.wordbyte0(b)] + t.bf_S1[t.wordbyte1(b)], t.bf_S2[t.wordbyte2(b)]) + t.bf_S3[t.wordbyte3(b)], t.bf_P[n]))
def encipher(self):
t = self
Xl = t.xl_par
Xr = t.xr_par
Xl = t.xor(Xl, t.bf_P[0])
Xr = t.round(Xr, Xl, 1)
Xl = t.round(Xl, Xr, 2)
Xr = t.round(Xr, Xl, 3)
Xl = t.round(Xl, Xr, 4)
Xr = t.round(Xr, Xl, 5)
Xl = t.round(Xl, Xr, 6)
Xr = t.round(Xr, Xl, 7)
Xl = t.round(Xl, Xr, 8)
Xr = t.round(Xr, Xl, 9)
Xl = t.round(Xl, Xr, 10)
Xr = t.round(Xr, Xl, 11)
Xl = t.round(Xl, Xr, 12)
Xr = t.round(Xr, Xl, 13)
Xl = t.round(Xl, Xr, 14)
Xr = t.round(Xr, Xl, 15)
Xl = t.round(Xl, Xr, 16)
Xr = t.xor(Xr, t.bf_P[17])
t.xl_par = Xr
t.xr_par = Xl
def decipher(self):
t = self
Xl = t.xl_par
Xr = t.xr_par
Xl = t.xor(Xl, t.bf_P[17])
Xr = t.round(Xr, Xl, 16)
Xl = t.round(Xl, Xr, 15)
Xr = t.round(Xr, Xl, 14)
Xl = t.round(Xl, Xr, 13)
Xr = t.round(Xr, Xl, 12)
Xl = t.round(Xl, Xr, 11)
Xr = t.round(Xr, Xl, 10)
Xl = t.round(Xl, Xr, 9)
Xr = t.round(Xr, Xl, 8)
Xl = t.round(Xl, Xr, 7)
Xr = t.round(Xr, Xl, 6)
Xl = t.round(Xl, Xr, 5)
Xr = t.round(Xr, Xl, 4)
Xl = t.round(Xl, Xr, 3)
Xr = t.round(Xr, Xl, 2)
Xl = t.round(Xl, Xr, 1)
Xr = t.xor(Xr, t.bf_P[0])
t.xl_par = Xr
t.xr_par = Xl
| 60.224026 | 3,083 | 0.721441 |
import math as Math
class blowfish:
def __init__(self,k):
if len(k) is 0:
raise '0 length key'
self.bf_P = self.Fbf_P()
self.bf_S0 = self.Fbf_S0()
self.bf_S1 = self.Fbf_S1()
self.bf_S2 = self.Fbf_S2()
self.bf_S3 = self.Fbf_S3()
self.key = k
j = 0
i = 0
while i < 18:
d = ((ord(self.key[j % len(self.key)]) * 256 + ord(self.key[(j + 1) % len(self.key)])) * 256 + ord(self.key[(j + 2) % len(self.key)])) * 256 + ord(self.key[(j + 3) % len(self.key)])
self.bf_P[i] = self.xor(self.bf_P[i], d)
j = (j + 4) % len(self.key)
i+=1
self.key = self.escape(self.key)
self.xl_par = 0x00000000
self.xr_par = 0x00000000
i = 0
while i < 18:
self.encipher()
self.bf_P[i] = self.xl_par
self.bf_P[i + 1] = self.xr_par
i += 2
j = 0
while j < 256:
self.encipher()
self.bf_S0[j] = self.xl_par
self.bf_S0[j + 1] = self.xr_par
j += 2
j = 0
while j < 256:
self.encipher()
self.bf_S1[j] = self.xl_par
self.bf_S1[j + 1] = self.xr_par
j += 2
j = 0
while j < 256:
self.encipher()
self.bf_S2[j] = self.xl_par
self.bf_S2[j + 1] = self.xr_par
j += 2
j = 0
while j < 256:
self.encipher()
self.bf_S3[j] = self.xl_par
self.bf_S3[j + 1] = self.xr_par
j += 2
def unescape(self,t):
r = ''
i = 0
l = len(t)
while i < l:
t1 = ord(t[i])
i+=1
t2 = ord(t[i])
if t1 < 58:
t1 -= 48
else:
if t1 > 96:
t1 -= 87
else:
t1 -= 55
if t2 < 58:
t2 -= 48
else:
if t2 > 96:
t2 -= 87
else:
t2 -= 55
r += chr(t1 * 16 + t2)
i+=1
return r
def escape(self,t):
r = ''
i = 0
l = len(t)
while i < l:
c = ord(t[i])
t1 = int(Math.floor(c / 16))
t2 = c % 16
if t1 < 10:
t1 += 48
else:
t1 += 55
if t2 < 10:
t2 += 48
else:
t2 += 55
r += chr(t1) + chr(t2)
i+=1
return r
def wordbyte0(self,w):
return int(Math.floor(Math.floor(Math.floor(w / 256) / 256) / 256) % 256)
def wordbyte1(self,w):
return int(Math.floor(Math.floor(w / 256) / 256) % 256)
def wordbyte2(self,w):
return int(Math.floor(w / 256) % 256)
def wordbyte3(self,w):
return w % 256
def xor(self,w1, w2):
r = w1 ^ w2
if r < 0:
r = 0xffffffff + 1 + r
return r
 def Fbf_P(self):
  """Return a fresh copy of the 18-entry Blowfish P-array initial values
  (the standard cipher constants; __init__ XORs the key into this copy)."""
  return [0x243f6a88, 0x85a308d3, 0x13198a2e, 0x03707344, 0xa4093822, 0x299f31d0, 0x082efa98, 0xec4e6c89, 0x452821e6, 0x38d01377, 0xbe5466cf, 0x34e90c6c, 0xc0ac29b7, 0xc97c50dd, 0x3f84d5b5, 0xb5470917, 0x9216d5d9, 0x8979fb1b]
def Fbf_S0(self):
return [0xd1310ba6, 0x98dfb5ac, 0x2ffd72db, 0xd01adfb7, 0xb8e1afed, 0x6a267e96, 0xba7c9045, 0xf12c7f99, 0x24a19947, 0xb3916cf7, 0x0801f2e2, 0x858efc16, 0x636920d8, 0x71574e69, 0xa458fea3, 0xf4933d7e, 0x0d95748f, 0x728eb658, 0x718bcd58, 0x82154aee, 0x7b54a41d, 0xc25a59b5, 0x9c30d539, 0x2af26013, 0xc5d1b023, 0x286085f0, 0xca417918, 0xb8db38ef, 0x8e79dcb0, 0x603a180e, 0x6c9e0e8b, 0xb01e8a3e, 0xd71577c1, 0xbd314b27, 0x78af2fda, 0x55605c60, 0xe65525f3, 0xaa55ab94, 0x57489862, 0x63e81440, 0x55ca396a, 0x2aab10b6, 0xb4cc5c34, 0x1141e8ce, 0xa15486af, 0x7c72e993, 0xb3ee1411, 0x636fbc2a, 0x2ba9c55d, 0x741831f6, 0xce5c3e16, 0x9b87931e, 0xafd6ba33, 0x6c24cf5c, 0x7a325381, 0x28958677, 0x3b8f4898, 0x6b4bb9af, 0xc4bfe81b, 0x66282193, 0x61d809cc, 0xfb21a991, 0x487cac60, 0x5dec8032, 0xef845d5d, 0xe98575b1, 0xdc262302, 0xeb651b88, 0x23893e81, 0xd396acc5, 0x0f6d6ff3, 0x83f44239, 0x2e0b4482, 0xa4842004, 0x69c8f04a, 0x9e1f9b5e, 0x21c66842, 0xf6e96c9a, 0x670c9c61, 0xabd388f0, 0x6a51a0d2, 0xd8542f68, 0x960fa728, 0xab5133a3, 0x6eef0b6c, 0x137a3be4, 0xba3bf050, 0x7efb2a98, 0xa1f1651d, 0x39af0176, 0x66ca593e, 0x82430e88, 0x8cee8619, 0x456f9fb4, 0x7d84a5c3, 0x3b8b5ebe, 0xe06f75d8, 0x85c12073, 0x401a449f, 0x56c16aa6, 0x4ed3aa62, 0x363f7706, 0x1bfedf72, 0x429b023d, 0x37d0d724, 0xd00a1248, 0xdb0fead3, 0x49f1c09b, 0x075372c9, 0x80991b7b, 0x25d479d8, 0xf6e8def7, 0xe3fe501a, 0xb6794c3b, 0x976ce0bd, 0x04c006ba, 0xc1a94fb6, 0x409f60c4, 0x5e5c9ec2, 0x196a2463, 0x68fb6faf, 0x3e6c53b5, 0x1339b2eb, 0x3b52ec6f, 0x6dfc511f, 0x9b30952c, 0xcc814544, 0xaf5ebd09, 0xbee3d004, 0xde334afd, 0x660f2807, 0x192e4bb3, 0xc0cba857, 0x45c8740f, 0xd20b5f39, 0xb9d3fbdb, 0x5579c0bd, 0x1a60320a, 0xd6a100c6, 0x402c7279, 0x679f25fe, 0xfb1fa3cc, 0x8ea5e9f8, 0xdb3222f8, 0x3c7516df, 0xfd616b15, 0x2f501ec8, 0xad0552ab, 0x323db5fa, 0xfd238760, 0x53317b48, 0x3e00df82, 0x9e5c57bb, 0xca6f8ca0, 0x1a87562e, 0xdf1769db, 0xd542a8f6, 0x287effc3, 0xac6732c6, 0x8c4f5573, 0x695b27b0, 0xbbca58c8, 0xe1ffa35d, 0xb8f011a0, 0x10fa3d98, 0xfd2183b8, 
0x4afcb56c, 0x2dd1d35b, 0x9a53e479, 0xb6f84565, 0xd28e49bc, 0x4bfb9790, 0xe1ddf2da, 0xa4cb7e33, 0x62fb1341, 0xcee4c6e8, 0xef20cada, 0x36774c01, 0xd07e9efe, 0x2bf11fb4, 0x95dbda4d, 0xae909198, 0xeaad8e71, 0x6b93d5a0, 0xd08ed1d0, 0xafc725e0, 0x8e3c5b2f, 0x8e7594b7, 0x8ff6e2fb, 0xf2122b64, 0x8888b812, 0x900df01c, 0x4fad5ea0, 0x688fc31c, 0xd1cff191, 0xb3a8c1ad, 0x2f2f2218, 0xbe0e1777, 0xea752dfe, 0x8b021fa1, 0xe5a0cc0f, 0xb56f74e8, 0x18acf3d6, 0xce89e299, 0xb4a84fe0, 0xfd13e0b7, 0x7cc43b81, 0xd2ada8d9, 0x165fa266, 0x80957705, 0x93cc7314, 0x211a1477, 0xe6ad2065, 0x77b5fa86, 0xc75442f5, 0xfb9d35cf, 0xebcdaf0c, 0x7b3e89a0, 0xd6411bd3, 0xae1e7e49, 0x00250e2d, 0x2071b35e, 0x226800bb, 0x57b8e0af, 0x2464369b, 0xf009b91e, 0x5563911d, 0x59dfa6aa, 0x78c14389, 0xd95a537f, 0x207d5ba2, 0x02e5b9c5, 0x83260376, 0x6295cfa9, 0x11c81968, 0x4e734a41, 0xb3472dca, 0x7b14a94a, 0x1b510052, 0x9a532915, 0xd60f573f, 0xbc9bc6e4, 0x2b60a476, 0x81e67400, 0x08ba6fb5, 0x571be91f, 0xf296ec6b, 0x2a0dd915, 0xb6636521, 0xe7b9f9b6, 0xff34052e, 0xc5855664, 0x53b02d5d, 0xa99f8fa1, 0x08ba4799, 0x6e85076a]
def Fbf_S1(self):
return [0x4b7a70e9, 0xb5b32944, 0xdb75092e, 0xc4192623, 0xad6ea6b0, 0x49a7df7d, 0x9cee60b8, 0x8fedb266, 0xecaa8c71, 0x699a17ff, 0x5664526c, 0xc2b19ee1, 0x193602a5, 0x75094c29, 0xa0591340, 0xe4183a3e, 0x3f54989a, 0x5b429d65, 0x6b8fe4d6, 0x99f73fd6, 0xa1d29c07, 0xefe830f5, 0x4d2d38e6, 0xf0255dc1, 0x4cdd2086, 0x8470eb26, 0x6382e9c6, 0x021ecc5e, 0x09686b3f, 0x3ebaefc9, 0x3c971814, 0x6b6a70a1, 0x687f3584, 0x52a0e286, 0xb79c5305, 0xaa500737, 0x3e07841c, 0x7fdeae5c, 0x8e7d44ec, 0x5716f2b8, 0xb03ada37, 0xf0500c0d, 0xf01c1f04, 0x0200b3ff, 0xae0cf51a, 0x3cb574b2, 0x25837a58, 0xdc0921bd, 0xd19113f9, 0x7ca92ff6, 0x94324773, 0x22f54701, 0x3ae5e581, 0x37c2dadc, 0xc8b57634, 0x9af3dda7, 0xa9446146, 0x0fd0030e, 0xecc8c73e, 0xa4751e41, 0xe238cd99, 0x3bea0e2f, 0x3280bba1, 0x183eb331, 0x4e548b38, 0x4f6db908, 0x6f420d03, 0xf60a04bf, 0x2cb81290, 0x24977c79, 0x5679b072, 0xbcaf89af, 0xde9a771f, 0xd9930810, 0xb38bae12, 0xdccf3f2e, 0x5512721f, 0x2e6b7124, 0x501adde6, 0x9f84cd87, 0x7a584718, 0x7408da17, 0xbc9f9abc, 0xe94b7d8c, 0xec7aec3a, 0xdb851dfa, 0x63094366, 0xc464c3d2, 0xef1c1847, 0x3215d908, 0xdd433b37, 0x24c2ba16, 0x12a14d43, 0x2a65c451, 0x50940002, 0x133ae4dd, 0x71dff89e, 0x10314e55, 0x81ac77d6, 0x5f11199b, 0x043556f1, 0xd7a3c76b, 0x3c11183b, 0x5924a509, 0xf28fe6ed, 0x97f1fbfa, 0x9ebabf2c, 0x1e153c6e, 0x86e34570, 0xeae96fb1, 0x860e5e0a, 0x5a3e2ab3, 0x771fe71c, 0x4e3d06fa, 0x2965dcb9, 0x99e71d0f, 0x803e89d6, 0x5266c825, 0x2e4cc978, 0x9c10b36a, 0xc6150eba, 0x94e2ea78, 0xa5fc3c53, 0x1e0a2df4, 0xf2f74ea7, 0x361d2b3d, 0x1939260f, 0x19c27960, 0x5223a708, 0xf71312b6, 0xebadfe6e, 0xeac31f66, 0xe3bc4595, 0xa67bc883, 0xb17f37d1, 0x018cff28, 0xc332ddef, 0xbe6c5aa5, 0x65582185, 0x68ab9802, 0xeecea50f, 0xdb2f953b, 0x2aef7dad, 0x5b6e2f84, 0x1521b628, 0x29076170, 0xecdd4775, 0x619f1510, 0x13cca830, 0xeb61bd96, 0x0334fe1e, 0xaa0363cf, 0xb5735c90, 0x4c70a239, 0xd59e9e0b, 0xcbaade14, 0xeecc86bc, 0x60622ca7, 0x9cab5cab, 0xb2f3846e, 0x648b1eaf, 0x19bdf0ca, 0xa02369b9, 0x655abb50, 0x40685a32, 0x3c2ab4b3, 
0x319ee9d5, 0xc021b8f7, 0x9b540b19, 0x875fa099, 0x95f7997e, 0x623d7da8, 0xf837889a, 0x97e32d77, 0x11ed935f, 0x16681281, 0x0e358829, 0xc7e61fd6, 0x96dedfa1, 0x7858ba99, 0x57f584a5, 0x1b227263, 0x9b83c3ff, 0x1ac24696, 0xcdb30aeb, 0x532e3054, 0x8fd948e4, 0x6dbc3128, 0x58ebf2ef, 0x34c6ffea, 0xfe28ed61, 0xee7c3c73, 0x5d4a14d9, 0xe864b7e3, 0x42105d14, 0x203e13e0, 0x45eee2b6, 0xa3aaabea, 0xdb6c4f15, 0xfacb4fd0, 0xc742f442, 0xef6abbb5, 0x654f3b1d, 0x41cd2105, 0xd81e799e, 0x86854dc7, 0xe44b476a, 0x3d816250, 0xcf62a1f2, 0x5b8d2646, 0xfc8883a0, 0xc1c7b6a3, 0x7f1524c3, 0x69cb7492, 0x47848a0b, 0x5692b285, 0x095bbf00, 0xad19489d, 0x1462b174, 0x23820e00, 0x58428d2a, 0x0c55f5ea, 0x1dadf43e, 0x233f7061, 0x3372f092, 0x8d937e41, 0xd65fecf1, 0x6c223bdb, 0x7cde3759, 0xcbee7460, 0x4085f2a7, 0xce77326e, 0xa6078084, 0x19f8509e, 0xe8efd855, 0x61d99735, 0xa969a7aa, 0xc50c06c2, 0x5a04abfc, 0x800bcadc, 0x9e447a2e, 0xc3453484, 0xfdd56705, 0x0e1e9ec9, 0xdb73dbd3, 0x105588cd, 0x675fda79, 0xe3674340, 0xc5c43465, 0x713e38d8, 0x3d28f89e, 0xf16dff20, 0x153e21e7, 0x8fb03d4a, 0xe6e39f2b, 0xdb83adf7]
def Fbf_S2(self):
return [0xe93d5a68, 0x948140f7, 0xf64c261c, 0x94692934, 0x411520f7, 0x7602d4f7, 0xbcf46b2e, 0xd4a20068, 0xd4082471, 0x3320f46a, 0x43b7d4b7, 0x500061af, 0x1e39f62e, 0x97244546, 0x14214f74, 0xbf8b8840, 0x4d95fc1d, 0x96b591af, 0x70f4ddd3, 0x66a02f45, 0xbfbc09ec, 0x03bd9785, 0x7fac6dd0, 0x31cb8504, 0x96eb27b3, 0x55fd3941, 0xda2547e6, 0xabca0a9a, 0x28507825, 0x530429f4, 0x0a2c86da, 0xe9b66dfb, 0x68dc1462, 0xd7486900, 0x680ec0a4, 0x27a18dee, 0x4f3ffea2, 0xe887ad8c, 0xb58ce006, 0x7af4d6b6, 0xaace1e7c, 0xd3375fec, 0xce78a399, 0x406b2a42, 0x20fe9e35, 0xd9f385b9, 0xee39d7ab, 0x3b124e8b, 0x1dc9faf7, 0x4b6d1856, 0x26a36631, 0xeae397b2, 0x3a6efa74, 0xdd5b4332, 0x6841e7f7, 0xca7820fb, 0xfb0af54e, 0xd8feb397, 0x454056ac, 0xba489527, 0x55533a3a, 0x20838d87, 0xfe6ba9b7, 0xd096954b, 0x55a867bc, 0xa1159a58, 0xcca92963, 0x99e1db33, 0xa62a4a56, 0x3f3125f9, 0x5ef47e1c, 0x9029317c, 0xfdf8e802, 0x04272f70, 0x80bb155c, 0x05282ce3, 0x95c11548, 0xe4c66d22, 0x48c1133f, 0xc70f86dc, 0x07f9c9ee, 0x41041f0f, 0x404779a4, 0x5d886e17, 0x325f51eb, 0xd59bc0d1, 0xf2bcc18f, 0x41113564, 0x257b7834, 0x602a9c60, 0xdff8e8a3, 0x1f636c1b, 0x0e12b4c2, 0x02e1329e, 0xaf664fd1, 0xcad18115, 0x6b2395e0, 0x333e92e1, 0x3b240b62, 0xeebeb922, 0x85b2a20e, 0xe6ba0d99, 0xde720c8c, 0x2da2f728, 0xd0127845, 0x95b794fd, 0x647d0862, 0xe7ccf5f0, 0x5449a36f, 0x877d48fa, 0xc39dfd27, 0xf33e8d1e, 0x0a476341, 0x992eff74, 0x3a6f6eab, 0xf4f8fd37, 0xa812dc60, 0xa1ebddf8, 0x991be14c, 0xdb6e6b0d, 0xc67b5510, 0x6d672c37, 0x2765d43b, 0xdcd0e804, 0xf1290dc7, 0xcc00ffa3, 0xb5390f92, 0x690fed0b, 0x667b9ffb, 0xcedb7d9c, 0xa091cf0b, 0xd9155ea3, 0xbb132f88, 0x515bad24, 0x7b9479bf, 0x763bd6eb, 0x37392eb3, 0xcc115979, 0x8026e297, 0xf42e312d, 0x6842ada7, 0xc66a2b3b, 0x12754ccc, 0x782ef11c, 0x6a124237, 0xb79251e7, 0x06a1bbe6, 0x4bfb6350, 0x1a6b1018, 0x11caedfa, 0x3d25bdd8, 0xe2e1c3c9, 0x44421659, 0x0a121386, 0xd90cec6e, 0xd5abea2a, 0x64af674e, 0xda86a85f, 0xbebfe988, 0x64e4c3fe, 0x9dbc8057, 0xf0f7c086, 0x60787bf8, 0x6003604d, 0xd1fd8346, 0xf6381fb0, 
0x7745ae04, 0xd736fccc, 0x83426b33, 0xf01eab71, 0xb0804187, 0x3c005e5f, 0x77a057be, 0xbde8ae24, 0x55464299, 0xbf582e61, 0x4e58f48f, 0xf2ddfda2, 0xf474ef38, 0x8789bdc2, 0x5366f9c3, 0xc8b38e74, 0xb475f255, 0x46fcd9b9, 0x7aeb2661, 0x8b1ddf84, 0x846a0e79, 0x915f95e2, 0x466e598e, 0x20b45770, 0x8cd55591, 0xc902de4c, 0xb90bace1, 0xbb8205d0, 0x11a86248, 0x7574a99e, 0xb77f19b6, 0xe0a9dc09, 0x662d09a1, 0xc4324633, 0xe85a1f02, 0x09f0be8c, 0x4a99a025, 0x1d6efe10, 0x1ab93d1d, 0x0ba5a4df, 0xa186f20f, 0x2868f169, 0xdcb7da83, 0x573906fe, 0xa1e2ce9b, 0x4fcd7f52, 0x50115e01, 0xa70683fa, 0xa002b5c4, 0x0de6d027, 0x9af88c27, 0x773f8641, 0xc3604c06, 0x61a806b5, 0xf0177a28, 0xc0f586e0, 0x006058aa, 0x30dc7d62, 0x11e69ed7, 0x2338ea63, 0x53c2dd94, 0xc2c21634, 0xbbcbee56, 0x90bcb6de, 0xebfc7da1, 0xce591d76, 0x6f05e409, 0x4b7c0188, 0x39720a3d, 0x7c927c24, 0x86e3725f, 0x724d9db9, 0x1ac15bb4, 0xd39eb8fc, 0xed545578, 0x08fca5b5, 0xd83d7cd3, 0x4dad0fc4, 0x1e50ef5e, 0xb161e6f8, 0xa28514d9, 0x6c51133c, 0x6fd5c7e7, 0x56e14ec4, 0x362abfce, 0xddc6c837, 0xd79a3234, 0x92638212, 0x670efa8e, 0x406000e0]
def Fbf_S3(self):
return [0x3a39ce37, 0xd3faf5cf, 0xabc27737, 0x5ac52d1b, 0x5cb0679e, 0x4fa33742, 0xd3822740, 0x99bc9bbe, 0xd5118e9d, 0xbf0f7315, 0xd62d1c7e, 0xc700c47b, 0xb78c1b6b, 0x21a19045, 0xb26eb1be, 0x6a366eb4, 0x5748ab2f, 0xbc946e79, 0xc6a376d2, 0x6549c2c8, 0x530ff8ee, 0x468dde7d, 0xd5730a1d, 0x4cd04dc6, 0x2939bbdb, 0xa9ba4650, 0xac9526e8, 0xbe5ee304, 0xa1fad5f0, 0x6a2d519a, 0x63ef8ce2, 0x9a86ee22, 0xc089c2b8, 0x43242ef6, 0xa51e03aa, 0x9cf2d0a4, 0x83c061ba, 0x9be96a4d, 0x8fe51550, 0xba645bd6, 0x2826a2f9, 0xa73a3ae1, 0x4ba99586, 0xef5562e9, 0xc72fefd3, 0xf752f7da, 0x3f046f69, 0x77fa0a59, 0x80e4a915, 0x87b08601, 0x9b09e6ad, 0x3b3ee593, 0xe990fd5a, 0x9e34d797, 0x2cf0b7d9, 0x022b8b51, 0x96d5ac3a, 0x017da67d, 0xd1cf3ed6, 0x7c7d2d28, 0x1f9f25cf, 0xadf2b89b, 0x5ad6b472, 0x5a88f54c, 0xe029ac71, 0xe019a5e6, 0x47b0acfd, 0xed93fa9b, 0xe8d3c48d, 0x283b57cc, 0xf8d56629, 0x79132e28, 0x785f0191, 0xed756055, 0xf7960e44, 0xe3d35e8c, 0x15056dd4, 0x88f46dba, 0x03a16125, 0x0564f0bd, 0xc3eb9e15, 0x3c9057a2, 0x97271aec, 0xa93a072a, 0x1b3f6d9b, 0x1e6321f5, 0xf59c66fb, 0x26dcf319, 0x7533d928, 0xb155fdf5, 0x03563482, 0x8aba3cbb, 0x28517711, 0xc20ad9f8, 0xabcc5167, 0xccad925f, 0x4de81751, 0x3830dc8e, 0x379d5862, 0x9320f991, 0xea7a90c2, 0xfb3e7bce, 0x5121ce64, 0x774fbe32, 0xa8b6e37e, 0xc3293d46, 0x48de5369, 0x6413e680, 0xa2ae0810, 0xdd6db224, 0x69852dfd, 0x09072166, 0xb39a460a, 0x6445c0dd, 0x586cdecf, 0x1c20c8ae, 0x5bbef7dd, 0x1b588d40, 0xccd2017f, 0x6bb4e3bb, 0xdda26a7e, 0x3a59ff45, 0x3e350a44, 0xbcb4cdd5, 0x72eacea8, 0xfa6484bb, 0x8d6612ae, 0xbf3c6f47, 0xd29be463, 0x542f5d9e, 0xaec2771b, 0xf64e6370, 0x740e0d8d, 0xe75b1357, 0xf8721671, 0xaf537d5d, 0x4040cb08, 0x4eb4e2cc, 0x34d2466a, 0x0115af84, 0xe1b00428, 0x95983a1d, 0x06b89fb4, 0xce6ea048, 0x6f3f3b82, 0x3520ab82, 0x011a1d4b, 0x277227f8, 0x611560b1, 0xe7933fdc, 0xbb3a792b, 0x344525bd, 0xa08839e1, 0x51ce794b, 0x2f32c9b7, 0xa01fbac9, 0xe01cc87e, 0xbcc7d1f6, 0xcf0111c3, 0xa1e8aac7, 0x1a908749, 0xd44fbd9a, 0xd0dadecb, 0xd50ada38, 0x0339c32a, 0xc6913667, 
0x8df9317c, 0xe0b12b4f, 0xf79e59b7, 0x43f5bb3a, 0xf2d519ff, 0x27d9459c, 0xbf97222c, 0x15e6fc2a, 0x0f91fc71, 0x9b941525, 0xfae59361, 0xceb69ceb, 0xc2a86459, 0x12baa8d1, 0xb6c1075e, 0xe3056a0c, 0x10d25065, 0xcb03a442, 0xe0ec6e0e, 0x1698db3b, 0x4c98a0be, 0x3278e964, 0x9f1f9532, 0xe0d392df, 0xd3a0342b, 0x8971f21e, 0x1b0a7441, 0x4ba3348c, 0xc5be7120, 0xc37632d8, 0xdf359f8d, 0x9b992f2e, 0xe60b6f47, 0x0fe3f11d, 0xe54cda54, 0x1edad891, 0xce6279cf, 0xcd3e7e6f, 0x1618b166, 0xfd2c1d05, 0x848fd2c5, 0xf6fb2299, 0xf523f357, 0xa6327623, 0x93a83531, 0x56cccd02, 0xacf08162, 0x5a75ebb5, 0x6e163697, 0x88d273cc, 0xde966292, 0x81b949d0, 0x4c50901b, 0x71c65614, 0xe6c6c7bd, 0x327a140a, 0x45e1d006, 0xc3f27b9a, 0xc9aa53fd, 0x62a80f00, 0xbb25bfe2, 0x35bdd2f6, 0x71126905, 0xb2040222, 0xb6cbcf7c, 0xcd769c2b, 0x53113ec0, 0x1640e3d3, 0x38abbd60, 0x2547adf0, 0xba38209c, 0xf746ce76, 0x77afa1c5, 0x20756060, 0x85cbfe4e, 0x8ae88dd8, 0x7aaaf9b0, 0x4cf9aa7e, 0x1948c25c, 0x02fb8a8c, 0x01c36ae4, 0xd6ebe1f9, 0x90d4f869, 0xa65cdea0, 0x3f09252d, 0xc208e69f, 0xb74e6132, 0xce77e25b, 0x578fdfe3, 0x3ac372e6]
def encrypt(self,t):
t = self.escape(t)
i = 0
l = len(t) % 16
while i < l:
t += '0'
i+=1
r = ''
i = 0
l = len(t)
while i < l:
self.xr_par = self.wordunescape(t[i:i+8])
self.xl_par = self.wordunescape(t[i+8:i+16])
self.encipher()
r += self.wordescape(self.xr_par) + self.wordescape(self.xl_par)
i += 16
return r
def decrypt(self,t):
i = 0
l = len(t) % 16
while i < l:
t += '0'
i+=1
r = ''
i = 0
l = len(t)
while i < l:
self.xr_par = self.wordunescape(t[i:i+8])
self.xl_par = self.wordunescape(t[i+8:i+16])
self.decipher()
r += self.wordescape(self.xr_par) + self.wordescape(self.xl_par)
i += 16
return self.unescape(r).replace('\x00', '')
def wordescape(self,w):
r = ''
m = [self.wordbyte0(w), self.wordbyte1(w), self.wordbyte2(w), self.wordbyte3(w)]
i = 3
while i is not -1:
t1 = int(Math.floor(m[i] / 16))
t2 = m[i] % 16
if t1 < 10:
t1 += 48
else:
t1 += 55
if t2 < 10:
t2 += 48
else:
t2 += 55
r += chr(t1) + chr(t2)
i-=1
return r
def wordunescape(self,t):
r = 0
i = 6
while i is not -2:
t1 = ord(t[i])
t2 = ord(t[i+1])
if t1 < 58:
t1 -= 48
else:
t1 -= 55
if t2 < 58:
t2 -= 48
else:
t2 -= 55
r = r * 256 + t1 * 16 + t2
i -= 2
return r
def round(self, a, b, n):
t = self
return t.xor(a, t.xor(t.xor(t.bf_S0[t.wordbyte0(b)] + t.bf_S1[t.wordbyte1(b)], t.bf_S2[t.wordbyte2(b)]) + t.bf_S3[t.wordbyte3(b)], t.bf_P[n]))
def encipher(self):
t = self
Xl = t.xl_par
Xr = t.xr_par
Xl = t.xor(Xl, t.bf_P[0])
Xr = t.round(Xr, Xl, 1)
Xl = t.round(Xl, Xr, 2)
Xr = t.round(Xr, Xl, 3)
Xl = t.round(Xl, Xr, 4)
Xr = t.round(Xr, Xl, 5)
Xl = t.round(Xl, Xr, 6)
Xr = t.round(Xr, Xl, 7)
Xl = t.round(Xl, Xr, 8)
Xr = t.round(Xr, Xl, 9)
Xl = t.round(Xl, Xr, 10)
Xr = t.round(Xr, Xl, 11)
Xl = t.round(Xl, Xr, 12)
Xr = t.round(Xr, Xl, 13)
Xl = t.round(Xl, Xr, 14)
Xr = t.round(Xr, Xl, 15)
Xl = t.round(Xl, Xr, 16)
Xr = t.xor(Xr, t.bf_P[17])
t.xl_par = Xr
t.xr_par = Xl
def decipher(self):
t = self
Xl = t.xl_par
Xr = t.xr_par
Xl = t.xor(Xl, t.bf_P[17])
Xr = t.round(Xr, Xl, 16)
Xl = t.round(Xl, Xr, 15)
Xr = t.round(Xr, Xl, 14)
Xl = t.round(Xl, Xr, 13)
Xr = t.round(Xr, Xl, 12)
Xl = t.round(Xl, Xr, 11)
Xr = t.round(Xr, Xl, 10)
Xl = t.round(Xl, Xr, 9)
Xr = t.round(Xr, Xl, 8)
Xl = t.round(Xl, Xr, 7)
Xr = t.round(Xr, Xl, 6)
Xl = t.round(Xl, Xr, 5)
Xr = t.round(Xr, Xl, 4)
Xl = t.round(Xl, Xr, 3)
Xr = t.round(Xr, Xl, 2)
Xl = t.round(Xl, Xr, 1)
Xr = t.xor(Xr, t.bf_P[0])
t.xl_par = Xr
t.xr_par = Xl
| true | true |
f72fd3f3fdf870d59380133842ea37b254c8a18a | 24,808 | py | Python | Scripts/simulation/restaurants/restaurant_commands.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | Scripts/simulation/restaurants/restaurant_commands.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | Scripts/simulation/restaurants/restaurant_commands.py | velocist/TS4CheatsInfo | b59ea7e5f4bd01d3b3bd7603843d525a9c179867 | [
"Apache-2.0"
] | null | null | null | # uncompyle6 version 3.7.4
# Python bytecode 3.7 (3394)
# Decompiled from: Python 3.7.9 (tags/v3.7.9:13c94747c7, Aug 17 2020, 18:58:18) [MSC v.1900 64 bit (AMD64)]
# Embedded file name: T:\InGame\Gameplay\Scripts\Server\restaurants\restaurant_commands.py
# Compiled at: 2018-08-28 03:56:41
# Size of source mod 2**32: 29007 bytes
from protocolbuffers import Restaurant_pb2
from event_testing import test_events
from google.protobuf import text_format
from restaurants import restaurant_utils
from restaurants.chefs_choice import ChefsChoice
from restaurants.restaurant_diner_situation import DinerSubSituationState, RestaurantDinerSubSituation, RestaurantDinerBackGroundSituation
from restaurants.restaurant_order import OrderStatus, OrderRecommendationState, GroupOrder
from restaurants.restaurant_tuning import RestaurantTuning, RestaurantIngredientQualityType, get_restaurant_zone_director
from server_commands.argument_helpers import TunableInstanceParam, OptionalTargetParam, get_optional_target
from sims import sim
from sims4.protocol_buffer_utils import has_field
import services, sims4.commands
@sims4.commands.Command('restaurant.order_food', command_type=(sims4.commands.CommandType.Live))
def order_food(recipe_type: TunableInstanceParam(sims4.resources.Types.RECIPE), opt_sim: OptionalTargetParam=None, _connection=None):
    """Place a single food order for a Sim on the current restaurant lot.

    Returns True on success, False on any validation failure; emits an
    automation status line either way.
    """
    if recipe_type is None:
        sims4.commands.output('Recipe is None', _connection)
        sims4.commands.automation_output('RestaurantOrderFood; Status:Failed', _connection)
        return False
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        sims4.commands.automation_output('RestaurantOrderFood; Status:Failed', _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        sims4.commands.automation_output('RestaurantOrderFood; Status:Failed', _connection)
        return False
    zone_director.make_one_order(sim, recipe_type)
    groups = zone_director.get_dining_groups_by_sim(sim)
    if groups is None:
        sims4.commands.output('Sim {} is not in dining group'.format(opt_sim), _connection)
        sims4.commands.automation_output('RestaurantOrderFood; Status:Failed', _connection)
        # BUG FIX: the original fell through here and crashed on groups.pop().
        return False
    group = groups.pop()
    group.hold_ordered_cost(recipe_type.restaurant_base_price)
    sims4.commands.automation_output('RestaurantOrderFood; Status:Success', _connection)
    return True
@sims4.commands.Command('restaurant.show_menu', command_type=(sims4.commands.CommandType.Live))
def show_menu(opt_sim: OptionalTargetParam=None, _connection=None):
    """Show the restaurant menu to the given Sim.

    Returns True on success, False if the Sim or zone director is missing.
    """
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    zone_director.show_menu(sim)
    # Return True for consistency with the other restaurant commands; the
    # original implicitly returned None on success.
    return True
@sims4.commands.Command('restaurant.show_menu_for_chef', command_type=(sims4.commands.CommandType.Live))
def show_menu_for_chef(opt_sim: OptionalTargetParam=None, chef_sim: OptionalTargetParam=None, _connection=None):
    """Show the chef's menu to the given Sim.

    Returns True on success, False if any participant cannot be resolved.
    """
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    chef_sim = get_optional_target(chef_sim, _connection)
    if chef_sim is None:
        sims4.commands.output("Chef {} doesn't exist.".format(chef_sim), _connection)
        return False
    chef_situation = restaurant_utils.get_chef_situation(chef_sim=chef_sim)
    if chef_situation is None:
        # BUG FIX: pass the connection like every other failure path; the
        # original called output() without it.
        sims4.commands.output("Couldn't find a Chef Situation in this zone.", _connection)
        return False
    chef_situation.show_menu(sim)
    return True
@sims4.commands.Command('restaurant.show_recommendation_menu_for_sim', command_type=(sims4.commands.CommandType.Live))
def show_recommendation_menu_for_sim(opt_sim: OptionalTargetParam=None, owner_sim: OptionalTargetParam=None, _connection=None):
    """Show the menu in recommendation mode to the given Sim.

    Returns True on success, False if the Sim or zone director is missing.
    """
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    zone_director.show_menu(sim, is_recommendation=True)
    # Return True for consistency with the other restaurant commands.
    return True
@sims4.commands.Command('restaurant.claim_table', command_type=(sims4.commands.CommandType.Live))
def claim_table(opt_sim: OptionalTargetParam=None, opt_table: OptionalTargetParam=None, _connection=None):
    """Claim a restaurant table for the given Sim (table may be None for auto-pick).

    Returns True on success, False if the Sim or zone director is missing.
    """
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    table_to_claim = get_optional_target(opt_table, _connection)
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    zone_director.claim_table(sim, table_to_claim)
    # Return True for consistency with the other restaurant commands.
    return True
@sims4.commands.Command('restaurant.order_for_table', command_type=(sims4.commands.CommandType.Live))
def order_for_table(sim_orders: str, _connection=None):
    """Submit a table-wide order described by a SimOrders protobuf text blob."""
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    order_msg = Restaurant_pb2.SimOrders()
    text_format.Merge(sim_orders, order_msg)
    parsed_orders = [(entry.sim_id, entry.recipe_id) for entry in order_msg.sim_orders]
    # The first listed Sim stands in for the whole dining group.
    first_sim = services.object_manager().get(parsed_orders[0][0])
    if first_sim is None:
        sims4.commands.output("Trying to order for a Sim that isn't on the lot", _connection)
        return False
    zone_director.order_for_table(parsed_orders)
    dining_group = zone_director.get_dining_groups_by_sim(first_sim).pop()
    dining_group.hold_ordered_cost(order_msg.meal_cost if has_field(order_msg, 'meal_cost') else 0)
    return True
@sims4.commands.Command('restaurant.comp_drinks_for_group', command_type=(sims4.commands.CommandType.Live))
def comp_drinks_for_group(opt_sim: OptionalTargetParam=None, _connection=None):
    """Order a complimentary round of drinks for the given Sim's dining group."""
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    dining_groups = zone_director.get_dining_groups_by_sim(sim)
    dining_group = dining_groups.pop()
    dining_group.order_course_for_group((ChefsChoice.DRINK_COURSE), complimentary=True)
    return True
@sims4.commands.Command('restaurant.comp_desserts_for_group', command_type=(sims4.commands.CommandType.Live))
def comp_desserts_for_group(opt_sim: OptionalTargetParam=None, _connection=None):
    """Order a complimentary dessert course for the given Sim's dining group."""
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    dining_groups = zone_director.get_dining_groups_by_sim(sim)
    dining_group = dining_groups.pop()
    dining_group.order_course_for_group((ChefsChoice.DESSERT_COURSE), complimentary=True)
    return True
@sims4.commands.Command('restaurant.recommend_order_for_table', command_type=(sims4.commands.CommandType.Live))
def recommend_order_for_table(sim_orders: str, _connection=None):
    """Propose (recommend) orders for every Sim named in a SimOrders text blob.

    Each order is recorded in RECOMMENDATION_PROPOSAL state; a recommendation
    interaction is then triggered from the active Sim to each target Sim.
    Returns True on success, False if the venue or any target Sim is missing.
    """
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    proto = Restaurant_pb2.SimOrders()
    text_format.Merge(sim_orders, proto)
    orders = [(order.sim_id, order.recipe_id) for order in proto.sim_orders]
    # Set comprehension instead of set([... for ... in [...]]) (flake8 C403).
    sims_in_order = {services.object_manager().get(order[0]) for order in orders}
    for sim in sims_in_order:
        if sim is None:
            sims4.commands.output("Trying to target order for a Sim that isn't on the lot", _connection)
            return False
        active_group_order = _get_active_group_order_for_dining_group(sim)
        if active_group_order:
            recipe_manager = services.get_instance_manager(sims4.resources.Types.RECIPE)
            for order in orders:
                recipe = recipe_manager.get(order[1])
                recipes = GroupOrder.get_food_drink_recipe_id_tuple(recipe)
                active_group_order.add_sim_order((order[0]), food_recipe_id=(recipes[0]), drink_recipe_id=(recipes[1]),
                  recommendation_state=(OrderRecommendationState.RECOMMENDATION_PROPOSAL),
                  order_status=(OrderStatus.ORDER_INIT))
        else:
            zone_director.order_for_table(orders, send_order=False,
              recommendation_state=(OrderRecommendationState.RECOMMENDATION_PROPOSAL),
              order_status=(OrderStatus.ORDER_INIT))
    # NOTE(review): `sim` here is whichever Sim the (unordered) set loop
    # visited last — preserved from the original, but worth confirming the
    # cost should be held against an arbitrary member's group.
    groups = zone_director.get_dining_groups_by_sim(sim)
    group = groups.pop()
    group.hold_ordered_cost(proto.meal_cost if has_field(proto, 'meal_cost') else 0)
    for sim in sims_in_order:
        zone_director.trigger_recommendation_interaction(services.get_active_sim(), sim)
    return True
@sims4.commands.Command('restaurant.npc_accept_or_reject_recommendation', command_type=(sims4.commands.CommandType.Live))
def npc_accept_or_reject_recommendation(opt_sim: OptionalTargetParam=None, accept_recommendation: bool=True, _connection=None):
    """Resolve an NPC's pending order recommendation.

    Accepting marks the existing recommended order ACCEPTED; rejecting
    replaces it with a chef's-choice order flagged REJECTED.
    """
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    group_order = zone_director.get_active_group_order_for_sim(sim.sim_id)
    if group_order is None:
        sims4.commands.output('Sim {} was not offered a recommendation.'.format(opt_sim), _connection)
        return False
    if accept_recommendation:
        pending_order = group_order.get_sim_order(sim.sim_id)
        if pending_order is not None:
            pending_order.recommendation_state = OrderRecommendationState.RECOMMENDATION_ACCEPTED
    else:
        group_order.remove_sim_order(sim.sim_id)
        food_recipe, drink_recipe = ChefsChoice.get_order_for_npc_sim(sim)
        group_order.add_sim_order((sim.sim_id), food_recipe_id=(food_recipe.guid64),
          drink_recipe_id=(drink_recipe.guid64),
          recommendation_state=(OrderRecommendationState.RECOMMENDATION_REJECTED),
          order_status=(OrderStatus.ORDER_INIT))
    return True
@sims4.commands.Command('restaurant.order_food_at_chef_station', command_type=(sims4.commands.CommandType.Live))
def order_food_at_chef_station(recipe_type: TunableInstanceParam(sims4.resources.Types.RECIPE), opt_sim: OptionalTargetParam=None, _connection=None):
    """Place a direct order for a recipe at the chef station for the given Sim.

    Returns True on success, False on any validation failure.
    """
    if recipe_type is None:
        sims4.commands.output('Recipe is None', _connection)
        return False
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    chef_situation = restaurant_utils.get_chef_situation()
    if chef_situation is None:
        # BUG FIX: pass the connection like every other failure path; the
        # original called output() without it.
        sims4.commands.output("Couldn't find a Chef Situation in this zone.", _connection)
        return False
    chef_situation.add_direct_order(recipe_type, sim)
    services.get_event_manager().process_event((test_events.TestEvent.RestaurantFoodOrdered), sim_info=(sim.sim_info))
    return True
@sims4.commands.Command('restaurant.npc_order_food_at_chef_station', command_type=(sims4.commands.CommandType.Live))
def npc_order_food_at_chef_station(opt_sim: OptionalTargetParam=None, chef_sim: OptionalTargetParam=None, _connection=None):
    """Have an NPC Sim order a chef's-choice meal at the given chef's station.

    Returns True on success, False on any validation failure.
    """
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    # Guard clause instead of the original's redundant else-block.
    chef_sim = get_optional_target(chef_sim, _connection)
    if chef_sim is None:
        sims4.commands.output("Chef {} doesn't exist.".format(chef_sim), _connection)
        return False
    chef_situation = restaurant_utils.get_chef_situation(chef_sim=chef_sim)
    if chef_situation is None:
        # BUG FIX: pass the connection like every other failure path; the
        # original called output() without it.
        sims4.commands.output("Couldn't find a Chef Situation in this zone.", _connection)
        return False
    if chef_situation.menu_preset is not None:
        food_order = ChefsChoice.get_order_for_npc_sim_with_menu(sim, chef_situation.menu_preset)
    else:
        food_order, _ = ChefsChoice.get_order_for_npc_sim(sim)
    chef_situation.add_direct_order(food_order, sim)
    services.get_event_manager().process_event((test_events.TestEvent.RestaurantFoodOrdered), sim_info=(sim.sim_info))
    return True
@sims4.commands.Command('restaurant.give_chef_feedback', command_type=(sims4.commands.CommandType.Live))
def give_chef_feedback(to_chef_sim_id: OptionalTargetParam=None, from_sim_id: OptionalTargetParam=None, is_compliment: bool=True, waitstaff_sim_id: OptionalTargetParam=None, _connection=None):
    """Relay a compliment or complaint from a diner to the chef via the waitstaff.

    Returns True on success, False if any participant or situation is missing.
    """
    from_sim = get_optional_target(from_sim_id, _connection)
    if from_sim is None:
        sims4.commands.output("From Sim {} doesn't exist.".format(from_sim_id), _connection)
        return False
    to_chef_sim = get_optional_target(to_chef_sim_id, _connection)
    if to_chef_sim is None:
        sims4.commands.output("To Chef Sim {} doesn't exist.".format(to_chef_sim_id), _connection)
        return False
    waitstaff_sim = get_optional_target(waitstaff_sim_id, _connection)
    if waitstaff_sim is None:
        sims4.commands.output("Waitstaff Sim {} doesn't exist.".format(waitstaff_sim_id), _connection)
        return False
    waitstaff_situation = restaurant_utils.get_waitstaff_situation(waitstaff_sim)
    if waitstaff_situation is None:
        # ROBUSTNESS: the original dereferenced the situation unconditionally
        # and would crash if the Sim had no waitstaff situation.
        sims4.commands.output("Couldn't find a waitstaff situation for Sim {}.".format(waitstaff_sim_id), _connection)
        return False
    waitstaff_situation.give_chef_feedback(to_chef_sim, from_sim, is_compliment)
    return True
@sims4.commands.Command('restaurant.npc_order_food_from_waitstaff', command_type=(sims4.commands.CommandType.Live))
def npc_order_food_from_waitstaff(opt_sim: OptionalTargetParam=None, _connection=None):
    """Have an NPC dining group place its food order with the waitstaff."""
    target_sim = get_optional_target(opt_sim, _connection)
    if target_sim is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('Not currently on a restaurant lot so cannot place orders with the waitstaff for NPC groups.', _connection)
        return False
    # Reuse an order already opened by any member of the Sim's dining group.
    existing_order = _get_active_group_order_for_dining_group(target_sim)
    for group in director.get_dining_groups_by_sim(target_sim):
        placed = group.order_for_table(active_group_order=existing_order)
        if not placed:
            sims4.commands.output('Failed to place order for dining group.', _connection)
            return False
    return True
@sims4.commands.Command('restaurant.comp_order_for_sim', command_type=(sims4.commands.CommandType.Live))
def comp_order_for_sim(opt_sim: OptionalTargetParam=None, _connection=None):
    """Comp (make complimentary) every delivered order belonging to the given Sim."""
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        # Bug fix: these three messages were mistakenly routed through
        # sims4.commands.Command (the decorator factory) instead of
        # sims4.commands.output, so nothing was ever printed to the client.
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Not currently on a restaurant lot.', _connection)
        return False
    business_manager = zone_director.business_manager
    if business_manager is None:
        sims4.commands.output("The current zone doesn't have a business manager.", _connection)
        return False
    for group_order in zone_director.get_delivered_orders_for_sim(sim.id):
        business_manager.comp_order_for_sim(group_order.get_sim_order(sim.id))
    return True
@sims4.commands.Command('restaurant.create_food_for_group_order_sim', command_type=(sims4.commands.CommandType.Live))
def create_food_for_group_order_sim(opt_sim: OptionalTargetParam=None, _connection=None):
    """Spawn the food for the active group order that the given Sim belongs to."""
    target = get_optional_target(opt_sim, _connection)
    if target is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('Not currently on a restaurant lot so can not create an order for a table.', _connection)
        return False
    order = director.get_active_group_order_for_sim(target.id)
    if order is None:
        sims4.commands.output('There is no group order in for the passed in sim {}.'.format(target), _connection)
        return False
    director.create_food_for_group_order(order)
    return True
@sims4.commands.Command('restaurant.create_food_for_group_order_table', command_type=(sims4.commands.CommandType.Live))
def create_food_for_group_order_table(table_id: OptionalTargetParam=None, _connection=None):
    """Spawn the food for the active group order attached to the given table."""
    table = get_optional_target(table_id, _connection)
    if table is None:
        sims4.commands.output("Table {} doesn't exist.".format(table_id), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Not currently on a restaurant lot so can not create an order for a table.', _connection)
        return False
    group_order = zone_director.get_active_group_order_for_table(table.id)
    if group_order is None:
        # Bug fix: this message referenced an undefined name 'sim' (copy/paste
        # from the sim-based variant), raising NameError whenever no order existed.
        sims4.commands.output('There is no group order in for the passed in table {}.'.format(table), _connection)
        return False
    zone_director.create_food_for_group_order(group_order)
    return True
@sims4.commands.Command('restaurant.set_ingredient_quality', command_type=(sims4.commands.CommandType.Live))
def set_ingredient_quality(ingredient_quality: RestaurantIngredientQualityType, _connection=None):
    """Set the ingredient-quality level on the current zone's business manager."""
    business_manager = services.business_service().get_business_manager_for_zone()
    if business_manager is None:
        # Bug fix: output() requires the connection argument; it was missing here.
        sims4.commands.output('Trying to set the ingredient quality for a restaurant but there was no valid business manager found for the current zone.', _connection)
        return False
    business_manager.set_ingredient_quality(ingredient_quality)
    return True
@sims4.commands.Command('restaurant.expedite_sims_order', command_type=(sims4.commands.CommandType.Live))
def expedite_sim_order(opt_sim: OptionalTargetParam=None, _connection=None):
    """Flag the given Sim's pending group order to be prepared immediately."""
    target = get_optional_target(opt_sim, _connection)
    if target is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('Not on a restaurant lot.', _connection)
        return
    if not director.has_group_order(target.id):
        sims4.commands.output('Sim {} does not have an order.'.format(target), _connection)
        return
    # Look the order up again and mark it; the existence check above only gates the message.
    order = director.get_group_order(target.id)
    if order is not None:
        order.expedited = True
@sims4.commands.Command('restaurant.refresh_configuration', command_type=(sims4.commands.CommandType.Live))
def refresh_configuration(_connection=None):
    """Force the restaurant zone director to re-read its configuration."""
    director = get_restaurant_zone_director()
    if director is not None:
        director.refresh_configuration()
def _get_active_group_order_for_dining_group(sim):
    """Return the first active group order held by any member of *sim*'s dining
    group(s), or None when there is no zone director or no active order."""
    director = get_restaurant_zone_director()
    if director is None:
        return
    for group in director.get_dining_groups_by_sim(sim):
        for member in group.all_sims_in_situation_gen():
            order = director.get_active_group_order_for_sim(member.sim_id)
            if order:
                return order
@sims4.commands.Command('restaurant.sim_is_employee', command_type=(sims4.commands.CommandType.Automation))
def sim_is_employee(opt_sim: OptionalTargetParam=None, _connection=None):
    """Automation query: report whether the given Sim works at the current restaurant."""
    target = get_optional_target(opt_sim, _connection)
    if target is None:
        sims4.commands.output("False, Sim {} doesn't exist.".format(opt_sim), _connection)
        sims4.commands.automation_output('RestaurantIsEmployee; Status:InvalidSim', _connection)
        return False
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('False, Not on a restaurant lot.', _connection)
        sims4.commands.automation_output('RestaurantIsEmployee; Status:NotOnLot', _connection)
        return False
    situation_manager = services.get_zone_situation_manager()
    if situation_manager is None:
        sims4.commands.output('False, There is no situation manager on this lot.', _connection)
        sims4.commands.automation_output('RestaurantIsEmployee; Status:NoSituationMgr', _connection)
        return False
    business_manager = director.business_manager
    if business_manager is None:
        # Unowned restaurant: fall back to checking the staff situations the Sim is in.
        staff_situation_types = (RestaurantTuning.CHEF_SITUATION,
                                 RestaurantTuning.HOST_SITUATION,
                                 RestaurantTuning.WAITSTAFF_SITUATION)
        for situation in situation_manager.get_situations_sim_is_in(target):
            if type(situation) in staff_situation_types:
                sims4.commands.output('True, Sim is an employee of the current restaurant.', _connection)
                sims4.commands.automation_output('RestaurantIsEmployee; Status:Success', _connection)
                return True
    elif business_manager.is_employee(target.sim_info):
        sims4.commands.output('True, Sim is currently an employee', _connection)
        sims4.commands.automation_output('RestaurantIsEmployee; Status:Success', _connection)
        return True
    sims4.commands.output('False, Sim is not an employee of the current restaurant.', _connection)
    sims4.commands.automation_output('RestaurantIsEmployee; Status:Failed', _connection)
    return False
@sims4.commands.Command('restaurant.is_open', command_type=(sims4.commands.CommandType.Automation))
def is_open(_connection=None):
    """Automation query: report whether the current restaurant is open for business."""
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('False, Not on a restaurant lot.', _connection)
        sims4.commands.automation_output('RestaurantIsOpen; Status:NotOnLot', _connection)
        return False
    business_manager = director.business_manager
    if business_manager is None:
        # No business manager means the restaurant is unowned.
        sims4.commands.output('True, unowned restaurants are always open.', _connection)
        sims4.commands.automation_output('RestaurantIsOpen; Status:Success', _connection)
        return True
    if business_manager.is_open:
        sims4.commands.output('True, this owned restaurant is currently open', _connection)
        sims4.commands.automation_output('RestaurantIsOpen; Status:Success', _connection)
        return True
    sims4.commands.output('False, this owned restaurant is currently closed', _connection)
    sims4.commands.automation_output('RestaurantIsOpen; Status:Failed', _connection)
    return False
@sims4.commands.Command('restaurant.get_sim_diner_state', command_type=(sims4.commands.CommandType.Automation))
def get_sim_dining_state(opt_sim: OptionalTargetParam=None, _connection=None):
    """Automation query: report the diner sub-situation state for the given Sim."""
    target = get_optional_target(opt_sim, _connection)
    if target is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('Not on a restaurant lot.', _connection)
        return False
    groups = director.get_dining_groups_by_sim(target)
    if not groups:
        sims4.commands.output('Sim {} is not in dining group'.format(target), _connection)
        sims4.commands.automation_output('RestaurantDinerState; Status:NotReady', _connection)
        return True
    dining_group = groups.pop()
    for sub_situation in dining_group.sub_situations:
        state = sub_situation.current_state_index().name
        sims4.commands.automation_output('RestaurantDinerState; Status:{}'.format(state), _connection)
return True | 51.791232 | 192 | 0.747743 |
from protocolbuffers import Restaurant_pb2
from event_testing import test_events
from google.protobuf import text_format
from restaurants import restaurant_utils
from restaurants.chefs_choice import ChefsChoice
from restaurants.restaurant_diner_situation import DinerSubSituationState, RestaurantDinerSubSituation, RestaurantDinerBackGroundSituation
from restaurants.restaurant_order import OrderStatus, OrderRecommendationState, GroupOrder
from restaurants.restaurant_tuning import RestaurantTuning, RestaurantIngredientQualityType, get_restaurant_zone_director
from server_commands.argument_helpers import TunableInstanceParam, OptionalTargetParam, get_optional_target
from sims import sim
from sims4.protocol_buffer_utils import has_field
import services, sims4.commands
@sims4.commands.Command('restaurant.order_food', command_type=(sims4.commands.CommandType.Live))
def order_food(recipe_type: TunableInstanceParam(sims4.resources.Types.RECIPE), opt_sim: OptionalTargetParam=None, _connection=None):
    """Order one recipe for a Sim at the current restaurant and bill the dining group."""
    if recipe_type is None:
        sims4.commands.output('Recipe is None', _connection)
        sims4.commands.automation_output('RestaurantOrderFood; Status:Failed', _connection)
        return False
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        sims4.commands.automation_output('RestaurantOrderFood; Status:Failed', _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        sims4.commands.automation_output('RestaurantOrderFood; Status:Failed', _connection)
        return False
    zone_director.make_one_order(sim, recipe_type)
    groups = zone_director.get_dining_groups_by_sim(sim)
    if groups is None:
        sims4.commands.output('Sim {} is not in dining group'.format(opt_sim), _connection)
        sims4.commands.automation_output('RestaurantOrderFood; Status:Failed', _connection)
        # Bug fix: the failure path fell through and called groups.pop() on None.
        return False
    group = groups.pop()
    group.hold_ordered_cost(recipe_type.restaurant_base_price)
    sims4.commands.automation_output('RestaurantOrderFood; Status:Success', _connection)
    return True
@sims4.commands.Command('restaurant.show_menu', command_type=(sims4.commands.CommandType.Live))
def show_menu(opt_sim: OptionalTargetParam=None, _connection=None):
    """Open the restaurant menu dialog for the given Sim."""
    target = get_optional_target(opt_sim, _connection)
    if target is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    director.show_menu(target)
@sims4.commands.Command('restaurant.show_menu_for_chef', command_type=(sims4.commands.CommandType.Live))
def show_menu_for_chef(opt_sim: OptionalTargetParam=None, chef_sim: OptionalTargetParam=None, _connection=None):
    """Open the chef-station menu dialog for a Sim ordering at the given chef."""
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    chef_sim = get_optional_target(chef_sim, _connection)
    if chef_sim is None:
        sims4.commands.output("Chef {} doesn't exist.".format(chef_sim), _connection)
        return False
    chef_situation = restaurant_utils.get_chef_situation(chef_sim=chef_sim)
    if chef_situation is None:
        # Bug fix: output() requires the connection argument; it was missing here.
        sims4.commands.output("Couldn't find a Chef Situation in this zone.", _connection)
        return False
    chef_situation.show_menu(sim)
@sims4.commands.Command('restaurant.show_recommendation_menu_for_sim', command_type=(sims4.commands.CommandType.Live))
def show_recommendation_menu_for_sim(opt_sim: OptionalTargetParam=None, owner_sim: OptionalTargetParam=None, _connection=None):
    """Open the menu in recommendation mode for the given Sim.

    owner_sim is accepted for command-line compatibility but is unused here.
    """
    target = get_optional_target(opt_sim, _connection)
    if target is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    director.show_menu(target, is_recommendation=True)
@sims4.commands.Command('restaurant.claim_table', command_type=(sims4.commands.CommandType.Live))
def claim_table(opt_sim: OptionalTargetParam=None, opt_table: OptionalTargetParam=None, _connection=None):
    """Claim a dining table for the given Sim."""
    target = get_optional_target(opt_sim, _connection)
    if target is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    # NOTE(review): no None check on the table — presumably the director handles
    # a missing table itself; confirm before tightening.
    requested_table = get_optional_target(opt_table, _connection)
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    director.claim_table(target, requested_table)
@sims4.commands.Command('restaurant.order_for_table', command_type=(sims4.commands.CommandType.Live))
def order_for_table(sim_orders: str, _connection=None):
    """Place a table order from a text-format Restaurant_pb2.SimOrders proto and
    put the meal cost on hold for the dining group."""
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    proto = Restaurant_pb2.SimOrders()
    text_format.Merge(sim_orders, proto)
    orders = [(order.sim_id, order.recipe_id) for order in proto.sim_orders]
    if not orders:
        # Robustness fix: orders[0][0] below raised IndexError on an empty proto.
        sims4.commands.output('No sim orders were provided', _connection)
        return False
    # The first ordering Sim is used to locate the dining group for billing.
    sim = services.object_manager().get(orders[0][0])
    if sim is None:
        sims4.commands.output("Trying to order for a Sim that isn't on the lot", _connection)
        return False
    zone_director.order_for_table(orders)
    groups = zone_director.get_dining_groups_by_sim(sim)
    group = groups.pop()
    group.hold_ordered_cost(proto.meal_cost if has_field(proto, 'meal_cost') else 0)
    return True
@sims4.commands.Command('restaurant.comp_drinks_for_group', command_type=(sims4.commands.CommandType.Live))
def comp_drinks_for_group(opt_sim: OptionalTargetParam=None, _connection=None):
    """Order a complimentary round of drinks for the Sim's dining group."""
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    target = get_optional_target(opt_sim, _connection)
    if target is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    dining_group = director.get_dining_groups_by_sim(target).pop()
    dining_group.order_course_for_group((ChefsChoice.DRINK_COURSE), complimentary=True)
    return True
@sims4.commands.Command('restaurant.comp_desserts_for_group', command_type=(sims4.commands.CommandType.Live))
def comp_desserts_for_group(opt_sim: OptionalTargetParam=None, _connection=None):
    """Order a complimentary round of desserts for the Sim's dining group."""
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    target = get_optional_target(opt_sim, _connection)
    if target is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    dining_group = director.get_dining_groups_by_sim(target).pop()
    dining_group.order_course_for_group((ChefsChoice.DESSERT_COURSE), complimentary=True)
    return True
@sims4.commands.Command('restaurant.recommend_order_for_table', command_type=(sims4.commands.CommandType.Live))
def recommend_order_for_table(sim_orders: str, _connection=None):
    """Propose a waitstaff recommendation for every Sim listed in *sim_orders*
    (a text-format Restaurant_pb2.SimOrders message) and trigger the
    recommendation interaction for each of them."""
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    proto = Restaurant_pb2.SimOrders()
    text_format.Merge(sim_orders, proto)
    orders = [(order.sim_id, order.recipe_id) for order in proto.sim_orders]
    # Resolve each ordering Sim id to its object; duplicates collapse via the set.
    sims_in_order = set([services.object_manager().get(order_sim_id) for order_sim_id in [order[0] for order in orders]])
    for sim in sims_in_order:
        if sim is None:
            sims4.commands.output("Trying to target order for a Sim that isn't on the lot", _connection)
            return False
        active_group_order = _get_active_group_order_for_dining_group(sim)
        if active_group_order:
            # The group already has an open order: append the proposals to it.
            recipe_manager = services.get_instance_manager(sims4.resources.Types.RECIPE)
            for order in orders:
                recipe = recipe_manager.get(order[1])
                recipes = GroupOrder.get_food_drink_recipe_id_tuple(recipe)
                active_group_order.add_sim_order((order[0]), food_recipe_id=(recipes[0]), drink_recipe_id=(recipes[1]),
                  recommendation_state=(OrderRecommendationState.RECOMMENDATION_PROPOSAL),
                  order_status=(OrderStatus.ORDER_INIT))
        else:
            # No open order yet: open one for the whole table in proposal state.
            zone_director.order_for_table(orders, send_order=False,
              recommendation_state=(OrderRecommendationState.RECOMMENDATION_PROPOSAL),
              order_status=(OrderStatus.ORDER_INIT))
    # NOTE(review): 'sim' here is whichever element the set iteration yielded
    # last — presumably all Sims share one dining group, so any member works;
    # verify against callers before relying on this.
    groups = zone_director.get_dining_groups_by_sim(sim)
    group = groups.pop()
    group.hold_ordered_cost(proto.meal_cost if has_field(proto, 'meal_cost') else 0)
    for sim in sims_in_order:
        zone_director.trigger_recommendation_interaction(services.get_active_sim(), sim)
    return True
@sims4.commands.Command('restaurant.npc_accept_or_reject_recommendation', command_type=(sims4.commands.CommandType.Live))
def npc_accept_or_reject_recommendation(opt_sim: OptionalTargetParam=None, accept_recommendation: bool=True, _connection=None):
    """Resolve a pending waitstaff recommendation for an NPC Sim.

    Accepting keeps the proposed order; rejecting replaces it with a fresh
    chef's-choice order for that Sim.
    """
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Current venue is not restaurant', _connection)
        return False
    group_order = zone_director.get_active_group_order_for_sim(sim.sim_id)
    if group_order is None:
        sims4.commands.output('Sim {} was not offered a recommendation.'.format(opt_sim), _connection)
        return False
    if accept_recommendation:
        sim_order = group_order.get_sim_order(sim.sim_id)
        if sim_order is not None:
            sim_order.recommendation_state = OrderRecommendationState.RECOMMENDATION_ACCEPTED
    else:
        # Rejected: drop the proposed order and substitute a chef's-choice pick.
        group_order.remove_sim_order(sim.sim_id)
        food_recipe, drink_recipe = ChefsChoice.get_order_for_npc_sim(sim)
        group_order.add_sim_order((sim.sim_id), food_recipe_id=(food_recipe.guid64),
          drink_recipe_id=(drink_recipe.guid64),
          recommendation_state=(OrderRecommendationState.RECOMMENDATION_REJECTED),
          order_status=(OrderStatus.ORDER_INIT))
    return True
@sims4.commands.Command('restaurant.order_food_at_chef_station', command_type=(sims4.commands.CommandType.Live))
def order_food_at_chef_station(recipe_type: TunableInstanceParam(sims4.resources.Types.RECIPE), opt_sim: OptionalTargetParam=None, _connection=None):
    """Place a direct order for recipe_type with the zone's chef on behalf of a Sim."""
    if recipe_type is None:
        sims4.commands.output('Recipe is None', _connection)
        return False
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    chef_situation = restaurant_utils.get_chef_situation()
    if chef_situation is None:
        # Bug fix: output() requires the connection argument; it was missing here.
        sims4.commands.output("Couldn't find a Chef Situation in this zone.", _connection)
        return False
    chef_situation.add_direct_order(recipe_type, sim)
    services.get_event_manager().process_event((test_events.TestEvent.RestaurantFoodOrdered), sim_info=(sim.sim_info))
    return True
@sims4.commands.Command('restaurant.npc_order_food_at_chef_station', command_type=(sims4.commands.CommandType.Live))
def npc_order_food_at_chef_station(opt_sim: OptionalTargetParam=None, chef_sim: OptionalTargetParam=None, _connection=None):
    """Have an NPC Sim place an order directly at the given chef's station."""
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    # Guard-clause style: the original 'else:' nesting after the early return was redundant.
    chef_sim = get_optional_target(chef_sim, _connection)
    if chef_sim is None:
        sims4.commands.output("Chef {} doesn't exist.".format(chef_sim), _connection)
        return False
    chef_situation = restaurant_utils.get_chef_situation(chef_sim=chef_sim)
    if chef_situation is None:
        # Bug fix: output() requires the connection argument; it was missing here.
        sims4.commands.output("Couldn't find a Chef Situation in this zone.", _connection)
        return False
    if chef_situation.menu_preset is not None:
        food_order = ChefsChoice.get_order_for_npc_sim_with_menu(sim, chef_situation.menu_preset)
    else:
        food_order, _ = ChefsChoice.get_order_for_npc_sim(sim)
    chef_situation.add_direct_order(food_order, sim)
    services.get_event_manager().process_event((test_events.TestEvent.RestaurantFoodOrdered), sim_info=(sim.sim_info))
    return True
@sims4.commands.Command('restaurant.give_chef_feedback', command_type=(sims4.commands.CommandType.Live))
def give_chef_feedback(to_chef_sim_id: OptionalTargetParam=None, from_sim_id: OptionalTargetParam=None, is_compliment: bool=True, waitstaff_sim_id: OptionalTargetParam=None, _connection=None):
    """Relay a diner's compliment (or complaint) about the chef via a waitstaff Sim."""
    from_sim = get_optional_target(from_sim_id, _connection)
    if from_sim is None:
        sims4.commands.output("From Sim {} doesn't exist.".format(from_sim_id), _connection)
        return False
    to_chef_sim = get_optional_target(to_chef_sim_id, _connection)
    if to_chef_sim is None:
        sims4.commands.output("To Chef Sim {} doesn't exist.".format(to_chef_sim_id), _connection)
        return False
    waitstaff_sim = get_optional_target(waitstaff_sim_id, _connection)
    if waitstaff_sim is None:
        sims4.commands.output("Waitstaff Sim {} doesn't exist.".format(waitstaff_sim_id), _connection)
        return False
    waitstaff_situation = restaurant_utils.get_waitstaff_situation(waitstaff_sim)
    if waitstaff_situation is None:
        # Bug fix: previously dereferenced without checking, crashing when the
        # given Sim is not part of a waitstaff situation.
        sims4.commands.output("Couldn't find a Waitstaff Situation for Sim {}.".format(waitstaff_sim), _connection)
        return False
    waitstaff_situation.give_chef_feedback(to_chef_sim, from_sim, is_compliment)
    return True
@sims4.commands.Command('restaurant.npc_order_food_from_waitstaff', command_type=(sims4.commands.CommandType.Live))
def npc_order_food_from_waitstaff(opt_sim: OptionalTargetParam=None, _connection=None):
    """Have an NPC dining group place its food order with the waitstaff."""
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Not currently on a restaurant lot so cannot place orders with the waitstaff for NPC groups.', _connection)
        return False
    # Reuse an order already opened by any member of the Sim's dining group.
    active_group_order = _get_active_group_order_for_dining_group(sim)
    dining_groups = zone_director.get_dining_groups_by_sim(sim)
    for dining_group in dining_groups:
        if not dining_group.order_for_table(active_group_order=active_group_order):
            sims4.commands.output('Failed to place order for dining group.', _connection)
            return False
    return True
@sims4.commands.Command('restaurant.comp_order_for_sim', command_type=(sims4.commands.CommandType.Live))
def comp_order_for_sim(opt_sim: OptionalTargetParam=None, _connection=None):
    """Comp (make complimentary) every delivered order belonging to the given Sim."""
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        # Bug fix: these three messages were mistakenly routed through
        # sims4.commands.Command (the decorator factory) instead of
        # sims4.commands.output, so nothing was ever printed to the client.
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Not currently on a restaurant lot.', _connection)
        return False
    business_manager = zone_director.business_manager
    if business_manager is None:
        sims4.commands.output("The current zone doesn't have a business manager.", _connection)
        return False
    for group_order in zone_director.get_delivered_orders_for_sim(sim.id):
        business_manager.comp_order_for_sim(group_order.get_sim_order(sim.id))
    return True
@sims4.commands.Command('restaurant.create_food_for_group_order_sim', command_type=(sims4.commands.CommandType.Live))
def create_food_for_group_order_sim(opt_sim: OptionalTargetParam=None, _connection=None):
    """Spawn the food for the active group order that the given Sim belongs to."""
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Not currently on a restaurant lot so can not create an order for a table.', _connection)
        return False
    group_order = zone_director.get_active_group_order_for_sim(sim.id)
    if group_order is None:
        sims4.commands.output('There is no group order in for the passed in sim {}.'.format(sim), _connection)
        return False
    zone_director.create_food_for_group_order(group_order)
    return True
@sims4.commands.Command('restaurant.create_food_for_group_order_table', command_type=(sims4.commands.CommandType.Live))
def create_food_for_group_order_table(table_id: OptionalTargetParam=None, _connection=None):
    """Spawn the food for the active group order attached to the given table."""
    table = get_optional_target(table_id, _connection)
    if table is None:
        sims4.commands.output("Table {} doesn't exist.".format(table_id), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Not currently on a restaurant lot so can not create an order for a table.', _connection)
        return False
    group_order = zone_director.get_active_group_order_for_table(table.id)
    if group_order is None:
        # Bug fix: this message referenced an undefined name 'sim' (copy/paste
        # from the sim-based variant), raising NameError whenever no order existed.
        sims4.commands.output('There is no group order in for the passed in table {}.'.format(table), _connection)
        return False
    zone_director.create_food_for_group_order(group_order)
    return True
@sims4.commands.Command('restaurant.set_ingredient_quality', command_type=(sims4.commands.CommandType.Live))
def set_ingredient_quality(ingredient_quality: RestaurantIngredientQualityType, _connection=None):
    """Set the ingredient-quality level on the current zone's business manager."""
    business_manager = services.business_service().get_business_manager_for_zone()
    if business_manager is None:
        # Bug fix: output() requires the connection argument; it was missing here.
        sims4.commands.output('Trying to set the ingredient quality for a restaurant but there was no valid business manager found for the current zone.', _connection)
        return False
    business_manager.set_ingredient_quality(ingredient_quality)
    return True
@sims4.commands.Command('restaurant.expedite_sims_order', command_type=(sims4.commands.CommandType.Live))
def expedite_sim_order(opt_sim: OptionalTargetParam=None, _connection=None):
    """Flag the given Sim's pending group order to be prepared immediately."""
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist.".format(opt_sim), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Not on a restaurant lot.', _connection)
        return
    if not zone_director.has_group_order(sim.id):
        sims4.commands.output('Sim {} does not have an order.'.format(sim), _connection)
        return
    group_order = zone_director.get_group_order(sim.id)
    if group_order is not None:
        group_order.expedited = True
@sims4.commands.Command('restaurant.refresh_configuration', command_type=(sims4.commands.CommandType.Live))
def refresh_configuration(_connection=None):
    """Force the restaurant zone director to re-read its configuration."""
    zone_director = get_restaurant_zone_director()
    if zone_director is not None:
        zone_director.refresh_configuration()
def _get_active_group_order_for_dining_group(sim):
    """Return the first active group order held by any member of *sim*'s dining
    group(s), or None when there is no zone director or no active order."""
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        return
    dining_groups = zone_director.get_dining_groups_by_sim(sim)
    for dining_group in dining_groups:
        for group_sim in dining_group.all_sims_in_situation_gen():
            active_group_order = zone_director.get_active_group_order_for_sim(group_sim.sim_id)
            if active_group_order:
                return active_group_order
@sims4.commands.Command('restaurant.sim_is_employee', command_type=(sims4.commands.CommandType.Automation))
def sim_is_employee(opt_sim: OptionalTargetParam=None, _connection=None):
    """Automation query: report whether the given Sim works at the current restaurant."""
    target = get_optional_target(opt_sim, _connection)
    if target is None:
        sims4.commands.output("False, Sim {} doesn't exist.".format(opt_sim), _connection)
        sims4.commands.automation_output('RestaurantIsEmployee; Status:InvalidSim', _connection)
        return False
    director = get_restaurant_zone_director()
    if director is None:
        sims4.commands.output('False, Not on a restaurant lot.', _connection)
        sims4.commands.automation_output('RestaurantIsEmployee; Status:NotOnLot', _connection)
        return False
    situation_manager = services.get_zone_situation_manager()
    if situation_manager is None:
        sims4.commands.output('False, There is no situation manager on this lot.', _connection)
        sims4.commands.automation_output('RestaurantIsEmployee; Status:NoSituationMgr', _connection)
        return False
    business_manager = director.business_manager
    if business_manager is None:
        # Unowned restaurant: fall back to checking the staff situations the Sim is in.
        staff_situation_types = (RestaurantTuning.CHEF_SITUATION,
                                 RestaurantTuning.HOST_SITUATION,
                                 RestaurantTuning.WAITSTAFF_SITUATION)
        for situation in situation_manager.get_situations_sim_is_in(target):
            if type(situation) in staff_situation_types:
                sims4.commands.output('True, Sim is an employee of the current restaurant.', _connection)
                sims4.commands.automation_output('RestaurantIsEmployee; Status:Success', _connection)
                return True
    elif business_manager.is_employee(target.sim_info):
        sims4.commands.output('True, Sim is currently an employee', _connection)
        sims4.commands.automation_output('RestaurantIsEmployee; Status:Success', _connection)
        return True
    sims4.commands.output('False, Sim is not an employee of the current restaurant.', _connection)
    sims4.commands.automation_output('RestaurantIsEmployee; Status:Failed', _connection)
    return False
@sims4.commands.Command('restaurant.is_open', command_type=(sims4.commands.CommandType.Automation))
def is_open(_connection=None):
    """Automation query: report whether the current restaurant is open for business."""
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('False, Not on a restaurant lot.', _connection)
        sims4.commands.automation_output('RestaurantIsOpen; Status:NotOnLot', _connection)
        return False
    if zone_director.business_manager is None:
        # No business manager means the restaurant is unowned.
        sims4.commands.output('True, unowned restaurants are always open.', _connection)
        sims4.commands.automation_output('RestaurantIsOpen; Status:Success', _connection)
        return True
    if zone_director.business_manager.is_open:
        sims4.commands.output('True, this owned restaurant is currently open', _connection)
        sims4.commands.automation_output('RestaurantIsOpen; Status:Success', _connection)
        return True
    sims4.commands.output('False, this owned restaurant is currently closed', _connection)
    sims4.commands.automation_output('RestaurantIsOpen; Status:Failed', _connection)
    return False
@sims4.commands.Command('restaurant.get_sim_diner_state', command_type=(sims4.commands.CommandType.Automation))
def get_sim_dining_state(opt_sim: OptionalTargetParam=None, _connection=None):
    """Automation query: report the diner sub-situation state for the given Sim."""
    sim = get_optional_target(opt_sim, _connection)
    if sim is None:
        sims4.commands.output("Sim {} doesn't exist".format(opt_sim), _connection)
        return False
    zone_director = get_restaurant_zone_director()
    if zone_director is None:
        sims4.commands.output('Not on a restaurant lot.', _connection)
        return False
    groups = zone_director.get_dining_groups_by_sim(sim)
    if not groups:
        sims4.commands.output('Sim {} is not in dining group'.format(sim), _connection)
        sims4.commands.automation_output('RestaurantDinerState; Status:NotReady', _connection)
        return True
    dining_group = groups.pop()
    for sub_situation in dining_group.sub_situations:
        state = sub_situation.current_state_index().name
        sims4.commands.automation_output('RestaurantDinerState; Status:{}'.format(state), _connection)
return True | true | true |
f72fd45c61980e5c188d7f2e1db08ef2a024468b | 685 | py | Python | examples/custom_plugin/plugins/MyFirstPlugin/pwba_plugin.py | pxlc/PyWebBrowserApp | 0165b29cbe5f88068f62d8298b1f5e3ee611a985 | [
"MIT"
] | 1 | 2021-11-09T07:53:25.000Z | 2021-11-09T07:53:25.000Z | examples/custom_plugin/plugins/MyFirstPlugin/pwba_plugin.py | pxlc/PyWebBrowserApp | 0165b29cbe5f88068f62d8298b1f5e3ee611a985 | [
"MIT"
] | null | null | null | examples/custom_plugin/plugins/MyFirstPlugin/pwba_plugin.py | pxlc/PyWebBrowserApp | 0165b29cbe5f88068f62d8298b1f5e3ee611a985 | [
"MIT"
] | null | null | null |
from PyWebBrowserApp import PluginBase
from PyWebBrowserApp import register_plugin_op
class Plugin(PluginBase):
    """Example PyWebBrowserApp plugin exposing two browser-callable ops."""

    def __init__(self):
        super(Plugin, self).__init__()
        # '${P}' is a template token replaced with the plugin name on generation.
        self.name = '${P}'

    @register_plugin_op
    def test_plugin_callback(self, op_data):
        """Minimal op used to verify the browser-to-python callback path."""
        print('Hello from ${P} callback')

    @register_plugin_op
    def roundtrip_from_js(self, op_data):
        """Receive a message from JavaScript and echo it back to the browser."""
        alert_msg = op_data.get('alert_msg', '???')
        log_line = '[Plugin "%s"] in roundtrip_from_js() method, got alert_msg "%s"' % (self.name, alert_msg)
        self.info(log_line)
        payload = {'alert_msg': alert_msg}
        self.plugin_to_webbrowser('roundtrip_from_python', payload)
| 25.37037 | 109 | 0.665693 |
from PyWebBrowserApp import PluginBase
from PyWebBrowserApp import register_plugin_op
class Plugin(PluginBase):
    # Example PyWebBrowserApp plugin; '${P}' is a template placeholder that is
    # substituted with the actual plugin name when the plugin is generated.

    def __init__(self):
        super(Plugin, self).__init__()
        self.name = '${P}'

    @register_plugin_op
    def test_plugin_callback(self, op_data):
        # Minimal op used to verify the browser-to-python callback path.
        print('Hello from ${P} callback')

    @register_plugin_op
    def roundtrip_from_js(self, op_data):
        # Receive a message from JavaScript and echo it back to the browser
        # through the 'roundtrip_from_python' op.
        alert_msg = op_data.get('alert_msg', '???')
        self.info('[Plugin "%s"] in roundtrip_from_js() method, got alert_msg "%s"' % (self.name, alert_msg))
        self.plugin_to_webbrowser('roundtrip_from_python', {'alert_msg': alert_msg})
| true | true |
f72fd4a32451e1afa48eb80e2147811dcd4f5f9f | 54,405 | py | Python | .kodi/addons/script.ftvguide/gui.py | C6SUMMER/allinclusive-kodi-pi | 8baf247c79526849c640c6e56ca57a708a65bd11 | [
"Apache-2.0"
] | null | null | null | .kodi/addons/script.ftvguide/gui.py | C6SUMMER/allinclusive-kodi-pi | 8baf247c79526849c640c6e56ca57a708a65bd11 | [
"Apache-2.0"
] | null | null | null | .kodi/addons/script.ftvguide/gui.py | C6SUMMER/allinclusive-kodi-pi | 8baf247c79526849c640c6e56ca57a708a65bd11 | [
"Apache-2.0"
] | 2 | 2018-04-17T17:34:39.000Z | 2020-07-26T03:43:33.000Z | #
# Copyright (C) 2014 Tommy Winther
# http://tommy.winther.nu
#
# Modified for FTV Guide (09/2014 onwards)
# by Thomas Geppert [bluezed] - bluezed.apps@gmail.com
#
# This Program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This Program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this Program; see the file LICENSE.txt. If not, write to
# the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# http://www.gnu.org/copyleft/gpl.html
#
import datetime
import threading
import time
import xbmc
import xbmcgui
import source as src
from notification import Notification
from strings import *
import streaming
# Master switch for debug() logging below.
DEBUG = False

# Window modes: full EPG grid, fullscreen TV, or TV with on-screen display.
MODE_EPG = 'EPG'
MODE_TV = 'TV'
MODE_OSD = 'OSD'

# Kodi action ids (mirror xbmcgui action constants).
ACTION_LEFT = 1
ACTION_RIGHT = 2
ACTION_UP = 3
ACTION_DOWN = 4
ACTION_PAGE_UP = 5
ACTION_PAGE_DOWN = 6
ACTION_SELECT_ITEM = 7
ACTION_PARENT_DIR = 9
ACTION_PREVIOUS_MENU = 10
ACTION_SHOW_INFO = 11
ACTION_NEXT_ITEM = 14
ACTION_PREV_ITEM = 15

ACTION_MOUSE_WHEEL_UP = 104
ACTION_MOUSE_WHEEL_DOWN = 105
ACTION_MOUSE_MOVE = 107

# Key/button codes.
KEY_NAV_BACK = 92
KEY_CONTEXT_MENU = 117
KEY_HOME = 159
KEY_ESC = 61467

# EPG layout: channel rows per page; the grid spans 4 half-hour columns.
CHANNELS_PER_PAGE = 8

HALF_HOUR = datetime.timedelta(minutes=30)

# Active skin folder name, read from the add-on settings.
SKIN = ADDON.getSetting('skin')
def debug(s):
    """Write *s* to the Kodi debug log when the module DEBUG flag is set."""
    if not DEBUG:
        return
    xbmc.log(str(s), xbmc.LOGDEBUG)
class Point(object):
    """Mutable 2-D coordinate used to remember the EPG focus position."""

    def __init__(self):
        self.x = 0
        self.y = 0

    def __repr__(self):
        return 'Point(x=%d, y=%d)' % (self.x, self.y)
class EPGView(object):
    """Pixel geometry of the on-screen EPG grid (edges, width, row height)."""

    def __init__(self):
        self.top = 0
        self.left = 0
        self.right = 0
        self.bottom = 0
        self.width = 0
        self.cellHeight = 0
class ControlAndProgram(object):
    """Pairs an EPG grid button control with the program it represents."""

    def __init__(self, control, program):
        # xbmcgui control rendered in the grid.
        self.control = control
        # source.Program instance shown by that control.
        self.program = program
class TVGuide(xbmcgui.WindowXML):
    """Main EPG window of the add-on.

    Runs in one of three modes: MODE_EPG (grid visible), MODE_TV
    (fullscreen playback) and MODE_OSD (playback with on-screen program
    info).  The C_* constants are control ids that must match the ids
    declared in the skin xml ('script-tvguide-main.xml').
    """
    C_MAIN_DATE_LONG = 3999
    C_MAIN_DATE = 4000
    C_MAIN_TITLE = 4020
    C_MAIN_TIME = 4021
    C_MAIN_DESCRIPTION = 4022
    C_MAIN_IMAGE = 4023
    C_MAIN_LOGO = 4024
    C_MAIN_TIMEBAR = 4100
    C_MAIN_LOADING = 4200
    C_MAIN_LOADING_PROGRESS = 4201
    C_MAIN_LOADING_TIME_LEFT = 4202
    C_MAIN_LOADING_CANCEL = 4203
    C_MAIN_MOUSE_CONTROLS = 4300
    C_MAIN_MOUSE_HOME = 4301
    C_MAIN_MOUSE_LEFT = 4302
    C_MAIN_MOUSE_UP = 4303
    C_MAIN_MOUSE_DOWN = 4304
    C_MAIN_MOUSE_RIGHT = 4305
    C_MAIN_MOUSE_EXIT = 4306
    C_MAIN_BACKGROUND = 4600
    C_MAIN_EPG = 5000
    C_MAIN_EPG_VIEW_MARKER = 5001
    C_MAIN_OSD = 6000
    C_MAIN_OSD_TITLE = 6001
    C_MAIN_OSD_TIME = 6002
    C_MAIN_OSD_DESCRIPTION = 6003
    C_MAIN_OSD_CHANNEL_LOGO = 6004
    C_MAIN_OSD_CHANNEL_TITLE = 6005

    def __new__(cls):
        # WindowXML needs the xml file, add-on path and skin at construction.
        return super(TVGuide, cls).__new__(cls, 'script-tvguide-main.xml', ADDON.getAddonInfo('path'), SKIN)

    def __init__(self):
        """Initialize window state; the database is created lazily in onInit()."""
        super(TVGuide, self).__init__()
        self.notification = None
        self.redrawingEPG = False
        self.isClosing = False
        self.controlAndProgramList = list()
        self.ignoreMissingControlIds = list()
        self.channelIdx = 0
        self.focusPoint = Point()
        self.epgView = EPGView()
        self.streamingService = streaming.StreamsService(ADDON)

        self.player = xbmc.Player()
        self.database = None

        self.mode = MODE_EPG
        self.currentChannel = None

        # OSD only makes sense for windowed playback; alternative playback
        # hands the url off to another plugin, so it disables the OSD.
        self.osdEnabled = ADDON.getSetting('enable.osd') == 'true' and ADDON.getSetting(
            'alternative.playback') != 'true'
        self.alternativePlayback = ADDON.getSetting('alternative.playback') == 'true'
        self.osdChannel = None
        self.osdProgram = None

        # find nearest half hour
        self.viewStartDate = datetime.datetime.today()
        self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30,
                                                 seconds=self.viewStartDate.second)

    def getControl(self, controlId):
        """Like WindowXML.getControl() but returns None instead of raising.

        Missing ids that were registered in ignoreMissingControlIds are
        expected (already-removed program buttons); any other miss closes
        the window.
        """
        try:
            return super(TVGuide, self).getControl(controlId)
        except:
            if controlId in self.ignoreMissingControlIds:
                return None
            if not self.isClosing:
                self.close()
            return None

    def close(self):
        """Stop playback, close the database asynchronously, then the window."""
        if not self.isClosing:
            self.isClosing = True
            if self.player.isPlaying():
                self.player.stop()
            if self.database:
                # database.close() invokes the window close as its callback
                self.database.close(super(TVGuide, self).close)
            else:
                super(TVGuide, self).close()

    def onInit(self):
        """Measure the EPG grid from the skin marker and kick off loading."""
        self._hideControl(self.C_MAIN_MOUSE_CONTROLS, self.C_MAIN_OSD)
        self._showControl(self.C_MAIN_EPG, self.C_MAIN_LOADING)
        self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(BACKGROUND_UPDATE_IN_PROGRESS))
        self.setFocusId(self.C_MAIN_LOADING_CANCEL)

        # The invisible view-marker control defines the grid geometry.
        control = self.getControl(self.C_MAIN_EPG_VIEW_MARKER)
        if control:
            left, top = control.getPosition()
            self.focusPoint.x = left
            self.focusPoint.y = top
            self.epgView.left = left
            self.epgView.top = top
            self.epgView.right = left + control.getWidth()
            self.epgView.bottom = top + control.getHeight()
            self.epgView.width = control.getWidth()
            self.epgView.cellHeight = control.getHeight() / CHANNELS_PER_PAGE

        if self.database:
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        else:
            try:
                self.database = src.Database()
            except src.SourceNotConfiguredException:
                self.onSourceNotConfigured()
                self.close()
                return
            self.database.initialize(self.onSourceInitialized, self.isSourceInitializationCancelled)
        self.updateTimebar()

    def onAction(self, action):
        """Dispatch input to the handler for the current window mode."""
        debug('Mode is: %s' % self.mode)

        if self.mode == MODE_TV:
            self.onActionTVMode(action)
        elif self.mode == MODE_OSD:
            self.onActionOSDMode(action)
        elif self.mode == MODE_EPG:
            self.onActionEPGMode(action)

    def onActionTVMode(self, action):
        """Input handling during fullscreen playback (no OSD visible)."""
        if action.getId() == ACTION_PAGE_UP:
            self._channelUp()
        elif action.getId() == ACTION_PAGE_DOWN:
            self._channelDown()
        elif not self.osdEnabled:
            pass  # skip the rest of the actions
        elif action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK, KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU]:
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif action.getId() == ACTION_SHOW_INFO:
            self._showOsd()

    def onActionOSDMode(self, action):
        """Input handling while the OSD overlay is visible.

        Up/down browse channels, left/right browse programs of the OSD
        channel; select switches playback to the OSD channel.
        """
        if action.getId() == ACTION_SHOW_INFO:
            self._hideOsd()
        elif action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK, KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU]:
            self._hideOsd()
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif action.getId() == ACTION_SELECT_ITEM:
            if self.playChannel(self.osdChannel):
                self._hideOsd()
        elif action.getId() == ACTION_PAGE_UP:
            self._channelUp()
            self._showOsd()
        elif action.getId() == ACTION_PAGE_DOWN:
            self._channelDown()
            self._showOsd()
        elif action.getId() == ACTION_UP:
            self.osdChannel = self.database.getPreviousChannel(self.osdChannel)
            self.osdProgram = self.database.getCurrentProgram(self.osdChannel)
            self._showOsd()
        elif action.getId() == ACTION_DOWN:
            self.osdChannel = self.database.getNextChannel(self.osdChannel)
            self.osdProgram = self.database.getCurrentProgram(self.osdChannel)
            self._showOsd()
        elif action.getId() == ACTION_LEFT:
            previousProgram = self.database.getPreviousProgram(self.osdProgram)
            if previousProgram:
                self.osdProgram = previousProgram
                self._showOsd()
        elif action.getId() == ACTION_RIGHT:
            nextProgram = self.database.getNextProgram(self.osdProgram)
            if nextProgram:
                self.osdProgram = nextProgram
                self._showOsd()

    def onActionEPGMode(self, action):
        """Input handling while the EPG grid is visible."""
        if action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK]:
            self.close()
            return
        # catch the ESC key
        elif action.getId() == ACTION_PREVIOUS_MENU and action.getButtonCode() == KEY_ESC:
            self.close()
            return
        elif action.getId() == ACTION_MOUSE_MOVE:
            self._showControl(self.C_MAIN_MOUSE_CONTROLS)
            return
        elif action.getId() == KEY_CONTEXT_MENU:
            if self.player.isPlaying():
                self._hideEpg()

        # Work out the focused program cell; if focus was lost, restore it
        # and swallow the action.
        controlInFocus = None
        currentFocus = self.focusPoint
        try:
            controlInFocus = self.getFocus()
            if controlInFocus in [elem.control for elem in self.controlAndProgramList]:
                (left, top) = controlInFocus.getPosition()
                currentFocus = Point()
                currentFocus.x = left + (controlInFocus.getWidth() / 2)
                currentFocus.y = top + (controlInFocus.getHeight() / 2)
        except Exception:
            control = self._findControlAt(self.focusPoint)
            if control is None and len(self.controlAndProgramList) > 0:
                control = self.controlAndProgramList[0].control
            if control is not None:
                self.setFocus(control)
            return

        if action.getId() == ACTION_LEFT:
            self._left(currentFocus)
        elif action.getId() == ACTION_RIGHT:
            self._right(currentFocus)
        elif action.getId() == ACTION_UP:
            self._up(currentFocus)
        elif action.getId() == ACTION_DOWN:
            self._down(currentFocus)
        elif action.getId() == ACTION_NEXT_ITEM:
            self._nextDay()
        elif action.getId() == ACTION_PREV_ITEM:
            self._previousDay()
        elif action.getId() == ACTION_PAGE_UP:
            self._moveUp(CHANNELS_PER_PAGE)
        elif action.getId() == ACTION_PAGE_DOWN:
            self._moveDown(CHANNELS_PER_PAGE)
        elif action.getId() == ACTION_MOUSE_WHEEL_UP:
            self._moveUp(scrollEvent=True)
        elif action.getId() == ACTION_MOUSE_WHEEL_DOWN:
            self._moveDown(scrollEvent=True)
        elif action.getId() == KEY_HOME:
            # jump back to "now", snapped to the previous half hour
            self.viewStartDate = datetime.datetime.today()
            self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30,
                                                     seconds=self.viewStartDate.second)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif action.getId() in [KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU] and controlInFocus is not None:
            program = self._getProgramFromControl(controlInFocus)
            if program is not None:
                self._showContextMenu(program)
        else:
            xbmc.log('[script.ftvguide] Unhandled ActionId: ' + str(action.getId()), xbmc.LOGDEBUG)

    def onClick(self, controlId):
        """Handle clicks on the mouse navigation buttons and program cells."""
        if controlId in [self.C_MAIN_LOADING_CANCEL, self.C_MAIN_MOUSE_EXIT]:
            self.close()
            return

        if self.isClosing:
            return

        if controlId == self.C_MAIN_MOUSE_HOME:
            self.viewStartDate = datetime.datetime.today()
            self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30, seconds=self.viewStartDate.second)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
            return
        elif controlId == self.C_MAIN_MOUSE_LEFT:
            self.viewStartDate -= datetime.timedelta(hours=2)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
            return
        elif controlId == self.C_MAIN_MOUSE_UP:
            self._moveUp(count=CHANNELS_PER_PAGE)
            return
        elif controlId == self.C_MAIN_MOUSE_DOWN:
            self._moveDown(count=CHANNELS_PER_PAGE)
            return
        elif controlId == self.C_MAIN_MOUSE_RIGHT:
            self.viewStartDate += datetime.timedelta(hours=2)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
            return

        program = self._getProgramFromControl(self.getControl(controlId))
        if program is None:
            return

        if not self.playChannel(program.channel):
            result = self.streamingService.detectStream(program.channel)
            if not result:
                # could not detect stream, show context menu
                self._showContextMenu(program)
            elif type(result) == str:
                # one single stream detected, save it and start streaming
                self.database.setCustomStreamUrl(program.channel, result)
                self.playChannel(program.channel)
            else:
                # multiple matches, let user decide
                d = ChooseStreamAddonDialog(result)
                d.doModal()
                if d.stream is not None:
                    self.database.setCustomStreamUrl(program.channel, d.stream)
                    self.playChannel(program.channel)

    def _showContextMenu(self, program):
        """Show the popup menu for *program* and act on the chosen button."""
        self._hideControl(self.C_MAIN_MOUSE_CONTROLS)
        d = PopupMenu(self.database, program, not program.notificationScheduled)
        d.doModal()
        buttonClicked = d.buttonClicked
        del d

        if buttonClicked == PopupMenu.C_POPUP_REMIND:
            # toggle the reminder notification for this program
            if program.notificationScheduled:
                self.notification.removeNotification(program)
            else:
                self.notification.addNotification(program)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif buttonClicked == PopupMenu.C_POPUP_CHOOSE_STREAM:
            d = StreamSetupDialog(self.database, program.channel)
            d.doModal()
            del d
        elif buttonClicked == PopupMenu.C_POPUP_PLAY:
            self.playChannel(program.channel)
        elif buttonClicked == PopupMenu.C_POPUP_CHANNELS:
            d = ChannelsMenu(self.database)
            d.doModal()
            del d
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif buttonClicked == PopupMenu.C_POPUP_QUIT:
            self.close()
        elif buttonClicked == PopupMenu.C_POPUP_LIBMOV:
            xbmc.executebuiltin('ActivateWindow(Videos,videodb://movies/titles/)')
        elif buttonClicked == PopupMenu.C_POPUP_LIBTV:
            xbmc.executebuiltin('ActivateWindow(Videos,videodb://tvshows/titles/)')
        elif buttonClicked == PopupMenu.C_POPUP_VIDEOADDONS:
            xbmc.executebuiltin('ActivateWindow(Videos,addons://sources/video/)')

    def setFocusId(self, controlId):
        """Focus the control with *controlId* if it exists."""
        control = self.getControl(controlId)
        if control:
            self.setFocus(control)

    def setFocus(self, control):
        """Focus *control*, keeping focusPoint aligned with program cells."""
        debug('setFocus %d' % control.getId())
        if control in [elem.control for elem in self.controlAndProgramList]:
            debug('Focus before %s' % self.focusPoint)
            (left, top) = control.getPosition()
            # only move x when the remembered point falls outside the cell
            if left > self.focusPoint.x or left + control.getWidth() < self.focusPoint.x:
                self.focusPoint.x = left
            self.focusPoint.y = top + (control.getHeight() / 2)
            debug('New focus at %s' % self.focusPoint)
        super(TVGuide, self).setFocus(control)

    def onFocus(self, controlId):
        """Update the detail pane (title, time, description, artwork)."""
        try:
            controlInFocus = self.getControl(controlId)
        except Exception:
            return

        program = self._getProgramFromControl(controlInFocus)
        if program is None:
            return

        self.setControlLabel(self.C_MAIN_TITLE, '[B]%s[/B]' % program.title)
        if program.startDate or program.endDate:
            self.setControlLabel(self.C_MAIN_TIME,
                                 '[B]%s - %s[/B]' % (self.formatTime(program.startDate), self.formatTime(program.endDate)))
        else:
            self.setControlLabel(self.C_MAIN_TIME, '')

        if program.description:
            description = program.description
        else:
            description = strings(NO_DESCRIPTION)
        self.setControlText(self.C_MAIN_DESCRIPTION, description)

        if program.channel.logo is not None:
            self.setControlImage(self.C_MAIN_LOGO, program.channel.logo)
        else:
            self.setControlImage(self.C_MAIN_LOGO, '')

        if program.imageSmall is not None:
            self.setControlImage(self.C_MAIN_IMAGE, program.imageSmall)
        else:
            self.setControlImage(self.C_MAIN_IMAGE, 'tvguide-logo-epg.png')

        if ADDON.getSetting('program.background.enabled') == 'true' and program.imageLarge is not None:
            self.setControlImage(self.C_MAIN_BACKGROUND, program.imageLarge)

        # without OSD the preview playback stops when browsing the grid
        if not self.osdEnabled and self.player.isPlaying():
            self.player.stop()

    def _left(self, currentFocus):
        """Move focus left; page two hours back when at the grid edge."""
        control = self._findControlOnLeft(currentFocus)
        if control is not None:
            self.setFocus(control)
        elif control is None:
            self.viewStartDate -= datetime.timedelta(hours=2)
            self.focusPoint.x = self.epgView.right
            self.onRedrawEPG(self.channelIdx, self.viewStartDate, focusFunction=self._findControlOnLeft)

    def _right(self, currentFocus):
        """Move focus right; page two hours forward when at the grid edge."""
        control = self._findControlOnRight(currentFocus)
        if control is not None:
            self.setFocus(control)
        elif control is None:
            self.viewStartDate += datetime.timedelta(hours=2)
            self.focusPoint.x = self.epgView.left
            self.onRedrawEPG(self.channelIdx, self.viewStartDate, focusFunction=self._findControlOnRight)

    def _up(self, currentFocus):
        """Move focus up; page up a full screen when at the top row."""
        currentFocus.x = self.focusPoint.x
        control = self._findControlAbove(currentFocus)
        if control is not None:
            self.setFocus(control)
        elif control is None:
            self.focusPoint.y = self.epgView.bottom
            self.onRedrawEPG(self.channelIdx - CHANNELS_PER_PAGE, self.viewStartDate,
                             focusFunction=self._findControlAbove)

    def _down(self, currentFocus):
        """Move focus down; page down a full screen when at the bottom row."""
        currentFocus.x = self.focusPoint.x
        control = self._findControlBelow(currentFocus)
        if control is not None:
            self.setFocus(control)
        elif control is None:
            self.focusPoint.y = self.epgView.top
            self.onRedrawEPG(self.channelIdx + CHANNELS_PER_PAGE, self.viewStartDate,
                             focusFunction=self._findControlBelow)

    def _nextDay(self):
        """Shift the view one day forward."""
        self.viewStartDate += datetime.timedelta(days=1)
        self.onRedrawEPG(self.channelIdx, self.viewStartDate)

    def _previousDay(self):
        """Shift the view one day back."""
        self.viewStartDate -= datetime.timedelta(days=1)
        self.onRedrawEPG(self.channelIdx, self.viewStartDate)

    def _moveUp(self, count=1, scrollEvent=False):
        """Scroll *count* channels up; scrollEvent keeps the focus point."""
        if scrollEvent:
            self.onRedrawEPG(self.channelIdx - count, self.viewStartDate)
        else:
            self.focusPoint.y = self.epgView.bottom
            self.onRedrawEPG(self.channelIdx - count, self.viewStartDate, focusFunction=self._findControlAbove)

    def _moveDown(self, count=1, scrollEvent=False):
        """Scroll *count* channels down; scrollEvent keeps the focus point."""
        if scrollEvent:
            self.onRedrawEPG(self.channelIdx + count, self.viewStartDate)
        else:
            self.focusPoint.y = self.epgView.top
            self.onRedrawEPG(self.channelIdx + count, self.viewStartDate, focusFunction=self._findControlBelow)

    def _channelUp(self):
        """Switch playback to the next channel."""
        channel = self.database.getNextChannel(self.currentChannel)
        self.playChannel(channel)

    def _channelDown(self):
        """Switch playback to the previous channel."""
        channel = self.database.getPreviousChannel(self.currentChannel)
        self.playChannel(channel)

    def playChannel(self, channel):
        """Start playback of *channel*.

        plugin:// urls are handed to Kodi's builtin player (or another
        plugin when alternative playback is on); direct urls go through
        xbmc.Player.  Returns True when a stream url was available.
        """
        self.currentChannel = channel
        wasPlaying = self.player.isPlaying()
        url = self.database.getStreamUrl(channel)
        if url:
            if url[0:9] == 'plugin://':
                if self.alternativePlayback:
                    xbmc.executebuiltin('XBMC.RunPlugin(%s)' % url)
                elif self.osdEnabled:
                    xbmc.executebuiltin('PlayMedia(%s,1)' % url)
                else:
                    xbmc.executebuiltin('PlayMedia(%s)' % url)
            else:
                self.player.play(item=url, windowed=self.osdEnabled)
            if not wasPlaying:
                self._hideEpg()
            # watch for playback ending so the EPG can be restored
            threading.Timer(1, self.waitForPlayBackStopped).start()
        self.osdProgram = self.database.getCurrentProgram(self.currentChannel)
        return url is not None

    def waitForPlayBackStopped(self):
        """Background thread: poll until playback stops, then restore EPG."""
        # give the player up to ~10s to actually start
        for retry in range(0, 100):
            time.sleep(0.1)
            if self.player.isPlaying():
                break
        while self.player.isPlaying() and not xbmc.abortRequested and not self.isClosing:
            time.sleep(0.5)
        self.onPlayBackStopped()

    def _showOsd(self):
        """Populate and show the OSD overlay for the current osdProgram."""
        if not self.osdEnabled:
            return

        if self.mode != MODE_OSD:
            self.osdChannel = self.currentChannel

        if self.osdProgram is not None:
            self.setControlLabel(self.C_MAIN_OSD_TITLE, '[B]%s[/B]' % self.osdProgram.title)
            if self.osdProgram.startDate or self.osdProgram.endDate:
                self.setControlLabel(self.C_MAIN_OSD_TIME, '[B]%s - %s[/B]' % (
                    self.formatTime(self.osdProgram.startDate), self.formatTime(self.osdProgram.endDate)))
            else:
                self.setControlLabel(self.C_MAIN_OSD_TIME, '')
            self.setControlText(self.C_MAIN_OSD_DESCRIPTION, self.osdProgram.description)
            self.setControlLabel(self.C_MAIN_OSD_CHANNEL_TITLE, self.osdChannel.title)
            if self.osdProgram.channel.logo is not None:
                self.setControlImage(self.C_MAIN_OSD_CHANNEL_LOGO, self.osdProgram.channel.logo)
            else:
                self.setControlImage(self.C_MAIN_OSD_CHANNEL_LOGO, '')

        self.mode = MODE_OSD
        self._showControl(self.C_MAIN_OSD)

    def _hideOsd(self):
        """Hide the OSD and fall back to plain TV mode."""
        self.mode = MODE_TV
        self._hideControl(self.C_MAIN_OSD)

    def _hideEpg(self):
        """Hide the grid (and release its controls) for fullscreen playback."""
        self._hideControl(self.C_MAIN_EPG)
        self.mode = MODE_TV
        self._clearEpg()

    def onRedrawEPG(self, channelStart, startTime, focusFunction=None):
        """Rebuild the whole EPG grid for the given channel page and time.

        @param channelStart: index of the first channel row to show
        @param startTime: datetime of the leftmost grid column
        @param focusFunction: optional finder used to restore focus after
            the redraw (e.g. _findControlAbove when paging up)
        """
        if self.redrawingEPG or (self.database is not None and self.database.updateInProgress) or self.isClosing:
            debug('onRedrawEPG - already redrawing')
            return  # ignore redraw request while redrawing
        debug('onRedrawEPG')
        self.redrawingEPG = True
        self.mode = MODE_EPG
        self._showControl(self.C_MAIN_EPG)
        self.updateTimebar(scheduleTimer=False)

        # show Loading screen
        self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(CALCULATING_REMAINING_TIME))
        self._showControl(self.C_MAIN_LOADING)
        self.setFocusId(self.C_MAIN_LOADING_CANCEL)

        # remove existing controls
        self._clearEpg()

        try:
            self.channelIdx, channels, programs = self.database.getEPGView(channelStart, startTime, self.onSourceProgressUpdate, clearExistingProgramList=False)
        except src.SourceException:
            self.onEPGLoadError()
            return
        channelsWithoutPrograms = list(channels)

        # date and time row
        self.setControlLabel(self.C_MAIN_DATE, self.formatDate(self.viewStartDate, False))
        self.setControlLabel(self.C_MAIN_DATE_LONG, self.formatDate(self.viewStartDate, True))
        for col in range(1, 5):
            self.setControlLabel(4000 + col, self.formatTime(startTime))
            startTime += HALF_HOUR

        if programs is None:
            self.onEPGLoadError()
            return

        # set channel logo or text
        showLogo = ADDON.getSetting('logos.enabled') == 'true'
        for idx in range(0, CHANNELS_PER_PAGE):
            if idx >= len(channels):
                self.setControlImage(4110 + idx, ' ')
                self.setControlLabel(4010 + idx, ' ')
            else:
                channel = channels[idx]
                self.setControlLabel(4010 + idx, channel.title)
                if (channel.logo is not None and showLogo == True):
                    self.setControlImage(4110 + idx, channel.logo)
                else:
                    self.setControlImage(4110 + idx, ' ')

        # build one button per program, clipped to the visible 2-hour window
        for program in programs:
            idx = channels.index(program.channel)
            if program.channel in channelsWithoutPrograms:
                channelsWithoutPrograms.remove(program.channel)

            startDelta = program.startDate - self.viewStartDate
            stopDelta = program.endDate - self.viewStartDate

            cellStart = self._secondsToXposition(startDelta.seconds)
            if startDelta.days < 0:
                # program started before the window; clip to the left edge
                cellStart = self.epgView.left
            cellWidth = self._secondsToXposition(stopDelta.seconds) - cellStart
            if cellStart + cellWidth > self.epgView.right:
                cellWidth = self.epgView.right - cellStart

            if cellWidth > 1:
                if program.notificationScheduled:
                    noFocusTexture = 'tvguide-program-red.png'
                    focusTexture = 'tvguide-program-red-focus.png'
                else:
                    noFocusTexture = 'tvguide-program-grey.png'
                    focusTexture = 'tvguide-program-grey-focus.png'

                if cellWidth < 25:
                    title = ''  # Text will overflow outside the button if it is too narrow
                else:
                    title = program.title

                control = xbmcgui.ControlButton(
                    cellStart,
                    self.epgView.top + self.epgView.cellHeight * idx,
                    cellWidth - 2,
                    self.epgView.cellHeight - 2,
                    title,
                    noFocusTexture=noFocusTexture,
                    focusTexture=focusTexture
                )
                self.controlAndProgramList.append(ControlAndProgram(control, program))

        # full-width placeholder rows for channels with no program data
        for channel in channelsWithoutPrograms:
            idx = channels.index(channel)
            control = xbmcgui.ControlButton(
                self.epgView.left,
                self.epgView.top + self.epgView.cellHeight * idx,
                (self.epgView.right - self.epgView.left) - 2,
                self.epgView.cellHeight - 2,
                strings(NO_PROGRAM_AVAILABLE),
                noFocusTexture='tvguide-program-grey.png',
                focusTexture='tvguide-program-grey-focus.png'
            )
            program = src.Program(channel, strings(NO_PROGRAM_AVAILABLE), None, None, None)
            self.controlAndProgramList.append(ControlAndProgram(control, program))

        # add program controls
        if focusFunction is None:
            focusFunction = self._findControlAt
        focusControl = focusFunction(self.focusPoint)
        controls = [elem.control for elem in self.controlAndProgramList]
        self.addControls(controls)
        if focusControl is not None:
            debug('onRedrawEPG - setFocus %d' % focusControl.getId())
            self.setFocus(focusControl)
        self.ignoreMissingControlIds.extend([elem.control.getId() for elem in self.controlAndProgramList])
        if focusControl is None and len(self.controlAndProgramList) > 0:
            self.setFocus(self.controlAndProgramList[0].control)

        self._hideControl(self.C_MAIN_LOADING)
        self.redrawingEPG = False

    def _clearEpg(self):
        """Remove all program button controls from the window."""
        controls = [elem.control for elem in self.controlAndProgramList]
        try:
            self.removeControls(controls)
        except RuntimeError:
            # bulk removal failed; fall back to removing one at a time
            for elem in self.controlAndProgramList:
                try:
                    self.removeControl(elem.control)
                except RuntimeError:
                    pass  # happens if we try to remove a control that doesn't exist
        del self.controlAndProgramList[:]

    def onEPGLoadError(self):
        """Report an EPG data failure to the user and close the window."""
        self.redrawingEPG = False
        self._hideControl(self.C_MAIN_LOADING)
        xbmcgui.Dialog().ok(strings(LOAD_ERROR_TITLE), strings(LOAD_ERROR_LINE1), strings(LOAD_ERROR_LINE2))
        self.close()

    def onSourceNotConfigured(self):
        """Report a missing source configuration and close the window."""
        self.redrawingEPG = False
        self._hideControl(self.C_MAIN_LOADING)
        xbmcgui.Dialog().ok(strings(LOAD_ERROR_TITLE), strings(LOAD_ERROR_LINE1), strings(CONFIGURATION_ERROR_LINE2))
        self.close()

    def isSourceInitializationCancelled(self):
        """Callback for the database: True aborts the initialization."""
        return xbmc.abortRequested or self.isClosing

    def onSourceInitialized(self, success):
        """Callback once the database finished initializing."""
        if success:
            self.notification = Notification(self.database, ADDON.getAddonInfo('path'))
            self.onRedrawEPG(0, self.viewStartDate)

    def onSourceProgressUpdate(self, percentageComplete):
        """Update the loading bar and ETA label; return False to cancel."""
        control = self.getControl(self.C_MAIN_LOADING_PROGRESS)
        if percentageComplete < 1:
            if control:
                control.setPercent(1)
            self.progressStartTime = datetime.datetime.now()
            self.progressPreviousPercentage = percentageComplete
        elif percentageComplete != self.progressPreviousPercentage:
            if control:
                control.setPercent(percentageComplete)
            self.progressPreviousPercentage = percentageComplete
            delta = datetime.datetime.now() - self.progressStartTime

            if percentageComplete < 20:
                # too little data for a meaningful linear extrapolation
                self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(CALCULATING_REMAINING_TIME))
            else:
                secondsLeft = int(delta.seconds) / float(percentageComplete) * (100.0 - percentageComplete)
                if secondsLeft > 30:
                    secondsLeft -= secondsLeft % 10
                self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(TIME_LEFT) % secondsLeft)

        return not xbmc.abortRequested and not self.isClosing

    def onPlayBackStopped(self):
        """Restore the EPG after playback has ended."""
        if not self.player.isPlaying() and not self.isClosing:
            self._hideControl(self.C_MAIN_OSD)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)

    def _secondsToXposition(self, seconds):
        """Map an offset in seconds (0..7200) to a grid x coordinate."""
        return self.epgView.left + (seconds * self.epgView.width / 7200)

    def _findControlOnRight(self, point):
        """Return the nearest program cell right of *point* in the same row."""
        distanceToNearest = 10000
        nearestControl = None

        for elem in self.controlAndProgramList:
            control = elem.control
            (left, top) = control.getPosition()
            x = left + (control.getWidth() / 2)
            y = top + (control.getHeight() / 2)

            if point.x < x and point.y == y:
                distance = abs(point.x - x)
                if distance < distanceToNearest:
                    distanceToNearest = distance
                    nearestControl = control

        return nearestControl

    def _findControlOnLeft(self, point):
        """Return the nearest program cell left of *point* in the same row."""
        distanceToNearest = 10000
        nearestControl = None

        for elem in self.controlAndProgramList:
            control = elem.control
            (left, top) = control.getPosition()
            x = left + (control.getWidth() / 2)
            y = top + (control.getHeight() / 2)

            if point.x > x and point.y == y:
                distance = abs(point.x - x)
                if distance < distanceToNearest:
                    distanceToNearest = distance
                    nearestControl = control

        return nearestControl

    def _findControlBelow(self, point):
        """Return the closest cell below *point* that overlaps its x range."""
        nearestControl = None

        for elem in self.controlAndProgramList:
            control = elem.control
            (leftEdge, top) = control.getPosition()
            y = top + (control.getHeight() / 2)

            if point.y < y:
                rightEdge = leftEdge + control.getWidth()
                if leftEdge <= point.x < rightEdge and (nearestControl is None or nearestControl.getPosition()[1] > top):
                    nearestControl = control

        return nearestControl

    def _findControlAbove(self, point):
        """Return the closest cell above *point* that overlaps its x range."""
        nearestControl = None

        for elem in self.controlAndProgramList:
            control = elem.control
            (leftEdge, top) = control.getPosition()
            y = top + (control.getHeight() / 2)

            if point.y > y:
                rightEdge = leftEdge + control.getWidth()
                if leftEdge <= point.x < rightEdge and (nearestControl is None or nearestControl.getPosition()[1] < top):
                    nearestControl = control

        return nearestControl

    def _findControlAt(self, point):
        """Return the program cell whose bounding box contains *point*."""
        for elem in self.controlAndProgramList:
            control = elem.control
            (left, top) = control.getPosition()
            bottom = top + control.getHeight()
            right = left + control.getWidth()

            if left <= point.x <= right and top <= point.y <= bottom:
                return control

        return None

    def _getProgramFromControl(self, control):
        """Return the Program paired with *control*, or None."""
        for elem in self.controlAndProgramList:
            if elem.control == control:
                return elem.program
        return None

    def _hideControl(self, *controlIds):
        """
        Visibility is inverted in skin
        """
        for controlId in controlIds:
            control = self.getControl(controlId)
            if control:
                control.setVisible(True)

    def _showControl(self, *controlIds):
        """
        Visibility is inverted in skin
        """
        for controlId in controlIds:
            control = self.getControl(controlId)
            if control:
                control.setVisible(False)

    def formatTime(self, timestamp):
        """Format a datetime as a short time using the Kodi region setting."""
        if timestamp:
            format = xbmc.getRegion('time').replace(':%S', '').replace('%H%H', '%H')
            return timestamp.strftime(format)
        else:
            return ''

    def formatDate(self, timestamp, longdate=False):
        """Format a datetime as a date using the Kodi region setting."""
        if timestamp:
            if longdate == True:
                format = xbmc.getRegion('datelong')
            else:
                format = xbmc.getRegion('dateshort')
            return timestamp.strftime(format)
        else:
            return ''

    def setControlImage(self, controlId, image):
        """Set the image of a control, if the control exists."""
        control = self.getControl(controlId)
        if control:
            control.setImage(image.encode('utf-8'))

    def setControlLabel(self, controlId, label):
        """Set the label of a control, if the control exists."""
        control = self.getControl(controlId)
        if control and label:
            control.setLabel(label)

    def setControlText(self, controlId, text):
        """Set the text of a control, if the control exists."""
        control = self.getControl(controlId)
        if control:
            control.setText(text)

    def updateTimebar(self, scheduleTimer=True):
        """Position the "now" marker; reschedules itself every second."""
        # move timebar to current time
        timeDelta = datetime.datetime.today() - self.viewStartDate
        control = self.getControl(self.C_MAIN_TIMEBAR)
        if control:
            (x, y) = control.getPosition()
            try:
                # Sometimes raises:
                # exceptions.RuntimeError: Unknown exception thrown from the call "setVisible"
                control.setVisible(timeDelta.days == 0)
            except:
                pass
            control.setPosition(self._secondsToXposition(timeDelta.seconds), y)

        if scheduleTimer and not xbmc.abortRequested and not self.isClosing:
            threading.Timer(1, self.updateTimebar).start()
class PopupMenu(xbmcgui.WindowXMLDialog):
    """Context menu dialog for a focused program.

    The clicked button id is exposed through self.buttonClicked after
    doModal() returns; the caller (TVGuide._showContextMenu) acts on it.
    Control ids must match 'script-tvguide-menu.xml'.
    """
    C_POPUP_PLAY = 4000
    C_POPUP_CHOOSE_STREAM = 4001
    C_POPUP_REMIND = 4002
    C_POPUP_CHANNELS = 4003
    C_POPUP_QUIT = 4004
    C_POPUP_CHANNEL_LOGO = 4100
    C_POPUP_CHANNEL_TITLE = 4101
    C_POPUP_PROGRAM_TITLE = 4102
    C_POPUP_LIBMOV = 80000
    C_POPUP_LIBTV = 80001
    C_POPUP_VIDEOADDONS = 80002

    def __new__(cls, database, program, showRemind):
        return super(PopupMenu, cls).__new__(cls, 'script-tvguide-menu.xml', ADDON.getAddonInfo('path'), SKIN)

    def __init__(self, database, program, showRemind):
        """
        @type database: source.Database
        @param program: the program the menu was opened for
        @type program: source.Program
        @param showRemind: True shows "remind me", False "don't remind me"
        """
        super(PopupMenu, self).__init__()
        self.database = database
        self.program = program
        self.showRemind = showRemind
        self.buttonClicked = None

    def onInit(self):
        """Populate the menu labels and enable/disable buttons."""
        playControl = self.getControl(self.C_POPUP_PLAY)
        remindControl = self.getControl(self.C_POPUP_REMIND)
        channelLogoControl = self.getControl(self.C_POPUP_CHANNEL_LOGO)
        channelTitleControl = self.getControl(self.C_POPUP_CHANNEL_TITLE)
        programTitleControl = self.getControl(self.C_POPUP_PROGRAM_TITLE)

        playControl.setLabel(strings(WATCH_CHANNEL, self.program.channel.title))
        if not self.program.channel.isPlayable():
            # no stream known yet; push the user towards stream setup
            playControl.setEnabled(False)
            self.setFocusId(self.C_POPUP_CHOOSE_STREAM)
        if self.database.getCustomStreamUrl(self.program.channel):
            # a custom stream exists, so the button removes it instead
            chooseStrmControl = self.getControl(self.C_POPUP_CHOOSE_STREAM)
            chooseStrmControl.setLabel(strings(REMOVE_STRM_FILE))

        if self.program.channel.logo is not None:
            channelLogoControl.setImage(self.program.channel.logo)
            channelTitleControl.setVisible(False)
        else:
            channelTitleControl.setLabel(self.program.channel.title)
            channelLogoControl.setVisible(False)
        programTitleControl.setLabel(self.program.title)

        if self.program.startDate:
            remindControl.setEnabled(True)
            if self.showRemind:
                remindControl.setLabel(strings(REMIND_PROGRAM))
            else:
                remindControl.setLabel(strings(DONT_REMIND_PROGRAM))
        else:
            remindControl.setEnabled(False)

    def onAction(self, action):
        """Close the dialog on back/previous-menu/context-menu."""
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
            self.close()
            return

    def onClick(self, controlId):
        """Handle a button press; removing a custom stream stays in-dialog."""
        if controlId == self.C_POPUP_CHOOSE_STREAM and self.database.getCustomStreamUrl(self.program.channel):
            self.database.deleteCustomStreamUrl(self.program.channel)
            chooseStrmControl = self.getControl(self.C_POPUP_CHOOSE_STREAM)
            chooseStrmControl.setLabel(strings(CHOOSE_STRM_FILE))

            if not self.program.channel.isPlayable():
                playControl = self.getControl(self.C_POPUP_PLAY)
                playControl.setEnabled(False)
        else:
            # remember the choice for the caller and close
            self.buttonClicked = controlId
            self.close()

    def onFocus(self, controlId):
        pass
class ChannelsMenu(xbmcgui.WindowXMLDialog):
    """Dialog for reordering channels and toggling their visibility.

    Navigation: selecting a channel toggles visibility; entering "selection"
    mode (left/context) lets the user move the channel up/down, swapping it
    with its neighbours.
    """
    # Control ids from script-tvguide-channels.xml.
    C_CHANNELS_LIST = 6000
    C_CHANNELS_SELECTION_VISIBLE = 6001
    C_CHANNELS_SELECTION = 6002
    C_CHANNELS_SAVE = 6003
    C_CHANNELS_CANCEL = 6004

    def __new__(cls, database):
        return super(ChannelsMenu, cls).__new__(cls, 'script-tvguide-channels.xml', ADDON.getAddonInfo('path'), SKIN)

    def __init__(self, database):
        """
        @type database: source.Database
        """
        super(ChannelsMenu, self).__init__()
        self.database = database
        self.channelList = database.getChannelList(onlyVisible=False)
        self.swapInProgress = False
        self.selectedChannel = 0

    @staticmethod
    def _channelIcon(channel):
        # Single source of truth for the visible/hidden icon mapping
        # (previously duplicated in onClick, updateChannelList and updateListItem).
        if channel.visible:
            return 'tvguide-channel-visible.png'
        return 'tvguide-channel-hidden.png'

    def onInit(self):
        self.updateChannelList()
        self.setFocusId(self.C_CHANNELS_LIST)

    def onAction(self, action):
        if action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK]:
            self.close()
            return
        if self.getFocusId() == self.C_CHANNELS_LIST and action.getId() in [ACTION_PREVIOUS_MENU, KEY_CONTEXT_MENU, ACTION_LEFT]:
            # Enter selection mode for the highlighted channel.
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            self.selectedChannel = idx
            buttonControl = self.getControl(self.C_CHANNELS_SELECTION)
            buttonControl.setLabel('[B]%s[/B]' % self.channelList[idx].title)
            self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(False)
            self.setFocusId(self.C_CHANNELS_SELECTION)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() in [ACTION_RIGHT, ACTION_SELECT_ITEM]:
            # Leave selection mode without moving the channel.
            self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(True)
            xbmc.sleep(350)
            self.setFocusId(self.C_CHANNELS_LIST)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() in [ACTION_PREVIOUS_MENU, KEY_CONTEXT_MENU]:
            # Drop the channel at the currently highlighted position.
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            self.swapChannels(self.selectedChannel, idx)
            self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(True)
            xbmc.sleep(350)
            self.setFocusId(self.C_CHANNELS_LIST)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() == ACTION_UP:
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            if idx > 0:
                self.swapChannels(idx, idx - 1)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() == ACTION_DOWN:
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            if idx < listControl.size() - 1:
                self.swapChannels(idx, idx + 1)

    def onClick(self, controlId):
        if controlId == self.C_CHANNELS_LIST:
            # Toggle visibility of the clicked channel and refresh its icon.
            listControl = self.getControl(self.C_CHANNELS_LIST)
            item = listControl.getSelectedItem()
            channel = self.channelList[int(item.getProperty('idx'))]
            channel.visible = not channel.visible
            item.setIconImage(self._channelIcon(channel))
        elif controlId == self.C_CHANNELS_SAVE:
            self.database.saveChannelList(self.close, self.channelList)
        elif controlId == self.C_CHANNELS_CANCEL:
            self.close()

    def onFocus(self, controlId):
        pass

    def updateChannelList(self):
        """Rebuild the list control from self.channelList."""
        listControl = self.getControl(self.C_CHANNELS_LIST)
        listControl.reset()
        for idx, channel in enumerate(self.channelList):
            item = xbmcgui.ListItem('%3d. %s' % (idx + 1, channel.title), iconImage=self._channelIcon(channel))
            item.setProperty('idx', str(idx))
            listControl.addItem(item)

    def updateListItem(self, idx, item):
        """Refresh a single list item in place after a swap."""
        channel = self.channelList[idx]
        item.setLabel('%3d. %s' % (idx + 1, channel.title))
        item.setIconImage(self._channelIcon(channel))
        item.setProperty('idx', str(idx))

    def swapChannels(self, fromIdx, toIdx):
        """Swap two channels, recompute their weights and update the UI."""
        if self.swapInProgress:
            return  # debounce repeated key presses while a swap is running
        self.swapInProgress = True
        c = self.channelList[fromIdx]
        self.channelList[fromIdx] = self.channelList[toIdx]
        self.channelList[toIdx] = c
        # recalculate weight
        for idx, channel in enumerate(self.channelList):
            channel.weight = idx
        listControl = self.getControl(self.C_CHANNELS_LIST)
        self.updateListItem(fromIdx, listControl.getListItem(fromIdx))
        self.updateListItem(toIdx, listControl.getListItem(toIdx))
        listControl.selectItem(toIdx)
        xbmc.sleep(50)
        self.swapInProgress = False
class StreamSetupDialog(xbmcgui.WindowXMLDialog):
    """Dialog with three tabs (strm file / Kodi favourites / other add-ons)
    used to pick a custom stream url for a channel, with a live preview."""
    # Control ids from script-tvguide-streamsetup.xml.
    C_STREAM_STRM_TAB = 101
    C_STREAM_FAVOURITES_TAB = 102
    C_STREAM_ADDONS_TAB = 103
    C_STREAM_STRM_BROWSE = 1001
    C_STREAM_STRM_FILE_LABEL = 1005
    C_STREAM_STRM_PREVIEW = 1002
    C_STREAM_STRM_OK = 1003
    C_STREAM_STRM_CANCEL = 1004
    C_STREAM_FAVOURITES = 2001
    C_STREAM_FAVOURITES_PREVIEW = 2002
    C_STREAM_FAVOURITES_OK = 2003
    C_STREAM_FAVOURITES_CANCEL = 2004
    C_STREAM_ADDONS = 3001
    C_STREAM_ADDONS_STREAMS = 3002
    C_STREAM_ADDONS_NAME = 3003
    C_STREAM_ADDONS_DESCRIPTION = 3004
    C_STREAM_ADDONS_PREVIEW = 3005
    C_STREAM_ADDONS_OK = 3006
    C_STREAM_ADDONS_CANCEL = 3007
    # Hidden label whose text records which tab is currently active.
    C_STREAM_VISIBILITY_MARKER = 100
    VISIBLE_STRM = 'strm'
    VISIBLE_FAVOURITES = 'favourites'
    VISIBLE_ADDONS = 'addons'

    def __new__(cls, database, channel):
        return super(StreamSetupDialog, cls).__new__(cls, 'script-tvguide-streamsetup.xml', ADDON.getAddonInfo('path'), SKIN)

    def __init__(self, database, channel):
        """
        @type database: source.Database
        @type channel:source.Channel
        """
        super(StreamSetupDialog, self).__init__()
        self.database = database
        self.channel = channel
        self.player = xbmc.Player()
        # Last add-on shown in the detail pane; avoids redundant reloads.
        self.previousAddonId = None
        # strm file chosen via the browse dialog, if any.
        self.strmFile = None
        self.streamingService = streaming.StreamsService(ADDON)

    def close(self):
        # Stop any running preview before the window goes away.
        if self.player.isPlaying():
            self.player.stop()
        super(StreamSetupDialog, self).close()

    def onInit(self):
        # Default to the strm tab and populate the favourites and add-on lists.
        self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_STRM)
        favourites = self.streamingService.loadFavourites()
        items = list()
        for label, value in favourites:
            item = xbmcgui.ListItem(label)
            item.setProperty('stream', value)
            items.append(item)
        listControl = self.getControl(StreamSetupDialog.C_STREAM_FAVOURITES)
        listControl.addItems(items)
        items = list()
        for id in self.streamingService.getAddons():
            try:
                addon = xbmcaddon.Addon(id)  # raises Exception if addon is not installed
                item = xbmcgui.ListItem(addon.getAddonInfo('name'), iconImage=addon.getAddonInfo('icon'))
                item.setProperty('addon_id', id)
                items.append(item)
            except Exception:
                # Skip add-ons that are known to the service but not installed.
                pass
        listControl = self.getControl(StreamSetupDialog.C_STREAM_ADDONS)
        listControl.addItems(items)
        self.updateAddonInfo()

    def onAction(self, action):
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
            self.close()
            return
        elif self.getFocusId() == self.C_STREAM_ADDONS:
            # Keep the detail pane in sync while scrolling the add-on list.
            self.updateAddonInfo()

    def onClick(self, controlId):
        if controlId == self.C_STREAM_STRM_BROWSE:
            stream = xbmcgui.Dialog().browse(1, ADDON.getLocalizedString(30304), 'video', '.strm')
            if stream:
                self.database.setCustomStreamUrl(self.channel, stream)
                self.getControl(self.C_STREAM_STRM_FILE_LABEL).setText(stream)
                self.strmFile = stream
        elif controlId == self.C_STREAM_ADDONS_OK:
            listControl = self.getControl(self.C_STREAM_ADDONS_STREAMS)
            item = listControl.getSelectedItem()
            if item:
                stream = item.getProperty('stream')
                self.database.setCustomStreamUrl(self.channel, stream)
            self.close()
        elif controlId == self.C_STREAM_FAVOURITES_OK:
            listControl = self.getControl(self.C_STREAM_FAVOURITES)
            item = listControl.getSelectedItem()
            if item:
                stream = item.getProperty('stream')
                self.database.setCustomStreamUrl(self.channel, stream)
            self.close()
        elif controlId == self.C_STREAM_STRM_OK:
            self.database.setCustomStreamUrl(self.channel, self.strmFile)
            self.close()
        elif controlId in [self.C_STREAM_ADDONS_CANCEL, self.C_STREAM_FAVOURITES_CANCEL, self.C_STREAM_STRM_CANCEL]:
            self.close()
        elif controlId in [self.C_STREAM_ADDONS_PREVIEW, self.C_STREAM_FAVOURITES_PREVIEW, self.C_STREAM_STRM_PREVIEW]:
            if self.player.isPlaying():
                # A preview is running: the button acts as "stop preview".
                self.player.stop()
                self.getControl(self.C_STREAM_ADDONS_PREVIEW).setLabel(strings(PREVIEW_STREAM))
                self.getControl(self.C_STREAM_FAVOURITES_PREVIEW).setLabel(strings(PREVIEW_STREAM))
                self.getControl(self.C_STREAM_STRM_PREVIEW).setLabel(strings(PREVIEW_STREAM))
                return
            # Pick the stream from whichever tab is currently visible.
            stream = None
            visible = self.getControl(self.C_STREAM_VISIBILITY_MARKER).getLabel()
            if visible == self.VISIBLE_ADDONS:
                listControl = self.getControl(self.C_STREAM_ADDONS_STREAMS)
                item = listControl.getSelectedItem()
                if item:
                    stream = item.getProperty('stream')
            elif visible == self.VISIBLE_FAVOURITES:
                listControl = self.getControl(self.C_STREAM_FAVOURITES)
                item = listControl.getSelectedItem()
                if item:
                    stream = item.getProperty('stream')
            elif visible == self.VISIBLE_STRM:
                stream = self.strmFile
            if stream is not None:
                self.player.play(item=stream, windowed=True)
                # NOTE(review): isPlaying() immediately after play() may race
                # with playback startup — the labels only flip if it started fast.
                if self.player.isPlaying():
                    self.getControl(self.C_STREAM_ADDONS_PREVIEW).setLabel(strings(STOP_PREVIEW))
                    self.getControl(self.C_STREAM_FAVOURITES_PREVIEW).setLabel(strings(STOP_PREVIEW))
                    self.getControl(self.C_STREAM_STRM_PREVIEW).setLabel(strings(STOP_PREVIEW))

    def onFocus(self, controlId):
        # Track the active tab via the hidden visibility marker label.
        if controlId == self.C_STREAM_STRM_TAB:
            self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_STRM)
        elif controlId == self.C_STREAM_FAVOURITES_TAB:
            self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_FAVOURITES)
        elif controlId == self.C_STREAM_ADDONS_TAB:
            self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_ADDONS)

    def updateAddonInfo(self):
        """Refresh the add-on detail pane and its stream list for the selected add-on."""
        listControl = self.getControl(self.C_STREAM_ADDONS)
        item = listControl.getSelectedItem()
        if item is None:
            return
        if item.getProperty('addon_id') == self.previousAddonId:
            return  # same add-on still selected; nothing to reload
        self.previousAddonId = item.getProperty('addon_id')
        addon = xbmcaddon.Addon(id=item.getProperty('addon_id'))
        self.getControl(self.C_STREAM_ADDONS_NAME).setLabel('[B]%s[/B]' % addon.getAddonInfo('name'))
        self.getControl(self.C_STREAM_ADDONS_DESCRIPTION).setText(addon.getAddonInfo('description'))
        streams = self.streamingService.getAddonStreams(item.getProperty('addon_id'))
        items = list()
        for (label, stream) in streams:
            item = xbmcgui.ListItem(label)
            item.setProperty('stream', stream)
            items.append(item)
        listControl = self.getControl(StreamSetupDialog.C_STREAM_ADDONS_STREAMS)
        listControl.reset()
        listControl.addItems(items)
class ChooseStreamAddonDialog(xbmcgui.WindowXMLDialog):
    """Let the user pick one stream among those detected in other add-ons."""
    C_SELECTION_LIST = 1000

    def __new__(cls, addons):
        return super(ChooseStreamAddonDialog, cls).__new__(cls, 'script-tvguide-streamaddon.xml', ADDON.getAddonInfo('path'), SKIN)

    def __init__(self, addons):
        super(ChooseStreamAddonDialog, self).__init__()
        self.addons = addons
        # Chosen stream url; stays None until the user clicks an entry.
        self.stream = None

    def onInit(self):
        listItems = []
        for addonId, label, url in self.addons:
            addon = xbmcaddon.Addon(addonId)
            listItem = xbmcgui.ListItem(label, addon.getAddonInfo('name'), addon.getAddonInfo('icon'))
            listItem.setProperty('stream', url)
            listItems.append(listItem)
        listControl = self.getControl(self.C_SELECTION_LIST)
        listControl.addItems(listItems)
        self.setFocus(listControl)

    def onAction(self, action):
        backActions = (ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK)
        if action.getId() in backActions:
            self.close()

    def onClick(self, controlId):
        if controlId != self.C_SELECTION_LIST:
            return
        listControl = self.getControl(self.C_SELECTION_LIST)
        self.stream = listControl.getSelectedItem().getProperty('stream')
        self.close()

    def onFocus(self, controlId):
        """No focus handling is needed for this dialog."""
        pass
| 38.944166 | 161 | 0.614282 |
import datetime
import threading
import time
import xbmc
import xbmcgui
import source as src
from notification import Notification
from strings import *
import streaming
# Set to True to enable verbose xbmc.log() output via debug().
DEBUG = False
# Top-level UI modes of the TVGuide window.
MODE_EPG = 'EPG'
MODE_TV = 'TV'
MODE_OSD = 'OSD'
# Kodi action ids (see xbmcgui action constants).
ACTION_LEFT = 1
ACTION_RIGHT = 2
ACTION_UP = 3
ACTION_DOWN = 4
ACTION_PAGE_UP = 5
ACTION_PAGE_DOWN = 6
ACTION_SELECT_ITEM = 7
ACTION_PARENT_DIR = 9
ACTION_PREVIOUS_MENU = 10
ACTION_SHOW_INFO = 11
ACTION_NEXT_ITEM = 14
ACTION_PREV_ITEM = 15
ACTION_MOUSE_WHEEL_UP = 104
ACTION_MOUSE_WHEEL_DOWN = 105
ACTION_MOUSE_MOVE = 107
# Key/button codes.
KEY_NAV_BACK = 92
KEY_CONTEXT_MENU = 117
KEY_HOME = 159
KEY_ESC = 61467
# Number of channel rows visible in the EPG grid at once.
CHANNELS_PER_PAGE = 8
# Width of one EPG column (the grid shows four half-hour columns).
HALF_HOUR = datetime.timedelta(minutes=30)
SKIN = ADDON.getSetting('skin')
def debug(s):
    """Log *s* at debug level when the module-wide DEBUG flag is on."""
    if DEBUG:
        xbmc.log(str(s), xbmc.LOGDEBUG)
class Point(object):
    """Mutable 2-D pixel coordinate, used to remember the EPG focus position."""

    def __init__(self):
        # Both coordinates start at the origin.
        self.x = 0
        self.y = 0

    def __repr__(self):
        return 'Point(x=%d, y=%d)' % (self.x, self.y)
class EPGView(object):
    """Pixel geometry of the EPG grid area (bounds plus per-row cell height)."""

    def __init__(self):
        self.top = 0
        self.left = 0
        self.right = 0
        self.bottom = 0
        self.width = 0
        self.cellHeight = 0
class ControlAndProgram(object):
    """Pairs an on-screen button control with the program it represents."""

    def __init__(self, control, program):
        self.control = control
        self.program = program
class TVGuide(xbmcgui.WindowXML):
C_MAIN_DATE_LONG = 3999
C_MAIN_DATE = 4000
C_MAIN_TITLE = 4020
C_MAIN_TIME = 4021
C_MAIN_DESCRIPTION = 4022
C_MAIN_IMAGE = 4023
C_MAIN_LOGO = 4024
C_MAIN_TIMEBAR = 4100
C_MAIN_LOADING = 4200
C_MAIN_LOADING_PROGRESS = 4201
C_MAIN_LOADING_TIME_LEFT = 4202
C_MAIN_LOADING_CANCEL = 4203
C_MAIN_MOUSE_CONTROLS = 4300
C_MAIN_MOUSE_HOME = 4301
C_MAIN_MOUSE_LEFT = 4302
C_MAIN_MOUSE_UP = 4303
C_MAIN_MOUSE_DOWN = 4304
C_MAIN_MOUSE_RIGHT = 4305
C_MAIN_MOUSE_EXIT = 4306
C_MAIN_BACKGROUND = 4600
C_MAIN_EPG = 5000
C_MAIN_EPG_VIEW_MARKER = 5001
C_MAIN_OSD = 6000
C_MAIN_OSD_TITLE = 6001
C_MAIN_OSD_TIME = 6002
C_MAIN_OSD_DESCRIPTION = 6003
C_MAIN_OSD_CHANNEL_LOGO = 6004
C_MAIN_OSD_CHANNEL_TITLE = 6005
    def __new__(cls):
        # WindowXML needs the skin XML filename, add-on path and skin at
        # allocation time, so they are supplied here rather than in __init__.
        return super(TVGuide, cls).__new__(cls, 'script-tvguide-main.xml', ADDON.getAddonInfo('path'), SKIN)
    def __init__(self):
        super(TVGuide, self).__init__()
        self.notification = None  # Notification scheduler; created once the database is ready
        self.redrawingEPG = False  # guards against re-entrant EPG redraws
        self.isClosing = False
        self.controlAndProgramList = list()  # ControlAndProgram entries currently on screen
        self.ignoreMissingControlIds = list()  # dynamic control ids getControl() may legitimately miss
        self.channelIdx = 0  # index of the first channel visible in the grid
        self.focusPoint = Point()  # last focused pixel position within the grid
        self.epgView = EPGView()
        self.streamingService = streaming.StreamsService(ADDON)
        self.player = xbmc.Player()
        self.database = None
        self.mode = MODE_EPG
        self.currentChannel = None
        # The OSD only makes sense for windowed playback; alternative playback
        # hands the url to the source plugin, which disables it.
        self.osdEnabled = ADDON.getSetting('enable.osd') == 'true' and ADDON.getSetting(
            'alternative.playback') != 'true'
        self.alternativePlayback = ADDON.getSetting('alternative.playback') == 'true'
        self.osdChannel = None
        self.osdProgram = None
        # Snap the initial view to the previous half-hour boundary.
        self.viewStartDate = datetime.datetime.today()
        self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30,
                                                 seconds=self.viewStartDate.second)
def getControl(self, controlId):
try:
return super(TVGuide, self).getControl(controlId)
except:
if controlId in self.ignoreMissingControlIds:
return None
if not self.isClosing:
self.close()
return None
def close(self):
if not self.isClosing:
self.isClosing = True
if self.player.isPlaying():
self.player.stop()
if self.database:
self.database.close(super(TVGuide, self).close)
else:
super(TVGuide, self).close()
    def onInit(self):
        # First-time window setup: show the loading overlay, measure the EPG
        # grid from the invisible view-marker control, then bring up the database.
        self._hideControl(self.C_MAIN_MOUSE_CONTROLS, self.C_MAIN_OSD)
        self._showControl(self.C_MAIN_EPG, self.C_MAIN_LOADING)
        self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(BACKGROUND_UPDATE_IN_PROGRESS))
        self.setFocusId(self.C_MAIN_LOADING_CANCEL)
        control = self.getControl(self.C_MAIN_EPG_VIEW_MARKER)
        if control:
            # The marker control defines the pixel bounds of the program grid.
            left, top = control.getPosition()
            self.focusPoint.x = left
            self.focusPoint.y = top
            self.epgView.left = left
            self.epgView.top = top
            self.epgView.right = left + control.getWidth()
            self.epgView.bottom = top + control.getHeight()
            self.epgView.width = control.getWidth()
            self.epgView.cellHeight = control.getHeight() / CHANNELS_PER_PAGE
        if self.database:
            # Window re-opened with a live database; just redraw the grid.
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        else:
            try:
                self.database = src.Database()
            except src.SourceNotConfiguredException:
                self.onSourceNotConfigured()
                self.close()
                return
            # Async init; onSourceInitialized draws the EPG when it succeeds.
            self.database.initialize(self.onSourceInitialized, self.isSourceInitializationCancelled)
        self.updateTimebar()
def onAction(self, action):
debug('Mode is: %s' % self.mode)
if self.mode == MODE_TV:
self.onActionTVMode(action)
elif self.mode == MODE_OSD:
self.onActionOSDMode(action)
elif self.mode == MODE_EPG:
self.onActionEPGMode(action)
def onActionTVMode(self, action):
if action.getId() == ACTION_PAGE_UP:
self._channelUp()
elif action.getId() == ACTION_PAGE_DOWN:
self._channelDown()
elif not self.osdEnabled:
pass
elif action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK, KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU]:
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
elif action.getId() == ACTION_SHOW_INFO:
self._showOsd()
    def onActionOSDMode(self, action):
        # Navigation while the OSD overlay is visible: up/down browse
        # channels, left/right browse programs, select plays the OSD channel.
        if action.getId() == ACTION_SHOW_INFO:
            self._hideOsd()
        elif action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK, KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU]:
            # Back exits the OSD and returns to the EPG grid.
            self._hideOsd()
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif action.getId() == ACTION_SELECT_ITEM:
            # Switch playback to the channel currently shown in the OSD.
            if self.playChannel(self.osdChannel):
                self._hideOsd()
        elif action.getId() == ACTION_PAGE_UP:
            self._channelUp()
            self._showOsd()
        elif action.getId() == ACTION_PAGE_DOWN:
            self._channelDown()
            self._showOsd()
        elif action.getId() == ACTION_UP:
            # Browse to the previous channel without changing playback.
            self.osdChannel = self.database.getPreviousChannel(self.osdChannel)
            self.osdProgram = self.database.getCurrentProgram(self.osdChannel)
            self._showOsd()
        elif action.getId() == ACTION_DOWN:
            self.osdChannel = self.database.getNextChannel(self.osdChannel)
            self.osdProgram = self.database.getCurrentProgram(self.osdChannel)
            self._showOsd()
        elif action.getId() == ACTION_LEFT:
            # Browse to the previous program on the OSD channel, if any.
            previousProgram = self.database.getPreviousProgram(self.osdProgram)
            if previousProgram:
                self.osdProgram = previousProgram
                self._showOsd()
        elif action.getId() == ACTION_RIGHT:
            nextProgram = self.database.getNextProgram(self.osdProgram)
            if nextProgram:
                self.osdProgram = nextProgram
                self._showOsd()
    def onActionEPGMode(self, action):
        # Navigation within the EPG grid: arrows move between program cells,
        # paging/scrolling shifts channels or the time window.
        if action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK]:
            self.close()
            return
        elif action.getId() == ACTION_PREVIOUS_MENU and action.getButtonCode() == KEY_ESC:
            self.close()
            return
        elif action.getId() == ACTION_MOUSE_MOVE:
            # Reveal the on-screen mouse navigation buttons.
            self._showControl(self.C_MAIN_MOUSE_CONTROLS)
            return
        elif action.getId() == KEY_CONTEXT_MENU:
            if self.player.isPlaying():
                self._hideEpg()
        controlInFocus = None
        currentFocus = self.focusPoint
        try:
            controlInFocus = self.getFocus()
            if controlInFocus in [elem.control for elem in self.controlAndProgramList]:
                # Track the centre of the focused program cell.
                (left, top) = controlInFocus.getPosition()
                currentFocus = Point()
                currentFocus.x = left + (controlInFocus.getWidth() / 2)
                currentFocus.y = top + (controlInFocus.getHeight() / 2)
        except Exception:
            # No control currently has focus; re-focus the cell under the
            # remembered focus point (or the first cell as a fallback).
            control = self._findControlAt(self.focusPoint)
            if control is None and len(self.controlAndProgramList) > 0:
                control = self.controlAndProgramList[0].control
            if control is not None:
                self.setFocus(control)
            return
        if action.getId() == ACTION_LEFT:
            self._left(currentFocus)
        elif action.getId() == ACTION_RIGHT:
            self._right(currentFocus)
        elif action.getId() == ACTION_UP:
            self._up(currentFocus)
        elif action.getId() == ACTION_DOWN:
            self._down(currentFocus)
        elif action.getId() == ACTION_NEXT_ITEM:
            self._nextDay()
        elif action.getId() == ACTION_PREV_ITEM:
            self._previousDay()
        elif action.getId() == ACTION_PAGE_UP:
            self._moveUp(CHANNELS_PER_PAGE)
        elif action.getId() == ACTION_PAGE_DOWN:
            self._moveDown(CHANNELS_PER_PAGE)
        elif action.getId() == ACTION_MOUSE_WHEEL_UP:
            self._moveUp(scrollEvent=True)
        elif action.getId() == ACTION_MOUSE_WHEEL_DOWN:
            self._moveDown(scrollEvent=True)
        elif action.getId() == KEY_HOME:
            # Jump back to "now", snapped to the previous half-hour boundary.
            self.viewStartDate = datetime.datetime.today()
            self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30,
                                                     seconds=self.viewStartDate.second)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif action.getId() in [KEY_CONTEXT_MENU, ACTION_PREVIOUS_MENU] and controlInFocus is not None:
            program = self._getProgramFromControl(controlInFocus)
            if program is not None:
                self._showContextMenu(program)
        else:
            xbmc.log('[script.ftvguide] Unhandled ActionId: ' + str(action.getId()), xbmc.LOGDEBUG)
    def onClick(self, controlId):
        # Mouse navigation buttons and clicks on program cells.
        if controlId in [self.C_MAIN_LOADING_CANCEL, self.C_MAIN_MOUSE_EXIT]:
            self.close()
            return
        if self.isClosing:
            return
        if controlId == self.C_MAIN_MOUSE_HOME:
            # Jump back to "now", snapped to the previous half-hour boundary.
            self.viewStartDate = datetime.datetime.today()
            self.viewStartDate -= datetime.timedelta(minutes=self.viewStartDate.minute % 30, seconds=self.viewStartDate.second)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
            return
        elif controlId == self.C_MAIN_MOUSE_LEFT:
            self.viewStartDate -= datetime.timedelta(hours=2)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
            return
        elif controlId == self.C_MAIN_MOUSE_UP:
            self._moveUp(count=CHANNELS_PER_PAGE)
            return
        elif controlId == self.C_MAIN_MOUSE_DOWN:
            self._moveDown(count=CHANNELS_PER_PAGE)
            return
        elif controlId == self.C_MAIN_MOUSE_RIGHT:
            self.viewStartDate += datetime.timedelta(hours=2)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
            return
        # A program cell was clicked: try to play the channel directly.
        program = self._getProgramFromControl(self.getControl(controlId))
        if program is None:
            return
        if not self.playChannel(program.channel):
            # No stream configured; try auto-detecting one in other add-ons.
            result = self.streamingService.detectStream(program.channel)
            if not result:
                # Nothing detected; fall back to the context menu so the
                # user can configure a stream manually.
                self._showContextMenu(program)
            elif type(result) == str:
                # Exactly one stream was detected — use it right away.
                self.database.setCustomStreamUrl(program.channel, result)
                self.playChannel(program.channel)
            else:
                # Multiple candidates; ask the user to pick one.
                d = ChooseStreamAddonDialog(result)
                d.doModal()
                if d.stream is not None:
                    self.database.setCustomStreamUrl(program.channel, d.stream)
                    self.playChannel(program.channel)
    def _showContextMenu(self, program):
        # Open the PopupMenu for a program and act on the pressed button.
        self._hideControl(self.C_MAIN_MOUSE_CONTROLS)
        d = PopupMenu(self.database, program, not program.notificationScheduled)
        d.doModal()
        buttonClicked = d.buttonClicked
        del d
        if buttonClicked == PopupMenu.C_POPUP_REMIND:
            # Toggle the reminder for this program and redraw (cell colour changes).
            if program.notificationScheduled:
                self.notification.removeNotification(program)
            else:
                self.notification.addNotification(program)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif buttonClicked == PopupMenu.C_POPUP_CHOOSE_STREAM:
            d = StreamSetupDialog(self.database, program.channel)
            d.doModal()
            del d
        elif buttonClicked == PopupMenu.C_POPUP_PLAY:
            self.playChannel(program.channel)
        elif buttonClicked == PopupMenu.C_POPUP_CHANNELS:
            d = ChannelsMenu(self.database)
            d.doModal()
            del d
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
        elif buttonClicked == PopupMenu.C_POPUP_QUIT:
            self.close()
        elif buttonClicked == PopupMenu.C_POPUP_LIBMOV:
            xbmc.executebuiltin('ActivateWindow(Videos,videodb://movies/titles/)')
        elif buttonClicked == PopupMenu.C_POPUP_LIBTV:
            xbmc.executebuiltin('ActivateWindow(Videos,videodb://tvshows/titles/)')
        elif buttonClicked == PopupMenu.C_POPUP_VIDEOADDONS:
            xbmc.executebuiltin('ActivateWindow(Videos,addons://sources/video/)')
def setFocusId(self, controlId):
control = self.getControl(controlId)
if control:
self.setFocus(control)
def setFocus(self, control):
debug('setFocus %d' % control.getId())
if control in [elem.control for elem in self.controlAndProgramList]:
debug('Focus before %s' % self.focusPoint)
(left, top) = control.getPosition()
if left > self.focusPoint.x or left + control.getWidth() < self.focusPoint.x:
self.focusPoint.x = left
self.focusPoint.y = top + (control.getHeight() / 2)
debug('New focus at %s' % self.focusPoint)
super(TVGuide, self).setFocus(control)
    def onFocus(self, controlId):
        # Update the detail pane (title, time, description, images) for the
        # program cell that just received focus.
        try:
            controlInFocus = self.getControl(controlId)
        except Exception:
            return
        program = self._getProgramFromControl(controlInFocus)
        if program is None:
            return  # focus moved to a non-program control
        self.setControlLabel(self.C_MAIN_TITLE, '[B]%s[/B]' % program.title)
        if program.startDate or program.endDate:
            self.setControlLabel(self.C_MAIN_TIME,
                                 '[B]%s - %s[/B]' % (self.formatTime(program.startDate), self.formatTime(program.endDate)))
        else:
            self.setControlLabel(self.C_MAIN_TIME, '')
        if program.description:
            description = program.description
        else:
            description = strings(NO_DESCRIPTION)
        self.setControlText(self.C_MAIN_DESCRIPTION, description)
        if program.channel.logo is not None:
            self.setControlImage(self.C_MAIN_LOGO, program.channel.logo)
        else:
            self.setControlImage(self.C_MAIN_LOGO, '')
        if program.imageSmall is not None:
            self.setControlImage(self.C_MAIN_IMAGE, program.imageSmall)
        else:
            # Fall back to the add-on logo when the program has no image.
            self.setControlImage(self.C_MAIN_IMAGE, 'tvguide-logo-epg.png')
        if ADDON.getSetting('program.background.enabled') == 'true' and program.imageLarge is not None:
            self.setControlImage(self.C_MAIN_BACKGROUND, program.imageLarge)
        if not self.osdEnabled and self.player.isPlaying():
            # Without an OSD, browsing the grid stops any background playback.
            self.player.stop()
def _left(self, currentFocus):
control = self._findControlOnLeft(currentFocus)
if control is not None:
self.setFocus(control)
elif control is None:
self.viewStartDate -= datetime.timedelta(hours=2)
self.focusPoint.x = self.epgView.right
self.onRedrawEPG(self.channelIdx, self.viewStartDate, focusFunction=self._findControlOnLeft)
def _right(self, currentFocus):
control = self._findControlOnRight(currentFocus)
if control is not None:
self.setFocus(control)
elif control is None:
self.viewStartDate += datetime.timedelta(hours=2)
self.focusPoint.x = self.epgView.left
self.onRedrawEPG(self.channelIdx, self.viewStartDate, focusFunction=self._findControlOnRight)
def _up(self, currentFocus):
currentFocus.x = self.focusPoint.x
control = self._findControlAbove(currentFocus)
if control is not None:
self.setFocus(control)
elif control is None:
self.focusPoint.y = self.epgView.bottom
self.onRedrawEPG(self.channelIdx - CHANNELS_PER_PAGE, self.viewStartDate,
focusFunction=self._findControlAbove)
def _down(self, currentFocus):
currentFocus.x = self.focusPoint.x
control = self._findControlBelow(currentFocus)
if control is not None:
self.setFocus(control)
elif control is None:
self.focusPoint.y = self.epgView.top
self.onRedrawEPG(self.channelIdx + CHANNELS_PER_PAGE, self.viewStartDate,
focusFunction=self._findControlBelow)
def _nextDay(self):
self.viewStartDate += datetime.timedelta(days=1)
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
def _previousDay(self):
self.viewStartDate -= datetime.timedelta(days=1)
self.onRedrawEPG(self.channelIdx, self.viewStartDate)
def _moveUp(self, count=1, scrollEvent=False):
if scrollEvent:
self.onRedrawEPG(self.channelIdx - count, self.viewStartDate)
else:
self.focusPoint.y = self.epgView.bottom
self.onRedrawEPG(self.channelIdx - count, self.viewStartDate, focusFunction=self._findControlAbove)
def _moveDown(self, count=1, scrollEvent=False):
if scrollEvent:
self.onRedrawEPG(self.channelIdx + count, self.viewStartDate)
else:
self.focusPoint.y = self.epgView.top
self.onRedrawEPG(self.channelIdx + count, self.viewStartDate, focusFunction=self._findControlBelow)
def _channelUp(self):
channel = self.database.getNextChannel(self.currentChannel)
self.playChannel(channel)
def _channelDown(self):
channel = self.database.getPreviousChannel(self.currentChannel)
self.playChannel(channel)
def playChannel(self, channel):
self.currentChannel = channel
wasPlaying = self.player.isPlaying()
url = self.database.getStreamUrl(channel)
if url:
if url[0:9] == 'plugin://':
if self.alternativePlayback:
xbmc.executebuiltin('XBMC.RunPlugin(%s)' % url)
elif self.osdEnabled:
xbmc.executebuiltin('PlayMedia(%s,1)' % url)
else:
xbmc.executebuiltin('PlayMedia(%s)' % url)
else:
self.player.play(item=url, windowed=self.osdEnabled)
if not wasPlaying:
self._hideEpg()
threading.Timer(1, self.waitForPlayBackStopped).start()
self.osdProgram = self.database.getCurrentProgram(self.currentChannel)
return url is not None
def waitForPlayBackStopped(self):
for retry in range(0, 100):
time.sleep(0.1)
if self.player.isPlaying():
break
while self.player.isPlaying() and not xbmc.abortRequested and not self.isClosing:
time.sleep(0.5)
self.onPlayBackStopped()
    def _showOsd(self):
        # Populate and reveal the on-screen display for the current OSD
        # channel/program; a no-op when the OSD is disabled in settings.
        if not self.osdEnabled:
            return
        if self.mode != MODE_OSD:
            # Entering OSD mode for the first time: start at the playing channel.
            self.osdChannel = self.currentChannel
        if self.osdProgram is not None:
            self.setControlLabel(self.C_MAIN_OSD_TITLE, '[B]%s[/B]' % self.osdProgram.title)
            if self.osdProgram.startDate or self.osdProgram.endDate:
                self.setControlLabel(self.C_MAIN_OSD_TIME, '[B]%s - %s[/B]' % (
                    self.formatTime(self.osdProgram.startDate), self.formatTime(self.osdProgram.endDate)))
            else:
                self.setControlLabel(self.C_MAIN_OSD_TIME, '')
            self.setControlText(self.C_MAIN_OSD_DESCRIPTION, self.osdProgram.description)
            self.setControlLabel(self.C_MAIN_OSD_CHANNEL_TITLE, self.osdChannel.title)
            if self.osdProgram.channel.logo is not None:
                self.setControlImage(self.C_MAIN_OSD_CHANNEL_LOGO, self.osdProgram.channel.logo)
            else:
                self.setControlImage(self.C_MAIN_OSD_CHANNEL_LOGO, '')
        self.mode = MODE_OSD
        self._showControl(self.C_MAIN_OSD)
def _hideOsd(self):
self.mode = MODE_TV
self._hideControl(self.C_MAIN_OSD)
def _hideEpg(self):
self._hideControl(self.C_MAIN_EPG)
self.mode = MODE_TV
self._clearEpg()
def onRedrawEPG(self, channelStart, startTime, focusFunction=None):
if self.redrawingEPG or (self.database is not None and self.database.updateInProgress) or self.isClosing:
debug('onRedrawEPG - already redrawing')
return
debug('onRedrawEPG')
self.redrawingEPG = True
self.mode = MODE_EPG
self._showControl(self.C_MAIN_EPG)
self.updateTimebar(scheduleTimer=False)
self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(CALCULATING_REMAINING_TIME))
self._showControl(self.C_MAIN_LOADING)
self.setFocusId(self.C_MAIN_LOADING_CANCEL)
self._clearEpg()
try:
self.channelIdx, channels, programs = self.database.getEPGView(channelStart, startTime, self.onSourceProgressUpdate, clearExistingProgramList=False)
except src.SourceException:
self.onEPGLoadError()
return
channelsWithoutPrograms = list(channels)
self.setControlLabel(self.C_MAIN_DATE, self.formatDate(self.viewStartDate, False))
self.setControlLabel(self.C_MAIN_DATE_LONG, self.formatDate(self.viewStartDate, True))
for col in range(1, 5):
self.setControlLabel(4000 + col, self.formatTime(startTime))
startTime += HALF_HOUR
if programs is None:
self.onEPGLoadError()
return
showLogo = ADDON.getSetting('logos.enabled') == 'true'
for idx in range(0, CHANNELS_PER_PAGE):
if idx >= len(channels):
self.setControlImage(4110 + idx, ' ')
self.setControlLabel(4010 + idx, ' ')
else:
channel = channels[idx]
self.setControlLabel(4010 + idx, channel.title)
if (channel.logo is not None and showLogo == True):
self.setControlImage(4110 + idx, channel.logo)
else:
self.setControlImage(4110 + idx, ' ')
for program in programs:
idx = channels.index(program.channel)
if program.channel in channelsWithoutPrograms:
channelsWithoutPrograms.remove(program.channel)
startDelta = program.startDate - self.viewStartDate
stopDelta = program.endDate - self.viewStartDate
cellStart = self._secondsToXposition(startDelta.seconds)
if startDelta.days < 0:
cellStart = self.epgView.left
cellWidth = self._secondsToXposition(stopDelta.seconds) - cellStart
if cellStart + cellWidth > self.epgView.right:
cellWidth = self.epgView.right - cellStart
if cellWidth > 1:
if program.notificationScheduled:
noFocusTexture = 'tvguide-program-red.png'
focusTexture = 'tvguide-program-red-focus.png'
else:
noFocusTexture = 'tvguide-program-grey.png'
focusTexture = 'tvguide-program-grey-focus.png'
if cellWidth < 25:
title = ''
else:
title = program.title
control = xbmcgui.ControlButton(
cellStart,
self.epgView.top + self.epgView.cellHeight * idx,
cellWidth - 2,
self.epgView.cellHeight - 2,
title,
noFocusTexture=noFocusTexture,
focusTexture=focusTexture
)
self.controlAndProgramList.append(ControlAndProgram(control, program))
for channel in channelsWithoutPrograms:
idx = channels.index(channel)
control = xbmcgui.ControlButton(
self.epgView.left,
self.epgView.top + self.epgView.cellHeight * idx,
(self.epgView.right - self.epgView.left) - 2,
self.epgView.cellHeight - 2,
strings(NO_PROGRAM_AVAILABLE),
noFocusTexture='tvguide-program-grey.png',
focusTexture='tvguide-program-grey-focus.png'
)
program = src.Program(channel, strings(NO_PROGRAM_AVAILABLE), None, None, None)
self.controlAndProgramList.append(ControlAndProgram(control, program))
if focusFunction is None:
focusFunction = self._findControlAt
focusControl = focusFunction(self.focusPoint)
controls = [elem.control for elem in self.controlAndProgramList]
self.addControls(controls)
if focusControl is not None:
debug('onRedrawEPG - setFocus %d' % focusControl.getId())
self.setFocus(focusControl)
self.ignoreMissingControlIds.extend([elem.control.getId() for elem in self.controlAndProgramList])
if focusControl is None and len(self.controlAndProgramList) > 0:
self.setFocus(self.controlAndProgramList[0].control)
self._hideControl(self.C_MAIN_LOADING)
self.redrawingEPG = False
    def _clearEpg(self):
        """
        Remove every program control from the window and empty the control list.

        removeControls() can raise RuntimeError (e.g. when a control was never
        actually added); in that case fall back to removing controls one by
        one so a single bad control does not block the rest.
        """
        controls = [elem.control for elem in self.controlAndProgramList]
        try:
            self.removeControls(controls)
        except RuntimeError:
            # Bulk removal failed - retry per control, ignoring failures.
            for elem in self.controlAndProgramList:
                try:
                    self.removeControl(elem.control)
                except RuntimeError:
                    pass  # control probably never added; safe to ignore
        del self.controlAndProgramList[:]
    def onEPGLoadError(self):
        """Hide the loading indicator, show a load-error dialog and close the window."""
        self.redrawingEPG = False
        self._hideControl(self.C_MAIN_LOADING)
        xbmcgui.Dialog().ok(strings(LOAD_ERROR_TITLE), strings(LOAD_ERROR_LINE1), strings(LOAD_ERROR_LINE2))
        self.close()
    def onSourceNotConfigured(self):
        """Hide the loading indicator, show a configuration-error dialog and close the window."""
        self.redrawingEPG = False
        self._hideControl(self.C_MAIN_LOADING)
        xbmcgui.Dialog().ok(strings(LOAD_ERROR_TITLE), strings(LOAD_ERROR_LINE1), strings(CONFIGURATION_ERROR_LINE2))
        self.close()
    def isSourceInitializationCancelled(self):
        """Return truthy when Kodi is aborting or this window is closing."""
        return xbmc.abortRequested or self.isClosing
    def onSourceInitialized(self, success):
        """
        Callback fired when the EPG source finished initializing.

        :param success: bool, whether the source initialized correctly;
                        on failure nothing is drawn.
        """
        if success:
            self.notification = Notification(self.database, ADDON.getAddonInfo('path'))
            self.onRedrawEPG(0, self.viewStartDate)
    def onSourceProgressUpdate(self, percentageComplete):
        """
        Update the loading progress bar and the estimated-time-left label.

        :param percentageComplete: int, progress in percent (0-100)
        :return: bool, False when Kodi is aborting or the window is closing
                 (signals the source to stop updating)
        """
        control = self.getControl(self.C_MAIN_LOADING_PROGRESS)
        if percentageComplete < 1:
            # Start of a new load: reset the bar and remember the start time.
            if control:
                control.setPercent(1)
            self.progressStartTime = datetime.datetime.now()
            self.progressPreviousPercentage = percentageComplete
        elif percentageComplete != self.progressPreviousPercentage:
            if control:
                control.setPercent(percentageComplete)
            self.progressPreviousPercentage = percentageComplete
            delta = datetime.datetime.now() - self.progressStartTime
            if percentageComplete < 20:
                # Too little data for a stable estimate yet.
                self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(CALCULATING_REMAINING_TIME))
            else:
                # Linear extrapolation of elapsed time over remaining percent.
                secondsLeft = int(delta.seconds) / float(percentageComplete) * (100.0 - percentageComplete)
                if secondsLeft > 30:
                    secondsLeft -= secondsLeft % 10  # round down to nearest 10s for a calmer label
                self.setControlLabel(self.C_MAIN_LOADING_TIME_LEFT, strings(TIME_LEFT) % secondsLeft)
        return not xbmc.abortRequested and not self.isClosing
    def onPlayBackStopped(self):
        """When playback really stopped (and we are not closing), hide the OSD and redraw the EPG."""
        if not self.player.isPlaying() and not self.isClosing:
            self._hideControl(self.C_MAIN_OSD)
            self.onRedrawEPG(self.channelIdx, self.viewStartDate)
def _secondsToXposition(self, seconds):
return self.epgView.left + (seconds * self.epgView.width / 7200)
def _findControlOnRight(self, point):
distanceToNearest = 10000
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
x = left + (control.getWidth() / 2)
y = top + (control.getHeight() / 2)
if point.x < x and point.y == y:
distance = abs(point.x - x)
if distance < distanceToNearest:
distanceToNearest = distance
nearestControl = control
return nearestControl
def _findControlOnLeft(self, point):
distanceToNearest = 10000
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
x = left + (control.getWidth() / 2)
y = top + (control.getHeight() / 2)
if point.x > x and point.y == y:
distance = abs(point.x - x)
if distance < distanceToNearest:
distanceToNearest = distance
nearestControl = control
return nearestControl
def _findControlBelow(self, point):
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(leftEdge, top) = control.getPosition()
y = top + (control.getHeight() / 2)
if point.y < y:
rightEdge = leftEdge + control.getWidth()
if leftEdge <= point.x < rightEdge and (nearestControl is None or nearestControl.getPosition()[1] > top):
nearestControl = control
return nearestControl
def _findControlAbove(self, point):
nearestControl = None
for elem in self.controlAndProgramList:
control = elem.control
(leftEdge, top) = control.getPosition()
y = top + (control.getHeight() / 2)
if point.y > y:
rightEdge = leftEdge + control.getWidth()
if leftEdge <= point.x < rightEdge and (nearestControl is None or nearestControl.getPosition()[1] < top):
nearestControl = control
return nearestControl
def _findControlAt(self, point):
for elem in self.controlAndProgramList:
control = elem.control
(left, top) = control.getPosition()
bottom = top + control.getHeight()
right = left + control.getWidth()
if left <= point.x <= right and top <= point.y <= bottom:
return control
return None
def _getProgramFromControl(self, control):
for elem in self.controlAndProgramList:
if elem.control == control:
return elem.program
return None
def _hideControl(self, *controlIds):
for controlId in controlIds:
control = self.getControl(controlId)
if control:
control.setVisible(True)
def _showControl(self, *controlIds):
for controlId in controlIds:
control = self.getControl(controlId)
if control:
control.setVisible(False)
def formatTime(self, timestamp):
if timestamp:
format = xbmc.getRegion('time').replace(':%S', '').replace('%H%H', '%H')
return timestamp.strftime(format)
else:
return ''
def formatDate(self, timestamp, longdate=False):
if timestamp:
if longdate == True:
format = xbmc.getRegion('datelong')
else:
format = xbmc.getRegion('dateshort')
return timestamp.strftime(format)
else:
return ''
    def setControlImage(self, controlId, image):
        """
        Set the image of a control, silently skipping unknown control IDs.

        :param controlId: int, control ID
        :param image: str, image path/URL; encoded to UTF-8 bytes
                      (Python 2-era API convention - TODO confirm still needed)
        """
        control = self.getControl(controlId)
        if control:
            control.setImage(image.encode('utf-8'))
    def setControlLabel(self, controlId, label):
        """
        Set the label of a control; no-op when the control is missing
        or the label is falsy.
        """
        control = self.getControl(controlId)
        if control and label:
            control.setLabel(label)
    def setControlText(self, controlId, text):
        """Set the text of a control, silently skipping unknown control IDs."""
        control = self.getControl(controlId)
        if control:
            control.setText(text)
    def updateTimebar(self, scheduleTimer=True):
        """
        Position the timebar control at the current wall-clock time and,
        optionally, re-schedule this method to run again in one second.

        The timebar is only visible while the view shows today
        (timeDelta.days == 0).

        :param scheduleTimer: bool, re-arm the one-second timer when True
        """
        # move timebar to current time
        timeDelta = datetime.datetime.today() - self.viewStartDate
        control = self.getControl(self.C_MAIN_TIMEBAR)
        if control:
            (x, y) = control.getPosition()
            try:
                # Sometimes raises:
                # exceptions.RuntimeError: Unknown exception thrown from the call "setVisible"
                control.setVisible(timeDelta.days == 0)
            except:
                pass
            control.setPosition(self._secondsToXposition(timeDelta.seconds), y)
        if scheduleTimer and not xbmc.abortRequested and not self.isClosing:
            threading.Timer(1, self.updateTimebar).start()
class PopupMenu(xbmcgui.WindowXMLDialog):
    """
    Context menu dialog shown for a program: play the channel, choose or
    remove a custom stream, toggle a reminder, etc.

    The clicked button ID is exposed via self.buttonClicked after close().
    """
    # Control IDs from script-tvguide-menu.xml
    C_POPUP_PLAY = 4000
    C_POPUP_CHOOSE_STREAM = 4001
    C_POPUP_REMIND = 4002
    C_POPUP_CHANNELS = 4003
    C_POPUP_QUIT = 4004
    C_POPUP_CHANNEL_LOGO = 4100
    C_POPUP_CHANNEL_TITLE = 4101
    C_POPUP_PROGRAM_TITLE = 4102
    C_POPUP_LIBMOV = 80000
    C_POPUP_LIBTV = 80001
    C_POPUP_VIDEOADDONS = 80002
    def __new__(cls, database, program, showRemind):
        # WindowXMLDialog requires the XML file and path at construction time.
        return super(PopupMenu, cls).__new__(cls, 'script-tvguide-menu.xml', ADDON.getAddonInfo('path'), SKIN)
    def __init__(self, database, program, showRemind):
        """
        :param database: source database used for custom stream lookups
        :param program: the program the menu was opened for
        :param showRemind: bool, True to offer "remind", False to offer "don't remind"
        """
        super(PopupMenu, self).__init__()
        self.database = database
        self.program = program
        self.showRemind = showRemind
        self.buttonClicked = None
    def onInit(self):
        """Populate labels/images and enable/disable buttons for the program."""
        playControl = self.getControl(self.C_POPUP_PLAY)
        remindControl = self.getControl(self.C_POPUP_REMIND)
        channelLogoControl = self.getControl(self.C_POPUP_CHANNEL_LOGO)
        channelTitleControl = self.getControl(self.C_POPUP_CHANNEL_TITLE)
        programTitleControl = self.getControl(self.C_POPUP_PROGRAM_TITLE)
        playControl.setLabel(strings(WATCH_CHANNEL, self.program.channel.title))
        if not self.program.channel.isPlayable():
            # No stream available - move focus to the stream chooser instead.
            playControl.setEnabled(False)
            self.setFocusId(self.C_POPUP_CHOOSE_STREAM)
        if self.database.getCustomStreamUrl(self.program.channel):
            # A custom .strm is set: the button becomes a "remove" action.
            chooseStrmControl = self.getControl(self.C_POPUP_CHOOSE_STREAM)
            chooseStrmControl.setLabel(strings(REMOVE_STRM_FILE))
        if self.program.channel.logo is not None:
            channelLogoControl.setImage(self.program.channel.logo)
            channelTitleControl.setVisible(False)
        else:
            channelTitleControl.setLabel(self.program.channel.title)
            channelLogoControl.setVisible(False)
        programTitleControl.setLabel(self.program.title)
        if self.program.startDate:
            remindControl.setEnabled(True)
            if self.showRemind:
                remindControl.setLabel(strings(REMIND_PROGRAM))
            else:
                remindControl.setLabel(strings(DONT_REMIND_PROGRAM))
        else:
            # No start date -> a reminder makes no sense.
            remindControl.setEnabled(False)
    def onAction(self, action):
        """Close the dialog on back/parent/context actions."""
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
            self.close()
            return
    def onClick(self, controlId):
        """
        Handle a button click. Removing a custom stream is handled in place;
        any other click is recorded in buttonClicked and closes the dialog.
        """
        if controlId == self.C_POPUP_CHOOSE_STREAM and self.database.getCustomStreamUrl(self.program.channel):
            self.database.deleteCustomStreamUrl(self.program.channel)
            chooseStrmControl = self.getControl(self.C_POPUP_CHOOSE_STREAM)
            chooseStrmControl.setLabel(strings(CHOOSE_STRM_FILE))
            if not self.program.channel.isPlayable():
                playControl = self.getControl(self.C_POPUP_PLAY)
                playControl.setEnabled(False)
        else:
            self.buttonClicked = controlId
            self.close()
    def onFocus(self, controlId):
        pass
class ChannelsMenu(xbmcgui.WindowXMLDialog):
    """
    Dialog for managing the channel list: toggle visibility of channels and
    reorder them (with a "grab and move" interaction), then save or cancel.
    """
    # Control IDs from script-tvguide-channels.xml
    C_CHANNELS_LIST = 6000
    C_CHANNELS_SELECTION_VISIBLE = 6001
    C_CHANNELS_SELECTION = 6002
    C_CHANNELS_SAVE = 6003
    C_CHANNELS_CANCEL = 6004
    def __new__(cls, database):
        return super(ChannelsMenu, cls).__new__(cls, 'script-tvguide-channels.xml', ADDON.getAddonInfo('path'), SKIN)
    def __init__(self, database):
        """
        :param database: source database; the full channel list (including
                         hidden channels) is loaded from it
        """
        super(ChannelsMenu, self).__init__()
        self.database = database
        self.channelList = database.getChannelList(onlyVisible=False)
        self.swapInProgress = False
        self.selectedChannel = 0
    def onInit(self):
        """Fill the list control and give it focus."""
        self.updateChannelList()
        self.setFocusId(self.C_CHANNELS_LIST)
    def onAction(self, action):
        """
        Keyboard/remote handling: back closes; context/left "grabs" the
        highlighted channel; up/down moves a grabbed channel; select/right
        drops it back into the list.
        """
        if action.getId() in [ACTION_PARENT_DIR, KEY_NAV_BACK]:
            self.close()
            return
        if self.getFocusId() == self.C_CHANNELS_LIST and action.getId() in [ACTION_PREVIOUS_MENU, KEY_CONTEXT_MENU, ACTION_LEFT]:
            # Grab the channel under the cursor for moving.
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            self.selectedChannel = idx
            buttonControl = self.getControl(self.C_CHANNELS_SELECTION)
            buttonControl.setLabel('[B]%s[/B]' % self.channelList[idx].title)
            self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(False)
            self.setFocusId(self.C_CHANNELS_SELECTION)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() in [ACTION_RIGHT, ACTION_SELECT_ITEM]:
            # Drop the grabbed channel at its current position.
            self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(True)
            xbmc.sleep(350)
            self.setFocusId(self.C_CHANNELS_LIST)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() in [ACTION_PREVIOUS_MENU, KEY_CONTEXT_MENU]:
            # Cancel the move: swap back to the originally grabbed position.
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            self.swapChannels(self.selectedChannel, idx)
            self.getControl(self.C_CHANNELS_SELECTION_VISIBLE).setVisible(True)
            xbmc.sleep(350)
            self.setFocusId(self.C_CHANNELS_LIST)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() == ACTION_UP:
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            if idx > 0:
                self.swapChannels(idx, idx - 1)
        elif self.getFocusId() == self.C_CHANNELS_SELECTION and action.getId() == ACTION_DOWN:
            listControl = self.getControl(self.C_CHANNELS_LIST)
            idx = listControl.getSelectedPosition()
            if idx < listControl.size() - 1:
                self.swapChannels(idx, idx + 1)
    def onClick(self, controlId):
        """List click toggles visibility; save persists; cancel just closes."""
        if controlId == self.C_CHANNELS_LIST:
            listControl = self.getControl(self.C_CHANNELS_LIST)
            item = listControl.getSelectedItem()
            channel = self.channelList[int(item.getProperty('idx'))]
            channel.visible = not channel.visible
            if channel.visible:
                iconImage = 'tvguide-channel-visible.png'
            else:
                iconImage = 'tvguide-channel-hidden.png'
            item.setIconImage(iconImage)
        elif controlId == self.C_CHANNELS_SAVE:
            self.database.saveChannelList(self.close, self.channelList)
        elif controlId == self.C_CHANNELS_CANCEL:
            self.close()
    def onFocus(self, controlId):
        pass
    def updateChannelList(self):
        """Rebuild the list control from self.channelList."""
        listControl = self.getControl(self.C_CHANNELS_LIST)
        listControl.reset()
        for idx, channel in enumerate(self.channelList):
            if channel.visible:
                iconImage = 'tvguide-channel-visible.png'
            else:
                iconImage = 'tvguide-channel-hidden.png'
            item = xbmcgui.ListItem('%3d. %s' % (idx + 1, channel.title), iconImage=iconImage)
            item.setProperty('idx', str(idx))
            listControl.addItem(item)
    def updateListItem(self, idx, item):
        """Refresh a single list item to match the channel at position idx."""
        channel = self.channelList[idx]
        item.setLabel('%3d. %s' % (idx + 1, channel.title))
        if channel.visible:
            iconImage = 'tvguide-channel-visible.png'
        else:
            iconImage = 'tvguide-channel-hidden.png'
        item.setIconImage(iconImage)
        item.setProperty('idx', str(idx))
    def swapChannels(self, fromIdx, toIdx):
        """
        Swap two channels in the in-memory list, recompute their weights
        and update the affected list items. Guarded by swapInProgress so
        rapid key repeats do not interleave swaps.
        """
        if self.swapInProgress:
            return
        self.swapInProgress = True
        c = self.channelList[fromIdx]
        self.channelList[fromIdx] = self.channelList[toIdx]
        self.channelList[toIdx] = c
        # recalculate weight
        for idx, channel in enumerate(self.channelList):
            channel.weight = idx
        listControl = self.getControl(self.C_CHANNELS_LIST)
        self.updateListItem(fromIdx, listControl.getListItem(fromIdx))
        self.updateListItem(toIdx, listControl.getListItem(toIdx))
        listControl.selectItem(toIdx)
        xbmc.sleep(50)
        self.swapInProgress = False
class StreamSetupDialog(xbmcgui.WindowXMLDialog):
    """
    Dialog for choosing how a channel is streamed: a local .strm file,
    a Kodi favourite, or a stream provided by another addon.

    A hidden label (C_STREAM_VISIBILITY_MARKER) tracks which tab is active.
    """
    # Control IDs from script-tvguide-streamsetup.xml
    C_STREAM_STRM_TAB = 101
    C_STREAM_FAVOURITES_TAB = 102
    C_STREAM_ADDONS_TAB = 103
    C_STREAM_STRM_BROWSE = 1001
    C_STREAM_STRM_FILE_LABEL = 1005
    C_STREAM_STRM_PREVIEW = 1002
    C_STREAM_STRM_OK = 1003
    C_STREAM_STRM_CANCEL = 1004
    C_STREAM_FAVOURITES = 2001
    C_STREAM_FAVOURITES_PREVIEW = 2002
    C_STREAM_FAVOURITES_OK = 2003
    C_STREAM_FAVOURITES_CANCEL = 2004
    C_STREAM_ADDONS = 3001
    C_STREAM_ADDONS_STREAMS = 3002
    C_STREAM_ADDONS_NAME = 3003
    C_STREAM_ADDONS_DESCRIPTION = 3004
    C_STREAM_ADDONS_PREVIEW = 3005
    C_STREAM_ADDONS_OK = 3006
    C_STREAM_ADDONS_CANCEL = 3007
    C_STREAM_VISIBILITY_MARKER = 100
    # Values stored in the visibility marker label to identify the active tab
    VISIBLE_STRM = 'strm'
    VISIBLE_FAVOURITES = 'favourites'
    VISIBLE_ADDONS = 'addons'
    def __new__(cls, database, channel):
        return super(StreamSetupDialog, cls).__new__(cls, 'script-tvguide-streamsetup.xml', ADDON.getAddonInfo('path'), SKIN)
    def __init__(self, database, channel):
        """
        :param database: source database for saving the chosen stream URL
        :param channel: the channel being configured
        """
        super(StreamSetupDialog, self).__init__()
        self.database = database
        self.channel = channel
        self.player = xbmc.Player()
        self.previousAddonId = None
        self.strmFile = None
        self.streamingService = streaming.StreamsService(ADDON)
    def close(self):
        """Stop any running preview before closing the dialog."""
        if self.player.isPlaying():
            self.player.stop()
        super(StreamSetupDialog, self).close()
    def onInit(self):
        """Populate the favourites and addon lists; default to the .strm tab."""
        self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_STRM)
        favourites = self.streamingService.loadFavourites()
        items = list()
        for label, value in favourites:
            item = xbmcgui.ListItem(label)
            item.setProperty('stream', value)
            items.append(item)
        listControl = self.getControl(StreamSetupDialog.C_STREAM_FAVOURITES)
        listControl.addItems(items)
        items = list()
        for id in self.streamingService.getAddons():
            try:
                addon = xbmcaddon.Addon(id)  # raises Exception if addon is not installed
                item = xbmcgui.ListItem(addon.getAddonInfo('name'), iconImage=addon.getAddonInfo('icon'))
                item.setProperty('addon_id', id)
                items.append(item)
            except Exception:
                pass  # addon not installed - simply omit it from the list
        listControl = self.getControl(StreamSetupDialog.C_STREAM_ADDONS)
        listControl.addItems(items)
        self.updateAddonInfo()
    def onAction(self, action):
        """Back/context closes; navigating the addon list refreshes its details."""
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK, KEY_CONTEXT_MENU]:
            self.close()
            return
        elif self.getFocusId() == self.C_STREAM_ADDONS:
            self.updateAddonInfo()
    def onClick(self, controlId):
        """
        Handle browse/OK/cancel/preview for all three tabs. OK persists the
        chosen stream URL via the database and closes; preview toggles
        playback of the currently selected stream.
        """
        if controlId == self.C_STREAM_STRM_BROWSE:
            stream = xbmcgui.Dialog().browse(1, ADDON.getLocalizedString(30304), 'video', '.strm')
            if stream:
                self.database.setCustomStreamUrl(self.channel, stream)
                self.getControl(self.C_STREAM_STRM_FILE_LABEL).setText(stream)
                self.strmFile = stream
        elif controlId == self.C_STREAM_ADDONS_OK:
            listControl = self.getControl(self.C_STREAM_ADDONS_STREAMS)
            item = listControl.getSelectedItem()
            if item:
                stream = item.getProperty('stream')
                self.database.setCustomStreamUrl(self.channel, stream)
            self.close()
        elif controlId == self.C_STREAM_FAVOURITES_OK:
            listControl = self.getControl(self.C_STREAM_FAVOURITES)
            item = listControl.getSelectedItem()
            if item:
                stream = item.getProperty('stream')
                self.database.setCustomStreamUrl(self.channel, stream)
            self.close()
        elif controlId == self.C_STREAM_STRM_OK:
            self.database.setCustomStreamUrl(self.channel, self.strmFile)
            self.close()
        elif controlId in [self.C_STREAM_ADDONS_CANCEL, self.C_STREAM_FAVOURITES_CANCEL, self.C_STREAM_STRM_CANCEL]:
            self.close()
        elif controlId in [self.C_STREAM_ADDONS_PREVIEW, self.C_STREAM_FAVOURITES_PREVIEW, self.C_STREAM_STRM_PREVIEW]:
            if self.player.isPlaying():
                # A preview is running: stop it and restore the button labels.
                self.player.stop()
                self.getControl(self.C_STREAM_ADDONS_PREVIEW).setLabel(strings(PREVIEW_STREAM))
                self.getControl(self.C_STREAM_FAVOURITES_PREVIEW).setLabel(strings(PREVIEW_STREAM))
                self.getControl(self.C_STREAM_STRM_PREVIEW).setLabel(strings(PREVIEW_STREAM))
                return
            # Pick the stream from whichever tab is currently visible.
            stream = None
            visible = self.getControl(self.C_STREAM_VISIBILITY_MARKER).getLabel()
            if visible == self.VISIBLE_ADDONS:
                listControl = self.getControl(self.C_STREAM_ADDONS_STREAMS)
                item = listControl.getSelectedItem()
                if item:
                    stream = item.getProperty('stream')
            elif visible == self.VISIBLE_FAVOURITES:
                listControl = self.getControl(self.C_STREAM_FAVOURITES)
                item = listControl.getSelectedItem()
                if item:
                    stream = item.getProperty('stream')
            elif visible == self.VISIBLE_STRM:
                stream = self.strmFile
            if stream is not None:
                self.player.play(item=stream, windowed=True)
                if self.player.isPlaying():
                    self.getControl(self.C_STREAM_ADDONS_PREVIEW).setLabel(strings(STOP_PREVIEW))
                    self.getControl(self.C_STREAM_FAVOURITES_PREVIEW).setLabel(strings(STOP_PREVIEW))
                    self.getControl(self.C_STREAM_STRM_PREVIEW).setLabel(strings(STOP_PREVIEW))
    def onFocus(self, controlId):
        """Track the active tab in the visibility marker label."""
        if controlId == self.C_STREAM_STRM_TAB:
            self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_STRM)
        elif controlId == self.C_STREAM_FAVOURITES_TAB:
            self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_FAVOURITES)
        elif controlId == self.C_STREAM_ADDONS_TAB:
            self.getControl(self.C_STREAM_VISIBILITY_MARKER).setLabel(self.VISIBLE_ADDONS)
    def updateAddonInfo(self):
        """
        Refresh the name/description/stream-list panels for the addon
        currently selected in the addon list. Skips work when the selection
        has not changed since the last call.
        """
        listControl = self.getControl(self.C_STREAM_ADDONS)
        item = listControl.getSelectedItem()
        if item is None:
            return
        if item.getProperty('addon_id') == self.previousAddonId:
            return
        self.previousAddonId = item.getProperty('addon_id')
        addon = xbmcaddon.Addon(id=item.getProperty('addon_id'))
        self.getControl(self.C_STREAM_ADDONS_NAME).setLabel('[B]%s[/B]' % addon.getAddonInfo('name'))
        self.getControl(self.C_STREAM_ADDONS_DESCRIPTION).setText(addon.getAddonInfo('description'))
        streams = self.streamingService.getAddonStreams(item.getProperty('addon_id'))
        items = list()
        for (label, stream) in streams:
            item = xbmcgui.ListItem(label)
            item.setProperty('stream', stream)
            items.append(item)
        listControl = self.getControl(StreamSetupDialog.C_STREAM_ADDONS_STREAMS)
        listControl.reset()
        listControl.addItems(items)
class ChooseStreamAddonDialog(xbmcgui.WindowXMLDialog):
    """
    Dialog presenting one stream per addon; the chosen stream URL is
    exposed via self.stream after close() (None when cancelled).
    """
    C_SELECTION_LIST = 1000
    def __new__(cls, addons):
        return super(ChooseStreamAddonDialog, cls).__new__(cls, 'script-tvguide-streamaddon.xml', ADDON.getAddonInfo('path'), SKIN)
    def __init__(self, addons):
        """
        :param addons: iterable of (addon_id, label, stream_url) tuples
        """
        super(ChooseStreamAddonDialog, self).__init__()
        self.addons = addons
        self.stream = None
    def onInit(self):
        """Fill the selection list with one entry per addon stream."""
        items = list()
        for id, label, url in self.addons:
            addon = xbmcaddon.Addon(id)
            item = xbmcgui.ListItem(label, addon.getAddonInfo('name'), addon.getAddonInfo('icon'))
            item.setProperty('stream', url)
            items.append(item)
        listControl = self.getControl(ChooseStreamAddonDialog.C_SELECTION_LIST)
        listControl.addItems(items)
        self.setFocus(listControl)
    def onAction(self, action):
        """Close (without selection) on back/parent actions."""
        if action.getId() in [ACTION_PARENT_DIR, ACTION_PREVIOUS_MENU, KEY_NAV_BACK]:
            self.close()
    def onClick(self, controlId):
        """Record the selected stream URL and close."""
        if controlId == ChooseStreamAddonDialog.C_SELECTION_LIST:
            listControl = self.getControl(ChooseStreamAddonDialog.C_SELECTION_LIST)
            self.stream = listControl.getSelectedItem().getProperty('stream')
            self.close()
    def onFocus(self, controlId):
        pass
| true | true |
f72fd4f07817e53144026d6614cb8968d8cb124e | 2,873 | py | Python | setup.py | rupanshi-chawda/nm-theme | d909d7f89d6b0bca49d6d90ed50d087bab41b912 | [
"BSD-3-Clause"
] | null | null | null | setup.py | rupanshi-chawda/nm-theme | d909d7f89d6b0bca49d6d90ed50d087bab41b912 | [
"BSD-3-Clause"
] | null | null | null | setup.py | rupanshi-chawda/nm-theme | d909d7f89d6b0bca49d6d90ed50d087bab41b912 | [
"BSD-3-Clause"
] | null | null | null | """
nm-theme setup
"""
import json
import sys
from pathlib import Path
import setuptools
HERE = Path(__file__).parent.resolve()
# Get the package info from package.json
pkg_json = json.loads((HERE / "package.json").read_bytes())
# The name of the project
name = "nm-theme"
lab_path = (HERE / pkg_json["jupyterlab"]["outputDir"])
# Representative files that should exist after a successful build
ensured_targets = [
    str(lab_path / "package.json"),
    str(lab_path / "static/style.js")
]
labext_name = pkg_json["name"]
# Install the built labextension assets plus install.json under
# share/jupyter/labextensions/<name> so JupyterLab can discover them.
data_files_spec = [
    ("share/jupyter/labextensions/%s" % labext_name, str(lab_path.relative_to(HERE)), "**"),
    ("share/jupyter/labextensions/%s" % labext_name, str("."), "install.json"),
]
long_description = (HERE / "README.md").read_text()
# Normalize npm-style pre-release tags to PEP 440 (e.g. 1.0.0-alpha.1 -> 1.0.0a1)
version = (
    pkg_json["version"]
    .replace("-alpha.", "a")
    .replace("-beta.", "b")
    .replace("-rc.", "rc")
)
setup_args = dict(
    name=name,
    version=version,
    url=pkg_json["homepage"],
    author=pkg_json["author"]["name"],
    author_email=pkg_json["author"]["email"],
    description=pkg_json["description"],
    license=pkg_json["license"],
    license_file="LICENSE",
    long_description=long_description,
    long_description_content_type="text/markdown",
    packages=setuptools.find_packages(),
    install_requires=[],
    zip_safe=False,
    include_package_data=True,
    python_requires=">=3.7",
    platforms="Linux, Mac OS X, Windows",
    keywords=["Jupyter", "JupyterLab", "JupyterLab3"],
    classifiers=[
        "License :: OSI Approved :: BSD License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Framework :: Jupyter",
        "Framework :: Jupyter :: JupyterLab",
        "Framework :: Jupyter :: JupyterLab :: 3",
        "Framework :: Jupyter :: JupyterLab :: Extensions",
        "Framework :: Jupyter :: JupyterLab :: Extensions :: Prebuilt",
    ],
)
# jupyter-packaging is optional: without it, metadata-only invocations
# (--name/--version) still work, but a real build is refused below.
try:
    from jupyter_packaging import (
        wrap_installers,
        npm_builder,
        get_data_files
    )
    post_develop = npm_builder(
        build_cmd="install:extension", source_dir="src", build_dir=lab_path
    )
    setup_args["cmdclass"] = wrap_installers(post_develop=post_develop, ensured_targets=ensured_targets)
    setup_args["data_files"] = get_data_files(data_files_spec)
except ImportError as e:
    import logging
    logging.basicConfig(format="%(levelname)s: %(message)s")
    logging.warning("Build tool `jupyter-packaging` is missing. Install it with pip or conda.")
    if not ("--name" in sys.argv or "--version" in sys.argv):
        raise e
if __name__ == "__main__":
    setuptools.setup(**setup_args)
| 29.927083 | 104 | 0.655761 | import json
import sys
from pathlib import Path
import setuptools
HERE = Path(__file__).parent.resolve()
pkg_json = json.loads((HERE / "package.json").read_bytes())
name = "nm-theme"
lab_path = (HERE / pkg_json["jupyterlab"]["outputDir"])
ensured_targets = [
str(lab_path / "package.json"),
str(lab_path / "static/style.js")
]
labext_name = pkg_json["name"]
data_files_spec = [
("share/jupyter/labextensions/%s" % labext_name, str(lab_path.relative_to(HERE)), "**"),
("share/jupyter/labextensions/%s" % labext_name, str("."), "install.json"),
]
long_description = (HERE / "README.md").read_text()
version = (
pkg_json["version"]
.replace("-alpha.", "a")
.replace("-beta.", "b")
.replace("-rc.", "rc")
)
setup_args = dict(
name=name,
version=version,
url=pkg_json["homepage"],
author=pkg_json["author"]["name"],
author_email=pkg_json["author"]["email"],
description=pkg_json["description"],
license=pkg_json["license"],
license_file="LICENSE",
long_description=long_description,
long_description_content_type="text/markdown",
packages=setuptools.find_packages(),
install_requires=[],
zip_safe=False,
include_package_data=True,
python_requires=">=3.7",
platforms="Linux, Mac OS X, Windows",
keywords=["Jupyter", "JupyterLab", "JupyterLab3"],
classifiers=[
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Framework :: Jupyter",
"Framework :: Jupyter :: JupyterLab",
"Framework :: Jupyter :: JupyterLab :: 3",
"Framework :: Jupyter :: JupyterLab :: Extensions",
"Framework :: Jupyter :: JupyterLab :: Extensions :: Prebuilt",
],
)
try:
from jupyter_packaging import (
wrap_installers,
npm_builder,
get_data_files
)
post_develop = npm_builder(
build_cmd="install:extension", source_dir="src", build_dir=lab_path
)
setup_args["cmdclass"] = wrap_installers(post_develop=post_develop, ensured_targets=ensured_targets)
setup_args["data_files"] = get_data_files(data_files_spec)
except ImportError as e:
import logging
logging.basicConfig(format="%(levelname)s: %(message)s")
logging.warning("Build tool `jupyter-packaging` is missing. Install it with pip or conda.")
if not ("--name" in sys.argv or "--version" in sys.argv):
raise e
if __name__ == "__main__":
setuptools.setup(**setup_args)
| true | true |
f72fd53943b50711edaa6f2f5b0e426773997a03 | 8,917 | py | Python | service_clients/aws/s3_client.py | radzhome/python-service-clients | dd17e74217a9412b1b78c90433bfced08733fd88 | [
"BSD-2-Clause"
] | 2 | 2019-04-18T05:29:32.000Z | 2019-11-01T22:58:56.000Z | service_clients/aws/s3_client.py | radzhome/python-service-clients | dd17e74217a9412b1b78c90433bfced08733fd88 | [
"BSD-2-Clause"
] | null | null | null | service_clients/aws/s3_client.py | radzhome/python-service-clients | dd17e74217a9412b1b78c90433bfced08733fd88 | [
"BSD-2-Clause"
] | null | null | null | from __future__ import unicode_literals
"""
S3 bucket CRUD operations core module
"""
import logging
import time
import boto3
import botocore
from botocore.client import Config
class S3Client:  # pragma: no cover
    """
    Encapsulates S3 file operations (read/write/upload/download/list/remove)
    with simple reconnect and retry handling.

    Not covered by unit tests; exercised by integration tests since it
    talks to an external service.
    """

    S3_DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.000Z'  # Not used
    RECONNECT_SLEEP_SECS = 0.5
    CONN_RETRIES = 10
    CONN_CONFIG = Config(connect_timeout=5, retries={'max_attempts': 0})

    def __init__(self, config, reconnect_sleep_secs=RECONNECT_SLEEP_SECS, conn_retries=CONN_RETRIES):
        """
        Load config from passed params or override with defaults.

        :param config: dict with access_key_id, secret_access_key, aws_region
                       and an optional bucket_name
        :param reconnect_sleep_secs: float, delay before reconnect attempts
        :param conn_retries: int, max number of connection attempts
        :return: None
        """
        self.config = config
        self.access_key_id = self.config['access_key_id']
        self.secret_access_key = self.config['secret_access_key']
        self.aws_region = self.config['aws_region']
        self.bucket_name = self.config.get('bucket_name')  # Optional bucket name
        self.RECONNECT_SLEEP_SECS = reconnect_sleep_secs
        self.CONN_RETRIES = conn_retries
        self.connection_attempt = 0
        self.connection = None
        self.bucket = None
        self.connect(run_get_bucket=bool(self.bucket_name))

    def connect(self, run_get_bucket=False):
        """
        Create the S3 resource connection for the configured region and,
        optionally, resolve the default bucket object.

        Failures are logged and swallowed until CONN_RETRIES attempts have
        been made; after that the exception propagates.

        :param run_get_bucket: bool, run (or skip) getting the bucket object
        :return: None
        """
        try:
            self.connection_attempt += 1
            self.connection = boto3.resource('s3', region_name=self.aws_region,
                                             aws_access_key_id=self.access_key_id,
                                             aws_secret_access_key=self.secret_access_key,
                                             config=self.CONN_CONFIG)
            if run_get_bucket:
                self.bucket = self._get_bucket()
        except Exception as e:
            logging.exception("S3Client.connect failed with params {}, error {}".format(self.config, e))
            if self.connection_attempt >= self.CONN_RETRIES:
                raise

    def _get_bucket(self, bucket_name=None):
        """
        Return a bucket resource for the given (or default) bucket name.

        NOTE: boto3 creates the bucket object lazily; a NoSuchBucket error
        only surfaces once the bucket is actually queried (see list()).

        :param bucket_name: str, bucket name (optional)
        :return: bucket resource
        """
        try:
            bucket = self.connection.Bucket(name=bucket_name or self.bucket_name)
        except Exception as e:
            # I.e. gaierror: [Errno -2] Name or service not known
            logging.exception("S3Client.get_bucket unable to get bucket {}, error {}".format(self.bucket_name, e))
            raise
        return bucket

    def list(self, bucket_name=None):
        """
        List the contents of a bucket.

        :param bucket_name: str, bucket name (optional)
        :return: list of s3.ObjectSummary, or None when the bucket is missing
        """
        if bucket_name:
            bucket = self._get_bucket(bucket_name)
        else:
            bucket = self.bucket
        if not bucket:
            logging.warning("S3Client.list bucket not found, {}".format(bucket_name or self.bucket_name))
            result = None
        else:
            try:
                result = list(bucket.objects.all())
            except botocore.exceptions.ClientError as e:
                if e.response['Error']['Code'] == "NoSuchBucket":
                    logging.warning("S3Client.list no such bucket {}".format(bucket_name or self.bucket_name))
                    result = None
                else:
                    raise
        return result

    def read(self, key, bucket_name=None):
        """
        Get the contents of a key (file) from S3.

        On connection errors the client reconnects and retries (recursively,
        after a short sleep).

        :param key: str, bucket key filename
        :param bucket_name: str, bucket name (optional)
        :return: str or bytes contents of the key, or None when the key
                 does not exist
        """
        try:
            obj = self.connection.Object(key=key, bucket_name=bucket_name or self.bucket_name, )
            contents = obj.get()['Body'].read()
            try:
                contents = contents.decode('utf-8')
            except UnicodeDecodeError:
                # Binary payload - return the raw bytes unchanged.
                logging.debug("S3Client.read key cannot be decoded using utf-8, leaving raw. {}".format(key))
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == "NoSuchKey":
                logging.warning("S3Client.read no such key {}".format(key))
                contents = None
            else:
                raise
        except Exception as e:  # Retry in-case we have a connection error
            logging.exception("S3Client.read failed for key {}, error {}".format(key, e))
            time.sleep(self.RECONNECT_SLEEP_SECS)
            self.connect()
            contents = self.read(key)
        return contents

    def write(self, key, contents, bucket_name=None):
        """
        Write contents to a key (file) in S3.

        :param key: str, bucket key filename
        :param contents: str/bytes, contents to save to the key
        :param bucket_name: str, bucket name (optional)
        :return: dict with 'file_name' on success, None on failure
        """
        output = response = None
        try:
            response = self.connection.Object(key=key, bucket_name=bucket_name or self.bucket_name).put(Body=contents)
            output = {
                'file_name': key,
            }
        except Exception as e:
            logging.exception("S3Client.write failed for key {}, error {}, response {}".format(key, e, response))
        return output

    def upload(self, key, origin_path, bucket_name=None):
        """
        Upload a local file to S3 under the given key.

        :param key: str, bucket key filename
        :param origin_path: str, path to the local origin file
        :param bucket_name: str, bucket name (optional)
        :return: bool, success
        """
        result = True
        try:
            # Use a context manager so the file handle is always closed;
            # the previous implementation leaked the handle.
            with open(origin_path, 'rb') as file_body:
                self.connection.Bucket(bucket_name or self.bucket_name).put_object(Key=key, Body=file_body)
        except Exception as e:
            logging.exception("S3Client.upload failed for key {}, error {} ".format(key, e))
            result = False
        return result

    def download(self, key, destination, bucket_name=None):
        """
        Download a key (file) from S3 to a local destination path.

        On connection errors the client reconnects and retries (recursively,
        after a short sleep).

        :param key: str, bucket key filename
        :param destination: str, path to the local destination file
        :param bucket_name: str, bucket name (optional)
        :return: bool, success
        """
        result = True
        if bucket_name:
            bucket = self._get_bucket(bucket_name)
        else:
            bucket = self.bucket
        if not bucket:
            logging.warning("S3Client.download bucket not found, {}".format(bucket_name or self.bucket_name))
            # Bug fix: return early - the previous code fell through and
            # called download_file on None, raising AttributeError.
            return False
        try:
            bucket.download_file(key, destination)
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == "404":
                logging.error("S3Client.download bucket missing key file {}".format(key))
            else:
                raise
        except Exception as e:
            logging.warning("S3Client.download failed for key {} to {}, error {}, retrying".format(key, destination, e))
            time.sleep(self.RECONNECT_SLEEP_SECS)
            self.connect()
            result = self.download(key, destination)
        return result

    def remove(self, keys, bucket_name=None):
        """
        Delete the given keys from the given (or default) bucket.

        :param keys: list of str key names
        :param bucket_name: str, bucket name (optional)
        :return: bool, success
        """
        if bucket_name:
            bucket = self._get_bucket(bucket_name)
        else:
            bucket = self.bucket
        if not bucket:
            logging.warning("S3Client.remove bucket not found, {}".format(bucket_name or self.bucket_name))
            # Bug fix: return early - the previous code fell through and
            # called delete_objects on None, raising AttributeError.
            return False
        logging.warning("S3Client.remove deleting keys {}".format(keys))
        objects = [{'Key': key} for key in keys]
        bucket.delete_objects(Delete={'Objects': objects})
        return True
| 37.309623 | 120 | 0.596165 | from __future__ import unicode_literals
import logging
import time
import boto3
import botocore
from botocore.client import Config
class S3Client:
    """Thin wrapper around a boto3 S3 resource.

    Offers list/read/write/upload/download/remove helpers against a default
    bucket (from ``config['bucket_name']``) or an explicit ``bucket_name``,
    with simple sleep-and-reconnect retry handling for transient failures.
    """

    S3_DATE_FORMAT = '%Y-%m-%dT%H:%M:%S.000Z'
    RECONNECT_SLEEP_SECS = 0.5
    CONN_RETRIES = 10
    CONN_CONFIG = Config(connect_timeout=5, retries={'max_attempts': 0})

    def __init__(self, config, reconnect_sleep_secs=RECONNECT_SLEEP_SECS, conn_retries=CONN_RETRIES):
        """Store credentials/region from ``config`` and open the connection.

        :param config: dict with 'access_key_id', 'secret_access_key',
            'aws_region' and optionally 'bucket_name'
        :param reconnect_sleep_secs: float, pause before a retry
        :param conn_retries: int, max connection attempts before re-raising
        """
        self.config = config
        self.access_key_id = self.config['access_key_id']
        self.secret_access_key = self.config['secret_access_key']
        self.aws_region = self.config['aws_region']
        self.bucket_name = self.config.get('bucket_name')
        self.RECONNECT_SLEEP_SECS = reconnect_sleep_secs
        self.CONN_RETRIES = conn_retries
        self.connection_attempt = 0
        self.connection = None
        self.bucket = None
        # Only resolve the default bucket when one was configured.
        self.connect(run_get_bucket=bool(self.bucket_name))

    def connect(self, run_get_bucket=False):
        """Create the boto3 S3 resource; re-raise after CONN_RETRIES failures."""
        try:
            self.connection_attempt += 1
            self.connection = boto3.resource('s3', region_name=self.aws_region,
                                             aws_access_key_id=self.access_key_id,
                                             aws_secret_access_key=self.secret_access_key,
                                             config=self.CONN_CONFIG)
            if run_get_bucket:
                self.bucket = self._get_bucket()
        except Exception as e:
            logging.exception("S3Client.connect failed with params {}, error {}".format(self.config, e))
            if self.connection_attempt >= self.CONN_RETRIES:
                raise

    def _get_bucket(self, bucket_name=None):
        """Return the Bucket resource for bucket_name (default: configured one)."""
        try:
            bucket = self.connection.Bucket(name=bucket_name or self.bucket_name)
        except Exception as e:
            logging.exception("S3Client.get_bucket unable to get bucket {}, error {}".format(self.bucket_name, e))
            raise
        return bucket

    def _resolve_bucket(self, bucket_name=None):
        """Return the explicitly requested bucket, else the default; may be None."""
        if bucket_name:
            return self._get_bucket(bucket_name)
        return self.bucket

    def list(self, bucket_name=None):
        """Return all objects in the bucket, or None when it is missing."""
        bucket = self._resolve_bucket(bucket_name)
        if not bucket:
            # Fixed copy-pasted log prefix (said "S3Client.remove" before).
            logging.warning("S3Client.list bucket not found, {}".format(bucket_name or self.bucket_name))
            return None
        try:
            result = list(bucket.objects.all())
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == "NoSuchBucket":
                logging.warning("S3Client.list no such bucket {}".format(bucket_name or self.bucket_name))
                result = None
            else:
                raise
        return result

    def read(self, key, bucket_name=None):
        """Return the contents of key: utf-8 str when decodable, else raw bytes.

        Returns None when the key does not exist; other errors trigger a
        reconnect and a recursive retry.
        """
        try:
            obj = self.connection.Object(key=key, bucket_name=bucket_name or self.bucket_name)
            contents = obj.get()['Body'].read()
            try:
                contents = contents.decode('utf-8')
            except UnicodeDecodeError:
                logging.debug("S3Client.read key cannot be decoded using utf-8, leaving raw. {}".format(key))
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == "NoSuchKey":
                logging.warning("S3Client.read no such key {}".format(key))
                contents = None
            else:
                raise
        except Exception as e:
            logging.exception("S3Client.read failed for key {}, error {}".format(key, e))
            time.sleep(self.RECONNECT_SLEEP_SECS)
            self.connect()
            # Keep targeting the same bucket on retry (was dropped before).
            contents = self.read(key, bucket_name=bucket_name)
        return contents

    def write(self, key, contents, bucket_name=None):
        """Write contents under key; return {'file_name': key} or None on error."""
        output = response = None
        try:
            response = self.connection.Object(key=key, bucket_name=bucket_name or self.bucket_name).put(Body=contents)
            output = {
                'file_name': key,
            }
        except Exception as e:
            logging.exception("S3Client.write failed for key {}, error {}, response {}".format(key, e, response))
        return output

    def upload(self, key, origin_path, bucket_name=None):
        """Upload the local file at origin_path under key; return success bool."""
        result = True
        try:
            # 'with' guarantees the file handle is closed (it leaked before).
            with open(origin_path, 'rb') as file_body:
                self.connection.Bucket(bucket_name or self.bucket_name).put_object(Key=key, Body=file_body)
        except Exception as e:
            logging.exception("S3Client.upload failed for key {}, error {} ".format(key, e))
            result = False
        return result

    def download(self, key, destination, bucket_name=None):
        """Download key into the local file destination; return success bool."""
        bucket = self._resolve_bucket(bucket_name)
        if not bucket:
            # Return early: download_file on None raised AttributeError before.
            logging.warning("S3Client.download bucket not found, {}".format(bucket_name or self.bucket_name))
            return False
        result = True
        try:
            bucket.download_file(key, destination)
        except botocore.exceptions.ClientError as e:
            if e.response['Error']['Code'] == "404":
                logging.error("S3Client.download bucket missing key file {}".format(key))
                # A missing key is a failed download (previously returned True).
                result = False
            else:
                raise
        except Exception as e:
            logging.warning("S3Client.download failed for key {} to {}, error {}, retrying".format(key, destination, e))
            time.sleep(self.RECONNECT_SLEEP_SECS)
            self.connect()
            result = self.download(key, destination, bucket_name=bucket_name)
        return result

    def remove(self, keys, bucket_name=None):
        """Batch-delete keys from the bucket; return success bool."""
        bucket = self._resolve_bucket(bucket_name)
        if not bucket:
            # Nothing to delete against; bail out instead of crashing below.
            logging.warning("S3Client.remove bucket not found, {}".format(bucket_name or self.bucket_name))
            return False
        logging.warning("S3Client.remove deleting keys {}".format(keys))
        objects = [{'Key': key} for key in keys]
        bucket.delete_objects(Delete={'Objects': objects})
        return True
| true | true |
f72fd5b91881f72c58b41d9a2321dc53142923f3 | 1,234 | py | Python | acmicpc/9093/9093-1.py | love-adela/algorithm | 4ccd02173c96f8369962f1fd4e5166a221690fa2 | [
"MIT"
] | 3 | 2019-03-09T05:19:23.000Z | 2019-04-06T09:26:36.000Z | acmicpc/9093/9093-1.py | love-adela/algorithm | 4ccd02173c96f8369962f1fd4e5166a221690fa2 | [
"MIT"
] | 1 | 2020-02-23T10:38:04.000Z | 2020-02-23T10:38:04.000Z | acmicpc/9093/9093-1.py | love-adela/algorithm | 4ccd02173c96f8369962f1fd4e5166a221690fa2 | [
"MIT"
] | 1 | 2019-05-22T13:47:53.000Z | 2019-05-22T13:47:53.000Z | # Stack 활용해서 풀기
N = int(input())
class Node(object):
    """A singly linked list node: a stored value plus a link to the next node."""

    def __init__(self, value=None, next=None):
        # Both attributes default to None so a bare Node() is a valid tail.
        self.value, self.next = value, next
class Stack(object):
    """LIFO stack backed by a singly linked list of Node objects.

    push/pop/peek all operate on the head node, so every operation is O(1).
    """

    def __init__(self):
        self.head = None   # top of the stack (a Node) or None when empty
        self.count = 0     # number of stored elements

    def is_empty(self):
        """Return True when the stack holds no elements."""
        return not bool(self.head)

    def push(self, item):
        """Place item on top of the stack."""
        self.head = Node(item, self.head)
        self.count += 1

    def pop(self):
        """Remove and return the top value; prints a notice (returns None) when empty."""
        if self.count > 0:
            node = self.head
            self.head = node.next
            self.count -= 1
            return node.value
        else:
            print('Stack is empty')

    def peek(self):
        """Return the top value without removing it; prints a notice when empty."""
        if self.count > 0:
            return self.head.value
        else:
            print('Stack is empty')

    def size(self):
        """Return the number of stored elements.

        Bug fix: the original returned ``self.size`` — the bound method
        itself — instead of the element count.
        """
        return self.count
def reverse_with_stack(sentence):
    """Print the sentence with every whitespace-delimited word reversed.

    Characters are pushed onto a stack until a separator (space or newline)
    is met; popping then emits the word backwards, followed by the separator
    itself. Uses a builtin list as the stack (idiomatic and O(1) per op)
    instead of the hand-rolled Stack/Node classes; output is identical.
    """
    pending = []
    for ch in sentence:
        if ch == ' ' or ch == '\n':
            while pending:
                print(pending.pop(), end='')
            print(ch, end='')
        else:
            pending.append(ch)
# Consume N lines from stdin; the appended '\n' acts as the final separator
# so reverse_with_stack flushes the last word of each line.
while N:
    sentence = input()
    sentence += '\n'
    reverse_with_stack(sentence)
    N-=1
| 21.649123 | 51 | 0.502431 |
N = int(input())
class Node(object):
    """A singly linked list node: a stored value plus a link to the next node."""

    def __init__(self, value=None, next=None):
        # Both attributes default to None so a bare Node() is a valid tail.
        self.value, self.next = value, next
class Stack(object):
    """LIFO stack backed by a singly linked list of Node objects.

    push/pop/peek all operate on the head node, so every operation is O(1).
    """

    def __init__(self):
        self.head = None   # top of the stack (a Node) or None when empty
        self.count = 0     # number of stored elements

    def is_empty(self):
        """Return True when the stack holds no elements."""
        return not bool(self.head)

    def push(self, item):
        """Place item on top of the stack."""
        self.head = Node(item, self.head)
        self.count += 1

    def pop(self):
        """Remove and return the top value; prints a notice (returns None) when empty."""
        if self.count > 0:
            node = self.head
            self.head = node.next
            self.count -= 1
            return node.value
        else:
            print('Stack is empty')

    def peek(self):
        """Return the top value without removing it; prints a notice when empty."""
        if self.count > 0:
            return self.head.value
        else:
            print('Stack is empty')

    def size(self):
        """Return the number of stored elements.

        Bug fix: the original returned ``self.size`` — the bound method
        itself — instead of the element count.
        """
        return self.count
def reverse_with_stack(sentence):
    """Print the sentence with every whitespace-delimited word reversed.

    Characters are pushed onto a stack until a separator (space or newline)
    is met; popping then emits the word backwards, followed by the separator
    itself. Uses a builtin list as the stack (idiomatic and O(1) per op)
    instead of the hand-rolled Stack/Node classes; output is identical.
    """
    pending = []
    for ch in sentence:
        if ch == ' ' or ch == '\n':
            while pending:
                print(pending.pop(), end='')
            print(ch, end='')
        else:
            pending.append(ch)
# Consume N lines from stdin; the appended '\n' acts as the final separator
# so reverse_with_stack flushes the last word of each line.
while N:
    sentence = input()
    sentence += '\n'
    reverse_with_stack(sentence)
    N-=1
| true | true |
f72fd5e243d2a0ee9ab66cb14a1e4f2f75b8f2b5 | 15,989 | py | Python | lib/surface/container/node_pools/create.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | [
"Apache-2.0"
] | 2 | 2019-11-10T09:17:07.000Z | 2019-12-18T13:44:08.000Z | lib/surface/container/node_pools/create.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | [
"Apache-2.0"
] | null | null | null | lib/surface/container/node_pools/create.py | google-cloud-sdk-unofficial/google-cloud-sdk | 2a48a04df14be46c8745050f98768e30474a1aac | [
"Apache-2.0"
] | 1 | 2020-07-25T01:40:19.000Z | 2020-07-25T01:40:19.000Z | # -*- coding: utf-8 -*- #
# Copyright 2014 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create node pool command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.compute import metadata_utils
from googlecloudsdk.api_lib.compute import utils
from googlecloudsdk.api_lib.container import api_adapter
from googlecloudsdk.api_lib.container import util
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.container import constants
from googlecloudsdk.command_lib.container import container_command_util as cmd_util
from googlecloudsdk.command_lib.container import flags
from googlecloudsdk.core import log
DETAILED_HELP = {
'DESCRIPTION':
"""\
*{command}* facilitates the creation of a node pool in a Google
Kubernetes Engine cluster. A variety of options exists to customize the
node configuration and the number of nodes created.
""",
'EXAMPLES':
"""\
To create a new node pool "node-pool-1" with the default options in the
cluster "sample-cluster", run:
$ {command} node-pool-1 --cluster=sample-cluster
The new node pool will show up in the cluster after all the nodes have
been provisioned.
To create a node pool with 5 nodes, run:
$ {command} node-pool-1 --cluster=sample-cluster --num-nodes=5
""",
}
WARN_WINDOWS_SAC_SUPPORT_LIFECYCLE = (
'Windows SAC node pools must be upgraded regularly to remain operational. '
'Please refer to https://cloud.google.com/kubernetes-engine/docs/how-to/'
'creating-a-cluster-windows#choose_your_windows_server_node_image for more '
'information.')
def _Args(parser):
"""Register flags for this command.
Args:
parser: An argparse.ArgumentParser-like object. It is mocked out in order to
capture some information, but behaves like an ArgumentParser.
"""
flags.AddNodePoolNameArg(parser, 'The name of the node pool to create.')
flags.AddNodePoolClusterFlag(parser, 'The cluster to add the node pool to.')
# Timeout in seconds for operation
parser.add_argument(
'--timeout',
type=int,
default=1800,
hidden=True,
help='THIS ARGUMENT NEEDS HELP TEXT.')
parser.add_argument(
'--num-nodes',
type=int,
help='The number of nodes in the node pool in each of the '
'cluster\'s zones.',
default=3)
flags.AddMachineTypeFlag(parser)
parser.add_argument(
'--disk-size',
type=arg_parsers.BinarySize(lower_bound='10GB'),
help='Size for node VM boot disks in GB. Defaults to 100GB.')
flags.AddImageTypeFlag(parser, 'node pool')
flags.AddImageFlag(parser, hidden=True)
flags.AddImageProjectFlag(parser, hidden=True)
flags.AddImageFamilyFlag(parser, hidden=True)
flags.AddNodeLabelsFlag(parser, for_node_pool=True)
flags.AddTagsFlag(
parser, """\
Applies the given Compute Engine tags (comma separated) on all nodes in the new
node-pool. Example:
$ {command} node-pool-1 --cluster=example-cluster --tags=tag1,tag2
New nodes, including ones created by resize or recreate, will have these tags
on the Compute Engine API instance object and can be used in firewall rules.
See https://cloud.google.com/sdk/gcloud/reference/compute/firewall-rules/create
for examples.
""")
parser.display_info.AddFormat(util.NODEPOOLS_FORMAT)
flags.AddNodeVersionFlag(parser)
flags.AddDiskTypeFlag(parser)
flags.AddMetadataFlags(parser)
flags.AddShieldedInstanceFlags(parser)
flags.AddNetworkConfigFlags(parser)
flags.AddThreadsPerCore(parser)
def ParseCreateNodePoolOptionsBase(args):
  """Translate the parsed create-node-pool flags into an options object.

  Args:
    args: an argparse namespace holding the command's flag values.

  Returns:
    An api_adapter.CreateNodePoolOptions populated from the flags.
  """
  auto_repair = cmd_util.GetAutoRepair(args)
  flags.WarnForNodeModification(args, auto_repair)
  flags.ValidateSurgeUpgradeSettings(args)
  node_metadata = metadata_utils.ConstructMetadataDict(
      args.metadata, args.metadata_from_file)
  return api_adapter.CreateNodePoolOptions(
      accelerators=args.accelerator,
      boot_disk_kms_key=args.boot_disk_kms_key,
      machine_type=args.machine_type,
      disk_size_gb=utils.BytesToGb(args.disk_size),
      scopes=args.scopes,
      node_version=args.node_version,
      num_nodes=args.num_nodes,
      local_ssd_count=args.local_ssd_count,
      tags=args.tags,
      threads_per_core=args.threads_per_core,
      node_labels=args.node_labels,
      node_taints=args.node_taints,
      enable_autoscaling=args.enable_autoscaling,
      max_nodes=args.max_nodes,
      min_cpu_platform=args.min_cpu_platform,
      min_nodes=args.min_nodes,
      image_type=args.image_type,
      image=args.image,
      image_project=args.image_project,
      image_family=args.image_family,
      preemptible=args.preemptible,
      enable_autorepair=auto_repair,
      enable_autoupgrade=cmd_util.GetAutoUpgrade(args),
      service_account=args.service_account,
      disk_type=args.disk_type,
      metadata=node_metadata,
      max_pods_per_node=args.max_pods_per_node,
      enable_autoprovisioning=args.enable_autoprovisioning,
      workload_metadata=args.workload_metadata,
      workload_metadata_from_node=args.workload_metadata_from_node,
      shielded_secure_boot=args.shielded_secure_boot,
      shielded_integrity_monitoring=args.shielded_integrity_monitoring,
      reservation_affinity=args.reservation_affinity,
      reservation=args.reservation,
      sandbox=args.sandbox,
      max_surge_upgrade=args.max_surge_upgrade,
      max_unavailable_upgrade=args.max_unavailable_upgrade,
      node_group=args.node_group,
      system_config_from_file=args.system_config_from_file,
      pod_ipv4_range=args.pod_ipv4_range,
      create_pod_ipv4_range=args.create_pod_ipv4_range,
      gvnic=args.enable_gvnic,
      enable_image_streaming=args.enable_image_streaming,
      spot=args.spot)
@base.ReleaseTracks(base.ReleaseTrack.GA)
class Create(base.CreateCommand):
"""Create a node pool in a running cluster."""
@staticmethod
def Args(parser):
_Args(parser)
flags.AddAcceleratorArgs(
parser, enable_gpu_partition=True, enable_gpu_time_sharing=False)
flags.AddBootDiskKmsKeyFlag(parser)
flags.AddClusterAutoscalingFlags(parser)
flags.AddLocalSSDFlag(parser)
flags.AddPreemptibleFlag(parser, for_node_pool=True)
flags.AddEnableAutoRepairFlag(parser, for_node_pool=True, for_create=True)
flags.AddMinCpuPlatformFlag(parser, for_node_pool=True)
flags.AddWorkloadMetadataFlag(parser)
flags.AddNodeTaintsFlag(parser, for_node_pool=True)
flags.AddNodePoolNodeIdentityFlags(parser)
flags.AddNodePoolAutoprovisioningFlag(parser, hidden=False)
flags.AddMaxPodsPerNodeFlag(parser, for_node_pool=True)
flags.AddEnableAutoUpgradeFlag(parser, for_node_pool=True, default=True)
flags.AddReservationAffinityFlags(parser, for_node_pool=True)
flags.AddSandboxFlag(parser)
flags.AddNodePoolLocationsFlag(parser, for_create=True)
flags.AddSurgeUpgradeFlag(parser, for_node_pool=True)
flags.AddMaxUnavailableUpgradeFlag(
parser, for_node_pool=True, is_create=True)
flags.AddSystemConfigFlag(parser, hidden=False)
flags.AddNodeGroupFlag(parser)
flags.AddEnableGvnicFlag(parser)
flags.AddEnableImageStreamingFlag(parser, for_node_pool=True)
flags.AddSpotFlag(parser, for_node_pool=True, hidden=True)
def ParseCreateNodePoolOptions(self, args):
ops = ParseCreateNodePoolOptionsBase(args)
ops.node_locations = args.node_locations
return ops
def Run(self, args):
"""This is what gets called when the user runs this command.
Args:
args: an argparse namespace. All the arguments that were provided to this
command invocation.
Returns:
Cluster message for the successfully created node pool.
Raises:
util.Error, if creation failed.
"""
adapter = self.context['api_adapter']
location_get = self.context['location_get']
location = location_get(args)
try:
pool_ref = adapter.ParseNodePool(args.name, location)
options = self.ParseCreateNodePoolOptions(args)
if options.accelerators is not None:
log.status.Print(constants.KUBERNETES_GPU_LIMITATION_MSG)
if not options.image_type:
log.warning('Starting with version 1.19, newly created node-pools '
'will have COS_CONTAINERD as the default node image '
'when no image type is specified.')
elif options.image_type.upper() == 'WINDOWS_SAC':
log.warning(WARN_WINDOWS_SAC_SUPPORT_LIFECYCLE)
operation_ref = adapter.CreateNodePool(pool_ref, options)
adapter.WaitForOperation(
operation_ref,
'Creating node pool {0}'.format(pool_ref.nodePoolId),
timeout_s=args.timeout)
pool = adapter.GetNodePool(pool_ref)
except apitools_exceptions.HttpError as error:
raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
log.CreatedResource(pool_ref)
return [pool]
@base.ReleaseTracks(base.ReleaseTrack.BETA)
class CreateBeta(Create):
"""Create a node pool in a running cluster."""
@staticmethod
def Args(parser):
_Args(parser)
flags.AddAcceleratorArgs(
parser, enable_gpu_partition=True, enable_gpu_time_sharing=True)
flags.AddClusterAutoscalingFlags(parser)
flags.AddLocalSSDsBetaFlags(parser, for_node_pool=True)
flags.AddBootDiskKmsKeyFlag(parser)
flags.AddPreemptibleFlag(parser, for_node_pool=True)
flags.AddEnableAutoRepairFlag(parser, for_node_pool=True, for_create=True)
flags.AddMinCpuPlatformFlag(parser, for_node_pool=True)
flags.AddWorkloadMetadataFlag(parser, use_mode=False)
flags.AddNodeTaintsFlag(parser, for_node_pool=True)
flags.AddNodePoolNodeIdentityFlags(parser)
flags.AddNodePoolAutoprovisioningFlag(parser, hidden=False)
flags.AddMaxPodsPerNodeFlag(parser, for_node_pool=True)
flags.AddEnableAutoUpgradeFlag(parser, for_node_pool=True, default=True)
flags.AddSandboxFlag(parser)
flags.AddNodePoolLocationsFlag(parser, for_create=True)
flags.AddSurgeUpgradeFlag(parser, for_node_pool=True, default=1)
flags.AddMaxUnavailableUpgradeFlag(
parser, for_node_pool=True, is_create=True)
flags.AddReservationAffinityFlags(parser, for_node_pool=True)
flags.AddSystemConfigFlag(parser, hidden=False)
flags.AddNodeGroupFlag(parser)
flags.AddEnableGcfsFlag(parser, for_node_pool=True)
flags.AddEnableImageStreamingFlag(parser, for_node_pool=True)
flags.AddNodePoolEnablePrivateNodes(parser, hidden=True)
flags.AddEnableGvnicFlag(parser)
flags.AddSpotFlag(parser, for_node_pool=True)
flags.AddPlacementTypeFlag(parser, for_node_pool=True, hidden=True)
flags.AddEnableRollingUpdateFlag(parser)
flags.AddEnableBlueGreenUpdateFlag(parser)
flags.AddStandardRolloutPolicyFlag(parser)
flags.AddNodePoolSoakDurationFlag(parser)
flags.AddMaintenanceIntervalFlag(parser, for_node_pool=True, hidden=True)
def ParseCreateNodePoolOptions(self, args):
ops = ParseCreateNodePoolOptionsBase(args)
flags.WarnForNodeVersionAutoUpgrade(args)
flags.ValidateSurgeUpgradeSettings(args)
ops.boot_disk_kms_key = args.boot_disk_kms_key
ops.sandbox = args.sandbox
ops.node_locations = args.node_locations
ops.system_config_from_file = args.system_config_from_file
ops.enable_gcfs = args.enable_gcfs
ops.enable_image_streaming = args.enable_image_streaming
ops.ephemeral_storage = args.ephemeral_storage
ops.enable_private_nodes = args.enable_private_nodes
ops.spot = args.spot
ops.placement_type = args.placement_type
ops.enable_blue_green_update = args.enable_blue_green_update
ops.enable_rolling_update = args.enable_rolling_update
ops.node_pool_soak_duration = args.node_pool_soak_duration
ops.standard_rollout_policy = args.standard_rollout_policy
ops.maintenance_interval = args.maintenance_interval
return ops
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class CreateAlpha(Create):
"""Create a node pool in a running cluster."""
def ParseCreateNodePoolOptions(self, args):
ops = ParseCreateNodePoolOptionsBase(args)
flags.WarnForNodeVersionAutoUpgrade(args)
flags.ValidateSurgeUpgradeSettings(args)
ops.local_ssd_volume_configs = args.local_ssd_volumes
ops.ephemeral_storage = args.ephemeral_storage
ops.boot_disk_kms_key = args.boot_disk_kms_key
ops.sandbox = args.sandbox
ops.linux_sysctls = args.linux_sysctls
ops.node_locations = args.node_locations
ops.system_config_from_file = args.system_config_from_file
ops.enable_gcfs = args.enable_gcfs
ops.enable_image_streaming = args.enable_image_streaming
ops.enable_private_nodes = args.enable_private_nodes
ops.spot = args.spot
ops.placement_type = args.placement_type
ops.enable_blue_green_update = args.enable_blue_green_update
ops.enable_rolling_update = args.enable_rolling_update
ops.node_pool_soak_duration = args.node_pool_soak_duration
ops.standard_rollout_policy = args.standard_rollout_policy
ops.maintenance_interval = args.maintenance_interval
return ops
@staticmethod
def Args(parser):
_Args(parser)
flags.AddAcceleratorArgs(
parser, enable_gpu_partition=True, enable_gpu_time_sharing=True)
flags.AddClusterAutoscalingFlags(parser)
flags.AddNodePoolAutoprovisioningFlag(parser, hidden=False)
flags.AddLocalSSDsAlphaFlags(parser, for_node_pool=True)
flags.AddBootDiskKmsKeyFlag(parser)
flags.AddPreemptibleFlag(parser, for_node_pool=True)
flags.AddEnableAutoRepairFlag(parser, for_node_pool=True, for_create=True)
flags.AddMinCpuPlatformFlag(parser, for_node_pool=True)
flags.AddWorkloadMetadataFlag(parser, use_mode=False)
flags.AddNodeTaintsFlag(parser, for_node_pool=True)
flags.AddNodePoolNodeIdentityFlags(parser)
flags.AddMaxPodsPerNodeFlag(parser, for_node_pool=True)
flags.AddSandboxFlag(parser)
flags.AddNodeGroupFlag(parser)
flags.AddEnableAutoUpgradeFlag(parser, for_node_pool=True, default=True)
flags.AddLinuxSysctlFlags(parser, for_node_pool=True)
flags.AddSurgeUpgradeFlag(parser, for_node_pool=True, default=1)
flags.AddMaxUnavailableUpgradeFlag(
parser, for_node_pool=True, is_create=True)
flags.AddNodePoolLocationsFlag(parser, for_create=True)
flags.AddSystemConfigFlag(parser, hidden=False)
flags.AddReservationAffinityFlags(parser, for_node_pool=True)
flags.AddEnableGcfsFlag(parser, for_node_pool=True)
flags.AddEnableImageStreamingFlag(parser, for_node_pool=True)
flags.AddNodePoolEnablePrivateNodes(parser, hidden=True)
flags.AddEnableGvnicFlag(parser)
flags.AddSpotFlag(parser, for_node_pool=True)
flags.AddPlacementTypeFlag(parser, for_node_pool=True, hidden=True)
flags.AddEnableRollingUpdateFlag(parser)
flags.AddEnableBlueGreenUpdateFlag(parser)
flags.AddStandardRolloutPolicyFlag(parser, for_node_pool=True)
flags.AddNodePoolSoakDurationFlag(parser, for_node_pool=True)
flags.AddMaintenanceIntervalFlag(parser, for_node_pool=True, hidden=True)
Create.detailed_help = DETAILED_HELP
| 41.52987 | 83 | 0.766965 |
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from apitools.base.py import exceptions as apitools_exceptions
from googlecloudsdk.api_lib.compute import metadata_utils
from googlecloudsdk.api_lib.compute import utils
from googlecloudsdk.api_lib.container import api_adapter
from googlecloudsdk.api_lib.container import util
from googlecloudsdk.calliope import arg_parsers
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.container import constants
from googlecloudsdk.command_lib.container import container_command_util as cmd_util
from googlecloudsdk.command_lib.container import flags
from googlecloudsdk.core import log
DETAILED_HELP = {
'DESCRIPTION':
"""\
*{command}* facilitates the creation of a node pool in a Google
Kubernetes Engine cluster. A variety of options exists to customize the
node configuration and the number of nodes created.
""",
'EXAMPLES':
"""\
To create a new node pool "node-pool-1" with the default options in the
cluster "sample-cluster", run:
$ {command} node-pool-1 --cluster=sample-cluster
The new node pool will show up in the cluster after all the nodes have
been provisioned.
To create a node pool with 5 nodes, run:
$ {command} node-pool-1 --cluster=sample-cluster --num-nodes=5
""",
}
WARN_WINDOWS_SAC_SUPPORT_LIFECYCLE = (
'Windows SAC node pools must be upgraded regularly to remain operational. '
'Please refer to https://cloud.google.com/kubernetes-engine/docs/how-to/'
'creating-a-cluster-windows#choose_your_windows_server_node_image for more '
'information.')
def _Args(parser):
flags.AddNodePoolNameArg(parser, 'The name of the node pool to create.')
flags.AddNodePoolClusterFlag(parser, 'The cluster to add the node pool to.')
parser.add_argument(
'--timeout',
type=int,
default=1800,
hidden=True,
help='THIS ARGUMENT NEEDS HELP TEXT.')
parser.add_argument(
'--num-nodes',
type=int,
help='The number of nodes in the node pool in each of the '
'cluster\'s zones.',
default=3)
flags.AddMachineTypeFlag(parser)
parser.add_argument(
'--disk-size',
type=arg_parsers.BinarySize(lower_bound='10GB'),
help='Size for node VM boot disks in GB. Defaults to 100GB.')
flags.AddImageTypeFlag(parser, 'node pool')
flags.AddImageFlag(parser, hidden=True)
flags.AddImageProjectFlag(parser, hidden=True)
flags.AddImageFamilyFlag(parser, hidden=True)
flags.AddNodeLabelsFlag(parser, for_node_pool=True)
flags.AddTagsFlag(
parser, """\
Applies the given Compute Engine tags (comma separated) on all nodes in the new
node-pool. Example:
$ {command} node-pool-1 --cluster=example-cluster --tags=tag1,tag2
New nodes, including ones created by resize or recreate, will have these tags
on the Compute Engine API instance object and can be used in firewall rules.
See https://cloud.google.com/sdk/gcloud/reference/compute/firewall-rules/create
for examples.
""")
parser.display_info.AddFormat(util.NODEPOOLS_FORMAT)
flags.AddNodeVersionFlag(parser)
flags.AddDiskTypeFlag(parser)
flags.AddMetadataFlags(parser)
flags.AddShieldedInstanceFlags(parser)
flags.AddNetworkConfigFlags(parser)
flags.AddThreadsPerCore(parser)
def ParseCreateNodePoolOptionsBase(args):
enable_autorepair = cmd_util.GetAutoRepair(args)
flags.WarnForNodeModification(args, enable_autorepair)
flags.ValidateSurgeUpgradeSettings(args)
metadata = metadata_utils.ConstructMetadataDict(args.metadata,
args.metadata_from_file)
return api_adapter.CreateNodePoolOptions(
accelerators=args.accelerator,
boot_disk_kms_key=args.boot_disk_kms_key,
machine_type=args.machine_type,
disk_size_gb=utils.BytesToGb(args.disk_size),
scopes=args.scopes,
node_version=args.node_version,
num_nodes=args.num_nodes,
local_ssd_count=args.local_ssd_count,
tags=args.tags,
threads_per_core=args.threads_per_core,
node_labels=args.node_labels,
node_taints=args.node_taints,
enable_autoscaling=args.enable_autoscaling,
max_nodes=args.max_nodes,
min_cpu_platform=args.min_cpu_platform,
min_nodes=args.min_nodes,
image_type=args.image_type,
image=args.image,
image_project=args.image_project,
image_family=args.image_family,
preemptible=args.preemptible,
enable_autorepair=enable_autorepair,
enable_autoupgrade=cmd_util.GetAutoUpgrade(args),
service_account=args.service_account,
disk_type=args.disk_type,
metadata=metadata,
max_pods_per_node=args.max_pods_per_node,
enable_autoprovisioning=args.enable_autoprovisioning,
workload_metadata=args.workload_metadata,
workload_metadata_from_node=args.workload_metadata_from_node,
shielded_secure_boot=args.shielded_secure_boot,
shielded_integrity_monitoring=args.shielded_integrity_monitoring,
reservation_affinity=args.reservation_affinity,
reservation=args.reservation,
sandbox=args.sandbox,
max_surge_upgrade=args.max_surge_upgrade,
max_unavailable_upgrade=args.max_unavailable_upgrade,
node_group=args.node_group,
system_config_from_file=args.system_config_from_file,
pod_ipv4_range=args.pod_ipv4_range,
create_pod_ipv4_range=args.create_pod_ipv4_range,
gvnic=args.enable_gvnic,
enable_image_streaming=args.enable_image_streaming,
spot=args.spot)
@base.ReleaseTracks(base.ReleaseTrack.GA)
class Create(base.CreateCommand):
@staticmethod
def Args(parser):
_Args(parser)
flags.AddAcceleratorArgs(
parser, enable_gpu_partition=True, enable_gpu_time_sharing=False)
flags.AddBootDiskKmsKeyFlag(parser)
flags.AddClusterAutoscalingFlags(parser)
flags.AddLocalSSDFlag(parser)
flags.AddPreemptibleFlag(parser, for_node_pool=True)
flags.AddEnableAutoRepairFlag(parser, for_node_pool=True, for_create=True)
flags.AddMinCpuPlatformFlag(parser, for_node_pool=True)
flags.AddWorkloadMetadataFlag(parser)
flags.AddNodeTaintsFlag(parser, for_node_pool=True)
flags.AddNodePoolNodeIdentityFlags(parser)
flags.AddNodePoolAutoprovisioningFlag(parser, hidden=False)
flags.AddMaxPodsPerNodeFlag(parser, for_node_pool=True)
flags.AddEnableAutoUpgradeFlag(parser, for_node_pool=True, default=True)
flags.AddReservationAffinityFlags(parser, for_node_pool=True)
flags.AddSandboxFlag(parser)
flags.AddNodePoolLocationsFlag(parser, for_create=True)
flags.AddSurgeUpgradeFlag(parser, for_node_pool=True)
flags.AddMaxUnavailableUpgradeFlag(
parser, for_node_pool=True, is_create=True)
flags.AddSystemConfigFlag(parser, hidden=False)
flags.AddNodeGroupFlag(parser)
flags.AddEnableGvnicFlag(parser)
flags.AddEnableImageStreamingFlag(parser, for_node_pool=True)
flags.AddSpotFlag(parser, for_node_pool=True, hidden=True)
def ParseCreateNodePoolOptions(self, args):
ops = ParseCreateNodePoolOptionsBase(args)
ops.node_locations = args.node_locations
return ops
def Run(self, args):
adapter = self.context['api_adapter']
location_get = self.context['location_get']
location = location_get(args)
try:
pool_ref = adapter.ParseNodePool(args.name, location)
options = self.ParseCreateNodePoolOptions(args)
if options.accelerators is not None:
log.status.Print(constants.KUBERNETES_GPU_LIMITATION_MSG)
if not options.image_type:
log.warning('Starting with version 1.19, newly created node-pools '
'will have COS_CONTAINERD as the default node image '
'when no image type is specified.')
elif options.image_type.upper() == 'WINDOWS_SAC':
log.warning(WARN_WINDOWS_SAC_SUPPORT_LIFECYCLE)
operation_ref = adapter.CreateNodePool(pool_ref, options)
adapter.WaitForOperation(
operation_ref,
'Creating node pool {0}'.format(pool_ref.nodePoolId),
timeout_s=args.timeout)
pool = adapter.GetNodePool(pool_ref)
except apitools_exceptions.HttpError as error:
raise exceptions.HttpException(error, util.HTTP_ERROR_FORMAT)
log.CreatedResource(pool_ref)
return [pool]
@base.ReleaseTracks(base.ReleaseTrack.BETA)
class CreateBeta(Create):
@staticmethod
def Args(parser):
_Args(parser)
flags.AddAcceleratorArgs(
parser, enable_gpu_partition=True, enable_gpu_time_sharing=True)
flags.AddClusterAutoscalingFlags(parser)
flags.AddLocalSSDsBetaFlags(parser, for_node_pool=True)
flags.AddBootDiskKmsKeyFlag(parser)
flags.AddPreemptibleFlag(parser, for_node_pool=True)
flags.AddEnableAutoRepairFlag(parser, for_node_pool=True, for_create=True)
flags.AddMinCpuPlatformFlag(parser, for_node_pool=True)
flags.AddWorkloadMetadataFlag(parser, use_mode=False)
flags.AddNodeTaintsFlag(parser, for_node_pool=True)
flags.AddNodePoolNodeIdentityFlags(parser)
flags.AddNodePoolAutoprovisioningFlag(parser, hidden=False)
flags.AddMaxPodsPerNodeFlag(parser, for_node_pool=True)
flags.AddEnableAutoUpgradeFlag(parser, for_node_pool=True, default=True)
flags.AddSandboxFlag(parser)
flags.AddNodePoolLocationsFlag(parser, for_create=True)
flags.AddSurgeUpgradeFlag(parser, for_node_pool=True, default=1)
flags.AddMaxUnavailableUpgradeFlag(
parser, for_node_pool=True, is_create=True)
flags.AddReservationAffinityFlags(parser, for_node_pool=True)
flags.AddSystemConfigFlag(parser, hidden=False)
flags.AddNodeGroupFlag(parser)
flags.AddEnableGcfsFlag(parser, for_node_pool=True)
flags.AddEnableImageStreamingFlag(parser, for_node_pool=True)
flags.AddNodePoolEnablePrivateNodes(parser, hidden=True)
flags.AddEnableGvnicFlag(parser)
flags.AddSpotFlag(parser, for_node_pool=True)
flags.AddPlacementTypeFlag(parser, for_node_pool=True, hidden=True)
flags.AddEnableRollingUpdateFlag(parser)
flags.AddEnableBlueGreenUpdateFlag(parser)
flags.AddStandardRolloutPolicyFlag(parser)
flags.AddNodePoolSoakDurationFlag(parser)
flags.AddMaintenanceIntervalFlag(parser, for_node_pool=True, hidden=True)
def ParseCreateNodePoolOptions(self, args):
ops = ParseCreateNodePoolOptionsBase(args)
flags.WarnForNodeVersionAutoUpgrade(args)
flags.ValidateSurgeUpgradeSettings(args)
ops.boot_disk_kms_key = args.boot_disk_kms_key
ops.sandbox = args.sandbox
ops.node_locations = args.node_locations
ops.system_config_from_file = args.system_config_from_file
ops.enable_gcfs = args.enable_gcfs
ops.enable_image_streaming = args.enable_image_streaming
ops.ephemeral_storage = args.ephemeral_storage
ops.enable_private_nodes = args.enable_private_nodes
ops.spot = args.spot
ops.placement_type = args.placement_type
ops.enable_blue_green_update = args.enable_blue_green_update
ops.enable_rolling_update = args.enable_rolling_update
ops.node_pool_soak_duration = args.node_pool_soak_duration
ops.standard_rollout_policy = args.standard_rollout_policy
ops.maintenance_interval = args.maintenance_interval
return ops
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class CreateAlpha(Create):
  """Create a node pool in a running cluster (alpha release track)."""

  def ParseCreateNodePoolOptions(self, args):
    """Parses the alpha-track flags into node-pool create options.

    Args:
      args: an argparse namespace holding the parsed command-line flags.

    Returns:
      The CreateNodePoolOptions built by ParseCreateNodePoolOptionsBase,
      extended with alpha-only fields (e.g. local SSD volumes, sysctls).
    """
    ops = ParseCreateNodePoolOptionsBase(args)
    # Validate flag combinations before copying values onto the options.
    flags.WarnForNodeVersionAutoUpgrade(args)
    flags.ValidateSurgeUpgradeSettings(args)
    ops.local_ssd_volume_configs = args.local_ssd_volumes
    ops.ephemeral_storage = args.ephemeral_storage
    ops.boot_disk_kms_key = args.boot_disk_kms_key
    ops.sandbox = args.sandbox
    ops.linux_sysctls = args.linux_sysctls
    ops.node_locations = args.node_locations
    ops.system_config_from_file = args.system_config_from_file
    ops.enable_gcfs = args.enable_gcfs
    ops.enable_image_streaming = args.enable_image_streaming
    ops.enable_private_nodes = args.enable_private_nodes
    ops.spot = args.spot
    ops.placement_type = args.placement_type
    ops.enable_blue_green_update = args.enable_blue_green_update
    ops.enable_rolling_update = args.enable_rolling_update
    ops.node_pool_soak_duration = args.node_pool_soak_duration
    ops.standard_rollout_policy = args.standard_rollout_policy
    ops.maintenance_interval = args.maintenance_interval
    return ops

  @staticmethod
  def Args(parser):
    """Registers the alpha-track command-line flags on *parser*."""
    _Args(parser)
    flags.AddAcceleratorArgs(
        parser, enable_gpu_partition=True, enable_gpu_time_sharing=True)
    flags.AddClusterAutoscalingFlags(parser)
    flags.AddNodePoolAutoprovisioningFlag(parser, hidden=False)
    flags.AddLocalSSDsAlphaFlags(parser, for_node_pool=True)
    flags.AddBootDiskKmsKeyFlag(parser)
    flags.AddPreemptibleFlag(parser, for_node_pool=True)
    flags.AddEnableAutoRepairFlag(parser, for_node_pool=True, for_create=True)
    flags.AddMinCpuPlatformFlag(parser, for_node_pool=True)
    flags.AddWorkloadMetadataFlag(parser, use_mode=False)
    flags.AddNodeTaintsFlag(parser, for_node_pool=True)
    flags.AddNodePoolNodeIdentityFlags(parser)
    flags.AddMaxPodsPerNodeFlag(parser, for_node_pool=True)
    flags.AddSandboxFlag(parser)
    flags.AddNodeGroupFlag(parser)
    flags.AddEnableAutoUpgradeFlag(parser, for_node_pool=True, default=True)
    flags.AddLinuxSysctlFlags(parser, for_node_pool=True)
    flags.AddSurgeUpgradeFlag(parser, for_node_pool=True, default=1)
    flags.AddMaxUnavailableUpgradeFlag(
        parser, for_node_pool=True, is_create=True)
    flags.AddNodePoolLocationsFlag(parser, for_create=True)
    flags.AddSystemConfigFlag(parser, hidden=False)
    flags.AddReservationAffinityFlags(parser, for_node_pool=True)
    flags.AddEnableGcfsFlag(parser, for_node_pool=True)
    flags.AddEnableImageStreamingFlag(parser, for_node_pool=True)
    flags.AddNodePoolEnablePrivateNodes(parser, hidden=True)
    flags.AddEnableGvnicFlag(parser)
    flags.AddSpotFlag(parser, for_node_pool=True)
    flags.AddPlacementTypeFlag(parser, for_node_pool=True, hidden=True)
    flags.AddEnableRollingUpdateFlag(parser)
    flags.AddEnableBlueGreenUpdateFlag(parser)
    flags.AddStandardRolloutPolicyFlag(parser, for_node_pool=True)
    flags.AddNodePoolSoakDurationFlag(parser, for_node_pool=True)
    flags.AddMaintenanceIntervalFlag(parser, for_node_pool=True, hidden=True)


# Attach the shared help text to the base command class.
Create.detailed_help = DETAILED_HELP
| true | true |
f72fd770b3c890aabd12bd755ed60cdc88efa9e5 | 8,995 | py | Python | custom/m4change/reports/ld_hmis_report.py | dimagilg/commcare-hq | ea1786238eae556bb7f1cbd8d2460171af1b619c | [
"BSD-3-Clause"
] | 471 | 2015-01-10T02:55:01.000Z | 2022-03-29T18:07:18.000Z | custom/m4change/reports/ld_hmis_report.py | dimagilg/commcare-hq | ea1786238eae556bb7f1cbd8d2460171af1b619c | [
"BSD-3-Clause"
] | 14,354 | 2015-01-01T07:38:23.000Z | 2022-03-31T20:55:14.000Z | custom/m4change/reports/ld_hmis_report.py | dimagilg/commcare-hq | ea1786238eae556bb7f1cbd8d2460171af1b619c | [
"BSD-3-Clause"
] | 175 | 2015-01-06T07:16:47.000Z | 2022-03-29T13:27:01.000Z | from django.utils.translation import ugettext as _
from corehq.apps.locations.permissions import location_safe
from corehq.apps.reports.datatables import DataTablesHeader, DataTablesColumn, NumericColumn
from corehq.apps.reports.filters.select import MonthFilter, YearFilter
from corehq.apps.reports.standard import MonthYearMixin
from corehq.apps.reports.standard.cases.basic import CaseListReport
from custom.common.filters import RestrictedAsyncLocationFilter
from custom.m4change.reports import validate_report_parameters, get_location_hierarchy_by_id
from custom.m4change.reports.reports import M4ChangeReport
from custom.m4change.reports.sql_data import LdHmisCaseSqlData
def _get_row(row_data, form_data, key):
data = form_data.get(key)
rows = dict([(row_key, data.get(row_key, 0)) for row_key in row_data])
for key in rows:
if rows.get(key) == None:
rows[key] = 0
return rows
@location_safe
class LdHmisReport(MonthYearMixin, CaseListReport, M4ChangeReport):
    """Facility-level Labour & Delivery HMIS report for the m4change project.

    Aggregates L&D metrics (deliveries, births, abortions, neonatal
    conditions) over a month/year and a location hierarchy, keyed by
    standard HMIS indicator codes.
    """

    ajax_pagination = False
    asynchronous = True
    exportable = True
    emailable = False
    name = "Facility L&D HMIS Report"
    slug = "facility_ld_hmis_report"
    default_rows = 25
    base_template = "m4change/report.html"
    report_template_path = "m4change/report_content.html"
    # Report filters rendered in the UI: location (restricted by user
    # permissions), month and year.
    fields = [
        RestrictedAsyncLocationFilter,
        MonthFilter,
        YearFilter
    ]

    @classmethod
    def get_report_data(cls, config):
        """Sum the SQL metrics over the selected location subtree.

        Args:
            config: dict with "domain", "location_id", "datespan" and "user".

        Returns:
            A list of (metric_key, row_dict) tuples sorted by HMIS code,
            where row_dict has "hmis_code", "label" and the summed "value".
        """
        validate_report_parameters(["domain", "location_id", "datespan"], config)
        domain = config["domain"]
        location_id = config["location_id"]
        user = config["user"]
        sql_data = LdHmisCaseSqlData(domain=domain, datespan=config["datespan"]).data
        locations = get_location_hierarchy_by_id(location_id, domain, user)
        row_data = LdHmisReport.get_initial_row_data()
        # Accumulate each descendant location's metrics into row_data.
        for location_id in locations:
            key = (domain, location_id)
            if key in sql_data:
                report_rows = _get_row(row_data, sql_data, key)
                for key in report_rows:
                    row_data.get(key)["value"] += report_rows.get(key)
        return sorted([(key, row_data[key]) for key in row_data], key=lambda t: t[1].get("hmis_code"))

    @classmethod
    def get_initial_row_data(cls):
        """Return the zeroed metric table: key -> {hmis_code, label, value}."""
        return {
            "deliveries_total": {
                "hmis_code": 19, "label": _("Deliveries - Total"), "value": 0
            },
            "deliveries_svd_total": {
                "hmis_code": 20, "label": _("Deliveries - SVD"), "value": 0
            },
            "deliveries_assisted_total": {
                "hmis_code": 21, "label": _("Deliveries - Assisted"), "value": 0
            },
            "deliveries_caesarean_section_total": {
                "hmis_code": 22, "label": _("Deliveries caesarean section"), "value": 0
            },
            "deliveries_complications_total": {
                "hmis_code": 23, "label": _("Deliveries - Complications"), "value": 0
            },
            'deliveries_preterm_total': {
                "hmis_code": 24, "label": _("Deliveries - Preterm"), "value": 0
            },
            'deliveries_hiv_positive_women_total': {
                "hmis_code": 25, "label": _("Deliveries - HIV positive women"), "value": 0
            },
            'live_birth_hiv_positive_women_total': {
                "hmis_code": 26, "label": _("LiveBirth - HIV positive women"), "value": 0
            },
            'deliveries_hiv_positive_booked_women_total': {
                "hmis_code": 27, "label": _("Deliveries - HIV positive booked women"), "value": 0
            },
            'deliveries_hiv_positive_unbooked_women_total': {
                "hmis_code": 28, "label": _("Deliveries - HIV positive unbooked women"), "value": 0
            },
            'deliveries_monitored_using_partograph_total': {
                "hmis_code": 29, "label": _("Deliveries - Monitored using Partograph"), "value": 0
            },
            'deliveries_skilled_birth_attendant_total': {
                "hmis_code": 30, "label": _("Deliveries taken by skilled birth attendant"), "value": 0
            },
            'tt1_total': {
                "hmis_code": 31, "label": _("TT1"), "value": 0
            },
            'tt2_total': {
                "hmis_code": 32, "label": _("TT2"), "value": 0
            },
            # Fractional codes (36.1, 36.2, ...) are sub-indicators of the
            # integer code above them; sorting by hmis_code keeps them grouped.
            'live_births_male_female_total': {
                "hmis_code": 36, "label": _("Live Births(Male, Female, < 2.5kg, >= 2.5k g)"), "value": 0
            },
            'male_lt_2_5kg_total': {
                "hmis_code": 36.1, "label": _("Male, < 2.5kg"), "value": 0
            },
            'male_gte_2_5kg_total': {
                "hmis_code": 36.2, "label": _("Male, >= 2.5kg"), "value": 0
            },
            'female_lt_2_5kg_total': {
                "hmis_code": 36.3, "label": _("Female, < 2.5kg"), "value": 0
            },
            'female_gte_2_5kg_total': {
                "hmis_code": 36.4, "label": _("Female, >= 2.5kg"), "value": 0
            },
            'still_births_total': {
                "hmis_code": 37, "label": _("Still Births total"), "value": 0
            },
            'fresh_still_births_total': {
                "hmis_code": 38.1, "label": _("Fresh Still Births"), "value": 0
            },
            'other_still_births_total': {
                "hmis_code": 38.2, "label": _("Other still Births"), "value": 0
            },
            'abortion_induced_total': {
                "hmis_code": 39.1, "label": _("Abortion Induced"), "value": 0
            },
            'other_abortions_total': {
                "hmis_code": 39.2, "label": _("Other Abortions"), "value": 0
            },
            'total_abortions_total': {
                "hmis_code": 40, "label": _("Total Abortions"), "value": 0
            },
            'birth_asphyxia_total': {
                "hmis_code": 41, "label": _("Birth Asphyxia - Total"), "value": 0
            },
            'birth_asphyxia_male_total': {
                "hmis_code": 41.1, "label": _("Birth Asphyxia - Male"), "value": 0
            },
            'birth_asphyxia_female_total': {
                "hmis_code": 41.2, "label": _("Birth Asphyxia - Female"), "value": 0
            },
            'neonatal_sepsis_total': {
                "hmis_code": 42, "label": _("Neonatal Sepsis - Total"), "value": 0
            },
            'neonatal_sepsis_male_total': {
                "hmis_code": 42.1, "label": _("Neonatal Sepsis - Male"), "value": 0
            },
            'neonatal_sepsis_female_total': {
                "hmis_code": 42.2, "label": _("Neonatal Sepsis - Female"), "value": 0
            },
            'neonatal_tetanus_total': {
                "hmis_code": 43, "label": _("Neonatal Tetanus - Total"), "value": 0
            },
            'neonatal_tetanus_male_total': {
                "hmis_code": 43.1, "label": _("Neonatal Tetanus - Male"), "value": 0
            },
            'neonatal_tetanus_female_total': {
                "hmis_code": 43.2, "label": _("Neonatal Tetanus - Female"), "value": 0
            },
            'neonatal_jaundice_total': {
                "hmis_code": 44, "label": _("Neonatal Jaundice - Total"), "value": 0
            },
            'neonatal_jaundice_male_total': {
                "hmis_code": 44.1, "label": _("Neonatal Jaundice - Male"), "value": 0
            },
            'neonatal_jaundice_female_total': {
                "hmis_code": 44.2, "label": _("Neonatal Jaundice - Female"), "value": 0
            },
            'low_birth_weight_babies_in_kmc_total': {
                "hmis_code": 45, "label": _("Low birth weight babies placed in KMC - Total"), "value": 0
            },
            'low_birth_weight_babies_in_kmc_male_total': {
                "hmis_code": 45.1, "label": _("Low birth weight babies placed in KMC - Male"), "value": 0
            },
            'low_birth_weight_babies_in_kmc_female_total': {
                "hmis_code": 45.2, "label": _("Low birth weight babies placed in KMC - Female"), "value": 0
            }
        }

    @property
    def headers(self):
        """Column headers: HMIS code, indicator label, summed total."""
        headers = DataTablesHeader(NumericColumn(_("HMIS code")),
                                   DataTablesColumn(_("Data Point")),
                                   NumericColumn(_("Total")))
        return headers

    @property
    def rows(self):
        """Yield one table row per metric, using the request's filters."""
        row_data = LdHmisReport.get_report_data({
            "location_id": self.request.GET.get("location_id", None),
            "datespan": self.datespan,
            "domain": str(self.domain),
            "user": self.request.couch_user
        })
        for row in row_data:
            yield [
                self.table_cell(row[1].get("hmis_code")),
                self.table_cell(row[1].get("label")),
                self.table_cell(row[1].get("value"))
            ]

    @property
    def rendered_report_title(self):
        # The report title is simply the fixed report name.
        return self.name
| 42.429245 | 107 | 0.545859 | from django.utils.translation import ugettext as _
from corehq.apps.locations.permissions import location_safe
from corehq.apps.reports.datatables import DataTablesHeader, DataTablesColumn, NumericColumn
from corehq.apps.reports.filters.select import MonthFilter, YearFilter
from corehq.apps.reports.standard import MonthYearMixin
from corehq.apps.reports.standard.cases.basic import CaseListReport
from custom.common.filters import RestrictedAsyncLocationFilter
from custom.m4change.reports import validate_report_parameters, get_location_hierarchy_by_id
from custom.m4change.reports.reports import M4ChangeReport
from custom.m4change.reports.sql_data import LdHmisCaseSqlData
def _get_row(row_data, form_data, key):
data = form_data.get(key)
rows = dict([(row_key, data.get(row_key, 0)) for row_key in row_data])
for key in rows:
if rows.get(key) == None:
rows[key] = 0
return rows
@location_safe
class LdHmisReport(MonthYearMixin, CaseListReport, M4ChangeReport):
ajax_pagination = False
asynchronous = True
exportable = True
emailable = False
name = "Facility L&D HMIS Report"
slug = "facility_ld_hmis_report"
default_rows = 25
base_template = "m4change/report.html"
report_template_path = "m4change/report_content.html"
fields = [
RestrictedAsyncLocationFilter,
MonthFilter,
YearFilter
]
@classmethod
def get_report_data(cls, config):
validate_report_parameters(["domain", "location_id", "datespan"], config)
domain = config["domain"]
location_id = config["location_id"]
user = config["user"]
sql_data = LdHmisCaseSqlData(domain=domain, datespan=config["datespan"]).data
locations = get_location_hierarchy_by_id(location_id, domain, user)
row_data = LdHmisReport.get_initial_row_data()
for location_id in locations:
key = (domain, location_id)
if key in sql_data:
report_rows = _get_row(row_data, sql_data, key)
for key in report_rows:
row_data.get(key)["value"] += report_rows.get(key)
return sorted([(key, row_data[key]) for key in row_data], key=lambda t: t[1].get("hmis_code"))
@classmethod
def get_initial_row_data(cls):
return {
"deliveries_total": {
"hmis_code": 19, "label": _("Deliveries - Total"), "value": 0
},
"deliveries_svd_total": {
"hmis_code": 20, "label": _("Deliveries - SVD"), "value": 0
},
"deliveries_assisted_total": {
"hmis_code": 21, "label": _("Deliveries - Assisted"), "value": 0
},
"deliveries_caesarean_section_total": {
"hmis_code": 22, "label": _("Deliveries caesarean section"), "value": 0
},
"deliveries_complications_total": {
"hmis_code": 23, "label": _("Deliveries - Complications"), "value": 0
},
'deliveries_preterm_total': {
"hmis_code": 24, "label": _("Deliveries - Preterm"), "value": 0
},
'deliveries_hiv_positive_women_total': {
"hmis_code": 25, "label": _("Deliveries - HIV positive women"), "value": 0
},
'live_birth_hiv_positive_women_total': {
"hmis_code": 26, "label": _("LiveBirth - HIV positive women"), "value": 0
},
'deliveries_hiv_positive_booked_women_total': {
"hmis_code": 27, "label": _("Deliveries - HIV positive booked women"), "value": 0
},
'deliveries_hiv_positive_unbooked_women_total': {
"hmis_code": 28, "label": _("Deliveries - HIV positive unbooked women"), "value": 0
},
'deliveries_monitored_using_partograph_total': {
"hmis_code": 29, "label": _("Deliveries - Monitored using Partograph"), "value": 0
},
'deliveries_skilled_birth_attendant_total': {
"hmis_code": 30, "label": _("Deliveries taken by skilled birth attendant"), "value": 0
},
'tt1_total': {
"hmis_code": 31, "label": _("TT1"), "value": 0
},
'tt2_total': {
"hmis_code": 32, "label": _("TT2"), "value": 0
},
'live_births_male_female_total': {
"hmis_code": 36, "label": _("Live Births(Male, Female, < 2.5kg, >= 2.5k g)"), "value": 0
},
'male_lt_2_5kg_total': {
"hmis_code": 36.1, "label": _("Male, < 2.5kg"), "value": 0
},
'male_gte_2_5kg_total': {
"hmis_code": 36.2, "label": _("Male, >= 2.5kg"), "value": 0
},
'female_lt_2_5kg_total': {
"hmis_code": 36.3, "label": _("Female, < 2.5kg"), "value": 0
},
'female_gte_2_5kg_total': {
"hmis_code": 36.4, "label": _("Female, >= 2.5kg"), "value": 0
},
'still_births_total': {
"hmis_code": 37, "label": _("Still Births total"), "value": 0
},
'fresh_still_births_total': {
"hmis_code": 38.1, "label": _("Fresh Still Births"), "value": 0
},
'other_still_births_total': {
"hmis_code": 38.2, "label": _("Other still Births"), "value": 0
},
'abortion_induced_total': {
"hmis_code": 39.1, "label": _("Abortion Induced"), "value": 0
},
'other_abortions_total': {
"hmis_code": 39.2, "label": _("Other Abortions"), "value": 0
},
'total_abortions_total': {
"hmis_code": 40, "label": _("Total Abortions"), "value": 0
},
'birth_asphyxia_total': {
"hmis_code": 41, "label": _("Birth Asphyxia - Total"), "value": 0
},
'birth_asphyxia_male_total': {
"hmis_code": 41.1, "label": _("Birth Asphyxia - Male"), "value": 0
},
'birth_asphyxia_female_total': {
"hmis_code": 41.2, "label": _("Birth Asphyxia - Female"), "value": 0
},
'neonatal_sepsis_total': {
"hmis_code": 42, "label": _("Neonatal Sepsis - Total"), "value": 0
},
'neonatal_sepsis_male_total': {
"hmis_code": 42.1, "label": _("Neonatal Sepsis - Male"), "value": 0
},
'neonatal_sepsis_female_total': {
"hmis_code": 42.2, "label": _("Neonatal Sepsis - Female"), "value": 0
},
'neonatal_tetanus_total': {
"hmis_code": 43, "label": _("Neonatal Tetanus - Total"), "value": 0
},
'neonatal_tetanus_male_total': {
"hmis_code": 43.1, "label": _("Neonatal Tetanus - Male"), "value": 0
},
'neonatal_tetanus_female_total': {
"hmis_code": 43.2, "label": _("Neonatal Tetanus - Female"), "value": 0
},
'neonatal_jaundice_total': {
"hmis_code": 44, "label": _("Neonatal Jaundice - Total"), "value": 0
},
'neonatal_jaundice_male_total': {
"hmis_code": 44.1, "label": _("Neonatal Jaundice - Male"), "value": 0
},
'neonatal_jaundice_female_total': {
"hmis_code": 44.2, "label": _("Neonatal Jaundice - Female"), "value": 0
},
'low_birth_weight_babies_in_kmc_total': {
"hmis_code": 45, "label": _("Low birth weight babies placed in KMC - Total"), "value": 0
},
'low_birth_weight_babies_in_kmc_male_total': {
"hmis_code": 45.1, "label": _("Low birth weight babies placed in KMC - Male"), "value": 0
},
'low_birth_weight_babies_in_kmc_female_total': {
"hmis_code": 45.2, "label": _("Low birth weight babies placed in KMC - Female"), "value": 0
}
}
@property
def headers(self):
headers = DataTablesHeader(NumericColumn(_("HMIS code")),
DataTablesColumn(_("Data Point")),
NumericColumn(_("Total")))
return headers
@property
def rows(self):
row_data = LdHmisReport.get_report_data({
"location_id": self.request.GET.get("location_id", None),
"datespan": self.datespan,
"domain": str(self.domain),
"user": self.request.couch_user
})
for row in row_data:
yield [
self.table_cell(row[1].get("hmis_code")),
self.table_cell(row[1].get("label")),
self.table_cell(row[1].get("value"))
]
@property
def rendered_report_title(self):
return self.name
| true | true |
f72fd7ec1ff8566fe5149edae2c9a1ef77dfb47b | 66 | py | Python | server.py | sigu1011/gameinn | 6c314fa5deefdc2780356900a4d6fa55317a18cd | [
"MIT"
] | null | null | null | server.py | sigu1011/gameinn | 6c314fa5deefdc2780356900a4d6fa55317a18cd | [
"MIT"
] | 1 | 2019-11-27T23:46:36.000Z | 2019-11-27T23:46:36.000Z | server.py | sigu1011/gameinn | 6c314fa5deefdc2780356900a4d6fa55317a18cd | [
"MIT"
] | null | null | null | from gameinn import app
# Start the Flask development server only when this module is executed
# directly (``python server.py``); importing it elsewhere must not start it.
if __name__ == '__main__':
    app.run()
| 13.2 | 26 | 0.666667 | from gameinn import app
if __name__ == '__main__':
app.run()
| true | true |
f72fd8714765a1fe1b575242873790f455b95c4d | 3,480 | py | Python | text_features_extraction.py | maxgreat/dsve-loc | dd6807d02c0d5fd3e215be8e5c7a88e73102e561 | [
"BSD-3-Clause-Clear"
] | null | null | null | text_features_extraction.py | maxgreat/dsve-loc | dd6807d02c0d5fd3e215be8e5c7a88e73102e561 | [
"BSD-3-Clause-Clear"
] | null | null | null | text_features_extraction.py | maxgreat/dsve-loc | dd6807d02c0d5fd3e215be8e5c7a88e73102e561 | [
"BSD-3-Clause-Clear"
] | null | null | null | """
****************** COPYRIGHT AND CONFIDENTIALITY INFORMATION ******************
Copyright (c) 2018 [Thomson Licensing]
All Rights Reserved
This program contains proprietary information which is a trade secret/business \
secret of [Thomson Licensing] and is protected, even if unpublished, under \
applicable Copyright laws (including French droit d'auteur) and/or may be \
subject to one or more patent(s).
Recipient is to retain this program in confidence and is not permitted to use \
or make copies thereof other than as permitted in a written agreement with \
[Thomson Licensing] unless otherwise expressly allowed by applicable laws or \
by [Thomson Licensing] under express agreement.
Thomson Licensing is a company of the group TECHNICOLOR
*******************************************************************************
This scripts permits one to reproduce training and experiments of:
Engilberge, M., Chevallier, L., Pérez, P., & Cord, M. (2018, April).
Finding beans in burgers: Deep semantic-visual embedding with localization.
In Proceedings of CVPR (pp. 3984-3993)
Author: Martin Engilberge
"""
import argparse
import time
import numpy as np
import torch
from misc.dataset import TextDataset
from misc.model import joint_embedding
from misc.utils import save_obj, collate_fn_cap_padded
from torch.utils.data import DataLoader
# Compute on GPU by default; flip to the commented line for CPU-only runs.
device = torch.device("cuda")
# device = torch.device("cpu") # uncomment to run with cpu

if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Extract embedding representation for images')
    parser.add_argument("-p", '--path', dest="model_path", help='Path to the weights of the model to evaluate', required=True)
    parser.add_argument("-d", '--data', dest="data_path", help='path to the file containing the sentence to embed')
    parser.add_argument("-o", '--output', dest="output_path", help='path of the output file', default="./text_embedding")
    parser.add_argument("-bs", "--batch_size", help="The size of the batches", type=int, default=1)
    args = parser.parse_args()

    print("Loading model from:", args.model_path)
    # map_location keeps the load CPU-side regardless of where the
    # checkpoint was saved; the model is moved to `device` afterwards.
    checkpoint = torch.load(args.model_path, map_location=lambda storage, loc: storage)

    join_emb = joint_embedding(checkpoint['args_dict'])
    join_emb.load_state_dict(checkpoint["state_dict"])

    # Inference only: freeze all parameters and switch to eval mode.
    for param in join_emb.parameters():
        param.requires_grad = False
    join_emb.to(device)
    join_emb.eval()

    dataset = TextDataset(args.data_path)
    print("Dataset size: ", len(dataset))
    # Padded collate so variable-length captions can be batched together.
    dataset_loader = DataLoader(dataset, batch_size=args.batch_size, num_workers=3, pin_memory=True, collate_fn=collate_fn_cap_padded)

    caps_enc = list()
    print("### Starting sentence embedding ###")
    end = time.time()
    for i, (caps, length) in enumerate(dataset_loader, 0):
        input_caps = caps.to(device)
        with torch.no_grad():
            # Image input is None: only the text branch of the joint
            # embedding is evaluated here.
            _, output_emb = join_emb(None, input_caps, length)
        caps_enc.append(output_emb.cpu().data.numpy())
        if i % 100 == 99:
            print(str((i + 1) * args.batch_size) + "/" + str(len(dataset)) + " captions encoded - Time per batch: " + str((time.time() - end)) + "s")
            end = time.time()

    print("Processing done -> saving")
    # Stack per-batch arrays into one (num_captions, embed_dim) matrix.
    caps_stack = np.vstack(caps_enc)
    save_obj(caps_stack, args.output_path)
    print("The data has been save to ", args.output_path)
| 39.101124 | 150 | 0.675287 |
import argparse
import time
import numpy as np
import torch
from misc.dataset import TextDataset
from misc.model import joint_embedding
from misc.utils import save_obj, collate_fn_cap_padded
from torch.utils.data import DataLoader
device = torch.device("cuda")
_':
parser = argparse.ArgumentParser(description='Extract embedding representation for images')
parser.add_argument("-p", '--path', dest="model_path", help='Path to the weights of the model to evaluate', required=True)
parser.add_argument("-d", '--data', dest="data_path", help='path to the file containing the sentence to embed')
parser.add_argument("-o", '--output', dest="output_path", help='path of the output file', default="./text_embedding")
parser.add_argument("-bs", "--batch_size", help="The size of the batches", type=int, default=1)
args = parser.parse_args()
print("Loading model from:", args.model_path)
checkpoint = torch.load(args.model_path, map_location=lambda storage, loc: storage)
join_emb = joint_embedding(checkpoint['args_dict'])
join_emb.load_state_dict(checkpoint["state_dict"])
for param in join_emb.parameters():
param.requires_grad = False
join_emb.to(device)
join_emb.eval()
dataset = TextDataset(args.data_path)
print("Dataset size: ", len(dataset))
dataset_loader = DataLoader(dataset, batch_size=args.batch_size, num_workers=3, pin_memory=True, collate_fn=collate_fn_cap_padded)
caps_enc = list()
print("### Starting sentence embedding ###")
end = time.time()
for i, (caps, length) in enumerate(dataset_loader, 0):
input_caps = caps.to(device)
with torch.no_grad():
_, output_emb = join_emb(None, input_caps, length)
caps_enc.append(output_emb.cpu().data.numpy())
if i % 100 == 99:
print(str((i + 1) * args.batch_size) + "/" + str(len(dataset)) + " captions encoded - Time per batch: " + str((time.time() - end)) + "s")
end = time.time()
print("Processing done -> saving")
caps_stack = np.vstack(caps_enc)
save_obj(caps_stack, args.output_path)
print("The data has been save to ", args.output_path)
| true | true |
f72fd9650f220263368abc650314f11467ad9ad0 | 117 | py | Python | FRCScouting/Contact/urls.py | xNovax/FRCScouting.ca | caf2774e5854a7386eceb21e57b68c1f9c1f7d2d | [
"MIT"
] | 1 | 2019-06-13T03:07:15.000Z | 2019-06-13T03:07:15.000Z | FRCScouting/Contact/urls.py | xNovax/FRCScouting.ca | caf2774e5854a7386eceb21e57b68c1f9c1f7d2d | [
"MIT"
] | 8 | 2019-07-04T16:19:06.000Z | 2019-07-12T17:37:51.000Z | FRCScouting/Contact/urls.py | xNovax/FRCScouting.ca | caf2774e5854a7386eceb21e57b68c1f9c1f7d2d | [
"MIT"
] | null | null | null | from django.urls import path
from . import views
urlpatterns = [
path('', views.contactus, name= 'contactus')
]
| 16.714286 | 48 | 0.692308 | from django.urls import path
from . import views
urlpatterns = [
path('', views.contactus, name= 'contactus')
]
| true | true |
f72fda32958488cb17ecc7633d36804837bdf534 | 7,499 | py | Python | flsim/utils/config_utils.py | karthikprasad/FLSim | 3c62fe83de2f06feffb9ed65ce9f71803bbd6027 | [
"Apache-2.0"
] | null | null | null | flsim/utils/config_utils.py | karthikprasad/FLSim | 3c62fe83de2f06feffb9ed65ce9f71803bbd6027 | [
"Apache-2.0"
] | null | null | null | flsim/utils/config_utils.py | karthikprasad/FLSim | 3c62fe83de2f06feffb9ed65ce9f71803bbd6027 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
import argparse
import collections
import collections.abc
import json
from typing import Any, Dict, List, Tuple, Type

from hydra.experimental import initialize, compose
from omegaconf import OmegaConf, DictConfig
def fullclassname(cls: Type[Any]) -> str:
    """
    Returns the fully qualified class name of the input class,
    e.g. ``flsim.utils.config_utils.SomeClass``.

    Builtins (and classes with no module) return the bare qualified name,
    so ``fullclassname(int) == "int"``.
    """
    module = cls.__module__
    name = cls.__qualname__
    # The builtin module is named "builtins" on Python 3; the original check
    # used the Python 2 spelling "__builtin__" and therefore never matched.
    # Both spellings are excluded here for safety.
    if module is None or module in ("builtins", "__builtin__"):
        return name
    return module + "." + name
def _validate_cfg(component_class: Type[Any], cfg: Any):
"""
Validate that cfg doesn't have MISSING fields. This needs to be done only after
all defaults are set, typically in the base class.
We do this by making sure none of the parents have ``_set_defaults_in_cfg`` method.
"""
if not any(
hasattr(parent, "_set_defaults_in_cfg") for parent in component_class.__bases__
):
# looping over the config fields throws incase of missing field
for _ in cfg.items():
pass
def init_self_cfg(
    component_obj: Any,
    *,
    component_class: Type,
    config_class: Type,
    **kwargs,
):
    """
    Initialize FL component config by constructing OmegaConf object,
    setting defaults, and validating config.

    Args:
        component_obj: the component instance whose ``cfg`` attribute is
            created or normalized in place.
        component_class: the class whose ``_set_defaults_in_cfg`` supplies
            this level's default values.
        config_class: dataclass used to build a fresh config when the
            component does not already carry one.
        **kwargs: field overrides forwarded to ``config_class``.
    """
    # Reuse an existing cfg (e.g. one a subclass already attached);
    # otherwise build a fresh structured config from kwargs.
    cfg = (
        config_class(**kwargs)
        if not hasattr(component_obj, "cfg")
        else component_obj.cfg
    )
    cfg = OmegaConf.create(cfg)  # convert any structure to OmegaConf
    component_class._set_defaults_in_cfg(cfg)  # set default cfg params for this class
    # convert any structure to OmegaConf again, after setting defaults
    cfg = OmegaConf.create(cfg)  # pyre-ignore [6]
    _validate_cfg(component_class, cfg)  # validate the config
    component_obj.cfg = cfg
# trainer config utils for consuming hydra configs
def _flatten_dict(
d: collections.MutableMapping, parent_key="", sep="."
) -> Dict[str, str]:
"""
Changes json of style
```
{
"trainer" : {
"_base_": "base_sync_trainer",
"aggregator": {
"_base_": "base_fed_avg_with_lr_sync_aggregator",
"lr": 0.1
}
}
}
```
to
```
{
"trainer._base_": "base_sync_trainer",
"trainer.aggregator._base_": "base_fed_avg_with_lr_sync_aggregator",
"trainer.aggregator.lr": 0.1,
}
```
"""
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
# if value is not a dict and is mutable, extend the items and flatten again.
# > hacky way of preserving dict values by checking if key has _dict as suffix.
if not new_key.endswith("_dict") and isinstance(v, collections.MutableMapping):
items.extend(_flatten_dict(v, new_key, sep=sep).items())
else:
# check if a number needs to be retained as a string
# the repalce with one dot is needed to handle floats
if type(v) is str and v.replace(".", "", 1).isdigit():
v = f'"{v}"' # enclose it with quotes if so.
items.append((new_key, v))
return dict(items)
def _handle_values_for_overrides_list(v: Any) -> Any:
"""
Handle the special massaging of some values of JSON need to for it to be supplied
to Hydra's overrides list.
"""
# python's None --> cmd line null for override list
v = "null" if v is None else v
# if value is a dict, convert it to string to work with override list.
# dump twice to escape quotes correctly.
v = json.dumps(json.dumps(v)) if type(v) is dict else v
# escape = char in value when present
v = v.replace(r"=", r"\=") if type(v) is str else v
return v
def _hydra_merge_order(dotlist_entry: str) -> Tuple:
"""
The override list needs to be ordered as the last one wins in case of
duplicates: https://hydra.cc/docs/advanced/defaults_list#composition-order
This function arranges the list so that _base_ is at the top, and we
proceed with overrides from top to bottom.
"""
key = dotlist_entry.split("=")[0]
# presence of "@" => it is a _base_ override
default_list_item_indicator = key.count("@") # 1 if true, 0 otherwise
# level in hierarchy; based on number of "."
hierarchy_level = key.count(".")
# multiply by -1 to keep the default list items on top
return (-1 * default_list_item_indicator, hierarchy_level, dotlist_entry)
def fl_json_to_dotlist(
    json_config: Dict[str, Any], append_or_override: bool = True
) -> List[str]:
    """
    Convert an FLSim JSON config into Hydra's override-list form.

    Changes
    ```
    {
        "trainer._base_": "base_sync_trainer",
        "trainer.aggregator._base_": "base_fed_avg_with_lr_sync_aggregator",
        "trainer.aggregator.lr": 0.1,
    }
    ```
    to
    ```
    [
        "+trainer@trainer=base_sync_trainer",
        "+aggregator@trainer.aggregator=base_fed_avg_with_lr_sync_aggregator",
        "trainer.aggregator.lr=0.1",
    ]
    ```
    Override grammar reference:
    https://hydra.cc/docs/advanced/override_grammar/basic
    """
    overrides = []
    for key, value in _flatten_dict(json_config).items():
        if key.endswith("._base_"):
            # trainer.aggregator._base_ --> trainer.aggregator
            target = key.replace("._base_", "")
            # extract the config group name, e.g. "aggregator"
            group = target.split(".")[-1]
            # --> +aggregator@trainer.aggregator=<value>
            overrides.append(f"+{group}@{target}={value}")
        else:
            value = _handle_values_for_overrides_list(value)
            # "++" appends-or-overrides; an empty prefix requires the key
            # to already exist in the composed config.
            prefix = "++" if append_or_override else ""
            overrides.append(f"{prefix}{key}={value}")
    return sorted(overrides, key=_hydra_merge_order)
def fl_config_from_json(
    json_config: Dict[str, Any], append_or_override: bool = True
) -> DictConfig:
    """
    Accepts the FLSim config in json format and constructs a Hydra config object.

    Args:
        json_config: nested JSON dict describing the FLSim configuration.
        append_or_override: forwarded to ``fl_json_to_dotlist``; when True,
            plain keys are prefixed with ``++`` so they may create or
            override fields.

    Returns:
        The composed Hydra ``DictConfig``.
    """
    # config_path=None: compose purely from the overrides list, without any
    # YAML config directory on disk.
    with initialize(config_path=None):
        cfg = compose(
            config_name=None,
            overrides=fl_json_to_dotlist(json_config, append_or_override),
        )
    return cfg
def maybe_parse_json_config():
    """
    Parse the command line args and build a config object if json config is supplied.
    This comes in handy when we want to supply a json config file during to buck run.
    This function will no longer be relevant once FLSim entirely moves to YAML configs.

    Returns:
        A ``DictConfig`` built from the ``--config-file`` JSON, or None when
        no JSON file was supplied (YAML/Hydra handles config in that case).
    """
    cfg = None
    parser = argparse.ArgumentParser(description="Run training loop for FL example")
    parser.add_argument("--config-file", type=str, default=None, help="JSON config")
    # parse_known_args: ignore any extra Hydra-style CLI arguments.
    args, _ = parser.parse_known_args()
    # if JSON config is specified, build a DictConfig
    if args.config_file is not None:
        with open(args.config_file, "r") as config_file:
            json_config = json.load(config_file)
        # The JSON file nests the actual config under a top-level "config" key.
        cfg = fl_config_from_json(json_config["config"])
    # else: assume yaml config, and let hydra handle config construction
    return cfg
def is_target(config, cls):
    """Return whether *config*'s Hydra ``_target_`` matches that of *cls*."""
    return cls._target_ == config._target_
| 34.557604 | 87 | 0.648887 |
import argparse
import collections
import json
from typing import Any, Dict, List, Tuple, Type
from hydra.experimental import initialize, compose
from omegaconf import OmegaConf, DictConfig
def fullclassname(cls: Type[Any]) -> str:
module = cls.__module__
name = cls.__qualname__
if module is not None and module != "__builtin__":
name = module + "." + name
return name
def _validate_cfg(component_class: Type[Any], cfg: Any):
if not any(
hasattr(parent, "_set_defaults_in_cfg") for parent in component_class.__bases__
):
for _ in cfg.items():
pass
def init_self_cfg(
component_obj: Any,
*,
component_class: Type,
config_class: Type,
**kwargs,
):
cfg = (
config_class(**kwargs)
if not hasattr(component_obj, "cfg")
else component_obj.cfg
)
cfg = OmegaConf.create(cfg)
component_class._set_defaults_in_cfg(cfg)
cfg = OmegaConf.create(cfg)
_validate_cfg(component_class, cfg)
component_obj.cfg = cfg
def _flatten_dict(
d: collections.MutableMapping, parent_key="", sep="."
) -> Dict[str, str]:
items = []
for k, v in d.items():
new_key = parent_key + sep + k if parent_key else k
if not new_key.endswith("_dict") and isinstance(v, collections.MutableMapping):
items.extend(_flatten_dict(v, new_key, sep=sep).items())
else:
if type(v) is str and v.replace(".", "", 1).isdigit():
v = f'"{v}"'
items.append((new_key, v))
return dict(items)
def _handle_values_for_overrides_list(v: Any) -> Any:
v = "null" if v is None else v
# if value is a dict, convert it to string to work with override list.
# dump twice to escape quotes correctly.
v = json.dumps(json.dumps(v)) if type(v) is dict else v
# escape = char in value when present
v = v.replace(r"=", r"\=") if type(v) is str else v
return v
def _hydra_merge_order(dotlist_entry: str) -> Tuple:
key = dotlist_entry.split("=")[0]
# presence of "@" => it is a _base_ override
default_list_item_indicator = key.count("@") # 1 if true, 0 otherwise
# level in hierarchy; based on number of "."
hierarchy_level = key.count(".")
# multiply by -1 to keep the default list items on top
return (-1 * default_list_item_indicator, hierarchy_level, dotlist_entry)
def fl_json_to_dotlist(
    json_config: Dict[str, Any], append_or_override: bool = True
) -> List[str]:
    """
    Convert a (possibly nested) JSON config dict into a Hydra override dotlist.

    ``_base_`` keys are rewritten into Hydra default-list override syntax
    (``+group@path=value``); every other entry becomes a plain
    ``key=value`` override, prefixed with "++" when *append_or_override*
    is set. The result is sorted so default-list items come first and
    shallow keys precede deeper ones.
    """
    entries = []
    for key, value in _flatten_dict(json_config).items():
        if key.endswith("._base_"):
            # trainer.aggregator._base_ --> +aggregator@trainer.aggregator=<value>
            path = key.replace("._base_", "")
            group = path.rsplit(".", 1)[-1]
            entries.append(f"+{group}@{path}={value}")
        else:
            prefix = "++" if append_or_override else ""
            entries.append(f"{prefix}{key}={_handle_values_for_overrides_list(value)}")
    return sorted(entries, key=_hydra_merge_order)
def fl_config_from_json(
    json_config: Dict[str, Any], append_or_override: bool = True
) -> DictConfig:
    """
    Build a Hydra ``DictConfig`` from an FLSim JSON config dict.

    The JSON is first converted to a Hydra override dotlist and then
    composed inside a fresh Hydra context with no config search path.
    """
    overrides = fl_json_to_dotlist(json_config, append_or_override)
    with initialize(config_path=None):
        return compose(config_name=None, overrides=overrides)
def maybe_parse_json_config():
    """
    Parse known CLI args and, when ``--config-file`` points at a JSON file,
    return the Hydra config built from it; otherwise return ``None`` so the
    caller falls back to YAML-based Hydra composition.
    """
    parser = argparse.ArgumentParser(description="Run training loop for FL example")
    parser.add_argument("--config-file", type=str, default=None, help="JSON config")
    parsed, _unknown = parser.parse_known_args()
    if parsed.config_file is None:
        # No JSON supplied; hydra will construct the config from YAML.
        return None
    with open(parsed.config_file, "r") as fh:
        payload = json.load(fh)
    return fl_config_from_json(payload["config"])
def is_target(config, cls):
    """True iff ``config`` was generated for ``cls`` (same Hydra ``_target_``)."""
    return getattr(config, "_target_") == getattr(cls, "_target_")
| true | true |
f72fda7d11cd1da25e984d8313329f9d5e6cc36b | 12,611 | py | Python | py3.1/multiprocess/queues.py | geofft/multiprocess | d998ffea9e82d17662b12b94a236182e7fde46d5 | [
"BSD-3-Clause"
] | 356 | 2015-06-21T21:05:10.000Z | 2022-03-30T11:57:08.000Z | py3.1/multiprocess/queues.py | geofft/multiprocess | d998ffea9e82d17662b12b94a236182e7fde46d5 | [
"BSD-3-Clause"
] | 103 | 2015-06-22T01:44:14.000Z | 2022-03-01T03:44:25.000Z | py3.1/multiprocess/queues.py | geofft/multiprocess | d998ffea9e82d17662b12b94a236182e7fde46d5 | [
"BSD-3-Clause"
] | 72 | 2015-09-02T14:10:24.000Z | 2022-03-25T06:49:43.000Z | #
# Module implementing queues
#
# multiprocessing/queues.py
#
# Copyright (c) 2006-2008, R Oudkerk
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# 3. Neither the name of author nor the names of any contributors may be
# used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
# OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
# SUCH DAMAGE.
#
__all__ = ['Queue', 'SimpleQueue', 'JoinableQueue']
import sys
import os
import threading
import collections
import time
import atexit
import weakref
from queue import Empty, Full
try:
import _multiprocess as _multiprocessing
except ImportError:
import _multiprocessing
from multiprocess import Pipe
from multiprocess.synchronize import Lock, BoundedSemaphore, Semaphore, Condition
from multiprocess.util import debug, info, Finalize, register_after_fork
from multiprocess.forking import assert_spawning
#
# Queue type using a pipe, buffer and thread
#
class Queue(object):
    """
    Multi-process FIFO queue built from a pipe, an in-process buffer, and a
    background feeder thread that drains the buffer into the pipe.

    A bounded semaphore enforces ``maxsize``; ``maxsize <= 0`` means the
    platform's maximum semaphore value.
    """
    def __init__(self, maxsize=0):
        if maxsize <= 0:
            maxsize = _multiprocessing.SemLock.SEM_VALUE_MAX
        self._maxsize = maxsize
        self._reader, self._writer = Pipe(duplex=False)
        self._rlock = Lock()
        self._opid = os.getpid()
        if sys.platform == 'win32':
            # Writes to a message-oriented win32 pipe are atomic, so no
            # write lock is needed on Windows (see _feed / SimpleQueue).
            self._wlock = None
        else:
            self._wlock = Lock()
        self._sem = BoundedSemaphore(maxsize)
        self._after_fork()
        if sys.platform != 'win32':
            register_after_fork(self, Queue._after_fork)
    def __getstate__(self):
        # Only picklable while spawning a child process; the per-process
        # state (buffer, feeder thread) is rebuilt via _after_fork().
        assert_spawning(self)
        return (self._maxsize, self._reader, self._writer,
                self._rlock, self._wlock, self._sem, self._opid)
    def __setstate__(self, state):
        (self._maxsize, self._reader, self._writer,
         self._rlock, self._wlock, self._sem, self._opid) = state
        self._after_fork()
    def _after_fork(self):
        """Reset per-process state; run on init, unpickling, and fork."""
        debug('Queue._after_fork()')
        self._notempty = threading.Condition(threading.Lock())
        self._buffer = collections.deque()
        self._thread = None
        self._jointhread = None
        self._joincancelled = False
        self._closed = False
        self._close = None
        self._send = self._writer.send
        self._recv = self._reader.recv
        self._poll = self._reader.poll
    def put(self, obj, block=True, timeout=None):
        """Append obj to the buffer; raises Full when the queue is full."""
        assert not self._closed
        if not self._sem.acquire(block, timeout):
            raise Full
        self._notempty.acquire()
        try:
            if self._thread is None:
                # The feeder thread is started lazily on the first put().
                self._start_thread()
            self._buffer.append(obj)
            self._notempty.notify()
        finally:
            self._notempty.release()
    def get(self, block=True, timeout=None):
        """Remove and return one item; raises Empty on timeout / non-blocking."""
        if block and timeout is None:
            self._rlock.acquire()
            try:
                res = self._recv()
                self._sem.release()
                return res
            finally:
                self._rlock.release()
        else:
            if block:
                deadline = time.time() + timeout
            if not self._rlock.acquire(block, timeout):
                raise Empty
            try:
                # NOTE(review): once the deadline has passed,
                # deadline-time.time() is negative and is handed to poll()
                # unchanged -- presumably treated as "don't wait"; confirm.
                if not self._poll(block and (deadline-time.time()) or 0.0):
                    raise Empty
                res = self._recv()
                self._sem.release()
                return res
            finally:
                self._rlock.release()
    def qsize(self):
        # Raises NotImplementedError on Mac OSX because of broken sem_getvalue()
        return self._maxsize - self._sem._semlock._get_value()
    def empty(self):
        return not self._poll()
    def full(self):
        return self._sem._semlock._is_zero()
    def get_nowait(self):
        return self.get(False)
    def put_nowait(self, obj):
        return self.put(obj, False)
    def close(self):
        self._closed = True
        self._reader.close()
        if self._close:
            self._close()
    def join_thread(self):
        """Wait for the feeder thread to flush; only valid after close()."""
        debug('Queue.join_thread()')
        assert self._closed
        if self._jointhread:
            self._jointhread()
    def cancel_join_thread(self):
        """Stop process exit from blocking on the feeder thread."""
        debug('Queue.cancel_join_thread()')
        self._joincancelled = True
        try:
            self._jointhread.cancel()
        except AttributeError:
            pass
    def _start_thread(self):
        debug('Queue._start_thread()')
        # Start thread which transfers data from buffer to pipe
        self._buffer.clear()
        self._thread = threading.Thread(
            target=Queue._feed,
            args=(self._buffer, self._notempty, self._send,
                  self._wlock, self._writer.close),
            name='QueueFeederThread'
            )
        self._thread.daemon = True
        debug('doing self._thread.start()')
        self._thread.start()
        debug('... done self._thread.start()')
        # On process exit we will wait for data to be flushed to pipe.
        #
        # However, if this process created the queue then all
        # processes which use the queue will be descendants of this
        # process.  Therefore waiting for the queue to be flushed
        # is pointless once all the child processes have been joined.
        created_by_this_process = (self._opid == os.getpid())
        if not self._joincancelled and not created_by_this_process:
            self._jointhread = Finalize(
                self._thread, Queue._finalize_join,
                [weakref.ref(self._thread)],
                exitpriority=-5
                )
        # Send sentinel to the thread queue object when garbage collected
        self._close = Finalize(
            self, Queue._finalize_close,
            [self._buffer, self._notempty],
            exitpriority=10
            )
    @staticmethod
    def _finalize_join(twr):
        """Join the feeder thread via a weak reference, if still alive."""
        debug('joining queue thread')
        thread = twr()
        if thread is not None:
            thread.join()
            debug('... queue thread joined')
        else:
            debug('... queue thread already dead')
    @staticmethod
    def _finalize_close(buffer, notempty):
        """Push the sentinel into the buffer so the feeder thread exits."""
        debug('telling queue thread to quit')
        notempty.acquire()
        try:
            buffer.append(_sentinel)
            notempty.notify()
        finally:
            notempty.release()
    @staticmethod
    def _feed(buffer, notempty, send, writelock, close):
        """Feeder-thread main loop: drain buffer into the pipe until sentinel."""
        debug('starting thread to feed data to pipe')
        from .util import is_exiting
        # Bind bound methods to locals once; this loop is hot.
        nacquire = notempty.acquire
        nrelease = notempty.release
        nwait = notempty.wait
        bpopleft = buffer.popleft
        sentinel = _sentinel
        if sys.platform != 'win32':
            wacquire = writelock.acquire
            wrelease = writelock.release
        else:
            wacquire = None
        try:
            while 1:
                nacquire()
                try:
                    if not buffer:
                        nwait()
                finally:
                    nrelease()
                try:
                    while 1:
                        obj = bpopleft()
                        if obj is sentinel:
                            debug('feeder thread got sentinel -- exiting')
                            close()
                            return
                        if wacquire is None:
                            send(obj)
                        else:
                            wacquire()
                            try:
                                send(obj)
                            finally:
                                wrelease()
                except IndexError:
                    # Buffer drained; go back to waiting on the condition.
                    pass
        except Exception as e:
            # Since this runs in a daemon thread the resources it uses
            # may be become unusable while the process is cleaning up.
            # We ignore errors which happen after the process has
            # started to cleanup.
            try:
                if is_exiting():
                    info('error in queue thread: %s', e)
                else:
                    import traceback
                    traceback.print_exc()
            except Exception:
                pass
# Unique marker appended to the buffer to tell the feeder thread to exit.
_sentinel = object()
#
# A queue type which also supports join() and task_done() methods
#
# Note that if you do not call task_done() for each finished task then
# eventually the counter's semaphore may overflow causing Bad Things
# to happen.
#
class JoinableQueue(Queue):
    """
    Queue subclass with task accounting: put() increments an
    unfinished-task counter, task_done() decrements it, and join() blocks
    until the counter reaches zero.
    """
    def __init__(self, maxsize=0):
        Queue.__init__(self, maxsize)
        self._unfinished_tasks = Semaphore(0)
        self._cond = Condition()
    def __getstate__(self):
        return Queue.__getstate__(self) + (self._cond, self._unfinished_tasks)
    def __setstate__(self, state):
        Queue.__setstate__(self, state[:-2])
        self._cond, self._unfinished_tasks = state[-2:]
    def put(self, obj, block=True, timeout=None):
        """Enqueue obj and bump the unfinished-task counter atomically."""
        assert not self._closed
        if not self._sem.acquire(block, timeout):
            raise Full
        self._notempty.acquire()
        self._cond.acquire()
        try:
            if self._thread is None:
                self._start_thread()
            self._buffer.append(obj)
            self._unfinished_tasks.release()
            self._notempty.notify()
        finally:
            self._cond.release()
            self._notempty.release()
    def task_done(self):
        """Mark one previously-gotten task finished; wakes join() at zero."""
        self._cond.acquire()
        try:
            if not self._unfinished_tasks.acquire(False):
                raise ValueError('task_done() called too many times')
            if self._unfinished_tasks._semlock._is_zero():
                self._cond.notify_all()
        finally:
            self._cond.release()
    def join(self):
        """Block until every enqueued task has been marked done."""
        self._cond.acquire()
        try:
            if not self._unfinished_tasks._semlock._is_zero():
                self._cond.wait()
        finally:
            self._cond.release()
#
# Simplified Queue type -- really just a locked pipe
#
class SimpleQueue(object):
    """
    Simplified queue: really just a locked pipe with get()/put() bound as
    per-instance attributes by _make_methods().
    """
    def __init__(self):
        self._reader, self._writer = Pipe(duplex=False)
        self._rlock = Lock()
        if sys.platform == 'win32':
            self._wlock = None
        else:
            self._wlock = Lock()
        self._make_methods()
    def empty(self):
        return not self._reader.poll()
    def __getstate__(self):
        # Picklable only while spawning; get/put are rebuilt on unpickle.
        assert_spawning(self)
        return (self._reader, self._writer, self._rlock, self._wlock)
    def __setstate__(self, state):
        (self._reader, self._writer, self._rlock, self._wlock) = state
        self._make_methods()
    def _make_methods(self):
        """Build get()/put() closures over the pipe ends and their locks."""
        recv = self._reader.recv
        racquire, rrelease = self._rlock.acquire, self._rlock.release
        def get():
            racquire()
            try:
                return recv()
            finally:
                rrelease()
        self.get = get
        if self._wlock is None:
            # writes to a message oriented win32 pipe are atomic
            self.put = self._writer.send
        else:
            send = self._writer.send
            wacquire, wrelease = self._wlock.acquire, self._wlock.release
            def put(obj):
                wacquire()
                try:
                    return send(obj)
                finally:
                    wrelease()
            self.put = put
| 31.606516 | 81 | 0.581714 |
__all__ = ['Queue', 'SimpleQueue', 'JoinableQueue']
import sys
import os
import threading
import collections
import time
import atexit
import weakref
from queue import Empty, Full
try:
import _multiprocess as _multiprocessing
except ImportError:
import _multiprocessing
from multiprocess import Pipe
from multiprocess.synchronize import Lock, BoundedSemaphore, Semaphore, Condition
from multiprocess.util import debug, info, Finalize, register_after_fork
from multiprocess.forking import assert_spawning
class Queue(object):
def __init__(self, maxsize=0):
if maxsize <= 0:
maxsize = _multiprocessing.SemLock.SEM_VALUE_MAX
self._maxsize = maxsize
self._reader, self._writer = Pipe(duplex=False)
self._rlock = Lock()
self._opid = os.getpid()
if sys.platform == 'win32':
self._wlock = None
else:
self._wlock = Lock()
self._sem = BoundedSemaphore(maxsize)
self._after_fork()
if sys.platform != 'win32':
register_after_fork(self, Queue._after_fork)
def __getstate__(self):
assert_spawning(self)
return (self._maxsize, self._reader, self._writer,
self._rlock, self._wlock, self._sem, self._opid)
def __setstate__(self, state):
(self._maxsize, self._reader, self._writer,
self._rlock, self._wlock, self._sem, self._opid) = state
self._after_fork()
def _after_fork(self):
debug('Queue._after_fork()')
self._notempty = threading.Condition(threading.Lock())
self._buffer = collections.deque()
self._thread = None
self._jointhread = None
self._joincancelled = False
self._closed = False
self._close = None
self._send = self._writer.send
self._recv = self._reader.recv
self._poll = self._reader.poll
def put(self, obj, block=True, timeout=None):
assert not self._closed
if not self._sem.acquire(block, timeout):
raise Full
self._notempty.acquire()
try:
if self._thread is None:
self._start_thread()
self._buffer.append(obj)
self._notempty.notify()
finally:
self._notempty.release()
def get(self, block=True, timeout=None):
if block and timeout is None:
self._rlock.acquire()
try:
res = self._recv()
self._sem.release()
return res
finally:
self._rlock.release()
else:
if block:
deadline = time.time() + timeout
if not self._rlock.acquire(block, timeout):
raise Empty
try:
if not self._poll(block and (deadline-time.time()) or 0.0):
raise Empty
res = self._recv()
self._sem.release()
return res
finally:
self._rlock.release()
def qsize(self):
return self._maxsize - self._sem._semlock._get_value()
def empty(self):
return not self._poll()
def full(self):
return self._sem._semlock._is_zero()
def get_nowait(self):
return self.get(False)
def put_nowait(self, obj):
return self.put(obj, False)
def close(self):
self._closed = True
self._reader.close()
if self._close:
self._close()
def join_thread(self):
debug('Queue.join_thread()')
assert self._closed
if self._jointhread:
self._jointhread()
def cancel_join_thread(self):
debug('Queue.cancel_join_thread()')
self._joincancelled = True
try:
self._jointhread.cancel()
except AttributeError:
pass
def _start_thread(self):
debug('Queue._start_thread()')
self._buffer.clear()
self._thread = threading.Thread(
target=Queue._feed,
args=(self._buffer, self._notempty, self._send,
self._wlock, self._writer.close),
name='QueueFeederThread'
)
self._thread.daemon = True
debug('doing self._thread.start()')
self._thread.start()
debug('... done self._thread.start()')
created_by_this_process = (self._opid == os.getpid())
if not self._joincancelled and not created_by_this_process:
self._jointhread = Finalize(
self._thread, Queue._finalize_join,
[weakref.ref(self._thread)],
exitpriority=-5
)
self._close = Finalize(
self, Queue._finalize_close,
[self._buffer, self._notempty],
exitpriority=10
)
@staticmethod
def _finalize_join(twr):
debug('joining queue thread')
thread = twr()
if thread is not None:
thread.join()
debug('... queue thread joined')
else:
debug('... queue thread already dead')
@staticmethod
def _finalize_close(buffer, notempty):
debug('telling queue thread to quit')
notempty.acquire()
try:
buffer.append(_sentinel)
notempty.notify()
finally:
notempty.release()
@staticmethod
def _feed(buffer, notempty, send, writelock, close):
debug('starting thread to feed data to pipe')
from .util import is_exiting
nacquire = notempty.acquire
nrelease = notempty.release
nwait = notempty.wait
bpopleft = buffer.popleft
sentinel = _sentinel
if sys.platform != 'win32':
wacquire = writelock.acquire
wrelease = writelock.release
else:
wacquire = None
try:
while 1:
nacquire()
try:
if not buffer:
nwait()
finally:
nrelease()
try:
while 1:
obj = bpopleft()
if obj is sentinel:
debug('feeder thread got sentinel -- exiting')
close()
return
if wacquire is None:
send(obj)
else:
wacquire()
try:
send(obj)
finally:
wrelease()
except IndexError:
pass
except Exception as e:
try:
if is_exiting():
info('error in queue thread: %s', e)
else:
import traceback
traceback.print_exc()
except Exception:
pass
_sentinel = object()
# to happen.
#
class JoinableQueue(Queue):
    """
    Queue with task accounting: put() increments an unfinished-task
    counter, task_done() decrements it, and join() blocks until zero.
    """
    def __init__(self, maxsize=0):
        Queue.__init__(self, maxsize)
        self._unfinished_tasks = Semaphore(0)
        self._cond = Condition()
    def __getstate__(self):
        return Queue.__getstate__(self) + (self._cond, self._unfinished_tasks)
    def __setstate__(self, state):
        Queue.__setstate__(self, state[:-2])
        self._cond, self._unfinished_tasks = state[-2:]
    def put(self, obj, block=True, timeout=None):
        """Enqueue obj and bump the unfinished-task counter atomically."""
        assert not self._closed
        if not self._sem.acquire(block, timeout):
            raise Full
        self._notempty.acquire()
        self._cond.acquire()
        try:
            if self._thread is None:
                self._start_thread()
            self._buffer.append(obj)
            self._unfinished_tasks.release()
            self._notempty.notify()
        finally:
            self._cond.release()
            self._notempty.release()
    def task_done(self):
        """Mark one previously-gotten task finished; wakes join() at zero."""
        self._cond.acquire()
        try:
            if not self._unfinished_tasks.acquire(False):
                raise ValueError('task_done() called too many times')
            if self._unfinished_tasks._semlock._is_zero():
                self._cond.notify_all()
        finally:
            self._cond.release()
    def join(self):
        """Block until every enqueued task has been marked done."""
        self._cond.acquire()
        try:
            if not self._unfinished_tasks._semlock._is_zero():
                self._cond.wait()
        finally:
            self._cond.release()
#
# Simplified Queue type -- really just a locked pipe
#
class SimpleQueue(object):
    """Locked-pipe queue; get()/put() are bound per-instance by _make_methods()."""
    def __init__(self):
        self._reader, self._writer = Pipe(duplex=False)
        self._rlock = Lock()
        if sys.platform == 'win32':
            self._wlock = None
        else:
            self._wlock = Lock()
        self._make_methods()
    def empty(self):
        return not self._reader.poll()
    def __getstate__(self):
        # Picklable only while spawning; get/put are rebuilt on unpickle.
        assert_spawning(self)
        return (self._reader, self._writer, self._rlock, self._wlock)
    def __setstate__(self, state):
        (self._reader, self._writer, self._rlock, self._wlock) = state
        self._make_methods()
    def _make_methods(self):
        """Build get()/put() closures over the pipe ends and their locks."""
        recv = self._reader.recv
        racquire, rrelease = self._rlock.acquire, self._rlock.release
        def get():
            racquire()
            try:
                return recv()
            finally:
                rrelease()
        self.get = get
        if self._wlock is None:
            # writes to a message oriented win32 pipe are atomic
            self.put = self._writer.send
        else:
            send = self._writer.send
            wacquire, wrelease = self._wlock.acquire, self._wlock.release
            def put(obj):
                wacquire()
                try:
                    return send(obj)
                finally:
                    wrelease()
            self.put = put
| true | true |
f72fdb09c91a65da5dcb94cfe00e07d00f7cf5cf | 3,669 | py | Python | python/oneflow/test/modules/test_chunk.py | triple-Mu/oneflow | 395da40885016d0b899f8a1eb87e5311a556a9b8 | [
"Apache-2.0"
] | 1 | 2022-03-14T11:17:56.000Z | 2022-03-14T11:17:56.000Z | python/oneflow/test/modules/test_chunk.py | triple-Mu/oneflow | 395da40885016d0b899f8a1eb87e5311a556a9b8 | [
"Apache-2.0"
] | null | null | null | python/oneflow/test/modules/test_chunk.py | triple-Mu/oneflow | 395da40885016d0b899f8a1eb87e5311a556a9b8 | [
"Apache-2.0"
] | null | null | null | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from collections import OrderedDict
from random import shuffle
import numpy as np
from random import shuffle
import oneflow as flow
import oneflow.unittest
from oneflow.test_utils.automated_test_util import *
@flow.unittest.skip_unless_1n1d()
class TestChunk(flow.unittest.TestCase):
@autotest(n=5, check_graph=True)
def test_flow_chunk_list_with_random_data(test_case):
device = random_device()
dim = random(1, 4).to(int)
x = random_tensor(
ndim=4,
dim1=random(low=4, high=8).to(int),
dim2=random(low=4, high=8).to(int),
dim3=random(low=4, high=8).to(int),
).to(device)
y = torch.chunk(x, chunks=random(low=1, high=5).to(int), dim=dim)
z = torch.cat(y, dim=dim)
return z
@autotest(n=10)
def test_flow_chunk_list_with_random_data(test_case):
device = random_device()
dim = random(1, 4).to(int)
x = random_tensor(
ndim=4,
dim1=random(low=4, high=8).to(int),
dim2=random(low=4, high=8).to(int),
dim3=random(low=4, high=8).to(int),
).to(device)
permute_list = [0, 1, 2, 3]
shuffle(permute_list)
y = x.permute(permute_list)
z = torch.chunk(y, chunks=random(low=1, high=5).to(int), dim=dim)
return torch.cat(z, dim=dim)
@autotest(n=5, auto_backward=False, check_graph=True)
def test_flow_chunk_list_with_stride(test_case):
device = random_device()
dim = random(1, 4).to(int)
x = random_tensor(
ndim=4,
dim1=random(low=4, high=8).to(int),
dim2=random(low=4, high=8).to(int),
dim3=random(low=4, high=8).to(int),
).to(device)
perm = [0, 1, 2, 3]
shuffle(perm)
y = x.permute(perm)
z = torch.chunk(y, chunks=random(low=1, high=5).to(int), dim=dim)
return torch.cat(z, dim=dim)
@autotest(n=5, auto_backward=False, check_graph=True)
def test_flow_chunk_list_bool_with_random_data(test_case):
device = random_device()
dim = random(1, 4).to(int)
x = random_tensor(
ndim=4,
dim1=random(low=4, high=8).to(int),
dim2=random(low=4, high=8).to(int),
dim3=random(low=4, high=8).to(int),
).to(device, torch.bool)
y = torch.chunk(x, chunks=random(low=1, high=5).to(int), dim=dim)
z = torch.cat(y, dim=dim)
return z
@autotest(n=5, check_graph=True)
def test_flow_chunk_list_with_random_data_negative_dim(test_case):
device = random_device()
dim = random(1, 3).to(int)
x = random_tensor(
ndim=4,
dim0=random(low=4, high=8).to(int),
dim1=random(low=4, high=8).to(int),
dim2=random(low=4, high=8).to(int),
dim3=random(low=4, high=8).to(int),
).to(device)
y = torch.chunk(x, chunks=4, dim=-1)
z = torch.cat(y, dim=-1)
return z
if __name__ == "__main__":
unittest.main()
| 33.354545 | 73 | 0.613791 |
import unittest
from collections import OrderedDict
from random import shuffle
import numpy as np
from random import shuffle
import oneflow as flow
import oneflow.unittest
from oneflow.test_utils.automated_test_util import *
@flow.unittest.skip_unless_1n1d()
class TestChunk(flow.unittest.TestCase):
@autotest(n=5, check_graph=True)
def test_flow_chunk_list_with_random_data(test_case):
device = random_device()
dim = random(1, 4).to(int)
x = random_tensor(
ndim=4,
dim1=random(low=4, high=8).to(int),
dim2=random(low=4, high=8).to(int),
dim3=random(low=4, high=8).to(int),
).to(device)
y = torch.chunk(x, chunks=random(low=1, high=5).to(int), dim=dim)
z = torch.cat(y, dim=dim)
return z
@autotest(n=10)
def test_flow_chunk_list_with_random_data(test_case):
device = random_device()
dim = random(1, 4).to(int)
x = random_tensor(
ndim=4,
dim1=random(low=4, high=8).to(int),
dim2=random(low=4, high=8).to(int),
dim3=random(low=4, high=8).to(int),
).to(device)
permute_list = [0, 1, 2, 3]
shuffle(permute_list)
y = x.permute(permute_list)
z = torch.chunk(y, chunks=random(low=1, high=5).to(int), dim=dim)
return torch.cat(z, dim=dim)
@autotest(n=5, auto_backward=False, check_graph=True)
def test_flow_chunk_list_with_stride(test_case):
device = random_device()
dim = random(1, 4).to(int)
x = random_tensor(
ndim=4,
dim1=random(low=4, high=8).to(int),
dim2=random(low=4, high=8).to(int),
dim3=random(low=4, high=8).to(int),
).to(device)
perm = [0, 1, 2, 3]
shuffle(perm)
y = x.permute(perm)
z = torch.chunk(y, chunks=random(low=1, high=5).to(int), dim=dim)
return torch.cat(z, dim=dim)
@autotest(n=5, auto_backward=False, check_graph=True)
def test_flow_chunk_list_bool_with_random_data(test_case):
device = random_device()
dim = random(1, 4).to(int)
x = random_tensor(
ndim=4,
dim1=random(low=4, high=8).to(int),
dim2=random(low=4, high=8).to(int),
dim3=random(low=4, high=8).to(int),
).to(device, torch.bool)
y = torch.chunk(x, chunks=random(low=1, high=5).to(int), dim=dim)
z = torch.cat(y, dim=dim)
return z
@autotest(n=5, check_graph=True)
def test_flow_chunk_list_with_random_data_negative_dim(test_case):
device = random_device()
dim = random(1, 3).to(int)
x = random_tensor(
ndim=4,
dim0=random(low=4, high=8).to(int),
dim1=random(low=4, high=8).to(int),
dim2=random(low=4, high=8).to(int),
dim3=random(low=4, high=8).to(int),
).to(device)
y = torch.chunk(x, chunks=4, dim=-1)
z = torch.cat(y, dim=-1)
return z
if __name__ == "__main__":
unittest.main()
| true | true |
f72fdba810e4acacfce8c3f39354b4ef1f6e88b2 | 2,774 | py | Python | src/application/analysis/english_analysis.py | jagoPG/-restaurant-ml-inspector | 4efc7855401cc8cfa9d5e470c14685158a607448 | [
"Apache-2.0"
] | 1 | 2018-07-10T12:53:35.000Z | 2018-07-10T12:53:35.000Z | src/application/analysis/english_analysis.py | jagoPG/-restaurant-ml-inspector | 4efc7855401cc8cfa9d5e470c14685158a607448 | [
"Apache-2.0"
] | null | null | null | src/application/analysis/english_analysis.py | jagoPG/-restaurant-ml-inspector | 4efc7855401cc8cfa9d5e470c14685158a607448 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env
# -*- coding: utf-8 -*-
"""
Copyright 2017-2018 Jagoba Pérez-Gómez
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import logging
from textblob import TextBlob
from src.application.analysis.evaluated_word import EvaluatedWord
class EnglishAnalysis(object):
    """
    Receives an array of reviews and analyses them. The results are stored in
    an array of words that matches with the keyword repository list. A global
    score of all reviews is stored in the $karma$ variable.
    """
    def __init__(self, keyword_repository, reviews):
        self._keyword_repository = keyword_repository
        self._reviews = reviews
        # word text -> EvaluatedWord accumulated across all reviews
        self._words = {}
        # running sentiment score across all processed sentences
        self._karma = 0
    def analyse(self):
        """
        Analyses the reviews that have been set
        """
        for review in self._reviews:
            self.__process_english_review(review)
    def get_results(self):
        """
        :return: Gets the words analysis result
        :returns: EvaluatedWord
        """
        return self._words
    def get_karma(self):
        """
        :return: Gets the global score of all reviews
        :returns: float
        """
        return self._karma
    def __process_english_review(self, review):
        """Score one review's sentences and fold its words into self._words."""
        body = TextBlob(review.review_body)
        for sentence in body.sentences:
            logging.debug('Polarity: %s' % sentence.sentiment.polarity)
            # NOTE(review): running pairwise average -- later sentences weigh
            # exponentially more than earlier ones; confirm this is intended.
            self._karma = (sentence.sentiment.polarity + self._karma) / 2
        for sentence in body.sentences:
            for smaller_word in sentence.split(' '):
                logging.debug('Word: %s' % smaller_word)
                self.__process_word(smaller_word, sentence.sentiment.polarity, review.reference)
    def __process_word(self, word, karma, review_id):
        """Accumulate karma/appearances for `word` if it is a known keyword."""
        word = word.lower()
        if not self.__is_keyword(word):
            return
        if word in self._words:
            # Existing entry: fold in this sentence's polarity and record
            # the review if not already counted.
            word = self._words[word]
            word.add_karma(karma)
            if review_id not in word.appearances:
                word.add_appearance(review_id)
        else:
            word = EvaluatedWord(word, karma, [review_id])
            self._words[word.word] = word
    def __is_keyword(self, word):
        # Only English ('en') keywords from the repository are considered.
        return self._keyword_repository.get_of_name(word, 'en') is not None
| 32.255814 | 96 | 0.655732 |
import logging
from textblob import TextBlob
from src.application.analysis.evaluated_word import EvaluatedWord
class EnglishAnalysis(object):
    """
    Analyses a list of reviews: per-word results matching the keyword
    repository are accumulated in ``_words`` and a global sentiment score
    in ``_karma``.
    """
    def __init__(self, keyword_repository, reviews):
        self._keyword_repository = keyword_repository
        self._reviews = reviews
        # word text -> EvaluatedWord accumulated across all reviews
        self._words = {}
        # running sentiment score across all processed sentences
        self._karma = 0
    def analyse(self):
        """Analyse every review that was passed to the constructor."""
        for review in self._reviews:
            self.__process_english_review(review)
    def get_results(self):
        """Return the per-word analysis results (dict of EvaluatedWord)."""
        return self._words
    def get_karma(self):
        """Return the global sentiment score across all reviews."""
        return self._karma
    def __process_english_review(self, review):
        """Score one review's sentences and fold its words into self._words."""
        body = TextBlob(review.review_body)
        for sentence in body.sentences:
            logging.debug('Polarity: %s' % sentence.sentiment.polarity)
            # NOTE(review): running pairwise average -- later sentences weigh
            # exponentially more than earlier ones; confirm this is intended.
            self._karma = (sentence.sentiment.polarity + self._karma) / 2
        for sentence in body.sentences:
            for smaller_word in sentence.split(' '):
                logging.debug('Word: %s' % smaller_word)
                self.__process_word(smaller_word, sentence.sentiment.polarity, review.reference)
    def __process_word(self, word, karma, review_id):
        """Accumulate karma/appearances for `word` if it is a known keyword."""
        word = word.lower()
        if not self.__is_keyword(word):
            return
        if word in self._words:
            word = self._words[word]
            word.add_karma(karma)
            if review_id not in word.appearances:
                word.add_appearance(review_id)
        else:
            word = EvaluatedWord(word, karma, [review_id])
            self._words[word.word] = word
    def __is_keyword(self, word):
        # Only English ('en') keywords from the repository are considered.
        return self._keyword_repository.get_of_name(word, 'en') is not None
| true | true |
f72fdc1d6884bbc99ff86fadd0864d05af6b34ab | 2,186 | py | Python | method_of_moments/continuous/_loc_scale.py | AlbertFarkhutdinov/method_of_moments | 0a69c63197d7f88a3b57356620b4d84e76543177 | [
"MIT"
] | null | null | null | method_of_moments/continuous/_loc_scale.py | AlbertFarkhutdinov/method_of_moments | 0a69c63197d7f88a3b57356620b4d84e76543177 | [
"MIT"
] | null | null | null | method_of_moments/continuous/_loc_scale.py | AlbertFarkhutdinov/method_of_moments | 0a69c63197d7f88a3b57356620b4d84e76543177 | [
"MIT"
] | null | null | null | """
This module contains description of class for probability distributions
from location-scale family.
"""
from method_of_moments.continuous._base_continuous import BaseContinuous
class LocScale(BaseContinuous):
    """
    Probability distribution belonging to the location-scale family.

    Parameters
    ----------
    loc : float, optional, default: 0.0
        Location parameter of the distribution.
    scale : float, optional, default: 1.0
        Scale parameter of the distribution; must be strictly positive.
    **kwargs : `base.BaseDistribution` properties.

    Methods
    -------
    get_standard_mean(mean)
        Map a mean onto the standard distribution of the family.
    get_standard_variance(variance)
        Map a variance onto the standard distribution of the family.

    Raises
    ------
    ValueError
        If `scale` is not a positive number.
    """
    def __init__(self, loc: float = 0.0, scale: float = 1.0, **kwargs) -> None:
        """Initialize self. See help(type(self)) for accurate signature."""
        super().__init__(**kwargs)
        self.loc = loc
        self.scale = scale
    @property
    def loc(self) -> float:
        """Location parameter of the distribution."""
        return self.__loc
    @loc.setter
    def loc(self, loc: float = 0.0) -> None:
        """Set the location parameter (no constraints)."""
        self.__loc = loc
    @property
    def scale(self) -> float:
        """Scale parameter of the distribution."""
        return self.__scale
    @scale.setter
    def scale(self, scale: float = 1.0) -> None:
        """Set the scale parameter; rejects non-positive values."""
        if scale <= 0:
            raise ValueError('`scale` value must be positive.')
        self.__scale = scale
    def get_standard_mean(self, mean: float):
        """Return the corresponding mean of the family's standard distribution."""
        shifted = mean - self.loc
        return shifted / self.scale
    def get_standard_variance(self, variance: float):
        """Return the corresponding variance of the standard distribution."""
        scale_squared = self.scale ** 2
        return variance / scale_squared
| 28.025641 | 79 | 0.63312 |
from method_of_moments.continuous._base_continuous import BaseContinuous
class LocScale(BaseContinuous):
    """Probability distribution from the location-scale family.

    Parameters
    ----------
    loc : float, optional, default: 0.0
        Location parameter of the distribution.
    scale : float, optional, default: 1.0
        Scale parameter of the distribution; must be positive.
    **kwargs
        Forwarded to the ``BaseContinuous`` constructor.

    Raises
    ------
    ValueError
        If `scale` is a non-positive number.
    """
    def __init__(self, loc: float = 0.0, scale: float = 1.0, **kwargs) -> None:
        """Initialize self. See help(type(self)) for accurate signature."""
        super().__init__(**kwargs)
        self.loc = loc
        self.scale = scale
    @property
    def loc(self) -> float:
        """Location parameter of the distribution."""
        return self.__loc
    @loc.setter
    def loc(self, loc: float = 0.0) -> None:
        """Property setter for `self.loc`."""
        self.__loc = loc
    @property
    def scale(self) -> float:
        """Scale parameter of the distribution."""
        return self.__scale
    @scale.setter
    def scale(self, scale: float = 1.0) -> None:
        """Property setter for `self.scale`; rejects non-positive values."""
        if scale <= 0:
            raise ValueError('`scale` value must be positive.')
        self.__scale = scale
    def get_standard_mean(self, mean: float):
        """Map *mean* onto the standard (loc=0, scale=1) family member."""
        return (mean - self.loc) / self.scale
    def get_standard_variance(self, variance: float):
        """Map *variance* onto the standard (loc=0, scale=1) family member."""
        return variance / self.scale ** 2
| true | true |
f72fdcd3421f334ce1bfe3c860ab1e55aab23f82 | 1,226 | py | Python | test/test_jcampdx.py | MIRCen/brukerapi-python | 5455800895924c69bf839fa621fa7a06d343b4ff | [
"MIT"
] | 7 | 2020-06-30T16:09:20.000Z | 2022-03-09T13:27:55.000Z | test/test_jcampdx.py | MIRCen/brukerapi-python | 5455800895924c69bf839fa621fa7a06d343b4ff | [
"MIT"
] | 2 | 2020-09-06T19:29:36.000Z | 2021-03-15T08:03:46.000Z | test/test_jcampdx.py | MIRCen/brukerapi-python | 5455800895924c69bf839fa621fa7a06d343b4ff | [
"MIT"
] | 1 | 2022-01-20T09:43:45.000Z | 2022-01-20T09:43:45.000Z | from brukerapi.jcampdx import JCAMPDX
import numpy as np
from pathlib import Path
import pytest
@pytest.mark.skip(reason="in progress")
def test_jcampdx(test_jcampdx_data):
    """Check that JCAMPDX parses each reference parameter's size, type and value."""
    spec, base_dir = test_jcampdx_data[0], test_jcampdx_data[1]
    jdx = JCAMPDX(Path(base_dir) / spec['path'])

    for key, ref in spec['parameters'].items():
        parameter = jdx.get_parameter(key)
        value = parameter.value

        # SIZE: the fixture encodes None as the string 'None', tuples as
        # lists, and scalar sizes as plain ints.
        expected_size = ref['size']
        if expected_size == 'None':
            expected_size = None
        if isinstance(expected_size, list):
            expected_size = tuple(expected_size)
        elif isinstance(expected_size, int):
            expected_size = (expected_size,)
        assert expected_size == parameter.size

        # TYPE: stored in the fixture by class name.
        assert ref['type'] == type(value).__name__

        # VALUE: numpy arrays need elementwise comparison.
        if isinstance(value, np.ndarray):
            assert np.array_equal(np.array(ref['value']), value)
        else:
            assert value == ref['value']
| 29.902439 | 74 | 0.626427 | from brukerapi.jcampdx import JCAMPDX
import numpy as np
from pathlib import Path
import pytest
@pytest.mark.skip(reason="in progress")
def test_jcampdx(test_jcampdx_data):
    """Compare every parsed JCAMP-DX parameter with the reference fixture.

    ``test_jcampdx_data`` is a pair: element 0 is a spec dict (with 'path'
    and per-key expected 'size'/'type'/'value'), element 1 is the base
    directory the path is resolved against.
    """
    j = JCAMPDX(Path(test_jcampdx_data[1]) / test_jcampdx_data[0]['path'])
    for key, ref in test_jcampdx_data[0]['parameters'].items():
        parameter_test = j.get_parameter(key)
        size_test= parameter_test.size
        value_test= parameter_test.value
        type_test = value_test.__class__
        value_ref = ref['value']
        size_ref = ref['size']
        type_ref = ref['type']
        # test SIZE: the fixture encodes None as the string 'None',
        # tuples as lists, and scalar sizes as plain ints.
        if size_ref == 'None':
            size_ref = None
        if isinstance(size_ref, list):
            size_ref = tuple(size_ref)
        elif isinstance(size_ref, int):
            size_ref = (size_ref,)
        assert size_ref == size_test
        # test TYPE: the fixture stores the expected class name.
        assert type_ref == type_test.__name__
        # test VALUE: numpy arrays are compared elementwise.
        if isinstance(value_test, np.ndarray):
            value_ref = np.array(value_ref)
            assert np.array_equal(value_ref, value_test)
        elif isinstance(value_test, list):
            assert value_test == value_ref
        else:
            assert value_ref == value_test
| true | true |
f72fdd762dd6a686c705479e1165f5735db40a61 | 1,055 | py | Python | src/lib/telegram/parsemode.py | thonkify/thonkify | 2cb4493d796746cb46c8519a100ef3ef128a761a | [
"MIT"
] | 17 | 2017-08-04T15:41:05.000Z | 2020-10-16T18:02:41.000Z | src/lib/telegram/parsemode.py | thonkify/thonkify | 2cb4493d796746cb46c8519a100ef3ef128a761a | [
"MIT"
] | 3 | 2017-08-04T23:37:37.000Z | 2017-08-04T23:38:34.000Z | src/lib/telegram/parsemode.py | thonkify/thonkify | 2cb4493d796746cb46c8519a100ef3ef128a761a | [
"MIT"
] | 3 | 2017-12-07T16:30:59.000Z | 2019-06-16T02:48:28.000Z | #!/usr/bin/env python
# pylint: disable=R0903
#
# A library that provides a Python interface to the Telegram Bot API
# Copyright (C) 2015-2017
# Leandro Toledo de Souza <devs@python-telegram-bot.org>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser Public License for more details.
#
# You should have received a copy of the GNU Lesser Public License
# along with this program. If not, see [http://www.gnu.org/licenses/].
"""This module contains an object that represents a Telegram
Message Parse Modes."""
class ParseMode(object):
    """Constants naming the Telegram message parse modes."""

    MARKDOWN = 'Markdown'
    HTML = 'HTML'
| 36.37931 | 71 | 0.747867 |
class ParseMode(object):
    """Constants naming the Telegram message parse modes."""
    MARKDOWN = 'Markdown'
    HTML = 'HTML'
| true | true |
f72fdd985d4e4c0bdc2d66e73fde136c53658738 | 3,108 | py | Python | openaerostruct/structures/section_properties_tube.py | lkelvinm/OpenAeroStruct | 395075d28783c1b99b4ab25ddf034000caf9cd0d | [
"Apache-2.0"
] | null | null | null | openaerostruct/structures/section_properties_tube.py | lkelvinm/OpenAeroStruct | 395075d28783c1b99b4ab25ddf034000caf9cd0d | [
"Apache-2.0"
] | null | null | null | openaerostruct/structures/section_properties_tube.py | lkelvinm/OpenAeroStruct | 395075d28783c1b99b4ab25ddf034000caf9cd0d | [
"Apache-2.0"
] | null | null | null | from __future__ import division, print_function
import numpy as np
from openmdao.api import ExplicitComponent
class SectionPropertiesTube(ExplicitComponent):
    """
    Compute cross-sectional properties for tubular FEM elements.

    The wall thickness is added to the interior of the element, so the
    'radius' input is the tube's outer radius.

    Parameters
    ----------
    radius : numpy array
        Outer radii for each FEM element.
    thickness : numpy array
        Tube thickness for each FEM element.

    Returns
    -------
    A : numpy array
        Cross-sectional area for each FEM element.
    Iy : numpy array
        Area moment of inertia around the y-axis for each FEM element.
    Iz : numpy array
        Area moment of inertia around the z-axis for each FEM element.
    J : numpy array
        Polar moment of inertia for each FEM element.
    """

    def initialize(self):
        # Mesh/metadata dictionary; only 'num_y' is consumed here.
        self.options.declare('surface', types=dict)

    def setup(self):
        surface = self.options['surface']
        self.surface = surface
        self.ny = surface['num_y']
        n_elem = self.ny - 1

        self.add_input('radius', val=np.ones(n_elem), units='m')
        self.add_input('thickness', val=np.ones(n_elem) * .1, units='m')

        self.add_output('A', val=np.zeros(n_elem), units='m**2')
        self.add_output('Iy', val=np.zeros(n_elem), units='m**4')
        self.add_output('Iz', val=np.zeros(n_elem), units='m**4')
        self.add_output('J', val=np.zeros(n_elem), units='m**4')

        # Each element's properties depend only on that element's own
        # inputs, so the Jacobian is diagonal.
        diag = np.arange(n_elem)
        self.declare_partials('*', '*', rows=diag, cols=diag)
        self.set_check_partial_options(wrt='*', method='cs')

    def compute(self, inputs, outputs):
        """Evaluate the annulus formulas with inner/outer radii."""
        r_out = inputs['radius']
        # Thickness grows inward from the outer radius.
        r_in = r_out - inputs['thickness']

        quartic = np.pi * (r_out**4 - r_in**4)
        outputs['A'] = np.pi * (r_out**2 - r_in**2)
        outputs['Iy'] = quartic / 4.
        outputs['Iz'] = quartic / 4.
        outputs['J'] = quartic / 2.

    def compute_partials(self, inputs, partials):
        """Analytic derivatives of the annulus formulas.

        With r_in = radius - thickness and r_out = radius, the chain-rule
        factors are d(r_in)/d(radius) = d(r_out)/d(radius) = 1,
        d(r_in)/d(thickness) = -1 and d(r_out)/d(thickness) = 0, so the
        thickness columns reduce to inner-radius terms.
        """
        r_out = inputs['radius'].real
        r_in = r_out - inputs['thickness'].real

        r_in_cu = r_in**3
        r_out_cu = r_out**3

        partials['A', 'radius'] = 2 * np.pi * (r_out - r_in)
        partials['A', 'thickness'] = 2 * np.pi * r_in
        partials['Iy', 'radius'] = np.pi * (r_out_cu - r_in_cu)
        partials['Iy', 'thickness'] = np.pi * r_in_cu
        partials['Iz', 'radius'] = np.pi * (r_out_cu - r_in_cu)
        partials['Iz', 'thickness'] = np.pi * r_in_cu
        partials['J', 'radius'] = 2 * np.pi * (r_out_cu - r_in_cu)
        partials['J', 'thickness'] = 2 * np.pi * r_in_cu
| 35.724138 | 80 | 0.568855 | from __future__ import division, print_function
import numpy as np
from openmdao.api import ExplicitComponent
class SectionPropertiesTube(ExplicitComponent):
def initialize(self):
self.options.declare('surface', types=dict)
def setup(self):
self.surface = surface = self.options['surface']
self.ny = surface['num_y']
self.add_input('radius', val=np.ones((self.ny - 1)), units='m')
self.add_input('thickness', val=np.ones((self.ny - 1)) * .1, units='m')
self.add_output('A', val=np.zeros((self.ny - 1)), units='m**2')
self.add_output('Iy', val=np.zeros((self.ny - 1)), units='m**4')
self.add_output('Iz', val=np.zeros((self.ny - 1)), units='m**4')
self.add_output('J', val=np.zeros((self.ny - 1)), units='m**4')
a = np.arange((self.ny - 1))
self.declare_partials('*', '*', rows=a, cols=a)
self.set_check_partial_options(wrt='*', method='cs')
def compute(self, inputs, outputs):
pi = np.pi
r1 = inputs['radius'] - inputs['thickness']
r2 = inputs['radius']
outputs['A'] = pi * (r2**2 - r1**2)
outputs['Iy'] = pi * (r2**4 - r1**4) / 4.
outputs['Iz'] = pi * (r2**4 - r1**4) / 4.
outputs['J'] = pi * (r2**4 - r1**4) / 2.
def compute_partials(self, inputs, partials):
pi = np.pi
radius = inputs['radius'].real
t = inputs['thickness'].real
r1 = radius - t
r2 = radius
dr1_dr = 1.
dr2_dr = 1.
dr1_dt = -1.
dr2_dt = 0.
r1_3 = r1**3
r2_3 = r2**3
partials['A', 'radius'] = 2 * pi * (r2 * dr2_dr - r1 * dr1_dr)
partials['A', 'thickness'] = 2 * pi * (r2 * dr2_dt - r1 * dr1_dt)
partials['Iy', 'radius'] = pi * (r2_3 * dr2_dr - r1_3 * dr1_dr)
partials['Iy', 'thickness'] = pi * (r2_3 * dr2_dt - r1_3 * dr1_dt)
partials['Iz', 'radius'] = pi * (r2_3 * dr2_dr - r1_3 * dr1_dr)
partials['Iz', 'thickness'] = pi * (r2_3 * dr2_dt - r1_3 * dr1_dt)
partials['J', 'radius'] = 2 * pi * (r2_3 * dr2_dr - r1_3 * dr1_dr)
partials['J', 'thickness'] = 2 * pi * (r2_3 * dr2_dt - r1_3 * dr1_dt)
| true | true |
f72fdda2808488fef61058f47c4ebf00428e8bf0 | 9,861 | py | Python | devel/lib/python2.7/dist-packages/mav_manager/srv/_GoalTimed.py | MultiRobotUPenn/groundstation_ws_vio_swarm | 60e01af6bf32bafb5bc31626b055436278dc8311 | [
"MIT"
] | 1 | 2020-03-10T06:32:51.000Z | 2020-03-10T06:32:51.000Z | install/lib/python2.7/dist-packages/mav_manager/srv/_GoalTimed.py | MultiRobotUPenn/groundstation_ws_vio_swarm | 60e01af6bf32bafb5bc31626b055436278dc8311 | [
"MIT"
] | null | null | null | install/lib/python2.7/dist-packages/mav_manager/srv/_GoalTimed.py | MultiRobotUPenn/groundstation_ws_vio_swarm | 60e01af6bf32bafb5bc31626b055436278dc8311 | [
"MIT"
] | 1 | 2018-11-07T03:37:23.000Z | 2018-11-07T03:37:23.000Z | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from mav_manager/GoalTimedRequest.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import genpy
class GoalTimedRequest(genpy.Message):
_md5sum = "3c9a1ea281c62219122f22aa2b508b97"
_type = "mav_manager/GoalTimedRequest"
_has_header = False #flag to mark the presence of a Header object
_full_text = """float32[4] goal
duration duration
time t_start
"""
__slots__ = ['goal','duration','t_start']
_slot_types = ['float32[4]','duration','time']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
goal,duration,t_start
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GoalTimedRequest, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.goal is None:
self.goal = [0.] * 4
if self.duration is None:
self.duration = genpy.Duration()
if self.t_start is None:
self.t_start = genpy.Time()
else:
self.goal = [0.] * 4
self.duration = genpy.Duration()
self.t_start = genpy.Time()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
buff.write(_get_struct_4f().pack(*self.goal))
_x = self
buff.write(_get_struct_2i2I().pack(_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
if self.duration is None:
self.duration = genpy.Duration()
if self.t_start is None:
self.t_start = genpy.Time()
end = 0
start = end
end += 16
self.goal = _get_struct_4f().unpack(str[start:end])
_x = self
start = end
end += 16
(_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs,) = _get_struct_2i2I().unpack(str[start:end])
self.duration.canon()
self.t_start.canon()
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
buff.write(self.goal.tostring())
_x = self
buff.write(_get_struct_2i2I().pack(_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
if self.duration is None:
self.duration = genpy.Duration()
if self.t_start is None:
self.t_start = genpy.Time()
end = 0
start = end
end += 16
self.goal = numpy.frombuffer(str[start:end], dtype=numpy.float32, count=4)
_x = self
start = end
end += 16
(_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs,) = _get_struct_2i2I().unpack(str[start:end])
self.duration.canon()
self.t_start.canon()
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_4f = None
def _get_struct_4f():
global _struct_4f
if _struct_4f is None:
_struct_4f = struct.Struct("<4f")
return _struct_4f
_struct_2i2I = None
def _get_struct_2i2I():
global _struct_2i2I
if _struct_2i2I is None:
_struct_2i2I = struct.Struct("<2i2I")
return _struct_2i2I
# This Python file uses the following encoding: utf-8
"""autogenerated by genpy from mav_manager/GoalTimedResponse.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class GoalTimedResponse(genpy.Message):
_md5sum = "937c9679a518e3a18d831e57125ea522"
_type = "mav_manager/GoalTimedResponse"
_has_header = False #flag to mark the presence of a Header object
_full_text = """bool success
string message
"""
__slots__ = ['success','message']
_slot_types = ['bool','string']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
success,message
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(GoalTimedResponse, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.success is None:
self.success = False
if self.message is None:
self.message = ''
else:
self.success = False
self.message = ''
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
buff.write(_get_struct_B().pack(self.success))
_x = self.message
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
start = end
end += 1
(self.success,) = _get_struct_B().unpack(str[start:end])
self.success = bool(self.success)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.message = str[start:end].decode('utf-8')
else:
self.message = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
buff.write(_get_struct_B().pack(self.success))
_x = self.message
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
start = end
end += 1
(self.success,) = _get_struct_B().unpack(str[start:end])
self.success = bool(self.success)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.message = str[start:end].decode('utf-8')
else:
self.message = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_B = None
def _get_struct_B():
global _struct_B
if _struct_B is None:
_struct_B = struct.Struct("<B")
return _struct_B
class GoalTimed(object):
_type = 'mav_manager/GoalTimed'
_md5sum = '3200a97d30222d1d03961acacb87f306'
_request_class = GoalTimedRequest
_response_class = GoalTimedResponse
| 33.540816 | 145 | 0.653179 |
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import genpy
class GoalTimedRequest(genpy.Message):
_md5sum = "3c9a1ea281c62219122f22aa2b508b97"
_type = "mav_manager/GoalTimedRequest"
_has_header = False
_full_text = """float32[4] goal
duration duration
time t_start
"""
__slots__ = ['goal','duration','t_start']
_slot_types = ['float32[4]','duration','time']
def __init__(self, *args, **kwds):
if args or kwds:
super(GoalTimedRequest, self).__init__(*args, **kwds)
if self.goal is None:
self.goal = [0.] * 4
if self.duration is None:
self.duration = genpy.Duration()
if self.t_start is None:
self.t_start = genpy.Time()
else:
self.goal = [0.] * 4
self.duration = genpy.Duration()
self.t_start = genpy.Time()
def _get_types(self):
return self._slot_types
def serialize(self, buff):
try:
buff.write(_get_struct_4f().pack(*self.goal))
_x = self
buff.write(_get_struct_2i2I().pack(_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
try:
if self.duration is None:
self.duration = genpy.Duration()
if self.t_start is None:
self.t_start = genpy.Time()
end = 0
start = end
end += 16
self.goal = _get_struct_4f().unpack(str[start:end])
_x = self
start = end
end += 16
(_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs,) = _get_struct_2i2I().unpack(str[start:end])
self.duration.canon()
self.t_start.canon()
return self
except struct.error as e:
raise genpy.DeserializationError(e)
def serialize_numpy(self, buff, numpy):
try:
buff.write(self.goal.tostring())
_x = self
buff.write(_get_struct_2i2I().pack(_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
try:
if self.duration is None:
self.duration = genpy.Duration()
if self.t_start is None:
self.t_start = genpy.Time()
end = 0
start = end
end += 16
self.goal = numpy.frombuffer(str[start:end], dtype=numpy.float32, count=4)
_x = self
start = end
end += 16
(_x.duration.secs, _x.duration.nsecs, _x.t_start.secs, _x.t_start.nsecs,) = _get_struct_2i2I().unpack(str[start:end])
self.duration.canon()
self.t_start.canon()
return self
except struct.error as e:
raise genpy.DeserializationError(e)
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_4f = None
def _get_struct_4f():
global _struct_4f
if _struct_4f is None:
_struct_4f = struct.Struct("<4f")
return _struct_4f
_struct_2i2I = None
def _get_struct_2i2I():
global _struct_2i2I
if _struct_2i2I is None:
_struct_2i2I = struct.Struct("<2i2I")
return _struct_2i2I
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class GoalTimedResponse(genpy.Message):
_md5sum = "937c9679a518e3a18d831e57125ea522"
_type = "mav_manager/GoalTimedResponse"
_has_header = False
_full_text = """bool success
string message
"""
__slots__ = ['success','message']
_slot_types = ['bool','string']
def __init__(self, *args, **kwds):
if args or kwds:
super(GoalTimedResponse, self).__init__(*args, **kwds)
if self.success is None:
self.success = False
if self.message is None:
self.message = ''
else:
self.success = False
self.message = ''
def _get_types(self):
return self._slot_types
def serialize(self, buff):
try:
buff.write(_get_struct_B().pack(self.success))
_x = self.message
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
try:
end = 0
start = end
end += 1
(self.success,) = _get_struct_B().unpack(str[start:end])
self.success = bool(self.success)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.message = str[start:end].decode('utf-8')
else:
self.message = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e)
def serialize_numpy(self, buff, numpy):
try:
buff.write(_get_struct_B().pack(self.success))
_x = self.message
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.pack('<I%ss'%length, length, _x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
try:
end = 0
start = end
end += 1
(self.success,) = _get_struct_B().unpack(str[start:end])
self.success = bool(self.success)
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.message = str[start:end].decode('utf-8')
else:
self.message = str[start:end]
return self
except struct.error as e:
raise genpy.DeserializationError(e)
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_B = None
def _get_struct_B():
global _struct_B
if _struct_B is None:
_struct_B = struct.Struct("<B")
return _struct_B
class GoalTimed(object):
_type = 'mav_manager/GoalTimed'
_md5sum = '3200a97d30222d1d03961acacb87f306'
_request_class = GoalTimedRequest
_response_class = GoalTimedResponse
| true | true |
f72fde34553d0101da278cb9f85832174a12acbb | 1,294 | py | Python | src/image-gallery/azext_image_gallery/_client_factory.py | haroonf/azure-cli-extensions | 61c044d34c224372f186934fa7c9313f1cd3a525 | [
"MIT"
] | 207 | 2017-11-29T06:59:41.000Z | 2022-03-31T10:00:53.000Z | src/image-gallery/azext_image_gallery/_client_factory.py | haroonf/azure-cli-extensions | 61c044d34c224372f186934fa7c9313f1cd3a525 | [
"MIT"
] | 4,061 | 2017-10-27T23:19:56.000Z | 2022-03-31T23:18:30.000Z | src/image-gallery/azext_image_gallery/_client_factory.py | haroonf/azure-cli-extensions | 61c044d34c224372f186934fa7c9313f1cd3a525 | [
"MIT"
] | 802 | 2017-10-11T17:36:26.000Z | 2022-03-31T22:24:32.000Z | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
def _compute_client_factory(cli_ctx):
    """Build a vendored ComputeManagementClient bound to *cli_ctx*."""
    from azure.cli.core.commands.client_factory import get_mgmt_service_client
    from .vendored_sdks.azure_mgmt_compute._compute_management_client import ComputeManagementClient
    return get_mgmt_service_client(cli_ctx, ComputeManagementClient)
def cf_galleries(cli_ctx, _):
    """Client factory for the `galleries` operations group."""
    return _compute_client_factory(cli_ctx).galleries
def cf_gallery_images(cli_ctx, _):
    """Client factory for the `gallery_images` operations group."""
    return _compute_client_factory(cli_ctx).gallery_images
def cf_community_gallery(cli_ctx, *_):
    """Client factory for the `community_galleries` operations group."""
    return _compute_client_factory(cli_ctx).community_galleries
def cf_community_gallery_image(cli_ctx, *_):
    """Client factory for the `community_gallery_images` operations group."""
    return _compute_client_factory(cli_ctx).community_gallery_images
def cf_community_gallery_image_version(cli_ctx, *_):
    """Client factory for the `community_gallery_image_versions` operations group."""
    return _compute_client_factory(cli_ctx).community_gallery_image_versions
def cf_community_gallery_sharing_profile(cli_ctx, *_):
    """Client factory for the `gallery_sharing_profile` operations group."""
    return _compute_client_factory(cli_ctx).gallery_sharing_profile
| 36.971429 | 100 | 0.710974 |
def _compute_client_factory(cli_ctx):
    """Build a vendored ComputeManagementClient bound to *cli_ctx*."""
    from azure.cli.core.commands.client_factory import get_mgmt_service_client
    from .vendored_sdks.azure_mgmt_compute._compute_management_client import ComputeManagementClient
    return get_mgmt_service_client(cli_ctx, ComputeManagementClient)
def cf_galleries(cli_ctx, _):
    """Client factory for the `galleries` operations group."""
    return _compute_client_factory(cli_ctx).galleries
def cf_gallery_images(cli_ctx, _):
    """Client factory for the `gallery_images` operations group."""
    return _compute_client_factory(cli_ctx).gallery_images
def cf_community_gallery(cli_ctx, *_):
    """Client factory for the `community_galleries` operations group."""
    return _compute_client_factory(cli_ctx).community_galleries
def cf_community_gallery_image(cli_ctx, *_):
    """Client factory for the `community_gallery_images` operations group."""
    return _compute_client_factory(cli_ctx).community_gallery_images
def cf_community_gallery_image_version(cli_ctx, *_):
    """Client factory for the `community_gallery_image_versions` operations group."""
    return _compute_client_factory(cli_ctx).community_gallery_image_versions
def cf_community_gallery_sharing_profile(cli_ctx, *_):
    """Client factory for the `gallery_sharing_profile` operations group."""
    return _compute_client_factory(cli_ctx).gallery_sharing_profile
| true | true |
f72fde9be8445c641564cc9689aca34ffff96645 | 5,280 | py | Python | mmdet/core/hook/ema.py | mrzhuzhe/mmdetection | c04ca2c2a65500bc248a5d2ab6ace5b15f00064d | [
"Apache-2.0"
] | null | null | null | mmdet/core/hook/ema.py | mrzhuzhe/mmdetection | c04ca2c2a65500bc248a5d2ab6ace5b15f00064d | [
"Apache-2.0"
] | null | null | null | mmdet/core/hook/ema.py | mrzhuzhe/mmdetection | c04ca2c2a65500bc248a5d2ab6ace5b15f00064d | [
"Apache-2.0"
] | null | null | null | # Copyright (c) OpenMMLab. All rights reserved.
import math
from mmcv.parallel import is_module_wrapper
from mmcv.runner.hooks import HOOKS, Hook
class BaseEMAHook(Hook):
"""Exponential Moving Average Hook.
Use Exponential Moving Average on all parameters of model in training
process. All parameters have a ema backup, which update by the formula
as below. EMAHook takes priority over EvalHook and CheckpointHook. Note,
the original model parameters are actually saved in ema field after train.
Args:
momentum (float): The momentum used for updating ema parameter.
Ema's parameter are updated with the formula:
`ema_param = (1-momentum) * ema_param + momentum * cur_param`.
Defaults to 0.0002.
skip_buffers (bool): Whether to skip the model buffers, such as
batchnorm running stats (running_mean, running_var), it does not
perform the ema operation. Default to False.
interval (int): Update ema parameter every interval iteration.
Defaults to 1.
resume_from (str, optional): The checkpoint path. Defaults to None.
momentum_fun (func, optional): The function to change momentum
during early iteration (also warmup) to help early training.
It uses `momentum` as a constant. Defaults to None.
"""
def __init__(self,
momentum=0.0002,
interval=1,
skip_buffers=False,
resume_from=None,
momentum_fun=None):
assert 0 < momentum < 1
self.momentum = momentum
self.skip_buffers = skip_buffers
self.interval = interval
self.checkpoint = resume_from
self.momentum_fun = momentum_fun
def before_run(self, runner):
"""To resume model with it's ema parameters more friendly.
Register ema parameter as ``named_buffer`` to model.
"""
model = runner.model
if is_module_wrapper(model):
model = model.module
self.param_ema_buffer = {}
if self.skip_buffers:
self.model_parameters = dict(model.named_parameters())
else:
self.model_parameters = model.state_dict()
for name, value in self.model_parameters.items():
# "." is not allowed in module's buffer name
buffer_name = f"ema_{name.replace('.', '_')}"
self.param_ema_buffer[name] = buffer_name
model.register_buffer(buffer_name, value.data.clone())
self.model_buffers = dict(model.named_buffers())
if self.checkpoint is not None:
runner.resume(self.checkpoint)
def get_momentum(self, runner):
return self.momentum_fun(runner.iter) if self.momentum_fun else \
self.momentum
def after_train_iter(self, runner):
"""Update ema parameter every self.interval iterations."""
if (runner.iter + 1) % self.interval != 0:
return
momentum = self.get_momentum(runner)
for name, parameter in self.model_parameters.items():
# exclude num_tracking
if parameter.dtype.is_floating_point:
buffer_name = self.param_ema_buffer[name]
buffer_parameter = self.model_buffers[buffer_name]
buffer_parameter.mul_(1 - momentum).add_(
parameter.data, alpha=momentum)
    def after_train_epoch(self, runner):
        """Swap the EMA weights into the model so the following EvalHook
        evaluates the EMA parameters; the live weights are parked in the
        ema buffers meanwhile."""
        self._swap_ema_parameters()
    def before_train_epoch(self, runner):
        """Swap the live weights back for training, undoing the swap done
        by ``after_train_epoch`` around the previous epoch's EvalHook."""
        self._swap_ema_parameters()
def _swap_ema_parameters(self):
"""Swap the parameter of model with parameter in ema_buffer."""
for name, value in self.model_parameters.items():
temp = value.data.clone()
ema_buffer = self.model_buffers[self.param_ema_buffer[name]]
value.data.copy_(ema_buffer.data)
ema_buffer.data.copy_(temp)
@HOOKS.register_module()
class ExpMomentumEMAHook(BaseEMAHook):
    """EMA hook whose momentum decays exponentially.

    The momentum used at iteration ``x`` is
    ``(1 - momentum) * exp(-(1 + x) / total_iter) + momentum``: it starts
    close to 1 and settles at the constant ``momentum``.

    Args:
        total_iter (int): Number of iterations over which the momentum
            decays. Defaults to 2000.
    """

    def __init__(self, total_iter=2000, **kwargs):
        super().__init__(**kwargs)

        def _exp_momentum(x):
            decay = math.exp(-(1 + x) / total_iter)
            return (1 - self.momentum) * decay + self.momentum

        self.momentum_fun = _exp_momentum
@HOOKS.register_module()
class LinearMomentumEMAHook(BaseEMAHook):
    """EMA hook with a warm-up momentum schedule.

    The momentum used at iteration ``x`` is the smaller of the constant
    ``momentum ** interval`` and the ramp ``(1 + x) / (warm_up + x)``.

    Args:
        warm_up (int): During first warm_up steps, we may use smaller decay
            to update ema parameters more slowly. Defaults to 100.
    """

    def __init__(self, warm_up=100, **kwargs):
        super().__init__(**kwargs)

        def _linear_momentum(x):
            ramp = (1 + x) / (warm_up + x)
            return min(self.momentum ** self.interval, ramp)

        self.momentum_fun = _linear_momentum
| 40.305344 | 79 | 0.621212 |
import math
from mmcv.parallel import is_module_wrapper
from mmcv.runner.hooks import HOOKS, Hook
class BaseEMAHook(Hook):
    """Exponential-moving-average hook for model parameters.

    Keeps an EMA copy of every tracked parameter in registered buffers
    named ``ema_<name>`` and updates it every ``interval`` iterations with
    ``ema = (1 - momentum) * ema + momentum * param``.  The EMA and live
    weights are swapped around each epoch's evaluation, so eval sees the
    EMA parameters while training continues on the live ones.
    """
    def __init__(self,
                 momentum=0.0002,
                 interval=1,
                 skip_buffers=False,
                 resume_from=None,
                 momentum_fun=None):
        """Store the configuration; ``momentum`` must be in (0, 1)."""
        assert 0 < momentum < 1
        self.momentum = momentum
        self.skip_buffers = skip_buffers
        self.interval = interval
        # Checkpoint path consumed by ``before_run`` (or None).
        self.checkpoint = resume_from
        # Optional momentum schedule called with the iteration index.
        self.momentum_fun = momentum_fun
    def before_run(self, runner):
        """Register one ``ema_*`` buffer per tracked parameter and
        optionally resume from ``self.checkpoint``."""
        model = runner.model
        if is_module_wrapper(model):
            # Unwrap (Distributed)DataParallel-style wrappers.
            model = model.module
        self.param_ema_buffer = {}
        if self.skip_buffers:
            # Track only learnable parameters (skip running stats etc.).
            self.model_parameters = dict(model.named_parameters())
        else:
            # Track everything in the state dict, buffers included.
            self.model_parameters = model.state_dict()
        for name, value in self.model_parameters.items():
            # "." is not allowed in a buffer name, so flatten it.
            buffer_name = f"ema_{name.replace('.', '_')}"
            self.param_ema_buffer[name] = buffer_name
            model.register_buffer(buffer_name, value.data.clone())
        self.model_buffers = dict(model.named_buffers())
        if self.checkpoint is not None:
            runner.resume(self.checkpoint)
    def get_momentum(self, runner):
        """Momentum for the current iteration (scheduled or constant)."""
        return self.momentum_fun(runner.iter) if self.momentum_fun else \
            self.momentum
    def after_train_iter(self, runner):
        """Fold the live weights into the EMA buffers every
        ``self.interval`` iterations; non-floating tensors are skipped."""
        if (runner.iter + 1) % self.interval != 0:
            return
        momentum = self.get_momentum(runner)
        for name, parameter in self.model_parameters.items():
            # exclude num_tracking
            if parameter.dtype.is_floating_point:
                buffer_name = self.param_ema_buffer[name]
                buffer_parameter = self.model_buffers[buffer_name]
                buffer_parameter.mul_(1 - momentum).add_(
                    parameter.data, alpha=momentum)
    def after_train_epoch(self, runner):
        """Swap the EMA weights in before evaluation."""
        self._swap_ema_parameters()
    def before_train_epoch(self, runner):
        """Swap the live weights back for the next training epoch."""
        self._swap_ema_parameters()
    def _swap_ema_parameters(self):
        """Exchange each parameter tensor with its EMA buffer in place."""
        for name, value in self.model_parameters.items():
            temp = value.data.clone()
            ema_buffer = self.model_buffers[self.param_ema_buffer[name]]
            value.data.copy_(ema_buffer.data)
            ema_buffer.data.copy_(temp)
@HOOKS.register_module()
class ExpMomentumEMAHook(BaseEMAHook):
    """EMA hook with an exponentially decaying momentum schedule:
    ``momentum(x) = (1 - momentum) * exp(-(1 + x) / total_iter) + momentum``,
    ramping from ~1 down to the constant ``momentum``.
    """
    def __init__(self, total_iter=2000, **kwargs):
        super(ExpMomentumEMAHook, self).__init__(**kwargs)
        self.momentum_fun = lambda x: (1 - self.momentum) * math.exp(-(
            1 + x) / total_iter) + self.momentum
@HOOKS.register_module()
class LinearMomentumEMAHook(BaseEMAHook):
    """EMA hook with a warm-up momentum schedule: the momentum at
    iteration ``x`` is ``min(momentum ** interval, (1 + x) / (warm_up + x))``.
    """
    def __init__(self, warm_up=100, **kwargs):
        super(LinearMomentumEMAHook, self).__init__(**kwargs)
        self.momentum_fun = lambda x: min(self.momentum**self.interval,
                                          (1 + x) / (warm_up + x))
| true | true |
f72fdecee874f57c54aafbb15866dc4f007451be | 1,697 | py | Python | **PyBank**/main.py | cathchristabel/Python-Challenge | f8a56210c15785626c693101f12173c9b55f3c9d | [
"ADSL"
] | null | null | null | **PyBank**/main.py | cathchristabel/Python-Challenge | f8a56210c15785626c693101f12173c9b55f3c9d | [
"ADSL"
] | null | null | null | **PyBank**/main.py | cathchristabel/Python-Challenge | f8a56210c15785626c693101f12173c9b55f3c9d | [
"ADSL"
] | null | null | null | import os
import csv
# Input CSV and report destination, relative to the project layout.
filepath = os.path.join('..', '**PyBank**', 'Resources', 'budget_data.csv')
output_path = os.path.join('..', '**PyBank**', 'financial_analysis.txt')


def summarize(rows):
    """Build the financial-analysis report for the budget rows.

    Args:
        rows: sequence of CSV rows (header excluded) where ``row[0]`` is
            the month label and ``row[1]`` the profit/loss for that month.

    Returns:
        The formatted multi-line report string.

    Raises:
        ZeroDivisionError: if fewer than two rows are supplied (no
            month-over-month change exists), matching the original script.
    """
    amounts = [int(row[1]) for row in rows]
    total_months = len(rows)
    total_net = sum(amounts)
    # Month-over-month deltas; the first month has no predecessor.
    changes = [curr - prev for prev, curr in zip(amounts, amounts[1:])]
    months = [row[0] for row in rows][1:]
    average_change = sum(changes) / len(changes)
    # max()/min() return the first extremum, matching the original
    # strict-comparison scan (and fixing the all-negative-changes case,
    # where the old ``["", 0]`` sentinel was never replaced).
    best = max(range(len(changes)), key=changes.__getitem__)
    worst = min(range(len(changes)), key=changes.__getitem__)
    return (f'Financial Analysis\n'
            f'-------------------\n'
            f'Total Months: {total_months}\n'
            f'Total: ${total_net}\n'
            f'Average Change: ${average_change:.2f}\n'
            f'Greatest Increase in Profits: {months[best]} (${changes[best]})\n'
            f'Greatest Decrease in Profits: {months[worst]} (${changes[worst]})')


def main():
    """Read the budget CSV, print the report and write it to disk."""
    with open(filepath, newline='') as csvfile:
        csvreader = csv.reader(csvfile, delimiter=',')
        next(csvreader)  # skip the header row
        rows = list(csvreader)
    output = summarize(rows)
    print(output)
    with open(output_path, "w") as txt_file:
        txt_file.write(output)


if __name__ == '__main__':
    main()
| 31.425926 | 93 | 0.6264 | import os
import csv
# Input CSV and report destination, relative to the project layout.
filepath = os.path.join('..','**PyBank**','Resources','budget_data.csv')
output_path = os.path.join('..','**PyBank**','financial_analysis.txt')
# Running totals and per-month change bookkeeping.
total_months = 0
total_net = 0
net_change_list = []
month_of_change = []
# [month, value] trackers; the decrease tracker starts at a huge
# sentinel so any realistic change replaces it.
greatest_increase = ["", 0]
greatest_decrease = ["", 9999999999999]
with open (filepath, newline = '') as csvfile:
    csvreader = csv.reader(csvfile, delimiter = ',')
    header = next(csvreader)
    # The first data row seeds the totals; it has no month-over-month change.
    first_row = next(csvreader)
    total_months = total_months + 1
    total_net = total_net + int(first_row[1])
    prev_net = int(first_row[1])
    for row in csvreader:
        total_months = total_months + 1
        total_net += int(row[1])
        # Profit/loss delta versus the previous month.
        net_change = int(row[1]) - prev_net
        prev_net = int(row[1])
        net_change_list = net_change_list + [net_change]
        month_of_change = month_of_change + [row[0]]
        if net_change > greatest_increase[1]:
            greatest_increase[0] = row[0]
            greatest_increase[1] = net_change
        if net_change < greatest_decrease[1]:
            greatest_decrease[0] = row[0]
            greatest_decrease[1] = net_change
# Mean month-over-month change across all but the first month.
average_change = sum(net_change_list) / len(net_change_list)
output = (f'Financial Analysis\n'
          f'-------------------\n'
          f'Total Months: {total_months}\n'
          f'Total: ${total_net}\n'
          f'Average Change: ${average_change:.2f}\n'
          f'Greatest Increase in Profits: {greatest_increase[0]} (${greatest_increase[1]})\n'
          f'Greatest Decrease in Profits: {greatest_decrease[0]} (${greatest_decrease[1]})')
# Emit the report to stdout and to the analysis text file.
print(output)
with open(output_path, "w") as txt_file:
    txt_file.write(output)
| true | true |
f72fe00487d7fd4d4a1b45f52317911518a2dda8 | 923 | py | Python | integration_tests/src/main/python/marks.py | wbo4958/spark-rapids | 2b18d10313b57aaf6541f40da571c98abcdbc908 | [
"Apache-2.0"
] | null | null | null | integration_tests/src/main/python/marks.py | wbo4958/spark-rapids | 2b18d10313b57aaf6541f40da571c98abcdbc908 | [
"Apache-2.0"
] | null | null | null | integration_tests/src/main/python/marks.py | wbo4958/spark-rapids | 2b18d10313b57aaf6541f40da571c98abcdbc908 | [
"Apache-2.0"
] | null | null | null | # Copyright (c) 2020-2021, NVIDIA CORPORATION.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
# Module-level aliases for the custom pytest marks used by this test
# suite, so tests can decorate with e.g. ``@incompat`` instead of
# ``@pytest.mark.incompat``.
allow_non_gpu = pytest.mark.allow_non_gpu
approximate_float = pytest.mark.approximate_float
ignore_order = pytest.mark.ignore_order
incompat = pytest.mark.incompat
limit = pytest.mark.limit
qarun = pytest.mark.qarun
cudf_udf = pytest.mark.cudf_udf
rapids_udf_example_native = pytest.mark.rapids_udf_example_native
| 36.92 | 74 | 0.789816 |
import pytest
# Shorthand aliases for the suite's custom pytest marks.
allow_non_gpu = pytest.mark.allow_non_gpu
approximate_float = pytest.mark.approximate_float
ignore_order = pytest.mark.ignore_order
incompat = pytest.mark.incompat
limit = pytest.mark.limit
qarun = pytest.mark.qarun
cudf_udf = pytest.mark.cudf_udf
rapids_udf_example_native = pytest.mark.rapids_udf_example_native
| true | true |
f72fe031967fabab6e73cfb6ef6a29f19e93d585 | 473 | py | Python | src/maestral_cocoa/constants.py | SamSchott/maestral-cocoa | bb031b2df010ae84e058fadd3a1b10b19d23b762 | [
"MIT"
] | 8 | 2020-11-13T08:48:01.000Z | 2021-12-16T06:30:27.000Z | macOS/Xcode/Maestral/Maestral/app/maestral_cocoa/constants.py | SamSchott/maestral-cocoa | bb031b2df010ae84e058fadd3a1b10b19d23b762 | [
"MIT"
] | 4 | 2021-08-23T20:41:39.000Z | 2021-11-16T08:43:58.000Z | src/maestral_cocoa/constants.py | SamSchott/maestral-cocoa | bb031b2df010ae84e058fadd3a1b10b19d23b762 | [
"MIT"
] | 1 | 2021-11-09T07:14:44.000Z | 2021-11-09T07:14:44.000Z | # -*- coding: utf-8 -*-
# system imports
import sys
try:
    from importlib.metadata import metadata
except ImportError:
    # Backwards compatibility Python 3.7 and lower
    from importlib_metadata import metadata  # type: ignore
# NOTE(review): assumes the process was launched from a package entry
# point; ``__main__.__package__`` may be None for plain scripts -- confirm.
_app_module = sys.modules["__main__"].__package__
_md = metadata(_app_module)  # type: ignore
# detect if we have been built with briefcase or frozen with PyInstaller
FROZEN = "Briefcase-Version" in _md or getattr(sys, "frozen", False)
| 26.277778 | 72 | 0.744186 |
import sys
try:
    from importlib.metadata import metadata
except ImportError:
    # Fallback backport for Python <= 3.7.
    from importlib_metadata import metadata 
# Distribution metadata of the package that launched the app.
_app_module = sys.modules["__main__"].__package__
_md = metadata(_app_module) 
# True when packaged with Briefcase or frozen (e.g. via PyInstaller).
FROZEN = "Briefcase-Version" in _md or getattr(sys, "frozen", False)
| true | true |
f72fe13e3737561fcf3652de947a89127a226c44 | 619 | py | Python | scraper_app/pipelines.py | brian-yang/pollen-scraper | 77e47d68bb1c6ca31e7b91550728fa59e9cb2d8a | [
"MIT"
] | null | null | null | scraper_app/pipelines.py | brian-yang/pollen-scraper | 77e47d68bb1c6ca31e7b91550728fa59e9cb2d8a | [
"MIT"
] | null | null | null | scraper_app/pipelines.py | brian-yang/pollen-scraper | 77e47d68bb1c6ca31e7b91550728fa59e9cb2d8a | [
"MIT"
] | null | null | null | from sqlalchemy.orm import sessionmaker
from models import Forecasts, db_connect, create_forecast_table
import logging
class PollenScraperPipeline(object):
    """Item pipeline that stores scraped forecasts through SQLAlchemy."""

    def __init__(self):
        """Open the DB engine, make sure the forecast table exists and
        prepare a bound session factory."""
        engine = db_connect()
        create_forecast_table(engine)
        self.Session = sessionmaker(bind=engine)

    def process_item(self, item, spider):
        """Persist ``item`` as a ``Forecasts`` row; the item is returned
        unchanged so later pipeline stages still see it."""
        session = self.Session()
        record = Forecasts(**item)
        try:
            session.add(record)
            session.commit()
        except:  # noqa: E722 - roll back on any error, then re-raise
            session.rollback()
            raise
        finally:
            session.close()
        return item
| 24.76 | 63 | 0.620355 | from sqlalchemy.orm import sessionmaker
from models import Forecasts, db_connect, create_forecast_table
import logging
class PollenScraperPipeline(object):
    """Item pipeline that writes each scraped forecast to the database."""
    def __init__(self):
        """Connect, create the forecast table if needed and bind a
        session factory."""
        engine = db_connect()
        create_forecast_table(engine)
        self.Session = sessionmaker(bind=engine)
    def process_item(self, item, spider):
        """Insert ``item`` as a ``Forecasts`` row and return the item."""
        session = self.Session()
        forecast = Forecasts(**item)
        try:
            session.add(forecast)
            session.commit()
        except:
            # Roll back the failed transaction, then propagate the error.
            session.rollback()
            raise
        finally:
            # Always release the session.
            session.close()
        return item
| true | true |
f72fe1c22165e6e851d2bbb6a57c5a9a578e49f4 | 845 | py | Python | keras/engine/saving.py | itsraina/keras | 5e9376b5b94b6fb445dd52dbfafbc4e95bff5e35 | [
"Apache-2.0"
] | null | null | null | keras/engine/saving.py | itsraina/keras | 5e9376b5b94b6fb445dd52dbfafbc4e95bff5e35 | [
"Apache-2.0"
] | null | null | null | keras/engine/saving.py | itsraina/keras | 5e9376b5b94b6fb445dd52dbfafbc4e95bff5e35 | [
"Apache-2.0"
] | null | null | null | # Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Model saving utilities.
Everything has been moved to keras/saving/. This file will be deleted soon.
"""
from keras.saving import * # noqa: F401,F403
| 38.409091 | 80 | 0.685207 |
from keras.saving import *
| true | true |
f72fe23bdf252ab6cbb78597079dd21aae3c8959 | 719 | py | Python | ext_pylib/__init__.py | hbradleyiii/ext_pylib | 15a9b5a80db87b5f20e03ef6bfa015acf4bf8543 | [
"MIT"
] | 2 | 2015-12-18T14:33:23.000Z | 2015-12-22T11:48:53.000Z | ext_pylib/__init__.py | hbradleyiii/ext_pylib | 15a9b5a80db87b5f20e03ef6bfa015acf4bf8543 | [
"MIT"
] | null | null | null | ext_pylib/__init__.py | hbradleyiii/ext_pylib | 15a9b5a80db87b5f20e03ef6bfa015acf4bf8543 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# /'' |''\ | |
# \. | |../ | * |..
# / \/ T | \ / | | | \
# \.../\.| __ | \/ | | |../
# ###################/###############
# /
"""
ext_pylib
~~~~~~~~~
Extra python libraries for scaffolding server scripts.
"""
from __future__ import absolute_import
from . import domain
from . import files
from . import input # pylint: disable=redefined-builtin
from . import password
from . import terminal
from . import user
# Package metadata.
__title__ = 'ext_pylib'
__version__ = '0.2'
__author__ = 'Harold Bradley III'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015-2016 Harold Bradley III'
# Soli Deo gloria. <><
| 20.542857 | 56 | 0.520167 | true | true | |
f72fe24bceb08d360b3e71ca50fe69638691a3cf | 4,782 | py | Python | src/train_set.py | caoyunhao/keras-speed-prediction | b1c87a012f8049050f124062e3cc24322e7d95b9 | [
"BSD-2-Clause"
] | null | null | null | src/train_set.py | caoyunhao/keras-speed-prediction | b1c87a012f8049050f124062e3cc24322e7d95b9 | [
"BSD-2-Clause"
] | null | null | null | src/train_set.py | caoyunhao/keras-speed-prediction | b1c87a012f8049050f124062e3cc24322e7d95b9 | [
"BSD-2-Clause"
] | null | null | null | # !/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2018/4/6 10:55
# @Author : Yunhao Cao
# @File : train_set.py
import os
import re
import shutil
import tool
import config
__author__ = 'Yunhao Cao'
__all__ = [
'',
]
level_list = config.LV_LIST
classes = config.NUM_OF_LEVEL
validation_rate = config.VALIDATION_RATE
origin_data_dir = config.ORIGIN_DATA_DIR
processed_set_dir = config.PROCESSED_SET_DIR
trainset_dir = config.TRAINSET_DIR
validation_set_dir = config.VALIDATION_DIR
cut_shape = config.CUT_SHAPE_0
train_shape = config.TRAIN_SHAPE
image_width = config.IMAGE_WIDTH
image_height = config.IMAGE_HEIGHT
compare_path = tool.compare_path
def get_lv(v, levels=None) -> int:
    """Return the speed-level index for velocity ``v``.

    The level is the index of the first threshold that is strictly
    greater than ``abs(v)``; ``None`` when the speed exceeds every
    threshold (callers treat that as an error).

    Args:
        v: velocity; the sign is ignored.
        levels: ascending thresholds. Defaults to the module-level
            ``level_list`` from the project config, which keeps the
            original call signature working unchanged.
    """
    if levels is None:
        levels = level_list
    speed = abs(v)
    for i, lv in enumerate(levels):
        if speed < lv:
            return i
    # Explicitly mirror the original implicit fall-through.
    return None
def generate_sync_txt():
    """Write one ``sync.txt`` per drive directory: a ``"<v_kmh> <level>"``
    line per oxts frame, derived from the first line of each data file.

    Raises:
        Exception: if a frame's speed exceeds every configured level.
    """
    vf = 8  # forward velocity, i.e. parallel to earth-surface (m/s)
    vl = 9  # leftward velocity, i.e. parallel to earth-surface (m/s)
    af = 14  # forward acceleration (m/s^2)
    for dir_ in tool.get_all(origin_data_dir):
        sync_data_dir = compare_path(dir_, 'oxts', 'data')
        print(sync_data_dir)
        txt_list = tool.get_all(sync_data_dir)
        outlines = list()
        for txt in txt_list:
            lines = tool.read_text(txt)
            line_items = lines[0].split()
            # print(float(line_items[vf]) * 3.6)
            # Convert the forward velocity from m/s to km/h.
            v_origin = float(line_items[vf]) * 3.6
            v_level = get_lv(v_origin)
            if v_level is None:
                # Speed above the highest configured threshold.
                raise Exception
            item = '{} {}'.format(v_origin, v_level)
            outlines.append(item)
        tool.write_text(compare_path(dir_, tool.sync_name), outlines)
def to_name(i):
    """Zero-pad ``i`` to 10 characters and append the ``.png`` suffix,
    matching the KITTI frame-file naming scheme used in this module."""
    digits = str(i)
    return '0' * (10 - len(digits)) + digits + '.png'
def copy_to_process_set():
    """Crop/resize every camera frame and save it into the directory of
    its speed level: ``processed_set_dir/<level>/set_<id>_lv<level>_<frame>.png``.
    """
    for i, set_dir in enumerate(tool.get_all(origin_data_dir)):
        lines = tool.read_text(compare_path(set_dir, 'sync.txt'))
        # Drive id, e.g. "0001" out of ".../2011_09_26_drive_0001_sync/...".
        set_id = re.match('.*2011_09_26_drive_(?P<set_id>\d*)_sync.*', set_dir).groupdict()["set_id"]
        for image_index, line in enumerate(lines):
            v, level = line.split()
            target_path = compare_path(processed_set_dir, level)
            if not os.path.exists(target_path):
                os.makedirs(target_path)
            origin_filename = compare_path(set_dir, 'image_02', 'data', to_name(image_index))
            target_filename = compare_path(target_path, "set_{}_lv{}_{}".format(set_id, level, to_name(image_index)))
            print("From {}\n\tTo: {}".format(origin_filename, target_filename))
            data = tool.read_image(origin_filename)
            if data is None:
                # Fall back to the other camera when image_02 is missing.
                print('[WAIN] From image_03', set_dir, image_index)
                origin_filename = compare_path(set_dir, 'image_03', 'data', to_name(image_index))
                data = tool.read_image(origin_filename)
            if data is None:
                print("[ERROR] No exists in ", set_dir, image_index)
            else:
                # Crop to cut_shape (ArrayCut mode=8 semantics are defined
                # in ``tool``), then resize to the training resolution.
                data = tool.ArrayCut(data, cut_shape[:2], mode=8)
                data = tool.image_cut(data, (image_width, image_height))
                tool.image_save(target_filename, data)
def split_validation_by_copy():
    """Randomly copy ``validation_rate`` of each class directory into the
    validation set and the remainder into the training set."""
    import random
    from_dir = processed_set_dir
    for i, cate_dirname in enumerate(os.listdir(from_dir)):
        if cate_dirname.startswith('.'):
            # Skip hidden entries such as .DS_Store.
            continue
        cate_dir = compare_path(from_dir, cate_dirname)
        cate_listdir = list(filter(lambda x: not x.startswith('.'), os.listdir(cate_dir)))
        # Number of files sampled for validation.
        n = int(len(cate_listdir) * validation_rate)
        validation_files = random.sample(cate_listdir, n)
        validation_cate_path = compare_path(validation_set_dir, cate_dirname)
        print(validation_cate_path)
        if not os.path.exists(validation_cate_path):
            os.makedirs(validation_cate_path)
        for validation_file in validation_files:
            shutil.copy(compare_path(cate_dir, validation_file),
                        compare_path(validation_cate_path, validation_file))
        train_set_path = compare_path(trainset_dir, cate_dirname)
        if not os.path.exists(train_set_path):
            os.makedirs(train_set_path)
        # Everything not sampled for validation goes to training.
        train_set_files = list(set(cate_listdir).difference(set(validation_files)))
        for train_set_file in train_set_files:
            shutil.copy(compare_path(cate_dir, train_set_file),
                        compare_path(train_set_path, train_set_file))
def _test():
    """Ad-hoc pipeline entry point; uncomment the stage you want to run."""
    # print(get_set('0001').shape)
    # print(get_flag('0001').shape)
    # print(tool.dir_util.origin_sync_dirname)
    # generate_sync_txt()
    # copy_to_process_set()
    split_validation_by_copy()
if __name__ == '__main__':
_test()
| 31.460526 | 117 | 0.641363 |
import os
import re
import shutil
import tool
import config
__author__ = 'Yunhao Cao'
__all__ = [
'',
]
level_list = config.LV_LIST
classes = config.NUM_OF_LEVEL
validation_rate = config.VALIDATION_RATE
origin_data_dir = config.ORIGIN_DATA_DIR
processed_set_dir = config.PROCESSED_SET_DIR
trainset_dir = config.TRAINSET_DIR
validation_set_dir = config.VALIDATION_DIR
cut_shape = config.CUT_SHAPE_0
train_shape = config.TRAIN_SHAPE
image_width = config.IMAGE_WIDTH
image_height = config.IMAGE_HEIGHT
compare_path = tool.compare_path
def get_lv(v) -> int:
for i, lv in enumerate(level_list):
if abs(v) < lv:
return i
def generate_sync_txt():
vf = 8
vl = 9
af = 14
for dir_ in tool.get_all(origin_data_dir):
sync_data_dir = compare_path(dir_, 'oxts', 'data')
print(sync_data_dir)
txt_list = tool.get_all(sync_data_dir)
outlines = list()
for txt in txt_list:
lines = tool.read_text(txt)
line_items = lines[0].split()
v_origin = float(line_items[vf]) * 3.6
v_level = get_lv(v_origin)
if v_level is None:
raise Exception
item = '{} {}'.format(v_origin, v_level)
outlines.append(item)
tool.write_text(compare_path(dir_, tool.sync_name), outlines)
def to_name(i):
i = str(i)
return '{}{}{}'.format(''.join(['0' for i in range(0, 10 - len(i))]), i, '.png')
def copy_to_process_set():
for i, set_dir in enumerate(tool.get_all(origin_data_dir)):
lines = tool.read_text(compare_path(set_dir, 'sync.txt'))
set_id = re.match('.*2011_09_26_drive_(?P<set_id>\d*)_sync.*', set_dir).groupdict()["set_id"]
for image_index, line in enumerate(lines):
v, level = line.split()
target_path = compare_path(processed_set_dir, level)
if not os.path.exists(target_path):
os.makedirs(target_path)
origin_filename = compare_path(set_dir, 'image_02', 'data', to_name(image_index))
target_filename = compare_path(target_path, "set_{}_lv{}_{}".format(set_id, level, to_name(image_index)))
print("From {}\n\tTo: {}".format(origin_filename, target_filename))
data = tool.read_image(origin_filename)
if data is None:
print('[WAIN] From image_03', set_dir, image_index)
origin_filename = compare_path(set_dir, 'image_03', 'data', to_name(image_index))
data = tool.read_image(origin_filename)
if data is None:
print("[ERROR] No exists in ", set_dir, image_index)
else:
data = tool.ArrayCut(data, cut_shape[:2], mode=8)
data = tool.image_cut(data, (image_width, image_height))
tool.image_save(target_filename, data)
def split_validation_by_copy():
import random
from_dir = processed_set_dir
for i, cate_dirname in enumerate(os.listdir(from_dir)):
if cate_dirname.startswith('.'):
continue
cate_dir = compare_path(from_dir, cate_dirname)
cate_listdir = list(filter(lambda x: not x.startswith('.'), os.listdir(cate_dir)))
n = int(len(cate_listdir) * validation_rate)
validation_files = random.sample(cate_listdir, n)
validation_cate_path = compare_path(validation_set_dir, cate_dirname)
print(validation_cate_path)
if not os.path.exists(validation_cate_path):
os.makedirs(validation_cate_path)
for validation_file in validation_files:
shutil.copy(compare_path(cate_dir, validation_file),
compare_path(validation_cate_path, validation_file))
train_set_path = compare_path(trainset_dir, cate_dirname)
if not os.path.exists(train_set_path):
os.makedirs(train_set_path)
train_set_files = list(set(cate_listdir).difference(set(validation_files)))
for train_set_file in train_set_files:
shutil.copy(compare_path(cate_dir, train_set_file),
compare_path(train_set_path, train_set_file))
def _test():
split_validation_by_copy()
if __name__ == '__main__':
_test()
| true | true |
f72fe2b962d8ae02afda6b1e6bd5174272456fd7 | 1,176 | py | Python | src/okchain1/theme/rtd/conf/clients_ruby.py | sakya666/crate-docs-theme | 5767fe05c342581d1387baa7222ec09f61ce9cc5 | [
"Apache-2.0"
] | null | null | null | src/okchain1/theme/rtd/conf/clients_ruby.py | sakya666/crate-docs-theme | 5767fe05c342581d1387baa7222ec09f61ce9cc5 | [
"Apache-2.0"
] | null | null | null | src/okchain1/theme/rtd/conf/clients_ruby.py | sakya666/crate-docs-theme | 5767fe05c342581d1387baa7222ec09f61ce9cc5 | [
"Apache-2.0"
] | 1 | 2022-03-14T04:06:36.000Z | 2022-03-14T04:06:36.000Z | # -*- coding: utf-8; -*-
#
# Licensed to Crate (https://crate.io) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. Crate licenses
# this file to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may
# obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# However, if you have executed another commercial license agreement
# with Crate these terms will supersede the license and you may use the
# software solely pursuant to the terms of the relevant commercial agreement.
from okchain1.theme.rtd.conf import *
project = u'Crate Ruby Driver'
html_theme_options.update({
'canonical_url_path': 'docs/clients/ruby/en/latest/',
})
| 40.551724 | 77 | 0.764456 |
from okchain1.theme.rtd.conf import *
project = u'Crate Ruby Driver'
html_theme_options.update({
'canonical_url_path': 'docs/clients/ruby/en/latest/',
})
| true | true |
f72fe2bfca70709b096167614b03f46712fae7e4 | 5,248 | py | Python | hwtLib/tests/types/union_test.py | optical-o/hwtLib | edad621f5ad4cdbea20a5751ff4468979afe2f77 | [
"MIT"
] | 24 | 2017-02-23T10:00:50.000Z | 2022-01-28T12:20:21.000Z | hwtLib/tests/types/union_test.py | optical-o/hwtLib | edad621f5ad4cdbea20a5751ff4468979afe2f77 | [
"MIT"
] | 32 | 2017-04-28T10:29:34.000Z | 2021-04-27T09:16:43.000Z | hwtLib/tests/types/union_test.py | optical-o/hwtLib | edad621f5ad4cdbea20a5751ff4468979afe2f77 | [
"MIT"
] | 8 | 2019-09-19T03:34:36.000Z | 2022-01-21T06:56:58.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
import unittest
from hwt.hdl.types.bits import Bits
from hwt.hdl.types.struct import HStruct
from hwt.hdl.types.union import HUnion
from hwtLib.types.ctypes import uint8_t, uint16_t, int8_t, uint32_t
from pyMathBitPrecise.bit_utils import mask
class UnionTC(unittest.TestCase):
    """Tests for ``HUnion``: construction constraints, aliased value
    views between the member types, and type equality."""

    def test_assertMembersSameSize(self):
        """All union members must have the same bit width."""
        t = HUnion(
            (uint8_t, "a"),
            (uint8_t, "b"),
            (uint8_t, "c"),
            (uint8_t, "d"),
            )
        self.assertEqual(t.bit_length(), 8)

        with self.assertRaises(TypeError):
            HUnion(
                (uint16_t, "a"),
                (uint8_t, "b"),
            )

    def test_assertNoPadding(self):
        """Anonymous (padding) members are not allowed in a union."""
        with self.assertRaises(AssertionError):
            HUnion(
                (uint8_t, None),
                (uint8_t, "b"),
            )

    def test_value_simple(self):
        """Signed and unsigned views alias the same 8 bits."""
        t = HUnion(
            (uint8_t, "unsigned"),
            (int8_t, "signed"),
            )
        v = t.from_py(None)
        v.unsigned = mask(8)
        self.assertEqual(int(v.signed), -1)
        v.signed = 0
        self.assertEqual(int(v.unsigned), 0)

    def test_value_struct_and_bits(self):
        """Struct-field writes show up in the bits view and vice versa."""
        t = HUnion(
            (uint16_t, "bits"),
            (HStruct(
                (uint8_t, "lower"),
                (uint8_t, "upper"),
                ), "struct"),
            )
        v = t.from_py(None)
        v.struct.upper = 1
        self.assertEqual(v.bits.val, 1 << 8)
        self.assertEqual(v.bits.vld_mask, mask(8) << 8)
        v.struct.lower = 1
        self.assertEqual(v.bits.val, (1 << 8) | 1)
        self.assertEqual(v.bits.vld_mask, mask(16))
        v.bits = 2
        self.assertEqual(int(v.struct.lower), 2)
        self.assertEqual(int(v.struct.upper), 0)

    def test_value_array_and_bits(self):
        """A 32b write is visible as four consecutive 8b array items."""
        t = HUnion(
            (uint32_t, "bits"),
            (uint8_t[4], "arr"),
            )
        v = t.from_py(None)
        b = (4 << (3 * 8)) | (3 << (2 * 8)) | (2 << 8) | 1
        v.bits = b
        for i, item in enumerate(v.arr):
            self.assertEqual(int(item), i + 1)
        self.assertEqual(int(v.bits), b)

    def test_value_array_toArray(self):
        """16b array items are visible as byte pairs in the 8b view."""
        t = HUnion(
            (uint16_t[2], "arr16b"),
            (int8_t[4], "arr8b"),
            )
        v = t.from_py(None)
        for i in range(len(v.arr16b)):
            v.arr16b[i] = i + 1
        for i, item in enumerate(v.arr8b):
            # Even positions hold the 16b value's low byte, odd positions
            # its (zero) high byte.  (Renamed from ``v``, which shadowed
            # the union value being inspected.)
            if (i + 1) % 2 == 0:
                expected = 0
            else:
                expected = i // 2 + 1
            self.assertEqual(int(item), expected)

    def test_value_array_of_struct_to_bits(self):
        """An array of structs packs into the flat bits view."""
        t = HUnion(
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
                )[3], "arr"),
            (Bits(24 * 3), "bits")
            )
        v = t.from_py(None)
        for i in range(len(v.arr)):
            v.arr[i] = {"a": i + 1,
                        "b": (i + 1) * 3
                        }
        self.assertEqual(int(v.bits),
                         1
                         | 3 << 16
                         | 2 << 24
                         | 6 << (24 + 16)
                         | 3 << (2 * 24)
                         | 9 << (2 * 24 + 16))

    def test_hunion_type_eq(self):
        """Union equality ignores member order but not names or types."""
        t0 = HUnion(
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
                )[3], "arr"),
            (Bits(24 * 3), "bits")
            )
        t1 = HUnion(
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
                )[3], "arr"),
            (Bits(24 * 3), "bits")
            )
        self.assertEqual(t0, t1)
        self.assertEqual(t1, t0)
        t1 = HUnion(
            (Bits(24 * 3), "bits"),
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
                )[3], "arr")
            )
        self.assertEqual(t0, t1)
        self.assertEqual(t1, t0)
        t1 = HUnion(
            (uint32_t, "bits"),
            (uint8_t[4], "arr"),
            )
        self.assertNotEqual(t0, t1)
        self.assertNotEqual(t1, t0)
        t1 = HUnion(
            (Bits(24 * 3), "bbits"),
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
                )[3], "arr")
            )
        self.assertNotEqual(t0, t1)
        self.assertNotEqual(t1, t0)
        t1 = Bits(24 * 3)
        self.assertNotEqual(t0, t1)
        self.assertNotEqual(t1, t0)
        t1 = HUnion(
            (Bits(24 * 3, signed=False), "bits"),
            (HStruct(
                (uint16_t, "a"),
                (uint8_t, "b"),
                )[3], "arr")
            )
        self.assertNotEqual(t0, t1)
        self.assertNotEqual(t1, t0)
if __name__ == '__main__':
    suite = unittest.TestSuite()
    # suite.addTest(UnionTC('testValue'))
    # unittest.makeSuite() was deprecated in Python 3.11 and removed in
    # 3.13; TestLoader is the supported replacement.
    suite.addTest(unittest.TestLoader().loadTestsFromTestCase(UnionTC))
    runner = unittest.TextTestRunner(verbosity=3)
    runner.run(suite)
| 26.639594 | 67 | 0.416921 |
import unittest
from hwt.hdl.types.bits import Bits
from hwt.hdl.types.struct import HStruct
from hwt.hdl.types.union import HUnion
from hwtLib.types.ctypes import uint8_t, uint16_t, int8_t, uint32_t
from pyMathBitPrecise.bit_utils import mask
class UnionTC(unittest.TestCase):
def test_assertMembersSameSize(self):
t = HUnion(
(uint8_t, "a"),
(uint8_t, "b"),
(uint8_t, "c"),
(uint8_t, "d"),
)
self.assertEqual(t.bit_length(), 8)
with self.assertRaises(TypeError):
HUnion(
(uint16_t, "a"),
(uint8_t, "b"),
)
def test_assertNoPadding(self):
with self.assertRaises(AssertionError):
HUnion(
(uint8_t, None),
(uint8_t, "b"),
)
def test_value_simple(self):
t = HUnion(
(uint8_t, "unsigned"),
(int8_t, "signed"),
)
v = t.from_py(None)
v.unsigned = mask(8)
self.assertEqual(int(v.signed), -1)
v.signed = 0
self.assertEqual(int(v.unsigned), 0)
def test_value_struct_and_bits(self):
t = HUnion(
(uint16_t, "bits"),
(HStruct(
(uint8_t, "lower"),
(uint8_t, "upper"),
), "struct"),
)
v = t.from_py(None)
v.struct.upper = 1
self.assertEqual(v.bits.val, 1 << 8)
self.assertEqual(v.bits.vld_mask, mask(8) << 8)
v.struct.lower = 1
self.assertEqual(v.bits.val, (1 << 8) | 1)
self.assertEqual(v.bits.vld_mask, mask(16))
v.bits = 2
self.assertEqual(int(v.struct.lower), 2)
self.assertEqual(int(v.struct.upper), 0)
def test_value_array_and_bits(self):
t = HUnion(
(uint32_t, "bits"),
(uint8_t[4], "arr"),
)
v = t.from_py(None)
b = (4 << (3 * 8)) | (3 << (2 * 8)) | (2 << 8) | 1
v.bits = b
for i, item in enumerate(v.arr):
self.assertEqual(int(item), i + 1)
self.assertEqual(int(v.bits), b)
def test_value_array_toArray(self):
t = HUnion(
(uint16_t[2], "arr16b"),
(int8_t[4], "arr8b"),
)
v = t.from_py(None)
for i in range(len(v.arr16b)):
v.arr16b[i] = i + 1
for i, item in enumerate(v.arr8b):
if (i + 1) % 2 == 0:
v = 0
else:
v = i // 2 + 1
self.assertEqual(int(item), v)
def test_value_array_of_struct_to_bits(self):
t = HUnion(
(HStruct(
(uint16_t, "a"),
(uint8_t, "b"),
)[3], "arr"),
(Bits(24 * 3), "bits")
)
v = t.from_py(None)
for i in range(len(v.arr)):
v.arr[i] = {"a": i + 1,
"b": (i + 1) * 3
}
self.assertEqual(int(v.bits),
1
| 3 << 16
| 2 << 24
| 6 << (24 + 16)
| 3 << (2 * 24)
| 9 << (2 * 24 + 16))
def test_hunion_type_eq(self):
t0 = HUnion(
(HStruct(
(uint16_t, "a"),
(uint8_t, "b"),
)[3], "arr"),
(Bits(24 * 3), "bits")
)
t1 = HUnion(
(HStruct(
(uint16_t, "a"),
(uint8_t, "b"),
)[3], "arr"),
(Bits(24 * 3), "bits")
)
self.assertEqual(t0, t1)
self.assertEqual(t1, t0)
t1 = HUnion(
(Bits(24 * 3), "bits"),
(HStruct(
(uint16_t, "a"),
(uint8_t, "b"),
)[3], "arr")
)
self.assertEqual(t0, t1)
self.assertEqual(t1, t0)
t1 = HUnion(
(uint32_t, "bits"),
(uint8_t[4], "arr"),
)
self.assertNotEqual(t0, t1)
self.assertNotEqual(t1, t0)
t1 = HUnion(
(Bits(24 * 3), "bbits"),
(HStruct(
(uint16_t, "a"),
(uint8_t, "b"),
)[3], "arr")
)
self.assertNotEqual(t0, t1)
self.assertNotEqual(t1, t0)
t1 = Bits(24 * 3)
self.assertNotEqual(t0, t1)
self.assertNotEqual(t1, t0)
t1 = HUnion(
(Bits(24 * 3, signed=False), "bits"),
(HStruct(
(uint16_t, "a"),
(uint8_t, "b"),
)[3], "arr")
)
self.assertNotEqual(t0, t1)
self.assertNotEqual(t1, t0)
if __name__ == '__main__':
    suite = unittest.TestSuite()
    # NOTE(review): unittest.makeSuite is deprecated (removed in Python
    # 3.13); prefer TestLoader().loadTestsFromTestCase(UnionTC).
    suite.addTest(unittest.makeSuite(UnionTC))
    runner = unittest.TextTestRunner(verbosity=3)
    runner.run(suite)
| true | true |
f72fe2eb838ca241bfbb6311a02f5d6800326a7d | 3,217 | py | Python | editor/photo_effects.py | gitgik/photo-editing-app | 730f88a1946d425cbe790cd4ed0689a1938a8cd0 | [
"MIT"
] | 5 | 2017-02-23T14:24:22.000Z | 2021-02-23T03:43:18.000Z | editor/photo_effects.py | gitgik/photo-editing-app | 730f88a1946d425cbe790cd4ed0689a1938a8cd0 | [
"MIT"
] | 1 | 2021-06-08T19:14:01.000Z | 2021-06-08T19:14:01.000Z | editor/photo_effects.py | gitgik/photo-editing-app | 730f88a1946d425cbe790cd4ed0689a1938a8cd0 | [
"MIT"
] | 2 | 2019-01-21T20:16:05.000Z | 2019-06-23T14:30:50.000Z | """Define imports."""
from PIL import ImageFilter, ImageOps, ImageEnhance
def grayscale(image, name, temp_url):
    """Convert the image to grayscale, save it, and return the saved path."""
    image.seek(0)
    destination = temp_url + "GRAYSCALE" + name
    ImageOps.grayscale(image).save(destination)
    return destination
def smooth(image, name, temp_url):
    """Apply PIL's SMOOTH filter, save the result, and return its path."""
    image.seek(0)
    destination = temp_url + "SMOOTH" + name
    image.filter(ImageFilter.SMOOTH).save(destination)
    return destination
def contour(image, name, temp_url):
    """Apply PIL's CONTOUR filter, save the result, and return its path."""
    image.seek(0)
    destination = temp_url + "CONTOUR" + name
    image.filter(ImageFilter.CONTOUR).save(destination)
    return destination
def sharpen(image, name, temp_url):
    """Apply PIL's SHARPEN filter, save the result, and return its path."""
    image.seek(0)
    destination = temp_url + "SHARPEN" + name
    image.filter(ImageFilter.SHARPEN).save(destination)
    return destination
def detail(image, name, temp_url):
    """Apply PIL's EDGE_ENHANCE filter, save the result, and return its path."""
    image.seek(0)
    destination = temp_url + "DETAIL" + name
    image.filter(ImageFilter.EDGE_ENHANCE).save(destination)
    return destination
def flip(image, name, temp_url):
    """Flip the image vertically, save it, and return the saved path."""
    image.seek(0)
    destination = temp_url + "FLIP" + name
    ImageOps.flip(image).save(destination)
    return destination
def invert(image, name, temp_url):
    """Invert the image colors, save it, and return the saved path."""
    image.seek(0)
    destination = temp_url + "INVERT" + name
    ImageOps.invert(image).save(destination)
    return destination
def mirror(image, name, temp_url):
    """Flip the image horizontally, save it, and return the saved path."""
    image.seek(0)
    destination = temp_url + "MIRROR" + name
    ImageOps.mirror(image).save(destination)
    return destination
def contrast(image, name, temp_url):
    """Boost contrast by a factor of 1.5, save, and return the new path."""
    image.seek(0)
    destination = temp_url + "CONTRAST" + name
    ImageEnhance.Contrast(image).enhance(1.5).save(destination)
    return destination
def blur(image, name, temp_url):
    """Blur the image with a radius-3 Gaussian filter and return its path."""
    image.seek(0)
    destination = temp_url + "BLUR" + name
    image.filter(ImageFilter.GaussianBlur(radius=3)).save(destination)
    return destination
def brighten(image, name, temp_url):
    """Brighten the image (enhancement factor 1.5) and return the saved path."""
    image.seek(0)
    destination = temp_url + "BRIGHTEN" + name
    ImageEnhance.Brightness(image).enhance(1.5).save(destination)
    return destination
def darken(image, name, temp_url):
    """Return an image with a brightness enhancement factor of 0.5.

    Bug fix: the result was previously saved with a "SATURATE" filename
    prefix, which collided with (and could overwrite) the output of
    saturate(). It is now saved with its own "DARKEN" prefix; callers use
    the returned path, so the change is backward-compatible.
    """
    image.seek(0)
    photo = ImageEnhance.Brightness(image)
    photo = photo.enhance(0.5)  # factor < 1.0 darkens
    photo.save(temp_url + "DARKEN" + name)
    return temp_url + "DARKEN" + name
def saturate(image, name, temp_url):
    """Double the color saturation, save the image, and return its path."""
    image.seek(0)
    destination = temp_url + "SATURATE" + name
    ImageEnhance.Color(image).enhance(2.0).save(destination)
    return destination
| 28.723214 | 74 | 0.658688 | from PIL import ImageFilter, ImageOps, ImageEnhance
# NOTE(review): this section is an auto-generated comment-stripped duplicate
# of the photo-effects helpers above; docstrings re-added below.
def grayscale(image, name, temp_url):
    """Return the path of a grayscale copy of the image."""
    image.seek(0)
    photo = ImageOps.grayscale(image)
    photo.save(temp_url + "GRAYSCALE" + name)
    return temp_url + "GRAYSCALE" + name


def smooth(image, name, temp_url):
    """Return the path of a smoothened copy of the image."""
    image.seek(0)
    photo = image.filter(ImageFilter.SMOOTH)
    photo.save(temp_url + "SMOOTH" + name)
    return temp_url + "SMOOTH" + name


def contour(image, name, temp_url):
    """Return the path of a copy with a contour filter applied."""
    image.seek(0)
    photo = image.filter(ImageFilter.CONTOUR)
    photo.save(temp_url + "CONTOUR" + name)
    return temp_url + "CONTOUR" + name


def sharpen(image, name, temp_url):
    """Return the path of a sharpened copy of the image."""
    image.seek(0)
    photo = image.filter(ImageFilter.SHARPEN)
    photo.save(temp_url + "SHARPEN" + name)
    return temp_url + "SHARPEN" + name


def detail(image, name, temp_url):
    """Return the path of a copy with edge enhancement applied."""
    image.seek(0)
    photo = image.filter(ImageFilter.EDGE_ENHANCE)
    photo.save(temp_url + "DETAIL" + name)
    return temp_url + "DETAIL" + name


def flip(image, name, temp_url):
    """Return the path of a vertically flipped copy of the image."""
    image.seek(0)
    photo = ImageOps.flip(image)
    photo.save(temp_url + "FLIP" + name)
    return temp_url + "FLIP" + name


def invert(image, name, temp_url):
    """Return the path of a color-inverted copy of the image."""
    image.seek(0)
    photo = ImageOps.invert(image)
    photo.save(temp_url + "INVERT" + name)
    return temp_url + "INVERT" + name


def mirror(image, name, temp_url):
    """Return the path of a horizontally flipped copy of the image."""
    image.seek(0)
    photo = ImageOps.mirror(image)
    photo.save(temp_url + "MIRROR" + name)
    return temp_url + "MIRROR" + name


def contrast(image, name, temp_url):
    """Return the path of a copy with contrast increased by factor 1.5."""
    image.seek(0)
    photo = ImageEnhance.Contrast(image)
    photo = photo.enhance(1.5)
    photo.save(temp_url + "CONTRAST" + name)
    return temp_url + "CONTRAST" + name


def blur(image, name, temp_url):
    """Return the path of a copy blurred with a radius-3 Gaussian filter."""
    image.seek(0)
    photo = image.filter(
        ImageFilter.GaussianBlur(radius=3))
    photo.save(temp_url + "BLUR" + name)
    return temp_url + "BLUR" + name


def brighten(image, name, temp_url):
    """Return the path of a copy brightened by factor 1.5."""
    image.seek(0)
    photo = ImageEnhance.Brightness(image)
    photo = photo.enhance(1.5)
    photo.save(temp_url + "BRIGHTEN" + name)
    return temp_url + "BRIGHTEN" + name


def darken(image, name, temp_url):
    """Return the path of a copy darkened (brightness factor 0.5).

    NOTE(review): the output is saved with a "SATURATE" prefix, which
    collides with the output of saturate() — looks like a copy-paste bug.
    """
    image.seek(0)
    photo = ImageEnhance.Brightness(image)
    photo = photo.enhance(0.5)
    photo.save(temp_url + "SATURATE" + name)
    return temp_url + "SATURATE" + name


def saturate(image, name, temp_url):
    """Return the path of a copy with saturation doubled."""
    image.seek(0)
    photo = ImageEnhance.Color(image)
    photo = photo.enhance(2.0)
    photo.save(temp_url + "SATURATE" + name)
    return temp_url + "SATURATE" + name
| true | true |
f72fe3a6e22942dfdabf42624c7f630b6ceb120b | 610 | py | Python | eveonline-assistant/plans/urls.py | wengole/eveonline-assistant | 35041952509bd347c5c9458630404726d7ddd5d8 | [
"BSD-3-Clause"
] | 1 | 2016-07-01T03:15:16.000Z | 2016-07-01T03:15:16.000Z | eveonline-assistant/plans/urls.py | wengole/eveonline-assistant | 35041952509bd347c5c9458630404726d7ddd5d8 | [
"BSD-3-Clause"
] | null | null | null | eveonline-assistant/plans/urls.py | wengole/eveonline-assistant | 35041952509bd347c5c9458630404726d7ddd5d8 | [
"BSD-3-Clause"
] | null | null | null | from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns(
'',
# URL pattern for the UserListView
url(
regex=r'^add/$',
view=views.AddPlan.as_view(),
name='add'
),
url(
regex=r'^manage/$',
view=views.ManagePlans.as_view(),
name='manage'
),
url(
regex=r'^manage/(?P<plan_id>\d+)/$',
view=views.PlanDetail.as_view(pk_url_kwarg='plan_id'),
name='detail'
),
url(
regex=r'^addToPlan/$',
view=views.AddSkillToPlan.as_view(),
name='add_to_plan'
),
)
| 20.333333 | 62 | 0.545902 | from django.conf.urls import patterns, url
from . import views
urlpatterns = patterns(
'',
url(
regex=r'^add/$',
view=views.AddPlan.as_view(),
name='add'
),
url(
regex=r'^manage/$',
view=views.ManagePlans.as_view(),
name='manage'
),
url(
regex=r'^manage/(?P<plan_id>\d+)/$',
view=views.PlanDetail.as_view(pk_url_kwarg='plan_id'),
name='detail'
),
url(
regex=r'^addToPlan/$',
view=views.AddSkillToPlan.as_view(),
name='add_to_plan'
),
)
| true | true |
f72fe3eae0d57d1739f0d017bc8c4f227f8e08ed | 11,579 | py | Python | asdf/util.py | eteq/asdf | 6d9e0e48bbffea166a19b71e29f5f9c211983bfe | [
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | asdf/util.py | eteq/asdf | 6d9e0e48bbffea166a19b71e29f5f9c211983bfe | [
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | asdf/util.py | eteq/asdf | 6d9e0e48bbffea166a19b71e29f5f9c211983bfe | [
"BSD-2-Clause",
"BSD-3-Clause"
] | null | null | null | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
import inspect
import math
import struct
import types
from urllib.parse import urljoin
from urllib.request import pathname2url
from urllib import parse as urlparse
import numpy as np
from .extern.decorators import add_common_docstring
# Public API of this utility module.
__all__ = ['human_list', 'get_array_base', 'get_base_uri', 'filepath_to_url',
           'iter_subclasses', 'calculate_padding', 'resolve_name']
def human_list(l, separator="and"):
    """
    Format a sequence of strings for human readability.

    Parameters
    ----------
    l : sequence
        A sequence of strings.
    separator : string, optional
        Word placed between the last two entries. Default: ``"and"``.

    Returns
    -------
    formatted_list : string

    Examples
    --------
    >>> human_list(["vanilla", "strawberry", "chocolate"], "or")
    'vanilla, strawberry or chocolate'
    """
    if len(l) == 1:
        return l[0]
    # Join all but the last entry with commas, then attach the separator
    # word and the final entry.
    return ' '.join([', '.join(l[:-1]), separator, l[-1]])
def get_array_base(arr):
    """
    Walk the ``base`` chain of a Numpy array and return the array that
    actually owns the underlying data.
    """
    owner = arr
    while isinstance(owner.base, np.ndarray):
        owner = owner.base
    return owner
def get_base_uri(uri):
    """
    Return the given URI with any fragment component removed.
    """
    scheme, netloc, path, params, query, _fragment = urlparse.urlparse(uri)
    return urlparse.urlunparse((scheme, netloc, path, params, query, ''))
def filepath_to_url(path):
    """
    Convert a local filesystem path into a ``file://`` URL.
    """
    url_path = pathname2url(path)
    return urljoin('file:', url_path)
def iter_subclasses(cls):
    """
    Yield every direct and indirect subclass of ``cls``, depth-first.
    """
    for sub in cls.__subclasses__():
        yield sub
        yield from iter_subclasses(sub)
def calculate_padding(content_size, pad_blocks, block_size):
    """
    Calculate how many extra bytes of padding to append to a block so
    that the padded total is evenly divisible by the filesystem block
    size.

    Parameters
    ----------
    content_size : int
        The size of the actual content.
    pad_blocks : float or bool
        ``False`` means no padding (always 0); ``True`` means the default
        10% padding; a float is a factor to multiply content_size by.
    block_size : int
        The filesystem block size to use.

    Returns
    -------
    nbytes : int
        The number of extra bytes to add for padding.
    """
    if not pad_blocks:
        return 0
    if pad_blocks is True:
        pad_blocks = 1.1  # default: 10% extra space
    # Round the requested size up to the next whole block, plus one.
    padded = content_size * pad_blocks
    blocks_needed = math.ceil(float(padded) / block_size) + 1
    padded = int(blocks_needed * block_size)
    return max(padded - content_size, 0)
class BinaryStruct(object):
    """
    Dictionary-style access to a fixed binary layout, built on top of
    the stdlib `struct` module.
    """
    def __init__(self, descr, endian='>'):
        """
        Parameters
        ----------
        descr : list of tuple
            ``(name, format)`` pairs, where ``format`` is a `struct`
            format code.
        endian : str, optional
            Byte-order prefix for the struct: ``>`` or ``<``.
        """
        fmt_parts = [endian]
        self._offsets = {}
        self._names = []
        offset = 0
        for name, fmt in descr:
            fmt_parts.append(fmt)
            # Remember each field's byte offset and its standalone format
            # so individual fields can be rewritten later (see update()).
            self._offsets[name] = (offset, (endian + fmt).encode('ascii'))
            self._names.append(name)
            offset += struct.calcsize(fmt.encode('ascii'))
        self._fmt = ''.join(fmt_parts).encode('ascii')
        self._size = struct.calcsize(self._fmt)

    @property
    def size(self):
        """
        Total size of the struct in bytes.
        """
        return self._size

    def pack(self, **kwargs):
        """
        Pack keyword arguments into their binary representation.
        Unspecified fields default to 0; unknown names raise KeyError.
        """
        values = [0] * len(self._names)
        for key, val in kwargs.items():
            if key not in self._offsets:
                raise KeyError("No header field '{0}'".format(key))
            values[self._names.index(key)] = val
        return struct.pack(self._fmt, *values)

    def unpack(self, buff):
        """
        Unpack a binary buffer into a dict mapping field names to values.
        """
        unpacked = struct.unpack_from(self._fmt, buff[:self._size])
        return dict(zip(self._names, unpacked))

    def update(self, fd, **kwargs):
        """
        Overwrite selected fields of a struct already written to a file.

        Parameters
        ----------
        fd : generic_io.GenericIO instance
            A writable, seekable file descriptor, currently seeked to
            the beginning of the struct.
        **kwargs : values
            The field values to rewrite.
        """
        changes = []
        for key, val in kwargs.items():
            if key not in self._offsets:
                raise KeyError("No header field '{0}'".format(key))
            changes.append((self._offsets[key], val))
        # Write in offset order so seeks only move forward.
        changes.sort()

        anchor = fd.tell()
        for (offset, datatype), val in changes:
            fd.seek(anchor + offset)
            fd.write(struct.pack(datatype, val))
class HashableDict(dict):
    """
    A dict subclass that is hashable (hash of the frozen set of its
    items). Slow for large dicts; intended for small ones.
    """
    def __hash__(self):
        items_snapshot = frozenset(self.items())
        return hash(items_snapshot)
def resolve_name(name):
    """Resolve a name like ``module.object`` to an object and return it.

    This ends up working like ``from module import object`` but is easier
    to deal with than the `__import__` builtin and supports digging into
    submodules.

    Parameters
    ----------
    name : `str`
        A dotted path to a Python object--that is, the name of a function,
        class, or other object in a module with the full path to that module,
        including parent modules, separated by dots. Also known as the fully
        qualified name of the object.

    Examples
    --------
    >>> resolve_name('asdf.util.resolve_name')
    <function resolve_name at 0x...>

    Raises
    ------
    `ImportError`
        If the module or named object is not found.
    """

    # Note: On python 2 these must be str objects and not unicode
    parts = [str(part) for part in name.split('.')]

    if len(parts) == 1:
        # No dots in the name--just a straight up module import
        cursor = 1
        attr_name = str('')  # Must not be unicode on Python 2
    else:
        cursor = len(parts) - 1
        attr_name = parts[-1]

    module_name = parts[:cursor]

    # Retry the import with progressively shorter module paths; whatever
    # is left of the dotted path is resolved via getattr below.
    while cursor > 0:
        try:
            ret = __import__(str('.'.join(module_name)), fromlist=[attr_name])
            break
        except ImportError:
            # NOTE(review): this branch is unreachable -- the loop guard
            # guarantees cursor > 0 here, so cursor == 0 can never hold.
            if cursor == 0:
                raise
            cursor -= 1
            module_name = parts[:cursor]
            attr_name = parts[cursor]
            ret = ''

    # Walk the remaining attribute path down from the imported module.
    for part in parts[cursor:]:
        try:
            ret = getattr(ret, part)
        except AttributeError:
            raise ImportError(name)

    return ret
def get_class_name(obj, instance=True):
    """
    Return the fully qualified name (``module.ClassName``) of a class
    or of an instance's class.

    Parameters
    ----------
    obj : object
        An instance of any object, or a class itself.
    instance : bool
        If True, ``obj`` is treated as an instance and its type is
        named; otherwise ``obj`` itself is assumed to be the class.
    """
    cls = type(obj) if instance else obj
    return '.'.join([cls.__module__, cls.__name__])
def minversion(module, version, inclusive=True, version_path='__version__'):
    """
    Returns `True` if the specified Python module satisfies a minimum version
    requirement, and `False` if not.

    By default this uses `pkg_resources.parse_version` to do the version
    comparison if available. Otherwise it falls back on
    `distutils.version.LooseVersion`.

    Parameters
    ----------
    module : module or `str`
        An imported module of which to check the version, or the name of
        that module (in which case an import of that module is attempted--
        if this fails `False` is returned).

    version : `str`
        The version as a string that this module must have at a minimum (e.g.
        ``'0.12'``).

    inclusive : `bool`
        The specified version meets the requirement inclusively (i.e. ``>=``)
        as opposed to strictly greater than (default: `True`).

    version_path : `str`
        A dotted attribute path to follow in the module for the version.
        Defaults to just ``'__version__'``, which should work for most Python
        modules.
    """
    if isinstance(module, types.ModuleType):
        module_name = module.__name__
    elif isinstance(module, str):
        module_name = module
        try:
            module = resolve_name(module_name)
        except ImportError:
            # The module is not importable at all -> requirement not met.
            return False
    else:
        raise ValueError('module argument must be an actual imported '
                         'module, or the import name of the module; '
                         'got {0!r}'.format(module))

    if '.' not in version_path:
        have_version = getattr(module, version_path)
    else:
        # Dotted version_path: resolve it relative to the module.
        have_version = resolve_name('.'.join([module.__name__, version_path]))

    # NOTE(review): distutils was removed in Python 3.12 and pkg_resources
    # is deprecated -- if neither is available this raises ImportError;
    # consider packaging.version.parse. Kept as-is to preserve behavior.
    try:
        from pkg_resources import parse_version
    except ImportError:
        from distutils.version import LooseVersion as parse_version

    if inclusive:
        return parse_version(have_version) >= parse_version(version)
    else:
        return parse_version(have_version) > parse_version(version)
class InheritDocstrings(type):
    """
    Metaclass that makes methods automatically inherit their docstrings
    from the methods they override in a base class.

    With multiple inheritance the docstring is taken from the first
    matching class in the MRO, the same way methods themselves are
    resolved. If that picks the wrong docstring, write it explicitly
    on the overriding method.

    For example::

        >>> class A(object, metaclass=InheritDocstrings):
        ...     def wiggle(self):
        ...         "Wiggle the thingamajig"
        ...         pass
        >>> class B(A):
        ...     def wiggle(self):
        ...         pass
        >>> B.wiggle.__doc__
        'Wiggle the thingamajig'
    """
    def __init__(cls, name, bases, dct):
        def needs_doc_lookup(key):
            # Public names: dunder names longer than 4 characters, or any
            # name without a leading underscore.
            if key.startswith('__') and key.endswith('__'):
                return len(key) > 4
            return not key.startswith('_')

        for attr_name, attr in dct.items():
            if not (inspect.isfunction(attr)
                    and needs_doc_lookup(attr_name)
                    and attr.__doc__ is None):
                continue
            # Copy the docstring from the first base in the MRO that
            # defines this attribute.
            for ancestor in cls.__mro__[1:]:
                inherited = getattr(ancestor, attr_name, None)
                if inherited is not None:
                    attr.__doc__ = inherited.__doc__
                    break

        super(InheritDocstrings, cls).__init__(name, bases, dct)
| 28.9475 | 78 | 0.59297 |
import inspect
import math
import struct
import types
from urllib.parse import urljoin
from urllib.request import pathname2url
from urllib import parse as urlparse
import numpy as np
from .extern.decorators import add_common_docstring
# NOTE(review): this section is an auto-generated comment-stripped duplicate
# of the asdf.util definitions above; docstrings re-added below.
__all__ = ['human_list', 'get_array_base', 'get_base_uri', 'filepath_to_url',
           'iter_subclasses', 'calculate_padding', 'resolve_name']


def human_list(l, separator="and"):
    """Format a sequence of strings for humans, e.g. 'a, b and c'."""
    if len(l) == 1:
        return l[0]
    else:
        return ', '.join(l[:-1]) + ' ' + separator + ' ' + l[-1]


def get_array_base(arr):
    """Return the ndarray that ultimately owns the data behind *arr*."""
    base = arr
    while isinstance(base.base, np.ndarray):
        base = base.base
    return base


def get_base_uri(uri):
    """Return *uri* without any fragment component."""
    parts = urlparse.urlparse(uri)
    return urlparse.urlunparse(list(parts[:5]) + [''])


def filepath_to_url(path):
    """Return a ``file://`` URL for the given local file path."""
    return urljoin('file:', pathname2url(path))


def iter_subclasses(cls):
    """Yield all direct and indirect subclasses of *cls*, depth-first."""
    for x in cls.__subclasses__():
        yield x
        for y in iter_subclasses(x):
            yield y


def calculate_padding(content_size, pad_blocks, block_size):
    """Return padding bytes so the padded block is block_size-aligned.

    *pad_blocks* may be False (no padding), True (default 10% extra) or a
    multiplicative factor applied to *content_size*.
    """
    if not pad_blocks:
        return 0
    if pad_blocks is True:
        pad_blocks = 1.1
    new_size = content_size * pad_blocks
    new_size = int((math.ceil(
        float(new_size) / block_size) + 1) * block_size)
    return max(new_size - content_size, 0)


class BinaryStruct(object):
    """Dict-like wrapper around the stdlib `struct` module."""

    def __init__(self, descr, endian='>'):
        # descr: list of (name, struct-format) pairs; endian: '>' or '<'
        self._fmt = [endian]
        self._offsets = {}
        self._names = []
        i = 0
        for name, fmt in descr:
            self._fmt.append(fmt)
            self._offsets[name] = (i, (endian + fmt).encode('ascii'))
            self._names.append(name)
            i += struct.calcsize(fmt.encode('ascii'))
        self._fmt = ''.join(self._fmt).encode('ascii')
        self._size = struct.calcsize(self._fmt)

    @property
    def size(self):
        """Total size of the struct in bytes."""
        return self._size

    def pack(self, **kwargs):
        """Pack keyword args into binary form; unknown keys raise KeyError."""
        fields = [0] * len(self._names)
        for key, val in kwargs.items():
            if key not in self._offsets:
                raise KeyError("No header field '{0}'".format(key))
            i = self._names.index(key)
            fields[i] = val
        return struct.pack(self._fmt, *fields)

    def unpack(self, buff):
        """Unpack *buff* into a dict of field name -> value."""
        args = struct.unpack_from(self._fmt, buff[:self._size])
        return dict(zip(self._names, args))

    def update(self, fd, **kwargs):
        """Rewrite selected fields in place; *fd* must be seeked to the
        start of the struct."""
        updates = []
        for key, val in kwargs.items():
            if key not in self._offsets:
                raise KeyError("No header field '{0}'".format(key))
            updates.append((self._offsets[key], val))
        updates.sort()

        start = fd.tell()
        for ((offset, datatype), val) in updates:
            fd.seek(start + offset)
            fd.write(struct.pack(datatype, val))


class HashableDict(dict):
    """A hashable dict (hash of its item set); intended for small dicts."""

    def __hash__(self):
        return hash(frozenset(self.items()))


def resolve_name(name):
    """Resolve a dotted path like ``module.object`` to the object itself.

    Raises ImportError if the module or attribute cannot be found.
    """
    parts = [str(part) for part in name.split('.')]

    if len(parts) == 1:
        # Plain module import, no attribute access.
        cursor = 1
        attr_name = str('')
    else:
        cursor = len(parts) - 1
        attr_name = parts[-1]

    module_name = parts[:cursor]

    # Retry the import with progressively shorter module paths.
    while cursor > 0:
        try:
            ret = __import__(str('.'.join(module_name)), fromlist=[attr_name])
            break
        except ImportError:
            if cursor == 0:
                raise
            cursor -= 1
            module_name = parts[:cursor]
            attr_name = parts[cursor]
            ret = ''

    # Walk the remaining attribute path.
    for part in parts[cursor:]:
        try:
            ret = getattr(ret, part)
        except AttributeError:
            raise ImportError(name)

    return ret


def get_class_name(obj, instance=True):
    """Return the fully qualified class name of *obj* (or of its type
    when *instance* is True)."""
    typ = type(obj) if instance else obj
    return "{}.{}".format(typ.__module__, typ.__name__)


def minversion(module, version, inclusive=True, version_path='__version__'):
    """Return True if *module* (module object or importable name) has a
    version >= *version* (strictly > when *inclusive* is False)."""
    if isinstance(module, types.ModuleType):
        module_name = module.__name__
    elif isinstance(module, str):
        module_name = module
        try:
            module = resolve_name(module_name)
        except ImportError:
            return False
    else:
        raise ValueError('module argument must be an actual imported '
                         'module, or the import name of the module; '
                         'got {0!r}'.format(module))

    if '.' not in version_path:
        have_version = getattr(module, version_path)
    else:
        have_version = resolve_name('.'.join([module.__name__, version_path]))

    # Prefer setuptools' parser, fall back to distutils.
    try:
        from pkg_resources import parse_version
    except ImportError:
        from distutils.version import LooseVersion as parse_version

    if inclusive:
        return parse_version(have_version) >= parse_version(version)
    else:
        return parse_version(have_version) > parse_version(version)


class InheritDocstrings(type):
    """Metaclass that fills in missing method docstrings from the
    overridden base-class methods (first match in the MRO wins)."""

    def __init__(cls, name, bases, dct):
        def is_public_member(key):
            # Public: dunder names longer than 4 chars, or names without
            # a leading underscore.
            return (
                (key.startswith('__') and key.endswith('__')
                 and len(key) > 4) or
                not key.startswith('_'))

        for key, val in dct.items():
            if (inspect.isfunction(val) and
                    is_public_member(key) and
                    val.__doc__ is None):
                for base in cls.__mro__[1:]:
                    super_method = getattr(base, key, None)
                    if super_method is not None:
                        val.__doc__ = super_method.__doc__
                        break

        super(InheritDocstrings, cls).__init__(name, bases, dct)
| true | true |
f72fe510b547f529b3a5626defad1371dfcbc75e | 16,658 | py | Python | wbgapi/data.py | mo-cmyk/wbgapi | a0f8658b7a74ec79256d7b66ff58cb95726e89aa | [
"MIT"
] | 41 | 2020-01-29T17:39:50.000Z | 2022-03-31T00:21:52.000Z | wbgapi/data.py | mo-cmyk/wbgapi | a0f8658b7a74ec79256d7b66ff58cb95726e89aa | [
"MIT"
] | 18 | 2020-01-03T06:43:43.000Z | 2022-02-19T13:09:21.000Z | wbgapi/data.py | mo-cmyk/wbgapi | a0f8658b7a74ec79256d7b66ff58cb95726e89aa | [
"MIT"
] | 7 | 2021-03-24T15:41:09.000Z | 2022-03-21T21:26:25.000Z |
'''Access World Bank API data
'''
import wbgapi as w
try:
import numpy as np
import pandas as pd
except ImportError:
np = None
pd = None
def fetch(series, economy='all', time='all', mrv=None, mrnev=None, skipBlanks=False, labels=False, skipAggs=False, numericTimeKeys=False, params={}, db=None, **dimensions):
    '''Retrieve rows of data for the current database

    Arguments:
        series:             a series identifier or list-like, e.g., SP.POP.TOTL

        economy:            an economy identifier or list-like, e.g., 'BRA' or ['USA', 'CAN', 'MEX']

        time:               a time identifier or list-like, e.g., 'YR2015' or range(2010,2020).
                            Both element keys and values are acceptable

        mrv:                return only the specified number of most recent values (same time period for all economies)

        mrnev:              return only the specified number of non-empty most recent values (time period varies)

        skipBlanks:         skip empty observations

        labels:             include both dimension id and name (e.g., ZWE & Zimbabwe, not just ZWE)

        skipAggs:           skip aggregates

        numericTimeKeys:    store the time object by value (e.g., 2014) instead of key ('YR2014') if value is numeric

        params:             extra query parameters to pass to the API

        dimensions:         extra dimensions, database specific (e.g., version)

    Returns:
        A generator object

    Examples:
        # print name and population of all economies for all available years
        for elem in wbgapi.data.fetch('SP.POP.TOTL',labels=True):
            print(elem['economy']['value'], elem['time']['value'], elem['value'])

        # fetch data for Brazil for odd-numbered years
        for elem in wbgapi.data.fetch('NY.GDP.PCAP.CD', 'BRA', range(2011,2020,2)):
            print(elem['value'])

        # most recent poverty rates for all LAC countries
        for elem in wbgapi.data.fetch('SI.POV.NAHC', economy=wb.region.members('LAC'), mrnev=1):
            print(elem['economy'], elem['time'], elem['value'])

        # dict of most recent population data for economies over 100000
        popData = {i['economy']: i['value'] for i in wbgapi.data.fetch('SP.POP.TOTL', mrnev=1, skipAggs=True) if i['value'] > 100000}

    '''

    if db is None:
        db = w.db

    # Map of concept name -> definition, and reverse lookup by API key.
    concepts = w.source.concepts(db)
    concept_keys = {v['key']: k for k,v in concepts.items()}
    params_ = {}
    params_.update(params)
    if mrv:
        params_['mrv'] = mrv
    elif mrnev:
        params_['mrnev'] = mrnev

    # you can thus pass series, economy, and time in the dimensions array, and those will overwrite the explicit parameters
    dimensions_ = {'series': series, 'economy': economy, 'time': time}
    dimensions_.update(dimensions)

    # Build the request URL one concept segment at a time.
    url = 'sources/{}'.format(db)
    keys = ['series', 'economy', 'time']
    values = {}
    for k,v in dimensions_.items():
        if k not in concepts:
            raise KeyError('{} is not a concept in database {}'.format(k, db))

        if k not in keys:
            keys.append(k)

        url += '/{}/{}'.format(concepts[k]['key'], '{' + k + '}')
        values[k] = w.queryParam(v, concept=k, db=db)

    aggs = w.economy.aggregates()

    for row in w.refetch(url, keys, params=params_, **values):
        if skipBlanks and row['value'] is None:
            continue

        skip = False

        # Re-key each concept of the observation into a flat dict.
        x = {'value': row['value']}
        for elem in row['variable']:
            key = concept_keys[elem['concept'].lower()]
            if key == 'economy' and skipAggs and elem['id'] in aggs:
                skip = True
                break

            if not skip:
                if labels:
                    # Keep the whole {id, value} object for each dimension.
                    del(elem['concept'])
                    x[key] = elem
                    if key == 'economy':
                        x[key]['aggregate'] = elem['id'] in aggs
                    elif key == 'time' and numericTimeKeys and elem['value'].isdigit():
                        x[key]['id'] = int(elem['value'])
                else:
                    # Keep just the identifier for each dimension.
                    x[key] = elem['id']
                    if key == 'economy':
                        x['aggregate'] = elem['id'] in aggs
                    elif key == 'time' and numericTimeKeys and elem['value'].isdigit():
                        x[key] = int(elem['value'])

        if not skip:
            yield x
def FlatFrame(series, economy='all', time='all', mrv=None, mrnev=None, skipBlanks=False, labels=False, skipAggs=False, params={}, db=None, **dimensions):
    '''Retrieve a flat pandas dataframe (1 row per observation)

    Arguments:
        series:             a series identifier or list-like, e.g., SP.POP.TOTL

        economy:            an economy identifier or list-like, e.g., 'BRA' or ['USA', 'CAN', 'MEX']

        time:               a time identifier or list-like, e.g., 'YR2015' or range(2010,2020).
                            Both element keys and values are acceptable

        mrv:                return only the specified number of most recent values (same time period for all economies)

        mrnev:              return only the specified number of non-empty most recent values (time period varies)

        skipBlanks:         skip empty observations

        labels:             return the dimension name instead of the identifier

        skipAggs:           skip aggregates

        params:             extra query parameters to pass to the API

        dimensions:         extra dimensions, database specific (e.g., version)

    Returns:
        a pandas DataFrame

    Notes:
        values in the time column are numeric if possible (2015 not 'YR2015')
    '''

    if pd is None:
        raise ModuleNotFoundError('you must install pandas to use this feature')

    # Each fetched row dimension is a {'id': ..., 'value': ...} dict;
    # pick which of the two we surface in the frame.
    key = 'value' if labels else 'id'
    df = None

    # we set numericTimeKeys=True so that time values will always be numeric if possible
    for row in fetch(series, economy, time, mrv=mrv, mrnev=mrnev, skipBlanks=skipBlanks, labels=True, numericTimeKeys=True, skipAggs=skipAggs, params=params, db=db, **dimensions):
        if df is None:
            # this assumes that the API returns the same object structure in every row, so we can use the first as a template
            columns = row.keys()
            df = pd.DataFrame(columns=columns)

        # Append one row; non-dict entries ('value', 'aggregate') pass through.
        df.loc[len(df)] = [row[i][key] if type(row[i]) is dict else row[i] for i in columns]

    return df
def DataFrame(series, economy='all', time='all', index=None, columns=None, mrv=None, mrnev=None, skipBlanks=False, labels=False, skipAggs=False, numericTimeKeys=False, timeColumns=False, params={}, db=None, **dimensions):
    '''Retrieve a 2-dimensional pandas dataframe.

    Arguments:
        series:             a series identifier or list-like, e.g., SP.POP.TOTL

        economy:            an economy identifier or list-like, e.g., 'BRA' or ['USA', 'CAN', 'MEX']

        time:               a time identifier or list-like, e.g., 'YR2015' or range(2010,2020).
                            Both element keys and values are acceptable

        index:              name or list of dimensions for the DataFrame's index, e.g., 'economy'. If None then the function
                            will define the index based on your request. Note: to get a dataframe with no index
                            (i.e., 0-based integers) call `reset_index()` with on the return value of this function.

        columns:            name of the dimension for the DataFrame's columns, e.g., 'series'. If None then the function
                            will define columns based on your request.

        mrv:                return only the specified number of most recent values (same time period for all economies)

        mrnev:              return only the specified number of non-empty most recent values (time period varies)

        skipBlanks:         skip empty observations

        labels:             include the dimension name for rows

        skipAggs:           skip aggregates

        numericTimeKeys:    store the time object by value (e.g., 2014) instead of key ('YR2014') if value is numeric

        timeColumns:        add extra columns to show the time dimension for each series/economy
                            If 'auto' then the function will guess based on other parameters

        params:             extra query parameters to pass to the API

        dimensions:         extra dimensions, database specific (e.g., version)

    Returns:
        a pandas DataFrame

    Examples:
        # 5 years of population data (with economy names)
        wbgapi.data.DataFrame('SP.POP.TOTL', time=range(2010,2020),labels=True)

        # Most recent poverty and income data for LAC
        wbgapi.data.DataFrame(['SI.POV.NAHC', 'NY.GDP.PCAP.CD'], economy=wb.region.members('LAC'),mrnev=1,timeColumns=True)

        # Fetch most recent CO2 emissions for each country and merge its income group
        wbgapi.data.DataFrame('EN.ATM.CO2E.PC',mrnev=1).join(wbgapi.economy.DataFrame()['incomeLevel'])

        # Top 10 emitters per capita
        wbgapi.data.DataFrame('EN.ATM.CO2E.PC',mrnev=1,labels=True).sort_values('EN.ATM.CO2E.PC',ascending=False).head(10)

    Notes:
        timeColumns currently defaults to False so that the default column composition is consistent. This may change to 'auto'
        at some point, so that mrv behavior is more intuitive for data discovery
    '''

    def frame(index):
        # Build an empty frame with a (possibly multi-level) named index.
        if len(index) > 1:
            i = [[]] * len(index)
            return pd.DataFrame(index=pd.MultiIndex(levels=i, codes=i, names=tuple(index)))

        df = pd.DataFrame()
        df.index.name = index[0]
        return df

    def is_single(x):
        # True if the request selects exactly one element of a dimension.
        if type(x) is str:
            if x == 'all':
                return False
            elif x == 'mrv':
                return True

        # not necessary to pass db since we don't actually care about the parameters just the count of them
        return len(w.queryParam(x).split(';')) == 1

    if pd is None:
        raise ModuleNotFoundError('you must install pandas to use this feature')

    # set up the axes by looking at the index/column parameters
    concepts = ['economy','series','time']
    for k,v in w.source.concepts(db).items():
        if k not in concepts:
            concepts.insert(0, k)

    if type(index) is str:
        index = [index]

    if index is None or columns is None:
        # we need to infer at least one dimension
        dimensions_ = {'series': series, 'economy': economy, 'time': time}
        dimensions_.update(dimensions)

        axes = concepts.copy()

        # now we reduce axes by eliminating any dimension consisting of
        # one element not defined in the calling parameters, with a stop
        # if we reduce to 2 dimensions
        x = concepts.copy()
        x.reverse()
        for k in x:
            if len(axes) == 2:
                break

            if k == columns or (type(index) is list and k in index):
                continue

            values = dimensions_.get(k, 'all')
            if k == 'time' and (mrv == 1 or mrnev == 1 or is_single(values)):
                axes.remove(k)
                if timeColumns == 'auto' and (mrv == 1 or mrnev == 1):
                    timeColumns = True
            elif is_single(values):
                axes.remove(k)

        if columns is None and index is None:
            columns = axes.pop(-1)
            index = axes
        elif columns is None:
            # try to guess a column based on what index doesn't define
            x = list(filter(lambda x: x not in index, axes))
            if len(x) > 0:
                columns = x[-1]
            elif len(set(concepts) - set(index)) > 0:
                # Bug fix: this previously read `(set(concepts) - set(list)) > 0`,
                # which raised TypeError (set() of the builtin `list`, and a
                # set compared to an int). The intent, per the comment below,
                # is to test whether any concept remains outside `index`.
                # index has claimed all non-singular dimensions, so set columns from the full concepts list
                x = list(filter(lambda x: x not in index, concepts))
                columns = x[-1]
            else:
                # index is the same as the concepts list. That's not allowed
                raise ValueError('one dimension must be a column')
        elif index is None:
            axes.remove(columns)
            index = axes

    # sanity checks
    if type(columns) is not str or columns not in concepts:
        raise ValueError('columns must be None or a dimension')

    if type(index) is not list or len(set(index) - set(concepts)) > 0:
        raise ValueError('index must be None or a dimension list')

    if columns in index:
        raise ValueError('columns cannot be an element in index')

    if columns == 'time' or 'time' in index or timeColumns == 'auto':
        timeColumns = False

    # for now let's see if it works to build the dataframe dynamically
    df = frame(index)
    # empty series - never assigned actual values (dtype given explicitly to
    # avoid pandas' implicit-dtype deprecation warning)
    dummy = pd.Series(dtype=object)
    ts_suffix = ':T'
    concepts = w.source.concepts(db)
    if labels:
        # create a separate dataframe for labels so that we can control the column position below
        df2 = frame(index)

    for row in fetch(series, economy, time, mrv=mrv, mrnev=mrnev, skipBlanks=skipBlanks, labels=True, skipAggs=skipAggs, numericTimeKeys=numericTimeKeys, params=params, db=db, **dimensions):
        column_key = row[columns]['id']
        if len(index) == 1:
            index_key = row[index[0]]['id']
        else:
            index_key = tuple(map(lambda x: row[x]['id'], index))

        # this logic only assigns values to locations that don't yet exist. First observations thus take precedent over subsequent ones
        if pd.isna(df.get(column_key, dummy).get(index_key)):
            df.loc[index_key, column_key] = np.nan if row['value'] is None else row['value']
            if timeColumns:
                df.loc[index_key, column_key + ts_suffix] = row['time']['value']

        if labels:
            for i in index:
                df2.loc[index_key, concepts[i]['value']] = row[i]['value']

    df.sort_index(axis=0,inplace=True)
    df.sort_index(axis=1,inplace=True)
    if labels:
        return df2.join(df)
        # return pd.concat([df2,df], axis=1, sort=False)

    return df
def get(series, economy, time='all', mrv=None, mrnev=None, labels=False, numericTimeKeys=False, db=None, **dimensions):
    '''Fetch a single data observation from the current database

    Arguments:
        series:          a series identifier

        economy:         an economy identifier

        time:            a time identifier; element keys and values are both accepted

        mrv:             limit the result to this many most recent values (same period for all economies)

        mrnev:           limit the result to this many non-empty most recent values (period varies)

        labels:          include both dimension id and name (e.g., ZWE & Zimbabwe, not just ZWE)

        numericTimeKeys: store numeric time identifiers by value (e.g., 2014) instead of key ('YR2014')

        dimensions:      database-specific extra dimensions (e.g., version)

    Returns:
        a single data observation, or None if nothing matched

    Notes:
        This is a thin wrapper that returns only the first row produced by fetch().
        For predictable results, set mrv or mrnev to 1, or request a single time period.

    Example:
        # print the last population estimate for France
        print(wbgapi.data.get('SP.POP.TOTL', 'FRA', mrnev=1)['value'])
    '''

    rows = fetch(series, economy, time, mrv=mrv, mrnev=mrnev, labels=labels, numericTimeKeys=numericTimeKeys, params={'per_page': 1}, db=db, **dimensions)
    # first row if any, else None - identical to a for-loop that returns immediately
    return next(iter(rows), None)
def footnote(series, economy, time, db=None):
    '''Return the footnote for a single data point, if any

    Arguments:
        series:     a series identifier

        economy:    an economy identifier

        time:       a time identifier. Both element keys and values are acceptable

    Returns:
        footnote text, or None

    Example:
        print(wbgapi.data.footnote('SP.POP.TOTL', 'FRA', 2015))
    '''

    if db is None:
        db = w.db

    # note that this only supports singular footnote references at this point, although the interface suggests otherwise
    url = 'sources/{source}/footnote/{economy}~{series}~{time}/metadata'
    try:
        for row in w.metadata(url, ['series'], source=db, series=series, economy=economy, time=w.queryParam(time, 'time', db=db)):
            return row.metadata['FootNote']
    except Exception:
        # FIX: the original bare `except:` also swallowed KeyboardInterrupt and
        # SystemExit; restrict the "no footnote" fallback to ordinary errors
        return None
| 39.380615 | 221 | 0.593228 |
import wbgapi as w
try:
import numpy as np
import pandas as pd
except ImportError:
np = None
pd = None
def fetch(series, economy='all', time='all', mrv=None, mrnev=None, skipBlanks=False, labels=False, skipAggs=False, numericTimeKeys=False, params={}, db=None, **dimensions):
    '''Yield data observations for the requested series/economy/time selection.

    Each yielded row is a dict with a 'value' key plus one key per dimension;
    dimension values are ids by default, or {'id', 'value', ...} dicts when
    labels=True. Extra keyword arguments select additional database-specific
    dimensions (e.g., version).

    NOTE(review): `params={}` is a mutable default argument; it is only read
    (copied into params_), so this is benign, but `params=None` would be safer.
    '''
    if db is None:
        db = w.db
    # map API concept keys (lowercased) back to our canonical concept names
    concepts = w.source.concepts(db)
    concept_keys = {v['key']: k for k,v in concepts.items()}
    # copy caller params so we never mutate the argument
    params_ = {}
    params_.update(params)
    if mrv:
        params_['mrv'] = mrv
    elif mrnev:
        params_['mrnev'] = mrnev
    # start from the three standard dimensions, then overlay extras
    dimensions_ = {'series': series, 'economy': economy, 'time': time}
    dimensions_.update(dimensions)
    # build the request URL one /concept-key/{placeholder} segment at a time
    url = 'sources/{}'.format(db)
    keys = ['series', 'economy', 'time']
    values = {}
    for k,v in dimensions_.items():
        if k not in concepts:
            raise KeyError('{} is not a concept in database {}'.format(k, db))
        if k not in keys:
            keys.append(k)
        url += '/{}/{}'.format(concepts[k]['key'], '{' + k + '}')
        values[k] = w.queryParam(v, concept=k, db=db)
    aggs = w.economy.aggregates()
    for row in w.refetch(url, keys, params=params_, **values):
        if skipBlanks and row['value'] is None:
            continue
        skip = False
        x = {'value': row['value']}
        for elem in row['variable']:
            key = concept_keys[elem['concept'].lower()]
            # drop rows for aggregate economies when requested
            if key == 'economy' and skipAggs and elem['id'] in aggs:
                skip = True
                break
            if not skip:
                if labels:
                    # keep the full element (minus the redundant concept name)
                    del(elem['concept'])
                    x[key] = elem
                    if key == 'economy':
                        x[key]['aggregate'] = elem['id'] in aggs
                    elif key == 'time' and numericTimeKeys and elem['value'].isdigit():
                        x[key]['id'] = int(elem['value'])
                else:
                    # id-only form; economies also get a top-level 'aggregate' flag
                    x[key] = elem['id']
                    if key == 'economy':
                        x['aggregate'] = elem['id'] in aggs
                    elif key == 'time' and numericTimeKeys and elem['value'].isdigit():
                        x[key] = int(elem['value'])
        if not skip:
            yield x
def FlatFrame(series, economy='all', time='all', mrv=None, mrnev=None, skipBlanks=False, labels=False, skipAggs=False, params={}, db=None, **dimensions):
    '''Return the data set as a "flat" pandas DataFrame: one observation per row,
    one column per dimension plus 'value'.

    Arguments mirror fetch(); labels=True stores dimension names instead of ids.

    Returns:
        a pandas DataFrame, or None if the query produced no rows
    '''
    if pd is None:
        raise ModuleNotFoundError('you must install pandas to use this feature')

    key = 'value' if labels else 'id'
    columns = None
    rows = []
    for row in fetch(series, economy, time, mrv=mrv, mrnev=mrnev, skipBlanks=skipBlanks, labels=True, numericTimeKeys=True, skipAggs=skipAggs, params=params, db=db, **dimensions):
        if columns is None:
            columns = list(row.keys())
        # dimension cells arrive as {'id': ..., 'value': ...} dicts; flatten each
        # to the requested key, and pass scalar cells (e.g. 'value') through
        rows.append([row[i][key] if type(row[i]) is dict else row[i] for i in columns])

    if columns is None:
        # no data: preserve the original behavior of returning None
        return None
    # FIX: build the frame once from the collected rows instead of appending
    # row-by-row with df.loc[len(df)], which is quadratic in the row count.
    # (Column dtypes are now inferred by pandas rather than left as object.)
    return pd.DataFrame(rows, columns=columns)
def DataFrame(series, economy='all', time='all', index=None, columns=None, mrv=None, mrnev=None, skipBlanks=False, labels=False, skipAggs=False, numericTimeKeys=False, timeColumns=False, params={}, db=None, **dimensions):
    '''Return a pivoted pandas DataFrame of data observations.

    index/columns name the dimensions used for the frame's axes; whichever is
    omitted is inferred from the dimensions that are not singular in the query.
    timeColumns adds a ':T' column per value column holding the time label
    ('auto' enables this only for mrv=1/mrnev=1 queries). labels=True prefixes
    the frame with name columns for each index dimension.
    '''

    def frame(index):
        # build an empty frame whose (possibly multi-) index matches the
        # requested index dimensions
        if len(index) > 1:
            i = [[]] * len(index)
            return pd.DataFrame(index=pd.MultiIndex(levels=i, codes=i, names=tuple(index)))
        df = pd.DataFrame()
        df.index.name = index[0]
        return df

    def is_single(x):
        # True if the query parameter resolves to exactly one element
        if type(x) is str:
            if x == 'all':
                return False
            elif x == 'mrv':
                return True
        return len(w.queryParam(x).split(';')) == 1

    if pd is None:
        raise ModuleNotFoundError('you must install pandas to use this feature')

    # set up the axes by looking at the index/column parameters
    concepts = ['economy','series','time']
    for k,v in w.source.concepts(db).items():
        if k not in concepts:
            concepts.insert(0, k)
    if type(index) is str:
        index = [index]
    if index is None or columns is None:
        # we need to infer at least one dimension
        dimensions_ = {'series': series, 'economy': economy, 'time': time}
        dimensions_.update(dimensions)
        axes = concepts.copy()
        # now we reduce axes by eliminating any dimension consisting of
        # one element not defined in the calling parameters, with a stop
        # if we reduce to 2 dimensions
        x = concepts.copy()
        x.reverse()
        for k in x:
            if len(axes) == 2:
                break
            if k == columns or (type(index) is list and k in index):
                continue
            values = dimensions_.get(k, 'all')
            if k == 'time' and (mrv == 1 or mrnev == 1 or is_single(values)):
                axes.remove(k)
                if timeColumns == 'auto' and (mrv == 1 or mrnev == 1):
                    timeColumns = True
            elif is_single(values):
                axes.remove(k)
        if columns is None and index is None:
            columns = axes.pop(-1)
            index = axes
        elif columns is None:
            # try to guess a column based on what index doesn't define
            x = list(filter(lambda x: x not in index, axes))
            if len(x) > 0:
                columns = x[-1]
            elif len(set(concepts) - set(index)) > 0:
                # index has claimed all non-singular dimensions, so set columns
                # from the full concepts list.
                # FIX: the original tested `(set(concepts) - set(list)) > 0`,
                # which raises a TypeError (`list` is the builtin type, and a
                # set cannot be ordered against an int); the intended test is
                # a non-empty difference against `index`.
                x = list(filter(lambda x: x not in index, concepts))
                columns = x[-1]
            else:
                # index is the same as the concepts list. That's not allowed
                raise ValueError('one dimension must be a column')
        elif index is None:
            axes.remove(columns)
            index = axes
    # sanity checks
    if type(columns) is not str or columns not in concepts:
        raise ValueError('columns must be None or a dimension')
    if type(index) is not list or len(set(index) - set(concepts)) > 0:
        raise ValueError('index must be None or a dimension list')
    if columns in index:
        raise ValueError('columns cannot be an element in index')
    if columns == 'time' or 'time' in index or timeColumns == 'auto':
        timeColumns = False
    # for now let's see if it works to build the dataframe dynamically
    df = frame(index)
    dummy = pd.Series(dtype=object)  # empty sentinel series - never assigned actual values
    ts_suffix = ':T'
    concepts = w.source.concepts(db)
    if labels:
        # create a separate dataframe for labels so that we can control the column position below
        df2 = frame(index)
    for row in fetch(series, economy, time, mrv=mrv, mrnev=mrnev, skipBlanks=skipBlanks, labels=True, skipAggs=skipAggs, numericTimeKeys=numericTimeKeys, params=params, db=db, **dimensions):
        column_key = row[columns]['id']
        if len(index) == 1:
            index_key = row[index[0]]['id']
        else:
            index_key = tuple(map(lambda x: row[x]['id'], index))
        # this logic only assigns values to locations that don't yet exist.
        # First observations thus take precedent over subsequent ones
        if pd.isna(df.get(column_key, dummy).get(index_key)):
            df.loc[index_key, column_key] = np.nan if row['value'] is None else row['value']
            if timeColumns:
                df.loc[index_key, column_key + ts_suffix] = row['time']['value']
            if labels:
                for i in index:
                    df2.loc[index_key, concepts[i]['value']] = row[i]['value']
    df.sort_index(axis=0,inplace=True)
    df.sort_index(axis=1,inplace=True)
    if labels:
        return df2.join(df)
    return df
def get(series, economy, time='all', mrv=None, mrnev=None, labels=False, numericTimeKeys=False, db=None, **dimensions):
    '''Return the first observation matched by fetch(), or None if there is no match.'''
    result = fetch(series, economy, time, mrv=mrv, mrnev=mrnev, labels=labels, numericTimeKeys=numericTimeKeys, params={'per_page': 1}, db=db, **dimensions)
    return next(iter(result), None)
def footnote(series, economy, time, db=None):
    '''Return the footnote text for a single data point, or None if there is none.'''
    if db is None:
        db = w.db
    # note that this only supports singular footnote references at this point, although the interface suggests otherwise
    url = 'sources/{source}/footnote/{economy}~{series}~{time}/metadata'
    try:
        for row in w.metadata(url, ['series'], source=db, series=series, economy=economy, time=w.queryParam(time, 'time', db=db)):
            return row.metadata['FootNote']
    except:
        # NOTE(review): a bare `except:` also swallows KeyboardInterrupt and
        # SystemExit; consider narrowing this to `except Exception:`
        pass # will return None then
| true | true |
f72fe57794917edbcfc8d26818116b24e336b4d8 | 787 | py | Python | examples/tf/trpo_gym_tf_cartpole.py | shadiakiki1986/garage | 095bb5d25b32df1d44b47e99a78a9b01796941d9 | [
"MIT"
] | null | null | null | examples/tf/trpo_gym_tf_cartpole.py | shadiakiki1986/garage | 095bb5d25b32df1d44b47e99a78a9b01796941d9 | [
"MIT"
] | null | null | null | examples/tf/trpo_gym_tf_cartpole.py | shadiakiki1986/garage | 095bb5d25b32df1d44b47e99a78a9b01796941d9 | [
"MIT"
] | null | null | null | import gym
from garage.baselines import LinearFeatureBaseline
from garage.experiment import run_experiment
from garage.tf.algos import TRPO
from garage.tf.envs import TfEnv
from garage.tf.policies import CategoricalMLPPolicy
# Need to wrap in a tf environment and force_reset to true
# see https://github.com/openai/rllab/issues/87#issuecomment-282519288
# Wrap the gym environment for use with garage's TF stack
env = TfEnv(gym.make("CartPole-v0"))
# Categorical policy over CartPole's discrete actions, two 32-unit hidden layers
policy = CategoricalMLPPolicy(
    name="policy", env_spec=env.spec, hidden_sizes=(32, 32))
# Simple linear baseline to reduce variance of the policy gradient
baseline = LinearFeatureBaseline(env_spec=env.spec)
algo = TRPO(
    env=env,
    policy=policy,
    baseline=baseline,
    batch_size=4000,
    max_path_length=200,
    n_itr=120,
    discount=0.99,
    max_kl_step=0.01,  # trust-region size for each TRPO update
)
# Single worker; only the final iteration's snapshot is kept
run_experiment(algo.train(), n_parallel=1, snapshot_mode="last", seed=1)
| 26.233333 | 72 | 0.758577 | import gym
from garage.baselines import LinearFeatureBaseline
from garage.experiment import run_experiment
from garage.tf.algos import TRPO
from garage.tf.envs import TfEnv
from garage.tf.policies import CategoricalMLPPolicy
# FIX: the first line was mangled (it read `CartPole-v0"))`); restored from the
# intact copy of this script: wrap the gym env for garage's TF stack.
env = TfEnv(gym.make("CartPole-v0"))
# Categorical policy over discrete actions, two 32-unit hidden layers
policy = CategoricalMLPPolicy(
    name="policy", env_spec=env.spec, hidden_sizes=(32, 32))
# Linear baseline to reduce policy-gradient variance
baseline = LinearFeatureBaseline(env_spec=env.spec)
algo = TRPO(
    env=env,
    policy=policy,
    baseline=baseline,
    batch_size=4000,
    max_path_length=200,
    n_itr=120,
    discount=0.99,
    max_kl_step=0.01,
)
run_experiment(algo.train(), n_parallel=1, snapshot_mode="last", seed=1)
| true | true |
f72fe5795879771746bcc6ee4b44c101ac8e4453 | 267 | py | Python | CCF/CSP/2018/18121.py | cnsteven/online-judge | 60ee841a97e2bc0dc9c7b23fe5daa186898ab8b7 | [
"MIT"
] | 1 | 2019-05-04T10:28:32.000Z | 2019-05-04T10:28:32.000Z | CCF/CSP/2018/18121.py | cnsteven/online-judge | 60ee841a97e2bc0dc9c7b23fe5daa186898ab8b7 | [
"MIT"
] | null | null | null | CCF/CSP/2018/18121.py | cnsteven/online-judge | 60ee841a97e2bc0dc9c7b23fe5daa186898ab8b7 | [
"MIT"
] | 3 | 2020-12-31T04:36:38.000Z | 2021-07-25T07:39:31.000Z | r, y, g = map(int, input().split())
n = int(input())
ans = 0
for _ in range(n):
k, t = map(int, input().split())
if k == 0:
ans += t
elif k == 1:
ans += t
elif k == 2:
ans = ans + t + r
elif k == 3:
pass
print(ans)
| 17.8 | 36 | 0.419476 | r, y, g = map(int, input().split())
n = int(input())
ans = 0
for _ in range(n):
k, t = map(int, input().split())
if k == 0:
ans += t
elif k == 1:
ans += t
elif k == 2:
ans = ans + t + r
elif k == 3:
pass
print(ans)
| true | true |
f72fe6c802fc9b6df210c17f9eaf4d123167398f | 1,996 | py | Python | examples/Model_HM_RWS.py | kpoeppel/pytorch_probgraph | b78595ab03bbe92595ad2f6b35f5dd8bf84d6da0 | [
"BSD-3-Clause"
] | 47 | 2020-08-10T02:04:26.000Z | 2022-03-23T22:20:56.000Z | examples/Model_HM_RWS.py | kpoeppel/pytorch_probgraph | b78595ab03bbe92595ad2f6b35f5dd8bf84d6da0 | [
"BSD-3-Clause"
] | null | null | null | examples/Model_HM_RWS.py | kpoeppel/pytorch_probgraph | b78595ab03bbe92595ad2f6b35f5dd8bf84d6da0 | [
"BSD-3-Clause"
] | 4 | 2020-08-10T15:32:06.000Z | 2021-12-29T15:04:20.000Z |
import site
site.addsitedir('..')
import torch
from pytorch_probgraph import BernoulliLayer
from pytorch_probgraph import InteractionLinear
from pytorch_probgraph import HelmholtzMachine
from itertools import chain
from tqdm import tqdm
class Model_HM_RWS(torch.nn.Module):
    """Three-layer Bernoulli Helmholtz machine trained with reweighted wake-sleep.

    The visible layer is sized 1x28x28 (e.g. MNIST images); two 200-unit hidden
    layers are connected by separate recognition (up) and generative (down)
    linear interactions, all optimized jointly with Adam.
    """
    def __init__(self):
        super().__init__()
        # learnable biases for visible and hidden Bernoulli layers
        layer0 = BernoulliLayer(torch.nn.Parameter(torch.zeros([1, 1, 28, 28]), requires_grad=True))
        layer1 = BernoulliLayer(torch.nn.Parameter(torch.zeros([1, 200]), requires_grad=True))
        layer2 = BernoulliLayer(torch.nn.Parameter(torch.zeros([1, 200]), requires_grad=True))
        # separate weights for the recognition (up) and generative (down) passes
        interactionUp1 = InteractionLinear(layer0.bias.shape[1:], layer1.bias.shape[1:])
        interactionDown1 = InteractionLinear(layer1.bias.shape[1:], layer0.bias.shape[1:])
        interactionUp2 = InteractionLinear(layer1.bias.shape[1:], layer2.bias.shape[1:])
        interactionDown2 = InteractionLinear(layer2.bias.shape[1:], layer1.bias.shape[1:])
        # one optimizer over every layer's and interaction's parameters
        parameters = chain(*[m.parameters() for m in [layer0, layer1, layer2, interactionUp1, interactionUp2, interactionDown1, interactionDown2]])
        opt = torch.optim.Adam(parameters)
        self.model = HelmholtzMachine([layer0, layer1, layer2],
                                      [interactionUp1, interactionUp2],
                                      [interactionDown1, interactionDown2],
                                      optimizer=opt)
    def train(self, data, epochs=1, device=None):
        # data: iterable of batch tensors; each batch is moved to `device`
        for epoch in range(epochs):
            for dat in data:
                self.model.trainReweightedWS(dat.to(device), ksamples=5)
            # NOTE(review): this runs *after* the inner loop and the rebound
            # `data` is only seen by later epochs; it also wraps `data` in tqdm
            # only when it already *is* a tqdm. Looks like dead/buggy progress
            # code - confirm intent before removing.
            if isinstance(data, tqdm):
                data = tqdm(data)
    def loglikelihood(self, data):
        # importance-sampled log-likelihood estimate, detached on the CPU
        return self.model.loglikelihood(data, ksamples=100).cpu().detach()
    def generate(self, N=1):
        # sample N visible-layer configurations from the generative model
        return self.model.sampleAll(N=N)[0][0].cpu()
| 43.391304 | 147 | 0.655311 |
import site
site.addsitedir('..')
import torch
from pytorch_probgraph import BernoulliLayer
from pytorch_probgraph import InteractionLinear
from pytorch_probgraph import HelmholtzMachine
from itertools import chain
from tqdm import tqdm
class Model_HM_RWS(torch.nn.Module):
    """A 1x28x28 Bernoulli Helmholtz machine with two 200-unit hidden layers,
    trained by reweighted wake-sleep (RWS)."""
    def __init__(self):
        super().__init__()
        visible = BernoulliLayer(torch.nn.Parameter(torch.zeros([1, 1, 28, 28]), requires_grad=True))
        hidden1 = BernoulliLayer(torch.nn.Parameter(torch.zeros([1, 200]), requires_grad=True))
        hidden2 = BernoulliLayer(torch.nn.Parameter(torch.zeros([1, 200]), requires_grad=True))
        # recognition (up) and generative (down) linear interactions per layer pair
        up1 = InteractionLinear(visible.bias.shape[1:], hidden1.bias.shape[1:])
        down1 = InteractionLinear(hidden1.bias.shape[1:], visible.bias.shape[1:])
        up2 = InteractionLinear(hidden1.bias.shape[1:], hidden2.bias.shape[1:])
        down2 = InteractionLinear(hidden2.bias.shape[1:], hidden1.bias.shape[1:])
        # one Adam optimizer over every trainable module (same order as before)
        trainable = [visible, hidden1, hidden2, up1, up2, down1, down2]
        opt = torch.optim.Adam(chain.from_iterable(m.parameters() for m in trainable))
        self.model = HelmholtzMachine(
            [visible, hidden1, hidden2], [up1, up2], [down1, down2], optimizer=opt
        )
    def train(self, data, epochs=1, device=None):
        """Run `epochs` passes of reweighted wake-sleep over `data`."""
        for _ in range(epochs):
            for batch in data:
                self.model.trainReweightedWS(batch.to(device), ksamples=5)
            if isinstance(data, tqdm):
                data = tqdm(data)
    def loglikelihood(self, data):
        """Importance-sampled log-likelihood estimate, detached on the CPU."""
        return self.model.loglikelihood(data, ksamples=100).cpu().detach()
    def generate(self, N=1):
        """Draw N samples from the generative model's visible layer."""
        return self.model.sampleAll(N=N)[0][0].cpu()
| true | true |
f72fe7511018a20cd842050a050f2a2e4c49353b | 6,242 | py | Python | jesse/models/utils.py | farukuzun/jesse | c4c0c3dbab034db853fc1b09ac0f2697592bed79 | [
"MIT"
] | 1 | 2021-07-04T10:18:28.000Z | 2021-07-04T10:18:28.000Z | jesse/models/utils.py | farukuzun/jesse | c4c0c3dbab034db853fc1b09ac0f2697592bed79 | [
"MIT"
] | null | null | null | jesse/models/utils.py | farukuzun/jesse | c4c0c3dbab034db853fc1b09ac0f2697592bed79 | [
"MIT"
] | null | null | null | import threading
import numpy as np
import jesse.helpers as jh
from jesse.models.Candle import Candle
from jesse.models.CompletedTrade import CompletedTrade
from jesse.models.DailyBalance import DailyBalance
from jesse.models.Order import Order
from jesse.models.Orderbook import Orderbook
from jesse.models.Ticker import Ticker
from jesse.models.Trade import Trade
from jesse.services import logger
def store_candle_into_db(exchange: str, symbol: str, candle: np.ndarray) -> None:
    """Persist one OHLCV candle in a background thread; duplicate rows are ignored."""
    record = {
        'id': jh.generate_unique_id(),
        'symbol': symbol,
        'exchange': exchange,
        'timestamp': candle[0],
        'open': candle[1],
        'high': candle[3],
        'low': candle[4],
        'close': candle[2],
        'volume': candle[5],
    }

    def _save() -> None:
        # on_conflict_ignore makes the insert idempotent for repeated candles
        Candle.insert(**record).on_conflict_ignore().execute()
        label = f"candle: {jh.timestamp_to_time(record['timestamp'])}-{exchange}-{symbol}: {candle}"
        print(jh.color(label, 'blue'))

    # fire-and-forget so the caller is not blocked on the database write
    threading.Thread(target=_save).start()
def store_ticker_into_db(exchange: str, symbol: str, ticker: np.ndarray) -> None:
    """Would persist a ticker snapshot in a background thread.

    NOTE: the unconditional `return` below disables this function entirely;
    the code after it is intentionally dead (kept, presumably, for later
    re-enabling - confirm before removing).
    """
    return
    d = {
        'id': jh.generate_unique_id(),
        'timestamp': ticker[0],
        'last_price': ticker[1],
        'high_price': ticker[2],
        'low_price': ticker[3],
        'volume': ticker[4],
        'symbol': symbol,
        'exchange': exchange,
    }
    def async_save() -> None:
        Ticker.insert(**d).on_conflict_ignore().execute()
        print(
            jh.color(f'ticker: {jh.timestamp_to_time(d["timestamp"])}-{exchange}-{symbol}: {ticker}', 'yellow')
        )
    # async call
    threading.Thread(target=async_save).start()
def store_completed_trade_into_db(completed_trade: CompletedTrade) -> None:
    """Would persist a completed trade record in a background thread.

    NOTE: the unconditional `return` below disables this function; the
    remainder is intentionally dead code.
    """
    return
    d = {
        'id': completed_trade.id,
        'strategy_name': completed_trade.strategy_name,
        'symbol': completed_trade.symbol,
        'exchange': completed_trade.exchange,
        'type': completed_trade.type,
        'timeframe': completed_trade.timeframe,
        'entry_price': completed_trade.entry_price,
        'exit_price': completed_trade.exit_price,
        'take_profit_at': completed_trade.take_profit_at,
        'stop_loss_at': completed_trade.stop_loss_at,
        'qty': completed_trade.qty,
        'opened_at': completed_trade.opened_at,
        'closed_at': completed_trade.closed_at,
        'entry_candle_timestamp': completed_trade.entry_candle_timestamp,
        'exit_candle_timestamp': completed_trade.exit_candle_timestamp,
        'leverage': completed_trade.leverage,
    }
    def async_save() -> None:
        CompletedTrade.insert(**d).execute()
        if jh.is_debugging():
            logger.info(f'Stored the completed trade record for {completed_trade.exchange}-{completed_trade.symbol}-{completed_trade.strategy_name} into database.')
    # async call
    threading.Thread(target=async_save).start()
def store_order_into_db(order: Order) -> None:
    """Would persist an executed order record in a background thread.

    NOTE: the unconditional `return` below disables this function; the
    remainder is intentionally dead code.
    """
    return
    d = {
        'id': order.id,
        'trade_id': order.trade_id,
        'exchange_id': order.exchange_id,
        'vars': order.vars,
        'symbol': order.symbol,
        'exchange': order.exchange,
        'side': order.side,
        'type': order.type,
        'flag': order.flag,
        'qty': order.qty,
        'price': order.price,
        'status': order.status,
        'created_at': order.created_at,
        'executed_at': order.executed_at,
        'canceled_at': order.canceled_at,
        'role': order.role,
    }
    def async_save() -> None:
        Order.insert(**d).execute()
        if jh.is_debugging():
            logger.info(f'Stored the executed order record for {order.exchange}-{order.symbol} into database.')
    # async call
    threading.Thread(target=async_save).start()
def store_daily_balance_into_db(daily_balance: dict) -> None:
    """Would persist a daily portfolio-balance record in a background thread.

    NOTE: the unconditional `return` below disables this function; the
    remainder is intentionally dead code.
    """
    return
    def async_save():
        DailyBalance.insert(**daily_balance).execute()
        if jh.is_debugging():
            logger.info(f'Stored daily portfolio balance record into the database: {daily_balance["asset"]} => {jh.format_currency(round(daily_balance["balance"], 2))}'
                        )
    # async call
    threading.Thread(target=async_save).start()
def store_trade_into_db(exchange: str, symbol: str, trade: np.ndarray) -> None:
    """Would persist an aggregated trade tick in a background thread.

    NOTE: the unconditional `return` below disables this function; the
    remainder is intentionally dead code.
    """
    return
    d = {
        'id': jh.generate_unique_id(),
        'timestamp': trade[0],
        'price': trade[1],
        'buy_qty': trade[2],
        'sell_qty': trade[3],
        'buy_count': trade[4],
        'sell_count': trade[5],
        'symbol': symbol,
        'exchange': exchange,
    }
    def async_save() -> None:
        Trade.insert(**d).on_conflict_ignore().execute()
        print(
            jh.color(
                f'trade: {jh.timestamp_to_time(d["timestamp"])}-{exchange}-{symbol}: {trade}',
                'green'
            )
        )
    # async call
    threading.Thread(target=async_save).start()
def store_orderbook_into_db(exchange: str, symbol: str, orderbook: np.ndarray) -> None:
    """Would persist an order-book snapshot in a background thread.

    NOTE: the unconditional `return` below disables this function; the
    remainder is intentionally dead code.
    """
    return
    d = {
        'id': jh.generate_unique_id(),
        'timestamp': jh.now_to_timestamp(),
        'data': orderbook.dumps(),
        'symbol': symbol,
        'exchange': exchange,
    }
    def async_save() -> None:
        Orderbook.insert(**d).on_conflict_ignore().execute()
        print(
            jh.color(
                f'orderbook: {jh.timestamp_to_time(d["timestamp"])}-{exchange}-{symbol}: [{orderbook[0][0][0]}, {orderbook[0][0][1]}], [{orderbook[1][0][0]}, {orderbook[1][0][1]}]',
                'magenta'
            )
        )
    # async call
    threading.Thread(target=async_save).start()
def fetch_candles_from_db(exchange: str, symbol: str, start_date: int, finish_date: int) -> tuple:
    """Load all candles for one exchange/symbol within [start_date, finish_date],
    ordered by ascending timestamp, as a tuple of
    (timestamp, open, close, high, low, volume) rows."""
    query = Candle.select(
        Candle.timestamp, Candle.open, Candle.close, Candle.high, Candle.low,
        Candle.volume
    ).where(
        Candle.timestamp.between(start_date, finish_date),
        Candle.exchange == exchange,
        Candle.symbol == symbol,
    ).order_by(Candle.timestamp.asc())
    return tuple(query.tuples())
| 31.21 | 181 | 0.60942 | import threading
import numpy as np
import jesse.helpers as jh
from jesse.models.Candle import Candle
from jesse.models.CompletedTrade import CompletedTrade
from jesse.models.DailyBalance import DailyBalance
from jesse.models.Order import Order
from jesse.models.Orderbook import Orderbook
from jesse.models.Ticker import Ticker
from jesse.models.Trade import Trade
from jesse.services import logger
def store_candle_into_db(exchange: str, symbol: str, candle: np.ndarray) -> None:
    """Persist one OHLCV candle via a background thread; duplicates are ignored."""
    d = {
        'id': jh.generate_unique_id(),
        'symbol': symbol,
        'exchange': exchange,
        'timestamp': candle[0],
        'open': candle[1],
        'high': candle[3],
        'low': candle[4],
        'close': candle[2],
        'volume': candle[5]
    }
    def async_save() -> None:
        # on_conflict_ignore makes repeated inserts of the same candle harmless
        Candle.insert(**d).on_conflict_ignore().execute()
        print(
            jh.color(
                f"candle: {jh.timestamp_to_time(d['timestamp'])}-{exchange}-{symbol}: {candle}",
                'blue'
            )
        )
    # fire-and-forget so the caller is not blocked on the database write
    threading.Thread(target=async_save).start()
def store_ticker_into_db(exchange: str, symbol: str, ticker: np.ndarray) -> None:
    """Disabled by the unconditional `return`; the rest is intentionally dead code."""
    return
    d = {
        'id': jh.generate_unique_id(),
        'timestamp': ticker[0],
        'last_price': ticker[1],
        'high_price': ticker[2],
        'low_price': ticker[3],
        'volume': ticker[4],
        'symbol': symbol,
        'exchange': exchange,
    }
    def async_save() -> None:
        Ticker.insert(**d).on_conflict_ignore().execute()
        print(
            jh.color(f'ticker: {jh.timestamp_to_time(d["timestamp"])}-{exchange}-{symbol}: {ticker}', 'yellow')
        )
    # background write so the caller is not blocked
    threading.Thread(target=async_save).start()
def store_completed_trade_into_db(completed_trade: CompletedTrade) -> None:
    """Disabled by the unconditional `return`; the rest is intentionally dead code."""
    return
    d = {
        'id': completed_trade.id,
        'strategy_name': completed_trade.strategy_name,
        'symbol': completed_trade.symbol,
        'exchange': completed_trade.exchange,
        'type': completed_trade.type,
        'timeframe': completed_trade.timeframe,
        'entry_price': completed_trade.entry_price,
        'exit_price': completed_trade.exit_price,
        'take_profit_at': completed_trade.take_profit_at,
        'stop_loss_at': completed_trade.stop_loss_at,
        'qty': completed_trade.qty,
        'opened_at': completed_trade.opened_at,
        'closed_at': completed_trade.closed_at,
        'entry_candle_timestamp': completed_trade.entry_candle_timestamp,
        'exit_candle_timestamp': completed_trade.exit_candle_timestamp,
        'leverage': completed_trade.leverage,
    }
    def async_save() -> None:
        CompletedTrade.insert(**d).execute()
        if jh.is_debugging():
            logger.info(f'Stored the completed trade record for {completed_trade.exchange}-{completed_trade.symbol}-{completed_trade.strategy_name} into database.')
    # background write so the caller is not blocked
    threading.Thread(target=async_save).start()
def store_order_into_db(order: Order) -> None:
    """Disabled by the unconditional `return`; the rest is intentionally dead code."""
    return
    d = {
        'id': order.id,
        'trade_id': order.trade_id,
        'exchange_id': order.exchange_id,
        'vars': order.vars,
        'symbol': order.symbol,
        'exchange': order.exchange,
        'side': order.side,
        'type': order.type,
        'flag': order.flag,
        'qty': order.qty,
        'price': order.price,
        'status': order.status,
        'created_at': order.created_at,
        'executed_at': order.executed_at,
        'canceled_at': order.canceled_at,
        'role': order.role,
    }
    def async_save() -> None:
        Order.insert(**d).execute()
        if jh.is_debugging():
            logger.info(f'Stored the executed order record for {order.exchange}-{order.symbol} into database.')
    # background write so the caller is not blocked
    threading.Thread(target=async_save).start()
def store_daily_balance_into_db(daily_balance: dict) -> None:
    """Disabled by the unconditional `return`; the rest is intentionally dead code."""
    return
    def async_save():
        DailyBalance.insert(**daily_balance).execute()
        if jh.is_debugging():
            logger.info(f'Stored daily portfolio balance record into the database: {daily_balance["asset"]} => {jh.format_currency(round(daily_balance["balance"], 2))}'
                        )
    # background write so the caller is not blocked
    threading.Thread(target=async_save).start()
def store_trade_into_db(exchange: str, symbol: str, trade: np.ndarray) -> None:
    """Disabled by the unconditional `return`; the rest is intentionally dead code."""
    return
    d = {
        'id': jh.generate_unique_id(),
        'timestamp': trade[0],
        'price': trade[1],
        'buy_qty': trade[2],
        'sell_qty': trade[3],
        'buy_count': trade[4],
        'sell_count': trade[5],
        'symbol': symbol,
        'exchange': exchange,
    }
    def async_save() -> None:
        Trade.insert(**d).on_conflict_ignore().execute()
        print(
            jh.color(
                f'trade: {jh.timestamp_to_time(d["timestamp"])}-{exchange}-{symbol}: {trade}',
                'green'
            )
        )
    # background write so the caller is not blocked
    threading.Thread(target=async_save).start()
def store_orderbook_into_db(exchange: str, symbol: str, orderbook: np.ndarray) -> None:
    """Disabled by the unconditional `return`; the rest is intentionally dead code."""
    return
    d = {
        'id': jh.generate_unique_id(),
        'timestamp': jh.now_to_timestamp(),
        'data': orderbook.dumps(),
        'symbol': symbol,
        'exchange': exchange,
    }
    def async_save() -> None:
        Orderbook.insert(**d).on_conflict_ignore().execute()
        print(
            jh.color(
                f'orderbook: {jh.timestamp_to_time(d["timestamp"])}-{exchange}-{symbol}: [{orderbook[0][0][0]}, {orderbook[0][0][1]}], [{orderbook[1][0][0]}, {orderbook[1][0][1]}]',
                'magenta'
            )
        )
    # background write so the caller is not blocked
    threading.Thread(target=async_save).start()
def fetch_candles_from_db(exchange: str, symbol: str, start_date: int, finish_date: int) -> tuple:
    """Return (timestamp, open, close, high, low, volume) rows for the symbol on
    the exchange within the inclusive timestamp range, ordered by time."""
    selection = (
        Candle.select(
            Candle.timestamp, Candle.open, Candle.close, Candle.high, Candle.low,
            Candle.volume
        )
        .where(
            Candle.timestamp.between(start_date, finish_date),
            Candle.exchange == exchange,
            Candle.symbol == symbol,
        )
        .order_by(Candle.timestamp.asc())
    )
    return tuple(selection.tuples())
| true | true |
f72fe7bf8580c7c8f15d68a00c11795a0b14058e | 23,210 | py | Python | vyper/semantics/validation/local.py | Doc-Pixel/vyper | 4da1090d5ed9c339fdd402e987db760d7d63c088 | [
"Apache-2.0"
] | null | null | null | vyper/semantics/validation/local.py | Doc-Pixel/vyper | 4da1090d5ed9c339fdd402e987db760d7d63c088 | [
"Apache-2.0"
] | null | null | null | vyper/semantics/validation/local.py | Doc-Pixel/vyper | 4da1090d5ed9c339fdd402e987db760d7d63c088 | [
"Apache-2.0"
] | null | null | null | import copy
from typing import Optional
from vyper import ast as vy_ast
from vyper.ast.validation import validate_call_args
from vyper.exceptions import (
ExceptionList,
FunctionDeclarationException,
ImmutableViolation,
InvalidLiteral,
InvalidOperation,
InvalidType,
IteratorException,
NonPayableViolation,
StateAccessViolation,
StructureException,
TypeMismatch,
VariableDeclarationException,
VyperException,
)
# TODO consolidate some of these imports
from vyper.semantics.environment import CONSTANT_ENVIRONMENT_VARS, MUTABLE_ENVIRONMENT_VARS
from vyper.semantics.namespace import get_namespace
from vyper.semantics.types.abstract import IntegerAbstractType
from vyper.semantics.types.bases import DataLocation
from vyper.semantics.types.function import (
ContractFunction,
MemberFunctionDefinition,
StateMutability,
)
from vyper.semantics.types.indexable.mapping import MappingDefinition
from vyper.semantics.types.indexable.sequence import (
ArrayDefinition,
DynamicArrayDefinition,
TupleDefinition,
)
from vyper.semantics.types.user.event import Event
from vyper.semantics.types.utils import get_type_from_annotation
from vyper.semantics.types.value.address import AddressDefinition
from vyper.semantics.types.value.array_value import StringDefinition
from vyper.semantics.types.value.boolean import BoolDefinition
from vyper.semantics.validation.annotation import StatementAnnotationVisitor
from vyper.semantics.validation.base import VyperNodeVisitorBase
from vyper.semantics.validation.utils import (
get_common_types,
get_exact_type_from_node,
get_possible_types_from_node,
validate_expected_type,
)
def validate_functions(vy_module: vy_ast.Module) -> None:
    """Validate the local (function-level) namespace of every function in a module.

    Every function is visited even if an earlier one fails, so all errors can
    be collected and raised together.
    """
    errors = ExceptionList()
    namespace = get_namespace()

    for fn_node in vy_module.get_children(vy_ast.FunctionDef):
        with namespace.enter_scope():
            try:
                FunctionNodeVisitor(vy_module, fn_node, namespace)
            except VyperException as exc:
                errors.append(exc)

    errors.raise_if_not_empty()
def _is_terminus_node(node: vy_ast.VyperNode) -> bool:
    """True if `node` ends execution: either the node itself is flagged as a
    terminus, or it is an expression statement calling a terminus function."""
    if getattr(node, "_is_terminus", None):
        return True

    is_call_stmt = isinstance(node, vy_ast.Expr) and isinstance(node.value, vy_ast.Call)
    if not is_call_stmt:
        return False

    func_type = get_exact_type_from_node(node.value.func)
    return bool(getattr(func_type, "_is_terminus", None))
def check_for_terminus(node_list: list) -> bool:
    """Return True if this statement list is guaranteed to terminate
    (via return/raise or an equivalent terminus statement)."""
    # a terminus statement directly in the body is sufficient
    if next((i for i in node_list if _is_terminus_node(i)), None):
        return True
    # otherwise look for an if/else where *both* branches terminate;
    # iterate in reverse so later statements are considered first
    for node in [i for i in node_list if isinstance(i, vy_ast.If)][::-1]:
        if not node.orelse or not check_for_terminus(node.orelse):
            continue
        if not check_for_terminus(node.body):
            continue
        return True
    return False
def _check_iterator_modification(
    target_node: vy_ast.VyperNode, search_node: vy_ast.VyperNode
) -> Optional[vy_ast.VyperNode]:
    """Search `search_node` for statements that modify the iterated value
    `target_node` (assignment, or append/pop/extend calls on it).

    Returns the offending node, or None if no modification is found.
    """
    # all nodes within the search scope that are structurally equal to the target
    similar_nodes = [
        n
        for n in search_node.get_descendants(type(target_node))
        if vy_ast.compare_nodes(target_node, n)
    ]
    for node in similar_nodes:
        # raise if the node is the target of an assignment statement
        assign_node = node.get_ancestor((vy_ast.Assign, vy_ast.AugAssign))
        # note the use of get_descendants() blocks statements like
        # self.my_array[i] = x
        if assign_node and node in assign_node.target.get_descendants(include_self=True):
            return node
        attr_node = node.get_ancestor(vy_ast.Attribute)
        # note the use of get_descendants() blocks statements like
        # self.my_array[i].append(x)
        if (
            attr_node is not None
            and node in attr_node.value.get_descendants(include_self=True)
            and attr_node.attr in ("append", "pop", "extend")
        ):
            return node
    return None
def _validate_revert_reason(msg_node: vy_ast.VyperNode) -> None:
    """Validate a revert reason: a non-empty string literal, the UNREACHABLE
    sentinel, or any expression typable as String[1024]."""
    if not msg_node:
        return

    if isinstance(msg_node, vy_ast.Str):
        # literal reasons need only be non-empty
        if not msg_node.value.strip():
            raise StructureException("Reason string cannot be empty", msg_node)
        return

    if isinstance(msg_node, vy_ast.Name) and msg_node.id == "UNREACHABLE":
        # the UNREACHABLE sentinel is always acceptable
        return

    try:
        validate_expected_type(msg_node, StringDefinition(1024))
    except TypeMismatch as e:
        raise InvalidType("revert reason must fit within String[1024]") from e
def _validate_address_code_attribute(node: vy_ast.Attribute) -> None:
    """Restrict `<address>.code` to `slice(<address>.code, start, <int literal>)`."""
    value_type = get_exact_type_from_node(node.value)
    if not isinstance(value_type, AddressDefinition) or node.attr != "code":
        return

    parent = node.get_ancestor()
    if isinstance(parent, vy_ast.Call):
        is_slice_call = isinstance(parent.func, vy_ast.Name) and parent.func.id == "slice"
        has_const_length = len(parent.args) == 3 and isinstance(parent.args[2], vy_ast.Int)
        if is_slice_call and has_const_length:
            return

    raise StructureException(
        "(address).code is only allowed inside of a slice function with a constant length",
        node,
    )
def _validate_msg_data_attribute(node: vy_ast.Attribute) -> None:
    """Restrict `msg.data` usage to `len(msg.data)` or
    `slice(msg.data, start, <int literal>)`."""
    if isinstance(node.value, vy_ast.Name) and node.value.id == "msg" and node.attr == "data":
        parent = node.get_ancestor()
        if not isinstance(parent, vy_ast.Call) or parent.get("func.id") not in ("slice", "len"):
            raise StructureException(
                "msg.data is only allowed inside of the slice or len functions",
                node,
            )
        if parent.get("func.id") == "slice":
            # slice requires a compile-time constant third (length) argument
            ok_args = len(parent.args) == 3 and isinstance(parent.args[2], vy_ast.Int)
            if not ok_args:
                raise StructureException(
                    "slice(msg.data) must use a compile-time constant for length argument",
                    parent,
                )
class FunctionNodeVisitor(VyperNodeVisitorBase):
    """
    Validate the body of a single function definition.

    Construction performs the full analysis: every statement in the function
    body is visited, local variable types are recorded in the namespace, and
    mutability rules (pure / view / payable) are enforced. Violations are
    reported by raising `VyperException` subclasses.
    """
    # statement node types that require no validation of their own
    ignored_types = (
        vy_ast.Break,
        vy_ast.Constant,
        vy_ast.Pass,
    )
    scope_name = "function"
    def __init__(
        self, vyper_module: vy_ast.Module, fn_node: vy_ast.FunctionDef, namespace: dict
    ) -> None:
        """
        Analyze `fn_node` immediately upon construction.

        Arguments
        ---------
        vyper_module : vy_ast.Module
            Top-level module AST; used to resolve calls to sibling functions.
        fn_node : vy_ast.FunctionDef
            The function definition to validate.
        namespace : dict
            Mutable namespace for this function's scope; the function's
            arguments are inserted into it here.
        """
        self.vyper_module = vyper_module
        self.fn_node = fn_node
        self.namespace = namespace
        self.func = fn_node._metadata["type"]
        self.annotation_visitor = StatementAnnotationVisitor(fn_node, namespace)
        self.expr_visitor = _LocalExpressionVisitor()
        namespace.update(self.func.arguments)
        for node in fn_node.body:
            self.visit(node)
        # a function with a declared return type must terminate on every path
        if self.func.return_type:
            if not check_for_terminus(fn_node.body):
                raise FunctionDeclarationException(
                    f"Missing or unmatched return statements in function '{fn_node.name}'",
                    fn_node,
                )
        # pure functions may not read environment variables (block, msg, tx, ...)
        if self.func.mutability == StateMutability.PURE:
            node_list = fn_node.get_descendants(
                vy_ast.Attribute,
                {
                    "value.id": set(CONSTANT_ENVIRONMENT_VARS.keys()).union(
                        set(MUTABLE_ENVIRONMENT_VARS.keys())
                    )
                },
            )
            for node in node_list:
                t = node._metadata.get("type")
                if isinstance(t, ContractFunction) and t.mutability == StateMutability.PURE:
                    # allowed: a `self.<fn>` call where the callee is itself pure
                    continue
                raise StateAccessViolation(
                    "not allowed to query contract or environment variables in pure functions",
                    node_list[0],
                )
        # only payable functions may reference msg.value
        if self.func.mutability is not StateMutability.PAYABLE:
            node_list = fn_node.get_descendants(
                vy_ast.Attribute, {"value.id": "msg", "attr": "value"}
            )
            if node_list:
                raise NonPayableViolation(
                    "msg.value is not allowed in non-payable functions", node_list[0]
                )
    def visit(self, node):
        """Validate `node`, then annotate it with resolved type information."""
        super().visit(node)
        self.annotation_visitor.visit(node)
    def visit_AnnAssign(self, node):
        """Validate a local variable declaration, e.g. `x: uint256 = 1`."""
        name = node.get("target.id")
        if name is None:
            raise VariableDeclarationException("Invalid assignment", node)
        if not node.value:
            raise VariableDeclarationException(
                "Memory variables must be declared with an initial value", node
            )
        type_definition = get_type_from_annotation(node.annotation, DataLocation.MEMORY)
        validate_expected_type(node.value, type_definition)
        try:
            # record the new local; raises on shadowing / reserved names
            self.namespace[name] = type_definition
        except VyperException as exc:
            raise exc.with_annotation(node) from None
        self.expr_visitor.visit(node.value)
    def visit_Assign(self, node):
        """Validate an assignment to an existing target, e.g. `x = y + 1`."""
        if isinstance(node.value, vy_ast.Tuple):
            raise StructureException("Right-hand side of assignment cannot be a tuple", node.value)
        target = get_exact_type_from_node(node.target)
        if isinstance(target, MappingDefinition):
            raise StructureException(
                "Left-hand side of assignment cannot be a HashMap without a key", node
            )
        validate_expected_type(node.value, target)
        # rejects writes to constants / storage from view-or-stricter functions
        target.validate_modification(node, self.func.mutability)
        self.expr_visitor.visit(node.value)
        self.expr_visitor.visit(node.target)
    def visit_AugAssign(self, node):
        """Validate an augmented assignment, e.g. `x += 1`."""
        if isinstance(node.value, vy_ast.Tuple):
            raise StructureException("Right-hand side of assignment cannot be a tuple", node.value)
        target = get_exact_type_from_node(node.target)
        validate_expected_type(node.value, target)
        target.validate_modification(node, self.func.mutability)
        self.expr_visitor.visit(node.value)
    def visit_Raise(self, node):
        """Validate a `raise` statement and its optional reason."""
        if node.exc:
            _validate_revert_reason(node.exc)
            self.expr_visitor.visit(node.exc)
    def visit_Assert(self, node):
        """Validate an `assert` statement: optional reason plus a boolean test."""
        if node.msg:
            _validate_revert_reason(node.msg)
            self.expr_visitor.visit(node.msg)
        try:
            validate_expected_type(node.test, BoolDefinition())
        except InvalidType:
            raise InvalidType("Assertion test value must be a boolean", node.test)
        self.expr_visitor.visit(node.test)
    def visit_Continue(self, node):
        """Ensure `continue` only appears inside a `for` loop."""
        for_node = node.get_ancestor(vy_ast.For)
        if for_node is None:
            raise StructureException("`continue` must be enclosed in a `for` loop", node)
    def visit_Return(self, node):
        """Validate a `return` statement against the function's declared return type."""
        values = node.value
        if values is None:
            if self.func.return_type:
                raise FunctionDeclarationException("Return statement is missing a value", node)
            return
        elif self.func.return_type is None:
            raise FunctionDeclarationException("Function does not return any values", node)
        if isinstance(values, vy_ast.Tuple):
            values = values.elements
            if not isinstance(self.func.return_type, TupleDefinition):
                raise FunctionDeclarationException("Function only returns a single value", node)
            if self.func.return_type.length != len(values):
                raise FunctionDeclarationException(
                    f"Incorrect number of return values: "
                    f"expected {self.func.return_type.length}, got {len(values)}",
                    node,
                )
            # element-wise check of each returned value against the tuple member type
            for given, expected in zip(values, self.func.return_type.value_type):
                validate_expected_type(given, expected)
        else:
            validate_expected_type(values, self.func.return_type)
        self.expr_visitor.visit(node.value)
    def visit_If(self, node):
        """Validate an `if` statement; each branch gets its own scope."""
        validate_expected_type(node.test, BoolDefinition())
        self.expr_visitor.visit(node.test)
        with self.namespace.enter_scope():
            for n in node.body:
                self.visit(n)
        with self.namespace.enter_scope():
            for n in node.orelse:
                self.visit(n)
    def visit_For(self, node):
        """
        Validate a `for` loop.

        Handles both `range(...)` iteration and iteration over arrays.
        Because the iterator's type may be ambiguous, the loop body is
        type-checked once per candidate type until one succeeds; if all
        candidates fail, an aggregate error is raised.
        """
        if isinstance(node.iter, vy_ast.Subscript):
            raise StructureException("Cannot iterate over a nested list", node.iter)
        if isinstance(node.iter, vy_ast.Call):
            # iteration via range()
            if node.iter.get("func.id") != "range":
                raise IteratorException(
                    "Cannot iterate over the result of a function call", node.iter
                )
            validate_call_args(node.iter, (1, 2))
            args = node.iter.args
            if len(args) == 1:
                # range(CONSTANT)
                if not isinstance(args[0], vy_ast.Num):
                    raise StateAccessViolation("Value must be a literal", node)
                if args[0].value <= 0:
                    raise StructureException("For loop must have at least 1 iteration", args[0])
                validate_expected_type(args[0], IntegerAbstractType())
                type_list = get_possible_types_from_node(args[0])
            else:
                validate_expected_type(args[0], IntegerAbstractType())
                type_list = get_common_types(*args)
                if not isinstance(args[0], vy_ast.Constant):
                    # range(x, x + CONSTANT)
                    if not isinstance(args[1], vy_ast.BinOp) or not isinstance(
                        args[1].op, vy_ast.Add
                    ):
                        raise StructureException(
                            "Second element must be the first element plus a literal value",
                            args[0],
                        )
                    if not vy_ast.compare_nodes(args[0], args[1].left):
                        raise StructureException(
                            "First and second variable must be the same", args[1].left
                        )
                    if not isinstance(args[1].right, vy_ast.Int):
                        raise InvalidLiteral("Literal must be an integer", args[1].right)
                    if args[1].right.value < 1:
                        raise StructureException(
                            f"For loop has invalid number of iterations ({args[1].right.value}),"
                            " the value must be greater than zero",
                            args[1].right,
                        )
                else:
                    # range(CONSTANT, CONSTANT)
                    if not isinstance(args[1], vy_ast.Int):
                        raise InvalidType("Value must be a literal integer", args[1])
                    validate_expected_type(args[1], IntegerAbstractType())
                    if args[0].value >= args[1].value:
                        raise StructureException("Second value must be > first value", args[1])
        else:
            # iteration over a variable or literal list
            type_list = [
                i.value_type
                for i in get_possible_types_from_node(node.iter)
                if isinstance(i, (DynamicArrayDefinition, ArrayDefinition))
            ]
        if not type_list:
            raise InvalidType("Not an iterable type", node.iter)
        if isinstance(node.iter, (vy_ast.Name, vy_ast.Attribute)):
            # check for references to the iterated value within the body of the loop
            assign = _check_iterator_modification(node.iter, node)
            if assign:
                raise ImmutableViolation("Cannot modify array during iteration", assign)
        # Check if `iter` is a storage variable. get_descendants` is used to check for
        # nested `self` (e.g. structs)
        iter_is_storage_var = (
            isinstance(node.iter, vy_ast.Attribute)
            and len(node.iter.get_descendants(vy_ast.Name, {"id": "self"})) > 0
        )
        if iter_is_storage_var:
            # check if iterated value may be modified by function calls inside the loop
            iter_name = node.iter.attr
            for call_node in node.get_descendants(vy_ast.Call, {"func.value.id": "self"}):
                fn_name = call_node.func.attr
                fn_node = self.vyper_module.get_children(vy_ast.FunctionDef, {"name": fn_name})[0]
                if _check_iterator_modification(node.iter, fn_node):
                    # check for direct modification
                    raise ImmutableViolation(
                        f"Cannot call '{fn_name}' inside for loop, it potentially "
                        f"modifies iterated storage variable '{iter_name}'",
                        call_node,
                    )
                for name in self.namespace["self"].members[fn_name].recursive_calls:
                    # check for indirect modification
                    fn_node = self.vyper_module.get_children(vy_ast.FunctionDef, {"name": name})[0]
                    if _check_iterator_modification(node.iter, fn_node):
                        raise ImmutableViolation(
                            f"Cannot call '{fn_name}' inside for loop, it may call to '{name}' "
                            f"which potentially modifies iterated storage variable '{iter_name}'",
                            call_node,
                        )
        self.expr_visitor.visit(node.iter)
        for_loop_exceptions = []
        iter_name = node.target.id
        for type_ in type_list:
            # type check the for loop body using each possible type for iterator value
            type_ = copy.deepcopy(type_)
            type_.is_constant = True
            with self.namespace.enter_scope():
                try:
                    self.namespace[iter_name] = type_
                except VyperException as exc:
                    raise exc.with_annotation(node) from None
                try:
                    for n in node.body:
                        self.visit(n)
                    # type information is applied directly because the scope is
                    # closed prior to the call to `StatementAnnotationVisitor`
                    node.target._metadata["type"] = type_
                    return
                except (TypeMismatch, InvalidOperation) as exc:
                    for_loop_exceptions.append(exc)
        if len(set(str(i) for i in for_loop_exceptions)) == 1:
            # if every attempt at type checking raised the same exception
            raise for_loop_exceptions[0]
        # return an aggregate TypeMismatch that shows all possible exceptions
        # depending on which type is used
        types_str = [str(i) for i in type_list]
        # NOTE(review): `types_str[:1]` keeps only the first candidate type;
        # `[:-1]` looks like the intent when more than two types exist — confirm upstream
        given_str = f"{', '.join(types_str[:1])} or {types_str[-1]}"
        raise TypeMismatch(
            f"Iterator value '{iter_name}' may be cast as {given_str}, "
            "but type checking fails with all possible types:",
            node,
            *(
                (f"Casting '{iter_name}' as {type_}: {exc.message}", exc.annotations[0])
                for type_, exc in zip(type_list, for_loop_exceptions)
            ),
        )
    def visit_Expr(self, node):
        """Validate a bare expression statement; only certain calls are allowed."""
        if not isinstance(node.value, vy_ast.Call):
            raise StructureException("Expressions without assignment are disallowed", node)
        fn_type = get_exact_type_from_node(node.value.func)
        if isinstance(fn_type, Event):
            raise StructureException("To call an event you must use the `log` statement", node)
        if isinstance(fn_type, ContractFunction):
            # enforce mutability compatibility between caller and callee
            if (
                fn_type.mutability > StateMutability.VIEW
                and self.func.mutability <= StateMutability.VIEW
            ):
                raise StateAccessViolation(
                    f"Cannot call a mutating function from a {self.func.mutability.value} function",
                    node,
                )
            if (
                self.func.mutability == StateMutability.PURE
                and fn_type.mutability != StateMutability.PURE
            ):
                raise StateAccessViolation(
                    "Cannot call non-pure function from a pure function", node
                )
        if isinstance(fn_type, MemberFunctionDefinition) and fn_type.is_modifying:
            # e.g. `x.append(...)` mutates `x`; check it is allowed here
            fn_type.underlying_type.validate_modification(node, self.func.mutability)
        # NOTE: fetch_call_return validates call args.
        return_value = fn_type.fetch_call_return(node.value)
        if (
            return_value
            and not isinstance(fn_type, MemberFunctionDefinition)
            and not isinstance(fn_type, ContractFunction)
        ):
            raise StructureException(
                f"Function '{fn_type._id}' cannot be called without assigning the result", node
            )
        self.expr_visitor.visit(node.value)
    def visit_Log(self, node):
        """Validate a `log` statement: the value must be an event call."""
        if not isinstance(node.value, vy_ast.Call):
            raise StructureException("Log must call an event", node)
        event = get_exact_type_from_node(node.value.func)
        if not isinstance(event, Event):
            raise StructureException("Value is not an event", node.value)
        event.fetch_call_return(node.value)
        self.expr_visitor.visit(node.value)
class _LocalExpressionVisitor(VyperNodeVisitorBase):
    """Recursively walk an expression tree, applying attribute-usage checks.

    Each `visit_*` method descends into the node's sub-expressions;
    `visit_Attribute` additionally validates `msg.data` and `<address>.code`.
    """
    ignored_types = (vy_ast.Constant, vy_ast.Name)
    scope_name = "function"
    def visit_Attribute(self, node: vy_ast.Attribute) -> None:
        self.visit(node.value)
        _validate_msg_data_attribute(node)
        _validate_address_code_attribute(node)
    def visit_BinOp(self, node: vy_ast.BinOp) -> None:
        for operand in (node.left, node.right):
            self.visit(operand)
    def visit_BoolOp(self, node: vy_ast.BoolOp) -> None:
        for operand in node.values:  # type: ignore[attr-defined]
            self.visit(operand)
    def visit_Call(self, node: vy_ast.Call) -> None:
        self.visit(node.func)
        for positional in node.args:
            self.visit(positional)
        for keyword in node.keywords:
            self.visit(keyword.value)
    def visit_Compare(self, node: vy_ast.Compare) -> None:
        for operand in (node.left, node.right):  # type: ignore[attr-defined]
            self.visit(operand)
    def visit_Dict(self, node: vy_ast.Dict) -> None:
        for child in node.keys:
            self.visit(child)
        for child in node.values:
            self.visit(child)
    def visit_Index(self, node: vy_ast.Index) -> None:
        self.visit(node.value)
    def visit_List(self, node: vy_ast.List) -> None:
        for child in node.elements:
            self.visit(child)
    def visit_Subscript(self, node: vy_ast.Subscript) -> None:
        self.visit(node.value)
        self.visit(node.slice)
    def visit_Tuple(self, node: vy_ast.Tuple) -> None:
        for child in node.elements:
            self.visit(child)
    def visit_UnaryOp(self, node: vy_ast.UnaryOp) -> None:
        self.visit(node.operand)  # type: ignore[attr-defined]
| 40.43554 | 100 | 0.609823 | import copy
from typing import Optional
from vyper import ast as vy_ast
from vyper.ast.validation import validate_call_args
from vyper.exceptions import (
ExceptionList,
FunctionDeclarationException,
ImmutableViolation,
InvalidLiteral,
InvalidOperation,
InvalidType,
IteratorException,
NonPayableViolation,
StateAccessViolation,
StructureException,
TypeMismatch,
VariableDeclarationException,
VyperException,
)
from vyper.semantics.environment import CONSTANT_ENVIRONMENT_VARS, MUTABLE_ENVIRONMENT_VARS
from vyper.semantics.namespace import get_namespace
from vyper.semantics.types.abstract import IntegerAbstractType
from vyper.semantics.types.bases import DataLocation
from vyper.semantics.types.function import (
ContractFunction,
MemberFunctionDefinition,
StateMutability,
)
from vyper.semantics.types.indexable.mapping import MappingDefinition
from vyper.semantics.types.indexable.sequence import (
ArrayDefinition,
DynamicArrayDefinition,
TupleDefinition,
)
from vyper.semantics.types.user.event import Event
from vyper.semantics.types.utils import get_type_from_annotation
from vyper.semantics.types.value.address import AddressDefinition
from vyper.semantics.types.value.array_value import StringDefinition
from vyper.semantics.types.value.boolean import BoolDefinition
from vyper.semantics.validation.annotation import StatementAnnotationVisitor
from vyper.semantics.validation.base import VyperNodeVisitorBase
from vyper.semantics.validation.utils import (
get_common_types,
get_exact_type_from_node,
get_possible_types_from_node,
validate_expected_type,
)
def validate_functions(vy_module: vy_ast.Module) -> None:
err_list = ExceptionList()
namespace = get_namespace()
for node in vy_module.get_children(vy_ast.FunctionDef):
with namespace.enter_scope():
try:
FunctionNodeVisitor(vy_module, node, namespace)
except VyperException as e:
err_list.append(e)
err_list.raise_if_not_empty()
def _is_terminus_node(node: vy_ast.VyperNode) -> bool:
if getattr(node, "_is_terminus", None):
return True
if isinstance(node, vy_ast.Expr) and isinstance(node.value, vy_ast.Call):
func = get_exact_type_from_node(node.value.func)
if getattr(func, "_is_terminus", None):
return True
return False
def check_for_terminus(node_list: list) -> bool:
if next((i for i in node_list if _is_terminus_node(i)), None):
return True
for node in [i for i in node_list if isinstance(i, vy_ast.If)][::-1]:
if not node.orelse or not check_for_terminus(node.orelse):
continue
if not check_for_terminus(node.body):
continue
return True
return False
def _check_iterator_modification(
target_node: vy_ast.VyperNode, search_node: vy_ast.VyperNode
) -> Optional[vy_ast.VyperNode]:
similar_nodes = [
n
for n in search_node.get_descendants(type(target_node))
if vy_ast.compare_nodes(target_node, n)
]
for node in similar_nodes:
assign_node = node.get_ancestor((vy_ast.Assign, vy_ast.AugAssign))
if assign_node and node in assign_node.target.get_descendants(include_self=True):
return node
attr_node = node.get_ancestor(vy_ast.Attribute)
if (
attr_node is not None
and node in attr_node.value.get_descendants(include_self=True)
and attr_node.attr in ("append", "pop", "extend")
):
return node
return None
def _validate_revert_reason(msg_node: vy_ast.VyperNode) -> None:
if msg_node:
if isinstance(msg_node, vy_ast.Str):
if not msg_node.value.strip():
raise StructureException("Reason string cannot be empty", msg_node)
elif not (isinstance(msg_node, vy_ast.Name) and msg_node.id == "UNREACHABLE"):
try:
validate_expected_type(msg_node, StringDefinition(1024))
except TypeMismatch as e:
raise InvalidType("revert reason must fit within String[1024]") from e
def _validate_address_code_attribute(node: vy_ast.Attribute) -> None:
value_type = get_exact_type_from_node(node.value)
if isinstance(value_type, AddressDefinition) and node.attr == "code":
parent = node.get_ancestor()
if isinstance(parent, vy_ast.Call):
ok_func = isinstance(parent.func, vy_ast.Name) and parent.func.id == "slice"
ok_args = len(parent.args) == 3 and isinstance(parent.args[2], vy_ast.Int)
if ok_func and ok_args:
return
raise StructureException(
"(address).code is only allowed inside of a slice function with a constant length",
node,
)
def _validate_msg_data_attribute(node: vy_ast.Attribute) -> None:
if isinstance(node.value, vy_ast.Name) and node.value.id == "msg" and node.attr == "data":
parent = node.get_ancestor()
if not isinstance(parent, vy_ast.Call) or parent.get("func.id") not in ("slice", "len"):
raise StructureException(
"msg.data is only allowed inside of the slice or len functions",
node,
)
if parent.get("func.id") == "slice":
ok_args = len(parent.args) == 3 and isinstance(parent.args[2], vy_ast.Int)
if not ok_args:
raise StructureException(
"slice(msg.data) must use a compile-time constant for length argument",
parent,
)
class FunctionNodeVisitor(VyperNodeVisitorBase):
ignored_types = (
vy_ast.Break,
vy_ast.Constant,
vy_ast.Pass,
)
scope_name = "function"
def __init__(
self, vyper_module: vy_ast.Module, fn_node: vy_ast.FunctionDef, namespace: dict
) -> None:
self.vyper_module = vyper_module
self.fn_node = fn_node
self.namespace = namespace
self.func = fn_node._metadata["type"]
self.annotation_visitor = StatementAnnotationVisitor(fn_node, namespace)
self.expr_visitor = _LocalExpressionVisitor()
namespace.update(self.func.arguments)
for node in fn_node.body:
self.visit(node)
if self.func.return_type:
if not check_for_terminus(fn_node.body):
raise FunctionDeclarationException(
f"Missing or unmatched return statements in function '{fn_node.name}'",
fn_node,
)
if self.func.mutability == StateMutability.PURE:
node_list = fn_node.get_descendants(
vy_ast.Attribute,
{
"value.id": set(CONSTANT_ENVIRONMENT_VARS.keys()).union(
set(MUTABLE_ENVIRONMENT_VARS.keys())
)
},
)
for node in node_list:
t = node._metadata.get("type")
if isinstance(t, ContractFunction) and t.mutability == StateMutability.PURE:
continue
raise StateAccessViolation(
"not allowed to query contract or environment variables in pure functions",
node_list[0],
)
if self.func.mutability is not StateMutability.PAYABLE:
node_list = fn_node.get_descendants(
vy_ast.Attribute, {"value.id": "msg", "attr": "value"}
)
if node_list:
raise NonPayableViolation(
"msg.value is not allowed in non-payable functions", node_list[0]
)
def visit(self, node):
super().visit(node)
self.annotation_visitor.visit(node)
def visit_AnnAssign(self, node):
name = node.get("target.id")
if name is None:
raise VariableDeclarationException("Invalid assignment", node)
if not node.value:
raise VariableDeclarationException(
"Memory variables must be declared with an initial value", node
)
type_definition = get_type_from_annotation(node.annotation, DataLocation.MEMORY)
validate_expected_type(node.value, type_definition)
try:
self.namespace[name] = type_definition
except VyperException as exc:
raise exc.with_annotation(node) from None
self.expr_visitor.visit(node.value)
def visit_Assign(self, node):
if isinstance(node.value, vy_ast.Tuple):
raise StructureException("Right-hand side of assignment cannot be a tuple", node.value)
target = get_exact_type_from_node(node.target)
if isinstance(target, MappingDefinition):
raise StructureException(
"Left-hand side of assignment cannot be a HashMap without a key", node
)
validate_expected_type(node.value, target)
target.validate_modification(node, self.func.mutability)
self.expr_visitor.visit(node.value)
self.expr_visitor.visit(node.target)
def visit_AugAssign(self, node):
if isinstance(node.value, vy_ast.Tuple):
raise StructureException("Right-hand side of assignment cannot be a tuple", node.value)
target = get_exact_type_from_node(node.target)
validate_expected_type(node.value, target)
target.validate_modification(node, self.func.mutability)
self.expr_visitor.visit(node.value)
def visit_Raise(self, node):
if node.exc:
_validate_revert_reason(node.exc)
self.expr_visitor.visit(node.exc)
def visit_Assert(self, node):
if node.msg:
_validate_revert_reason(node.msg)
self.expr_visitor.visit(node.msg)
try:
validate_expected_type(node.test, BoolDefinition())
except InvalidType:
raise InvalidType("Assertion test value must be a boolean", node.test)
self.expr_visitor.visit(node.test)
def visit_Continue(self, node):
for_node = node.get_ancestor(vy_ast.For)
if for_node is None:
raise StructureException("`continue` must be enclosed in a `for` loop", node)
def visit_Return(self, node):
values = node.value
if values is None:
if self.func.return_type:
raise FunctionDeclarationException("Return statement is missing a value", node)
return
elif self.func.return_type is None:
raise FunctionDeclarationException("Function does not return any values", node)
if isinstance(values, vy_ast.Tuple):
values = values.elements
if not isinstance(self.func.return_type, TupleDefinition):
raise FunctionDeclarationException("Function only returns a single value", node)
if self.func.return_type.length != len(values):
raise FunctionDeclarationException(
f"Incorrect number of return values: "
f"expected {self.func.return_type.length}, got {len(values)}",
node,
)
for given, expected in zip(values, self.func.return_type.value_type):
validate_expected_type(given, expected)
else:
validate_expected_type(values, self.func.return_type)
self.expr_visitor.visit(node.value)
def visit_If(self, node):
validate_expected_type(node.test, BoolDefinition())
self.expr_visitor.visit(node.test)
with self.namespace.enter_scope():
for n in node.body:
self.visit(n)
with self.namespace.enter_scope():
for n in node.orelse:
self.visit(n)
def visit_For(self, node):
if isinstance(node.iter, vy_ast.Subscript):
raise StructureException("Cannot iterate over a nested list", node.iter)
if isinstance(node.iter, vy_ast.Call):
if node.iter.get("func.id") != "range":
raise IteratorException(
"Cannot iterate over the result of a function call", node.iter
)
validate_call_args(node.iter, (1, 2))
args = node.iter.args
if len(args) == 1:
if not isinstance(args[0], vy_ast.Num):
raise StateAccessViolation("Value must be a literal", node)
if args[0].value <= 0:
raise StructureException("For loop must have at least 1 iteration", args[0])
validate_expected_type(args[0], IntegerAbstractType())
type_list = get_possible_types_from_node(args[0])
else:
validate_expected_type(args[0], IntegerAbstractType())
type_list = get_common_types(*args)
if not isinstance(args[0], vy_ast.Constant):
if not isinstance(args[1], vy_ast.BinOp) or not isinstance(
args[1].op, vy_ast.Add
):
raise StructureException(
"Second element must be the first element plus a literal value",
args[0],
)
if not vy_ast.compare_nodes(args[0], args[1].left):
raise StructureException(
"First and second variable must be the same", args[1].left
)
if not isinstance(args[1].right, vy_ast.Int):
raise InvalidLiteral("Literal must be an integer", args[1].right)
if args[1].right.value < 1:
raise StructureException(
f"For loop has invalid number of iterations ({args[1].right.value}),"
" the value must be greater than zero",
args[1].right,
)
else:
if not isinstance(args[1], vy_ast.Int):
raise InvalidType("Value must be a literal integer", args[1])
validate_expected_type(args[1], IntegerAbstractType())
if args[0].value >= args[1].value:
raise StructureException("Second value must be > first value", args[1])
else:
type_list = [
i.value_type
for i in get_possible_types_from_node(node.iter)
if isinstance(i, (DynamicArrayDefinition, ArrayDefinition))
]
if not type_list:
raise InvalidType("Not an iterable type", node.iter)
if isinstance(node.iter, (vy_ast.Name, vy_ast.Attribute)):
assign = _check_iterator_modification(node.iter, node)
if assign:
raise ImmutableViolation("Cannot modify array during iteration", assign)
iter_is_storage_var = (
isinstance(node.iter, vy_ast.Attribute)
and len(node.iter.get_descendants(vy_ast.Name, {"id": "self"})) > 0
)
if iter_is_storage_var:
iter_name = node.iter.attr
for call_node in node.get_descendants(vy_ast.Call, {"func.value.id": "self"}):
fn_name = call_node.func.attr
fn_node = self.vyper_module.get_children(vy_ast.FunctionDef, {"name": fn_name})[0]
if _check_iterator_modification(node.iter, fn_node):
raise ImmutableViolation(
f"Cannot call '{fn_name}' inside for loop, it potentially "
f"modifies iterated storage variable '{iter_name}'",
call_node,
)
for name in self.namespace["self"].members[fn_name].recursive_calls:
fn_node = self.vyper_module.get_children(vy_ast.FunctionDef, {"name": name})[0]
if _check_iterator_modification(node.iter, fn_node):
raise ImmutableViolation(
f"Cannot call '{fn_name}' inside for loop, it may call to '{name}' "
f"which potentially modifies iterated storage variable '{iter_name}'",
call_node,
)
self.expr_visitor.visit(node.iter)
for_loop_exceptions = []
iter_name = node.target.id
for type_ in type_list:
type_ = copy.deepcopy(type_)
type_.is_constant = True
with self.namespace.enter_scope():
try:
self.namespace[iter_name] = type_
except VyperException as exc:
raise exc.with_annotation(node) from None
try:
for n in node.body:
self.visit(n)
node.target._metadata["type"] = type_
return
except (TypeMismatch, InvalidOperation) as exc:
for_loop_exceptions.append(exc)
if len(set(str(i) for i in for_loop_exceptions)) == 1:
raise for_loop_exceptions[0]
types_str = [str(i) for i in type_list]
given_str = f"{', '.join(types_str[:1])} or {types_str[-1]}"
raise TypeMismatch(
f"Iterator value '{iter_name}' may be cast as {given_str}, "
"but type checking fails with all possible types:",
node,
*(
(f"Casting '{iter_name}' as {type_}: {exc.message}", exc.annotations[0])
for type_, exc in zip(type_list, for_loop_exceptions)
),
)
def visit_Expr(self, node):
if not isinstance(node.value, vy_ast.Call):
raise StructureException("Expressions without assignment are disallowed", node)
fn_type = get_exact_type_from_node(node.value.func)
if isinstance(fn_type, Event):
raise StructureException("To call an event you must use the `log` statement", node)
if isinstance(fn_type, ContractFunction):
if (
fn_type.mutability > StateMutability.VIEW
and self.func.mutability <= StateMutability.VIEW
):
raise StateAccessViolation(
f"Cannot call a mutating function from a {self.func.mutability.value} function",
node,
)
if (
self.func.mutability == StateMutability.PURE
and fn_type.mutability != StateMutability.PURE
):
raise StateAccessViolation(
"Cannot call non-pure function from a pure function", node
)
if isinstance(fn_type, MemberFunctionDefinition) and fn_type.is_modifying:
fn_type.underlying_type.validate_modification(node, self.func.mutability)
return_value = fn_type.fetch_call_return(node.value)
if (
return_value
and not isinstance(fn_type, MemberFunctionDefinition)
and not isinstance(fn_type, ContractFunction)
):
raise StructureException(
f"Function '{fn_type._id}' cannot be called without assigning the result", node
)
self.expr_visitor.visit(node.value)
def visit_Log(self, node):
if not isinstance(node.value, vy_ast.Call):
raise StructureException("Log must call an event", node)
event = get_exact_type_from_node(node.value.func)
if not isinstance(event, Event):
raise StructureException("Value is not an event", node.value)
event.fetch_call_return(node.value)
self.expr_visitor.visit(node.value)
class _LocalExpressionVisitor(VyperNodeVisitorBase):
ignored_types = (vy_ast.Constant, vy_ast.Name)
scope_name = "function"
def visit_Attribute(self, node: vy_ast.Attribute) -> None:
self.visit(node.value)
_validate_msg_data_attribute(node)
_validate_address_code_attribute(node)
def visit_BinOp(self, node: vy_ast.BinOp) -> None:
self.visit(node.left)
self.visit(node.right)
def visit_BoolOp(self, node: vy_ast.BoolOp) -> None:
for value in node.values:
self.visit(value)
def visit_Call(self, node: vy_ast.Call) -> None:
self.visit(node.func)
for arg in node.args:
self.visit(arg)
for kwarg in node.keywords:
self.visit(kwarg.value)
def visit_Compare(self, node: vy_ast.Compare) -> None:
self.visit(node.left)
self.visit(node.right)
def visit_Dict(self, node: vy_ast.Dict) -> None:
for key in node.keys:
self.visit(key)
for value in node.values:
self.visit(value)
def visit_Index(self, node: vy_ast.Index) -> None:
self.visit(node.value)
def visit_List(self, node: vy_ast.List) -> None:
for element in node.elements:
self.visit(element)
def visit_Subscript(self, node: vy_ast.Subscript) -> None:
self.visit(node.value)
self.visit(node.slice)
def visit_Tuple(self, node: vy_ast.Tuple) -> None:
for element in node.elements:
self.visit(element)
def visit_UnaryOp(self, node: vy_ast.UnaryOp) -> None:
self.visit(node.operand)
| true | true |
f72fe8bd0b61ca670674b153c7c49dccbd99c3d8 | 3,581 | py | Python | google/bigtable/v2/bigtable-v2-py/noxfile.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 7 | 2021-02-21T10:39:41.000Z | 2021-12-07T07:31:28.000Z | google/bigtable/v2/bigtable-v2-py/noxfile.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 6 | 2021-02-02T23:46:11.000Z | 2021-11-15T01:46:02.000Z | google/bigtable/v2/bigtable-v2-py/noxfile.py | googleapis/googleapis-gen | d84824c78563d59b0e58d5664bfaa430e9ad7e7a | [
"Apache-2.0"
] | 4 | 2021-01-28T23:25:45.000Z | 2021-08-30T01:55:16.000Z | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import pathlib
import shutil
import subprocess
import sys
import nox # type: ignore
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
# ask setup.py for the distribution name so the lower-bound checker targets it
PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8")
# Sessions run by a bare `nox` invocation. Two fixes:
# 1. a missing comma after "check_lower_bounds" caused implicit string
#    concatenation ("check_lower_boundsdocs"), silently dropping both that
#    session and "docs" from the default list;
# 2. nox reads defaults from `nox.options.sessions` — assigning to
#    `nox.sessions` has no effect.
nox.options.sessions = [
    "unit",
    "cover",
    "mypy",
    "check_lower_bounds",
    # exclude update_lower_bounds from default
    "docs",
]
@nox.session(python=['3.6', '3.7', '3.8', '3.9'])
def unit(session):
    """Run the unit test suite with coverage enabled."""
    session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio')
    session.install('-e', '.')
    # extra posargs narrow the run to a subdirectory/file under tests/unit
    target = os.path.join('tests', 'unit', ''.join(session.posargs))
    session.run(
        'py.test',
        '--quiet',
        '--cov=google/cloud/bigtable_v2/',
        '--cov-config=.coveragerc',
        '--cov-report=term',
        '--cov-report=html',
        target,
    )
@nox.session(python='3.7')
def cover(session):
    """Run the final coverage report.

    Aggregates coverage collected by the unit test runs (system test runs
    are excluded), fails below 100%, then erases the coverage data.
    """
    session.install("coverage", "pytest-cov")
    for command in (
        ("coverage", "report", "--show-missing", "--fail-under=100"),
        ("coverage", "erase"),
    ):
        session.run(*command)
@nox.session(python=['3.6', '3.7'])
def mypy(session):
    """Run the mypy type checker over the `google` package."""
    session.install('mypy', 'types-pkg_resources')
    session.install('.')
    checker_args = ('--explicit-package-bases', 'google')
    session.run('mypy', *checker_args)
@nox.session
def update_lower_bounds(session):
    """Update lower bounds in constraints.txt to match setup.py."""
    session.install('google-cloud-testutils')
    session.install('.')
    checker_args = (
        'update',
        '--package-name',
        PACKAGE_NAME,
        '--constraints-file',
        str(LOWER_BOUND_CONSTRAINTS_FILE),
    )
    session.run('lower-bound-checker', *checker_args)
@nox.session
def check_lower_bounds(session):
    """Check that lower bounds in setup.py are reflected in the constraints file."""
    session.install('google-cloud-testutils')
    session.install('.')
    checker_args = (
        'check',
        '--package-name',
        PACKAGE_NAME,
        '--constraints-file',
        str(LOWER_BOUND_CONSTRAINTS_FILE),
    )
    session.run('lower-bound-checker', *checker_args)
@nox.session(python='3.6')
def docs(session):
    """Build the library's HTML documentation with Sphinx."""
    session.install("-e", ".")
    session.install("sphinx<3.0.0", "alabaster", "recommonmark")
    # Start from a clean build tree so stale artifacts never leak into output.
    build_root = os.path.join("docs", "_build")
    shutil.rmtree(build_root, ignore_errors=True)
    sphinx_args = [
        "sphinx-build",
        "-W",  # warnings as errors
        "-T",  # show full traceback on exception
        "-N",  # no colors
        "-b",
        "html",
        "-d",
        os.path.join("docs", "_build", "doctrees", ""),
        os.path.join("docs", ""),
        os.path.join("docs", "_build", "html", ""),
    ]
    session.run(*sphinx_args)
| 26.924812 | 96 | 0.62692 |
import os
import pathlib
import shutil
import subprocess
import sys
import nox
CURRENT_DIRECTORY = pathlib.Path(__file__).parent.absolute()
LOWER_BOUND_CONSTRAINTS_FILE = CURRENT_DIRECTORY / "constraints.txt"
PACKAGE_NAME = subprocess.check_output([sys.executable, "setup.py", "--name"], encoding="utf-8")
nox.sessions = [
"unit",
"cover",
"mypy",
"check_lower_bounds"
"docs",
]
@nox.session(python=['3.6', '3.7', '3.8', '3.9'])
def unit(session):
session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio')
session.install('-e', '.')
session.run(
'py.test',
'--quiet',
'--cov=google/cloud/bigtable_v2/',
'--cov-config=.coveragerc',
'--cov-report=term',
'--cov-report=html',
os.path.join('tests', 'unit', ''.join(session.posargs))
)
@nox.session(python='3.7')
def cover(session):
session.install("coverage", "pytest-cov")
session.run("coverage", "report", "--show-missing", "--fail-under=100")
session.run("coverage", "erase")
@nox.session(python=['3.6', '3.7'])
def mypy(session):
session.install('mypy', 'types-pkg_resources')
session.install('.')
session.run(
'mypy',
'--explicit-package-bases',
'google',
)
@nox.session
def update_lower_bounds(session):
session.install('google-cloud-testutils')
session.install('.')
session.run(
'lower-bound-checker',
'update',
'--package-name',
PACKAGE_NAME,
'--constraints-file',
str(LOWER_BOUND_CONSTRAINTS_FILE),
)
@nox.session
def check_lower_bounds(session):
session.install('google-cloud-testutils')
session.install('.')
session.run(
'lower-bound-checker',
'check',
'--package-name',
PACKAGE_NAME,
'--constraints-file',
str(LOWER_BOUND_CONSTRAINTS_FILE),
)
@nox.session(python='3.6')
def docs(session):
session.install("-e", ".")
session.install("sphinx<3.0.0", "alabaster", "recommonmark")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
"sphinx-build",
"-W",
"-T",
"-N",
"-b",
"html",
"-d",
os.path.join("docs", "_build", "doctrees", ""),
os.path.join("docs", ""),
os.path.join("docs", "_build", "html", ""),
)
| true | true |
f72fea862619713252e7fba20316ffcd135413b8 | 21,695 | py | Python | external/mmdetection/tests/ote_params_validation/test_ote_data_utils_params_validation.py | opencv/openvino_training_extensions | f5d809741e192a2345558efc75899a475019cf98 | [
"Apache-2.0"
] | 775 | 2019-03-01T02:13:33.000Z | 2020-09-07T22:49:15.000Z | external/mmdetection/tests/ote_params_validation/test_ote_data_utils_params_validation.py | opencv/openvino_training_extensions | f5d809741e192a2345558efc75899a475019cf98 | [
"Apache-2.0"
] | 229 | 2019-02-28T21:37:08.000Z | 2020-09-07T15:11:49.000Z | external/mmdetection/tests/ote_params_validation/test_ote_data_utils_params_validation.py | opencv/openvino_training_extensions | f5d809741e192a2345558efc75899a475019cf98 | [
"Apache-2.0"
] | 290 | 2019-02-28T20:32:11.000Z | 2020-09-07T05:51:41.000Z | # Copyright (C) 2021-2022 Intel Corporation
# SPDX-License-Identifier: Apache-2.0
#
import os.path as osp
import tempfile
import mmcv
import pytest
from detection_tasks.extension.datasets.data_utils import (
CocoDataset,
LoadAnnotations,
find_label_by_name,
format_list_to_str,
get_anchor_boxes,
get_classes_from_annotation,
get_sizes_from_dataset_entity,
load_dataset_items_coco_format,
)
from ote_sdk.entities.datasets import DatasetEntity
from ote_sdk.entities.label import Domain, LabelEntity
from ote_sdk.test_suite.e2e_test_system import e2e_pytest_unit
from ote_sdk.tests.parameters_validation.validation_helper import (
check_value_error_exception_raised,
)
def _create_dummy_coco_json(json_name):
    """Write a minimal COCO-format annotation file to ``json_name``.

    The file contains one 640x640 image, two box annotations on it, and a
    single "car" category -- just enough structure for CocoDataset to load.
    """
    fake_json = {
        "images": [
            {
                "id": 0,
                "width": 640,
                "height": 640,
                "file_name": "fake_name.jpg",
            }
        ],
        "annotations": [
            {
                "id": 1,
                "image_id": 0,
                "category_id": 0,
                "area": 400,
                "bbox": [50, 60, 20, 20],
                "iscrowd": 0,
            },
            {
                "id": 2,
                "image_id": 0,
                "category_id": 0,
                "area": 900,
                "bbox": [100, 120, 30, 30],
                "iscrowd": 0,
            },
        ],
        "categories": [
            {
                "id": 0,
                "name": "car",
                "supercategory": "car",
            }
        ],
    }
    mmcv.dump(fake_json, json_name)
class TestDataUtilsFunctionsInputParamsValidation:
    """Negative tests: each free function in ``data_utils`` must raise
    ValueError when handed an argument of the wrong type or an invalid path."""
    @e2e_pytest_unit
    def test_get_classes_from_annotation_input_params_validation(self):
        """
        <b>Description:</b>
        Check "get_classes_from_annotation" function input parameters validation
        <b>Input data:</b>
        "path" unexpected object
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as input parameter for
        "get_classes_from_annotation" function
        """
        for unexpected_value in [
            # non string object is specified as "path" parameter
            1,
            # Empty string is specified as "path" parameter
            "",
            # Path to file with unexpected extension is specified as "path" parameter
            "./unexpected_extension.yaml",
            # Path to non-existing file is specified as "path" parameter
            "./non_existing.json",
            # Path with null character is specified as "path" parameter
            "./null\0char.json",
            # Path with non-printable character is specified as "path" parameter
            "./\non_printable_char.json",
        ]:
            with pytest.raises(ValueError):
                get_classes_from_annotation(path=unexpected_value)
    @e2e_pytest_unit
    def test_find_label_by_name_params_validation(self):
        """
        <b>Description:</b>
        Check "find_label_by_name" function input parameters validation
        <b>Input data:</b>
        "find_label_by_name" function unexpected-type input parameters
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "find_label_by_name" function
        """
        label = LabelEntity(name="test label", domain=Domain.DETECTION)
        # Known-good arguments; each case below corrupts exactly one of them.
        correct_values_dict = {
            "labels": [label],
            "name": "test label",
            "domain": Domain.DETECTION,
        }
        unexpected_int = 1
        unexpected_values = [
            # Unexpected integer is specified as "labels" parameter
            ("labels", unexpected_int),
            # Unexpected integer is specified as nested label
            ("labels", [label, unexpected_int]),
            # Unexpected integer is specified as "name" parameter
            ("name", unexpected_int),
            # Unexpected integer is specified as "domain" parameter
            ("domain", unexpected_int),
        ]
        check_value_error_exception_raised(
            correct_parameters=correct_values_dict,
            unexpected_values=unexpected_values,
            class_or_function=find_label_by_name,
        )
    @e2e_pytest_unit
    def test_load_dataset_items_coco_format_params_validation(self):
        """
        <b>Description:</b>
        Check "load_dataset_items_coco_format" function input parameters validation
        <b>Input data:</b>
        "load_dataset_items_coco_format" function unexpected-type input parameters
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "load_dataset_items_coco_format" function
        """
        tmp_dir = tempfile.TemporaryDirectory()
        fake_json_file = osp.join(tmp_dir.name, "fake_data.json")
        _create_dummy_coco_json(fake_json_file)
        label = LabelEntity(name="test label", domain=Domain.DETECTION)
        correct_values_dict = {
            "ann_file_path": fake_json_file,
            "data_root_dir": tmp_dir.name,
            "domain": Domain.DETECTION,
        }
        unexpected_int = 1
        unexpected_values = [
            # Unexpected integer is specified as "ann_file_path" parameter
            ("ann_file_path", unexpected_int),
            # Empty string is specified as "ann_file_path" parameter
            ("ann_file_path", ""),
            # Path to non-json file is specified as "ann_file_path" parameter
            ("ann_file_path", osp.join(tmp_dir.name, "non_json.jpg")),
            # Path with null character is specified as "ann_file_path" parameter
            ("ann_file_path", osp.join(tmp_dir.name, "\0fake_data.json")),
            # Path with non-printable character is specified as "ann_file_path" parameter
            ("ann_file_path", osp.join(tmp_dir.name, "\nfake_data.json")),
            # Path to non-existing file is specified as "ann_file_path" parameter
            ("ann_file_path", osp.join(tmp_dir.name, "non_existing.json")),
            # Unexpected integer is specified as "data_root_dir" parameter
            ("data_root_dir", unexpected_int),
            # Empty string is specified as "data_root_dir" parameter
            ("data_root_dir", ""),
            # Path with null character is specified as "data_root_dir" parameter
            ("data_root_dir", "./\0null_char"),
            # Path with non-printable character is specified as "data_root_dir" parameter
            ("data_root_dir", "./\non_printable_char"),
            # Unexpected integer is specified as "domain" parameter
            ("domain", unexpected_int),
            # Unexpected integer is specified as "subset" parameter
            ("subset", unexpected_int),
            # Unexpected integer is specified as "labels_list" parameter
            ("labels_list", unexpected_int),
            # Unexpected integer is specified as nested label
            ("labels_list", [label, unexpected_int]),
            # Unexpected string is specified as "with_mask" parameter
            ("with_mask", "unexpected string"),
        ]
        check_value_error_exception_raised(
            correct_parameters=correct_values_dict,
            unexpected_values=unexpected_values,
            class_or_function=load_dataset_items_coco_format,
        )
    @e2e_pytest_unit
    def test_get_sizes_from_dataset_entity_params_validation(self):
        """
        <b>Description:</b>
        Check "get_sizes_from_dataset_entity" function input parameters validation
        <b>Input data:</b>
        "get_sizes_from_dataset_entity" function unexpected-type input parameters
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "get_sizes_from_dataset_entity" function
        """
        correct_values_dict = {
            "dataset": DatasetEntity(),
            "target_wh": [(0.1, 0.1)],
        }
        unexpected_int = 1
        unexpected_values = [
            # Unexpected integer is specified as "dataset" parameter
            ("dataset", unexpected_int),
            # Unexpected integer is specified as "target_wh" parameter
            ("target_wh", unexpected_int),
            # Unexpected integer is specified as nested target_wh
            ("target_wh", [(0.1, 0.1), unexpected_int]),
        ]
        check_value_error_exception_raised(
            correct_parameters=correct_values_dict,
            unexpected_values=unexpected_values,
            class_or_function=get_sizes_from_dataset_entity,
        )
    @e2e_pytest_unit
    def test_format_list_to_str_params_validation(self):
        """
        <b>Description:</b>
        Check "format_list_to_str" function input parameters validation
        <b>Input data:</b>
        "value_lists" unexpected type object
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "format_list_to_str" function
        """
        with pytest.raises(ValueError):
            format_list_to_str(value_lists="unexpected string")  # type: ignore
    @e2e_pytest_unit
    def test_get_anchor_boxes_params_validation(self):
        """
        <b>Description:</b>
        Check "get_anchor_boxes" function input parameters validation
        <b>Input data:</b>
        "get_anchor_boxes" function unexpected-type input parameters
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "get_anchor_boxes" function
        """
        correct_values_dict = {
            "wh_stats": [("wh_stat_1", 1), ("wh_stat_2", 2)],
            "group_as": [0, 1, 2],
        }
        unexpected_str = "unexpected string"
        unexpected_values = [
            # Unexpected string is specified as "wh_stats" parameter
            ("wh_stats", unexpected_str),
            # Unexpected string is specified as nested "wh_stat"
            ("wh_stats", [("wh_stat_1", 1), unexpected_str]),
            # Unexpected string is specified as "group_as" parameter
            ("group_as", unexpected_str),
            # Unexpected string is specified as nested "group_as"
            ("group_as", [0, 1, 2, unexpected_str]),
        ]
        check_value_error_exception_raised(
            correct_parameters=correct_values_dict,
            unexpected_values=unexpected_values,
            class_or_function=get_anchor_boxes,
        )
class TestLoadAnnotationsInputParamsValidation:
    """Negative tests: LoadAnnotations must raise ValueError on bad inputs."""
    @e2e_pytest_unit
    def test_load_annotations_init_params_validation(self):
        """
        <b>Description:</b>
        Check LoadAnnotations object initialization parameters validation
        <b>Input data:</b>
        Non-bool value passed for each boolean constructor flag
        <b>Expected results:</b>
        Test passes if ValueError is raised for every corrupted flag
        """
        # Each boolean flag is corrupted in turn with a string value.
        for flag_name in ["with_bbox", "with_label", "with_mask"]:
            bad_kwargs = {flag_name: "unexpected string"}
            with pytest.raises(ValueError):
                LoadAnnotations(**bad_kwargs)
    @e2e_pytest_unit
    def test_load_annotations_call_params_validation(self):
        """
        <b>Description:</b>
        Check LoadAnnotations object "__call__" method input parameters validation
        <b>Input data:</b>
        "results" argument that is not a dict, or a dict with a non-string key
        <b>Expected results:</b>
        Test passes if ValueError is raised for each invalid "results" value
        """
        load_annotations = LoadAnnotations()
        bad_key = 1
        invalid_results = (
            # not a dict at all
            bad_key,
            # dict whose key is not a string
            {"result_1": "some results", bad_key: "unexpected results"},
        )
        for invalid_value in invalid_results:
            with pytest.raises(ValueError):
                load_annotations(results=invalid_value)
class TestCocoDatasetInputParamsValidation:
    """Negative tests: CocoDataset construction and its methods must raise
    ValueError when given arguments of the wrong type or invalid paths."""
    @staticmethod
    def create_fake_json_file():
        # NOTE(review): not referenced elsewhere in this chunk. The returned
        # path lives inside a TemporaryDirectory whose handle goes out of scope
        # on return, so the file may already be deleted when a caller uses the
        # path -- confirm intended lifetime.
        tmp_dir = tempfile.TemporaryDirectory()
        fake_json_file = osp.join(tmp_dir.name, "fake_data.json")
        _create_dummy_coco_json(fake_json_file)
        return fake_json_file
    @staticmethod
    def dataset():
        # Build a CocoDataset from a freshly written dummy annotation file.
        # NOTE(review): the TemporaryDirectory handle is dropped on return;
        # this is fine only if CocoDataset reads the file eagerly in __init__.
        tmp_dir = tempfile.TemporaryDirectory()
        fake_json_file = osp.join(tmp_dir.name, "fake_data.json")
        _create_dummy_coco_json(fake_json_file)
        return CocoDataset(fake_json_file)
    @e2e_pytest_unit
    def test_coco_dataset_init_params_validation(self):
        """
        <b>Description:</b>
        Check CocoDataset object initialization parameters validation
        <b>Input data:</b>
        CocoDataset object initialization parameters with unexpected type
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        CocoDataset object initialization parameter
        """
        tmp_dir = tempfile.TemporaryDirectory()
        fake_json_file = osp.join(tmp_dir.name, "fake_data.json")
        _create_dummy_coco_json(fake_json_file)
        # Only "ann_file" is required; each case below adds/corrupts one kwarg.
        correct_values_dict = {
            "ann_file": fake_json_file,
        }
        unexpected_str = "unexpected string"
        unexpected_int = 1
        unexpected_values = [
            # Unexpected integer is specified as "ann_file" parameter
            ("ann_file", unexpected_int),
            # Empty string is specified as "ann_file" parameter
            ("ann_file", ""),
            # Path to non-json file is specified as "ann_file" parameter
            ("ann_file", osp.join(tmp_dir.name, "non_json.jpg")),
            # Path with null character is specified as "ann_file" parameter
            ("ann_file", osp.join(tmp_dir.name, "\0fake_data.json")),
            # Path with non-printable character is specified as "ann_file" parameter
            ("ann_file", osp.join(tmp_dir.name, "\nfake_data.json")),
            # Path to non-existing file is specified as "ann_file" parameter
            ("ann_file", osp.join(tmp_dir.name, "non_existing.json")),
            # Unexpected integer is specified as "classes" parameter
            ("classes", unexpected_int),
            # Unexpected integer is specified nested class
            ("classes", ["class_1", unexpected_int]),
            # Unexpected integer is specified as "data_root" parameter
            ("data_root", unexpected_int),
            # Empty string is specified as "data_root" parameter
            ("data_root", ""),
            # Path with null character is specified as "data_root" parameter
            ("data_root", "./\0null_char"),
            # Path with non-printable character is specified as "data_root" parameter
            ("data_root", "./\non_printable_char"),
            # Unexpected integer is specified as "img_prefix" parameter
            ("img_prefix", unexpected_int),
            # Unexpected string is specified as "test_mode" parameter
            ("test_mode", unexpected_str),
            # Unexpected string is specified as "filter_empty_gt" parameter
            ("filter_empty_gt", unexpected_str),
            # Unexpected string is specified as "min_size" parameter
            ("min_size", unexpected_str),
            # Unexpected string is specified as "with_mask" parameter
            ("with_mask", unexpected_str),
        ]
        check_value_error_exception_raised(
            correct_parameters=correct_values_dict,
            unexpected_values=unexpected_values,
            class_or_function=CocoDataset,
        )
    @e2e_pytest_unit
    def test_coco_dataset_pre_pipeline_params_validation(self):
        """
        <b>Description:</b>
        Check CocoDataset object "pre_pipeline" method input parameters validation
        <b>Input data:</b>
        CocoDataset object, "results" parameter with unexpected type
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "pre_pipeline" method
        """
        dataset = self.dataset()
        unexpected_int = 1
        for unexpected_value in [
            # Unexpected integer is specified as "results" parameter
            unexpected_int,
            # Unexpected integer is specified as "results" dictionary key
            {"result_1": "some results", unexpected_int: "unexpected results"},
        ]:
            with pytest.raises(ValueError):
                dataset.pre_pipeline(results=unexpected_value)
    @e2e_pytest_unit
    def test_coco_dataset_get_item_params_validation(self):
        """
        <b>Description:</b>
        Check CocoDataset object "__getitem__" method input parameters validation
        <b>Input data:</b>
        CocoDataset object, "idx" non-integer type parameter
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "__getitem__" method
        """
        dataset = self.dataset()
        with pytest.raises(ValueError):
            dataset.__getitem__(idx="unexpected string")  # type: ignore
    @e2e_pytest_unit
    def test_coco_dataset_prepare_img_params_validation(self):
        """
        <b>Description:</b>
        Check CocoDataset object "prepare_img" method input parameters validation
        <b>Input data:</b>
        CocoDataset object, "idx" non-integer type parameter
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "prepare_img" method
        """
        dataset = self.dataset()
        with pytest.raises(ValueError):
            dataset.prepare_img(idx="unexpected string")  # type: ignore
    @e2e_pytest_unit
    def test_coco_dataset_get_classes_params_validation(self):
        """
        <b>Description:</b>
        Check CocoDataset object "get_classes" method input parameters validation
        <b>Input data:</b>
        CocoDataset object, "classes" parameter with unexpected type
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "get_classes" method
        """
        dataset = self.dataset()
        unexpected_int = 1
        for unexpected_value in [
            # Unexpected integer is specified as "classes" parameter
            unexpected_int,
            # Unexpected integer is specified as nested "classes" element
            ["class_1", unexpected_int],
        ]:
            with pytest.raises(ValueError):
                dataset.get_classes(classes=unexpected_value)  # type: ignore
    @e2e_pytest_unit
    def test_coco_dataset_load_annotations_params_validation(self):
        """
        <b>Description:</b>
        Check CocoDataset object "load_annotations" method input parameters validation
        <b>Input data:</b>
        CocoDataset object, "ann_file" unexpected object
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "load_annotations" method
        """
        dataset = self.dataset()
        for unexpected_value in [
            # Unexpected integer is specified as "ann_file" parameter
            1,
            # Empty string is specified as "ann_file" parameter
            "",
            # Path to non-existing file is specified as "ann_file" parameter
            "./non_existing.json",
            # Path to non-json file is specified as "ann_file" parameter
            "./unexpected_type.jpg",
            # Path Null character is specified in "ann_file" parameter
            "./null\0char.json",
            # Path with non-printable character is specified as "ann_file" parameter
            "./null\nchar.json",
        ]:
            with pytest.raises(ValueError):
                dataset.load_annotations(ann_file=unexpected_value)
    @e2e_pytest_unit
    def test_coco_dataset_get_ann_info_params_validation(self):
        """
        <b>Description:</b>
        Check CocoDataset object "get_ann_info" method input parameters validation
        <b>Input data:</b>
        CocoDataset object, "idx" non-integer type parameter
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "get_ann_info" method
        """
        dataset = self.dataset()
        with pytest.raises(ValueError):
            dataset.get_ann_info(idx="unexpected string")  # type: ignore
    @e2e_pytest_unit
    def test_coco_dataset_get_cat_ids_params_validation(self):
        """
        <b>Description:</b>
        Check CocoDataset object "get_cat_ids" method input parameters validation
        <b>Input data:</b>
        CocoDataset object, "idx" non-integer type parameter
        <b>Expected results:</b>
        Test passes if ValueError exception is raised when unexpected type object is specified as
        input parameter for "get_cat_ids" method
        """
        dataset = self.dataset()
        with pytest.raises(ValueError):
            dataset.get_cat_ids(idx="unexpected string")  # type: ignore
| 39.445455 | 117 | 0.638442 |
import os.path as osp
import tempfile
import mmcv
import pytest
from detection_tasks.extension.datasets.data_utils import (
CocoDataset,
LoadAnnotations,
find_label_by_name,
format_list_to_str,
get_anchor_boxes,
get_classes_from_annotation,
get_sizes_from_dataset_entity,
load_dataset_items_coco_format,
)
from ote_sdk.entities.datasets import DatasetEntity
from ote_sdk.entities.label import Domain, LabelEntity
from ote_sdk.test_suite.e2e_test_system import e2e_pytest_unit
from ote_sdk.tests.parameters_validation.validation_helper import (
check_value_error_exception_raised,
)
def _create_dummy_coco_json(json_name):
image = {
"id": 0,
"width": 640,
"height": 640,
"file_name": "fake_name.jpg",
}
annotation_1 = {
"id": 1,
"image_id": 0,
"category_id": 0,
"area": 400,
"bbox": [50, 60, 20, 20],
"iscrowd": 0,
}
annotation_2 = {
"id": 2,
"image_id": 0,
"category_id": 0,
"area": 900,
"bbox": [100, 120, 30, 30],
"iscrowd": 0,
}
categories = [
{
"id": 0,
"name": "car",
"supercategory": "car",
}
]
fake_json = {
"images": [image],
"annotations": [annotation_1, annotation_2],
"categories": categories,
}
mmcv.dump(fake_json, json_name)
class TestDataUtilsFunctionsInputParamsValidation:
@e2e_pytest_unit
def test_get_classes_from_annotation_input_params_validation(self):
for unexpected_value in [
1,
"",
"./unexpected_extension.yaml",
"./non_existing.json",
"./null\0char.json",
"./\non_printable_char.json",
]:
with pytest.raises(ValueError):
get_classes_from_annotation(path=unexpected_value)
@e2e_pytest_unit
def test_find_label_by_name_params_validation(self):
label = LabelEntity(name="test label", domain=Domain.DETECTION)
correct_values_dict = {
"labels": [label],
"name": "test label",
"domain": Domain.DETECTION,
}
unexpected_int = 1
unexpected_values = [
("labels", unexpected_int),
("labels", [label, unexpected_int]),
("name", unexpected_int),
("domain", unexpected_int),
]
check_value_error_exception_raised(
correct_parameters=correct_values_dict,
unexpected_values=unexpected_values,
class_or_function=find_label_by_name,
)
@e2e_pytest_unit
def test_load_dataset_items_coco_format_params_validation(self):
tmp_dir = tempfile.TemporaryDirectory()
fake_json_file = osp.join(tmp_dir.name, "fake_data.json")
_create_dummy_coco_json(fake_json_file)
label = LabelEntity(name="test label", domain=Domain.DETECTION)
correct_values_dict = {
"ann_file_path": fake_json_file,
"data_root_dir": tmp_dir.name,
"domain": Domain.DETECTION,
}
unexpected_int = 1
unexpected_values = [
("ann_file_path", unexpected_int),
("ann_file_path", ""),
("ann_file_path", osp.join(tmp_dir.name, "non_json.jpg")),
("ann_file_path", osp.join(tmp_dir.name, "\0fake_data.json")),
("ann_file_path", osp.join(tmp_dir.name, "\nfake_data.json")),
("ann_file_path", osp.join(tmp_dir.name, "non_existing.json")),
("data_root_dir", unexpected_int),
("data_root_dir", ""),
("data_root_dir", "./\0null_char"),
("data_root_dir", "./\non_printable_char"),
("domain", unexpected_int),
("subset", unexpected_int),
("labels_list", unexpected_int),
("labels_list", [label, unexpected_int]),
("with_mask", "unexpected string"),
]
check_value_error_exception_raised(
correct_parameters=correct_values_dict,
unexpected_values=unexpected_values,
class_or_function=load_dataset_items_coco_format,
)
@e2e_pytest_unit
def test_get_sizes_from_dataset_entity_params_validation(self):
correct_values_dict = {
"dataset": DatasetEntity(),
"target_wh": [(0.1, 0.1)],
}
unexpected_int = 1
unexpected_values = [
("dataset", unexpected_int),
("target_wh", unexpected_int),
("target_wh", [(0.1, 0.1), unexpected_int]),
]
check_value_error_exception_raised(
correct_parameters=correct_values_dict,
unexpected_values=unexpected_values,
class_or_function=get_sizes_from_dataset_entity,
)
@e2e_pytest_unit
def test_format_list_to_str_params_validation(self):
with pytest.raises(ValueError):
format_list_to_str(value_lists="unexpected string")
@e2e_pytest_unit
def test_get_anchor_boxes_params_validation(self):
correct_values_dict = {
"wh_stats": [("wh_stat_1", 1), ("wh_stat_2", 2)],
"group_as": [0, 1, 2],
}
unexpected_str = "unexpected string"
unexpected_values = [
("wh_stats", unexpected_str),
("wh_stats", [("wh_stat_1", 1), unexpected_str]),
("group_as", unexpected_str),
("group_as", [0, 1, 2, unexpected_str]),
]
check_value_error_exception_raised(
correct_parameters=correct_values_dict,
unexpected_values=unexpected_values,
class_or_function=get_anchor_boxes,
)
class TestLoadAnnotationsInputParamsValidation:
@e2e_pytest_unit
def test_load_annotations_init_params_validation(self):
for parameter in ["with_bbox", "with_label", "with_mask"]:
with pytest.raises(ValueError):
LoadAnnotations(**{parameter: "unexpected string"})
@e2e_pytest_unit
def test_load_annotations_call_params_validation(self):
load_annotations = LoadAnnotations()
unexpected_int = 1
for unexpected_value in [
unexpected_int,
{"result_1": "some results", unexpected_int: "unexpected results"},
]:
with pytest.raises(ValueError):
load_annotations(results=unexpected_value)
class TestCocoDatasetInputParamsValidation:
@staticmethod
def create_fake_json_file():
tmp_dir = tempfile.TemporaryDirectory()
fake_json_file = osp.join(tmp_dir.name, "fake_data.json")
_create_dummy_coco_json(fake_json_file)
return fake_json_file
@staticmethod
def dataset():
tmp_dir = tempfile.TemporaryDirectory()
fake_json_file = osp.join(tmp_dir.name, "fake_data.json")
_create_dummy_coco_json(fake_json_file)
return CocoDataset(fake_json_file)
@e2e_pytest_unit
def test_coco_dataset_init_params_validation(self):
tmp_dir = tempfile.TemporaryDirectory()
fake_json_file = osp.join(tmp_dir.name, "fake_data.json")
_create_dummy_coco_json(fake_json_file)
correct_values_dict = {
"ann_file": fake_json_file,
}
unexpected_str = "unexpected string"
unexpected_int = 1
unexpected_values = [
("ann_file", unexpected_int),
("ann_file", ""),
("ann_file", osp.join(tmp_dir.name, "non_json.jpg")),
("ann_file", osp.join(tmp_dir.name, "\0fake_data.json")),
("ann_file", osp.join(tmp_dir.name, "\nfake_data.json")),
("ann_file", osp.join(tmp_dir.name, "non_existing.json")),
("classes", unexpected_int),
("classes", ["class_1", unexpected_int]),
("data_root", unexpected_int),
("data_root", ""),
("data_root", "./\0null_char"),
("data_root", "./\non_printable_char"),
("img_prefix", unexpected_int),
("test_mode", unexpected_str),
("filter_empty_gt", unexpected_str),
("min_size", unexpected_str),
("with_mask", unexpected_str),
]
check_value_error_exception_raised(
correct_parameters=correct_values_dict,
unexpected_values=unexpected_values,
class_or_function=CocoDataset,
)
@e2e_pytest_unit
def test_coco_dataset_pre_pipeline_params_validation(self):
dataset = self.dataset()
unexpected_int = 1
for unexpected_value in [
unexpected_int,
{"result_1": "some results", unexpected_int: "unexpected results"},
]:
with pytest.raises(ValueError):
dataset.pre_pipeline(results=unexpected_value)
@e2e_pytest_unit
def test_coco_dataset_get_item_params_validation(self):
dataset = self.dataset()
with pytest.raises(ValueError):
dataset.__getitem__(idx="unexpected string")
@e2e_pytest_unit
def test_coco_dataset_prepare_img_params_validation(self):
dataset = self.dataset()
with pytest.raises(ValueError):
dataset.prepare_img(idx="unexpected string")
@e2e_pytest_unit
def test_coco_dataset_get_classes_params_validation(self):
dataset = self.dataset()
unexpected_int = 1
for unexpected_value in [
unexpected_int,
["class_1", unexpected_int],
]:
with pytest.raises(ValueError):
dataset.get_classes(classes=unexpected_value)
@e2e_pytest_unit
def test_coco_dataset_load_annotations_params_validation(self):
dataset = self.dataset()
for unexpected_value in [
1,
"",
"./non_existing.json",
"./unexpected_type.jpg",
"./null\0char.json",
"./null\nchar.json",
]:
with pytest.raises(ValueError):
dataset.load_annotations(ann_file=unexpected_value)
@e2e_pytest_unit
def test_coco_dataset_get_ann_info_params_validation(self):
dataset = self.dataset()
with pytest.raises(ValueError):
dataset.get_ann_info(idx="unexpected string")
@e2e_pytest_unit
def test_coco_dataset_get_cat_ids_params_validation(self):
dataset = self.dataset()
with pytest.raises(ValueError):
dataset.get_cat_ids(idx="unexpected string")
| true | true |
f72fea9931e22e9f239b53d7134f8989231f7dc2 | 2,129 | py | Python | aiida/backends/djsite/db/migrations/0014_add_node_uuid_unique_constraint.py | azadoks/aiida-core | b806b7fef8fc79090deccfe2019b77cb922e0581 | [
"MIT",
"BSD-3-Clause"
] | 180 | 2019-07-12T07:45:26.000Z | 2022-03-22T13:16:57.000Z | aiida/backends/djsite/db/migrations/0014_add_node_uuid_unique_constraint.py | azadoks/aiida-core | b806b7fef8fc79090deccfe2019b77cb922e0581 | [
"MIT",
"BSD-3-Clause"
] | 2,325 | 2019-07-04T13:41:44.000Z | 2022-03-31T12:17:10.000Z | aiida/backends/djsite/db/migrations/0014_add_node_uuid_unique_constraint.py | azadoks/aiida-core | b806b7fef8fc79090deccfe2019b77cb922e0581 | [
"MIT",
"BSD-3-Clause"
] | 88 | 2019-07-06T01:42:39.000Z | 2022-03-18T14:20:09.000Z | # -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida-core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
# pylint: disable=invalid-name
"""Add a uniqueness constraint to the uuid column of DbNode table."""
from django.db import migrations, models
from aiida.backends.djsite.db.migrations import upgrade_schema_version
from aiida.common.utils import get_new_uuid
# Schema-version strings recorded by upgrade_schema_version() in this migration.
REVISION = '1.0.14'
DOWN_REVISION = '1.0.13'
def verify_node_uuid_uniqueness(_, __):
    """Check whether the database contains nodes with duplicate UUIDS.
    Note that we have to redefine this method from aiida.manage.database.integrity.verify_node_uuid_uniqueness
    because the migrations.RunPython command that will invoke this function, will pass two arguments and therefore
    this wrapper needs to have a different function signature.
    :raises: IntegrityError if database contains nodes with duplicate UUIDS.
    """
    # Deferred import -- presumably to avoid import-time side effects while
    # Django loads the migration module; TODO confirm.
    from aiida.backends.general.migrations.duplicate_uuids import verify_uuid_uniqueness
    verify_uuid_uniqueness(table='db_dbnode')
def reverse_code(_, __):
    """No-op reverse step for ``migrations.RunPython``: the forward step only
    verifies data, so there is nothing to undo when migrating backwards."""
class Migration(migrations.Migration):
    """Add a uniqueness constraint to the uuid column of DbNode table."""
    dependencies = [
        ('db', '0013_django_1_8'),
    ]
    operations = [
        # Abort before altering the column if duplicate UUIDs already exist;
        # reverse_code is a no-op since the check leaves no state to undo.
        migrations.RunPython(verify_node_uuid_uniqueness, reverse_code=reverse_code),
        # unique=True installs the database-level uniqueness constraint.
        migrations.AlterField(
            model_name='dbnode',
            name='uuid',
            field=models.CharField(max_length=36, default=get_new_uuid, unique=True),
        ),
        upgrade_schema_version(REVISION, DOWN_REVISION)
    ]
| 38.709091 | 114 | 0.627055 | true | true | |
f72feae6ab211e77121bc7730e459830daa3eb1d | 826 | py | Python | pyadlml/dataset/obj.py | tcsvn/pyadlml | 9b87d223ba0ef9814ba830263dd35fc6432fae87 | [
"MIT"
] | 4 | 2020-11-11T17:29:10.000Z | 2021-01-08T20:55:47.000Z | pyadlml/dataset/obj.py | tcsvn/pyadlml | 9b87d223ba0ef9814ba830263dd35fc6432fae87 | [
"MIT"
] | null | null | null | pyadlml/dataset/obj.py | tcsvn/pyadlml | 9b87d223ba0ef9814ba830263dd35fc6432fae87 | [
"MIT"
] | 5 | 2020-10-05T03:23:31.000Z | 2022-01-25T19:15:34.000Z | from pyadlml.dataset._representations.raw import create_raw
from pyadlml.dataset._representations.changepoint import create_changepoint
from pyadlml.dataset.activities import check_activities
class Data():
def __init__(self, activities, devices, activity_list, device_list):
#assert check_activities(activities)
#assert check_devices(devices)
self.df_activities = activities
self.df_devices = devices
# list of activities and devices
self.lst_activities = activity_list
self.lst_devices = device_list
def create_cp(self, t_res):
raise NotImplementedError
def create_raw(self, t_res=None, idle=False):
self.df_raw = create_raw(self.df_devices, self.df_activities, t_res)
def create_lastfired(self):
raise NotImplementedError | 34.416667 | 76 | 0.737288 | from pyadlml.dataset._representations.raw import create_raw
from pyadlml.dataset._representations.changepoint import create_changepoint
from pyadlml.dataset.activities import check_activities
class Data():
def __init__(self, activities, devices, activity_list, device_list):
self.df_activities = activities
self.df_devices = devices
self.lst_activities = activity_list
self.lst_devices = device_list
def create_cp(self, t_res):
raise NotImplementedError
def create_raw(self, t_res=None, idle=False):
self.df_raw = create_raw(self.df_devices, self.df_activities, t_res)
def create_lastfired(self):
raise NotImplementedError | true | true |
f72fec11a0ec5517350c9336346de65477e1cb36 | 87,391 | py | Python | python3/pyinotify.py | koto/pyinotify | b828a124bcf2310df7e2e7683b0902fcd78a08bf | [
"MIT"
] | 1 | 2020-03-31T21:41:57.000Z | 2020-03-31T21:41:57.000Z | python3/pyinotify.py | koto/pyinotify | b828a124bcf2310df7e2e7683b0902fcd78a08bf | [
"MIT"
] | null | null | null | python3/pyinotify.py | koto/pyinotify | b828a124bcf2310df7e2e7683b0902fcd78a08bf | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# pyinotify.py - python interface to inotify
# Copyright (c) 2005-2011 Sebastien Martini <seb@dbzteam.org>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
"""
pyinotify
@author: Sebastien Martini
@license: MIT License
@contact: seb@dbzteam.org
"""
class PyinotifyError(Exception):
    """Root of the exception hierarchy used by Pyinotify."""
class UnsupportedPythonVersionError(PyinotifyError):
    """
    Raised when the interpreter running this module is too old.
    """
    def __init__(self, version):
        """
        @param version: Current Python version
        @type version: string
        """
        msg = ('Python %s is unsupported, requires '
               'at least Python 3.0') % version
        PyinotifyError.__init__(self, msg)
# Check Python version: fail fast with an explicit error on interpreters
# older than 3.0 rather than with obscure failures later on.
import sys
if sys.version_info < (3, 0):
    raise UnsupportedPythonVersionError(sys.version)
# Import directives
import threading
import os
import select
import struct
import fcntl
import errno
import termios
import array
import logging
import atexit
from collections import deque
from datetime import datetime, timedelta
import time
import re
import asyncore
import glob
import locale
import subprocess
try:
from functools import reduce
except ImportError:
pass # Will fail on Python 2.4 which has reduce() builtin anyway.
try:
import ctypes
import ctypes.util
except ImportError:
ctypes = None
try:
import inotify_syscalls
except ImportError:
inotify_syscalls = None
# Package metadata.
__author__ = "seb@dbzteam.org (Sebastien Martini)"
__version__ = "0.9.4"
# Compatibility mode: set to True to improve compatibility with
# Pyinotify 0.7.1. Do not set this variable yourself, call the
# function compatibility_mode() instead.
COMPATIBILITY_MODE = False
class InotifyBindingNotFoundError(PyinotifyError):
    """
    Raised when no usable inotify binding (ctypes or C extension) could
    be found on this system.
    """
    def __init__(self):
        PyinotifyError.__init__(self, "Couldn't find any inotify binding")
class INotifyWrapper:
    """
    Abstract access to the system's inotify syscalls. Concrete subclasses
    supply the actual binding (ctypes on libc, or the compiled C
    extension). This is an internal class.
    """
    @staticmethod
    def create():
        """
        Factory method instantiating and returning the first usable
        wrapper: ctypes is preferred, the C extension is the fallback.
        Returns None implicitly when neither binding works.
        """
        # Preferred binding: direct ctypes access to libc.
        if ctypes:
            wrapper = _CtypesLibcINotifyWrapper()
            if wrapper.init():
                return wrapper
        # Fallback binding: the compiled C extension, when present.
        if inotify_syscalls:
            wrapper = _INotifySyscallsWrapper()
            if wrapper.init():
                return wrapper
    def get_errno(self):
        """
        Return the last errno code, or None if no errno support is
        available.
        """
        return self._get_errno()
    def str_errno(self):
        # Human-readable rendering of the last errno value.
        err = self.get_errno()
        if err is None:
            return 'Errno: no errno support'
        return 'Errno=%s (%s)' % (os.strerror(err), errno.errorcode[err])
    def inotify_init(self):
        return self._inotify_init()
    def inotify_add_watch(self, fd, pathname, mask):
        # Callers must hand in a (unicode) str; encoding to the filesystem
        # charset is the concrete wrapper's job.
        assert isinstance(pathname, str)
        return self._inotify_add_watch(fd, pathname, mask)
    def inotify_rm_watch(self, fd, wd):
        return self._inotify_rm_watch(fd, wd)
class _INotifySyscallsWrapper(INotifyWrapper):
    """inotify binding backed by the compiled inotify_syscalls extension."""
    def __init__(self):
        # errno value recorded by the most recent failed syscall.
        self._last_errno = None
    def init(self):
        assert inotify_syscalls
        return True
    def _get_errno(self):
        return self._last_errno
    def _inotify_init(self):
        try:
            return inotify_syscalls.inotify_init()
        except IOError as err:
            self._last_errno = err.errno
            return -1
    def _inotify_add_watch(self, fd, pathname, mask):
        try:
            return inotify_syscalls.inotify_add_watch(fd, pathname, mask)
        except IOError as err:
            self._last_errno = err.errno
            return -1
    def _inotify_rm_watch(self, fd, wd):
        try:
            return inotify_syscalls.inotify_rm_watch(fd, wd)
        except IOError as err:
            self._last_errno = err.errno
            return -1
class _CtypesLibcINotifyWrapper(INotifyWrapper):
    """inotify binding obtained through ctypes on the C library."""
    def __init__(self):
        self._libc = None
        self._get_errno_func = None
    def init(self):
        assert ctypes
        try:
            libc_name = ctypes.util.find_library('c')
        except (OSError, IOError):
            # Loading CDLL with None may still succeed below.
            libc_name = None
        self._libc = ctypes.CDLL(libc_name, use_errno=True)
        self._get_errno_func = ctypes.get_errno
        # The loaded libc must expose all three inotify entry points.
        for symbol in ('inotify_init', 'inotify_add_watch',
                       'inotify_rm_watch'):
            if not hasattr(self._libc, symbol):
                return False
        self._libc.inotify_init.argtypes = []
        self._libc.inotify_init.restype = ctypes.c_int
        self._libc.inotify_add_watch.argtypes = [ctypes.c_int, ctypes.c_char_p,
                                                 ctypes.c_uint32]
        self._libc.inotify_add_watch.restype = ctypes.c_int
        self._libc.inotify_rm_watch.argtypes = [ctypes.c_int, ctypes.c_int]
        self._libc.inotify_rm_watch.restype = ctypes.c_int
        return True
    def _get_errno(self):
        assert self._get_errno_func
        return self._get_errno_func()
    def _inotify_init(self):
        assert self._libc is not None
        return self._libc.inotify_init()
    def _inotify_add_watch(self, fd, pathname, mask):
        assert self._libc is not None
        # ctypes.create_string_buffer manipulates bytes internally and
        # inotify_add_watch does not work well with unicode buffers, so
        # encode explicitly with the filesystem encoding.
        encoded = pathname.encode(sys.getfilesystemencoding())
        buf = ctypes.create_string_buffer(encoded)
        return self._libc.inotify_add_watch(fd, buf, mask)
    def _inotify_rm_watch(self, fd, wd):
        assert self._libc is not None
        return self._libc.inotify_rm_watch(fd, wd)
    def _sysctl(self, *args):
        assert self._libc is not None
        return self._libc.sysctl(*args)
# Logging
def logger_init():
    """
    Initialize and return the module-wide "pyinotify" logger.

    The logger writes through a StreamHandler with a timestamped format
    and an INFO threshold. Unlike the previous implementation, invoking
    this function more than once (e.g. on module reload) no longer stacks
    duplicate handlers, which previously caused every record to be
    printed multiple times.

    @return: Configured logger instance.
    @rtype: logging.Logger
    """
    log = logging.getLogger("pyinotify")
    # Only attach a handler if none is present yet: guards against
    # duplicate output when logger_init() is called repeatedly.
    if not log.handlers:
        console_handler = logging.StreamHandler()
        console_handler.setFormatter(
            logging.Formatter("[%(asctime)s %(name)s %(levelname)s] %(message)s"))
        log.addHandler(console_handler)
    # logging.INFO == 20: same threshold as before, named for clarity.
    log.setLevel(logging.INFO)
    return log
log = logger_init()
# inotify's variables
class SysCtlINotify:
    """
    Access (read, write) inotify's variables through sysctl. Usually it
    requires administrator rights to update them.

    Examples:
      - Read max_queued_events attribute: myvar = max_queued_events.value
      - Update max_queued_events attribute: max_queued_events.value = 42
    """
    # Position of each attribute inside the fs.inotify sysctl namespace.
    inotify_attrs = {'max_user_instances': 1,
                     'max_user_watches': 2,
                     'max_queued_events': 3}
    def __init__(self, attrname, inotify_wrapper):
        # FIXME: right now only supporting ctypes
        assert ctypes
        self._attrname = attrname
        self._inotify_wrapper = inotify_wrapper
        # Build the 3-integer name vector for the sysctl(2) binary
        # interface; 5 and 20 presumably correspond to CTL_FS and
        # FS_INOTIFY from linux/sysctl.h -- TODO confirm.
        sino = ctypes.c_int * 3
        self._attr = sino(5, 20, SysCtlINotify.inotify_attrs[attrname])
    @staticmethod
    def create(attrname):
        """Return a SysCtlINotify for attrname, or None without ctypes."""
        # FIXME: right now only supporting ctypes
        if ctypes is None:
            return None
        inotify_wrapper = _CtypesLibcINotifyWrapper()
        if not inotify_wrapper.init():
            return None
        return SysCtlINotify(attrname, inotify_wrapper)
    def get_val(self):
        """
        Gets attribute's value.

        @return: stored value.
        @rtype: int
        """
        # Read-only sysctl call: newval/newlen are passed as None/0.
        oldv = ctypes.c_int(0)
        size = ctypes.c_int(ctypes.sizeof(oldv))
        self._inotify_wrapper._sysctl(self._attr, 3,
                                      ctypes.c_voidp(ctypes.addressof(oldv)),
                                      ctypes.addressof(size),
                                      None, 0)
        return oldv.value
    def set_val(self, nval):
        """
        Sets new attribute's value.

        @param nval: replaces current value by nval.
        @type nval: int
        """
        # Write call: the old value is fetched into oldv while newv is
        # installed as the new setting.
        oldv = ctypes.c_int(0)
        sizeo = ctypes.c_int(ctypes.sizeof(oldv))
        newv = ctypes.c_int(nval)
        sizen = ctypes.c_int(ctypes.sizeof(newv))
        self._inotify_wrapper._sysctl(self._attr, 3,
                                      ctypes.c_voidp(ctypes.addressof(oldv)),
                                      ctypes.addressof(sizeo),
                                      ctypes.c_voidp(ctypes.addressof(newv)),
                                      ctypes.addressof(sizen))
    # Conventional attribute-style access: obj.value reads/writes.
    value = property(get_val, set_val)
    def __repr__(self):
        return '<%s=%d>' % (self._attrname, self.get_val())
# Inotify's variables
#
# FIXME: currently these variables are only accessible when ctypes is used,
# otherwise they are set to None.
#
#  read: myvar = max_queued_events.value
#  update: max_queued_events.value = 42
#
# Expose each tunable as a module-level SysCtlINotify instance (or None
# when ctypes support is unavailable).
for attrname in ('max_queued_events', 'max_user_instances', 'max_user_watches'):
    globals()[attrname] = SysCtlINotify.create(attrname)
class EventsCodes:
    """
    Set of codes corresponding to each kind of events.
    Some of these flags are used to communicate with inotify, whereas
    the others are sent to userspace by inotify notifying some events.

    @cvar IN_ACCESS: File was accessed.
    @type IN_ACCESS: int
    @cvar IN_MODIFY: File was modified.
    @type IN_MODIFY: int
    @cvar IN_ATTRIB: Metadata changed.
    @type IN_ATTRIB: int
    @cvar IN_CLOSE_WRITE: Writtable file was closed.
    @type IN_CLOSE_WRITE: int
    @cvar IN_CLOSE_NOWRITE: Unwrittable file closed.
    @type IN_CLOSE_NOWRITE: int
    @cvar IN_OPEN: File was opened.
    @type IN_OPEN: int
    @cvar IN_MOVED_FROM: File was moved from X.
    @type IN_MOVED_FROM: int
    @cvar IN_MOVED_TO: File was moved to Y.
    @type IN_MOVED_TO: int
    @cvar IN_CREATE: Subfile was created.
    @type IN_CREATE: int
    @cvar IN_DELETE: Subfile was deleted.
    @type IN_DELETE: int
    @cvar IN_DELETE_SELF: Self (watched item itself) was deleted.
    @type IN_DELETE_SELF: int
    @cvar IN_MOVE_SELF: Self (watched item itself) was moved.
    @type IN_MOVE_SELF: int
    @cvar IN_UNMOUNT: Backing fs was unmounted.
    @type IN_UNMOUNT: int
    @cvar IN_Q_OVERFLOW: Event queued overflowed.
    @type IN_Q_OVERFLOW: int
    @cvar IN_IGNORED: File was ignored.
    @type IN_IGNORED: int
    @cvar IN_ONLYDIR: only watch the path if it is a directory (new
                      in kernel 2.6.15).
    @type IN_ONLYDIR: int
    @cvar IN_DONT_FOLLOW: don't follow a symlink (new in kernel 2.6.15).
                          With IN_ONLYDIR we can make sure that we don't
                          watch the target of symlinks.
    @type IN_DONT_FOLLOW: int
    @cvar IN_EXCL_UNLINK: Events are not generated for children after they
                          have been unlinked from the watched directory.
                          (new in kernel 2.6.36).
    @type IN_EXCL_UNLINK: int
    @cvar IN_MASK_ADD: add to the mask of an already existing watch (new
                       in kernel 2.6.14).
    @type IN_MASK_ADD: int
    @cvar IN_ISDIR: Event occurred against dir.
    @type IN_ISDIR: int
    @cvar IN_ONESHOT: Only send event once.
    @type IN_ONESHOT: int
    @cvar ALL_EVENTS: Alias for considering all of the events.
    @type ALL_EVENTS: int
    """
    # The idea here is 'configuration-as-code' - this way, we get our nice class
    # constants, but we also get nice human-friendly text mappings to do lookups
    # against as well, for free:
    FLAG_COLLECTIONS = {'OP_FLAGS': {
                        'IN_ACCESS'        : 0x00000001,  # File was accessed
                        'IN_MODIFY'        : 0x00000002,  # File was modified
                        'IN_ATTRIB'        : 0x00000004,  # Metadata changed
                        'IN_CLOSE_WRITE'   : 0x00000008,  # Writable file was closed
                        'IN_CLOSE_NOWRITE' : 0x00000010,  # Unwritable file closed
                        'IN_OPEN'          : 0x00000020,  # File was opened
                        'IN_MOVED_FROM'    : 0x00000040,  # File was moved from X
                        'IN_MOVED_TO'      : 0x00000080,  # File was moved to Y
                        'IN_CREATE'        : 0x00000100,  # Subfile was created
                        'IN_DELETE'        : 0x00000200,  # Subfile was deleted
                        'IN_DELETE_SELF'   : 0x00000400,  # Self (watched item itself)
                                                          # was deleted
                        'IN_MOVE_SELF'     : 0x00000800,  # Self (watched item itself) was moved
                        },
                        'EVENT_FLAGS': {
                        'IN_UNMOUNT'       : 0x00002000,  # Backing fs was unmounted
                        'IN_Q_OVERFLOW'    : 0x00004000,  # Event queued overflowed
                        'IN_IGNORED'       : 0x00008000,  # File was ignored
                        },
                        'SPECIAL_FLAGS': {
                        'IN_ONLYDIR'       : 0x01000000,  # only watch the path if it is a
                                                          # directory
                        'IN_DONT_FOLLOW'   : 0x02000000,  # don't follow a symlink
                        'IN_EXCL_UNLINK'   : 0x04000000,  # exclude events on unlinked objects
                        'IN_MASK_ADD'      : 0x20000000,  # add to the mask of an already
                                                          # existing watch
                        'IN_ISDIR'         : 0x40000000,  # event occurred against dir
                        'IN_ONESHOT'       : 0x80000000,  # only send event once
                        },
                        }
    def maskname(mask):
        """
        Returns the event name associated to mask. IN_ISDIR is appended to
        the result when appropriate. Note: only one event is returned, because
        only one event can be raised at a given time.

        @param mask: mask.
        @type mask: int
        @return: event name.
        @rtype: str
        """
        ms = mask
        name = '%s'
        if mask & IN_ISDIR:
            # Strip IN_ISDIR so the remaining bits index ALL_VALUES.
            ms = mask - IN_ISDIR
            name = '%s|IN_ISDIR'
        return name % EventsCodes.ALL_VALUES[ms]
    # Registered as a staticmethod after the fact so the plain function
    # definition above remains usable inside the class body.
    maskname = staticmethod(maskname)
# So let's now turn the configuration into code
EventsCodes.ALL_FLAGS = {}
EventsCodes.ALL_VALUES = {}
for flagc, valc in EventsCodes.FLAG_COLLECTIONS.items():
    # Make the collections' members directly accessible through the
    # class dictionary
    setattr(EventsCodes, flagc, valc)
    # Collect all the flags under a common umbrella
    EventsCodes.ALL_FLAGS.update(valc)
    # Make the individual masks accessible as 'constants' at globals() scope
    # and masknames accessible by values.
    for name, val in valc.items():
        globals()[name] = val
        EventsCodes.ALL_VALUES[val] = name
# all 'normal' events: bitwise OR of every operation flag.
ALL_EVENTS = reduce(lambda x, y: x | y, EventsCodes.OP_FLAGS.values())
EventsCodes.ALL_FLAGS['ALL_EVENTS'] = ALL_EVENTS
EventsCodes.ALL_VALUES[ALL_EVENTS] = 'ALL_EVENTS'
class _Event:
    """
    Base event structure: holds the attributes describing an event
    raised by the system. Meant to be subclassed.
    """
    def __init__(self, dict_):
        """
        Attach every (name, value) pair contained in dict_ to self.

        @param dict_: Set of attributes.
        @type dict_: dictionary
        """
        for attr_name, attr_value in dict_.items():
            setattr(self, attr_name, attr_value)
    def __repr__(self):
        """
        @return: Generic event string representation.
        @rtype: str
        """
        body = ''
        for attr, value in sorted(self.__dict__.items(), key=lambda x: x[0]):
            # Private attributes are not part of the representation.
            if attr.startswith('_'):
                continue
            if attr == 'mask':
                value = hex(getattr(self, attr))
            elif isinstance(value, str) and not value:
                value = "''"
            body += ' %s%s%s' % (output_format.field_name(attr),
                                 output_format.punctuation('='),
                                 output_format.field_value(value))
        return '%s%s%s %s' % (output_format.punctuation('<'),
                              output_format.class_name(self.__class__.__name__),
                              body,
                              output_format.punctuation('>'))
    def __str__(self):
        return repr(self)
class _RawEvent(_Event):
    """
    Raw event: carries only the information provided by the system, no
    inference is performed.
    """
    def __init__(self, wd, mask, cookie, name):
        """
        @param wd: Watch Descriptor.
        @type wd: int
        @param mask: Bitmask of events.
        @type mask: int
        @param cookie: Cookie.
        @type cookie: int
        @param name: Basename of the file or directory against which the
                     event was raised in case where the watched directory
                     is the parent directory. None if the event was raised
                     on the watched item itself.
        @type name: string or None
        """
        # Cache for str(self): this object is immutable so the string is
        # computed at most once.
        self._str = None
        # The kernel pads 'name' with trailing '\0' bytes: strip them.
        _Event.__init__(self, {'wd': wd,
                               'mask': mask,
                               'cookie': cookie,
                               'name': name.rstrip('\0')})
        log.debug(str(self))
    def __str__(self):
        if self._str is None:
            self._str = _Event.__str__(self)
        return self._str
class Event(_Event):
    """
    Gathers all the useful information about the observed event. The
    presence of each field is not guaranteed: some fields are irrelevant
    for some kinds of event (e.g. 'cookie' is meaningless for IN_CREATE
    whereas it is mandatory for IN_MOVE_TO).

    The possible fields are:
      - wd (int): Watch Descriptor.
      - mask (int): Mask.
      - maskname (str): Readable event name.
      - path (str): path of the file or directory being watched.
      - name (str): Basename of the file or directory against which the
              event was raised in case where the watched directory
              is the parent directory. None if the event was raised
              on the watched item itself. This field is always provided
              even if the string is ''.
      - pathname (str): Concatenation of 'path' and 'name'.
      - src_pathname (str): Only present for IN_MOVED_TO events and only in
        the case where IN_MOVED_FROM events are watched too. Holds the
        source pathname from where pathname was moved from.
      - cookie (int): Cookie.
      - dir (bool): True if the event was raised against a directory.
    """
    def __init__(self, raw):
        """
        Concretely, this is the raw event plus inferred infos.
        """
        _Event.__init__(self, raw)
        self.maskname = EventsCodes.maskname(self.mask)
        if COMPATIBILITY_MODE:
            self.event_name = self.maskname
        try:
            if self.name:
                full = os.path.join(self.path, self.name)
            else:
                full = self.path
            self.pathname = os.path.abspath(full)
        except AttributeError as err:
            # Not necessarily an error: some events are perfectly valid
            # despite lacking these attributes.
            log.debug(err)
class ProcessEventError(PyinotifyError):
    """
    Raised when an event cannot be dispatched by ProcessEvent.
    """
    def __init__(self, err):
        """
        @param err: Exception error description.
        @type err: string
        """
        super().__init__(err)
class _ProcessEvent:
    """
    Abstract processing event class.
    """
    def __call__(self, event):
        """
        To behave like a functor the object must be callable.
        Dispatches |event| to the most specific handler available:
          1. process_MASKNAME method
          2. process_FAMILY_NAME method
          3. otherwise calls process_default

        @param event: Event to be processed.
        @type event: Event object
        @return: By convention when used from the ProcessEvent class:
                 - Returning False or None (default value) means keep on
                 executing next chained functors (see chain.py example).
                 - Returning True instead means do not execute next
                 processing functions.
        @rtype: bool
        @raise ProcessEventError: Event object undispatchable,
                                  unknown event.
        """
        # IN_ISDIR is piggybacked on the actual event: strip it so the
        # remaining bits can be mapped to an event name.
        stripped_mask = event.mask - (event.mask & IN_ISDIR)
        maskname = EventsCodes.ALL_VALUES.get(stripped_mask)
        if maskname is None:
            raise ProcessEventError("Unknown mask 0x%08x" % stripped_mask)
        # 1- exact handler, e.g. process_IN_CLOSE_WRITE.
        handler = getattr(self, 'process_' + maskname, None)
        if handler is None:
            # 2- family handler, e.g. process_IN_CLOSE.
            handler = getattr(self, 'process_IN_' + maskname.split('_')[1],
                              None)
        if handler is not None:
            return handler(event)
        # 3- catch-all handler.
        return self.process_default(event)
    def __repr__(self):
        return '<%s>' % self.__class__.__name__
class _SysProcessEvent(_ProcessEvent):
    """
    There is three kind of processing according to each event:

      1. special handling (deletion from internal container, bug, ...).
      2. default treatment: which is applied to the majority of events.
      3. IN_ISDIR is never sent alone, he is piggybacked with a standard
         event, he is not processed as the others events, instead, its
         value is captured and appropriately aggregated to dst event.
    """
    def __init__(self, wm, notifier):
        """

        @param wm: Watch Manager.
        @type wm: WatchManager instance
        @param notifier: Notifier.
        @type notifier: Notifier instance
        """
        self._watch_manager = wm  # watch manager
        self._notifier = notifier  # notifier
        # Bookkeeping for correlating IN_MOVED_FROM/IN_MOVED_TO pairs.
        self._mv_cookie = {}  # {cookie(int): (src_path(str), date), ...}
        self._mv = {}  # {src_path(str): (dst_path(str), date), ...}
    def cleanup(self):
        """
        Cleanup (delete) old (>1mn) records contained in self._mv_cookie
        and self._mv.
        """
        date_cur_ = datetime.now()
        for seq in (self._mv_cookie, self._mv):
            # Iterate over a snapshot of the keys: entries are deleted
            # from seq during the loop.
            for k in list(seq.keys()):
               if (date_cur_ - seq[k][1]) > timedelta(minutes=1):
                   log.debug('Cleanup: deleting entry %s', seq[k][0])
                   del seq[k]
    def process_IN_CREATE(self, raw_event):
        """
        If the event affects a directory and the auto_add flag of the
        targetted watch is set to True, a new watch is added on this
        new directory, with the same attribute values than those of
        this watch.
        """
        if raw_event.mask & IN_ISDIR:
            watch_ = self._watch_manager.get_watch(raw_event.wd)
            created_dir = os.path.join(watch_.path, raw_event.name)
            if watch_.auto_add and not watch_.exclude_filter(created_dir):
                addw = self._watch_manager.add_watch
                # The newly monitored directory inherits attributes from its
                # parent directory.
                addw_ret = addw(created_dir, watch_.mask,
                                proc_fun=watch_.proc_fun,
                                rec=False, auto_add=watch_.auto_add,
                                exclude_filter=watch_.exclude_filter)
                # Trick to handle mkdir -p /d1/d2/t3 where d1 is watched and
                # d2 and t3 (directory or file) are created.
                # Since the directory d2 is new, then everything inside it must
                # also be new.
                created_dir_wd = addw_ret.get(created_dir)
                if (created_dir_wd is not None) and (created_dir_wd > 0):
                    for name in os.listdir(created_dir):
                        inner = os.path.join(created_dir, name)
                        # Skip entries the watch manager already knows about.
                        if self._watch_manager.get_wd(inner) is not None:
                            continue
                        # Generate (simulate) creation events for sub-
                        # directories and files.
                        if os.path.isfile(inner):
                            # symlinks are handled as files.
                            flags = IN_CREATE
                        elif os.path.isdir(inner):
                            flags = IN_CREATE | IN_ISDIR
                        else:
                            # This path should not be taken.
                            continue
                        rawevent = _RawEvent(created_dir_wd, flags, 0, name)
                        self._notifier.append_event(rawevent)
        return self.process_default(raw_event)
    def process_IN_MOVED_FROM(self, raw_event):
        """
        Map the cookie with the source path (+ date for cleaning).
        """
        watch_ = self._watch_manager.get_watch(raw_event.wd)
        path_ = watch_.path
        src_path = os.path.normpath(os.path.join(path_, raw_event.name))
        self._mv_cookie[raw_event.cookie] = (src_path, datetime.now())
        return self.process_default(raw_event, {'cookie': raw_event.cookie})
    def process_IN_MOVED_TO(self, raw_event):
        """
        Map the source path with the destination path (+ date for
        cleaning).
        """
        watch_ = self._watch_manager.get_watch(raw_event.wd)
        path_ = watch_.path
        dst_path = os.path.normpath(os.path.join(path_, raw_event.name))
        mv_ = self._mv_cookie.get(raw_event.cookie)
        to_append = {'cookie': raw_event.cookie}
        if mv_ is not None:
            self._mv[mv_[0]] = (dst_path, datetime.now())
            # Let's assume that IN_MOVED_FROM event is always queued before
            # that its associated (they share a common cookie) IN_MOVED_TO
            # event is queued itself. It is then possible in that scenario
            # to provide as additional information to the IN_MOVED_TO event
            # the original pathname of the moved file/directory.
            to_append['src_pathname'] = mv_[0]
        elif (raw_event.mask & IN_ISDIR and watch_.auto_add and
              not watch_.exclude_filter(dst_path)):
            # We got a diretory that's "moved in" from an unknown source and
            # auto_add is enabled. Manually add watches to the inner subtrees.
            # The newly monitored directory inherits attributes from its
            # parent directory.
            self._watch_manager.add_watch(dst_path, watch_.mask,
                                          proc_fun=watch_.proc_fun,
                                          rec=True, auto_add=True,
                                          exclude_filter=watch_.exclude_filter)
        return self.process_default(raw_event, to_append)
    def process_IN_MOVE_SELF(self, raw_event):
        """
        STATUS: the following bug has been fixed in recent kernels (FIXME:
        which version ?). Now it raises IN_DELETE_SELF instead.

        Old kernels were bugged, this event raised when the watched item
        were moved, so we had to update its path, but under some circumstances
        it was impossible: if its parent directory and its destination
        directory wasn't watched. The kernel (see include/linux/fsnotify.h)
        doesn't bring us enough informations like the destination path of
        moved items.
        """
        watch_ = self._watch_manager.get_watch(raw_event.wd)
        src_path = watch_.path
        mv_ = self._mv.get(src_path)
        if mv_:
            dest_path = mv_[0]
            watch_.path = dest_path
            # add the separator to the source path to avoid overlapping
            # path issue when testing with startswith()
            src_path += os.path.sep
            src_path_len = len(src_path)
            # The next loop renames all watches with src_path as base path.
            # It seems that IN_MOVE_SELF does not provide IN_ISDIR information
            # therefore the next loop is iterated even if raw_event is a file.
            for w in self._watch_manager.watches.values():
                if w.path.startswith(src_path):
                    # Note that dest_path is a normalized path.
                    w.path = os.path.join(dest_path, w.path[src_path_len:])
        else:
            # The destination is unknown: the watch path cannot be
            # repaired, mark it as untrustworthy.
            log.error("The pathname '%s' of this watch %s has probably changed "
                      "and couldn't be updated, so it cannot be trusted "
                      "anymore. To fix this error move directories/files only "
                      "between watched parents directories, in this case e.g. "
                      "put a watch on '%s'.",
                      watch_.path, watch_,
                      os.path.normpath(os.path.join(watch_.path,
                                                    os.path.pardir)))
            if not watch_.path.endswith('-unknown-path'):
                watch_.path += '-unknown-path'
        return self.process_default(raw_event)
    def process_IN_Q_OVERFLOW(self, raw_event):
        """
        Only signal an overflow, most of the common flags are irrelevant
        for this event (path, wd, name).
        """
        return Event({'mask': raw_event.mask})
    def process_IN_IGNORED(self, raw_event):
        """
        The watch descriptor raised by this event is now ignored (forever),
        it can be safely deleted from the watch manager dictionary.
        After this event we can be sure that neither the event queue nor
        the system will raise an event associated to this wd again.
        """
        event_ = self.process_default(raw_event)
        self._watch_manager.del_watch(raw_event.wd)
        return event_
    def process_default(self, raw_event, to_append=None):
        """
        Commons handling for the followings events:
        IN_ACCESS, IN_MODIFY, IN_ATTRIB, IN_CLOSE_WRITE, IN_CLOSE_NOWRITE,
        IN_OPEN, IN_DELETE, IN_DELETE_SELF, IN_UNMOUNT.
        """
        watch_ = self._watch_manager.get_watch(raw_event.wd)
        if raw_event.mask & (IN_DELETE_SELF | IN_MOVE_SELF):
            # Unfornulately this information is not provided by the kernel
            dir_ = watch_.dir
        else:
            dir_ = bool(raw_event.mask & IN_ISDIR)
        dict_ = {'wd': raw_event.wd,
                 'mask': raw_event.mask,
                 'path': watch_.path,
                 'name': raw_event.name,
                 'dir': dir_}
        if COMPATIBILITY_MODE:
            dict_['is_dir'] = dir_
        if to_append is not None:
            dict_.update(to_append)
        return Event(dict_)
class ProcessEvent(_ProcessEvent):
    """
    Process events objects, can be specialized via subclassing, thus its
    behavior can be overriden:

    Note: you should not override __init__ in your subclass instead define
    a my_init() method, this method will be called automatically from the
    constructor of this class with its optionals parameters.

      1. Provide specialized individual methods, e.g. process_IN_DELETE for
         processing a precise type of event (e.g. IN_DELETE in this case).
      2. Or/and provide methods for processing events by 'family', e.g.
         process_IN_CLOSE method will process both IN_CLOSE_WRITE and
         IN_CLOSE_NOWRITE events (if process_IN_CLOSE_WRITE and
         process_IN_CLOSE_NOWRITE aren't defined though).
      3. Or/and override process_default for catching and processing all
         the remaining types of events.
    """
    pevent = None
    def __init__(self, pevent=None, **kargs):
        """
        Enable chaining of ProcessEvent instances.

        @param pevent: Optional callable object, will be called on event
                       processing (before self).
        @type pevent: callable
        @param kargs: This constructor is implemented as a template method
                      delegating its optionals keyworded arguments to the
                      method my_init().
        @type kargs: dict
        """
        self.pevent = pevent
        self.my_init(**kargs)
    def my_init(self, **kargs):
        """
        Override this method in your subclass instead of __init__: it is
        invoked from ProcessEvent.__init__() with all the keyworded
        arguments passed there. Beware you MUST pass keyword arguments
        though. This default implementation does nothing.

        @param kargs: optional delegated arguments from __init__().
        @type kargs: dict
        """
        pass
    def __call__(self, event):
        # The chained pre-processor may veto local handling: by
        # convention a non-None/non-False return value stops chaining.
        if self.pevent is not None and self.pevent(event):
            return None
        return _ProcessEvent.__call__(self, event)
    def nested_pevent(self):
        return self.pevent
    def process_IN_Q_OVERFLOW(self, event):
        """
        By default this method only reports warning messages, you can
        override it by subclassing ProcessEvent and implementing your own
        process_IN_Q_OVERFLOW method. On receiving this event you can
        either update the variable max_queued_events in order to handle
        more simultaneous events or improve your event filtering in order
        to diminish the number of raised events. Because this method is
        defined, IN_Q_OVERFLOW will never get transmitted as arguments
        to process_default calls.

        @param event: IN_Q_OVERFLOW event.
        @type event: dict
        """
        log.warning('Event queue overflowed.')
    def process_default(self, event):
        """
        Default processing event method. By default does nothing. Subclass
        ProcessEvent and redefine this method in order to modify its behavior.

        @param event: Event to be processed. Can be of any type of events but
                      IN_Q_OVERFLOW events (see method process_IN_Q_OVERFLOW).
        @type event: Event instance
        """
        pass
class PrintAllEvents(ProcessEvent):
    """
    Dummy class used to print events strings representations. For instance this
    class is used from command line to print all received events to stdout.
    """
    def my_init(self, out=None):
        """
        @param out: Where events will be written.
        @type out: Object providing a valid file object interface.
        """
        self._out = sys.stdout if out is None else out
    def process_default(self, event):
        """
        Writes the event string representation followed by a newline to
        the output object given to my_init(), then flushes it.

        @param event: Event to be processed. Can be of any type of events but
                      IN_Q_OVERFLOW events (see method process_IN_Q_OVERFLOW).
        @type event: Event instance
        """
        out = self._out
        out.write(str(event))
        out.write('\n')
        out.flush()
class ChainIfTrue(ProcessEvent):
    """
    Makes conditional chaining depending on the result of the nested
    processing instance.
    """
    def my_init(self, func):
        """Store the predicate evaluated by process_default()."""
        self._func = func
    def process_default(self, event):
        # Stop the chain (truthy return) only when the predicate fails.
        return not self._func(event)
class Stats(ProcessEvent):
    """
    Compute and display trivial statistics about processed events.
    """
    def my_init(self):
        """
        Method automatically called from base class constructor.
        """
        self._start_time = time.time()
        # Per-event-name counters: {event_name(str): count(int)}.
        self._stats = {}
        # Protects self._stats: events may be processed from another thread.
        self._stats_lock = threading.Lock()
    def process_default(self, event):
        """
        Processes |event|: increments one counter per event name found in
        the (possibly compound, '|'-separated) maskname.
        """
        self._stats_lock.acquire()
        try:
            events = event.maskname.split('|')
            for event_name in events:
                count = self._stats.get(event_name, 0)
                self._stats[event_name] = count + 1
        finally:
            self._stats_lock.release()
    def _stats_copy(self):
        # Snapshot of the counters, taken under the lock.
        self._stats_lock.acquire()
        try:
            return self._stats.copy()
        finally:
            self._stats_lock.release()
    def __repr__(self):
        stats = self._stats_copy()
        # Render the elapsed time with units adapted to its magnitude.
        elapsed = int(time.time() - self._start_time)
        elapsed_str = ''
        if elapsed < 60:
            elapsed_str = str(elapsed) + 'sec'
        elif 60 <= elapsed < 3600:
            elapsed_str = '%dmn%dsec' % (elapsed / 60, elapsed % 60)
        elif 3600 <= elapsed < 86400:
            elapsed_str = '%dh%dmn' % (elapsed / 3600, (elapsed % 3600) / 60)
        elif elapsed >= 86400:
            elapsed_str = '%dd%dh' % (elapsed / 86400, (elapsed % 86400) / 3600)
        stats['ElapsedTime'] = elapsed_str
        l = []
        for ev, value in sorted(stats.items(), key=lambda x: x[0]):
            l.append(' %s=%s' % (output_format.field_name(ev),
                                 output_format.field_value(value)))
        s = '<%s%s >' % (output_format.class_name(self.__class__.__name__),
                         ''.join(l))
        return s
    def dump(self, filename):
        """
        Dumps statistics.

        @param filename: filename where stats will be dumped, filename is
                         created and must not exist prior to this call.
        @type filename: string
        """
        # O_EXCL|O_NOFOLLOW: refuse to clobber an existing file or to
        # follow a symlink; the file is created with mode 0600.
        flags = os.O_WRONLY|os.O_CREAT|os.O_NOFOLLOW|os.O_EXCL
        fd = os.open(filename, flags, 0o0600)
        os.write(fd, bytes(self.__str__(), locale.getpreferredencoding()))
        os.close(fd)
    def __str__(self, scale=45):
        # ASCII bar chart of the counters, widest bar scaled to |scale|.
        stats = self._stats_copy()
        if not stats:
            return ''
        m = max(stats.values())
        unity = scale / m
        fmt = '%%-26s%%-%ds%%s' % (len(output_format.field_value('@' * scale))
                                   + 1)
        def func(x):
            return fmt % (output_format.field_name(x[0]),
                          output_format.field_value('@' * int(x[1] * unity)),
                          output_format.simple('%d' % x[1], 'yellow'))
        s = '\n'.join(map(func, sorted(stats.items(), key=lambda x: x[0])))
        return s
class NotifierError(PyinotifyError):
    """
    Notifier Exception. Raised on Notifier error.
    """
    def __init__(self, err):
        """
        @param err: Exception string's description.
        @type err: string
        """
        super().__init__(err)
class Notifier:
    """
    Read notifications, process events.
    """
    def __init__(self, watch_manager, default_proc_fun=None, read_freq=0,
                 threshold=0, timeout=None):
        """
        Initialization. read_freq, threshold and timeout parameters are used
        when looping.
        @param watch_manager: Watch Manager.
        @type watch_manager: WatchManager instance
        @param default_proc_fun: Default processing method. If None, a new
                                 instance of PrintAllEvents will be assigned.
        @type default_proc_fun: instance of ProcessEvent
        @param read_freq: if read_freq == 0, events are read asap,
                          if read_freq is > 0, this thread sleeps
                          max(0, read_freq - timeout) seconds. But if
                          timeout is None it may be different because
                          poll is blocking waiting for something to read.
        @type read_freq: int
        @param threshold: File descriptor will be read only if the accumulated
                          size to read becomes >= threshold. If != 0, you likely
                          want to use it in combination with an appropriate
                          value for read_freq because without that you would
                          keep looping without really reading anything and that
                          until the amount of events to read is >= threshold.
                          At least with read_freq set you might sleep.
        @type threshold: int
        @param timeout: poll timeout in milliseconds, see
            http://docs.python.org/lib/poll-objects.html#poll-objects
        @type timeout: int
        """
        # Watch Manager instance
        self._watch_manager = watch_manager
        # File descriptor of the inotify instance, owned by watch_manager
        self._fd = self._watch_manager.get_fd()
        # Poll object and registration
        self._pollobj = select.poll()
        self._pollobj.register(self._fd, select.POLLIN)
        # This pipe is correctly initialized and used by ThreadedNotifier
        # only; (-1, -1) marks it unused in the plain Notifier.
        self._pipe = (-1, -1)
        # Queue of _RawEvent objects waiting to be processed
        self._eventq = deque()
        # System processing functor, common to all events: keeps internal
        # state (e.g. watches added for new subdirectories) up to date.
        self._sys_proc_fun = _SysProcessEvent(self._watch_manager, self)
        # Default processing method, used when a watch has no proc_fun
        self._default_proc_fun = default_proc_fun
        if default_proc_fun is None:
            self._default_proc_fun = PrintAllEvents()
        # Loop parameters
        self._read_freq = read_freq
        self._threshold = threshold
        self._timeout = timeout
        # Coalesce events option (see coalesce_events)
        self._coalesce = False
        # set of str(raw_event), only used when coalesce option is True
        self._eventset = set()

    def append_event(self, event):
        """
        Append a raw event to the event queue.
        @param event: An event.
        @type event: _RawEvent instance.
        """
        self._eventq.append(event)

    def proc_fun(self):
        """Return the default processing method set at construction."""
        return self._default_proc_fun

    def coalesce_events(self, coalesce=True):
        """
        Coalescing events. Events are usually processed by batchs, their size
        depend on various factors. Thus, before processing them, events received
        from inotify are aggregated in a fifo queue. If this coalescing
        option is enabled events are filtered based on their unicity, only
        unique events are enqueued, doublons are discarded. An event is unique
        when the combination of its fields (wd, mask, cookie, name) is unique
        among events of a same batch. After a batch of events is processed any
        events is accepted again. By default this option is disabled, you have
        to explictly call this function to turn it on.
        @param coalesce: Optional new coalescing value. True by default.
        @type coalesce: Bool
        """
        self._coalesce = coalesce
        if not coalesce:
            self._eventset.clear()

    def check_events(self, timeout=None):
        """
        Check for new events available to read, blocks up to timeout
        milliseconds.
        @param timeout: If specified it overrides the corresponding instance
                        attribute _timeout.
        @type timeout: int
        @return: New events to read.
        @rtype: bool
        """
        while True:
            try:
                # blocks up to 'timeout' milliseconds
                if timeout is None:
                    timeout = self._timeout
                ret = self._pollobj.poll(timeout)
            except select.error as err:
                if err.args[0] == errno.EINTR:
                    continue  # poll interrupted by a signal, retry
                else:
                    raise
            else:
                break

        # A readiness event on the pipe's read end means ThreadedNotifier
        # is being stopped, not that inotify data is available.
        if not ret or (self._pipe[0] == ret[0][0]):
            return False
        # only one fd is polled
        return ret[0][1] & select.POLLIN

    def read_events(self):
        """
        Read events from device, build _RawEvents, and enqueue them.
        """
        buf_ = array.array('i', [0])
        # FIONREAD: ask the kernel how many bytes are pending on the fd
        if fcntl.ioctl(self._fd, termios.FIONREAD, buf_, 1) == -1:
            return
        queue_size = buf_[0]
        if queue_size < self._threshold:
            log.debug('(fd: %d) %d bytes available to read but threshold is '
                      'fixed to %d bytes', self._fd, queue_size,
                      self._threshold)
            return

        try:
            # Read content from file
            r = os.read(self._fd, queue_size)
        except Exception as msg:
            raise NotifierError(msg)
        log.debug('Event queue size: %d', queue_size)
        rsum = 0  # offset of the next struct inotify_event in the buffer
        while rsum < queue_size:
            # Fixed-size header of struct inotify_event:
            # int wd, uint32 mask, uint32 cookie, uint32 len (name length).
            s_size = 16
            # Retrieve wd, mask, cookie and fname_len
            wd, mask, cookie, fname_len = struct.unpack('iIII',
                                                        r[rsum:rsum+s_size])
            # Retrieve name: fname_len bytes follow the header, possibly
            # NUL-padded.
            bname, = struct.unpack('%ds' % fname_len,
                                   r[rsum + s_size:rsum + s_size + fname_len])
            # FIXME: should we explictly call sys.getdefaultencoding() here ??
            uname = bname.decode()
            rawevent = _RawEvent(wd, mask, cookie, uname)
            if self._coalesce:
                # Only enqueue new (unique) events.
                raweventstr = str(rawevent)
                if raweventstr not in self._eventset:
                    self._eventset.add(raweventstr)
                    self._eventq.append(rawevent)
            else:
                self._eventq.append(rawevent)
            rsum += s_size + fname_len

    def process_events(self):
        """
        Routine for processing events from queue by calling their
        associated processing method (an instance of ProcessEvent).
        It also does internal processing, to keep the system updated.
        """
        while self._eventq:
            raw_event = self._eventq.popleft()  # pop next event
            watch_ = self._watch_manager.get_watch(raw_event.wd)
            if (watch_ is None) and not (raw_event.mask & IN_Q_OVERFLOW):
                if not (raw_event.mask & IN_IGNORED):
                    # Not really sure how we ended up here, nor how we should
                    # handle these types of events and if it is appropriate to
                    # completly skip them (like we are doing here).
                    log.warning("Unable to retrieve Watch object associated to %s",
                                repr(raw_event))
                continue
            revent = self._sys_proc_fun(raw_event)  # system processings
            if watch_ and watch_.proc_fun:
                watch_.proc_fun(revent)  # user processings
            else:
                self._default_proc_fun(revent)
        self._sys_proc_fun.cleanup()  # remove olds MOVED_* events records
        if self._coalesce:
            # The batch is done: accept previously-seen events again.
            self._eventset.clear()

    def __daemonize(self, pid_file=None, stdin=os.devnull, stdout=os.devnull,
                    stderr=os.devnull):
        """
        Turn the current process into a daemon (double fork, new session,
        streams redirected).
        pid_file: file where the pid will be written. If pid_file=None the pid
                  is written to /var/run/<sys.argv[0]|pyinotify>.pid, if
                  pid_file=False no pid_file is written.
        stdin, stdout, stderr: files associated to common streams.
        """
        if pid_file is None:
            dirname = '/var/run/'
            basename = os.path.basename(sys.argv[0]) or 'pyinotify'
            pid_file = os.path.join(dirname, basename + '.pid')

        if pid_file != False and os.path.lexists(pid_file):
            err = 'Cannot daemonize: pid file %s already exists.' % pid_file
            raise NotifierError(err)

        def fork_daemon():
            # Adapted from Chad J. Schroeder's recipe
            # @see http://code.activestate.com/recipes/278731/
            pid = os.fork()
            if (pid == 0):
                # first child: become session leader, then fork again so
                # the final process cannot reacquire a controlling terminal
                os.setsid()
                pid = os.fork()
                if (pid == 0):
                    # grandchild: the daemon itself
                    os.chdir('/')
                    os.umask(0o022)
                else:
                    # first child exits
                    os._exit(0)
            else:
                # original parent exits
                os._exit(0)

            # Redirect the standard streams onto the given files.
            fd_inp = os.open(stdin, os.O_RDONLY)
            os.dup2(fd_inp, 0)
            fd_out = os.open(stdout, os.O_WRONLY|os.O_CREAT, 0o0600)
            os.dup2(fd_out, 1)
            fd_err = os.open(stderr, os.O_WRONLY|os.O_CREAT, 0o0600)
            os.dup2(fd_err, 2)

        # Detach task
        fork_daemon()

        # Write pid
        if pid_file != False:
            flags = os.O_WRONLY|os.O_CREAT|os.O_NOFOLLOW|os.O_EXCL
            fd_pid = os.open(pid_file, flags, 0o0600)
            os.write(fd_pid, bytes(str(os.getpid()) + '\n',
                                   locale.getpreferredencoding()))
            os.close(fd_pid)
            # Register unlink function
            atexit.register(lambda : os.unlink(pid_file))

    def _sleep(self, ref_time):
        # Only consider sleeping if read_freq is > 0
        if self._read_freq > 0:
            cur_time = time.time()
            # Sleep whatever remains of the read_freq period after the time
            # already spent since ref_time.
            sleep_amount = self._read_freq - (cur_time - ref_time)
            if sleep_amount > 0:
                log.debug('Now sleeping %d seconds', sleep_amount)
                time.sleep(sleep_amount)

    def loop(self, callback=None, daemonize=False, **args):
        """
        Events are read only one time every min(read_freq, timeout)
        seconds at best and only if the size to read is >= threshold.
        After this method returns it must not be called again for the same
        instance.
        @param callback: Functor called after each event processing iteration.
                         Expects to receive the notifier object (self) as first
                         parameter. If this function returns True the loop is
                         immediately terminated otherwise the loop method keeps
                         looping.
        @type callback: callable object or function
        @param daemonize: This thread is daemonized if set to True.
        @type daemonize: boolean
        @param args: Optional and relevant only if daemonize is True. Remaining
                     keyworded arguments are directly passed to daemonize see
                     __daemonize() method. If pid_file=None or is set to a
                     pathname the caller must ensure the file does not exist
                     before this method is called otherwise an exception
                     pyinotify.NotifierError will be raised. If pid_file=False
                     it is still daemonized but the pid is not written in any
                     file.
        @type args: various
        """
        if daemonize:
            self.__daemonize(**args)

        # Read and process events forever
        while 1:
            try:
                self.process_events()
                if (callback is not None) and (callback(self) is True):
                    break
                ref_time = time.time()
                # check_events is blocking
                if self.check_events():
                    self._sleep(ref_time)
                    self.read_events()
            except KeyboardInterrupt:
                # Stop monitoring if sigint is caught (Control-C).
                log.debug('Pyinotify stops monitoring.')
                break
        # Close internals
        self.stop()

    def stop(self):
        """
        Close inotify's instance (close its file descriptor).
        It destroys all existing watches, pending events,...
        This method is automatically called at the end of loop().
        """
        self._pollobj.unregister(self._fd)
        os.close(self._fd)
class ThreadedNotifier(threading.Thread, Notifier):
    """
    This notifier inherits from threading.Thread for instanciating a separate
    thread, and also inherits from Notifier, because it is a threaded notifier.
    Note that every functionality provided by this class is also provided
    through Notifier class. Moreover Notifier should be considered first because
    it is not threaded and could be easily daemonized.
    """
    def __init__(self, watch_manager, default_proc_fun=None, read_freq=0,
                 threshold=0, timeout=None):
        """
        Initialization, initialize base classes. read_freq, threshold and
        timeout parameters are used when looping.
        @param watch_manager: Watch Manager.
        @type watch_manager: WatchManager instance
        @param default_proc_fun: Default processing method. See base class.
        @type default_proc_fun: instance of ProcessEvent
        @param read_freq: if read_freq == 0, events are read asap,
                          if read_freq is > 0, this thread sleeps
                          max(0, read_freq - timeout) seconds.
        @type read_freq: int
        @param threshold: File descriptor will be read only if the accumulated
                          size to read becomes >= threshold. If != 0, you likely
                          want to use it in combination with an appropriate
                          value set for read_freq because without that you would
                          keep looping without really reading anything and that
                          until the amount of events to read is >= threshold. At
                          least with read_freq you might sleep.
        @type threshold: int
        @param timeout: poll timeout in milliseconds,
                        see http://docs.python.org/lib/poll-objects.html#poll-objects
        @type timeout: int
        """
        # Init threading base class
        threading.Thread.__init__(self)
        # Stop condition checked by loop()
        self._stop_event = threading.Event()
        # Init Notifier base class
        Notifier.__init__(self, watch_manager, default_proc_fun, read_freq,
                          threshold, timeout)
        # Create a new pipe used for thread termination: writing to it wakes
        # up the poll() in check_events so the loop can observe _stop_event.
        self._pipe = os.pipe()
        self._pollobj.register(self._pipe[0], select.POLLIN)

    def stop(self):
        """
        Stop notifier's loop. Stop notification. Join the thread.
        """
        self._stop_event.set()
        # Wake up the blocked poll() so the thread notices the stop request.
        os.write(self._pipe[1], b'stop')
        threading.Thread.join(self)
        Notifier.stop(self)
        self._pollobj.unregister(self._pipe[0])
        os.close(self._pipe[0])
        os.close(self._pipe[1])

    def loop(self):
        """
        Thread's main loop. Not meant to be called by user directly.
        Call inherited start() method instead.
        Events are read only once time every min(read_freq, timeout)
        seconds at best and only if the size of events to read is >= threshold.
        """
        # When the loop must be terminated .stop() is called, 'stop'
        # is written to pipe fd so poll() returns and .check_events()
        # returns False which make evaluate the While's stop condition
        # ._stop_event.isSet() wich put an end to the thread's execution.
        # (isSet() is the pre-3.9 spelling of Event.is_set().)
        while not self._stop_event.isSet():
            self.process_events()
            ref_time = time.time()
            if self.check_events():
                self._sleep(ref_time)
                self.read_events()

    def run(self):
        """
        Start thread's loop: read and process events until the method
        stop() is called.
        Never call this method directly, instead call the start() method
        inherited from threading.Thread, which then will call run() in
        its turn.
        """
        self.loop()
class AsyncNotifier(asyncore.file_dispatcher, Notifier):
    """
    This notifier inherits from asyncore.file_dispatcher in order to be able to
    use pyinotify along with the asyncore framework.
    """
    def __init__(self, watch_manager, default_proc_fun=None, read_freq=0,
                 threshold=0, timeout=None, channel_map=None):
        """
        Initializes the async notifier. The only additional parameter is
        'channel_map' which is the optional asyncore private map. See
        Notifier class for the meaning of the others parameters.
        """
        # Notifier must be initialized first: file_dispatcher registration
        # below needs self._fd which Notifier.__init__ sets up.
        Notifier.__init__(self, watch_manager, default_proc_fun, read_freq,
                          threshold, timeout)
        asyncore.file_dispatcher.__init__(self, self._fd, channel_map)

    def handle_read(self):
        """
        When asyncore tells us we can read from the fd, we proceed processing
        events. This method can be overridden for handling a notification
        differently.
        """
        self.read_events()
        self.process_events()
class TornadoAsyncNotifier(Notifier):
    """
    Tornado ioloop adapter.
    """
    def __init__(self, watch_manager, ioloop, callback=None,
                 default_proc_fun=None, read_freq=0, threshold=0, timeout=None,
                 channel_map=None):
        """
        Note that if later you must call ioloop.close() be sure to let the
        default parameter to all_fds=False.
        See example tornado_notifier.py for an example using this notifier.
        @param ioloop: Tornado's IO loop.
        @type ioloop: tornado.ioloop.IOLoop instance.
        @param callback: Functor called at the end of each call to handle_read
                         (IOLoop's read handler). Expects to receive the
                         notifier object (self) as single parameter.
        @type callback: callable object or function
        """
        self.io_loop = ioloop
        self.handle_read_callback = callback
        # Notifier.__init__ sets up self._fd, which is then registered with
        # the ioloop so handle_read fires when inotify data is readable.
        Notifier.__init__(self, watch_manager, default_proc_fun, read_freq,
                          threshold, timeout)
        ioloop.add_handler(self._fd, self.handle_read, ioloop.READ)

    def handle_read(self, *args, **kwargs):
        """
        See comment in AsyncNotifier.
        """
        self.read_events()
        self.process_events()
        if self.handle_read_callback is not None:
            self.handle_read_callback(self)
class Watch:
    """
    Represent a watch, i.e. a file or directory being watched.
    """
    __slots__ = ('wd', 'path', 'mask', 'proc_fun', 'auto_add',
                 'exclude_filter', 'dir')

    def __init__(self, wd, path, mask, proc_fun, auto_add, exclude_filter):
        """
        Initializations.
        @param wd: Watch descriptor.
        @type wd: int
        @param path: Path of the file or directory being watched.
        @type path: str
        @param mask: Mask.
        @type mask: int
        @param proc_fun: Processing callable object.
        @type proc_fun:
        @param auto_add: Automatically add watches on new directories.
        @type auto_add: bool
        @param exclude_filter: Boolean function, used to exclude new
                               directories from being automatically watched.
                               See WatchManager.__init__
        @type exclude_filter: callable object
        """
        self.wd = wd
        self.path = path
        self.mask = mask
        self.proc_fun = proc_fun
        self.auto_add = auto_add
        self.exclude_filter = exclude_filter
        # Cached at construction: whether the watched path is a directory.
        self.dir = os.path.isdir(self.path)

    def __repr__(self):
        """
        @return: String representation.
        @rtype: str
        """
        pairs = []
        for attr in self.__slots__:
            if attr.startswith('_'):
                continue
            pairs.append('%s%s%s' % (output_format.field_name(attr),
                                     output_format.punctuation('='),
                                     output_format.field_value(getattr(self,
                                                                       attr))))
        return '%s%s %s %s' % (output_format.punctuation('<'),
                               output_format.class_name(self.__class__.__name__),
                               ' '.join(pairs),
                               output_format.punctuation('>'))
class ExcludeFilter:
    """
    ExcludeFilter is an exclusion filter: a callable which matches paths
    against a list of regular expressions.
    """
    def __init__(self, arg_lst):
        """
        Examples:
          ef1 = ExcludeFilter(["^/etc/rc.*", "^/etc/hostname"])
          ef2 = ExcludeFilter("/my/path/exclude.lst")
        Where exclude.lst contains:
          ^/etc/rc.*
          ^/etc/hostname
        Note: it is not possible to exclude a file if its encapsulating
        directory is itself watched. See this issue for more details
        https://github.com/seb-m/pyinotify/issues/31
        @param arg_lst: is either a list of patterns or a filename from which
                        patterns will be loaded.
        @type arg_lst: list of str or str
        """
        if isinstance(arg_lst, str):
            patterns = self._load_patterns_from_file(arg_lst)
        elif isinstance(arg_lst, list):
            patterns = arg_lst
        else:
            raise TypeError
        self._lregex = [re.compile(pattern, re.UNICODE)
                        for pattern in patterns]

    def _load_patterns_from_file(self, filename):
        # One pattern per line; blank lines and '#' comments are skipped.
        patterns = []
        with open(filename, 'r') as file_obj:
            for line in file_obj.readlines():
                stripped = line.strip()
                if stripped and not stripped.startswith('#'):
                    patterns.append(stripped)
        return patterns

    def _match(self, regex, path):
        return regex.match(path) is not None

    def __call__(self, path):
        """
        @param path: Path to match against provided regexps.
        @type path: str
        @return: Return True if path has been matched and should
                 be excluded, False otherwise.
        @rtype: bool
        """
        return any(self._match(regex, path) for regex in self._lregex)
class WatchManagerError(Exception):
    """
    WatchManager Exception. Raised on error encountered on watches
    operations.
    """
    def __init__(self, msg, wmd):
        """
        @param msg: Exception string's description.
        @type msg: string
        @param wmd: This dictionary contains the wd assigned to paths of the
                    same call for which watches were successfully added.
        @type wmd: dict
        """
        super().__init__(msg)
        self.wmd = wmd
class WatchManager:
    """
    Provide operations for watching files and directories. Its internal
    dictionary is used to reference watched items. When used inside
    threaded code, one must instanciate as many WatchManager instances as
    there are ThreadedNotifier instances.
    """
    def __init__(self, exclude_filter=lambda path: False):
        """
        Initialization: init inotify, init watch manager dictionary.
        Raise OSError if initialization fails, raise InotifyBindingNotFoundError
        if no inotify binding was found (through ctypes or from direct access to
        syscalls).
        @param exclude_filter: boolean function, returns True if current
                               path must be excluded from being watched.
                               Convenient for providing a common exclusion
                               filter for every call to add_watch.
        @type exclude_filter: callable object
        """
        self._exclude_filter = exclude_filter
        self._wmd = {}  # watch dict key: watch descriptor, value: watch
        self._inotify_wrapper = INotifyWrapper.create()
        if self._inotify_wrapper is None:
            raise InotifyBindingNotFoundError()
        self._fd = self._inotify_wrapper.inotify_init() # file descriptor
        if self._fd < 0:
            err = 'Cannot initialize new instance of inotify, %s'
            raise OSError(err % self._inotify_wrapper.str_errno())

    def close(self):
        """
        Close inotify's file descriptor, this action will also automatically
        remove (i.e. stop watching) all its associated watch descriptors.
        After a call to this method the WatchManager's instance become useless
        and cannot be reused, a new instance must then be instanciated. It
        makes sense to call this method in few situations for instance if
        several independant WatchManager must be instanciated or if all watches
        must be removed and no other watches need to be added.
        """
        os.close(self._fd)

    def get_fd(self):
        """
        Return assigned inotify's file descriptor.
        @return: File descriptor.
        @rtype: int
        """
        return self._fd

    def get_watch(self, wd):
        """
        Get watch from provided watch descriptor wd.
        @param wd: Watch descriptor.
        @type wd: int
        @return: The Watch instance, or None if wd is unknown.
        """
        return self._wmd.get(wd)

    def del_watch(self, wd):
        """
        Remove watch entry associated to watch descriptor wd.
        @param wd: Watch descriptor.
        @type wd: int
        """
        try:
            del self._wmd[wd]
        except KeyError as err:
            log.error('Cannot delete unknown watch descriptor %s' % str(err))

    @property
    def watches(self):
        """
        Get a reference on the internal watch manager dictionary.
        @return: Internal watch manager dictionary.
        @rtype: dict
        """
        return self._wmd

    def __format_path(self, path):
        """
        Format path to its internal (stored in watch manager) representation.
        """
        # path must be a unicode string (str) and is just normalized.
        return os.path.normpath(path)

    def __add_watch(self, path, mask, proc_fun, auto_add, exclude_filter):
        """
        Add a watch on path, build a Watch object and insert it in the
        watch manager dictionary. Return the wd value.
        """
        path = self.__format_path(path)
        # auto_add relies on receiving IN_CREATE for new subdirectories,
        # so that flag is forced into the mask.
        if auto_add and not mask & IN_CREATE:
            mask |= IN_CREATE
        wd = self._inotify_wrapper.inotify_add_watch(self._fd, path, mask)
        if wd < 0:
            # Negative wd is the wrapper's error signal; propagated to caller.
            return wd
        watch = Watch(wd=wd, path=path, mask=mask, proc_fun=proc_fun,
                      auto_add=auto_add, exclude_filter=exclude_filter)
        # wd are _always_ indexed with their original unicode paths in wmd.
        self._wmd[wd] = watch
        log.debug('New %s', watch)
        return wd

    def __glob(self, path, do_glob):
        # Expand shell-style wildcards only when globbing was requested.
        if do_glob:
            return glob.iglob(path)
        else:
            return [path]

    def add_watch(self, path, mask, proc_fun=None, rec=False,
                  auto_add=False, do_glob=False, quiet=True,
                  exclude_filter=None):
        """
        Add watch(s) on the provided |path|(s) with associated |mask| flag
        value and optionally with a processing |proc_fun| function and
        recursive flag |rec| set to True.
        All |path| components _must_ be str (i.e. unicode) objects.
        If |path| is already watched it is ignored, but if it is called with
        option rec=True a watch is put on each one of its not-watched
        subdirectory.
        @param path: Path to watch, the path can either be a file or a
                     directory. Also accepts a sequence (list) of paths.
        @type path: string or list of strings
        @param mask: Bitmask of events.
        @type mask: int
        @param proc_fun: Processing object.
        @type proc_fun: function or ProcessEvent instance or instance of
                        one of its subclasses or callable object.
        @param rec: Recursively add watches from path on all its
                    subdirectories, set to False by default (doesn't
                    follows symlinks in any case).
        @type rec: bool
        @param auto_add: Automatically add watches on newly created
                         directories in watched parent |path| directory.
                         If |auto_add| is True, IN_CREATE is ored with |mask|
                         when the watch is added.
        @type auto_add: bool
        @param do_glob: Do globbing on pathname (see standard globbing
                        module for more informations).
        @type do_glob: bool
        @param quiet: if False raises a WatchManagerError exception on
                      error. See example not_quiet.py.
        @type quiet: bool
        @param exclude_filter: predicate (boolean function), which returns
                               True if the current path must be excluded
                               from being watched. This argument has
                               precedence over exclude_filter passed to
                               the class' constructor.
        @type exclude_filter: callable object
        @return: dict of paths associated to watch descriptors. A wd value
                 is positive if the watch was added sucessfully, otherwise
                 the value is negative. If the path was invalid or was already
                 watched it is not included into this returned dictionary.
        @rtype: dict of {str: int}
        """
        ret_ = {} # return {path: wd, ...}

        if exclude_filter is None:
            exclude_filter = self._exclude_filter

        # normalize args as list elements
        for npath in self.__format_param(path):
            # Require that path be a unicode string
            if not isinstance(npath, str):
                # NOTE(review): keys ret_ on the original |path| argument,
                # not npath; if |path| is a list this raises TypeError
                # (unhashable) — confirm against upstream behavior.
                ret_[path] = -3
                continue

            # unix pathname pattern expansion
            for apath in self.__glob(npath, do_glob):
                # recursively list subdirs according to rec param
                for rpath in self.__walk_rec(apath, rec):
                    if not exclude_filter(rpath):
                        wd = ret_[rpath] = self.__add_watch(rpath, mask,
                                                            proc_fun,
                                                            auto_add,
                                                            exclude_filter)
                        if wd < 0:
                            err = ('add_watch: cannot watch %s WD=%d, %s' % \
                                       (rpath, wd,
                                        self._inotify_wrapper.str_errno()))
                            if quiet:
                                log.error(err)
                            else:
                                raise WatchManagerError(err, ret_)
                    else:
                        # Let's say -2 means 'explicitely excluded
                        # from watching'.
                        ret_[rpath] = -2
        return ret_

    def __get_sub_rec(self, lpath):
        """
        Get every wd from self._wmd if its path is under the path of
        one (at least) of those in lpath. Doesn't follow symlinks.
        @param lpath: list of watch descriptor
        @type lpath: list of int
        @return: list of watch descriptor
        @rtype: list of int
        """
        for d in lpath:
            root = self.get_path(d)
            if root is not None:
                # always keep root
                yield d
            else:
                # if invalid
                continue

            # nothing else to expect
            if not os.path.isdir(root):
                continue

            # normalization
            root = os.path.normpath(root)
            # recursion: yield every watched path strictly below root
            lend = len(root)
            for iwd in self._wmd.items():
                cur = iwd[1].path
                pref = os.path.commonprefix([root, cur])
                if root == os.sep or (len(pref) == lend and \
                                      len(cur) > lend and \
                                      cur[lend] == os.sep):
                    yield iwd[1].wd

    def update_watch(self, wd, mask=None, proc_fun=None, rec=False,
                     auto_add=False, quiet=True):
        """
        Update existing watch descriptors |wd|. The |mask| value, the
        processing object |proc_fun|, the recursive param |rec| and the
        |auto_add| and |quiet| flags can all be updated.
        @param wd: Watch Descriptor to update. Also accepts a list of
                   watch descriptors.
        @type wd: int or list of int
        @param mask: Optional new bitmask of events.
        @type mask: int
        @param proc_fun: Optional new processing function.
        @type proc_fun: function or ProcessEvent instance or instance of
                        one of its subclasses or callable object.
        @param rec: Optionally adds watches recursively on all
                    subdirectories contained into |wd| directory.
        @type rec: bool
        @param auto_add: Automatically adds watches on newly created
                         directories in the watch's path corresponding to |wd|.
                         If |auto_add| is True, IN_CREATE is ored with |mask|
                         when the watch is updated.
        @type auto_add: bool
        @param quiet: If False raises a WatchManagerError exception on
                      error. See example not_quiet.py
        @type quiet: bool
        @return: dict of watch descriptors associated to booleans values.
                 True if the corresponding wd has been successfully
                 updated, False otherwise.
        @rtype: dict of {int: bool}
        """
        lwd = self.__format_param(wd)
        if rec:
            lwd = self.__get_sub_rec(lwd)

        ret_ = {}  # return {wd: bool, ...}
        for awd in lwd:
            apath = self.get_path(awd)
            if not apath or awd < 0:
                err = 'update_watch: invalid WD=%d' % awd
                if quiet:
                    log.error(err)
                    continue
                raise WatchManagerError(err, ret_)

            if mask:
                # Re-adding a watch on an already watched path updates its
                # mask and returns the same wd.
                wd_ = self._inotify_wrapper.inotify_add_watch(self._fd, apath,
                                                              mask)
                if wd_ < 0:
                    ret_[awd] = False
                    err = ('update_watch: cannot update %s WD=%d, %s' % \
                               (apath, wd_, self._inotify_wrapper.str_errno()))
                    if quiet:
                        log.error(err)
                        continue
                    raise WatchManagerError(err, ret_)

                assert(awd == wd_)

            if proc_fun or auto_add:
                watch_ = self._wmd[awd]

            if proc_fun:
                watch_.proc_fun = proc_fun

            if auto_add:
                watch_.auto_add = auto_add

            ret_[awd] = True
            log.debug('Updated watch - %s', self._wmd[awd])
        return ret_

    def __format_param(self, param):
        """
        Normalize a scalar-or-list parameter into a generator of elements.
        @param param: Parameter.
        @type param: string or int
        @return: wrap param.
        @rtype: list of type(param)
        """
        if isinstance(param, list):
            for p_ in param:
                yield p_
        else:
            yield param

    def get_wd(self, path):
        """
        Returns the watch descriptor associated to path. This method
        presents a prohibitive cost, always prefer to keep the WD
        returned by add_watch(). If the path is unknown it returns None.
        @param path: Path.
        @type path: str
        @return: WD or None.
        @rtype: int or None
        """
        path = self.__format_path(path)
        # Linear scan over all watches — hence the "prohibitive cost" above.
        for iwd in self._wmd.items():
            if iwd[1].path == path:
                return iwd[0]

    def get_path(self, wd):
        """
        Returns the path associated to WD, if WD is unknown it returns None.
        @param wd: Watch descriptor.
        @type wd: int
        @return: Path or None.
        @rtype: string or None
        """
        watch_ = self._wmd.get(wd)
        if watch_ is not None:
            return watch_.path

    def __walk_rec(self, top, rec):
        """
        Yields each subdirectories of top, doesn't follow symlinks.
        If rec is false, only yield top.
        @param top: root directory.
        @type top: string
        @param rec: recursive flag.
        @type rec: bool
        @return: path of one subdirectory.
        @rtype: string
        """
        if not rec or os.path.islink(top) or not os.path.isdir(top):
            yield top
        else:
            for root, dirs, files in os.walk(top):
                yield root

    def rm_watch(self, wd, rec=False, quiet=True):
        """
        Removes watch(s).
        @param wd: Watch Descriptor of the file or directory to unwatch.
                   Also accepts a list of WDs.
        @type wd: int or list of int.
        @param rec: Recursively removes watches on every already watched
                    subdirectories and subfiles.
        @type rec: bool
        @param quiet: If False raises a WatchManagerError exception on
                      error. See example not_quiet.py
        @type quiet: bool
        @return: dict of watch descriptors associated to booleans values.
                 True if the corresponding wd has been successfully
                 removed, False otherwise.
        @rtype: dict of {int: bool}
        """
        lwd = self.__format_param(wd)
        if rec:
            lwd = self.__get_sub_rec(lwd)

        ret_ = {}  # return {wd: bool, ...}
        for awd in lwd:
            # remove watch
            wd_ = self._inotify_wrapper.inotify_rm_watch(self._fd, awd)
            if wd_ < 0:
                ret_[awd] = False
                err = ('rm_watch: cannot remove WD=%d, %s' % \
                           (awd, self._inotify_wrapper.str_errno()))
                if quiet:
                    log.error(err)
                    continue
                raise WatchManagerError(err, ret_)

            # Remove watch from our dictionary
            if awd in self._wmd:
                del self._wmd[awd]
            ret_[awd] = True
            log.debug('Watch WD=%d (%s) removed', awd, self.get_path(awd))
        return ret_

    def watch_transient_file(self, filename, mask, proc_class):
        """
        Watch a transient file, which will be created and deleted frequently
        over time (e.g. pid file).
        @attention: Currently under the call to this function it is not
        possible to correctly watch the events triggered into the same
        base directory than the directory where is located this watched
        transient file. For instance it would be wrong to make these
        two successive calls: wm.watch_transient_file('/var/run/foo.pid', ...)
        and wm.add_watch('/var/run/', ...)
        @param filename: Filename.
        @type filename: string
        @param mask: Bitmask of events, should contain IN_CREATE and IN_DELETE.
        @type mask: int
        @param proc_class: ProcessEvent (or of one of its subclass), beware of
                           accepting a ProcessEvent's instance as argument into
                           __init__, see transient_file.py example for more
                           details.
        @type proc_class: ProcessEvent's instance or of one of its subclasses.
        @return: Same as add_watch().
        @rtype: Same as add_watch().
        """
        dirname = os.path.dirname(filename)
        if dirname == '':
            return {}  # Maintains coherence with add_watch()
        basename = os.path.basename(filename)
        # Assuming we are watching at least for IN_CREATE and IN_DELETE
        mask |= IN_CREATE | IN_DELETE

        def cmp_name(event):
            # Only chain processing for events concerning this very file.
            if getattr(event, 'name') is None:
                return False
            return basename == event.name
        return self.add_watch(dirname, mask,
                              proc_fun=proc_class(ChainIfTrue(func=cmp_name)),
                              rec=False,
                              auto_add=False, do_glob=False,
                              exclude_filter=lambda path: False)
class RawOutputFormat:
    """
    Format string representations.

    The mapping supplied at construction time binds attribute names
    (e.g. 'normal', 'bold', 'purple') to the decoration sequences used
    around strings; with an empty mapping every method degrades to a
    plain pass-through of the given value.
    """
    def __init__(self, format=None):
        self.format = format or {}

    def simple(self, s, attribute):
        """Wrap s between the sequences bound to attribute and 'normal'."""
        prefix = self.format.get(attribute, '')
        suffix = self.format.get('normal', '')
        return prefix + str(s) + suffix

    def punctuation(self, s):
        """Punctuation color."""
        return self.simple(s, 'normal')

    def field_value(self, s):
        """Field value color."""
        return self.simple(s, 'purple')

    def field_name(self, s):
        """Field name color."""
        return self.simple(s, 'blue')

    def class_name(self, s):
        """Class name color."""
        return self.format.get('red', '') + self.simple(s, 'bold')
# Module-level formatter shared by the __repr__/__str__ helpers; rebound to
# a ColoredOutputFormat() by command_line() unless raw output is requested.
output_format = RawOutputFormat()
class ColoredOutputFormat(RawOutputFormat):
    """
    Format colored string representations.
    """
    def __init__(self):
        # ANSI foreground colors occupy consecutive SGR codes 30..36, in
        # this exact order; build them instead of spelling each one out.
        colors = ('black', 'red', 'green', 'yellow',
                  'blue', 'purple', 'cyan')
        f = {name: '\033[%dm' % (30 + i) for i, name in enumerate(colors)}
        # Non-color text attributes.
        f.update({'normal': '\033[0m',
                  'bold': '\033[1m',
                  'uline': '\033[4m',
                  'blink': '\033[5m',
                  'invert': '\033[7m'})
        RawOutputFormat.__init__(self, f)
def compatibility_mode():
    """
    Use this function to turn on the compatibility mode. The compatibility
    mode is used to improve compatibility with Pyinotify 0.7.1 (or older)
    programs. The compatibility mode provides additional variables 'is_dir',
    'event_name', 'EventsCodes.IN_*' and 'EventsCodes.ALL_EVENTS' as
    Pyinotify 0.7.1 provided. Do not call this function from new programs!!
    Especially if there are developped for Pyinotify >= 0.8.x.
    """
    global COMPATIBILITY_MODE
    COMPATIBILITY_MODE = True
    # Re-expose the aggregate mask and every IN_* flag as attributes of
    # EventsCodes, the way Pyinotify 0.7.1 did.
    setattr(EventsCodes, 'ALL_EVENTS', ALL_EVENTS)
    for name, value in globals().items():
        if name.startswith('IN_'):
            setattr(EventsCodes, name, value)
def command_line():
    """
    By default the watched path is '/tmp' and all types of events are
    monitored. Events monitoring serves forever, type c^c to stop it.
    """
    from optparse import OptionParser

    usage = "usage: %prog [options] [path1] [path2] [pathn]"

    # Register the CLI flags of the standalone monitoring tool.
    parser = OptionParser(usage=usage)
    parser.add_option("-v", "--verbose", action="store_true",
                      dest="verbose", help="Verbose mode")
    parser.add_option("-r", "--recursive", action="store_true",
                      dest="recursive",
                      help="Add watches recursively on paths")
    parser.add_option("-a", "--auto_add", action="store_true",
                      dest="auto_add",
                      help="Automatically add watches on new directories")
    parser.add_option("-e", "--events-list", metavar="EVENT[,...]",
                      dest="events_list",
                      help=("A comma-separated list of events to watch for - "
                            "see the documentation for valid options (defaults"
                            " to everything)"))
    parser.add_option("-s", "--stats", action="store_true",
                      dest="stats",
                      help="Display dummy statistics")
    parser.add_option("-V", "--version", action="store_true",
                      dest="version", help="Pyinotify version")
    parser.add_option("-f", "--raw-format", action="store_true",
                      dest="raw_format",
                      help="Disable enhanced output format.")
    parser.add_option("-c", "--command", action="store",
                      dest="command",
                      help="Shell command to run upon event")

    (options, args) = parser.parse_args()

    if options.verbose:
        log.setLevel(10)  # 10 == logging.DEBUG

    # NOTE: -V prints the version but does not exit; monitoring still starts.
    if options.version:
        print(__version__)

    # Colored output is the default; -f keeps the raw formatter.
    if not options.raw_format:
        global output_format
        output_format = ColoredOutputFormat()

    if len(args) < 1:
        path = '/tmp'  # default watched path
    else:
        # add_watch() accepts a list of paths.
        path = args

    # watch manager instance
    wm = WatchManager()
    # notifier instance and init: with -s, events are aggregated into Stats
    # and reads are batched every 5 seconds instead of printed one by one.
    if options.stats:
        notifier = Notifier(wm, default_proc_fun=Stats(), read_freq=5)
    else:
        notifier = Notifier(wm, default_proc_fun=PrintAllEvents())

    # What mask to apply: OR together the named events given via -e, or
    # fall back to ALL_EVENTS when the option is absent.
    mask = 0
    if options.events_list:
        events_list = options.events_list.split(',')
        for ev in events_list:
            evcode = EventsCodes.ALL_FLAGS.get(ev, 0)
            if evcode:
                mask |= evcode
            else:
                # parser.error() prints the message and exits the program.
                parser.error("The event '%s' specified with option -e"
                             " is not valid" % ev)
    else:
        mask = ALL_EVENTS

    # stats: dump both repr() and str() of the Stats processor on each
    # loop iteration.
    cb_fun = None
    if options.stats:
        def cb(s):
            sys.stdout.write(repr(s.proc_fun()))
            sys.stdout.write('\n')
            sys.stdout.write(str(s.proc_fun()))
            sys.stdout.write('\n')
            sys.stdout.flush()
        cb_fun = cb

    # External command. NOTE: when both -s and -c are given, this callback
    # replaces the stats callback defined above.
    if options.command:
        def cb(s):
            subprocess.Popen(options.command, shell=True)
        cb_fun = cb

    log.debug('Start monitoring %s, (press c^c to halt pyinotify)' % path)

    wm.add_watch(path, mask, rec=options.recursive, auto_add=options.auto_add)
    # Loop forever (until sigint signal get caught)
    notifier.loop(callback=cb_fun)
# Allow the module to be run directly as a small inotify monitoring tool.
if __name__ == '__main__':
    command_line()
| 37.603701 | 83 | 0.58276 |
class PyinotifyError(Exception):
pass
class UnsupportedPythonVersionError(PyinotifyError):
def __init__(self, version):
PyinotifyError.__init__(self,
('Python %s is unsupported, requires '
'at least Python 3.0') % version)
import sys
if sys.version_info < (3, 0):
raise UnsupportedPythonVersionError(sys.version)
import threading
import os
import select
import struct
import fcntl
import errno
import termios
import array
import logging
import atexit
from collections import deque
from datetime import datetime, timedelta
import time
import re
import asyncore
import glob
import locale
import subprocess
try:
from functools import reduce
except ImportError:
pass
try:
import ctypes
import ctypes.util
except ImportError:
ctypes = None
try:
import inotify_syscalls
except ImportError:
inotify_syscalls = None
__author__ = "seb@dbzteam.org (Sebastien Martini)"
__version__ = "0.9.4"
COMPATIBILITY_MODE = False
class InotifyBindingNotFoundError(PyinotifyError):
def __init__(self):
err = "Couldn't find any inotify binding"
PyinotifyError.__init__(self, err)
class INotifyWrapper:
@staticmethod
def create():
# First, try to use ctypes.
if ctypes:
inotify = _CtypesLibcINotifyWrapper()
if inotify.init():
return inotify
# Second, see if C extension is compiled.
if inotify_syscalls:
inotify = _INotifySyscallsWrapper()
if inotify.init():
return inotify
def get_errno(self):
return self._get_errno()
def str_errno(self):
code = self.get_errno()
if code is None:
return 'Errno: no errno support'
return 'Errno=%s (%s)' % (os.strerror(code), errno.errorcode[code])
def inotify_init(self):
return self._inotify_init()
def inotify_add_watch(self, fd, pathname, mask):
# Unicode strings must be encoded to string prior to calling this
# method.
assert isinstance(pathname, str)
return self._inotify_add_watch(fd, pathname, mask)
def inotify_rm_watch(self, fd, wd):
return self._inotify_rm_watch(fd, wd)
class _INotifySyscallsWrapper(INotifyWrapper):
def __init__(self):
# Stores the last errno value.
self._last_errno = None
def init(self):
assert inotify_syscalls
return True
def _get_errno(self):
return self._last_errno
def _inotify_init(self):
try:
fd = inotify_syscalls.inotify_init()
except IOError as err:
self._last_errno = err.errno
return -1
return fd
def _inotify_add_watch(self, fd, pathname, mask):
try:
wd = inotify_syscalls.inotify_add_watch(fd, pathname, mask)
except IOError as err:
self._last_errno = err.errno
return -1
return wd
def _inotify_rm_watch(self, fd, wd):
try:
ret = inotify_syscalls.inotify_rm_watch(fd, wd)
except IOError as err:
self._last_errno = err.errno
return -1
return ret
class _CtypesLibcINotifyWrapper(INotifyWrapper):
def __init__(self):
self._libc = None
self._get_errno_func = None
def init(self):
assert ctypes
libc_name = None
try:
libc_name = ctypes.util.find_library('c')
except (OSError, IOError):
pass # Will attemp to load it with None anyway.
self._libc = ctypes.CDLL(libc_name, use_errno=True)
self._get_errno_func = ctypes.get_errno
# Eventually check that libc has needed inotify bindings.
if (not hasattr(self._libc, 'inotify_init') or
not hasattr(self._libc, 'inotify_add_watch') or
not hasattr(self._libc, 'inotify_rm_watch')):
return False
self._libc.inotify_init.argtypes = []
self._libc.inotify_init.restype = ctypes.c_int
self._libc.inotify_add_watch.argtypes = [ctypes.c_int, ctypes.c_char_p,
ctypes.c_uint32]
self._libc.inotify_add_watch.restype = ctypes.c_int
self._libc.inotify_rm_watch.argtypes = [ctypes.c_int, ctypes.c_int]
self._libc.inotify_rm_watch.restype = ctypes.c_int
return True
def _get_errno(self):
assert self._get_errno_func
return self._get_errno_func()
def _inotify_init(self):
assert self._libc is not None
return self._libc.inotify_init()
def _inotify_add_watch(self, fd, pathname, mask):
assert self._libc is not None
# Encodes path to a bytes string. This conversion seems required because
# ctypes.create_string_buffer seems to manipulate bytes internally.
# Moreover it seems that inotify_add_watch does not work very well when
# it receives an ctypes.create_unicode_buffer instance as argument.
pathname = pathname.encode(sys.getfilesystemencoding())
pathname = ctypes.create_string_buffer(pathname)
return self._libc.inotify_add_watch(fd, pathname, mask)
def _inotify_rm_watch(self, fd, wd):
assert self._libc is not None
return self._libc.inotify_rm_watch(fd, wd)
def _sysctl(self, *args):
assert self._libc is not None
return self._libc.sysctl(*args)
# Logging
def logger_init():
log = logging.getLogger("pyinotify")
console_handler = logging.StreamHandler()
console_handler.setFormatter(
logging.Formatter("[%(asctime)s %(name)s %(levelname)s] %(message)s"))
log.addHandler(console_handler)
log.setLevel(20)
return log
log = logger_init()
# inotify's variables
class SysCtlINotify:
inotify_attrs = {'max_user_instances': 1,
'max_user_watches': 2,
'max_queued_events': 3}
def __init__(self, attrname, inotify_wrapper):
assert ctypes
self._attrname = attrname
self._inotify_wrapper = inotify_wrapper
sino = ctypes.c_int * 3
self._attr = sino(5, 20, SysCtlINotify.inotify_attrs[attrname])
@staticmethod
def create(attrname):
if ctypes is None:
return None
inotify_wrapper = _CtypesLibcINotifyWrapper()
if not inotify_wrapper.init():
return None
return SysCtlINotify(attrname, inotify_wrapper)
def get_val(self):
oldv = ctypes.c_int(0)
size = ctypes.c_int(ctypes.sizeof(oldv))
self._inotify_wrapper._sysctl(self._attr, 3,
ctypes.c_voidp(ctypes.addressof(oldv)),
ctypes.addressof(size),
None, 0)
return oldv.value
def set_val(self, nval):
oldv = ctypes.c_int(0)
sizeo = ctypes.c_int(ctypes.sizeof(oldv))
newv = ctypes.c_int(nval)
sizen = ctypes.c_int(ctypes.sizeof(newv))
self._inotify_wrapper._sysctl(self._attr, 3,
ctypes.c_voidp(ctypes.addressof(oldv)),
ctypes.addressof(sizeo),
ctypes.c_voidp(ctypes.addressof(newv)),
ctypes.addressof(sizen))
value = property(get_val, set_val)
def __repr__(self):
return '<%s=%d>' % (self._attrname, self.get_val())
#
# FIXME: currently these variables are only accessible when ctypes is used,
# otherwise there are set to None.
#
# read: myvar = max_queued_events.value
# update: max_queued_events.value = 42
#
for attrname in ('max_queued_events', 'max_user_instances', 'max_user_watches'):
globals()[attrname] = SysCtlINotify.create(attrname)
class EventsCodes:
# The idea here is 'configuration-as-code' - this way, we get our nice class
# constants, but we also get nice human-friendly text mappings to do lookups
# against as well, for free:
FLAG_COLLECTIONS = {'OP_FLAGS': {
'IN_ACCESS' : 0x00000001, # File was accessed
'IN_MODIFY' : 0x00000002, # File was modified
'IN_ATTRIB' : 0x00000004, # Metadata changed
'IN_CLOSE_WRITE' : 0x00000008, # Writable file was closed
'IN_CLOSE_NOWRITE' : 0x00000010, # Unwritable file closed
'IN_OPEN' : 0x00000020, # File was opened
'IN_MOVED_FROM' : 0x00000040, # File was moved from X
'IN_MOVED_TO' : 0x00000080, # File was moved to Y
'IN_CREATE' : 0x00000100, # Subfile was created
'IN_DELETE' : 0x00000200, # Subfile was deleted
'IN_DELETE_SELF' : 0x00000400, # Self (watched item itself)
# was deleted
'IN_MOVE_SELF' : 0x00000800, # Self (watched item itself) was moved
},
'EVENT_FLAGS': {
'IN_UNMOUNT' : 0x00002000, # Backing fs was unmounted
'IN_Q_OVERFLOW' : 0x00004000, # Event queued overflowed
'IN_IGNORED' : 0x00008000, # File was ignored
},
'SPECIAL_FLAGS': {
'IN_ONLYDIR' : 0x01000000, # only watch the path if it is a
# directory
'IN_DONT_FOLLOW' : 0x02000000, # don't follow a symlink
'IN_EXCL_UNLINK' : 0x04000000,
'IN_MASK_ADD' : 0x20000000,
'IN_ISDIR' : 0x40000000,
'IN_ONESHOT' : 0x80000000,
},
}
def maskname(mask):
ms = mask
name = '%s'
if mask & IN_ISDIR:
ms = mask - IN_ISDIR
name = '%s|IN_ISDIR'
return name % EventsCodes.ALL_VALUES[ms]
maskname = staticmethod(maskname)
EventsCodes.ALL_FLAGS = {}
EventsCodes.ALL_VALUES = {}
for flagc, valc in EventsCodes.FLAG_COLLECTIONS.items():
# Make the collections' members directly accessible through the
setattr(EventsCodes, flagc, valc)
EventsCodes.ALL_FLAGS.update(valc)
for name, val in valc.items():
globals()[name] = val
EventsCodes.ALL_VALUES[val] = name
ALL_EVENTS = reduce(lambda x, y: x | y, EventsCodes.OP_FLAGS.values())
EventsCodes.ALL_FLAGS['ALL_EVENTS'] = ALL_EVENTS
EventsCodes.ALL_VALUES[ALL_EVENTS] = 'ALL_EVENTS'
class _Event:
def __init__(self, dict_):
for tpl in dict_.items():
setattr(self, *tpl)
def __repr__(self):
s = ''
for attr, value in sorted(self.__dict__.items(), key=lambda x: x[0]):
if attr.startswith('_'):
continue
if attr == 'mask':
value = hex(getattr(self, attr))
elif isinstance(value, str) and not value:
value = "''"
s += ' %s%s%s' % (output_format.field_name(attr),
output_format.punctuation('='),
output_format.field_value(value))
s = '%s%s%s %s' % (output_format.punctuation('<'),
output_format.class_name(self.__class__.__name__),
s,
output_format.punctuation('>'))
return s
def __str__(self):
return repr(self)
class _RawEvent(_Event):
def __init__(self, wd, mask, cookie, name):
self._str = None
d = {'wd': wd,
'mask': mask,
'cookie': cookie,
'name': name.rstrip('\0')}
_Event.__init__(self, d)
log.debug(str(self))
def __str__(self):
if self._str is None:
self._str = _Event.__str__(self)
return self._str
class Event(_Event):
def __init__(self, raw):
_Event.__init__(self, raw)
self.maskname = EventsCodes.maskname(self.mask)
if COMPATIBILITY_MODE:
self.event_name = self.maskname
try:
if self.name:
self.pathname = os.path.abspath(os.path.join(self.path,
self.name))
else:
self.pathname = os.path.abspath(self.path)
except AttributeError as err:
log.debug(err)
class ProcessEventError(PyinotifyError):
def __init__(self, err):
PyinotifyError.__init__(self, err)
class _ProcessEvent:
def __call__(self, event):
stripped_mask = event.mask - (event.mask & IN_ISDIR)
maskname = EventsCodes.ALL_VALUES.get(stripped_mask)
if maskname is None:
raise ProcessEventError("Unknown mask 0x%08x" % stripped_mask)
meth = getattr(self, 'process_' + maskname, None)
if meth is not None:
return meth(event)
meth = getattr(self, 'process_IN_' + maskname.split('_')[1], None)
if meth is not None:
return meth(event)
return self.process_default(event)
def __repr__(self):
return '<%s>' % self.__class__.__name__
class _SysProcessEvent(_ProcessEvent):
def __init__(self, wm, notifier):
self._watch_manager = wm
self._notifier = notifier
self._mv_cookie = {}
self._mv = {}
def cleanup(self):
date_cur_ = datetime.now()
for seq in (self._mv_cookie, self._mv):
for k in list(seq.keys()):
if (date_cur_ - seq[k][1]) > timedelta(minutes=1):
log.debug('Cleanup: deleting entry %s', seq[k][0])
del seq[k]
def process_IN_CREATE(self, raw_event):
if raw_event.mask & IN_ISDIR:
watch_ = self._watch_manager.get_watch(raw_event.wd)
created_dir = os.path.join(watch_.path, raw_event.name)
if watch_.auto_add and not watch_.exclude_filter(created_dir):
addw = self._watch_manager.add_watch
addw_ret = addw(created_dir, watch_.mask,
proc_fun=watch_.proc_fun,
rec=False, auto_add=watch_.auto_add,
exclude_filter=watch_.exclude_filter)
created_dir_wd = addw_ret.get(created_dir)
if (created_dir_wd is not None) and (created_dir_wd > 0):
for name in os.listdir(created_dir):
inner = os.path.join(created_dir, name)
if self._watch_manager.get_wd(inner) is not None:
continue
if os.path.isfile(inner):
flags = IN_CREATE
elif os.path.isdir(inner):
flags = IN_CREATE | IN_ISDIR
else:
continue
rawevent = _RawEvent(created_dir_wd, flags, 0, name)
self._notifier.append_event(rawevent)
return self.process_default(raw_event)
def process_IN_MOVED_FROM(self, raw_event):
watch_ = self._watch_manager.get_watch(raw_event.wd)
path_ = watch_.path
src_path = os.path.normpath(os.path.join(path_, raw_event.name))
self._mv_cookie[raw_event.cookie] = (src_path, datetime.now())
return self.process_default(raw_event, {'cookie': raw_event.cookie})
def process_IN_MOVED_TO(self, raw_event):
watch_ = self._watch_manager.get_watch(raw_event.wd)
path_ = watch_.path
dst_path = os.path.normpath(os.path.join(path_, raw_event.name))
mv_ = self._mv_cookie.get(raw_event.cookie)
to_append = {'cookie': raw_event.cookie}
if mv_ is not None:
self._mv[mv_[0]] = (dst_path, datetime.now())
# that its associated (they share a common cookie) IN_MOVED_TO
# event is queued itself. It is then possible in that scenario
# to provide as additional information to the IN_MOVED_TO event
# the original pathname of the moved file/directory.
to_append['src_pathname'] = mv_[0]
elif (raw_event.mask & IN_ISDIR and watch_.auto_add and
not watch_.exclude_filter(dst_path)):
# We got a diretory that's "moved in" from an unknown source and
self._watch_manager.add_watch(dst_path, watch_.mask,
proc_fun=watch_.proc_fun,
rec=True, auto_add=True,
exclude_filter=watch_.exclude_filter)
return self.process_default(raw_event, to_append)
def process_IN_MOVE_SELF(self, raw_event):
watch_ = self._watch_manager.get_watch(raw_event.wd)
src_path = watch_.path
mv_ = self._mv.get(src_path)
if mv_:
dest_path = mv_[0]
watch_.path = dest_path
src_path += os.path.sep
src_path_len = len(src_path)
for w in self._watch_manager.watches.values():
if w.path.startswith(src_path):
w.path = os.path.join(dest_path, w.path[src_path_len:])
else:
log.error("The pathname '%s' of this watch %s has probably changed "
"and couldn't be updated, so it cannot be trusted "
"anymore. To fix this error move directories/files only "
"between watched parents directories, in this case e.g. "
"put a watch on '%s'.",
watch_.path, watch_,
os.path.normpath(os.path.join(watch_.path,
os.path.pardir)))
if not watch_.path.endswith('-unknown-path'):
watch_.path += '-unknown-path'
return self.process_default(raw_event)
def process_IN_Q_OVERFLOW(self, raw_event):
return Event({'mask': raw_event.mask})
def process_IN_IGNORED(self, raw_event):
event_ = self.process_default(raw_event)
self._watch_manager.del_watch(raw_event.wd)
return event_
def process_default(self, raw_event, to_append=None):
watch_ = self._watch_manager.get_watch(raw_event.wd)
if raw_event.mask & (IN_DELETE_SELF | IN_MOVE_SELF):
# Unfornulately this information is not provided by the kernel
dir_ = watch_.dir
else:
dir_ = bool(raw_event.mask & IN_ISDIR)
dict_ = {'wd': raw_event.wd,
'mask': raw_event.mask,
'path': watch_.path,
'name': raw_event.name,
'dir': dir_}
if COMPATIBILITY_MODE:
dict_['is_dir'] = dir_
if to_append is not None:
dict_.update(to_append)
return Event(dict_)
class ProcessEvent(_ProcessEvent):
pevent = None
def __init__(self, pevent=None, **kargs):
self.pevent = pevent
self.my_init(**kargs)
def my_init(self, **kargs):
pass
def __call__(self, event):
stop_chaining = False
if self.pevent is not None:
# By default methods return None so we set as guideline
# that methods asking for stop chaining must explicitely
# return non None or non False values, otherwise the default
# behavior will be to accept chain call to the corresponding
# local method.
stop_chaining = self.pevent(event)
if not stop_chaining:
return _ProcessEvent.__call__(self, event)
def nested_pevent(self):
return self.pevent
def process_IN_Q_OVERFLOW(self, event):
log.warning('Event queue overflowed.')
def process_default(self, event):
pass
class PrintAllEvents(ProcessEvent):
def my_init(self, out=None):
if out is None:
out = sys.stdout
self._out = out
def process_default(self, event):
self._out.write(str(event))
self._out.write('\n')
self._out.flush()
class ChainIfTrue(ProcessEvent):
def my_init(self, func):
self._func = func
def process_default(self, event):
return not self._func(event)
class Stats(ProcessEvent):
def my_init(self):
self._start_time = time.time()
self._stats = {}
self._stats_lock = threading.Lock()
def process_default(self, event):
self._stats_lock.acquire()
try:
events = event.maskname.split('|')
for event_name in events:
count = self._stats.get(event_name, 0)
self._stats[event_name] = count + 1
finally:
self._stats_lock.release()
def _stats_copy(self):
self._stats_lock.acquire()
try:
return self._stats.copy()
finally:
self._stats_lock.release()
def __repr__(self):
stats = self._stats_copy()
elapsed = int(time.time() - self._start_time)
elapsed_str = ''
if elapsed < 60:
elapsed_str = str(elapsed) + 'sec'
elif 60 <= elapsed < 3600:
elapsed_str = '%dmn%dsec' % (elapsed / 60, elapsed % 60)
elif 3600 <= elapsed < 86400:
elapsed_str = '%dh%dmn' % (elapsed / 3600, (elapsed % 3600) / 60)
elif elapsed >= 86400:
elapsed_str = '%dd%dh' % (elapsed / 86400, (elapsed % 86400) / 3600)
stats['ElapsedTime'] = elapsed_str
l = []
for ev, value in sorted(stats.items(), key=lambda x: x[0]):
l.append(' %s=%s' % (output_format.field_name(ev),
output_format.field_value(value)))
s = '<%s%s >' % (output_format.class_name(self.__class__.__name__),
''.join(l))
return s
def dump(self, filename):
flags = os.O_WRONLY|os.O_CREAT|os.O_NOFOLLOW|os.O_EXCL
fd = os.open(filename, flags, 0o0600)
os.write(fd, bytes(self.__str__(), locale.getpreferredencoding()))
os.close(fd)
def __str__(self, scale=45):
stats = self._stats_copy()
if not stats:
return ''
m = max(stats.values())
unity = scale / m
fmt = '%%-26s%%-%ds%%s' % (len(output_format.field_value('@' * scale))
+ 1)
def func(x):
return fmt % (output_format.field_name(x[0]),
output_format.field_value('@' * int(x[1] * unity)),
output_format.simple('%d' % x[1], 'yellow'))
s = '\n'.join(map(func, sorted(stats.items(), key=lambda x: x[0])))
return s
class NotifierError(PyinotifyError):
def __init__(self, err):
PyinotifyError.__init__(self, err)
class Notifier:
def __init__(self, watch_manager, default_proc_fun=None, read_freq=0,
threshold=0, timeout=None):
# Watch Manager instance
self._watch_manager = watch_manager
# File descriptor
self._fd = self._watch_manager.get_fd()
# Poll object and registration
self._pollobj = select.poll()
self._pollobj.register(self._fd, select.POLLIN)
# This pipe is correctely initialized and used by ThreadedNotifier
self._pipe = (-1, -1)
# Event queue
self._eventq = deque()
# System processing functor, common to all events
self._sys_proc_fun = _SysProcessEvent(self._watch_manager, self)
# Default processing method
self._default_proc_fun = default_proc_fun
if default_proc_fun is None:
self._default_proc_fun = PrintAllEvents()
# Loop parameters
self._read_freq = read_freq
self._threshold = threshold
self._timeout = timeout
# Coalesce events option
self._coalesce = False
# set of str(raw_event), only used when coalesce option is True
self._eventset = set()
def append_event(self, event):
self._eventq.append(event)
def proc_fun(self):
return self._default_proc_fun
def coalesce_events(self, coalesce=True):
self._coalesce = coalesce
if not coalesce:
self._eventset.clear()
def check_events(self, timeout=None):
while True:
try:
# blocks up to 'timeout' milliseconds
if timeout is None:
timeout = self._timeout
ret = self._pollobj.poll(timeout)
except select.error as err:
if err.args[0] == errno.EINTR:
continue # interrupted, retry
else:
raise
else:
break
if not ret or (self._pipe[0] == ret[0][0]):
return False
# only one fd is polled
return ret[0][1] & select.POLLIN
def read_events(self):
buf_ = array.array('i', [0])
# get event queue size
if fcntl.ioctl(self._fd, termios.FIONREAD, buf_, 1) == -1:
return
queue_size = buf_[0]
if queue_size < self._threshold:
log.debug('(fd: %d) %d bytes available to read but threshold is '
'fixed to %d bytes', self._fd, queue_size,
self._threshold)
return
try:
# Read content from file
r = os.read(self._fd, queue_size)
except Exception as msg:
raise NotifierError(msg)
log.debug('Event queue size: %d', queue_size)
rsum = 0 # counter
while rsum < queue_size:
s_size = 16
# Retrieve wd, mask, cookie and fname_len
wd, mask, cookie, fname_len = struct.unpack('iIII',
r[rsum:rsum+s_size])
# Retrieve name
bname, = struct.unpack('%ds' % fname_len,
r[rsum + s_size:rsum + s_size + fname_len])
# FIXME: should we explictly call sys.getdefaultencoding() here ??
uname = bname.decode()
rawevent = _RawEvent(wd, mask, cookie, uname)
if self._coalesce:
# Only enqueue new (unique) events.
raweventstr = str(rawevent)
if raweventstr not in self._eventset:
self._eventset.add(raweventstr)
self._eventq.append(rawevent)
else:
self._eventq.append(rawevent)
rsum += s_size + fname_len
def process_events(self):
while self._eventq:
raw_event = self._eventq.popleft() # pop next event
watch_ = self._watch_manager.get_watch(raw_event.wd)
if (watch_ is None) and not (raw_event.mask & IN_Q_OVERFLOW):
if not (raw_event.mask & IN_IGNORED):
# Not really sure how we ended up here, nor how we should
# handle these types of events and if it is appropriate to
# completly skip them (like we are doing here).
log.warning("Unable to retrieve Watch object associated to %s",
repr(raw_event))
continue
revent = self._sys_proc_fun(raw_event) # system processings
if watch_ and watch_.proc_fun:
watch_.proc_fun(revent) # user processings
else:
self._default_proc_fun(revent)
self._sys_proc_fun.cleanup() # remove olds MOVED_* events records
if self._coalesce:
self._eventset.clear()
def __daemonize(self, pid_file=None, stdin=os.devnull, stdout=os.devnull,
stderr=os.devnull):
if pid_file is None:
dirname = '/var/run/'
basename = os.path.basename(sys.argv[0]) or 'pyinotify'
pid_file = os.path.join(dirname, basename + '.pid')
if pid_file != False and os.path.lexists(pid_file):
err = 'Cannot daemonize: pid file %s already exists.' % pid_file
raise NotifierError(err)
def fork_daemon():
# Adapted from Chad J. Schroeder's recipe
pid = os.fork()
if (pid == 0):
os.setsid()
pid = os.fork()
if (pid == 0):
os.chdir('/')
os.umask(0o022)
else:
os._exit(0)
else:
os._exit(0)
fd_inp = os.open(stdin, os.O_RDONLY)
os.dup2(fd_inp, 0)
fd_out = os.open(stdout, os.O_WRONLY|os.O_CREAT, 0o0600)
os.dup2(fd_out, 1)
fd_err = os.open(stderr, os.O_WRONLY|os.O_CREAT, 0o0600)
os.dup2(fd_err, 2)
fork_daemon()
if pid_file != False:
flags = os.O_WRONLY|os.O_CREAT|os.O_NOFOLLOW|os.O_EXCL
fd_pid = os.open(pid_file, flags, 0o0600)
os.write(fd_pid, bytes(str(os.getpid()) + '\n',
locale.getpreferredencoding()))
os.close(fd_pid)
atexit.register(lambda : os.unlink(pid_file))
def _sleep(self, ref_time):
if self._read_freq > 0:
cur_time = time.time()
sleep_amount = self._read_freq - (cur_time - ref_time)
if sleep_amount > 0:
log.debug('Now sleeping %d seconds', sleep_amount)
time.sleep(sleep_amount)
def loop(self, callback=None, daemonize=False, **args):
if daemonize:
self.__daemonize(**args)
while 1:
try:
self.process_events()
if (callback is not None) and (callback(self) is True):
break
ref_time = time.time()
if self.check_events():
self._sleep(ref_time)
self.read_events()
except KeyboardInterrupt:
log.debug('Pyinotify stops monitoring.')
break
self.stop()
def stop(self):
self._pollobj.unregister(self._fd)
os.close(self._fd)
class ThreadedNotifier(threading.Thread, Notifier):
def __init__(self, watch_manager, default_proc_fun=None, read_freq=0,
threshold=0, timeout=None):
threading.Thread.__init__(self)
self._stop_event = threading.Event()
Notifier.__init__(self, watch_manager, default_proc_fun, read_freq,
threshold, timeout)
self._pipe = os.pipe()
self._pollobj.register(self._pipe[0], select.POLLIN)
def stop(self):
self._stop_event.set()
os.write(self._pipe[1], b'stop')
threading.Thread.join(self)
Notifier.stop(self)
self._pollobj.unregister(self._pipe[0])
os.close(self._pipe[0])
os.close(self._pipe[1])
def loop(self):
# ._stop_event.isSet() wich put an end to the thread's execution.
while not self._stop_event.isSet():
self.process_events()
ref_time = time.time()
if self.check_events():
self._sleep(ref_time)
self.read_events()
def run(self):
self.loop()
class AsyncNotifier(asyncore.file_dispatcher, Notifier):
def __init__(self, watch_manager, default_proc_fun=None, read_freq=0,
threshold=0, timeout=None, channel_map=None):
Notifier.__init__(self, watch_manager, default_proc_fun, read_freq,
threshold, timeout)
asyncore.file_dispatcher.__init__(self, self._fd, channel_map)
def handle_read(self):
self.read_events()
self.process_events()
class TornadoAsyncNotifier(Notifier):
def __init__(self, watch_manager, ioloop, callback=None,
default_proc_fun=None, read_freq=0, threshold=0, timeout=None,
channel_map=None):
self.io_loop = ioloop
self.handle_read_callback = callback
Notifier.__init__(self, watch_manager, default_proc_fun, read_freq,
threshold, timeout)
ioloop.add_handler(self._fd, self.handle_read, ioloop.READ)
def handle_read(self, *args, **kwargs):
self.read_events()
self.process_events()
if self.handle_read_callback is not None:
self.handle_read_callback(self)
class Watch:
__slots__ = ('wd', 'path', 'mask', 'proc_fun', 'auto_add',
'exclude_filter', 'dir')
def __init__(self, wd, path, mask, proc_fun, auto_add, exclude_filter):
self.wd = wd
self.path = path
self.mask = mask
self.proc_fun = proc_fun
self.auto_add = auto_add
self.exclude_filter = exclude_filter
self.dir = os.path.isdir(self.path)
def __repr__(self):
s = ' '.join(['%s%s%s' % (output_format.field_name(attr),
output_format.punctuation('='),
output_format.field_value(getattr(self,
attr))) \
for attr in self.__slots__ if not attr.startswith('_')])
s = '%s%s %s %s' % (output_format.punctuation('<'),
output_format.class_name(self.__class__.__name__),
s,
output_format.punctuation('>'))
return s
class ExcludeFilter:
def __init__(self, arg_lst):
if isinstance(arg_lst, str):
lst = self._load_patterns_from_file(arg_lst)
elif isinstance(arg_lst, list):
lst = arg_lst
else:
raise TypeError
self._lregex = []
for regex in lst:
self._lregex.append(re.compile(regex, re.UNICODE))
def _load_patterns_from_file(self, filename):
lst = []
with open(filename, 'r') as file_obj:
for line in file_obj.readlines():
pattern = line.strip()
if not pattern or pattern.startswith('#'):
continue
lst.append(pattern)
return lst
def _match(self, regex, path):
return regex.match(path) is not None
def __call__(self, path):
for regex in self._lregex:
if self._match(regex, path):
return True
return False
class WatchManagerError(Exception):
def __init__(self, msg, wmd):
self.wmd = wmd
Exception.__init__(self, msg)
class WatchManager:
    """Owns an inotify instance and the set of active watches.

    Maps kernel watch descriptors (wd) to Watch objects and provides
    add/update/remove operations on top of a low-level inotify binding.
    """
    def __init__(self, exclude_filter=lambda path: False):
        """Create the manager and initialize a fresh inotify instance.

        @param exclude_filter: predicate called with a path; returning
            True excludes that path from recursive watching.
        @raise InotifyBindingNotFoundError: no usable inotify binding.
        @raise OSError: inotify initialization failed.
        """
        self._exclude_filter = exclude_filter
        self._wmd = {}  # watch descriptor (int) -> Watch
        self._inotify_wrapper = INotifyWrapper.create()
        if self._inotify_wrapper is None:
            raise InotifyBindingNotFoundError()
        self._fd = self._inotify_wrapper.inotify_init()
        if self._fd < 0:
            err = 'Cannot initialize new instance of inotify, %s'
            raise OSError(err % self._inotify_wrapper.str_errno())
    def close(self):
        """Close the inotify file descriptor; the manager is unusable after."""
        os.close(self._fd)
    def get_fd(self):
        """Return the raw inotify file descriptor (usable with select/poll)."""
        return self._fd
    def get_watch(self, wd):
        """Return the Watch for descriptor wd, or None if unknown."""
        return self._wmd.get(wd)
    def del_watch(self, wd):
        """Forget descriptor wd locally (logs, does not raise, on unknown wd)."""
        try:
            del self._wmd[wd]
        except KeyError as err:
            log.error('Cannot delete unknown watch descriptor %s' % str(err))
    @property
    def watches(self):
        """Dictionary of managed watches, keyed by watch descriptor."""
        return self._wmd
    def __format_path(self, path):
        """Lexically normalize a path (collapse separators and dots)."""
        return os.path.normpath(path)
    def __add_watch(self, path, mask, proc_fun, auto_add, exclude_filter):
        """Register one watch with the kernel and record it locally.

        Returns the non-negative watch descriptor, or a negative errno-style
        value on failure (no local state is recorded in that case).
        """
        path = self.__format_path(path)
        # auto_add relies on IN_CREATE events to notice new sub-directories.
        if auto_add and not mask & IN_CREATE:
            mask |= IN_CREATE
        wd = self._inotify_wrapper.inotify_add_watch(self._fd, path, mask)
        if wd < 0:
            return wd
        watch = Watch(wd=wd, path=path, mask=mask, proc_fun=proc_fun,
                      auto_add=auto_add, exclude_filter=exclude_filter)
        self._wmd[wd] = watch
        log.debug('New %s', watch)
        return wd
    def __glob(self, path, do_glob):
        """Expand shell wildcards in path when do_glob is true."""
        if do_glob:
            return glob.iglob(path)
        else:
            return [path]
    def add_watch(self, path, mask, proc_fun=None, rec=False,
                  auto_add=False, do_glob=False, quiet=True,
                  exclude_filter=None):
        """Add watches on one or several paths.

        Returns a dict mapping each processed path to its watch descriptor.
        Negative values flag problems: -2 path excluded by the filter,
        -3 path is not a string; other negatives are kernel errors.
        With quiet=False kernel errors raise WatchManagerError instead of
        being logged.
        """
        ret_ = {}
        if exclude_filter is None:
            exclude_filter = self._exclude_filter
        for npath in self.__format_param(path):
            if not isinstance(npath, str):
                ret_[path] = -3
                continue
            for apath in self.__glob(npath, do_glob):
                for rpath in self.__walk_rec(apath, rec):
                    if not exclude_filter(rpath):
                        wd = ret_[rpath] = self.__add_watch(rpath, mask,
                                                            proc_fun,
                                                            auto_add,
                                                            exclude_filter)
                        if wd < 0:
                            err = ('add_watch: cannot watch %s WD=%d, %s' % \
                                   (rpath, wd,
                                    self._inotify_wrapper.str_errno()))
                            if quiet:
                                log.error(err)
                            else:
                                raise WatchManagerError(err, ret_)
                    else:
                        ret_[rpath] = -2
        return ret_
    def __get_sub_rec(self, lpath):
        """Yield each descriptor in lpath plus the descriptors of every
        watched directory located below the corresponding path."""
        for d in lpath:
            root = self.get_path(d)
            if root is not None:
                # always keep root
                yield d
            else:
                # if invalid
                continue
            # nothing else to expect
            if not os.path.isdir(root):
                continue
            # normalization
            root = os.path.normpath(root)
            # recursion
            lend = len(root)
            for iwd in self._wmd.items():
                cur = iwd[1].path
                pref = os.path.commonprefix([root, cur])
                # A watch is "below" root when root is a strict path prefix
                # ending exactly on a separator boundary.
                if root == os.sep or (len(pref) == lend and \
                                      len(cur) > lend and \
                                      cur[lend] == os.sep):
                    yield iwd[1].wd
    def update_watch(self, wd, mask=None, proc_fun=None, rec=False,
                     auto_add=False, quiet=True):
        """Update mask, proc_fun and/or auto_add of existing watches.

        Returns {wd: success_bool}.  With quiet=False problems raise
        WatchManagerError instead of being logged.
        """
        lwd = self.__format_param(wd)
        if rec:
            lwd = self.__get_sub_rec(lwd)
        ret_ = {} # return {wd: bool, ...}
        for awd in lwd:
            apath = self.get_path(awd)
            if not apath or awd < 0:
                err = 'update_watch: invalid WD=%d' % awd
                if quiet:
                    log.error(err)
                    continue
                raise WatchManagerError(err, ret_)
            if mask:
                # Re-adding the same path updates its mask; the kernel must
                # return the same descriptor, asserted below.
                wd_ = self._inotify_wrapper.inotify_add_watch(self._fd, apath,
                                                              mask)
                if wd_ < 0:
                    ret_[awd] = False
                    err = ('update_watch: cannot update %s WD=%d, %s' % \
                           (apath, wd_, self._inotify_wrapper.str_errno()))
                    if quiet:
                        log.error(err)
                        continue
                    raise WatchManagerError(err, ret_)
                assert(awd == wd_)
            if proc_fun or auto_add:
                watch_ = self._wmd[awd]
            if proc_fun:
                watch_.proc_fun = proc_fun
            if auto_add:
                watch_.auto_add = auto_add
            ret_[awd] = True
            log.debug('Updated watch - %s', self._wmd[awd])
        return ret_
    def __format_param(self, param):
        """Yield each element of param when it is a list, else param itself."""
        if isinstance(param, list):
            for p_ in param:
                yield p_
        else:
            yield param
    def get_wd(self, path):
        """Return the descriptor watching exactly path, or None."""
        path = self.__format_path(path)
        for iwd in self._wmd.items():
            if iwd[1].path == path:
                return iwd[0]
    def get_path(self, wd):
        """Return the path watched by descriptor wd, or None if unknown."""
        watch_ = self._wmd.get(wd)
        if watch_ is not None:
            return watch_.path
    def __walk_rec(self, top, rec):
        """Yield top, or every directory under top when rec is true.

        Symlinks and non-directories are yielded as-is, never descended.
        """
        if not rec or os.path.islink(top) or not os.path.isdir(top):
            yield top
        else:
            for root, dirs, files in os.walk(top):
                yield root
    def rm_watch(self, wd, rec=False, quiet=True):
        """Remove watches; with rec=True also all watches below them.

        Returns {wd: success_bool}.
        """
        lwd = self.__format_param(wd)
        if rec:
            lwd = self.__get_sub_rec(lwd)
        ret_ = {} # return {wd: bool, ...}
        for awd in lwd:
            # remove watch
            wd_ = self._inotify_wrapper.inotify_rm_watch(self._fd, awd)
            if wd_ < 0:
                ret_[awd] = False
                err = ('rm_watch: cannot remove WD=%d, %s' % \
                       (awd, self._inotify_wrapper.str_errno()))
                if quiet:
                    log.error(err)
                    continue
                raise WatchManagerError(err, ret_)
            # Remove watch from our dictionary
            if awd in self._wmd:
                del self._wmd[awd]
            ret_[awd] = True
            # NOTE(review): the watch was just deleted from _wmd, so
            # get_path(awd) returns None here — confirm the log message
            # is intended to show the path.
            log.debug('Watch WD=%d (%s) removed', awd, self.get_path(awd))
        return ret_
    def watch_transient_file(self, filename, mask, proc_class):
        """Watch a file that may not exist yet via its parent directory.

        Events on the directory are filtered so that proc_class only
        fires for events whose name matches the file's basename.
        """
        dirname = os.path.dirname(filename)
        if dirname == '':
            return {} # Maintains coherence with add_watch()
        basename = os.path.basename(filename)
        # Assuming we are watching at least for IN_CREATE and IN_DELETE
        mask |= IN_CREATE | IN_DELETE
        def cmp_name(event):
            # Only pass through events about the transient file itself.
            if getattr(event, 'name') is None:
                return False
            return basename == event.name
        return self.add_watch(dirname, mask,
                              proc_fun=proc_class(ChainIfTrue(func=cmp_name)),
                              rec=False,
                              auto_add=False, do_glob=False,
                              exclude_filter=lambda path: False)
class RawOutputFormat:
    """Render event fields as text, optionally wrapped in terminal
    attribute sequences looked up from a format mapping."""
    def __init__(self, format=None):
        # An empty mapping means "no decoration": every lookup falls back
        # to '' so text passes through unchanged.
        self.format = format or {}
    def simple(self, s, attribute):
        """Return s decorated with `attribute` and reset to 'normal'."""
        text = s if isinstance(s, str) else str(s)
        prefix = self.format.get(attribute, '')
        suffix = self.format.get('normal', '')
        return prefix + text + suffix
    def punctuation(self, s):
        """Render punctuation (plain / 'normal' attribute)."""
        return self.simple(s, 'normal')
    def field_value(self, s):
        """Render a field value."""
        return self.simple(s, 'purple')
    def field_name(self, s):
        """Render a field name."""
        return self.simple(s, 'blue')
    def class_name(self, s):
        """Render a class name (red + bold when colors are configured)."""
        return self.format.get('red', '') + self.simple(s, 'bold')
output_format = RawOutputFormat()
class ColoredOutputFormat(RawOutputFormat):
    """RawOutputFormat preconfigured with ANSI SGR escape sequences."""
    def __init__(self):
        # ANSI terminal attribute codes: 0 reset, 30-36 foreground colors,
        # 1/4/5/7 bold/underline/blink/inverse.
        f = {'normal': '\033[0m',
             'black': '\033[30m',
             'red': '\033[31m',
             'green': '\033[32m',
             'yellow': '\033[33m',
             'blue': '\033[34m',
             'purple': '\033[35m',
             'cyan': '\033[36m',
             'bold': '\033[1m',
             'uline': '\033[4m',
             'blink': '\033[5m',
             'invert': '\033[7m'}
        RawOutputFormat.__init__(self, f)
def compatibility_mode():
    """Re-expose event constants on EventsCodes for old-API callers."""
    global COMPATIBILITY_MODE
    setattr(EventsCodes, 'ALL_EVENTS', ALL_EVENTS)
    # Mirror every module-level IN_* flag onto the EventsCodes class.
    module_ns = globals()
    for name in module_ns:
        if name.startswith('IN_'):
            setattr(EventsCodes, name, module_ns[name])
    COMPATIBILITY_MODE = True
def command_line():
    """Stand-alone monitor entry point: parse CLI options, watch the
    given paths (default /tmp) and print or tally events until ^C."""
    from optparse import OptionParser
    usage = "usage: %prog [options] [path1] [path2] [pathn]"
    parser = OptionParser(usage=usage)
    parser.add_option("-v", "--verbose", action="store_true",
                      dest="verbose", help="Verbose mode")
    parser.add_option("-r", "--recursive", action="store_true",
                      dest="recursive",
                      help="Add watches recursively on paths")
    parser.add_option("-a", "--auto_add", action="store_true",
                      dest="auto_add",
                      help="Automatically add watches on new directories")
    parser.add_option("-e", "--events-list", metavar="EVENT[,...]",
                      dest="events_list",
                      help=("A comma-separated list of events to watch for - "
                            "see the documentation for valid options (defaults"
                            " to everything)"))
    parser.add_option("-s", "--stats", action="store_true",
                      dest="stats",
                      help="Display dummy statistics")
    parser.add_option("-V", "--version", action="store_true",
                      dest="version", help="Pyinotify version")
    parser.add_option("-f", "--raw-format", action="store_true",
                      dest="raw_format",
                      help="Disable enhanced output format.")
    parser.add_option("-c", "--command", action="store",
                      dest="command",
                      help="Shell command to run upon event")
    (options, args) = parser.parse_args()
    if options.verbose:
        log.setLevel(10)  # 10 == logging.DEBUG
    if options.version:
        print(__version__)
    if not options.raw_format:
        global output_format
        output_format = ColoredOutputFormat()
    if len(args) < 1:
        path = '/tmp' # default watched path
    else:
        path = args
    # watch manager instance
    wm = WatchManager()
    # notifier instance and init
    if options.stats:
        notifier = Notifier(wm, default_proc_fun=Stats(), read_freq=5)
    else:
        notifier = Notifier(wm, default_proc_fun=PrintAllEvents())
    # What mask to apply
    mask = 0
    if options.events_list:
        events_list = options.events_list.split(',')
        for ev in events_list:
            evcode = EventsCodes.ALL_FLAGS.get(ev, 0)
            if evcode:
                mask |= evcode
            else:
                parser.error("The event '%s' specified with option -e"
                             " is not valid" % ev)
    else:
        mask = ALL_EVENTS
    # stats
    cb_fun = None
    if options.stats:
        def cb(s):
            sys.stdout.write(repr(s.proc_fun()))
            sys.stdout.write('\n')
            sys.stdout.write(str(s.proc_fun()))
            sys.stdout.write('\n')
            sys.stdout.flush()
        cb_fun = cb
    # External command.  NOTE: when both -s and -c are given, this
    # callback replaces the stats callback defined above.
    if options.command:
        def cb(s):
            subprocess.Popen(options.command, shell=True)
        cb_fun = cb
    log.debug('Start monitoring %s, (press c^c to halt pyinotify)' % path)
    wm.add_watch(path, mask, rec=options.recursive, auto_add=options.auto_add)
    # Loop forever (until sigint signal get caught)
    notifier.loop(callback=cb_fun)
# Allow running this module directly as a small inotify monitoring tool.
if __name__ == '__main__':
    command_line()
| true | true |
f72fec409082a747247e54f3160b84531dff3bf0 | 49 | py | Python | pydotted/__init__.py | aredden/pydotted | 62ad1d3eaccc65edc94b3cf4a0673ad089a29c6a | [
"MIT"
] | null | null | null | pydotted/__init__.py | aredden/pydotted | 62ad1d3eaccc65edc94b3cf4a0673ad089a29c6a | [
"MIT"
] | null | null | null | pydotted/__init__.py | aredden/pydotted | 62ad1d3eaccc65edc94b3cf4a0673ad089a29c6a | [
"MIT"
] | null | null | null | from .pydotted import pydot
__ALL__ = ["pydot"]
| 12.25 | 27 | 0.714286 | from .pydotted import pydot
__ALL__ = ["pydot"]
| true | true |
f72fed24b1aa083de6ed1211270c3ee51f07a93e | 5,502 | py | Python | custom_components/panasonic_smart_app/sensor.py | sugoi-wada/panasonic_smart_app | 78c3e377165b93c415108fa21137067585cfc72d | [
"MIT"
] | null | null | null | custom_components/panasonic_smart_app/sensor.py | sugoi-wada/panasonic_smart_app | 78c3e377165b93c415108fa21137067585cfc72d | [
"MIT"
] | null | null | null | custom_components/panasonic_smart_app/sensor.py | sugoi-wada/panasonic_smart_app | 78c3e377165b93c415108fa21137067585cfc72d | [
"MIT"
] | null | null | null | from datetime import datetime, timedelta
import logging
from homeassistant.components.sensor import SensorEntity
from homeassistant.const import (
STATE_UNAVAILABLE,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_PM25,
TEMP_CELSIUS,
ENERGY_KILO_WATT_HOUR,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
PERCENTAGE,
)
from .entity import PanasonicBaseEntity
from .const import (
DOMAIN,
DEVICE_TYPE_DEHUMIDIFIER,
DEVICE_TYPE_AC,
DATA_CLIENT,
DATA_COORDINATOR,
LABEL_PM25,
LABEL_HUMIDITY,
LABEL_OUTDOOR_TEMPERATURE,
LABEL_ENERGY,
ICON_PM25,
ICON_THERMOMETER,
ICON_HUMIDITY,
ICON_ENERGY,
STATE_MEASUREMENT,
STATE_TOTAL_INCREASING,
)
_LOGGER = logging.getLogger(__package__)
async def async_setup_entry(hass, entry, async_add_entities) -> bool:
    """Create sensor entities for every device known to the coordinator.

    Every device gets an energy sensor; dehumidifiers additionally get
    humidity and PM2.5 sensors, and air conditioners an outdoor
    temperature sensor.
    """
    client = hass.data[DOMAIN][entry.entry_id][DATA_CLIENT]
    coordinator = hass.data[DOMAIN][entry.entry_id][DATA_COORDINATOR]
    devices = coordinator.data
    sensors = []

    for index, device in enumerate(devices):
        device_type = int(device.get("DeviceType"))

        sensors.append(
            PanasonicEnergySensor(
                coordinator,
                index,
                client,
                device,
            )
        )

        if device_type == DEVICE_TYPE_DEHUMIDIFIER:
            sensors.append(
                PanasonicHumiditySensor(
                    coordinator,
                    index,
                    client,
                    device,
                )
            )
            sensors.append(
                PanasonicPM25Sensor(
                    coordinator,
                    index,
                    client,
                    device,
                )
            )

        if device_type == DEVICE_TYPE_AC:
            sensors.append(
                PanasonicOutdoorTemperatureSensor(
                    coordinator,
                    index,
                    client,
                    device,
                )
            )

    # True -> request an immediate state update after adding.
    async_add_entities(sensors, True)

    return True
class PanasonicHumiditySensor(PanasonicBaseEntity, SensorEntity):
    """ Panasonic dehumidifier current humidity sensor """

    @property
    def label(self):
        # Entity display name, e.g. "<nickname> Humidity".
        return f"{self.nickname} {LABEL_HUMIDITY}"

    @property
    def icon(self) -> str:
        return ICON_HUMIDITY

    @property
    def device_class(self) -> str:
        return DEVICE_CLASS_HUMIDITY

    @property
    def state(self) -> int:
        """Current relative humidity read from status key "0x07"."""
        status = self.coordinator.data[self.index]["status"]
        _current_humd = status.get("0x07", None)
        _LOGGER.debug(f"[{self.label}] state: {_current_humd}")
        # Fix: compare against None instead of relying on truthiness so a
        # legitimate reading of 0 is not reported as unavailable.
        return _current_humd if _current_humd is not None else STATE_UNAVAILABLE

    @property
    def state_class(self) -> str:
        return STATE_MEASUREMENT

    @property
    def unit_of_measurement(self) -> str:
        return PERCENTAGE
class PanasonicPM25Sensor(PanasonicBaseEntity, SensorEntity):
    """ Panasonic dehumidifer PM2.5 sensor """

    @property
    def label(self) -> str:
        return f"{self.nickname} {LABEL_PM25}"

    @property
    def icon(self) -> str:
        return ICON_PM25

    @property
    def device_class(self) -> str:
        return DEVICE_CLASS_PM25

    @property
    def state(self) -> int:
        # PM2.5 concentration from status key "0x53"; -1 is the local
        # "missing" sentinel (real concentrations are never negative).
        status = self.coordinator.data[self.index]["status"]
        _pm25 = float(status.get("0x53", -1))
        _LOGGER.debug(f"[{self.label}] state: {_pm25}")
        return _pm25 if _pm25 >= 0 else STATE_UNAVAILABLE

    @property
    def state_class(self) -> str:
        return STATE_MEASUREMENT

    @property
    def unit_of_measurement(self) -> str:
        return CONCENTRATION_MICROGRAMS_PER_CUBIC_METER
class PanasonicOutdoorTemperatureSensor(PanasonicBaseEntity, SensorEntity):
    """ Panasonic AC outdoor temperature sensor """

    @property
    def label(self) -> str:
        return f"{self.nickname} {LABEL_OUTDOOR_TEMPERATURE}"

    @property
    def icon(self) -> str:
        return ICON_THERMOMETER

    @property
    def device_class(self) -> str:
        return DEVICE_CLASS_TEMPERATURE

    @property
    def state(self) -> int:
        """Outdoor temperature in °C from status key "0x21"."""
        status = self.coordinator.data[self.index]["status"]
        _outdoor_temperature = status.get("0x21", None)
        _LOGGER.debug(f"[{self.label}] state: {_outdoor_temperature}")
        # Fix: sub-zero temperatures are valid readings; only a missing
        # "0x21" key means unavailable.  (Previously any value below 0
        # was discarded as unavailable.)
        if _outdoor_temperature is None:
            return STATE_UNAVAILABLE
        return float(_outdoor_temperature)

    @property
    def state_class(self) -> str:
        return STATE_MEASUREMENT

    @property
    def unit_of_measurement(self) -> str:
        return TEMP_CELSIUS
class PanasonicEnergySensor(PanasonicBaseEntity, SensorEntity):
    """ Panasonic energy sensor """

    @property
    def label(self) -> str:
        return f"{self.nickname} {LABEL_ENERGY}"

    @property
    def icon(self) -> str:
        return ICON_ENERGY

    @property
    def device_class(self) -> str:
        return DEVICE_CLASS_ENERGY

    @property
    def last_reset(self):
        # NOTE(review): returns the first day of the current month but
        # keeps the current time-of-day; confirm whether midnight was
        # intended here.
        return datetime.today().replace(day=1)

    @property
    def state(self) -> int:
        # Cumulative energy in kWh; negative values mean "no reading".
        energy = self.coordinator.data[self.index]["energy"]
        _LOGGER.debug(f"[{self.label}] state: {energy}")
        return energy if energy >= 0 else STATE_UNAVAILABLE

    @property
    def state_class(self) -> str:
        return STATE_TOTAL_INCREASING

    @property
    def unit_of_measurement(self) -> str:
        return ENERGY_KILO_WATT_HOUR
| 25.71028 | 87 | 0.62341 | from datetime import datetime, timedelta
import logging
from homeassistant.components.sensor import SensorEntity
from homeassistant.const import (
STATE_UNAVAILABLE,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
DEVICE_CLASS_ENERGY,
DEVICE_CLASS_PM25,
TEMP_CELSIUS,
ENERGY_KILO_WATT_HOUR,
CONCENTRATION_MICROGRAMS_PER_CUBIC_METER,
PERCENTAGE,
)
from .entity import PanasonicBaseEntity
from .const import (
DOMAIN,
DEVICE_TYPE_DEHUMIDIFIER,
DEVICE_TYPE_AC,
DATA_CLIENT,
DATA_COORDINATOR,
LABEL_PM25,
LABEL_HUMIDITY,
LABEL_OUTDOOR_TEMPERATURE,
LABEL_ENERGY,
ICON_PM25,
ICON_THERMOMETER,
ICON_HUMIDITY,
ICON_ENERGY,
STATE_MEASUREMENT,
STATE_TOTAL_INCREASING,
)
_LOGGER = logging.getLogger(__package__)
async def async_setup_entry(hass, entry, async_add_entities) -> bool:
    """Create sensor entities for every device known to the coordinator.

    Every device gets an energy sensor; dehumidifiers additionally get
    humidity and PM2.5 sensors, and air conditioners an outdoor
    temperature sensor.
    """
    client = hass.data[DOMAIN][entry.entry_id][DATA_CLIENT]
    coordinator = hass.data[DOMAIN][entry.entry_id][DATA_COORDINATOR]
    devices = coordinator.data
    sensors = []
    for index, device in enumerate(devices):
        device_type = int(device.get("DeviceType"))
        sensors.append(
            PanasonicEnergySensor(
                coordinator,
                index,
                client,
                device,
            )
        )
        if device_type == DEVICE_TYPE_DEHUMIDIFIER:
            sensors.append(
                PanasonicHumiditySensor(
                    coordinator,
                    index,
                    client,
                    device,
                )
            )
            sensors.append(
                PanasonicPM25Sensor(
                    coordinator,
                    index,
                    client,
                    device,
                )
            )
        if device_type == DEVICE_TYPE_AC:
            sensors.append(
                PanasonicOutdoorTemperatureSensor(
                    coordinator,
                    index,
                    client,
                    device,
                )
            )
    # True -> request an immediate state update after adding.
    async_add_entities(sensors, True)
    return True
class PanasonicHumiditySensor(PanasonicBaseEntity, SensorEntity):
    """Panasonic dehumidifier current humidity sensor."""
    @property
    def label(self):
        return f"{self.nickname} {LABEL_HUMIDITY}"
    @property
    def icon(self) -> str:
        return ICON_HUMIDITY
    @property
    def device_class(self) -> str:
        return DEVICE_CLASS_HUMIDITY
    @property
    def state(self) -> int:
        """Current relative humidity read from status key "0x07"."""
        status = self.coordinator.data[self.index]["status"]
        _current_humd = status.get("0x07", None)
        _LOGGER.debug(f"[{self.label}] state: {_current_humd}")
        # Fix: compare against None instead of relying on truthiness so a
        # legitimate reading of 0 is not reported as unavailable.
        return _current_humd if _current_humd is not None else STATE_UNAVAILABLE
    @property
    def state_class(self) -> str:
        return STATE_MEASUREMENT
    @property
    def unit_of_measurement(self) -> str:
        return PERCENTAGE
class PanasonicPM25Sensor(PanasonicBaseEntity, SensorEntity):
    """Panasonic dehumidifier PM2.5 sensor."""
    @property
    def label(self) -> str:
        return f"{self.nickname} {LABEL_PM25}"
    @property
    def icon(self) -> str:
        return ICON_PM25
    @property
    def device_class(self) -> str:
        return DEVICE_CLASS_PM25
    @property
    def state(self) -> int:
        # PM2.5 concentration from status key "0x53"; -1 is the local
        # "missing" sentinel (real concentrations are never negative).
        status = self.coordinator.data[self.index]["status"]
        _pm25 = float(status.get("0x53", -1))
        _LOGGER.debug(f"[{self.label}] state: {_pm25}")
        return _pm25 if _pm25 >= 0 else STATE_UNAVAILABLE
    @property
    def state_class(self) -> str:
        return STATE_MEASUREMENT
    @property
    def unit_of_measurement(self) -> str:
        return CONCENTRATION_MICROGRAMS_PER_CUBIC_METER
class PanasonicOutdoorTemperatureSensor(PanasonicBaseEntity, SensorEntity):
    """Panasonic AC outdoor temperature sensor."""
    @property
    def label(self) -> str:
        return f"{self.nickname} {LABEL_OUTDOOR_TEMPERATURE}"
    @property
    def icon(self) -> str:
        return ICON_THERMOMETER
    @property
    def device_class(self) -> str:
        return DEVICE_CLASS_TEMPERATURE
    @property
    def state(self) -> int:
        """Outdoor temperature in °C from status key "0x21"."""
        status = self.coordinator.data[self.index]["status"]
        _outdoor_temperature = status.get("0x21", None)
        _LOGGER.debug(f"[{self.label}] state: {_outdoor_temperature}")
        # Fix: sub-zero temperatures are valid readings; only a missing
        # "0x21" key means unavailable.  (Previously any value below 0
        # was discarded as unavailable.)
        if _outdoor_temperature is None:
            return STATE_UNAVAILABLE
        return float(_outdoor_temperature)
    @property
    def state_class(self) -> str:
        return STATE_MEASUREMENT
    @property
    def unit_of_measurement(self) -> str:
        return TEMP_CELSIUS
class PanasonicEnergySensor(PanasonicBaseEntity, SensorEntity):
    """Panasonic cumulative energy sensor."""
    @property
    def label(self) -> str:
        return f"{self.nickname} {LABEL_ENERGY}"
    @property
    def icon(self) -> str:
        return ICON_ENERGY
    @property
    def device_class(self) -> str:
        return DEVICE_CLASS_ENERGY
    @property
    def last_reset(self):
        # NOTE(review): returns the first day of the current month but
        # keeps the current time-of-day; confirm whether midnight was
        # intended here.
        return datetime.today().replace(day=1)
    @property
    def state(self) -> int:
        # Cumulative energy in kWh; negative values mean "no reading".
        energy = self.coordinator.data[self.index]["energy"]
        _LOGGER.debug(f"[{self.label}] state: {energy}")
        return energy if energy >= 0 else STATE_UNAVAILABLE
    @property
    def state_class(self) -> str:
        return STATE_TOTAL_INCREASING
    @property
    def unit_of_measurement(self) -> str:
        return ENERGY_KILO_WATT_HOUR
| true | true |
f72fed563a8c29934c97216b6cbba861286ec271 | 3,487 | py | Python | IPython/core/tests/test_prompts.py | flexlee/ipython | 7528fbd76073c90262b9ac127de57c4c59b23a5c | [
"BSD-3-Clause-Clear"
] | 1 | 2022-03-13T23:06:43.000Z | 2022-03-13T23:06:43.000Z | IPython/core/tests/test_prompts.py | andreasjansson/ipython | 09b4311726f46945b936c699f7a6489d74d7397f | [
"BSD-3-Clause-Clear"
] | null | null | null | IPython/core/tests/test_prompts.py | andreasjansson/ipython | 09b4311726f46945b936c699f7a6489d74d7397f | [
"BSD-3-Clause-Clear"
] | 1 | 2020-05-03T10:25:12.000Z | 2020-05-03T10:25:12.000Z | # -*- coding: utf-8
"""Tests for prompt generation."""
import unittest
import os
import nose.tools as nt
from IPython.testing import tools as tt, decorators as dec
from IPython.core.prompts import PromptManager, LazyEvaluate
from IPython.testing.globalipapp import get_ipython
from IPython.utils import py3compat
from IPython.utils.tempdir import TemporaryDirectory
ip = get_ipython()
class PromptTests(unittest.TestCase):
    """Tests for PromptManager template rendering and LazyEvaluate.

    NOTE(review): this module uses Python 2 APIs (``unicode``,
    ``os.getcwdu``) and will not run unmodified on Python 3.
    """
    def setUp(self):
        # Fresh PromptManager per test, bound to the global test shell.
        self.pm = PromptManager(shell=ip, config=ip.config)

    def test_multiline_prompt(self):
        # Width bookkeeping should only count the final line of the prompt.
        self.pm.in_template = "[In]\n>>>"
        self.pm.render('in')
        self.assertEqual(self.pm.width, 3)
        self.assertEqual(self.pm.txtwidth, 3)

        self.pm.in_template = '[In]\n'
        self.pm.render('in')
        self.assertEqual(self.pm.width, 0)
        self.assertEqual(self.pm.txtwidth, 0)

    def test_translate_abbreviations(self):
        # Backslash/percent abbreviations expand to format-string fields.
        def do_translate(template):
            self.pm.in_template = template
            return self.pm.templates['in']

        pairs = [(r'%n>', '{color.number}{count}{color.prompt}>'),
                 (r'\T', '{time}'),
                 (r'\n', '\n')
                 ]

        tt.check_pairs(do_translate, pairs)

    def test_user_ns(self):
        # Prompt templates can interpolate names from the user namespace.
        self.pm.color_scheme = 'NoColor'
        ip.ex("foo='bar'")
        self.pm.in_template = "In [{foo}]"
        prompt = self.pm.render('in')
        self.assertEqual(prompt, u'In [bar]')

    def test_builtins(self):
        # Builtins are reachable when not shadowed by user variables.
        self.pm.color_scheme = 'NoColor'
        self.pm.in_template = "In [{int}]"
        prompt = self.pm.render('in')
        self.assertEqual(prompt, u"In [%r]" % int)

    def test_undefined(self):
        # Unknown names render an inline error instead of raising.
        self.pm.color_scheme = 'NoColor'
        self.pm.in_template = "In [{foo_dne}]"
        prompt = self.pm.render('in')
        self.assertEqual(prompt, u"In [<ERROR: 'foo_dne' not found>]")

    def test_render(self):
        self.pm.in_template = r'\#>'
        self.assertEqual(self.pm.render('in',color=False), '%d>' % ip.execution_count)

    def test_render_unicode_cwd(self):
        # \w must handle a non-ASCII working directory.
        save = os.getcwdu()
        with TemporaryDirectory(u'ünicødé') as td:
            os.chdir(td)
            self.pm.in_template = r'\w [\#]'
            p = self.pm.render('in', color=False)
            self.assertEqual(p, u"%s [%i]" % (os.getcwdu(), ip.execution_count))
        os.chdir(save)

    def test_lazy_eval_unicode(self):
        u = u'ünicødé'
        lz = LazyEvaluate(lambda : u)
        # str(lz) would fail
        self.assertEqual(unicode(lz), u)
        self.assertEqual(format(lz), u)

    def test_lazy_eval_nonascii_bytes(self):
        u = u'ünicødé'
        b = u.encode('utf8')
        lz = LazyEvaluate(lambda : b)
        # unicode(lz) would fail
        self.assertEqual(str(lz), str(b))
        self.assertEqual(format(lz), str(b))

    def test_lazy_eval_float(self):
        f = 0.503
        lz = LazyEvaluate(lambda : f)

        self.assertEqual(str(lz), str(f))
        self.assertEqual(unicode(lz), unicode(f))
        self.assertEqual(format(lz), str(f))
        # Format specs are forwarded to the lazily-evaluated value.
        self.assertEqual(format(lz, '.1'), '0.5')

    @dec.skip_win32
    def test_cwd_x(self):
        # \X0 abbreviates $HOME as ~ in the prompt.
        self.pm.in_template = r"\X0"
        save = os.getcwdu()
        os.chdir(os.path.expanduser('~'))
        p = self.pm.render('in', color=False)
        try:
            self.assertEqual(p, '~')
        finally:
            os.chdir(save)
| 31.133929 | 86 | 0.578721 |
import unittest
import os
import nose.tools as nt
from IPython.testing import tools as tt, decorators as dec
from IPython.core.prompts import PromptManager, LazyEvaluate
from IPython.testing.globalipapp import get_ipython
from IPython.utils import py3compat
from IPython.utils.tempdir import TemporaryDirectory
ip = get_ipython()
class PromptTests(unittest.TestCase):
    """Tests for PromptManager template rendering and LazyEvaluate.

    NOTE(review): this module uses Python 2 APIs (``unicode``,
    ``os.getcwdu``) and will not run unmodified on Python 3.
    """
    def setUp(self):
        # Fresh PromptManager per test, bound to the global test shell.
        self.pm = PromptManager(shell=ip, config=ip.config)
    def test_multiline_prompt(self):
        # Width bookkeeping should only count the final line of the prompt.
        self.pm.in_template = "[In]\n>>>"
        self.pm.render('in')
        self.assertEqual(self.pm.width, 3)
        self.assertEqual(self.pm.txtwidth, 3)
        self.pm.in_template = '[In]\n'
        self.pm.render('in')
        self.assertEqual(self.pm.width, 0)
        self.assertEqual(self.pm.txtwidth, 0)
    def test_translate_abbreviations(self):
        # Backslash/percent abbreviations expand to format-string fields.
        def do_translate(template):
            self.pm.in_template = template
            return self.pm.templates['in']
        pairs = [(r'%n>', '{color.number}{count}{color.prompt}>'),
                 (r'\T', '{time}'),
                 (r'\n', '\n')
                 ]
        tt.check_pairs(do_translate, pairs)
    def test_user_ns(self):
        # Prompt templates can interpolate names from the user namespace.
        self.pm.color_scheme = 'NoColor'
        ip.ex("foo='bar'")
        self.pm.in_template = "In [{foo}]"
        prompt = self.pm.render('in')
        self.assertEqual(prompt, u'In [bar]')
    def test_builtins(self):
        # Builtins are reachable when not shadowed by user variables.
        self.pm.color_scheme = 'NoColor'
        self.pm.in_template = "In [{int}]"
        prompt = self.pm.render('in')
        self.assertEqual(prompt, u"In [%r]" % int)
    def test_undefined(self):
        # Unknown names render an inline error instead of raising.
        self.pm.color_scheme = 'NoColor'
        self.pm.in_template = "In [{foo_dne}]"
        prompt = self.pm.render('in')
        self.assertEqual(prompt, u"In [<ERROR: 'foo_dne' not found>]")
    def test_render(self):
        self.pm.in_template = r'\#>'
        self.assertEqual(self.pm.render('in',color=False), '%d>' % ip.execution_count)
    def test_render_unicode_cwd(self):
        # \w must handle a non-ASCII working directory.
        save = os.getcwdu()
        with TemporaryDirectory(u'ünicødé') as td:
            os.chdir(td)
            self.pm.in_template = r'\w [\#]'
            p = self.pm.render('in', color=False)
            self.assertEqual(p, u"%s [%i]" % (os.getcwdu(), ip.execution_count))
        os.chdir(save)
    def test_lazy_eval_unicode(self):
        u = u'ünicødé'
        lz = LazyEvaluate(lambda : u)
        # str(lz) would fail on non-ASCII; only unicode()/format() work.
        self.assertEqual(unicode(lz), u)
        self.assertEqual(format(lz), u)
    def test_lazy_eval_nonascii_bytes(self):
        u = u'ünicødé'
        b = u.encode('utf8')
        lz = LazyEvaluate(lambda : b)
        # unicode(lz) would fail on non-ASCII bytes; str()/format() work.
        self.assertEqual(str(lz), str(b))
        self.assertEqual(format(lz), str(b))
    def test_lazy_eval_float(self):
        f = 0.503
        lz = LazyEvaluate(lambda : f)
        self.assertEqual(str(lz), str(f))
        self.assertEqual(unicode(lz), unicode(f))
        self.assertEqual(format(lz), str(f))
        # Format specs are forwarded to the lazily-evaluated value.
        self.assertEqual(format(lz, '.1'), '0.5')
    @dec.skip_win32
    def test_cwd_x(self):
        # \X0 abbreviates $HOME as ~ in the prompt.
        self.pm.in_template = r"\X0"
        save = os.getcwdu()
        os.chdir(os.path.expanduser('~'))
        p = self.pm.render('in', color=False)
        try:
            self.assertEqual(p, '~')
        finally:
            os.chdir(save)
| true | true |
f72fed7319c1d66dcc65177c208b1a6671806efd | 4,361 | py | Python | var/spack/repos/builtin/packages/ginkgo/package.py | robertodr/spack | 9b809e01b47d48f01b3d257912fe1b752943cd3d | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 1 | 2020-09-02T11:55:57.000Z | 2020-09-02T11:55:57.000Z | var/spack/repos/builtin/packages/ginkgo/package.py | robertodr/spack | 9b809e01b47d48f01b3d257912fe1b752943cd3d | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | var/spack/repos/builtin/packages/ginkgo/package.py | robertodr/spack | 9b809e01b47d48f01b3d257912fe1b752943cd3d | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 2 | 2020-01-10T18:54:54.000Z | 2021-07-03T22:57:16.000Z | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import sys
class Ginkgo(CMakePackage, CudaPackage):
    """High-performance linear algebra library for manycore systems,
    with a focus on sparse solution of linear systems."""

    homepage = "https://ginkgo-project.github.io/"
    git = "https://github.com/ginkgo-project/ginkgo.git"

    maintainers = ['tcojean', 'hartwiganzt']

    version('develop', branch='develop')
    version('master', branch='master')
    version('1.3.0', commit='4678668c66f634169def81620a85c9a20b7cec78')  # v1.3.0
    version('1.2.0', commit='b4be2be961fd5db45c3d02b5e004d73550722e31')  # v1.2.0
    version('1.1.1', commit='08d2c5200d3c78015ac8a4fd488bafe1e4240cf5')  # v1.1.1
    version('1.1.0', commit='b9bec8225442b3eb2a85a870efa112ab767a17fb')  # v1.1.0
    version('1.0.0', commit='45244641e0c2b19ba33aecd25153c0bddbcbe1a0')  # v1.0.0

    variant('shared', default=True, description='Build shared libraries')
    variant('full_optimizations', default=False, description='Compile with all optimizations')
    variant('openmp', default=sys.platform != 'darwin',  description='Build with OpenMP')
    variant('develtools', default=False, description='Compile with develtools enabled')
    variant('build_type', default='Release',
            description='The build type to build',
            values=('Debug', 'Release'))
    variant('hip', default=False, description='Compile Ginkgo with HIP support')

    depends_on('cmake@3.9:', type='build')
    depends_on('cuda@9:',    when='+cuda')

    depends_on('hip', when='+hip')
    depends_on('hipsparse', type="link", when='+hip')
    depends_on('hipblas', type="link", when='+hip')
    depends_on('rocrand', type="link", when='@develop+hip')
    depends_on('rocthrust', type="build", when='+hip')

    # Somehow, these dependencies not propagated by the HIP stack?
    depends_on('rocm-device-libs', type="link", when='+hip')
    depends_on('comgr', type="link", when='+hip')

    conflicts('%gcc@:5.2.9')
    conflicts("+hip", when="@:1.1.1")
    # The HIP packages from spack doen't seem to work well with CUDA
    # backend for now, so disable HIP with CUDA backend.
    conflicts("+cuda", when="+hip")

    def cmake_args(self):
        """Assemble the CMake configuration flags for Ginkgo.

        Verifies the compiler supports the C++ standard Ginkgo needs
        (C++11 up to 1.2.0, C++14 afterwards) before configuring.
        """
        # Check that the have the correct C++ standard is available
        if self.spec.satisfies('@:1.2.0'):
            try:
                self.compiler.cxx11_flag
            except UnsupportedCompilerFlag:
                # Fix: the error was previously constructed but never
                # raised, making this compiler check a no-op.
                raise InstallError('Ginkgo requires a C++11-compliant C++ compiler')
        else:
            try:
                self.compiler.cxx14_flag
            except UnsupportedCompilerFlag:
                # Fix: same as above — the error must be raised.
                raise InstallError('Ginkgo requires a C++14-compliant C++ compiler')

        spec = self.spec
        args = [
            '-DGINKGO_BUILD_CUDA=%s' % ('ON' if '+cuda' in spec else 'OFF'),
            '-DGINKGO_BUILD_OMP=%s' % ('ON' if '+openmp' in spec else 'OFF'),
            '-DBUILD_SHARED_LIBS=%s' % ('ON' if '+shared' in spec else 'OFF'),
            '-DGINKGO_JACOBI_FULL_OPTIMIZATIONS=%s' % (
                'ON' if '+full_optimizations' in spec else 'OFF'),
            '-DGINKGO_DEVEL_TOOLS=%s' % (
                'ON' if '+develtools' in spec else 'OFF'),
            '-DGINKGO_BUILD_HIP=%s' % ('ON' if '+hip' in spec else 'OFF'),
            # As we are not exposing benchmarks, examples, tests nor doc
            # as part of the installation, disable building them altogether.
            '-DGINKGO_BUILD_BENCHMARKS=OFF',
            '-DGINKGO_BUILD_DOC=OFF',
            '-DGINKGO_BUILD_EXAMPLES=OFF',
            '-DGINKGO_BUILD_TESTS=OFF'
        ]
        if '+hip' in spec:
            args.append('-DHIP_PATH={0}'. format(spec['hip'].prefix))
            args.append('-DHIP_CLANG_PATH={0}/bin'.
                        format(spec['llvm-amdgpu'].prefix))
            args.append('-DHIP_CLANG_INCLUDE_PATH={0}/include'.
                        format(spec['llvm-amdgpu'].prefix))
            args.append('-DHIPSPARSE_PATH={0}'.
                        format(spec['hipsparse'].prefix))
            args.append('-DHIPBLAS_PATH={0}'.
                        format(spec['hipblas'].prefix))
        return args
| 45.427083 | 94 | 0.61706 |
from spack import *
import sys
class Ginkgo(CMakePackage, CudaPackage):
    """High-performance linear algebra library for manycore systems,
    with a focus on sparse solution of linear systems."""
    homepage = "https://ginkgo-project.github.io/"
    git = "https://github.com/ginkgo-project/ginkgo.git"
    maintainers = ['tcojean', 'hartwiganzt']
    version('develop', branch='develop')
    version('master', branch='master')
    version('1.3.0', commit='4678668c66f634169def81620a85c9a20b7cec78')
    version('1.2.0', commit='b4be2be961fd5db45c3d02b5e004d73550722e31')
    version('1.1.1', commit='08d2c5200d3c78015ac8a4fd488bafe1e4240cf5')
    version('1.1.0', commit='b9bec8225442b3eb2a85a870efa112ab767a17fb')
    version('1.0.0', commit='45244641e0c2b19ba33aecd25153c0bddbcbe1a0')
    variant('shared', default=True, description='Build shared libraries')
    variant('full_optimizations', default=False, description='Compile with all optimizations')
    variant('openmp', default=sys.platform != 'darwin',  description='Build with OpenMP')
    variant('develtools', default=False, description='Compile with develtools enabled')
    variant('build_type', default='Release',
            description='The build type to build',
            values=('Debug', 'Release'))
    variant('hip', default=False, description='Compile Ginkgo with HIP support')
    depends_on('cmake@3.9:', type='build')
    depends_on('cuda@9:', when='+cuda')
    depends_on('hip', when='+hip')
    depends_on('hipsparse', type="link", when='+hip')
    depends_on('hipblas', type="link", when='+hip')
    depends_on('rocrand', type="link", when='@develop+hip')
    depends_on('rocthrust', type="build", when='+hip')
    # These are apparently not propagated by the HIP stack itself.
    depends_on('rocm-device-libs', type="link", when='+hip')
    depends_on('comgr', type="link", when='+hip')
    conflicts('%gcc@:5.2.9')
    conflicts("+hip", when="@:1.1.1")
    # The spack HIP packages do not work well with the CUDA backend for
    # now, so disable HIP together with CUDA.
    conflicts("+cuda", when="+hip")
    def cmake_args(self):
        """Assemble the CMake configuration flags for Ginkgo.

        Verifies the compiler supports the C++ standard Ginkgo needs
        (C++11 up to 1.2.0, C++14 afterwards) before configuring.
        """
        # Check that the correct C++ standard is available.
        if self.spec.satisfies('@:1.2.0'):
            try:
                self.compiler.cxx11_flag
            except UnsupportedCompilerFlag:
                # Fix: the error was previously constructed but never
                # raised, making this compiler check a no-op.
                raise InstallError('Ginkgo requires a C++11-compliant C++ compiler')
        else:
            try:
                self.compiler.cxx14_flag
            except UnsupportedCompilerFlag:
                # Fix: same as above — the error must be raised.
                raise InstallError('Ginkgo requires a C++14-compliant C++ compiler')
        spec = self.spec
        args = [
            '-DGINKGO_BUILD_CUDA=%s' % ('ON' if '+cuda' in spec else 'OFF'),
            '-DGINKGO_BUILD_OMP=%s' % ('ON' if '+openmp' in spec else 'OFF'),
            '-DBUILD_SHARED_LIBS=%s' % ('ON' if '+shared' in spec else 'OFF'),
            '-DGINKGO_JACOBI_FULL_OPTIMIZATIONS=%s' % (
                'ON' if '+full_optimizations' in spec else 'OFF'),
            '-DGINKGO_DEVEL_TOOLS=%s' % (
                'ON' if '+develtools' in spec else 'OFF'),
            '-DGINKGO_BUILD_HIP=%s' % ('ON' if '+hip' in spec else 'OFF'),
            # Benchmarks, examples, tests and docs are not installed, so
            # skip building them altogether.
            '-DGINKGO_BUILD_BENCHMARKS=OFF',
            '-DGINKGO_BUILD_DOC=OFF',
            '-DGINKGO_BUILD_EXAMPLES=OFF',
            '-DGINKGO_BUILD_TESTS=OFF'
        ]
        if '+hip' in spec:
            args.append('-DHIP_PATH={0}'. format(spec['hip'].prefix))
            args.append('-DHIP_CLANG_PATH={0}/bin'.
                        format(spec['llvm-amdgpu'].prefix))
            args.append('-DHIP_CLANG_INCLUDE_PATH={0}/include'.
                        format(spec['llvm-amdgpu'].prefix))
            args.append('-DHIPSPARSE_PATH={0}'.
                        format(spec['hipsparse'].prefix))
            args.append('-DHIPBLAS_PATH={0}'.
                        format(spec['hipblas'].prefix))
        return args
| true | true |
f72fedd3534283eb11dfd4a84eada7c236ead59a | 10,438 | py | Python | src/quart/wrappers/request.py | MarkoShiva/quart | f6709c6082a3cab9dffdcd937122f4d65a5990f7 | [
"MIT"
] | null | null | null | src/quart/wrappers/request.py | MarkoShiva/quart | f6709c6082a3cab9dffdcd937122f4d65a5990f7 | [
"MIT"
] | null | null | null | src/quart/wrappers/request.py | MarkoShiva/quart | f6709c6082a3cab9dffdcd937122f4d65a5990f7 | [
"MIT"
] | null | null | null | from __future__ import annotations
import asyncio
import io
from cgi import FieldStorage, parse_header
from typing import Any, AnyStr, Awaitable, Callable, Generator, Optional
from urllib.parse import parse_qs
from werkzeug.datastructures import CombinedMultiDict, Headers, MultiDict
from .base import BaseRequestWebsocket, JSONMixin
from ..datastructures import FileStorage
# Request headers copied onto HTTP/2 server-push promise requests so the
# pushed resource is negotiated (content type, encoding, language, cache
# behaviour) like the originating request.
SERVER_PUSH_HEADERS_TO_COPY = {
    "accept",
    "accept-encoding",
    "accept-language",
    "cache-control",
    "user-agent",
}
class Body:
    """A request body container.
    The request body can either be iterated over and consumed in parts
    (without building up memory usage) or awaited.
    .. code-block:: python
        async for data in body:
            ...
        # or simply
        complete = await body
    Note: It is not possible to iterate over the data and then await
    it.
    """

    def __init__(
        self, expected_content_length: Optional[int], max_content_length: Optional[int]
    ) -> None:
        self._data = bytearray()
        self._complete: asyncio.Event = asyncio.Event()
        self._has_data: asyncio.Event = asyncio.Event()
        self._max_content_length = max_content_length
        # Exceptions must be raised within application (not ASGI)
        # calls, this is achieved by having the ASGI methods set this
        # to an exception on error.
        self._must_raise: Optional[Exception] = None
        if (
            expected_content_length is not None
            and max_content_length is not None
            and expected_content_length > max_content_length
        ):
            from ..exceptions import RequestEntityTooLarge # noqa Avoiding circular import

            self._must_raise = RequestEntityTooLarge()

    def __aiter__(self) -> "Body":
        # The Body itself is the async iterator; no separate state object.
        return self

    async def __anext__(self) -> bytes:
        # Yield the next received chunk, or stop once complete and drained.
        if self._must_raise is not None:
            raise self._must_raise

        # if we got all of the data in the first shot, then self._complete is
        # set and self._has_data will not get set again, so skip the await
        # if we already have completed everything
        if not self._complete.is_set():
            await self._has_data.wait()

        if self._complete.is_set() and len(self._data) == 0:
            raise StopAsyncIteration()

        data = bytes(self._data)
        self._data.clear()
        self._has_data.clear()
        return data

    def __await__(self) -> Generator[Any, None, Any]:
        # Await the whole body at once as a single bytes object.
        # Must check the _must_raise before and after waiting on the
        # completion event as it may change whilst waiting and the
        # event may not be set if there is already an issue.
        if self._must_raise is not None:
            raise self._must_raise

        yield from self._complete.wait().__await__()

        if self._must_raise is not None:
            raise self._must_raise
        return bytes(self._data)

    def append(self, data: bytes) -> None:
        # Buffer received data and wake waiters; enforce max_content_length
        # by flagging RequestEntityTooLarge for the next application call.
        if data == b"" or self._must_raise is not None:
            return
        self._data.extend(data)
        self._has_data.set()
        if self._max_content_length is not None and len(self._data) > self._max_content_length:
            from ..exceptions import RequestEntityTooLarge # noqa Avoiding circular import

            self._must_raise = RequestEntityTooLarge()
            self.set_complete()

    def set_complete(self) -> None:
        # Mark the body fully received and release both kinds of waiter.
        self._complete.set()
        self._has_data.set()

    def set_result(self, data: bytes) -> None:
        """Convienience method, mainly for testing."""
        self.append(data)
        self.set_complete()
class Request(BaseRequestWebsocket, JSONMixin):
    """This class represents a request.
    It can be subclassed and the subclassed used in preference by
    replacing the :attr:`~quart.Quart.request_class` with your
    subclass.
    Attributes:
        body_class: The class to store the body data within.
    """
    body_class = Body
    def __init__(
        self,
        method: str,
        scheme: str,
        path: str,
        query_string: bytes,
        headers: Headers,
        root_path: str,
        http_version: str,
        scope: dict,
        *,
        max_content_length: Optional[int] = None,
        body_timeout: Optional[int] = None,
        send_push_promise: Callable[[str, Headers], Awaitable[None]],
    ) -> None:
        """Create a request object.
        Arguments:
            method: The HTTP verb.
            scheme: The scheme used for the request.
            path: The full unquoted path of the request.
            query_string: The raw bytes for the query string part.
            headers: The request headers.
            root_path: The root path that should be prepended to all
                routes.
            http_version: The HTTP version of the request.
            body: An awaitable future for the body data i.e.
                ``data = await body``
            max_content_length: The maximum length in bytes of the
                body (None implies no limit in Quart).
            body_timeout: The maximum time (seconds) to wait for the
                body before timing out.
            send_push_promise: An awaitable to send a push promise based
                off of this request (HTTP/2 feature).
            scope: Underlying ASGI scope dictionary.
        """
        super().__init__(
            method, scheme, path, query_string, headers, root_path, http_version, scope
        )
        self.body_timeout = body_timeout
        self.body = self.body_class(self.content_length, max_content_length)
        # Parsed form/file data caches; populated lazily by _load_form_data.
        self._form: Optional[MultiDict] = None
        self._files: Optional[MultiDict] = None
        self._send_push_promise = send_push_promise
    async def get_data(self, raw: bool = True) -> AnyStr:
        """The request body data."""
        try:
            body_future = asyncio.ensure_future(self.body)
            raw_data = await asyncio.wait_for(body_future, timeout=self.body_timeout)
        except asyncio.TimeoutError:
            # Cancel the body future and wait for the cancellation to settle
            # before converting the timeout into an HTTP error.
            body_future.cancel()
            try:
                await body_future
            except asyncio.CancelledError:
                pass
            from ..exceptions import RequestTimeout # noqa Avoiding circular import
            raise RequestTimeout()
        if raw:
            return raw_data
        else:
            return raw_data.decode(self.charset)
    @property
    async def data(self) -> bytes:
        """The raw request body bytes (shortcut for ``get_data(raw=True)``)."""
        return await self.get_data()
    @property
    async def values(self) -> CombinedMultiDict:
        """Query-string arguments combined with the parsed form data."""
        form = await self.form
        return CombinedMultiDict([self.args, form])
    @property
    async def form(self) -> MultiDict:
        """The parsed form encoded data.
        Note file data is present in the :attr:`files`.
        """
        if self._form is None:
            await self._load_form_data()
        return self._form
    @property
    async def files(self) -> MultiDict:
        """The parsed files.
        This will return an empty multidict unless the request
        mimetype was ``enctype="multipart/form-data"`` and the method
        POST, PUT, or PATCH.
        """
        if self._files is None:
            await self._load_form_data()
        return self._files
    async def _load_form_data(self) -> None:
        """Parse the body once into the _form/_files caches (empty on no match)."""
        raw_data: bytes = await self.get_data(raw=True)
        self._form = MultiDict()
        self._files = MultiDict()
        content_header = self.content_type
        if content_header is None:
            return
        content_type, parameters = parse_header(content_header)
        if content_type == "application/x-www-form-urlencoded":
            try:
                data = raw_data.decode(parameters.get("charset", "utf-8"))
            except UnicodeDecodeError:
                from ..exceptions import BadRequest # noqa Avoiding circular import
                raise BadRequest()
            for key, values in parse_qs(data, keep_blank_values=True).items():
                for value in values:
                    self._form.add(key, value)
        elif content_type == "multipart/form-data":
            # NOTE(review): cgi.FieldStorage is deprecated (removed in 3.13);
            # a future change should migrate to another multipart parser.
            field_storage = FieldStorage(
                io.BytesIO(raw_data),
                headers={name.lower(): value for name, value in self.headers.items()},
                environ={"REQUEST_METHOD": "POST"},
                limit=len(raw_data),
            )
            for key in field_storage:
                field_storage_key = field_storage[key]
                if isinstance(field_storage_key, list):
                    for item in field_storage_key:
                        self._load_field_storage(key, item)
                else:
                    self._load_field_storage(key, field_storage_key)
    def _load_field_storage(self, key: str, field_storage: FieldStorage) -> None:
        """Route one multipart field into _files (has filename) or _form."""
        if isinstance(field_storage, FieldStorage) and field_storage.filename is not None:
            self._files.add(
                key,
                FileStorage(
                    io.BytesIO(field_storage.file.read()),
                    field_storage.filename,
                    field_storage.name, # type: ignore
                    field_storage.type,
                    field_storage.headers, # type: ignore
                ),
            )
        else:
            self._form.add(key, field_storage.value)
    @property
    def content_encoding(self) -> Optional[str]:
        return self.headers.get("Content-Encoding")
    @property
    def content_length(self) -> Optional[int]:
        if "Content-Length" in self.headers:
            return int(self.headers["Content-Length"])
        else:
            return None
    @property
    def content_md5(self) -> Optional[str]:
        # Odd "Content-md5" casing works because werkzeug Headers lookups are
        # case-insensitive.
        return self.headers.get("Content-md5")
    @property
    def content_type(self) -> Optional[str]:
        return self.headers.get("Content-Type")
    async def _load_json_data(self) -> str:
        """Return the data after decoding."""
        return await self.get_data(raw=False)
    async def send_push_promise(self, path: str) -> None:
        """Send an HTTP/2 push promise for ``path``, copying cache-relevant headers."""
        headers = Headers()
        for name in SERVER_PUSH_HEADERS_TO_COPY:
            for value in self.headers.getlist(name):
                headers.add(name, value)
        await self._send_push_promise(path, headers)
    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.method}, {self.path})"
| 34.111111 | 95 | 0.610653 | from __future__ import annotations
import asyncio
import io
from cgi import FieldStorage, parse_header
from typing import Any, AnyStr, Awaitable, Callable, Generator, Optional
from urllib.parse import parse_qs
from werkzeug.datastructures import CombinedMultiDict, Headers, MultiDict
from .base import BaseRequestWebsocket, JSONMixin
from ..datastructures import FileStorage
SERVER_PUSH_HEADERS_TO_COPY = {
"accept",
"accept-encoding",
"accept-language",
"cache-control",
"user-agent",
}
class Body:
    """A request body container.
    The body can be consumed chunk-by-chunk with ``async for`` or awaited
    whole (``data = await body``); the two styles must not be mixed.
    """
    def __init__(
        self, expected_content_length: Optional[int], max_content_length: Optional[int]
    ) -> None:
        # Raw received bytes; cleared as chunks are consumed via iteration.
        self._data = bytearray()
        self._complete: asyncio.Event = asyncio.Event()
        self._has_data: asyncio.Event = asyncio.Event()
        self._max_content_length = max_content_length
        # Errors are surfaced lazily: the receiving (ASGI) side stores the
        # exception here and the consuming coroutine raises it.
        self._must_raise: Optional[Exception] = None
        if (
            expected_content_length is not None
            and max_content_length is not None
            and expected_content_length > max_content_length
        ):
            # Declared Content-Length already exceeds the limit: fail fast.
            from ..exceptions import RequestEntityTooLarge
            self._must_raise = RequestEntityTooLarge()
    def __aiter__(self) -> "Body":
        return self
    async def __anext__(self) -> bytes:
        """Return the next chunk of body data, consuming the buffer."""
        if self._must_raise is not None:
            raise self._must_raise
        # Skip waiting when everything already arrived: once complete, the
        # data event will never be set again.
        if not self._complete.is_set():
            await self._has_data.wait()
        if self._complete.is_set() and len(self._data) == 0:
            raise StopAsyncIteration()
        data = bytes(self._data)
        self._data.clear()
        self._has_data.clear()
        return data
    def __await__(self) -> Generator[Any, None, Any]:
        """Wait for the complete body and return all of it as bytes."""
        # _must_raise is checked both before and after the wait because it
        # can be set while this coroutine is suspended.
        if self._must_raise is not None:
            raise self._must_raise
        yield from self._complete.wait().__await__()
        if self._must_raise is not None:
            raise self._must_raise
        return bytes(self._data)
    def append(self, data: bytes) -> None:
        """Buffer received bytes, enforcing the configured size limit."""
        if data == b"" or self._must_raise is not None:
            return
        self._data.extend(data)
        self._has_data.set()
        if self._max_content_length is not None and len(self._data) > self._max_content_length:
            from ..exceptions import RequestEntityTooLarge
            self._must_raise = RequestEntityTooLarge()
            self.set_complete()
    def set_complete(self) -> None:
        """Mark the body as fully received and wake all waiters."""
        self._complete.set()
        self._has_data.set()
    def set_result(self, data: bytes) -> None:
        """Convenience helper (mainly for tests): append ``data`` and complete."""
        self.append(data)
        self.set_complete()
class Request(BaseRequestWebsocket, JSONMixin):
    """Represents a single HTTP request.
    Subclass and set :attr:`~quart.Quart.request_class` to customise.
    Attributes:
        body_class: The class used to store/stream the body data.
    """
    body_class = Body
    def __init__(
        self,
        method: str,
        scheme: str,
        path: str,
        query_string: bytes,
        headers: Headers,
        root_path: str,
        http_version: str,
        scope: dict,
        *,
        max_content_length: Optional[int] = None,
        body_timeout: Optional[int] = None,
        send_push_promise: Callable[[str, Headers], Awaitable[None]],
    ) -> None:
        """Create a request object.
        Arguments:
            method: The HTTP verb.
            scheme: The scheme used for the request.
            path: The full unquoted path of the request.
            query_string: The raw bytes of the query string.
            headers: The request headers.
            root_path: Root path prepended to all routes.
            http_version: The HTTP version of the request.
            scope: Underlying ASGI scope dictionary.
            max_content_length: Maximum body length in bytes (None = no limit).
            body_timeout: Max seconds to wait for the body before timing out.
            send_push_promise: Awaitable used to send an HTTP/2 push promise.
        """
        super().__init__(
            method, scheme, path, query_string, headers, root_path, http_version, scope
        )
        self.body_timeout = body_timeout
        self.body = self.body_class(self.content_length, max_content_length)
        # Lazily-populated caches, filled by _load_form_data.
        self._form: Optional[MultiDict] = None
        self._files: Optional[MultiDict] = None
        self._send_push_promise = send_push_promise
    async def get_data(self, raw: bool = True) -> AnyStr:
        """Return the request body (bytes, or text when ``raw`` is False)."""
        try:
            body_future = asyncio.ensure_future(self.body)
            raw_data = await asyncio.wait_for(body_future, timeout=self.body_timeout)
        except asyncio.TimeoutError:
            # Cancel and drain the body future before raising an HTTP 408.
            body_future.cancel()
            try:
                await body_future
            except asyncio.CancelledError:
                pass
            from ..exceptions import RequestTimeout
            raise RequestTimeout()
        if raw:
            return raw_data
        else:
            return raw_data.decode(self.charset)
    @property
    async def data(self) -> bytes:
        """The raw request body bytes."""
        return await self.get_data()
    @property
    async def values(self) -> CombinedMultiDict:
        """Query-string arguments combined with the parsed form data."""
        form = await self.form
        return CombinedMultiDict([self.args, form])
    @property
    async def form(self) -> MultiDict:
        """The parsed form-encoded data (file parts are in :attr:`files`)."""
        if self._form is None:
            await self._load_form_data()
        return self._form
    @property
    async def files(self) -> MultiDict:
        """The parsed uploaded files (empty unless multipart/form-data)."""
        if self._files is None:
            await self._load_form_data()
        return self._files
    async def _load_form_data(self) -> None:
        """Parse the body once into the _form/_files caches (empty on no match)."""
        raw_data: bytes = await self.get_data(raw=True)
        self._form = MultiDict()
        self._files = MultiDict()
        content_header = self.content_type
        if content_header is None:
            return
        content_type, parameters = parse_header(content_header)
        if content_type == "application/x-www-form-urlencoded":
            try:
                data = raw_data.decode(parameters.get("charset", "utf-8"))
            except UnicodeDecodeError:
                from ..exceptions import BadRequest
                raise BadRequest()
            for key, values in parse_qs(data, keep_blank_values=True).items():
                for value in values:
                    self._form.add(key, value)
        elif content_type == "multipart/form-data":
            # NOTE(review): cgi.FieldStorage is deprecated (removed in 3.13).
            field_storage = FieldStorage(
                io.BytesIO(raw_data),
                headers={name.lower(): value for name, value in self.headers.items()},
                environ={"REQUEST_METHOD": "POST"},
                limit=len(raw_data),
            )
            for key in field_storage:
                field_storage_key = field_storage[key]
                if isinstance(field_storage_key, list):
                    for item in field_storage_key:
                        self._load_field_storage(key, item)
                else:
                    self._load_field_storage(key, field_storage_key)
    def _load_field_storage(self, key: str, field_storage: FieldStorage) -> None:
        """Route one multipart field into _files (has filename) or _form."""
        if isinstance(field_storage, FieldStorage) and field_storage.filename is not None:
            self._files.add(
                key,
                FileStorage(
                    io.BytesIO(field_storage.file.read()),
                    field_storage.filename,
                    field_storage.name,
                    field_storage.type,
                    field_storage.headers,
                ),
            )
        else:
            self._form.add(key, field_storage.value)
    @property
    def content_encoding(self) -> Optional[str]:
        return self.headers.get("Content-Encoding")
    @property
    def content_length(self) -> Optional[int]:
        if "Content-Length" in self.headers:
            return int(self.headers["Content-Length"])
        else:
            return None
    @property
    def content_md5(self) -> Optional[str]:
        # Non-canonical "Content-md5" casing works because werkzeug Headers
        # lookups are case-insensitive.
        return self.headers.get("Content-md5")
    @property
    def content_type(self) -> Optional[str]:
        return self.headers.get("Content-Type")
    async def _load_json_data(self) -> str:
        """Return the body decoded as text (for JSON parsing)."""
        return await self.get_data(raw=False)
    async def send_push_promise(self, path: str) -> None:
        """Send an HTTP/2 push promise for ``path`` with cache-relevant headers."""
        headers = Headers()
        for name in SERVER_PUSH_HEADERS_TO_COPY:
            for value in self.headers.getlist(name):
                headers.add(name, value)
        await self._send_push_promise(path, headers)
    def __repr__(self) -> str:
        return f"{self.__class__.__name__}({self.method}, {self.path})"
| true | true |
f72fee28d1d7a6de068ec92b5dd4448e2007bd1e | 7,158 | py | Python | sdk/python/pulumi_azure_native/avs/v20210101preview/get_workload_network_dns_service.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/avs/v20210101preview/get_workload_network_dns_service.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | sdk/python/pulumi_azure_native/avs/v20210101preview/get_workload_network_dns_service.py | pulumi-bot/pulumi-azure-native | f7b9490b5211544318e455e5cceafe47b628e12c | [
"Apache-2.0"
] | null | null | null | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'GetWorkloadNetworkDnsServiceResult',
'AwaitableGetWorkloadNetworkDnsServiceResult',
'get_workload_network_dns_service',
]
# NOTE: generated by the Pulumi SDK Generator -- keep hand edits to comments.
@pulumi.output_type
class GetWorkloadNetworkDnsServiceResult:
    """
    NSX DNS Service
    """
    def __init__(__self__, default_dns_zone=None, display_name=None, dns_service_ip=None, fqdn_zones=None, id=None, log_level=None, name=None, provisioning_state=None, revision=None, status=None, type=None):
        # Each field is type-checked defensively and stored via pulumi.set()
        # so the @pulumi.getter properties can retrieve it.
        if default_dns_zone and not isinstance(default_dns_zone, str):
            raise TypeError("Expected argument 'default_dns_zone' to be a str")
        pulumi.set(__self__, "default_dns_zone", default_dns_zone)
        if display_name and not isinstance(display_name, str):
            raise TypeError("Expected argument 'display_name' to be a str")
        pulumi.set(__self__, "display_name", display_name)
        if dns_service_ip and not isinstance(dns_service_ip, str):
            raise TypeError("Expected argument 'dns_service_ip' to be a str")
        pulumi.set(__self__, "dns_service_ip", dns_service_ip)
        if fqdn_zones and not isinstance(fqdn_zones, list):
            raise TypeError("Expected argument 'fqdn_zones' to be a list")
        pulumi.set(__self__, "fqdn_zones", fqdn_zones)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if log_level and not isinstance(log_level, str):
            raise TypeError("Expected argument 'log_level' to be a str")
        pulumi.set(__self__, "log_level", log_level)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if provisioning_state and not isinstance(provisioning_state, str):
            raise TypeError("Expected argument 'provisioning_state' to be a str")
        pulumi.set(__self__, "provisioning_state", provisioning_state)
        if revision and not isinstance(revision, float):
            raise TypeError("Expected argument 'revision' to be a float")
        pulumi.set(__self__, "revision", revision)
        if status and not isinstance(status, str):
            raise TypeError("Expected argument 'status' to be a str")
        pulumi.set(__self__, "status", status)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter(name="defaultDnsZone")
    def default_dns_zone(self) -> Optional[str]:
        """
        Default DNS zone of the DNS Service.
        """
        return pulumi.get(self, "default_dns_zone")
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[str]:
        """
        Display name of the DNS Service.
        """
        return pulumi.get(self, "display_name")
    @property
    @pulumi.getter(name="dnsServiceIp")
    def dns_service_ip(self) -> Optional[str]:
        """
        DNS service IP of the DNS Service.
        """
        return pulumi.get(self, "dns_service_ip")
    @property
    @pulumi.getter(name="fqdnZones")
    def fqdn_zones(self) -> Optional[Sequence[str]]:
        """
        FQDN zones of the DNS Service.
        """
        return pulumi.get(self, "fqdn_zones")
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Resource ID.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter(name="logLevel")
    def log_level(self) -> Optional[str]:
        """
        DNS Service log level.
        """
        return pulumi.get(self, "log_level")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource name.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> str:
        """
        The provisioning state
        """
        return pulumi.get(self, "provisioning_state")
    @property
    @pulumi.getter
    def revision(self) -> Optional[float]:
        """
        NSX revision number.
        """
        return pulumi.get(self, "revision")
    @property
    @pulumi.getter
    def status(self) -> str:
        """
        DNS Service status.
        """
        return pulumi.get(self, "status")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Resource type.
        """
        return pulumi.get(self, "type")
class AwaitableGetWorkloadNetworkDnsServiceResult(GetWorkloadNetworkDnsServiceResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        """Make the result awaitable: resolves immediately to a plain result."""
        # The unreachable `yield` turns this into a generator function, which
        # is what makes the object usable with `await`.
        if False:
            yield self
        return GetWorkloadNetworkDnsServiceResult(
            default_dns_zone=self.default_dns_zone,
            display_name=self.display_name,
            dns_service_ip=self.dns_service_ip,
            fqdn_zones=self.fqdn_zones,
            id=self.id,
            log_level=self.log_level,
            name=self.name,
            provisioning_state=self.provisioning_state,
            revision=self.revision,
            status=self.status,
            type=self.type)
def get_workload_network_dns_service(dns_service_id: Optional[str] = None,
                                     private_cloud_name: Optional[str] = None,
                                     resource_group_name: Optional[str] = None,
                                     opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWorkloadNetworkDnsServiceResult:
    """
    NSX DNS Service
    :param str dns_service_id: NSX DNS Service identifier. Generally the same as the DNS Service's display name
    :param str private_cloud_name: Name of the private cloud
    :param str resource_group_name: The name of the resource group. The name is case insensitive.
    """
    # Build the invoke arguments with the camelCase names the provider expects.
    __args__ = dict()
    __args__['dnsServiceId'] = dns_service_id
    __args__['privateCloudName'] = private_cloud_name
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Synchronous provider invoke; the awaitable wrapper is for API symmetry.
    __ret__ = pulumi.runtime.invoke('azure-native:avs/v20210101preview:getWorkloadNetworkDnsService', __args__, opts=opts, typ=GetWorkloadNetworkDnsServiceResult).value
    return AwaitableGetWorkloadNetworkDnsServiceResult(
        default_dns_zone=__ret__.default_dns_zone,
        display_name=__ret__.display_name,
        dns_service_ip=__ret__.dns_service_ip,
        fqdn_zones=__ret__.fqdn_zones,
        id=__ret__.id,
        log_level=__ret__.log_level,
        name=__ret__.name,
        provisioning_state=__ret__.provisioning_state,
        revision=__ret__.revision,
        status=__ret__.status,
        type=__ret__.type)
| 35.969849 | 207 | 0.644035 |
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
__all__ = [
'GetWorkloadNetworkDnsServiceResult',
'AwaitableGetWorkloadNetworkDnsServiceResult',
'get_workload_network_dns_service',
]
# NOTE: generated by the Pulumi SDK Generator -- keep hand edits to comments.
@pulumi.output_type
class GetWorkloadNetworkDnsServiceResult:
    """NSX DNS Service (generated Pulumi output type)."""
    def __init__(__self__, default_dns_zone=None, display_name=None, dns_service_ip=None, fqdn_zones=None, id=None, log_level=None, name=None, provisioning_state=None, revision=None, status=None, type=None):
        # Each field is type-checked defensively, then stored via pulumi.set().
        if default_dns_zone and not isinstance(default_dns_zone, str):
            raise TypeError("Expected argument 'default_dns_zone' to be a str")
        pulumi.set(__self__, "default_dns_zone", default_dns_zone)
        if display_name and not isinstance(display_name, str):
            raise TypeError("Expected argument 'display_name' to be a str")
        pulumi.set(__self__, "display_name", display_name)
        if dns_service_ip and not isinstance(dns_service_ip, str):
            raise TypeError("Expected argument 'dns_service_ip' to be a str")
        pulumi.set(__self__, "dns_service_ip", dns_service_ip)
        if fqdn_zones and not isinstance(fqdn_zones, list):
            raise TypeError("Expected argument 'fqdn_zones' to be a list")
        pulumi.set(__self__, "fqdn_zones", fqdn_zones)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if log_level and not isinstance(log_level, str):
            raise TypeError("Expected argument 'log_level' to be a str")
        pulumi.set(__self__, "log_level", log_level)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if provisioning_state and not isinstance(provisioning_state, str):
            raise TypeError("Expected argument 'provisioning_state' to be a str")
        pulumi.set(__self__, "provisioning_state", provisioning_state)
        if revision and not isinstance(revision, float):
            raise TypeError("Expected argument 'revision' to be a float")
        pulumi.set(__self__, "revision", revision)
        if status and not isinstance(status, str):
            raise TypeError("Expected argument 'status' to be a str")
        pulumi.set(__self__, "status", status)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter(name="defaultDnsZone")
    def default_dns_zone(self) -> Optional[str]:
        """Default DNS zone of the DNS Service."""
        return pulumi.get(self, "default_dns_zone")
    @property
    @pulumi.getter(name="displayName")
    def display_name(self) -> Optional[str]:
        """Display name of the DNS Service."""
        return pulumi.get(self, "display_name")
    @property
    @pulumi.getter(name="dnsServiceIp")
    def dns_service_ip(self) -> Optional[str]:
        """DNS service IP of the DNS Service."""
        return pulumi.get(self, "dns_service_ip")
    @property
    @pulumi.getter(name="fqdnZones")
    def fqdn_zones(self) -> Optional[Sequence[str]]:
        """FQDN zones of the DNS Service."""
        return pulumi.get(self, "fqdn_zones")
    @property
    @pulumi.getter
    def id(self) -> str:
        """Resource ID."""
        return pulumi.get(self, "id")
    @property
    @pulumi.getter(name="logLevel")
    def log_level(self) -> Optional[str]:
        """DNS Service log level."""
        return pulumi.get(self, "log_level")
    @property
    @pulumi.getter
    def name(self) -> str:
        """Resource name."""
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> str:
        """The provisioning state."""
        return pulumi.get(self, "provisioning_state")
    @property
    @pulumi.getter
    def revision(self) -> Optional[float]:
        """NSX revision number."""
        return pulumi.get(self, "revision")
    @property
    @pulumi.getter
    def status(self) -> str:
        """DNS Service status."""
        return pulumi.get(self, "status")
    @property
    @pulumi.getter
    def type(self) -> str:
        """Resource type."""
        return pulumi.get(self, "type")
class AwaitableGetWorkloadNetworkDnsServiceResult(GetWorkloadNetworkDnsServiceResult):
    # pylint: disable=using-constant-test
    def __await__(self):
        """Make the result awaitable: resolves immediately to a plain result."""
        # The unreachable `yield` turns this into a generator function, which
        # is what makes the object usable with `await`.
        if False:
            yield self
        return GetWorkloadNetworkDnsServiceResult(
            default_dns_zone=self.default_dns_zone,
            display_name=self.display_name,
            dns_service_ip=self.dns_service_ip,
            fqdn_zones=self.fqdn_zones,
            id=self.id,
            log_level=self.log_level,
            name=self.name,
            provisioning_state=self.provisioning_state,
            revision=self.revision,
            status=self.status,
            type=self.type)
def get_workload_network_dns_service(dns_service_id: Optional[str] = None,
                                     private_cloud_name: Optional[str] = None,
                                     resource_group_name: Optional[str] = None,
                                     opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetWorkloadNetworkDnsServiceResult:
    """Fetch an NSX DNS Service.
    :param str dns_service_id: NSX DNS Service identifier (generally its display name)
    :param str private_cloud_name: Name of the private cloud
    :param str resource_group_name: Name of the resource group (case insensitive)
    """
    # Build the invoke arguments with the camelCase names the provider expects.
    __args__ = dict()
    __args__['dnsServiceId'] = dns_service_id
    __args__['privateCloudName'] = private_cloud_name
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        opts.version = _utilities.get_version()
    # Synchronous provider invoke; the awaitable wrapper is for API symmetry.
    __ret__ = pulumi.runtime.invoke('azure-native:avs/v20210101preview:getWorkloadNetworkDnsService', __args__, opts=opts, typ=GetWorkloadNetworkDnsServiceResult).value
    return AwaitableGetWorkloadNetworkDnsServiceResult(
        default_dns_zone=__ret__.default_dns_zone,
        display_name=__ret__.display_name,
        dns_service_ip=__ret__.dns_service_ip,
        fqdn_zones=__ret__.fqdn_zones,
        id=__ret__.id,
        log_level=__ret__.log_level,
        name=__ret__.name,
        provisioning_state=__ret__.provisioning_state,
        revision=__ret__.revision,
        status=__ret__.status,
        type=__ret__.type)
| true | true |
f72fee595b4703f699cfe5d567dfaf697a1d6207 | 828 | py | Python | pysm/preprocessing/museum_crm/x01_make_karma_sources.py | binh-vu/semantic-modeling | b387584502ba1daa6abd6b7573828416f6426b49 | [
"MIT"
] | 3 | 2019-10-31T15:26:20.000Z | 2022-03-03T06:04:03.000Z | pysm/preprocessing/museum_crm/x01_make_karma_sources.py | binh-vu/semantic-modeling | b387584502ba1daa6abd6b7573828416f6426b49 | [
"MIT"
] | 1 | 2021-10-05T14:57:29.000Z | 2022-03-27T01:58:41.000Z | pysm/preprocessing/museum_crm/x01_make_karma_sources.py | binh-vu/semantic-modeling | b387584502ba1daa6abd6b7573828416f6426b49 | [
"MIT"
] | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
import ujson
from pathlib import Path
from typing import Dict, Tuple, List, Set, Union, Optional, Any
from semantic_modeling.config import config
from semantic_modeling.data_io import get_data_tables, get_raw_data_tables, get_semantic_models, get_ontology, \
get_sampled_data_tables
from semantic_modeling.utilities.serializable import serializeJSON
from transformation.r2rml.commands.modeling import SetInternalLinkCmd, SetSemanticTypeCmd
from transformation.r2rml.r2rml import R2RML
# Export sampled museum_crm tables as pretty-printed JSON sources for Karma.
dataset = "museum_crm"
# NOTE(review): `ont` is never used below -- presumably get_ontology() is kept
# for its loading/caching side effects; confirm before removing.
ont = get_ontology(dataset)
source_dir = Path(config.datasets[dataset].as_path()) / "karma-version" / "sources"
source_dir.mkdir(exist_ok=True, parents=True)
for tbl in get_sampled_data_tables(dataset):
serializeJSON(tbl.rows, source_dir / f"{tbl.id}.json", indent=4) | 41.4 | 112 | 0.805556 |
import ujson
from pathlib import Path
from typing import Dict, Tuple, List, Set, Union, Optional, Any
from semantic_modeling.config import config
from semantic_modeling.data_io import get_data_tables, get_raw_data_tables, get_semantic_models, get_ontology, \
get_sampled_data_tables
from semantic_modeling.utilities.serializable import serializeJSON
from transformation.r2rml.commands.modeling import SetInternalLinkCmd, SetSemanticTypeCmd
from transformation.r2rml.r2rml import R2RML
# Export sampled museum_crm tables as pretty-printed JSON sources for Karma.
dataset = "museum_crm"
# NOTE(review): `ont` is never used below -- presumably get_ontology() is kept
# for its loading/caching side effects; confirm before removing.
ont = get_ontology(dataset)
source_dir = Path(config.datasets[dataset].as_path()) / "karma-version" / "sources"
source_dir.mkdir(exist_ok=True, parents=True)
for tbl in get_sampled_data_tables(dataset):
serializeJSON(tbl.rows, source_dir / f"{tbl.id}.json", indent=4) | true | true |
f72fef007e9ec6112672dfd0e87b7ec609049c6a | 2,115 | py | Python | scrape_artists/artists.py | flannerykj/python_scrape | c5166431810432c24e04150eb305b3ec2a899a91 | [
"MIT"
] | null | null | null | scrape_artists/artists.py | flannerykj/python_scrape | c5166431810432c24e04150eb305b3ec2a899a91 | [
"MIT"
] | null | null | null | scrape_artists/artists.py | flannerykj/python_scrape | c5166431810432c24e04150eb305b3ec2a899a91 | [
"MIT"
] | null | null | null |
import csv
import requests
import socket
from bs4 import BeautifulSoup
import re
import json
def parse_artists():
artist_profiles = []
try:
url = 'http://wx.toronto.ca/inter/pmmd/streetart.nsf/artists?OpenView'
response = requests.get(url)
html = response.content
soup = BeautifulSoup(html)
link_list = soup.findAll('a', attrs={'class': 'viewa1'})
for item in link_list:
item_url = 'http://wx.toronto.ca'+item.get('href')
profile = get_profile_data(item_url)
artist_profiles.append(profile)
except Exception as e:
print (e.message)
return artist_profiles
def get_profile_data(url):
try:
response = requests.get(url)
html = response.content
soup = BeautifulSoup(html)
profile = soup.find('div', attrs={'id': 'profiledisplay'}).text
name = soup.findAll('legend')[0].text
email = re.search(r'[\w\.-]+@[\w\.-]+', profile).group().replace('Business', '')
website = re.search(r'Website: (.*?)[\n\r\s]+', profile).group().replace('Website: ', '')
bio = re.search(r'Profile\n(.*?)\n', profile).group().replace('Profile', '')
description = re.search(r'Business/Organization Description\n(.*?)\n', profile).group().replace('Business/Organization Description', '')
experience = re.search(r'Experience\n(.*?)\n', profile).group().replace('Experience', '')
return {
"name": name,
"email": email,
"website": website,
"bio": bio,
"description": description,
"experience": experience,
"dateJoined": "1508884475917",
"dateUpdated": "1508884475917"
}
return profile
except Exception as e:
print (e.message)
return
with open('artists.json', 'w') as outfile:
json.dump(parse_artists(), outfile)
'''artist_urls = get_artist_urls()
artist_array = compile_artist_profiles(artist_urls)
outfile = open("./toronto-artists.csv", "wb")
writer = csv.writer(outfile)
writer.writerows(recipe_array)'''
| 33.571429 | 144 | 0.605674 |
import csv
import requests
import socket
from bs4 import BeautifulSoup
import re
import json
def parse_artists():
artist_profiles = []
try:
url = 'http://wx.toronto.ca/inter/pmmd/streetart.nsf/artists?OpenView'
response = requests.get(url)
html = response.content
soup = BeautifulSoup(html)
link_list = soup.findAll('a', attrs={'class': 'viewa1'})
for item in link_list:
item_url = 'http://wx.toronto.ca'+item.get('href')
profile = get_profile_data(item_url)
artist_profiles.append(profile)
except Exception as e:
print (e.message)
return artist_profiles
def get_profile_data(url):
try:
response = requests.get(url)
html = response.content
soup = BeautifulSoup(html)
profile = soup.find('div', attrs={'id': 'profiledisplay'}).text
name = soup.findAll('legend')[0].text
email = re.search(r'[\w\.-]+@[\w\.-]+', profile).group().replace('Business', '')
website = re.search(r'Website: (.*?)[\n\r\s]+', profile).group().replace('Website: ', '')
bio = re.search(r'Profile\n(.*?)\n', profile).group().replace('Profile', '')
description = re.search(r'Business/Organization Description\n(.*?)\n', profile).group().replace('Business/Organization Description', '')
experience = re.search(r'Experience\n(.*?)\n', profile).group().replace('Experience', '')
return {
"name": name,
"email": email,
"website": website,
"bio": bio,
"description": description,
"experience": experience,
"dateJoined": "1508884475917",
"dateUpdated": "1508884475917"
}
return profile
except Exception as e:
print (e.message)
return
with open('artists.json', 'w') as outfile:
json.dump(parse_artists(), outfile)
| true | true |
f72fef0e4ab230a89d2f0b6d56c75cd135c69cf4 | 497 | py | Python | puzzles/day21/puzzle1.py | sbr075/advent2021 | e431b56d9ee9ef9ef02fb9f9cde276feefb78095 | [
"MIT"
] | 1 | 2021-12-03T23:13:36.000Z | 2021-12-03T23:13:36.000Z | puzzles/day21/puzzle1.py | sbr075/advent2021 | e431b56d9ee9ef9ef02fb9f9cde276feefb78095 | [
"MIT"
] | null | null | null | puzzles/day21/puzzle1.py | sbr075/advent2021 | e431b56d9ee9ef9ef02fb9f9cde276feefb78095 | [
"MIT"
] | null | null | null | def read_input():
with open("input.txt", "r") as file:
return [int(p[28:]) for p in file.read().splitlines()]
def mod(i, j):
    """1-based modulo: map ``i`` onto 1..j (so mod(j, j) == j, not 0).

    PEP 8 (E731): a named ``def`` replaces the old lambda assignment; the
    callable's name and parameters are unchanged.
    """
    return ((i - 1) % j) + 1
def main():
    """Solve AoC 2021 day 21 part 1: deterministic-die Dirac Dice.

    Prints ``loser_score * total_rolls`` for the game ending when one
    player's score reaches 1000.
    """
    pos = read_input()
    s = [0,0]  # accumulated scores, index = player
    # One iteration per turn: three consecutive die values i, i+1, i+2,
    # wrapped onto 1..100; (i-1)%2 selects whose turn it is.
    for i in range(1,1000,3):
        pos[(i-1)%2] += sum([mod(j,100) for j in range(i,i+3)])
        pos[(i-1)%2] = mod(pos[(i-1)%2],10)  # board spaces are 1..10
        s[(i-1)%2] += pos[(i-1)%2]
        if s[(i-1)%2] >= 1000: break
    # i+2 == total number of die rolls made when the game ended.
    print(f"Part 1 {min(s)*(i+2)}")
if __name__ == "__main__":
main() | 26.157895 | 63 | 0.478873 | def read_input():
with open("input.txt", "r") as file:
return [int(p[28:]) for p in file.read().splitlines()]
def mod(i, j):
    """1-based modulo onto 1..j (E731 fix: named def replaces the lambda)."""
    return ((i - 1) % j) + 1
def main():
    """Deterministic-die Dirac Dice (part 1): print losing score * roll count."""
    positions = read_input()
    scores = [0, 0]
    for first_roll in range(1, 1000, 3):
        player = (first_roll - 1) % 2
        # Advance by the three wrapped die values, wrap onto the 1..10 board,
        # then score the landing space.
        positions[player] += sum(mod(r, 100) for r in range(first_roll, first_roll + 3))
        positions[player] = mod(positions[player], 10)
        scores[player] += positions[player]
        if scores[player] >= 1000:
            break
    # first_roll + 2 == total die rolls at the moment the game ended.
    print(f"Part 1 {min(scores)*(first_roll+2)}")
if __name__ == "__main__":
main() | true | true |
f72fefc517a309b1ebb05a09c441a25eb97845f7 | 654 | py | Python | sort/insertion_sort.py | vasili-byl/algorithms | 4e37609ab9b724e140cfec4b01495a0952d28724 | [
"MIT"
] | 1 | 2020-05-02T13:40:10.000Z | 2020-05-02T13:40:10.000Z | sort/insertion_sort.py | vasili-byl/algorithms | 4e37609ab9b724e140cfec4b01495a0952d28724 | [
"MIT"
] | null | null | null | sort/insertion_sort.py | vasili-byl/algorithms | 4e37609ab9b724e140cfec4b01495a0952d28724 | [
"MIT"
] | null | null | null | from sort.abstract_sort import Sort
class InsertionSort(Sort):
    """In-place insertion sort over an optional inclusive sub-range."""

    def __call__(self, array, left_bound=None, right_bound=None):
        """Sort ``array[left_bound .. right_bound]`` (inclusive) ascending.

        Bounds default to the whole list.  The sort is stable: equal
        elements keep their relative order.
        """
        if left_bound is None:
            left_bound = 0
        if right_bound is None:
            right_bound = len(array) - 1
        for i in range(left_bound + 1, right_bound + 1):
            current = array[i]
            j = i - 1
            # Shift strictly-larger elements one slot right, then drop
            # `current` just after the first element that is <= current.
            while j >= left_bound and array[j] > current:
                array[j + 1] = array[j]
                j -= 1
            array[j + 1] = current
| 32.7 | 65 | 0.496942 | from sort.abstract_sort import Sort
class InsertionSort(Sort):
def __call__(self, array, left_bound=None, right_bound=None):
if left_bound is None:
left_bound = 0
if right_bound is None:
right_bound = len(array) - 1
for i in range(left_bound + 1, right_bound + 1):
pos = left_bound
for j in range(i - 1, left_bound - 1, -1):
if array[j] <= array[i]:
pos = j + 1
break
current = array[i]
for j in range(i - 1, pos - 1, -1):
array[j + 1] = array[j]
array[pos] = current
| true | true |
f72ff070a885f440110d03df8a65db80bf61a2f3 | 4,299 | py | Python | rllib/utils/torch_ops.py | acmore/ray | 9f0f54266064e203b0bdcc9d3fa947cb4518ebc0 | [
"Apache-2.0"
] | null | null | null | rllib/utils/torch_ops.py | acmore/ray | 9f0f54266064e203b0bdcc9d3fa947cb4518ebc0 | [
"Apache-2.0"
] | 1 | 2020-06-23T07:54:44.000Z | 2020-06-23T08:04:47.000Z | rllib/utils/torch_ops.py | acmore/ray | 9f0f54266064e203b0bdcc9d3fa947cb4518ebc0 | [
"Apache-2.0"
] | null | null | null | import numpy as np
from ray.rllib.utils import try_import_tree
from ray.rllib.utils.framework import try_import_torch
torch, _ = try_import_torch()
tree = try_import_tree()
def explained_variance(y, pred):
    """Compute the explained variance of ``pred`` w.r.t. targets ``y``.

    ev = 1 - Var(y - pred) / Var(y), floored at -1.0 so a pathologically
    bad predictor cannot produce an unbounded score.

    Args:
        y (torch.Tensor): Target values; variance is taken over dim 0.
        pred (torch.Tensor): Predictions, broadcastable against ``y``.

    Returns:
        torch.Tensor: Elementwise max(-1, 1 - Var(y - pred) / Var(y)).
    """
    y_var = torch.var(y, dim=[0])
    diff_var = torch.var(y - pred, dim=[0])
    # Allocate the -1.0 floor on the same device as the inputs.  The previous
    # code moved it to the *default* CUDA device whenever CUDA was available,
    # which crashed with a device mismatch when `y` lived on the CPU (or on a
    # non-default GPU) of a CUDA-capable machine.
    min_ = torch.tensor([-1.0], device=y.device)
    return torch.max(min_, 1 - (diff_var / y_var))
def global_norm(tensors):
    """Return the global L2 norm over a list of tensors.

    Equivalent to sqrt(sum(t ** 2)) with the sum taken over every element
    of every tensor in ``tensors``.

    Args:
        tensors (List[torch.Tensor]): The tensors to reduce over.
    """
    # Per-tensor L2 norms; the L2 norm of these norms equals the norm over
    # the concatenation of all elements.
    per_tensor = [torch.sqrt(torch.sum(t * t)) for t in tensors]
    total = sum(norm * norm for norm in per_tensor)
    return torch.sqrt(total)
def huber_loss(x, delta=1.0):
    """Elementwise Huber loss.

    Quadratic (0.5 * x^2) inside |x| < delta, linear outside.
    Reference: https://en.wikipedia.org/wiki/Huber_loss
    """
    abs_x = torch.abs(x)
    quadratic = 0.5 * x * x
    linear = delta * (abs_x - 0.5 * delta)
    return torch.where(abs_x < delta, quadratic, linear)
def l2_loss(x):
    """Return half the squared L2 norm of ``x`` (no sqrt): sum(x ** 2) / 2."""
    return 0.5 * torch.sum(x * x)
def reduce_mean_ignore_inf(x, axis):
    """Mean over ``axis`` that treats -inf entries as missing values."""
    finite = x != float("-inf")
    # Zero the -inf entries so they contribute nothing to the sum, then
    # divide by the count of valid entries only.
    zeroed = torch.where(finite, x, torch.zeros_like(x))
    return torch.sum(zeroed, axis) / torch.sum(finite.float(), axis)
def minimize_and_clip(optimizer, clip_val=10):
    """Clip gradients found in `optimizer.param_groups` to given value.

    Ensures the norm of the gradients for each variable is clipped to
    `clip_val`.

    Args:
        optimizer (torch.optim.Optimizer): Optimizer whose parameters'
            gradients are clipped in place.
        clip_val (float): Maximum allowed gradient norm per parameter.
    """
    for param_group in optimizer.param_groups:
        for p in param_group["params"]:
            if p.grad is not None:
                # Pass the parameter itself, not `p.grad`: clip_grad_norm_
                # reads the `.grad` attribute of whatever it is handed, so
                # passing the gradient tensor made the call a silent no-op
                # (a plain tensor's `.grad` is None and gets filtered out).
                torch.nn.utils.clip_grad_norm_(p, clip_val)
def sequence_mask(lengths, maxlen=None, dtype=None):
    """Offers same behavior as tf.sequence_mask for torch.

    Thanks to Dimitris Papatheodorou
    (https://discuss.pytorch.org/t/pytorch-equivalent-for-tf-sequence-mask/
    39036).

    Args:
        lengths (torch.Tensor): 1D tensor of sequence lengths.
        maxlen (int): Width of the mask; defaults to ``lengths.max()``.
        dtype (torch.dtype): Output dtype; defaults to ``torch.bool``.

    Returns:
        torch.Tensor: A [len(lengths), maxlen] mask that is True/1 inside
        each sequence and False/0 beyond its length.
    """
    if maxlen is None:
        maxlen = lengths.max()
    mask = ~(torch.ones((len(lengths), maxlen)).to(
        lengths.device).cumsum(dim=1).t() > lengths).t()
    # `Tensor.type()` is not in-place; the previous code discarded its
    # result, so the `dtype` argument silently had no effect.
    mask = mask.type(dtype or torch.bool)
    return mask
def convert_to_non_torch_type(stats):
    """Converts values in `stats` to non-Tensor numpy or python types.

    Args:
        stats (any): Any (possibly nested) struct, the values in which will be
            converted and returned as a new struct with all torch tensors
            being converted to numpy types.

    Returns:
        Any: A new struct with the same structure as `stats`, but with all
            values converted to non-torch Tensor types.
    """

    def mapping(item):
        # 0-dim tensors become python scalars, everything else a numpy
        # array; non-tensor leaves pass through untouched.
        if not isinstance(item, torch.Tensor):
            return item
        if len(item.size()) == 0:
            return item.cpu().item()
        return item.cpu().detach().numpy()

    return tree.map_structure(mapping, stats)
def convert_to_torch_tensor(stats, device=None):
    """Converts any struct to torch.Tensors.

    stats (any): Any (possibly nested) struct, the values in which will be
        converted and returned as a new struct with all leaves converted
        to torch tensors.

    Returns:
        Any: A new struct with the same structure as `stats`, but with all
            values converted to torch Tensor types.
    """

    def mapping(item):
        if torch.is_tensor(item):
            return item if device is None else item.to(device)
        converted = torch.from_numpy(np.asarray(item))
        # Downcast float64 -> float32 so all float leaves share one dtype.
        if converted.dtype == torch.double:
            converted = converted.float()
        if device is not None:
            converted = converted.to(device)
        return converted

    return tree.map_structure(mapping, stats)
def atanh(x):
    """Inverse hyperbolic tangent: 0.5 * ln((1 + x) / (1 - x))."""
    ratio = (1 + x) / (1 - x)
    return torch.log(ratio) / 2
| 30.928058 | 78 | 0.640381 | import numpy as np
from ray.rllib.utils import try_import_tree
from ray.rllib.utils.framework import try_import_torch
torch, _ = try_import_torch()
tree = try_import_tree()
def explained_variance(y, pred):
y_var = torch.var(y, dim=[0])
diff_var = torch.var(y - pred, dim=[0])
min_ = torch.Tensor([-1.0])
return torch.max(
min_.to(device=torch.device("cuda"))
if torch.cuda.is_available() else min_,
1 - (diff_var / y_var))
def global_norm(tensors):
single_l2s = [
torch.pow(torch.sum(torch.pow(t, 2.0)), 0.5) for t in tensors
]
# Compute global norm from all single tensors' L2 norms.
return torch.pow(sum(torch.pow(l2, 2.0) for l2 in single_l2s), 0.5)
def huber_loss(x, delta=1.0):
return torch.where(
torch.abs(x) < delta,
torch.pow(x, 2.0) * 0.5, delta * (torch.abs(x) - 0.5 * delta))
def l2_loss(x):
return torch.sum(torch.pow(x, 2.0)) / 2.0
def reduce_mean_ignore_inf(x, axis):
mask = torch.ne(x, float("-inf"))
x_zeroed = torch.where(mask, x, torch.zeros_like(x))
return torch.sum(x_zeroed, axis) / torch.sum(mask.float(), axis)
def minimize_and_clip(optimizer, clip_val=10):
for param_group in optimizer.param_groups:
for p in param_group["params"]:
if p.grad is not None:
torch.nn.utils.clip_grad_norm_(p.grad, clip_val)
def sequence_mask(lengths, maxlen=None, dtype=None):
if maxlen is None:
maxlen = lengths.max()
mask = ~(torch.ones((len(lengths), maxlen)).to(
lengths.device).cumsum(dim=1).t() > lengths).t()
mask.type(dtype or torch.bool)
return mask
def convert_to_non_torch_type(stats):
def mapping(item):
if isinstance(item, torch.Tensor):
return item.cpu().item() if len(item.size()) == 0 else \
item.cpu().detach().numpy()
else:
return item
return tree.map_structure(mapping, stats)
def convert_to_torch_tensor(stats, device=None):
def mapping(item):
if torch.is_tensor(item):
return item if device is None else item.to(device)
tensor = torch.from_numpy(np.asarray(item))
if tensor.dtype == torch.double:
tensor = tensor.float()
return tensor if device is None else tensor.to(device)
return tree.map_structure(mapping, stats)
def atanh(x):
return 0.5 * torch.log((1 + x) / (1 - x))
| true | true |
f72ff1c4d7592842535f6a31fa135b7e0705f968 | 1,651 | py | Python | spotify_tracker/watcher_client.py | eriktaubeneck/spotifytracker | c0f7f1a418aae9184cb1d2d27835495f261027ce | [
"MIT"
] | null | null | null | spotify_tracker/watcher_client.py | eriktaubeneck/spotifytracker | c0f7f1a418aae9184cb1d2d27835495f261027ce | [
"MIT"
] | null | null | null | spotify_tracker/watcher_client.py | eriktaubeneck/spotifytracker | c0f7f1a418aae9184cb1d2d27835495f261027ce | [
"MIT"
] | null | null | null | import time
import logging
from .spotify_client import SpotifyPlaylistClient
from . import config
logger = logging.getLogger(name='spotify_tracker')
class SpotifyWatcherClient(SpotifyPlaylistClient):
def __init__(self):
self.playlist_id = config.get_config_value('watcher_playlist_id')
self.last_track_id = None
return super().__init__()
def setup_playlist_id(self):
print("You need to add a playlist_id to your config to save "
"song history to.")
sp_playlists = self.sp.user_playlists(self.username)
playlists = [p for p in sp_playlists['items']
if p['owner']['id'] == self.username]
for playlist in playlists:
print('{}: {}'.format(playlist['name'], playlist['id']))
playlist_id = input("Please input the playlist_id of the Playlist "
"you'd like to save your history to: ")
config.save_config_value('watcher_playlist_id', playlist_id)
def main(self):
track_id = self.get_current_track_id()
if not track_id or track_id == self.last_track_id:
return
logger.info('Currently listening to {}'.format(
self.get_track_name_and_artist_string(track_id)
))
self.add_track_to_playlist(track_id)
self.last_track_id = track_id
def watch(self):
if not self.check_config():
raise Exception("Please run setupwatcher command.")
logger.debug('Starting watch loop')
while True:
logger.debug('New watch lap completed.')
self.safe_main()
time.sleep(5)
| 34.395833 | 75 | 0.634161 | import time
import logging
from .spotify_client import SpotifyPlaylistClient
from . import config
logger = logging.getLogger(name='spotify_tracker')
class SpotifyWatcherClient(SpotifyPlaylistClient):
def __init__(self):
self.playlist_id = config.get_config_value('watcher_playlist_id')
self.last_track_id = None
return super().__init__()
def setup_playlist_id(self):
print("You need to add a playlist_id to your config to save "
"song history to.")
sp_playlists = self.sp.user_playlists(self.username)
playlists = [p for p in sp_playlists['items']
if p['owner']['id'] == self.username]
for playlist in playlists:
print('{}: {}'.format(playlist['name'], playlist['id']))
playlist_id = input("Please input the playlist_id of the Playlist "
"you'd like to save your history to: ")
config.save_config_value('watcher_playlist_id', playlist_id)
def main(self):
track_id = self.get_current_track_id()
if not track_id or track_id == self.last_track_id:
return
logger.info('Currently listening to {}'.format(
self.get_track_name_and_artist_string(track_id)
))
self.add_track_to_playlist(track_id)
self.last_track_id = track_id
def watch(self):
if not self.check_config():
raise Exception("Please run setupwatcher command.")
logger.debug('Starting watch loop')
while True:
logger.debug('New watch lap completed.')
self.safe_main()
time.sleep(5)
| true | true |
f72ff241d91ef455622d3abfa9df1af912c8d07d | 6,207 | py | Python | pyqtgraph/console/template_pyqt5.py | StSav012/pyqtgraph | 65e17c4e3707eb3bd4d91cdc13504d9b150f4360 | [
"MIT"
] | 1 | 2022-01-30T20:04:51.000Z | 2022-01-30T20:04:51.000Z | pyqtgraph/console/template_pyqt5.py | StSav012/pyqtgraph | 65e17c4e3707eb3bd4d91cdc13504d9b150f4360 | [
"MIT"
] | null | null | null | pyqtgraph/console/template_pyqt5.py | StSav012/pyqtgraph | 65e17c4e3707eb3bd4d91cdc13504d9b150f4360 | [
"MIT"
] | null | null | null |
# Form implementation generated from reading ui file 'pyqtgraph/console/template.ui'
#
# Created by: PyQt5 UI code generator 5.5.1
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
    """Console widget layout, auto-generated by pyuic5 from template.ui.

    Do not hand-edit the widget construction below; regenerate from the
    .ui file instead (see the warning in the file header).
    """
    def setupUi(self, Form):
        """Build the widget tree and wire it into ``Form``."""
        Form.setObjectName("Form")
        Form.resize(739, 497)
        self.gridLayout = QtWidgets.QGridLayout(Form)
        self.gridLayout.setContentsMargins(0, 0, 0, 0)
        self.gridLayout.setSpacing(0)
        self.gridLayout.setObjectName("gridLayout")
        self.splitter = QtWidgets.QSplitter(Form)
        self.splitter.setOrientation(QtCore.Qt.Vertical)
        self.splitter.setObjectName("splitter")
        self.layoutWidget = QtWidgets.QWidget(self.splitter)
        self.layoutWidget.setObjectName("layoutWidget")
        self.verticalLayout = QtWidgets.QVBoxLayout(self.layoutWidget)
        self.verticalLayout.setObjectName("verticalLayout")
        self.output = QtWidgets.QPlainTextEdit(self.layoutWidget)
        font = QtGui.QFont()
        font.setFamily("Monospace")
        self.output.setFont(font)
        self.output.setReadOnly(True)
        self.output.setObjectName("output")
        self.verticalLayout.addWidget(self.output)
        self.horizontalLayout = QtWidgets.QHBoxLayout()
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.input = CmdInput(self.layoutWidget)
        self.input.setObjectName("input")
        self.horizontalLayout.addWidget(self.input)
        self.historyBtn = QtWidgets.QPushButton(self.layoutWidget)
        self.historyBtn.setCheckable(True)
        self.historyBtn.setObjectName("historyBtn")
        self.horizontalLayout.addWidget(self.historyBtn)
        self.exceptionBtn = QtWidgets.QPushButton(self.layoutWidget)
        self.exceptionBtn.setCheckable(True)
        self.exceptionBtn.setObjectName("exceptionBtn")
        self.horizontalLayout.addWidget(self.exceptionBtn)
        self.verticalLayout.addLayout(self.horizontalLayout)
        self.historyList = QtWidgets.QListWidget(self.splitter)
        font = QtGui.QFont()
        font.setFamily("Monospace")
        self.historyList.setFont(font)
        self.historyList.setObjectName("historyList")
        self.exceptionGroup = QtWidgets.QGroupBox(self.splitter)
        self.exceptionGroup.setObjectName("exceptionGroup")
        self.gridLayout_2 = QtWidgets.QGridLayout(self.exceptionGroup)
        self.gridLayout_2.setContentsMargins(-1, 0, -1, 0)
        self.gridLayout_2.setHorizontalSpacing(2)
        self.gridLayout_2.setVerticalSpacing(0)
        self.gridLayout_2.setObjectName("gridLayout_2")
        self.clearExceptionBtn = QtWidgets.QPushButton(self.exceptionGroup)
        self.clearExceptionBtn.setEnabled(False)
        self.clearExceptionBtn.setObjectName("clearExceptionBtn")
        self.gridLayout_2.addWidget(self.clearExceptionBtn, 0, 6, 1, 1)
        self.catchAllExceptionsBtn = QtWidgets.QPushButton(self.exceptionGroup)
        self.catchAllExceptionsBtn.setCheckable(True)
        self.catchAllExceptionsBtn.setObjectName("catchAllExceptionsBtn")
        self.gridLayout_2.addWidget(self.catchAllExceptionsBtn, 0, 1, 1, 1)
        self.catchNextExceptionBtn = QtWidgets.QPushButton(self.exceptionGroup)
        self.catchNextExceptionBtn.setCheckable(True)
        self.catchNextExceptionBtn.setObjectName("catchNextExceptionBtn")
        self.gridLayout_2.addWidget(self.catchNextExceptionBtn, 0, 0, 1, 1)
        self.onlyUncaughtCheck = QtWidgets.QCheckBox(self.exceptionGroup)
        self.onlyUncaughtCheck.setChecked(True)
        self.onlyUncaughtCheck.setObjectName("onlyUncaughtCheck")
        self.gridLayout_2.addWidget(self.onlyUncaughtCheck, 0, 4, 1, 1)
        self.exceptionStackList = QtWidgets.QListWidget(self.exceptionGroup)
        self.exceptionStackList.setAlternatingRowColors(True)
        self.exceptionStackList.setObjectName("exceptionStackList")
        self.gridLayout_2.addWidget(self.exceptionStackList, 2, 0, 1, 7)
        self.runSelectedFrameCheck = QtWidgets.QCheckBox(self.exceptionGroup)
        self.runSelectedFrameCheck.setChecked(True)
        self.runSelectedFrameCheck.setObjectName("runSelectedFrameCheck")
        self.gridLayout_2.addWidget(self.runSelectedFrameCheck, 3, 0, 1, 7)
        self.exceptionInfoLabel = QtWidgets.QLabel(self.exceptionGroup)
        self.exceptionInfoLabel.setWordWrap(True)
        self.exceptionInfoLabel.setObjectName("exceptionInfoLabel")
        self.gridLayout_2.addWidget(self.exceptionInfoLabel, 1, 0, 1, 7)
        spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
        self.gridLayout_2.addItem(spacerItem, 0, 5, 1, 1)
        self.label = QtWidgets.QLabel(self.exceptionGroup)
        self.label.setObjectName("label")
        self.gridLayout_2.addWidget(self.label, 0, 2, 1, 1)
        self.filterText = QtWidgets.QLineEdit(self.exceptionGroup)
        self.filterText.setObjectName("filterText")
        self.gridLayout_2.addWidget(self.filterText, 0, 3, 1, 1)
        self.gridLayout.addWidget(self.splitter, 0, 0, 1, 1)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)
    def retranslateUi(self, Form):
        """Apply translatable text to all widgets (generated)."""
        _translate = QtCore.QCoreApplication.translate
        Form.setWindowTitle(_translate("Form", "Console"))
        self.historyBtn.setText(_translate("Form", "History.."))
        self.exceptionBtn.setText(_translate("Form", "Exceptions.."))
        self.exceptionGroup.setTitle(_translate("Form", "Exception Handling"))
        self.clearExceptionBtn.setText(_translate("Form", "Clear Stack"))
        self.catchAllExceptionsBtn.setText(_translate("Form", "Show All Exceptions"))
        self.catchNextExceptionBtn.setText(_translate("Form", "Show Next Exception"))
        self.onlyUncaughtCheck.setText(_translate("Form", "Only Uncaught Exceptions"))
        self.runSelectedFrameCheck.setText(_translate("Form", "Run commands in selected stack frame"))
        self.exceptionInfoLabel.setText(_translate("Form", "Stack Trace"))
        self.label.setText(_translate("Form", "Filter (regex):"))
from .CmdInput import CmdInput
| 53.973913 | 114 | 0.72241 |
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName("Form")
Form.resize(739, 497)
self.gridLayout = QtWidgets.QGridLayout(Form)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setSpacing(0)
self.gridLayout.setObjectName("gridLayout")
self.splitter = QtWidgets.QSplitter(Form)
self.splitter.setOrientation(QtCore.Qt.Vertical)
self.splitter.setObjectName("splitter")
self.layoutWidget = QtWidgets.QWidget(self.splitter)
self.layoutWidget.setObjectName("layoutWidget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.layoutWidget)
self.verticalLayout.setObjectName("verticalLayout")
self.output = QtWidgets.QPlainTextEdit(self.layoutWidget)
font = QtGui.QFont()
font.setFamily("Monospace")
self.output.setFont(font)
self.output.setReadOnly(True)
self.output.setObjectName("output")
self.verticalLayout.addWidget(self.output)
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.input = CmdInput(self.layoutWidget)
self.input.setObjectName("input")
self.horizontalLayout.addWidget(self.input)
self.historyBtn = QtWidgets.QPushButton(self.layoutWidget)
self.historyBtn.setCheckable(True)
self.historyBtn.setObjectName("historyBtn")
self.horizontalLayout.addWidget(self.historyBtn)
self.exceptionBtn = QtWidgets.QPushButton(self.layoutWidget)
self.exceptionBtn.setCheckable(True)
self.exceptionBtn.setObjectName("exceptionBtn")
self.horizontalLayout.addWidget(self.exceptionBtn)
self.verticalLayout.addLayout(self.horizontalLayout)
self.historyList = QtWidgets.QListWidget(self.splitter)
font = QtGui.QFont()
font.setFamily("Monospace")
self.historyList.setFont(font)
self.historyList.setObjectName("historyList")
self.exceptionGroup = QtWidgets.QGroupBox(self.splitter)
self.exceptionGroup.setObjectName("exceptionGroup")
self.gridLayout_2 = QtWidgets.QGridLayout(self.exceptionGroup)
self.gridLayout_2.setContentsMargins(-1, 0, -1, 0)
self.gridLayout_2.setHorizontalSpacing(2)
self.gridLayout_2.setVerticalSpacing(0)
self.gridLayout_2.setObjectName("gridLayout_2")
self.clearExceptionBtn = QtWidgets.QPushButton(self.exceptionGroup)
self.clearExceptionBtn.setEnabled(False)
self.clearExceptionBtn.setObjectName("clearExceptionBtn")
self.gridLayout_2.addWidget(self.clearExceptionBtn, 0, 6, 1, 1)
self.catchAllExceptionsBtn = QtWidgets.QPushButton(self.exceptionGroup)
self.catchAllExceptionsBtn.setCheckable(True)
self.catchAllExceptionsBtn.setObjectName("catchAllExceptionsBtn")
self.gridLayout_2.addWidget(self.catchAllExceptionsBtn, 0, 1, 1, 1)
self.catchNextExceptionBtn = QtWidgets.QPushButton(self.exceptionGroup)
self.catchNextExceptionBtn.setCheckable(True)
self.catchNextExceptionBtn.setObjectName("catchNextExceptionBtn")
self.gridLayout_2.addWidget(self.catchNextExceptionBtn, 0, 0, 1, 1)
self.onlyUncaughtCheck = QtWidgets.QCheckBox(self.exceptionGroup)
self.onlyUncaughtCheck.setChecked(True)
self.onlyUncaughtCheck.setObjectName("onlyUncaughtCheck")
self.gridLayout_2.addWidget(self.onlyUncaughtCheck, 0, 4, 1, 1)
self.exceptionStackList = QtWidgets.QListWidget(self.exceptionGroup)
self.exceptionStackList.setAlternatingRowColors(True)
self.exceptionStackList.setObjectName("exceptionStackList")
self.gridLayout_2.addWidget(self.exceptionStackList, 2, 0, 1, 7)
self.runSelectedFrameCheck = QtWidgets.QCheckBox(self.exceptionGroup)
self.runSelectedFrameCheck.setChecked(True)
self.runSelectedFrameCheck.setObjectName("runSelectedFrameCheck")
self.gridLayout_2.addWidget(self.runSelectedFrameCheck, 3, 0, 1, 7)
self.exceptionInfoLabel = QtWidgets.QLabel(self.exceptionGroup)
self.exceptionInfoLabel.setWordWrap(True)
self.exceptionInfoLabel.setObjectName("exceptionInfoLabel")
self.gridLayout_2.addWidget(self.exceptionInfoLabel, 1, 0, 1, 7)
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.gridLayout_2.addItem(spacerItem, 0, 5, 1, 1)
self.label = QtWidgets.QLabel(self.exceptionGroup)
self.label.setObjectName("label")
self.gridLayout_2.addWidget(self.label, 0, 2, 1, 1)
self.filterText = QtWidgets.QLineEdit(self.exceptionGroup)
self.filterText.setObjectName("filterText")
self.gridLayout_2.addWidget(self.filterText, 0, 3, 1, 1)
self.gridLayout.addWidget(self.splitter, 0, 0, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
_translate = QtCore.QCoreApplication.translate
Form.setWindowTitle(_translate("Form", "Console"))
self.historyBtn.setText(_translate("Form", "History.."))
self.exceptionBtn.setText(_translate("Form", "Exceptions.."))
self.exceptionGroup.setTitle(_translate("Form", "Exception Handling"))
self.clearExceptionBtn.setText(_translate("Form", "Clear Stack"))
self.catchAllExceptionsBtn.setText(_translate("Form", "Show All Exceptions"))
self.catchNextExceptionBtn.setText(_translate("Form", "Show Next Exception"))
self.onlyUncaughtCheck.setText(_translate("Form", "Only Uncaught Exceptions"))
self.runSelectedFrameCheck.setText(_translate("Form", "Run commands in selected stack frame"))
self.exceptionInfoLabel.setText(_translate("Form", "Stack Trace"))
self.label.setText(_translate("Form", "Filter (regex):"))
from .CmdInput import CmdInput
| true | true |
f72ff2fe325c5f00daf0160abd5517c0408afd68 | 9,889 | py | Python | helper/phillips_hue_wrapper.py | andrewtatham/enviroplus-python | 213eee4ab7c72cafd4d5fc5a33eb24397b665822 | [
"MIT"
] | null | null | null | helper/phillips_hue_wrapper.py | andrewtatham/enviroplus-python | 213eee4ab7c72cafd4d5fc5a33eb24397b665822 | [
"MIT"
] | null | null | null | helper/phillips_hue_wrapper.py | andrewtatham/enviroplus-python | 213eee4ab7c72cafd4d5fc5a33eb24397b665822 | [
"MIT"
] | null | null | null | import datetime
import pprint
import random
import time
from itertools import cycle
from phue import Bridge
from helper import colour_helper
class HueWrapper(object):
    """Convenience wrapper around a Philips Hue ``phue.Bridge``.

    Tracks a configured set of lights (colour vs. white-only) and a cycle of
    named lighting "profiles"; each profile maps every light name to an
    on/off state plus an optional per-light update function.
    """
    def __init__(self, bridge_ip='192.168.1.73', light_configs=None, profiles=None):
        # Default light inventory; `is_colour` gates colour-only operations
        # such as xy / hue / colour-loop.
        if not light_configs:
            light_configs = [
                {'name': 'Hue color spot 1', 'is_colour': True},
                {'name': 'Hue color spot 2', 'is_colour': True},
                {'name': 'Hue color spot 3', 'is_colour': True},
                {'name': 'DEATH STAR', 'is_colour': True},
                {'name': 'Right Colour Strip', 'is_colour': True},
                {'name': 'Right White Strip', 'is_colour': False},
                {'name': 'Left Colour Strip', 'is_colour': True},
                {'name': 'Left White Strip', 'is_colour': False},
            ]
        # NOTE(review): when a custom `profiles` list is passed in, the
        # `bright_white_mode` / `normal_mode` / `colour_mode` attributes are
        # never created, but `do_whatever()` still references them -- confirm
        # callers always rely on the defaults.
        if not profiles:
            self.bright_white_mode = {
                'name': 'bright white',
                'profile_state': {},
                'lights': {
                    'Hue color spot 1': {'is_on': True, 'light_state': {}, 'func': self._normal_func},
                    'Hue color spot 2': {'is_on': True, 'light_state': {}, 'func': self._normal_func},
                    'Hue color spot 3': {'is_on': True, 'light_state': {}, 'func': self._normal_func},
                    'DEATH STAR': {'is_on': True, 'light_state': {}, 'func': self._normal_func},
                    'Right Colour Strip': {'is_on': False, 'light_state': {}, 'func': None},
                    'Right White Strip': {'is_on': True, 'light_state': {}, 'func': self._normal_func},
                    'Left Colour Strip': {'is_on': False, 'light_state': {}, 'func': None},
                    'Left White Strip': {'is_on': True, 'light_state': {}, 'func': self._normal_func},
                }
            }
            self.normal_mode = {
                'name': 'normal',
                'profile_state': {},
                'lights': {
                    'Hue color spot 1': {'is_on': False, 'light_state': {}, 'func': None},
                    'Hue color spot 2': {'is_on': False, 'light_state': {}, 'func': None},
                    'Hue color spot 3': {'is_on': False, 'light_state': {}, 'func': None},
                    'DEATH STAR': {'is_on': False, 'light_state': {}, 'func': None},
                    'Right Colour Strip': {'is_on': False, 'light_state': {}, 'func': None},
                    'Right White Strip': {'is_on': True, 'light_state': {}, 'func': self._normal_func},
                    'Left Colour Strip': {'is_on': False, 'light_state': {}, 'func': None},
                    'Left White Strip': {'is_on': True, 'light_state': {}, 'func': self._normal_func},
                }
            }
            self.colour_mode = {
                'name': 'colour',
                'profile_state': {},
                'lights': {
                    'Hue color spot 1': {'is_on': True, 'light_state': {}, 'func': self._colour_func},
                    'Hue color spot 2': {'is_on': True, 'light_state': {}, 'func': self._colour_func},
                    'Hue color spot 3': {'is_on': True, 'light_state': {}, 'func': self._colour_func},
                    'DEATH STAR': {'is_on': True, 'light_state': {}, 'func': self._colour_func},
                    'Right Colour Strip': {'is_on': True, 'light_state': {}, 'func': self._colour_func},
                    'Right White Strip': {'is_on': False, 'light_state': {}, 'func': None},
                    'Left Colour Strip': {'is_on': True, 'light_state': {}, 'func': self._colour_func},
                    'Left White Strip': {'is_on': False, 'light_state': {}, 'func': None},
                }
            }
            profiles = [
                self.bright_white_mode,
                self.normal_mode,
                self.colour_mode,
            ]
        self.light_configs = light_configs
        # Endless iterator so next_profile() can cycle forever.
        self.profiles = cycle(profiles)
        self.profile = next(self.profiles)
        self.bridge_ip = bridge_ip
        # Set in connect(): `b` is the phue.Bridge, `lights` the matched lights.
        self.b = None
        self.lights = []
    def connect(self):
        """Connect to the bridge and tag configured lights with `is_colour`."""
        self.b = Bridge(self.bridge_ip)
        self.b.connect()
        pprint.pprint(self.b.get_api())
        for actual_light in self.b.lights:
            name = actual_light.name
            for light_config in self.light_configs:
                if light_config['name'] == name:
                    # "*" marks lights that matched a configured entry.
                    name += " *"
                    actual_light.is_colour = light_config['is_colour']
                    self.lights.append(actual_light)
            print(name)
        if self.lights:
            print("connected")
            for actual_light in self.lights:
                pprint.pprint(actual_light.__dict__)
    def on(self):
        """Switch every tracked light on."""
        for light in self.lights:
            light.on = True
    def colour_temperature(self, temp):
        """Set colour temperature (mireds) on all lights."""
        # (white only) 154 is the coolest, 500 is the warmest
        for light in self.lights:
            light.colortemp = temp
    def xy(self, x, y):
        """Set CIE xy colour on the colour-capable lights."""
        # co-ordinates in CIE 1931 space
        for light in self.lights:
            if light.is_colour:
                light.xy = (x, y)
    def random_colour(self):
        """Give each colour-capable light a random CIE xy colour."""
        for light in self.lights:
            if light.is_colour:
                light.xy = [random.random(), random.random()]
    def hue(self, hue, sat=254):
        """Set hue and saturation on all lights."""
        # hue' parameter has the range 0-65535 so represents approximately 182*degrees
        # sat is 0-254?
        for light in self.lights:
            light.hue = hue
            light.saturation = sat
    def brightness(self, bright):
        """Set brightness on all lights."""
        # // brightness between 0-254 (NB 0 is not off!)
        for light in self.lights:
            light.bri = bright
    def colour_loop_off(self):
        """Stop the colour-loop effect on colour-capable lights."""
        for light in self.lights:
            if light.is_colour:
                light.effect = "none"
    def colour_loop_on(self):
        """Start the colour-loop effect on colour-capable lights."""
        for light in self.lights:
            if light.is_colour:
                light.effect = "colorloop"
    def flash_once(self):
        """Flash each light a single time ('select' alert)."""
        for light in self.lights:
            light.alert = "select"
    def flash_multiple(self):
        """Flash each light repeatedly ('lselect' alert)."""
        for light in self.lights:
            light.alert = "lselect"
    def flash_off(self):
        """Cancel any active alert."""
        # NOTE(review): the Hue API cancels alerts with the string "none";
        # confirm phue accepts None here.
        for light in self.lights:
            light.alert = None
    def off(self):
        """Switch every tracked light off."""
        for light in self.lights:
            light.on = False
    @property
    def is_on(self):
        # True if *any* tracked light is on.
        on = False
        for light in self.lights:
            on = on or light.on
        return on
    @property
    def is_off(self):
        # True only when every tracked light is off.
        return not self.is_on
    def set_hsv(self, h, s, v):
        """Set colour from normalized HSV in [0, 1] on colour lights."""
        # Scale to the Hue API's integer ranges (hue 0-65535, sat/bri 0-254ish).
        h = int(h * 65535)
        s = int(s * 255)
        v = int(v * 255)
        print((h, s, v))
        for light in self.lights:
            if light.is_colour:
                light.hue = h
                light.sat = s
                light.bri = v
    def quick_transitions(self):
        """Make subsequent state changes apply instantly (no fade)."""
        for light in self.lights:
            light.transitiontime = 0
    def sleep(self, seconds):
        """Blocking pause, exposed for call chaining in scripts."""
        time.sleep(seconds)
    def next_profile(self):
        """Advance to the next profile in the cycle."""
        self.profile = next(self.profiles)
    def do_whatever(self):
        """Pick a profile from day/time heuristics and apply it to lit lights.

        Work hours -> bright white; weekend evenings -> colour; otherwise
        normal.  Daytime uses full brightness, night a dim level.
        """
        now = datetime.datetime.now()
        weekday = now.weekday()
        hour = now.hour
        monday = 0
        friday = 4
        saturday = 5
        sunday = 6
        is_daytime = 8 <= hour <= 18
        in_work_hours = monday <= weekday <= friday and is_daytime
        is_weekend = saturday <= weekday <= sunday
        if in_work_hours:
            self.profile = self.bright_white_mode
        else:
            if is_weekend:
                self.profile = self.colour_mode
            else:
                self.profile = self.normal_mode
        if is_daytime:
            bright = 254
        else:
            bright = 8
        # Only touch the lights while at least one is on (manual override
        # by switching everything off is respected).
        if self.is_on:
            for light in self.lights:
                light_profile = self.profile['lights'][light.name]
                profile_state = self.profile['profile_state']
                if light_profile:
                    if light_profile['is_on'] != light.on:
                        light.on = light_profile['is_on']
                    light_func = light_profile['func']
                    light_state = light_profile['light_state']
                    if light_profile['is_on'] and light_func:
                        light_func(light=light, light_state=light_state, profile_state=profile_state, bright=bright)
    def _normal_func(self, light, **kwargs):
        # White-light profile: colour temperature and brightness track the
        # time of day via colour_helper.day_factor (0..1, presumably; verify).
        # (white only) 154 is the coolest, 500 is the warmest
        ct = 500 + int(colour_helper.day_factor * (154 - 500))
        if "bright" in kwargs and kwargs["bright"]:
            brightness = kwargs["bright"]
        else:
            # // brightness between 0-254 (NB 0 is not off!)
            brightness = int(colour_helper.day_factor * 254)
        light.colortemp = ct
        light.brightness = brightness
        pass
    def _colour_func(self, light, **kwargs):
        # Colour profile: hue sweeps the full wheel once per hour, driven by
        # the current minute.
        # hue' parameter has the range 0-65535 so represents approximately 182*degrees
        minute = datetime.datetime.now().minute
        hue = int(minute/59 * 65535)
        sat = 254
        if "bright" in kwargs and kwargs["bright"]:
            brightness = kwargs["bright"]
        else:
            # // brightness between 0-254 (NB 0 is not off!)
            brightness = int(colour_helper.day_factor * 254)
        light.hue = hue
        light.saturation = sat
        light.brightness = brightness
# Manual smoke test: cycles the colour temperature on real bridge hardware.
if __name__ == '__main__':
    hue = HueWrapper()
    hue.connect()
    hue.on()
    hue.brightness(254)
    hue.colour_temperature(154)
    hue.sleep(5)
    hue.colour_temperature(500)
    hue.sleep(5)
    hue.colour_temperature(154)
    hue.sleep(5)
    # for _ in range(5):
    #     hue.random_colour()
    #     hue.sleep(1)
    #
    # hue.colour_loop_on()
    # hue.sleep(10)
    # hue.colour_loop_off()
    # hue.sleep(10)
    hue.off()
| 35.317857 | 116 | 0.522095 | import datetime
import pprint
import random
import time
from itertools import cycle
from phue import Bridge
from helper import colour_helper
class HueWrapper(object):
def __init__(self, bridge_ip='192.168.1.73', light_configs=None, profiles=None):
if not light_configs:
light_configs = [
{'name': 'Hue color spot 1', 'is_colour': True},
{'name': 'Hue color spot 2', 'is_colour': True},
{'name': 'Hue color spot 3', 'is_colour': True},
{'name': 'DEATH STAR', 'is_colour': True},
{'name': 'Right Colour Strip', 'is_colour': True},
{'name': 'Right White Strip', 'is_colour': False},
{'name': 'Left Colour Strip', 'is_colour': True},
{'name': 'Left White Strip', 'is_colour': False},
]
if not profiles:
self.bright_white_mode = {
'name': 'bright white',
'profile_state': {},
'lights': {
'Hue color spot 1': {'is_on': True, 'light_state': {}, 'func': self._normal_func},
'Hue color spot 2': {'is_on': True, 'light_state': {}, 'func': self._normal_func},
'Hue color spot 3': {'is_on': True, 'light_state': {}, 'func': self._normal_func},
'DEATH STAR': {'is_on': True, 'light_state': {}, 'func': self._normal_func},
'Right Colour Strip': {'is_on': False, 'light_state': {}, 'func': None},
'Right White Strip': {'is_on': True, 'light_state': {}, 'func': self._normal_func},
'Left Colour Strip': {'is_on': False, 'light_state': {}, 'func': None},
'Left White Strip': {'is_on': True, 'light_state': {}, 'func': self._normal_func},
}
}
self.normal_mode = {
'name': 'normal',
'profile_state': {},
'lights': {
'Hue color spot 1': {'is_on': False, 'light_state': {}, 'func': None},
'Hue color spot 2': {'is_on': False, 'light_state': {}, 'func': None},
'Hue color spot 3': {'is_on': False, 'light_state': {}, 'func': None},
'DEATH STAR': {'is_on': False, 'light_state': {}, 'func': None},
'Right Colour Strip': {'is_on': False, 'light_state': {}, 'func': None},
'Right White Strip': {'is_on': True, 'light_state': {}, 'func': self._normal_func},
'Left Colour Strip': {'is_on': False, 'light_state': {}, 'func': None},
'Left White Strip': {'is_on': True, 'light_state': {}, 'func': self._normal_func},
}
}
self.colour_mode = {
'name': 'colour',
'profile_state': {},
'lights': {
'Hue color spot 1': {'is_on': True, 'light_state': {}, 'func': self._colour_func},
'Hue color spot 2': {'is_on': True, 'light_state': {}, 'func': self._colour_func},
'Hue color spot 3': {'is_on': True, 'light_state': {}, 'func': self._colour_func},
'DEATH STAR': {'is_on': True, 'light_state': {}, 'func': self._colour_func},
'Right Colour Strip': {'is_on': True, 'light_state': {}, 'func': self._colour_func},
'Right White Strip': {'is_on': False, 'light_state': {}, 'func': None},
'Left Colour Strip': {'is_on': True, 'light_state': {}, 'func': self._colour_func},
'Left White Strip': {'is_on': False, 'light_state': {}, 'func': None},
}
}
profiles = [
self.bright_white_mode,
self.normal_mode,
self.colour_mode,
]
self.light_configs = light_configs
self.profiles = cycle(profiles)
self.profile = next(self.profiles)
self.bridge_ip = bridge_ip
self.b = None
self.lights = []
def connect(self):
self.b = Bridge(self.bridge_ip)
self.b.connect()
pprint.pprint(self.b.get_api())
for actual_light in self.b.lights:
name = actual_light.name
for light_config in self.light_configs:
if light_config['name'] == name:
name += " *"
actual_light.is_colour = light_config['is_colour']
self.lights.append(actual_light)
print(name)
if self.lights:
print("connected")
for actual_light in self.lights:
pprint.pprint(actual_light.__dict__)
def on(self):
for light in self.lights:
light.on = True
def colour_temperature(self, temp):
for light in self.lights:
light.colortemp = temp
def xy(self, x, y):
for light in self.lights:
if light.is_colour:
light.xy = (x, y)
def random_colour(self):
for light in self.lights:
if light.is_colour:
light.xy = [random.random(), random.random()]
def hue(self, hue, sat=254):
# sat is 0-254?
for light in self.lights:
light.hue = hue
light.saturation = sat
def brightness(self, bright):
# // brightness between 0-254 (NB 0 is not off!)
for light in self.lights:
light.bri = bright
def colour_loop_off(self):
for light in self.lights:
if light.is_colour:
light.effect = "none"
def colour_loop_on(self):
for light in self.lights:
if light.is_colour:
light.effect = "colorloop"
def flash_once(self):
for light in self.lights:
light.alert = "select"
def flash_multiple(self):
for light in self.lights:
light.alert = "lselect"
def flash_off(self):
for light in self.lights:
light.alert = None
def off(self):
for light in self.lights:
light.on = False
@property
def is_on(self):
on = False
for light in self.lights:
on = on or light.on
return on
@property
def is_off(self):
return not self.is_on
def set_hsv(self, h, s, v):
h = int(h * 65535)
s = int(s * 255)
v = int(v * 255)
print((h, s, v))
for light in self.lights:
if light.is_colour:
light.hue = h
light.sat = s
light.bri = v
def quick_transitions(self):
for light in self.lights:
light.transitiontime = 0
def sleep(self, seconds):
time.sleep(seconds)
def next_profile(self):
self.profile = next(self.profiles)
    def do_whatever(self):
        """Pick a lighting profile and brightness from the wall clock and apply it.

        Weekday daytime (Mon-Fri, 08:00-18:00) selects the bright white
        profile; weekends outside work hours select the colour profile;
        weekday evenings/nights select the normal profile. Daytime uses
        full brightness (254), night-time a dim 8. Lights are only driven
        if at least one light is currently on.
        """
        now = datetime.datetime.now()
        weekday = now.weekday()  # Monday == 0 ... Sunday == 6
        hour = now.hour
        monday = 0
        friday = 4
        saturday = 5
        sunday = 6
        is_daytime = 8 <= hour <= 18
        in_work_hours = monday <= weekday <= friday and is_daytime
        is_weekend = saturday <= weekday <= sunday
        if in_work_hours:
            self.profile = self.bright_white_mode
        else:
            if is_weekend:
                self.profile = self.colour_mode
            else:
                self.profile = self.normal_mode
        if is_daytime:
            bright = 254
        else:
            # night-time: very dim (0 is minimum brightness, not off)
            bright = 8
        if self.is_on:
            for light in self.lights:
                # each profile carries one entry per known light name
                light_profile = self.profile['lights'][light.name]
                profile_state = self.profile['profile_state']
                if light_profile:
                    # sync the on/off switch first, then let the profile's
                    # driver function (if any) set colour/brightness
                    if light_profile['is_on'] != light.on:
                        light.on = light_profile['is_on']
                    light_func = light_profile['func']
                    light_state = light_profile['light_state']
                    if light_profile['is_on'] and light_func:
                        light_func(light=light, light_state=light_state, profile_state=profile_state, bright=bright)
def _normal_func(self, light, **kwargs):
# (white only) 154 is the coolest, 500 is the warmest
ct = 500 + int(colour_helper.day_factor * (154 - 500))
if "bright" in kwargs and kwargs["bright"]:
brightness = kwargs["bright"]
else:
# // brightness between 0-254 (NB 0 is not off!)
brightness = int(colour_helper.day_factor * 254)
light.colortemp = ct
light.brightness = brightness
pass
def _colour_func(self, light, **kwargs):
# hue' parameter has the range 0-65535 so represents approximately 182*degrees
minute = datetime.datetime.now().minute
hue = int(minute/59 * 65535)
sat = 254
if "bright" in kwargs and kwargs["bright"]:
brightness = kwargs["bright"]
else:
brightness = int(colour_helper.day_factor * 254)
light.hue = hue
light.saturation = sat
light.brightness = brightness
if __name__ == '__main__':
    # Manual smoke test: connect to the bridge, turn everything on at full
    # brightness, sweep colour temperature cool -> warm -> cool with 5 s
    # pauses, then switch everything off.
    hue = HueWrapper()
    hue.connect()
    hue.on()
    hue.brightness(254)
    hue.colour_temperature(154)
    hue.sleep(5)
    hue.colour_temperature(500)
    hue.sleep(5)
    hue.colour_temperature(154)
    hue.sleep(5)
    hue.off()
| true | true |
f72ff313165970077760e6b80119c882d0e4e3b3 | 57,975 | py | Python | openconcept/analysis/performance/solver_phases.py | kanekosh/openconcept | f4646d583ba1840540e648601c963adab13cdccf | [
"MIT"
] | null | null | null | openconcept/analysis/performance/solver_phases.py | kanekosh/openconcept | f4646d583ba1840540e648601c963adab13cdccf | [
"MIT"
] | 1 | 2022-01-18T17:02:23.000Z | 2022-01-19T19:33:34.000Z | openconcept/analysis/performance/solver_phases.py | eytanadler/openconcept | 7878e5725eed78a023136b58250361531c7c7654 | [
"MIT"
] | 1 | 2021-11-13T22:40:31.000Z | 2021-11-13T22:40:31.000Z | from __future__ import division
from openmdao.api import Group, ExplicitComponent, IndepVarComp, BalanceComp, ImplicitComponent
import openconcept.api as oc
from openconcept.analysis.atmospherics.compute_atmos_props import ComputeAtmosphericProperties
from openconcept.analysis.aerodynamics import Lift, StallSpeed
from openconcept.utilities.math import ElementMultiplyDivideComp, AddSubtractComp
from openconcept.utilities.math.integrals import Integrator
from openconcept.utilities.linearinterp import LinearInterpolator
from openconcept.utilities.math.integrals import Integrator
import numpy as np
import copy
class ClimbAngleComp(ExplicitComponent):
    """
    Computes steady climb angle based on excess thrust.
    This is a helper function
    and shouldn't be instantiated in the top-level model directly.
    Inputs
    ------
    drag : float
        Aircraft drag at v2 (climb out) flight condition (scalar, N)
    weight : float
        Takeoff weight (scalar, kg)
    thrust : float
        Thrust at the v2 (climb out) flight condition (scalar, N)
    Outputs
    -------
    gamma : float
        Climb out flight path angle (scalar, rad)
    Options
    -------
    num_nodes : int
        Number of points to run
    """
    def initialize(self):
        self.options.declare('num_nodes', default=1)
    def setup(self):
        nn = self.options['num_nodes']
        self.add_input('drag', units='N',shape=(nn,))
        self.add_input('weight', units='kg', shape=(nn,))
        self.add_input('thrust', units='N',shape=(nn,))
        self.add_output('gamma', units='rad',shape=(nn,))
        # each node depends only on its own inputs -> diagonal Jacobian
        self.declare_partials(['gamma'], ['weight','thrust','drag'], cols=np.arange(0,nn), rows=np.arange(0,nn))
    def compute(self, inputs, outputs):
        g = 9.80665 #m/s^2
        # steady climb force balance: sin(gamma) = (T - D) / (m * g)
        outputs['gamma'] = np.arcsin((inputs['thrust']-inputs['drag'])/inputs['weight']/g)
    def compute_partials(self, inputs, J):
        g = 9.80665 #m/s^2
        # chain rule through arcsin: d/dx arcsin(x) = 1 / sqrt(1 - x^2)
        interior_qty = (inputs['thrust']-inputs['drag'])/inputs['weight']/g
        d_arcsin = 1/np.sqrt(1-interior_qty**2)
        J['gamma','thrust'] = d_arcsin/inputs['weight']/g
        J['gamma','drag'] = -d_arcsin/inputs['weight']/g
        J['gamma','weight'] = -d_arcsin*(inputs['thrust']-inputs['drag'])/inputs['weight']**2/g
class FlipVectorComp(ExplicitComponent):
    """
    Reverses the order of an OpenMDAO vector.
    This is a helper component and shouldn't be instantiated in the
    top-level model directly.
    Inputs
    ------
    vec_in : float
        Incoming vector in forward order
    Outputs
    -------
    vec_out : float
        Reversed order version of vec_in
    Options
    -------
    num_nodes : int
        Number of points to run
    negative : boolean
        Whether to apply a negative scaler. Default False preserves vector values.
        True returns all values with negative sign.
    units : string or None
        Units for vec_in and vec_out (Default None)
        Specify as an OpenMDAO unit string (e.g. 'kg')
    """
    def initialize(self):
        self.options.declare('num_nodes', default=1)
        self.options.declare('negative', default=False)
        self.options.declare('units', default=None)

    def _scaler(self):
        # -1 negates the flipped vector; +1 preserves values
        return -1 if self.options['negative'] else 1

    def setup(self):
        nn = self.options['num_nodes']
        units = self.options['units']
        self.add_input('vec_in', units=units, shape=(nn,))
        self.add_output('vec_out', units=units, shape=(nn,))
        # Reversal is a fixed permutation, so the Jacobian is a constant
        # (possibly negated) anti-diagonal matrix.
        self.declare_partials(['vec_out'], ['vec_in'],
                              rows=np.arange(nn - 1, -1, -1),
                              cols=np.arange(0, nn, 1),
                              val=self._scaler() * np.ones((nn,)))

    def compute(self, inputs, outputs):
        outputs['vec_out'] = self._scaler() * np.flip(inputs['vec_in'], 0)
class BFLImplicitSolve(ImplicitComponent):
    """
    Computes a residual equation so Newton solver can set v1 to analyze balanced field length
    This residual is equal to zero if:
        - The rejected takeoff and engine-out takeoff distances are equal, or:
        - V1 is equal to VR and the engine out takeoff distance is longer than the RTO distance
    Since this is a discontinous function, the partial derivatives are written in a special way
    to 'coax' the V1 value into the right setting with a Newton step. It's kind of a hack.
    Inputs
    ------
    distance_continue : float
        Engine-out takeoff distance (scalar, m)
    distance_abort : float
        Distance to full-stop when takeoff is rejected at V1 (scalar, m)
    takeoff|vr : float
        Rotation speed (scalar, m/s)
    Outputs
    -------
    takeoff|v1 : float
        Decision speed (scalar, m/s)
    """
    def setup(self):
        self.add_input('distance_continue', units='m')
        self.add_input('distance_abort', units='m')
        self.add_input('takeoff|vr', units='m/s')
        self.add_output('takeoff|v1', units='m/s',val=20,lower=10,upper=150)
        self.declare_partials('takeoff|v1',['distance_continue','distance_abort','takeoff|v1','takeoff|vr'])
    def apply_nonlinear(self, inputs, outputs, residuals):
        speedtol = 1e-1
        disttol = 0
        # v1 may not exceed vr: when v1 is within speedtol of vr (or above),
        # switch the residual to drive v1 toward vr instead of balancing distances
        if inputs['takeoff|vr'] < outputs['takeoff|v1'] + speedtol:
            residuals['takeoff|v1'] = inputs['takeoff|vr'] - outputs['takeoff|v1']
        else:
            residuals['takeoff|v1'] = inputs['distance_continue'] - inputs['distance_abort']
        #if you are within vtol on the correct side but the stopping distance bigger, use the regular mode
        if inputs['takeoff|vr'] >= outputs['takeoff|v1'] and inputs['takeoff|vr'] - outputs['takeoff|v1'] < speedtol and (inputs['distance_abort'] - inputs['distance_continue']) > disttol:
            residuals['takeoff|v1'] = inputs['distance_continue'] - inputs['distance_abort']
    def linearize(self, inputs, outputs, partials):
        speedtol = 1e-1
        disttol = 0
        # partials must follow the same branch selection as apply_nonlinear
        # so the Newton step is consistent with the active residual equation
        if inputs['takeoff|vr'] < outputs['takeoff|v1'] + speedtol:
            partials['takeoff|v1','distance_continue'] = 0
            partials['takeoff|v1','distance_abort'] = 0
            partials['takeoff|v1','takeoff|vr'] = 1
            partials['takeoff|v1','takeoff|v1'] = -1
        else:
            partials['takeoff|v1','distance_continue'] = 1
            partials['takeoff|v1','distance_abort'] = -1
            partials['takeoff|v1','takeoff|vr'] = 0
            partials['takeoff|v1','takeoff|v1'] = 0
        if inputs['takeoff|vr'] >= outputs['takeoff|v1'] and inputs['takeoff|vr'] - outputs['takeoff|v1'] < speedtol and (inputs['distance_abort'] - inputs['distance_continue']) > disttol:
            partials['takeoff|v1','distance_continue'] = 1
            partials['takeoff|v1','distance_abort'] = -1
            partials['takeoff|v1','takeoff|vr'] = 0
            partials['takeoff|v1','takeoff|v1'] = 0
class Groundspeeds(ExplicitComponent):
    """
    Computes groundspeed for vectorial true airspeed and true vertical speed.
    This is a helper function for the main mission analysis routines
    and shouldn't be instantiated directly.
    Inputs
    ------
    fltcond|vs : float
        Vertical speed for all mission phases (vector, m/s)
    fltcond|Utrue : float
        True airspeed for all mission phases (vector, m/s)
    Outputs
    -------
    fltcond|groundspeed : float
        True groundspeed for all mission phases (vector, m/s)
    fltcond|cosgamma : float
        Cosine of the flght path angle for all mission phases (vector, dimensionless)
    fltcond|singamma : float
        Sine of the flight path angle for all mission phases (vector, dimensionless)
    Options
    -------
    num_nodes : int
        Number of points to run
    """
    def initialize(self):
        self.options.declare('num_nodes',default=1,desc="Number of Simpson intervals to use per seg (eg. climb, cruise, descend). Number of analysis points is 2N+1")
    def setup(self):
        nn = self.options['num_nodes']
        self.add_input('fltcond|vs', units='m/s',shape=(nn,))
        self.add_input('fltcond|Utrue', units='m/s',shape=(nn,))
        self.add_output('fltcond|groundspeed', units='m/s',shape=(nn,))
        self.add_output('fltcond|cosgamma', shape=(nn,), desc='Cosine of the flight path angle')
        self.add_output('fltcond|singamma', shape=(nn,), desc='sin of the flight path angle' )
        self.declare_partials(['fltcond|groundspeed','fltcond|cosgamma','fltcond|singamma'], ['fltcond|vs','fltcond|Utrue'], rows=range(nn), cols=range(nn))
    def compute(self, inputs, outputs):
        nn = self.options['num_nodes']
        #compute the groundspeed on climb and desc
        # groundspeed = sqrt(Utrue^2 - vs^2); when |vs| > Utrue during solver
        # iterations the radicand goes negative, so a floor of 0.01 is
        # substituted and singamma is pinned to 1 to keep the solve alive
        inside = inputs['fltcond|Utrue']**2-inputs['fltcond|vs']**2
        groundspeed = np.sqrt(inside)
        groundspeed_fixed = np.sqrt(np.where(np.less(inside, 0.0), 0.01, inside))
        #groundspeed = np.sqrt(inputs['fltcond|Utrue']**2-inputs['fltcond|vs']**2)
        #groundspeed_fixed= np.where(np.isnan(groundspeed),0,groundspeed)
        outputs['fltcond|groundspeed'] = groundspeed_fixed
        outputs['fltcond|singamma'] = np.where(np.isnan(groundspeed),1,inputs['fltcond|vs'] / inputs['fltcond|Utrue'])
        outputs['fltcond|cosgamma'] = groundspeed_fixed / inputs['fltcond|Utrue']
    def compute_partials(self, inputs, J):
        # NaN entries in the raw (un-floored) groundspeed mark nonphysical
        # points; their derivatives are zeroed to match the compute() clamps
        inside = inputs['fltcond|Utrue']**2-inputs['fltcond|vs']**2
        groundspeed = np.sqrt(inside)
        groundspeed_fixed = np.sqrt(np.where(np.less(inside, 0.0), 0.01, inside))
        J['fltcond|groundspeed','fltcond|vs'] = np.where(np.isnan(groundspeed),0,(1/2) / groundspeed_fixed * (-2) * inputs['fltcond|vs'])
        J['fltcond|groundspeed','fltcond|Utrue'] = np.where(np.isnan(groundspeed),0, (1/2) / groundspeed_fixed * 2 * inputs['fltcond|Utrue'])
        J['fltcond|singamma','fltcond|vs'] = np.where(np.isnan(groundspeed), 0, 1 / inputs['fltcond|Utrue'])
        J['fltcond|singamma','fltcond|Utrue'] = np.where(np.isnan(groundspeed), 0, - inputs['fltcond|vs'] / inputs['fltcond|Utrue'] ** 2)
        J['fltcond|cosgamma','fltcond|vs'] = J['fltcond|groundspeed','fltcond|vs'] / inputs['fltcond|Utrue']
        J['fltcond|cosgamma','fltcond|Utrue'] = (J['fltcond|groundspeed','fltcond|Utrue'] * inputs['fltcond|Utrue'] - groundspeed_fixed) / inputs['fltcond|Utrue']**2
class HorizontalAcceleration(ExplicitComponent):
    """
    Computes acceleration during takeoff run and effectively forms the T-D residual.
    Inputs
    ------
    weight : float
        Aircraft weight (scalar, kg)
    drag : float
        Aircraft drag at each analysis point (vector, N)
    lift : float
        Aircraft lift at each analysis point (vector, N)
    thrust : float
        Thrust at each TO analysis point (vector, N)
    fltcond|singamma : float
        The sine of the flight path angle gamma (vector, dimensionless)
    braking : float
        Effective rolling friction multiplier at each point (vector, dimensionless)
    Outputs
    -------
    accel_horiz : float
        Aircraft horizontal acceleration (vector, m/s**2)
    Options
    -------
    num_nodes : int
        Number of analysis points to run
    """
    def initialize(self):
        self.options.declare('num_nodes', default=1)

    def setup(self):
        nn = self.options['num_nodes']
        g = 9.80665  # m/s^2
        self.add_input('weight', units='kg', shape=(nn,))
        self.add_input('drag', units='N', shape=(nn,))
        self.add_input('lift', units='N', shape=(nn,))
        self.add_input('thrust', units='N', shape=(nn,))
        self.add_input('fltcond|singamma', shape=(nn,))
        self.add_input('braking', shape=(nn,))
        self.add_output('accel_horiz', units='m/s**2', shape=(nn,))
        rowcol = np.arange(nn)
        self.declare_partials(['accel_horiz'],
                              ['weight', 'drag', 'lift', 'thrust', 'braking'],
                              rows=rowcol, cols=rowcol)
        # gravity component along the path is linear in singamma
        self.declare_partials(['accel_horiz'], ['fltcond|singamma'],
                              rows=rowcol, cols=rowcol, val=-g * np.ones((nn,)))

    def compute(self, inputs, outputs):
        g = 9.80665  # m/s^2
        m = inputs['weight']
        # friction acts only while the wheels still carry load (lift < weight)
        on_ground = np.where(np.less((g - inputs['lift'] / m), 0.0), 0.0, 1.0)
        outputs['accel_horiz'] = (inputs['thrust'] / m - inputs['drag'] / m
                                  - on_ground * inputs['braking'] * (g - inputs['lift'] / m)
                                  - g * inputs['fltcond|singamma'])

    def compute_partials(self, inputs, J):
        g = 9.80665  # m/s^2
        m = inputs['weight']
        on_ground = np.where(np.less((g - inputs['lift'] / m), 0.0), 0.0, 1.0)
        J['accel_horiz', 'thrust'] = 1 / m
        J['accel_horiz', 'drag'] = -1 / m
        J['accel_horiz', 'braking'] = -on_ground * (g - inputs['lift'] / m)
        J['accel_horiz', 'lift'] = on_ground * inputs['braking'] / m
        J['accel_horiz', 'weight'] = (inputs['drag'] - inputs['thrust'] - on_ground * inputs['braking'] * inputs['lift']) / m**2
class VerticalAcceleration(ExplicitComponent):
    """
    Computes acceleration during takeoff run in the vertical plane.
    Only used during full unsteady takeoff performance analysis due to stability issues
    Inputs
    ------
    weight : float
        Aircraft weight (scalar, kg)
    drag : float
        Aircraft drag at each analysis point (vector, N)
    lift : float
        Aircraft lift at each analysis point (vector, N)
    thrust : float
        Thrust at each TO analysis point (vector, N)
    fltcond|singamma : float
        The sine of the flight path angle gamma (vector, dimensionless)
    fltcond|cosgamma : float
        The sine of the flight path angle gamma (vector, dimensionless)
    Outputs
    -------
    accel_vert : float
        Aircraft horizontal acceleration (vector, m/s**2)
    Options
    -------
    num_nodes : int
        Number of analysis points to run
    """
    def initialize(self):
        self.options.declare('num_nodes',default=1)
    def setup(self):
        nn = self.options['num_nodes']
        g = 9.80665 #m/s^2
        self.add_input('weight', units='kg', shape=(nn,))
        self.add_input('drag', units='N',shape=(nn,))
        self.add_input('lift', units='N',shape=(nn,))
        self.add_input('thrust', units='N',shape=(nn,))
        self.add_input('fltcond|singamma',shape=(nn,))
        self.add_input('fltcond|cosgamma',shape=(nn,))
        # output bounded to the -1g ... +2.5g envelope
        self.add_output('accel_vert', units='m/s**2', shape=(nn,),upper=2.5*g,lower=-1*g)
        arange=np.arange(nn)
        self.declare_partials(['accel_vert'], ['weight','drag','lift','thrust','fltcond|singamma','fltcond|cosgamma'], rows=arange, cols=arange)
    def compute(self, inputs, outputs):
        nn = self.options['num_nodes']
        g = 9.80665 #m/s^2
        cosg = inputs['fltcond|cosgamma']
        sing = inputs['fltcond|singamma']
        # vertical force balance: (L*cos(gamma) + (T-D)*sin(gamma) - W*g) / W
        accel = (inputs['lift']*cosg + (inputs['thrust']-inputs['drag'])*sing - g*inputs['weight'])/inputs['weight']
        accel = np.clip(accel, -g, 2.5*g)
        outputs['accel_vert'] = accel
    def compute_partials(self, inputs, J):
        # NOTE(review): compute() clips accel to [-g, 2.5g] but these
        # derivatives are for the unclipped expression, so the Jacobian is
        # inconsistent at points where the clip is active -- confirm intended.
        g = 9.80665 #m/s^2
        m = inputs['weight']
        cosg = inputs['fltcond|cosgamma']
        sing = inputs['fltcond|singamma']
        J['accel_vert','thrust'] = sing / m
        J['accel_vert','drag'] = -sing / m
        J['accel_vert','lift'] = cosg / m
        J['accel_vert','fltcond|singamma'] = (inputs['thrust']-inputs['drag']) / m
        J['accel_vert','fltcond|cosgamma'] = inputs['lift'] / m
        J['accel_vert','weight'] = -(inputs['lift']*cosg + (inputs['thrust']-inputs['drag'])*sing)/m**2
class SteadyFlightCL(ExplicitComponent):
    """
    Computes lift coefficient at each analysis point
    This is a helper function for the main mission analysis routine
    and shouldn't be instantiated directly.
    Inputs
    ------
    weight : float
        Aircraft weight at each analysis point (vector, kg)
    fltcond|q : float
        Dynamic pressure at each analysis point (vector, Pascal)
    ac|geom|wing|S_ref : float
        Reference wing area (scalar, m**2)
    fltcond|cosgamma : float
        Cosine of the flght path angle for all mission phases (vector, dimensionless)
    Outputs
    -------
    fltcond|CL : float
        Lift coefficient (vector, dimensionless)
    Options
    -------
    num_nodes : int
        Number of analysis nodes to run
    mission_segments : list
        The list of mission segments to track
    """
    def initialize(self):
        self.options.declare('num_nodes',default=5,desc="Number of Simpson intervals to use per seg (eg. climb, cruise, descend). Number of analysis points is 2N+1")
        self.options.declare('mission_segments',default=['climb','cruise','descent'])

    def setup(self):
        nn = self.options['num_nodes']
        rowcol = np.arange(nn)
        self.add_input('weight', units='kg', shape=(nn,))
        self.add_input('fltcond|q', units='N * m**-2', shape=(nn,))
        self.add_input('ac|geom|wing|S_ref', units='m **2')
        self.add_input('fltcond|cosgamma', val=1.0, shape=(nn,))
        self.add_output('fltcond|CL', shape=(nn,))
        self.declare_partials(['fltcond|CL'], ['weight','fltcond|q',"fltcond|cosgamma"], rows=rowcol, cols=rowcol)
        # S_ref is a scalar shared across all nodes
        self.declare_partials(['fltcond|CL'], ['ac|geom|wing|S_ref'], rows=rowcol, cols=np.zeros(nn))

    def compute(self, inputs, outputs):
        g = 9.80665  # m/s^2
        # steady flight: L = W*g*cos(gamma)  ->  CL = W*g*cos(gamma) / (q*S)
        outputs['fltcond|CL'] = inputs['fltcond|cosgamma']*g*inputs['weight']/inputs['fltcond|q']/inputs['ac|geom|wing|S_ref']

    def compute_partials(self, inputs, J):
        g = 9.80665  # m/s^2
        cosg = inputs['fltcond|cosgamma']
        q = inputs['fltcond|q']
        s_ref = inputs['ac|geom|wing|S_ref']
        w = inputs['weight']
        J['fltcond|CL','weight'] = cosg*g/q/s_ref
        J['fltcond|CL','fltcond|q'] = - cosg*g*w / q**2 / s_ref
        J['fltcond|CL','ac|geom|wing|S_ref'] = - cosg*g*w / q / s_ref**2
        J['fltcond|CL','fltcond|cosgamma'] = g*w/q/s_ref
class GroundRollPhase(oc.PhaseGroup):
    """
    This component group models the ground roll phase of a takeoff (acceleration before flight)
    User-settable parameters include:
    throttle (default 100 percent)
    rolling friction coeff (default 0.03 for accelerating segments and 0.4 for braking)
    propulsor_active (default 1 for v0 to v1, 0 for v1 to vr and braking) to model engine failure
    altitude (fltcond|h)
    The BaseAircraftGroup object is passed in.
    The BaseAircraftGroup should be built to accept the following inputs
    and return the following outputs.
    The outputs should be promoted to the top level in the component.
    Inputs
    ------
    range : float
        Total distance travelled (vector, m)
    fltcond|h : float
        Altitude (vector, m)
    fltcond|vs : float
        Vertical speed (vector, m/s)
    fltcond|Ueas : float
        Equivalent airspeed (vector, m/s)
    fltcond|Utrue : float
        True airspeed (vector, m/s)
    fltcond|p : float
        Pressure (vector, Pa)
    fltcond|rho : float
        Density (vector, kg/m3)
    fltcond|T : float
        Temperature (vector, K)
    fltcond|q : float
        Dynamic pressure (vector, Pa)
    fltcond|CL : float
        Lift coefficient (vector, dimensionless)
    throttle : float
        Motor / propeller throttle setting scaled from 0 to 1 or slightly more (vector, dimensionless)
    propulsor_active : float
        If a multi-propulsor airplane, a failure condition should be modeled in the propulsion model by multiplying throttle by propulsor_active.
        It will generally be 1.0 unless a failure condition is being modeled, in which case it will be 0 (vector, dimensionless)
    braking : float
        Brake friction coefficient (default 0.4 for dry runway braking, 0.03 for resistance unbraked)
        Should not be applied in the air or nonphysical effects will result (vector, dimensionless)
    lift : float
        Lift force (vector, N)
    Outputs
    -------
    thrust : float
        Total thrust force produced by all propulsors (vector, N)
    drag : float
        Total drag force in the airplane axis produced by all sources of drag (vector, N)
    weight : float
        Weight (mass, really) of the airplane at each point in time. (vector, kg)
    ac|geom|wing|S_ref
        Wing reference area (scalar, m**2)
    ac|aero|CLmax_TO
        CLmax with flaps in max takeoff position (scalar, dimensionless)
    ac|weights|MTOW
        Maximum takeoff weight (scalar, kg)
    """
    def initialize(self):
        self.options.declare('num_nodes', default=1)
        self.options.declare('flight_phase', default=None, desc='Phase of flight e.g. v0v1, cruise')
        self.options.declare('aircraft_model', default=None)

    def setup(self):
        nn = self.options['num_nodes']
        ivcomp = self.add_subsystem('const_settings', IndepVarComp(), promotes_outputs=["*"])
        # set CL = 0.1 for the ground roll per Raymer's book
        ivcomp.add_output('fltcond|CL', val=np.ones((nn,))*0.1)
        ivcomp.add_output('vr_vstall_mult', val=1.1)
        ivcomp.add_output('fltcond|h', val=np.zeros((nn,)), units='m')
        ivcomp.add_output('fltcond|vs', val=np.zeros((nn,)), units='m/s')
        ivcomp.add_output('zero_speed', val=2, units='m/s')

        flight_phase = self.options['flight_phase']
        if flight_phase == 'v0v1':
            # all engines operating, accelerating from brake release to decision speed
            ivcomp.add_output('braking', val=np.ones((nn,))*0.03)
            ivcomp.add_output('propulsor_active', val=np.ones((nn,)))
            ivcomp.add_output('throttle', val=np.ones((nn,)))
            zero_start = True
        elif flight_phase == 'v1vr':
            # one engine inoperative, continuing the takeoff from v1 to rotation
            ivcomp.add_output('braking', val=np.ones((nn,))*0.03)
            ivcomp.add_output('propulsor_active', val=np.zeros((nn,)))
            ivcomp.add_output('throttle', val=np.ones((nn,)))
            zero_start = False
        elif flight_phase == 'v1v0':
            # rejected takeoff: throttle closed, max braking, decelerating to a stop
            ivcomp.add_output('braking', val=0.4*np.ones((nn,)))
            ivcomp.add_output('propulsor_active', val=np.zeros((nn,)))
            ivcomp.add_output('throttle', val=np.zeros((nn,)))
            zero_start = False
        else:
            # Bug fix: previously an unrecognized phase left `zero_start` undefined,
            # raising a confusing NameError much later in setup. Fail fast instead.
            raise ValueError("GroundRollPhase flight_phase must be 'v0v1', 'v1vr', or 'v1v0'; "
                             "got {}".format(flight_phase))

        self.add_subsystem('atmos', ComputeAtmosphericProperties(num_nodes=nn, true_airspeed_in=True), promotes_inputs=['*'], promotes_outputs=['*'])
        self.add_subsystem('gs', Groundspeeds(num_nodes=nn), promotes_inputs=['*'], promotes_outputs=['*'])
        # add the user-defined aircraft model
        self.add_subsystem('acmodel', self.options['aircraft_model'](num_nodes=nn, flight_phase=self.options['flight_phase']), promotes_inputs=['*'], promotes_outputs=['*'])
        self.add_subsystem('lift', Lift(num_nodes=nn), promotes_inputs=['*'], promotes_outputs=['*'])
        self.add_subsystem('stall', StallSpeed(), promotes_inputs=[('CLmax','ac|aero|CLmax_TO'), ('weight','ac|weights|MTOW'), 'ac|geom|wing|S_ref'], promotes_outputs=['*'])
        # rotation speed = stall speed * multiplier (1.1 by default)
        self.add_subsystem('vrspeed', ElementMultiplyDivideComp(output_name='takeoff|vr', input_names=['Vstall_eas','vr_vstall_mult'], input_units=['m/s', None]), promotes_inputs=['*'], promotes_outputs=['*'])
        self.add_subsystem('haccel', HorizontalAcceleration(num_nodes=nn), promotes_inputs=['*'], promotes_outputs=['*'])

        if flight_phase == 'v1v0':
            # unfortunately need to shoot backwards to avoid negative airspeeds
            # reverse the order of the accelerations so the last one is first (and make them negative)
            self.add_subsystem('flipaccel', FlipVectorComp(num_nodes=nn, units='m/s**2', negative=True), promotes_inputs=[('vec_in','accel_horiz')])
            # integrate the timesteps in reverse from near zero speed.
            ode_integ = self.add_subsystem('ode_integ', Integrator(num_nodes=nn, method='simpson', diff_units='s', time_setup='duration'), promotes_inputs=['*'], promotes_outputs=['*'])
            ode_integ.add_integrand('vel_q', units='m/s', rate_name='vel_dqdt', start_name='zero_speed', end_name='fltcond|Utrue_initial', lower=1.5)
            self.connect('flipaccel.vec_out', 'vel_dqdt')
            # flip the result of the reverse integration again so the flight condition is forward and consistent with everything else
            self.add_subsystem('flipvel', FlipVectorComp(num_nodes=nn, units='m/s', negative=False), promotes_outputs=[('vec_out','fltcond|Utrue')])
            self.connect('vel_q', 'flipvel.vec_in')
            # now set the time step so that backwards shooting results in the correct 'initial' segment airspeed
            self.add_subsystem('v0constraint', BalanceComp(name='duration', units='s', eq_units='m/s', rhs_name='fltcond|Utrue_initial', lhs_name='takeoff|v1', val=10., upper=100., lower=1.),
                               promotes_inputs=['*'], promotes_outputs=['duration'])
        else:
            # forward shooting for these acceleration segments
            ode_integ = self.add_subsystem('ode_integ', Integrator(num_nodes=nn, method='simpson', diff_units='s', time_setup='duration'), promotes_inputs=['*'], promotes_outputs=['*'])
            ode_integ.add_integrand('fltcond|Utrue', units='m/s', rate_name='accel_horiz', start_name='fltcond|Utrue_initial', end_name='fltcond|Utrue_final', lower=1.5)
            if flight_phase == 'v0v1':
                self.connect('zero_speed', 'fltcond|Utrue_initial')
                # solve segment duration so the final airspeed equals v1
                self.add_subsystem('v1constraint', BalanceComp(name='duration', units='s', eq_units='m/s', rhs_name='fltcond|Utrue_final', lhs_name='takeoff|v1', val=10., upper=100., lower=1.),
                                   promotes_inputs=['*'], promotes_outputs=['duration'])
            elif flight_phase == 'v1vr':
                # solve segment duration so the final airspeed equals vr
                self.add_subsystem('vrconstraint', BalanceComp(name='duration', units='s', eq_units='m/s', rhs_name='fltcond|Utrue_final', lhs_name='takeoff|vr', val=5., upper=12., lower=0.0),
                                   promotes_inputs=['*'], promotes_outputs=['duration'])
        if zero_start:
            ode_integ.add_integrand('range', rate_name='fltcond|groundspeed', units='m', zero_start=True)
        else:
            ode_integ.add_integrand('range', rate_name='fltcond|groundspeed', units='m')
class RotationPhase(oc.PhaseGroup):
    """
    This group models the transition from ground roll to climb out during a takeoff
    using force balance in the vertical and horizontal directions.
    User-settable parameters include:
    throttle (default 100 percent)
    rolling friction coeff (default 0.03 for accelerating segments and 0.4 for braking)
    propulsor_active (default 1 for v0 to v1, 0 for v1 to vr and braking) to model engine failure
    altitude (fltcond|h)
    obstacle clearance hight (h_obs) default 35 feet per FAR 25
    Rotation CL/CLmax ratio (default 0.83)
    The BaseAircraftGroup object is passed in.
    The BaseAircraftGroup should be built to accept the following inputs
    and return the following outputs.
    The outputs should be promoted to the top level in the component.
    Inputs
    ------
    range : float
        Total distance travelled (vector, m)
    fltcond|h : float
        Altitude (vector, m)
    fltcond|vs : float
        Vertical speed (vector, m/s)
    fltcond|Ueas : float
        Equivalent airspeed (vector, m/s)
    fltcond|Utrue : float
        True airspeed (vector, m/s)
    fltcond|p : float
        Pressure (vector, Pa)
    fltcond|rho : float
        Density (vector, kg/m3)
    fltcond|T : float
        Temperature (vector, K)
    fltcond|q : float
        Dynamic pressure (vector, Pa)
    fltcond|CL : float
        Lift coefficient (vector, dimensionless)
    throttle : float
        Motor / propeller throttle setting scaled from 0 to 1 or slightly more (vector, dimensionless)
    propulsor_active : float
        If a multi-propulsor airplane, a failure condition should be modeled in the propulsion model by multiplying throttle by propulsor_active.
        It will generally be 1.0 unless a failure condition is being modeled, in which case it will be 0 (vector, dimensionless)
    braking : float
        Percentage brakes applied, from 0 to 1. Should not be applied in the air or nonphysical effects will result (vector, dimensionless)
    lift : float
        Lift force (vector, N)
    Outputs
    -------
    thrust : float
        Total thrust force produced by all propulsors (vector, N)
    drag : float
        Total drag force in the airplane axis produced by all sources of drag (vector, N)
    weight : float
        Weight (mass, really) of the airplane at each point in time. Generally will need to be integrated by Dymos as a state with a rate source (vector, kg)
    ac|geom|wing|S_ref
        Wing reference area (scalar, m**2)
    ac|aero|CLmax_TO
        CLmax with flaps in max takeoff position (scalar, dimensionless)
    ac|weights|MTOW
        Maximum takeoff weight (scalar, kg)
    """
    def initialize(self):
        self.options.declare('num_nodes',default=1)
        self.options.declare('flight_phase',default=None)
        self.options.declare('aircraft_model',default=None)
    def setup(self):
        nn = self.options['num_nodes']
        ivcomp = self.add_subsystem('const_settings', IndepVarComp(), promotes_outputs=["*"])
        # rotation is flown at 83 percent of CLmax by default
        ivcomp.add_output('CL_rotate_mult', val=np.ones((nn,))*0.83)
        # 35 ft obstacle clearance height per FAR 25
        ivcomp.add_output('h_obs', val=35, units='ft')
        flight_phase = self.options['flight_phase']
        # NOTE(review): only 'rotate' populates braking/propulsor_active/throttle;
        # any other phase value leaves these IVC outputs missing -- confirm intended
        if flight_phase == 'rotate':
            ivcomp.add_output('braking',val=np.zeros((nn,)))
            ivcomp.add_output('propulsor_active',val=np.zeros((nn,)))
            ivcomp.add_output('throttle',val=np.ones((nn,)))
        self.add_subsystem('atmos', ComputeAtmosphericProperties(num_nodes=nn, true_airspeed_in=True), promotes_inputs=['*'], promotes_outputs=['*'])
        self.add_subsystem('gs',Groundspeeds(num_nodes=nn),promotes_inputs=['*'],promotes_outputs=['*'])
        # CL held fixed at CL_rotate_mult * CLmax_TO throughout the rotation
        clcomp = self.add_subsystem('clcomp',ElementMultiplyDivideComp(output_name='fltcond|CL', input_names=['CL_rotate_mult','ac|aero|CLmax_TO'],
                                                                       vec_size=[nn,1], length=1),
                                    promotes_inputs=['*'], promotes_outputs=['*'])
        self.add_subsystem('acmodel',self.options['aircraft_model'](num_nodes=nn,flight_phase=self.options['flight_phase']),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('lift',Lift(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('haccel',HorizontalAcceleration(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('vaccel',VerticalAcceleration(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
        # TODO always starts from zero altitude
        # solve the segment duration so the final altitude equals the obstacle height
        self.add_subsystem('clear_obstacle',BalanceComp(name='duration',units='s',val=1,eq_units='m',rhs_name='fltcond|h_final',lhs_name='h_obs',lower=0.1,upper=15),
                           promotes_inputs=['*'],promotes_outputs=['duration'])
        # four chained integrations: accel -> speed, groundspeed -> range,
        # vertical accel -> vertical speed, vertical speed -> altitude
        int1 = self.add_subsystem('intvelocity', Integrator(num_nodes=nn, method='simpson',diff_units='s',time_setup='duration'), promotes_outputs=['*'], promotes_inputs=['*'])
        int1.add_integrand('fltcond|Utrue', rate_name='accel_horiz', units='m/s', lower=0.1)
        int2 = self.add_subsystem('intrange', Integrator(num_nodes=nn, method='simpson',diff_units='s',time_setup='duration'), promotes_outputs=['*'], promotes_inputs=['*'])
        int2.add_integrand('range', rate_name='fltcond|groundspeed', units='m')
        int3 = self.add_subsystem('intvs', Integrator(num_nodes=nn, method='simpson',diff_units='s',time_setup='duration'), promotes_outputs=['*'], promotes_inputs=['*'])
        int3.add_integrand('fltcond|vs', rate_name='accel_vert', units='m/s', zero_start=True)
        int4 = self.add_subsystem('inth', Integrator(num_nodes=nn, method='simpson',diff_units='s',time_setup='duration'), promotes_outputs=['*'], promotes_inputs=['*'])
        int4.add_integrand('fltcond|h', rate_name='fltcond|vs', units='m', zero_start=True)
class SteadyFlightPhase(oc.PhaseGroup):
    """
    Integrated mission segment for steady (trimmed) flight.

    User-settable parameters for this phase:
        - Equivalent airspeed (fltcond|Ueas)
        - Vertical speed (fltcond|vs)
        - Segment duration (duration)

    Throttle is solved implicitly (via a BalanceComp) so that the horizontal
    acceleration is zero, i.e. the aircraft is in steady flight.

    The user-supplied BaseAircraftGroup class (the 'aircraft_model' option) is
    instantiated inside this phase. It must consume the promoted flight
    condition inputs (range, fltcond|h, fltcond|vs, fltcond|Ueas,
    fltcond|Utrue, fltcond|p, fltcond|rho, fltcond|T, fltcond|q, fltcond|CL,
    throttle, propulsor_active, braking, lift) and promote the outputs
    thrust (N), drag (N), and weight (kg) to this level, along with
    ac|geom|wing|S_ref (m**2), ac|aero|CLmax_TO, and ac|weights|MTOW (kg).
    """
    def initialize(self):
        # 'aircraft_model' is a class reference, instantiated in setup()
        self.options.declare('num_nodes', default=1)
        self.options.declare('flight_phase', default=None, desc='Phase of flight e.g. v0v1, cruise')
        self.options.declare('aircraft_model', default=None)

    def setup(self):
        npts = self.options['num_nodes']

        # Constant (non-state) settings for this phase
        dvs = self.add_subsystem('const_settings', IndepVarComp(), promotes_outputs=['*'])
        dvs.add_output('propulsor_active', val=np.ones(npts))
        dvs.add_output('braking', val=np.zeros(npts))
        dvs.add_output('fltcond|Ueas', val=np.ones((npts,)) * 90, units='m/s')
        dvs.add_output('fltcond|vs', val=np.ones((npts,)) * 1, units='m/s')
        dvs.add_output('zero_accel', val=np.zeros((npts,)), units='m/s**2')

        # Altitude and range are integrated states driven by vertical speed
        # and groundspeed respectively
        intg = self.add_subsystem('ode_integ',
                                  Integrator(num_nodes=npts, diff_units='s',
                                             time_setup='duration', method='simpson'),
                                  promotes_inputs=['fltcond|vs', 'fltcond|groundspeed'],
                                  promotes_outputs=['fltcond|h', 'range'])
        intg.add_integrand('fltcond|h', rate_name='fltcond|vs', val=1.0, units='m')
        self.add_subsystem('atmos',
                           ComputeAtmosphericProperties(num_nodes=npts, true_airspeed_in=False),
                           promotes_inputs=['*'], promotes_outputs=['*'])
        self.add_subsystem('gs', Groundspeeds(num_nodes=npts),
                           promotes_inputs=['*'], promotes_outputs=['*'])
        # add the user-defined aircraft model
        self.add_subsystem('acmodel',
                           self.options['aircraft_model'](num_nodes=npts,
                                                          flight_phase=self.options['flight_phase']),
                           promotes_inputs=['*'], promotes_outputs=['*'])
        self.add_subsystem('clcomp', SteadyFlightCL(num_nodes=npts),
                           promotes_inputs=['*'], promotes_outputs=['*'])
        self.add_subsystem('lift', Lift(num_nodes=npts),
                           promotes_inputs=['*'], promotes_outputs=['*'])
        self.add_subsystem('haccel', HorizontalAcceleration(num_nodes=npts),
                           promotes_inputs=['*'], promotes_outputs=['*'])
        intg.add_integrand('range', rate_name='fltcond|groundspeed', val=1.0, units='m')
        # Implicitly solve for throttle such that accel_horiz equals zero_accel
        self.add_subsystem('steadyflt',
                           BalanceComp(name='throttle', val=np.ones((npts,)) * 0.5,
                                       lower=0.01, upper=2.0, units=None, normalize=False,
                                       eq_units='m/s**2', rhs_name='accel_horiz',
                                       lhs_name='zero_accel', rhs_val=np.zeros((npts,))),
                           promotes_inputs=['accel_horiz', 'zero_accel'],
                           promotes_outputs=['throttle'])
# class OldSteadyFlightPhase(Group):
# """
# This component group models steady flight conditions.
# Settable mission parameters include:
# Airspeed (fltcond|Ueas)
# Vertical speed (fltcond|vs)
# Duration of the segment (duration)
# Throttle is set automatically to ensure steady flight
# The BaseAircraftGroup object is passed in.
# The BaseAircraftGroup should be built to accept the following inputs
# and return the following outputs.
# The outputs should be promoted to the top level in the component.
# Inputs
# ------
# range : float
# Total distance travelled (vector, m)
# fltcond|h : float
# Altitude (vector, m)
# fltcond|vs : float
# Vertical speed (vector, m/s)
# fltcond|Ueas : float
# Equivalent airspeed (vector, m/s)
# fltcond|Utrue : float
# True airspeed (vector, m/s)
# fltcond|p : float
# Pressure (vector, Pa)
# fltcond|rho : float
# Density (vector, kg/m3)
# fltcond|T : float
# Temperature (vector, K)
# fltcond|q : float
# Dynamic pressure (vector, Pa)
# fltcond|CL : float
# Lift coefficient (vector, dimensionless)
# throttle : float
# Motor / propeller throttle setting scaled from 0 to 1 or slightly more (vector, dimensionless)
# propulsor_active : float
# If a multi-propulsor airplane, a failure condition should be modeled in the propulsion model by multiplying throttle by propulsor_active.
# It will generally be 1.0 unless a failure condition is being modeled, in which case it will be 0 (vector, dimensionless)
# braking : float
# Brake friction coefficient (default 0.4 for dry runway braking, 0.03 for resistance unbraked)
# Should not be applied in the air or nonphysical effects will result (vector, dimensionless)
# lift : float
# Lift force (vector, N)
# Outputs
# -------
# thrust : float
# Total thrust force produced by all propulsors (vector, N)
# drag : float
# Total drag force in the airplane axis produced by all sources of drag (vector, N)
# weight : float
# Weight (mass, really) of the airplane at each point in time. (vector, kg)
# ac|geom|wing|S_ref
# Wing reference area (scalar, m**2)
# ac|aero|CLmax_TO
# CLmax with flaps in max takeoff position (scalar, dimensionless)
# ac|weights|MTOW
# Maximum takeoff weight (scalar, kg)
# """
# def initialize(self):
# self.options.declare('num_nodes',default=1)
# self.options.declare('flight_phase',default=None,desc='Phase of flight e.g. v0v1, cruise')
# self.options.declare('aircraft_model',default=None)
# def setup(self):
# nn = self.options['num_nodes']
# ivcomp = self.add_subsystem('const_settings', IndepVarComp(), promotes_outputs=["*"])
# ivcomp.add_output('propulsor_active', val=np.ones(nn))
# ivcomp.add_output('braking', val=np.zeros(nn))
# ivcomp.add_output('fltcond|Ueas',val=np.ones((nn,))*90, units='m/s')
# ivcomp.add_output('fltcond|vs',val=np.ones((nn,))*1, units='m/s')
# ivcomp.add_output('zero_accel',val=np.zeros((nn,)),units='m/s**2')
# self.add_subsystem('inth',Integrator(num_nodes=nn, method='simpson', quantity_units='m', diff_units='s', time_setup='duration'),
# promotes_inputs=[('dqdt','fltcond|vs'),'duration',('q_initial','fltcond|h_initial')],promotes_outputs=[('q','fltcond|h'),('q_final','fltcond|h_final')])
# self.add_subsystem('atmos', ComputeAtmosphericProperties(num_nodes=nn, true_airspeed_in=False), promotes_inputs=['*'], promotes_outputs=['*'])
# self.add_subsystem('gs',Groundspeeds(num_nodes=nn),promotes_inputs=['*'],promotes_outputs=['*'])
# # add the user-defined aircraft model
# self.add_subsystem('acmodel',self.options['aircraft_model'](num_nodes=nn, flight_phase=self.options['flight_phase']),promotes_inputs=['*'],promotes_outputs=['*'])
# self.add_subsystem('clcomp',SteadyFlightCL(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
# self.add_subsystem('lift',Lift(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
# self.add_subsystem('haccel',HorizontalAcceleration(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
# self.add_subsystem('intrange',Integrator(num_nodes=nn, method='simpson', quantity_units='m', diff_units='s', time_setup='duration'),
# promotes_inputs=[('dqdt','fltcond|groundspeed'),'duration',('q_initial','range_initial')],promotes_outputs=[('q','range'),('q_final','range_final')])
# self.add_subsystem('steadyflt',BalanceComp(name='throttle',val=np.ones((nn,))*0.5,lower=0.01,upper=2.0,units=None,normalize=False,eq_units='m/s**2',rhs_name='accel_horiz',lhs_name='zero_accel',rhs_val=np.zeros((nn,))),
# promotes_inputs=['accel_horiz','zero_accel'],promotes_outputs=['throttle'])
class ClimbAnglePhase(Group):
    """
    This component checks the climb angle for a
    single flight condition at the V2 speed. No integration is performed.
    User settable parameter includes the V2/Vstall multiple (default 1.2)
    Useful for ensuring all-engine climb gradients in optimization.
    Choose flight_phase = AllEngineClimbAngle or EngineOutClimbAngle
    to set the propulsor_active property correctly.
    Inputs
    ------
    range : float
        Total distance travelled (vector, m)
    fltcond|h : float
        Altitude (vector, m)
    fltcond|vs : float
        Vertical speed (vector, m/s)
    fltcond|Ueas : float
        Equivalent airspeed (vector, m/s)
    fltcond|Utrue : float
        True airspeed (vector, m/s)
    fltcond|p : float
        Pressure (vector, Pa)
    fltcond|rho : float
        Density (vector, kg/m3)
    fltcond|T : float
        Temperature (vector, K)
    fltcond|q : float
        Dynamic pressure (vector, Pa)
    fltcond|CL : float
        Lift coefficient (vector, dimensionless)
    throttle : float
        Motor / propeller throttle setting scaled from 0 to 1 or slightly more (vector, dimensionless)
    propulsor_active : float
        If a multi-propulsor airplane, a failure condition should be modeled in the propulsion model by multiplying throttle by propulsor_active.
        It will generally be 1.0 unless a failure condition is being modeled, in which case it will be 0 (vector, dimensionless)
    lift : float
        Lift force (vector, N)
    Outputs
    -------
    thrust : float
        Total thrust force produced by all propulsors (vector, N)
    drag : float
        Total drag force in the airplane axis produced by all sources of drag (vector, N)
    weight : float
        Weight (mass, really) of the airplane at each point in time. Generally will need to be integrated by Dymos as a state with a rate source (vector, kg)
    ac|geom|wing|S_ref
        Wing reference area (scalar, m**2)
    ac|aero|CLmax_TO
        CLmax with flaps in max takeoff position (scalar, dimensionless)
    ac|weights|MTOW
        Maximum takeoff weight (scalar, kg)
    """
    def initialize(self):
        # 'aircraft_model' is a class reference (a BaseAircraftGroup subclass)
        # that gets instantiated inside setup()
        self.options.declare('num_nodes',default=1)
        self.options.declare('flight_phase',default=None,desc='Phase of flight e.g. v0v1, cruise')
        self.options.declare('aircraft_model',default=None)
    def setup(self):
        nn = self.options['num_nodes']
        # Fixed settings for this check: sea-level altitude, wings-level flight
        # (cosgamma = 1) for the steady-flight CL computation
        ivcomp = self.add_subsystem('const_settings', IndepVarComp(), promotes_outputs=["*"])
        ivcomp.add_output('v2_vstall_mult',val=1.2)
        ivcomp.add_output('fltcond|h',val=np.zeros((nn,)),units='m')
        ivcomp.add_output('fltcond|cosgamma', val=np.ones((nn,)))
        flight_phase = self.options['flight_phase']
        # Select all-engine vs engine-out condition via propulsor_active.
        # NOTE(review): any other flight_phase value leaves 'propulsor_active'
        # and 'throttle' undeclared -- confirm only these two phases are used.
        if flight_phase == 'AllEngineClimbAngle':
            ivcomp.add_output('propulsor_active',val=np.ones((nn,)))
            ivcomp.add_output('throttle',val=np.ones((nn,)))
        elif flight_phase == 'EngineOutClimbAngle':
            ivcomp.add_output('propulsor_active',val=np.zeros((nn,)))
            ivcomp.add_output('throttle',val=np.ones((nn,)))
        # Stall speed at MTOW with takeoff flaps, then V2 = v2_vstall_mult * Vstall
        self.add_subsystem('stall',StallSpeed(),promotes_inputs=[('CLmax','ac|aero|CLmax_TO'),('weight','ac|weights|MTOW'),'ac|geom|wing|S_ref'],promotes_outputs=['*'])
        self.add_subsystem('vrspeed',ElementMultiplyDivideComp(output_name='takeoff|v2',input_names=['Vstall_eas','v2_vstall_mult'],input_units=['m/s',None]),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('atmos', ComputeAtmosphericProperties(num_nodes=nn, true_airspeed_in=False), promotes_inputs=['*'], promotes_outputs=['*'])
        # Steady-flight lift coefficient at MTOW for the resulting dynamic pressure
        self.add_subsystem('clcomp',SteadyFlightCL(num_nodes=nn), promotes_inputs=[('weight','ac|weights|MTOW'),'fltcond|*','ac|*'],promotes_outputs=['*'])
        self.connect('takeoff|v2','fltcond|Ueas')
        # the aircraft model needs to provide thrust and drag
        self.add_subsystem('acmodel',self.options['aircraft_model'](num_nodes=nn,flight_phase=self.options['flight_phase']),promotes_inputs=['*'],promotes_outputs=['*'])
        # gamma = arcsin((thrust - drag) / weight / g), evaluated at the V2 condition
        self.add_subsystem('climbangle',ClimbAngleComp(num_nodes=nn),promotes_inputs=['drag',('weight','ac|weights|MTOW'),'thrust'],promotes_outputs=['gamma'])
class TakeoffTransition(ExplicitComponent):
    """
    Computes the ground distance covered and altitude gained during the
    circular 'transition' arc flown between rotation and steady climb-out.

    Follows the takeoff distance analysis method in Raymer's aircraft design
    text. The default obstacle height matches GA / Part 23 aircraft (35 ft);
    override 'h_obstacle' when analyzing Part 25 aircraft.

    Inputs
    ------
    fltcond|Utrue
        Transition true airspeed, generally the average of vr and v2 (scalar, m/s)
    gamma : float
        Climb-out flight path angle (scalar, rad)

    Outputs
    -------
    s_transition : float
        Horizontal distance during transition to v2 climb out (scalar, m)
    h_transition : float
        Altitude at the transition point (scalar, m)
    t_transition : float
        Elapsed time in the transition (scalar, s)

    Options
    -------
    h_obstacle : float
        Obstacle height to clear (in **meters**) (default 10.66, equiv. 35 ft)
    load_factor : float
        Load factor during rotation and transition (default 1.2 from Raymer book)
    """
    def initialize(self):
        self.options.declare('h_obstacle', default=10.66, desc='Obstacle clearance height in m')
        self.options.declare('load_factor', default=1.2, desc='Load factor during circular arc transition')

    def setup(self):
        # src_indices=0 pulls a single entry from each upstream vector
        self.add_input('fltcond|Utrue', units='m/s', src_indices=0)
        self.add_input('gamma', units='rad', src_indices=0)
        self.add_output('s_transition', units='m')
        self.add_output('h_transition', units='m')
        self.add_output('t_transition', units='s')
        self.declare_partials(['s_transition', 'h_transition', 't_transition'],
                              ['fltcond|Utrue', 'gamma'])

    def compute(self, inputs, outputs):
        h_obs = self.options['h_obstacle']
        excess_n = self.options['load_factor'] - 1
        g = 9.80665  # m/s^2
        gamma = inputs['gamma']
        speed = inputs['fltcond|Utrue']
        # Arc radius set by the centripetal acceleration (n - 1) * g
        radius = speed ** 2 / excess_n / g
        dist = radius * np.sin(gamma)
        height = radius * (1 - np.cos(gamma))
        if height > h_obs:
            # Obstacle cleared while still on the arc: cut the arc at h_obs
            dist = np.sqrt(radius ** 2 - (radius - h_obs) ** 2)
            height = h_obs
        outputs['s_transition'] = dist
        outputs['h_transition'] = height
        outputs['t_transition'] = dist / speed

    def compute_partials(self, inputs, J):
        h_obs = self.options['h_obstacle']
        excess_n = self.options['load_factor'] - 1
        g = 9.80665  # m/s^2
        gamma = inputs['gamma']
        speed = inputs['fltcond|Utrue']
        radius = speed ** 2 / excess_n / g
        dradius_dspeed = 2 * speed / excess_n / g
        dist = radius * np.sin(gamma)
        height = radius * (1 - np.cos(gamma))
        if height > h_obs:
            # Same branch condition as compute(): obstacle cleared mid-arc,
            # so h_transition is pinned at h_obs (zero partials)
            dist = np.sqrt(radius ** 2 - (radius - h_obs) ** 2)
            ddist_dspeed = 1 / 2 / np.sqrt(radius ** 2 - (radius - h_obs) ** 2) * (2 * radius * dradius_dspeed - 2 * (radius - h_obs) * dradius_dspeed)
            ddist_dgamma = 0
            dheight_dspeed = 0
            dheight_dgamma = 0
        else:
            dheight_dspeed = dradius_dspeed * (1 - np.cos(gamma))
            dheight_dgamma = radius * np.sin(gamma)
            ddist_dspeed = dradius_dspeed * np.sin(gamma)
            ddist_dgamma = radius * np.cos(gamma)
        J['s_transition', 'gamma'] = ddist_dgamma
        J['s_transition', 'fltcond|Utrue'] = ddist_dspeed
        J['h_transition', 'gamma'] = dheight_dgamma
        J['h_transition', 'fltcond|Utrue'] = dheight_dspeed
        # t = s / U: chain rule for gamma, quotient rule for airspeed
        J['t_transition', 'gamma'] = ddist_dgamma / speed
        J['t_transition', 'fltcond|Utrue'] = (ddist_dspeed * speed - dist) / speed ** 2
class TakeoffClimb(ExplicitComponent):
    """
    Computes the ground distance from the end of the transition arc until
    the obstacle height is cleared, following the analysis in Raymer's book.

    Inputs
    ------
    gamma : float
        Climb out flight path angle (scalar, rad)
    h_transition : float
        Altitude at the transition point (scalar, m)

    Outputs
    -------
    s_climb : float
        Horizontal distance from end of transition until obstacle is cleared (scalar, m)

    Options
    -------
    h_obstacle : float
        Obstacle height to clear (in **meters**) (default 10.66, equiv. 35 ft)
    """
    def initialize(self):
        self.options.declare('h_obstacle', default=10.66, desc='Obstacle clearance height in m')

    def setup(self):
        # src_indices=-1 takes the final entry of each upstream vector
        self.add_input('h_transition', units='m')
        self.add_input('gamma', units='rad', src_indices=-1)
        self.add_input('fltcond|Utrue', units='m/s', src_indices=-1)
        self.add_output('s_climb', units='m')
        self.add_output('t_climb', units='s')
        self.declare_partials(['s_climb'], ['h_transition', 'gamma'])
        self.declare_partials(['t_climb'], ['h_transition', 'gamma', 'fltcond|Utrue'])

    def compute(self, inputs, outputs):
        h_obs = self.options['h_obstacle']
        gamma = inputs['gamma']
        h_start = inputs['h_transition']
        speed = inputs['fltcond|Utrue']
        # Straight-line climb at angle gamma over the remaining height
        dist = (h_obs - h_start) / np.tan(gamma)
        outputs['s_climb'] = dist
        outputs['t_climb'] = dist / speed

    def compute_partials(self, inputs, J):
        h_obs = self.options['h_obstacle']
        gamma = inputs['gamma']
        h_start = inputs['h_transition']
        speed = inputs['fltcond|Utrue']
        dist = (h_obs - h_start) / np.tan(gamma)
        J['s_climb', 'gamma'] = -(h_obs - h_start) / np.tan(gamma) ** 2 * (1 / np.cos(gamma)) ** 2
        J['s_climb', 'h_transition'] = -1 / np.tan(gamma)
        # Time partials follow from t = s / U
        J['t_climb', 'gamma'] = J['s_climb', 'gamma'] / speed
        J['t_climb', 'h_transition'] = J['s_climb', 'h_transition'] / speed
        J['t_climb', 'fltcond|Utrue'] = -dist / speed ** 2
class RobustRotationPhase(oc.PhaseGroup):
    """
    This adds general mission analysis capabilities to an existing airplane model.
    The BaseAircraftGroup object is passed in. It should be built to accept the following inputs and return the following outputs.
    The outputs should be promoted to the top level in the component.
    Inputs
    ------
    range : float
        Total distance travelled (vector, m)
    fltcond|h : float
        Altitude (vector, m)
    fltcond|vs : float
        Vertical speed (vector, m/s)
    fltcond|Ueas : float
        Equivalent airspeed (vector, m/s)
    fltcond|Utrue : float
        True airspeed (vector, m/s)
    fltcond|p : float
        Pressure (vector, Pa)
    fltcond|rho : float
        Density (vector, kg/m3)
    fltcond|T : float
        Temperature (vector, K)
    fltcond|q : float
        Dynamic pressure (vector, Pa)
    fltcond|CL : float
        Lift coefficient (vector, dimensionless)
    throttle : float
        Motor / propeller throttle setting scaled from 0 to 1 or slightly more (vector, dimensionless)
    propulsor_active : float
        If a multi-propulsor airplane, a failure condition should be modeled in the propulsion model by multiplying throttle by propulsor_active.
        It will generally be 1.0 unless a failure condition is being modeled, in which case it will be 0 (vector, dimensionless)
    braking : float
        Percentage brakes applied, from 0 to 1. Should not be applied in the air or nonphysical effects will result (vector, dimensionless)
    lift : float
        Lift force (vector, N)
    Outputs
    -------
    thrust : float
        Total thrust force produced by all propulsors (vector, N)
    drag : float
        Total drag force in the airplane axis produced by all sources of drag (vector, N)
    weight : float
        Weight (mass, really) of the airplane at each point in time. Generally will need to be integrated by Dymos as a state with a rate source (vector, kg)
    ac|geom|wing|S_ref
        Wing reference area (scalar, m**2)
    ac|aero|CLmax_TO
        CLmax with flaps in max takeoff position (scalar, dimensionless)
    ac|weights|MTOW
        Maximum takeoff weight (scalar, kg)
    """
    def initialize(self):
        # 'aircraft_model' is a class reference instantiated in setup()
        self.options.declare('num_nodes',default=1)
        self.options.declare('flight_phase',default=None,desc='Phase of flight e.g. v0v1, cruise')
        self.options.declare('aircraft_model',default=None)
        # Obstacle clearance height in meters (10.66 m == 35 ft)
        self.options.declare('h_obstacle',default=10.66, )
    def setup(self):
        nn = self.options['num_nodes']
        ivcomp = self.add_subsystem('const_settings', IndepVarComp(), promotes_outputs=["*"])
        flight_phase = self.options['flight_phase']
        if flight_phase == 'rotate':
            # NOTE(review): propulsor_active is zero here, i.e. a failed
            # propulsor during rotation (balanced-field case) -- confirm intended
            ivcomp.add_output('braking',val=np.zeros((nn,)))
            ivcomp.add_output('propulsor_active',val=np.zeros((nn,)))
            ivcomp.add_output('throttle',val=np.ones((nn,)))
        # flight conditions are sea level takeoff, transition speed
        # split off a single node to compute climb angle
        # compute the transition distance and add it to range_initial
        # compute the transition time as a function of the groundspeed
        # provide transition time as duration
        ivcomp.add_output('v2_vstall_mult',val=1.2)
        ivcomp.add_output('vr_vstall_mult',val=1.1)
        ivcomp.add_output('fltcond|vs', val=np.zeros((nn,)),units='m/s')
        ivcomp.add_output('fltcond|cosgamma', val=np.ones((nn,)),units=None)
        ivcomp.add_output('h_obstacle',val=35,units='ft')
        # Altitude ramps linearly from the initial height to the obstacle height
        self.add_subsystem('altitudes',LinearInterpolator(num_nodes=nn, units='m'),promotes_inputs=[('start_val','h_initial')],promotes_outputs=[('vec','fltcond|h')])
        self.connect('h_obstacle','altitudes.end_val')
        # Stall speed at MTOW with takeoff flaps; Vr and V2 are multiples of it
        self.add_subsystem('stall',StallSpeed(),promotes_inputs=[('CLmax','ac|aero|CLmax_TO'),('weight','ac|weights|MTOW'),'ac|geom|wing|S_ref'],promotes_outputs=['*'])
        self.add_subsystem('vrspeed',ElementMultiplyDivideComp(output_name='takeoff|vr',input_names=['Vstall_eas','vr_vstall_mult'],input_units=['m/s',None]),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('v2speed',ElementMultiplyDivideComp(output_name='takeoff|v2',input_names=['Vstall_eas','v2_vstall_mult'],input_units=['m/s',None]),promotes_inputs=['*'],promotes_outputs=['*'])
        # Airspeed ramps linearly from Vr to V2 across the phase
        self.add_subsystem('speeds',LinearInterpolator(num_nodes=nn,units='kn'),promotes_inputs=[('start_val','takeoff|vr'),('end_val','takeoff|v2')],promotes_outputs=[('vec','fltcond|Ueas')])
        self.add_subsystem('atmos', ComputeAtmosphericProperties(num_nodes=nn, true_airspeed_in=False), promotes_inputs=['*'], promotes_outputs=['*'])
        # pretty confident there's a simpler closed form multiple for CL at v2
        self.add_subsystem('clcomp',SteadyFlightCL(num_nodes=nn), promotes_inputs=['weight','fltcond|*','ac|*'],promotes_outputs=['*'])
        # the aircraft model needs to provide thrust and drag
        self.add_subsystem('acmodel',self.options['aircraft_model'](num_nodes=nn,flight_phase=self.options['flight_phase']),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('climbangle',ClimbAngleComp(num_nodes=nn),promotes_inputs=['drag','weight','thrust'],promotes_outputs=['gamma'])
        # Raymer-style circular transition arc followed by straight climb to the obstacle
        self.add_subsystem('transition',TakeoffTransition(),promotes_inputs=['fltcond|Utrue','gamma'],promotes_outputs=['h_transition','s_transition','t_transition'])
        self.add_subsystem('v2climb',TakeoffClimb(),promotes_inputs=['h_transition','gamma','fltcond|Utrue'],promotes_outputs=['s_climb','t_climb'])
        # Phase bookkeeping: end-of-phase range, segment duration, final altitude
        self.add_subsystem('tod_final',AddSubtractComp(output_name='range_final',input_names=['range_initial','s_transition','s_climb'],units='m'),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('duration',AddSubtractComp(output_name='duration',input_names=['t_transition','t_climb'],units='s'),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('h_final',AddSubtractComp(output_name='fltcond|h_final',input_names=['h_obstacle'],units='m'),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('ranges',LinearInterpolator(num_nodes=nn,units='m'),promotes_inputs=[('start_val','range_initial'),('end_val','range_final')],promotes_outputs=[('vec','range')])
| 47.952854 | 228 | 0.65185 | from __future__ import division
from openmdao.api import Group, ExplicitComponent, IndepVarComp, BalanceComp, ImplicitComponent
import openconcept.api as oc
from openconcept.analysis.atmospherics.compute_atmos_props import ComputeAtmosphericProperties
from openconcept.analysis.aerodynamics import Lift, StallSpeed
from openconcept.utilities.math import ElementMultiplyDivideComp, AddSubtractComp
from openconcept.utilities.math.integrals import Integrator
from openconcept.utilities.linearinterp import LinearInterpolator
from openconcept.utilities.math.integrals import Integrator
import numpy as np
import copy
class ClimbAngleComp(ExplicitComponent):
def initialize(self):
self.options.declare('num_nodes', default=1)
def setup(self):
nn = self.options['num_nodes']
self.add_input('drag', units='N',shape=(nn,))
self.add_input('weight', units='kg', shape=(nn,))
self.add_input('thrust', units='N',shape=(nn,))
self.add_output('gamma', units='rad',shape=(nn,))
self.declare_partials(['gamma'], ['weight','thrust','drag'], cols=np.arange(0,nn), rows=np.arange(0,nn))
def compute(self, inputs, outputs):
g = 9.80665
outputs['gamma'] = np.arcsin((inputs['thrust']-inputs['drag'])/inputs['weight']/g)
def compute_partials(self, inputs, J):
g = 9.80665
interior_qty = (inputs['thrust']-inputs['drag'])/inputs['weight']/g
d_arcsin = 1/np.sqrt(1-interior_qty**2)
J['gamma','thrust'] = d_arcsin/inputs['weight']/g
J['gamma','drag'] = -d_arcsin/inputs['weight']/g
J['gamma','weight'] = -d_arcsin*(inputs['thrust']-inputs['drag'])/inputs['weight']**2/g
class FlipVectorComp(ExplicitComponent):
def initialize(self):
self.options.declare('num_nodes',default=1)
self.options.declare('negative',default=False)
self.options.declare('units',default=None)
def setup(self):
nn = self.options['num_nodes']
units = self.options['units']
self.add_input('vec_in', units=units, shape=(nn,))
self.add_output('vec_out', units=units, shape=(nn,))
negative = self.options['negative']
if negative:
scaler = -1
else:
scaler = 1
self.declare_partials(['vec_out'],['vec_in'],rows=np.arange(nn-1,-1,-1),cols=np.arange(0,nn,1),val=scaler*np.ones((nn,)))
def compute(self, inputs, outputs):
negative = self.options['negative']
if negative:
scaler = -1
else:
scaler = 1
outputs['vec_out'] = scaler * np.flip(inputs['vec_in'], 0)
class BFLImplicitSolve(ImplicitComponent):
def setup(self):
self.add_input('distance_continue', units='m')
self.add_input('distance_abort', units='m')
self.add_input('takeoff|vr', units='m/s')
self.add_output('takeoff|v1', units='m/s',val=20,lower=10,upper=150)
self.declare_partials('takeoff|v1',['distance_continue','distance_abort','takeoff|v1','takeoff|vr'])
def apply_nonlinear(self, inputs, outputs, residuals):
speedtol = 1e-1
disttol = 0
if inputs['takeoff|vr'] < outputs['takeoff|v1'] + speedtol:
residuals['takeoff|v1'] = inputs['takeoff|vr'] - outputs['takeoff|v1']
else:
residuals['takeoff|v1'] = inputs['distance_continue'] - inputs['distance_abort']
if inputs['takeoff|vr'] >= outputs['takeoff|v1'] and inputs['takeoff|vr'] - outputs['takeoff|v1'] < speedtol and (inputs['distance_abort'] - inputs['distance_continue']) > disttol:
residuals['takeoff|v1'] = inputs['distance_continue'] - inputs['distance_abort']
def linearize(self, inputs, outputs, partials):
speedtol = 1e-1
disttol = 0
if inputs['takeoff|vr'] < outputs['takeoff|v1'] + speedtol:
partials['takeoff|v1','distance_continue'] = 0
partials['takeoff|v1','distance_abort'] = 0
partials['takeoff|v1','takeoff|vr'] = 1
partials['takeoff|v1','takeoff|v1'] = -1
else:
partials['takeoff|v1','distance_continue'] = 1
partials['takeoff|v1','distance_abort'] = -1
partials['takeoff|v1','takeoff|vr'] = 0
partials['takeoff|v1','takeoff|v1'] = 0
if inputs['takeoff|vr'] >= outputs['takeoff|v1'] and inputs['takeoff|vr'] - outputs['takeoff|v1'] < speedtol and (inputs['distance_abort'] - inputs['distance_continue']) > disttol:
partials['takeoff|v1','distance_continue'] = 1
partials['takeoff|v1','distance_abort'] = -1
partials['takeoff|v1','takeoff|vr'] = 0
partials['takeoff|v1','takeoff|v1'] = 0
class Groundspeeds(ExplicitComponent):
def initialize(self):
self.options.declare('num_nodes',default=1,desc="Number of Simpson intervals to use per seg (eg. climb, cruise, descend). Number of analysis points is 2N+1")
def setup(self):
nn = self.options['num_nodes']
self.add_input('fltcond|vs', units='m/s',shape=(nn,))
self.add_input('fltcond|Utrue', units='m/s',shape=(nn,))
self.add_output('fltcond|groundspeed', units='m/s',shape=(nn,))
self.add_output('fltcond|cosgamma', shape=(nn,), desc='Cosine of the flight path angle')
self.add_output('fltcond|singamma', shape=(nn,), desc='sin of the flight path angle' )
self.declare_partials(['fltcond|groundspeed','fltcond|cosgamma','fltcond|singamma'], ['fltcond|vs','fltcond|Utrue'], rows=range(nn), cols=range(nn))
def compute(self, inputs, outputs):
nn = self.options['num_nodes']
inside = inputs['fltcond|Utrue']**2-inputs['fltcond|vs']**2
groundspeed = np.sqrt(inside)
groundspeed_fixed = np.sqrt(np.where(np.less(inside, 0.0), 0.01, inside))
outputs['fltcond|groundspeed'] = groundspeed_fixed
outputs['fltcond|singamma'] = np.where(np.isnan(groundspeed),1,inputs['fltcond|vs'] / inputs['fltcond|Utrue'])
outputs['fltcond|cosgamma'] = groundspeed_fixed / inputs['fltcond|Utrue']
def compute_partials(self, inputs, J):
inside = inputs['fltcond|Utrue']**2-inputs['fltcond|vs']**2
groundspeed = np.sqrt(inside)
groundspeed_fixed = np.sqrt(np.where(np.less(inside, 0.0), 0.01, inside))
J['fltcond|groundspeed','fltcond|vs'] = np.where(np.isnan(groundspeed),0,(1/2) / groundspeed_fixed * (-2) * inputs['fltcond|vs'])
J['fltcond|groundspeed','fltcond|Utrue'] = np.where(np.isnan(groundspeed),0, (1/2) / groundspeed_fixed * 2 * inputs['fltcond|Utrue'])
J['fltcond|singamma','fltcond|vs'] = np.where(np.isnan(groundspeed), 0, 1 / inputs['fltcond|Utrue'])
J['fltcond|singamma','fltcond|Utrue'] = np.where(np.isnan(groundspeed), 0, - inputs['fltcond|vs'] / inputs['fltcond|Utrue'] ** 2)
J['fltcond|cosgamma','fltcond|vs'] = J['fltcond|groundspeed','fltcond|vs'] / inputs['fltcond|Utrue']
J['fltcond|cosgamma','fltcond|Utrue'] = (J['fltcond|groundspeed','fltcond|Utrue'] * inputs['fltcond|Utrue'] - groundspeed_fixed) / inputs['fltcond|Utrue']**2
class HorizontalAcceleration(ExplicitComponent):
def initialize(self):
self.options.declare('num_nodes',default=1)
def setup(self):
nn = self.options['num_nodes']
g = 9.80665
self.add_input('weight', units='kg', shape=(nn,))
self.add_input('drag', units='N',shape=(nn,))
self.add_input('lift', units='N',shape=(nn,))
self.add_input('thrust', units='N',shape=(nn,))
self.add_input('fltcond|singamma',shape=(nn,))
self.add_input('braking',shape=(nn,))
self.add_output('accel_horiz', units='m/s**2', shape=(nn,))
arange=np.arange(nn)
self.declare_partials(['accel_horiz'], ['weight','drag','lift','thrust','braking'], rows=arange, cols=arange)
self.declare_partials(['accel_horiz'], ['fltcond|singamma'], rows=arange, cols=arange, val=-g*np.ones((nn,)))
def compute(self, inputs, outputs):
nn = self.options['num_nodes']
g = 9.80665
m = inputs['weight']
floor_vec = np.where(np.less((g-inputs['lift']/m),0.0),0.0,1.0)
accel = inputs['thrust']/m - inputs['drag']/m - floor_vec*inputs['braking']*(g-inputs['lift']/m) - g*inputs['fltcond|singamma']
outputs['accel_horiz'] = accel
def compute_partials(self, inputs, J):
g = 9.80665
m = inputs['weight']
floor_vec = np.where(np.less((g-inputs['lift']/m),0.0),0.0,1.0)
J['accel_horiz','thrust'] = 1/m
J['accel_horiz','drag'] = -1/m
J['accel_horiz','braking'] = -floor_vec*(g-inputs['lift']/m)
J['accel_horiz','lift'] = floor_vec*inputs['braking']/m
J['accel_horiz','weight'] = (inputs['drag']-inputs['thrust']-floor_vec*inputs['braking']*inputs['lift'])/m**2
class VerticalAcceleration(ExplicitComponent):
def initialize(self):
self.options.declare('num_nodes',default=1)
def setup(self):
nn = self.options['num_nodes']
g = 9.80665
self.add_input('weight', units='kg', shape=(nn,))
self.add_input('drag', units='N',shape=(nn,))
self.add_input('lift', units='N',shape=(nn,))
self.add_input('thrust', units='N',shape=(nn,))
self.add_input('fltcond|singamma',shape=(nn,))
self.add_input('fltcond|cosgamma',shape=(nn,))
self.add_output('accel_vert', units='m/s**2', shape=(nn,),upper=2.5*g,lower=-1*g)
arange=np.arange(nn)
self.declare_partials(['accel_vert'], ['weight','drag','lift','thrust','fltcond|singamma','fltcond|cosgamma'], rows=arange, cols=arange)
def compute(self, inputs, outputs):
nn = self.options['num_nodes']
g = 9.80665
cosg = inputs['fltcond|cosgamma']
sing = inputs['fltcond|singamma']
accel = (inputs['lift']*cosg + (inputs['thrust']-inputs['drag'])*sing - g*inputs['weight'])/inputs['weight']
accel = np.clip(accel, -g, 2.5*g)
outputs['accel_vert'] = accel
def compute_partials(self, inputs, J):
g = 9.80665
m = inputs['weight']
cosg = inputs['fltcond|cosgamma']
sing = inputs['fltcond|singamma']
J['accel_vert','thrust'] = sing / m
J['accel_vert','drag'] = -sing / m
J['accel_vert','lift'] = cosg / m
J['accel_vert','fltcond|singamma'] = (inputs['thrust']-inputs['drag']) / m
J['accel_vert','fltcond|cosgamma'] = inputs['lift'] / m
J['accel_vert','weight'] = -(inputs['lift']*cosg + (inputs['thrust']-inputs['drag'])*sing)/m**2
class SteadyFlightCL(ExplicitComponent):
    """Computes the lift coefficient needed to support the aircraft in steady flight.

    CL = cos(gamma) * g * weight / q / S_ref

    weight, dynamic pressure q, and cos(gamma) are vectors over the analysis
    points; wing reference area is a single scalar shared by all points.
    """
    def initialize(self):
        self.options.declare('num_nodes',default=5,desc="Number of Simpson intervals to use per seg (eg. climb, cruise, descend). Number of analysis points is 2N+1")
        self.options.declare('mission_segments',default=['climb','cruise','descent'])
    def setup(self):
        num_pts = self.options['num_nodes']
        diag = np.arange(num_pts)
        # per-point (vector) inputs
        self.add_input('weight', units='kg', shape=(num_pts,))
        self.add_input('fltcond|q', units='N * m**-2', shape=(num_pts,))
        self.add_input('fltcond|cosgamma', val=1.0, shape=(num_pts,))
        # scalar input shared by every analysis point
        self.add_input('ac|geom|wing|S_ref', units='m **2')
        self.add_output('fltcond|CL', shape=(num_pts,))
        # diagonal Jacobian wrt the vector inputs ...
        self.declare_partials(['fltcond|CL'], ['weight', 'fltcond|q', 'fltcond|cosgamma'], rows=diag, cols=diag)
        # ... and a full column wrt the single scalar wing area
        self.declare_partials(['fltcond|CL'], ['ac|geom|wing|S_ref'], rows=diag, cols=np.zeros(num_pts))
    def compute(self, inputs, outputs):
        grav = 9.80665
        # weight component normal to the flight path, as a force
        lift_req = inputs['fltcond|cosgamma'] * grav * inputs['weight']
        outputs['fltcond|CL'] = lift_req / inputs['fltcond|q'] / inputs['ac|geom|wing|S_ref']
    def compute_partials(self, inputs, J):
        grav = 9.80665
        cosg = inputs['fltcond|cosgamma']
        wt = inputs['weight']
        q = inputs['fltcond|q']
        sref = inputs['ac|geom|wing|S_ref']
        J['fltcond|CL', 'weight'] = cosg * grav / q / sref
        J['fltcond|CL', 'fltcond|q'] = -cosg * grav * wt / q ** 2 / sref
        J['fltcond|CL', 'ac|geom|wing|S_ref'] = -cosg * grav * wt / q / sref ** 2
        J['fltcond|CL', 'fltcond|cosgamma'] = grav * wt / q / sref
class GroundRollPhase(oc.PhaseGroup):
    """Models a takeoff ground-roll segment.

    Supported flight_phase values (set as an option):
      - 'v0v1': all-engine acceleration from standstill to decision speed V1
                (light rolling-resistance braking, propulsor active)
      - 'v1vr': continued acceleration from V1 to rotation speed VR with the
                failed propulsor inactive
      - 'v1v0': rejected takeoff — braking deceleration from V1 back to (near)
                zero speed, integrated backwards in time to avoid negative
                airspeeds

    In each case a BalanceComp solves for the segment duration so the airspeed
    at the segment boundary matches V1 or VR.

    NOTE(review): `zero_start` is only bound inside the three recognized
    branches; any other flight_phase raises NameError at the `if zero_start:`
    check below — confirm whether an explicit error is wanted instead.
    """
    def initialize(self):
        self.options.declare('num_nodes',default=1)
        self.options.declare('flight_phase',default=None,desc='Phase of flight e.g. v0v1, cruise')
        self.options.declare('aircraft_model',default=None)
    def setup(self):
        nn = self.options['num_nodes']
        ivcomp = self.add_subsystem('const_settings', IndepVarComp(), promotes_outputs=["*"])
        ivcomp.add_output('fltcond|CL', val=np.ones((nn,))*0.1)
        ivcomp.add_output('vr_vstall_mult',val=1.1)
        # ground roll: zero altitude and zero vertical speed throughout
        ivcomp.add_output('fltcond|h',val=np.zeros((nn,)),units='m')
        ivcomp.add_output('fltcond|vs',val=np.zeros((nn,)),units='m/s')
        # small nonzero "zero" speed — presumably keeps the speed integration
        # away from the singular 0 m/s point (see lower=1.5 below); TODO confirm
        ivcomp.add_output('zero_speed',val=2,units='m/s')
        flight_phase = self.options['flight_phase']
        if flight_phase == 'v0v1':
            # all engines operating, rolling resistance only
            ivcomp.add_output('braking',val=np.ones((nn,))*0.03)
            ivcomp.add_output('propulsor_active',val=np.ones((nn,)))
            ivcomp.add_output('throttle',val=np.ones((nn,)))
            zero_start = True
        elif flight_phase == 'v1vr':
            # one propulsor failed, still accelerating
            ivcomp.add_output('braking',val=np.ones((nn,))*0.03)
            ivcomp.add_output('propulsor_active',val=np.zeros((nn,)))
            ivcomp.add_output('throttle',val=np.ones((nn,)))
            zero_start = False
        elif flight_phase == 'v1v0':
            # rejected takeoff: heavy braking, throttle closed
            ivcomp.add_output('braking',val=0.4*np.ones((nn,)))
            ivcomp.add_output('propulsor_active',val=np.zeros((nn,)))
            ivcomp.add_output('throttle',val=np.zeros((nn,)))
            zero_start=False
        self.add_subsystem('atmos', ComputeAtmosphericProperties(num_nodes=nn, true_airspeed_in=True), promotes_inputs=['*'], promotes_outputs=['*'])
        self.add_subsystem('gs',Groundspeeds(num_nodes=nn),promotes_inputs=['*'],promotes_outputs=['*'])
        # add the user-defined aircraft model (must provide thrust, drag, weight)
        self.add_subsystem('acmodel',self.options['aircraft_model'](num_nodes=nn,flight_phase=self.options['flight_phase']),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('lift',Lift(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('stall',StallSpeed(),promotes_inputs=[('CLmax','ac|aero|CLmax_TO'),('weight','ac|weights|MTOW'),'ac|geom|wing|S_ref'],promotes_outputs=['*'])
        # rotation speed VR = Vstall * vr_vstall_mult
        self.add_subsystem('vrspeed',ElementMultiplyDivideComp(output_name='takeoff|vr',input_names=['Vstall_eas','vr_vstall_mult'],input_units=['m/s',None]),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('haccel',HorizontalAcceleration(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
        if flight_phase == 'v1v0':
            # unfortunately need to shoot backwards to avoid negative airspeeds
            # reverse the order of the accelerations so the last one is first (and make them negative)
            self.add_subsystem('flipaccel', FlipVectorComp(num_nodes=nn, units='m/s**2', negative=True), promotes_inputs=[('vec_in','accel_horiz')])
            # integrate the timesteps in reverse from near zero speed.
            ode_integ = self.add_subsystem('ode_integ', Integrator(num_nodes=nn, method='simpson', diff_units='s',time_setup='duration'), promotes_inputs=['*'], promotes_outputs=['*'])
            ode_integ.add_integrand('vel_q', units='m/s', rate_name='vel_dqdt', start_name='zero_speed', end_name='fltcond|Utrue_initial', lower=1.5)
            self.connect('flipaccel.vec_out','vel_dqdt')
            # flip the result of the reverse integration again so the flight condition is forward and consistent with everything else
            self.add_subsystem('flipvel', FlipVectorComp(num_nodes=nn, units='m/s', negative=False), promotes_outputs=[('vec_out','fltcond|Utrue')])
            self.connect('vel_q','flipvel.vec_in')
            # now set the time step so that backwards shooting results in the correct 'initial' segment airspeed
            self.add_subsystem('v0constraint',BalanceComp(name='duration',units='s',eq_units='m/s',rhs_name='fltcond|Utrue_initial',lhs_name='takeoff|v1',val=10.,upper=100.,lower=1.),
                                promotes_inputs=['*'],promotes_outputs=['duration'])
        else:
            # forward shooting for these acceleration segments
            ode_integ = self.add_subsystem('ode_integ', Integrator(num_nodes=nn, method='simpson', diff_units='s',time_setup='duration'), promotes_inputs=['*'], promotes_outputs=['*'])
            ode_integ.add_integrand('fltcond|Utrue', units='m/s', rate_name='accel_horiz', start_name='fltcond|Utrue_initial', end_name='fltcond|Utrue_final', lower=1.5)
            if flight_phase == 'v0v1':
                # start from rest; solve duration so the segment ends at V1
                self.connect('zero_speed','fltcond|Utrue_initial')
                self.add_subsystem('v1constraint',BalanceComp(name='duration',units='s',eq_units='m/s',rhs_name='fltcond|Utrue_final',lhs_name='takeoff|v1',val=10.,upper=100.,lower=1.),
                                    promotes_inputs=['*'],promotes_outputs=['duration'])
            elif flight_phase == 'v1vr':
                # solve duration so the segment ends at VR
                self.add_subsystem('vrconstraint',BalanceComp(name='duration',units='s',eq_units='m/s',rhs_name='fltcond|Utrue_final',lhs_name='takeoff|vr',val=5.,upper=12.,lower=0.0),
                                    promotes_inputs=['*'],promotes_outputs=['duration'])
        # range starts at zero only for the very first segment (v0v1)
        if zero_start:
            ode_integ.add_integrand('range', rate_name='fltcond|groundspeed', units='m', zero_start=True)
        else:
            ode_integ.add_integrand('range', rate_name='fltcond|groundspeed', units='m')
class RotationPhase(oc.PhaseGroup):
    """Models the takeoff rotation/liftoff segment by time integration.

    CL is held at CL_rotate_mult * CLmax_TO; horizontal and vertical
    accelerations are integrated (speed, range, vertical speed, altitude)
    and a BalanceComp solves for the segment duration that makes the final
    altitude equal the obstacle height h_obs (35 ft).

    NOTE(review): braking/propulsor_active/throttle are only produced when
    flight_phase == 'rotate'; other values leave those signals unconnected —
    confirm only 'rotate' is a valid option here.
    """
    def initialize(self):
        self.options.declare('num_nodes',default=1)
        self.options.declare('flight_phase',default=None)
        self.options.declare('aircraft_model',default=None)
    def setup(self):
        nn = self.options['num_nodes']
        ivcomp = self.add_subsystem('const_settings', IndepVarComp(), promotes_outputs=["*"])
        # fraction of CLmax_TO flown during rotation
        ivcomp.add_output('CL_rotate_mult', val=np.ones((nn,))*0.83)
        # regulatory obstacle clearance height
        ivcomp.add_output('h_obs', val=35, units='ft')
        flight_phase = self.options['flight_phase']
        if flight_phase == 'rotate':
            # no braking in the air; one propulsor assumed failed; full throttle
            ivcomp.add_output('braking',val=np.zeros((nn,)))
            ivcomp.add_output('propulsor_active',val=np.zeros((nn,)))
            ivcomp.add_output('throttle',val=np.ones((nn,)))
        self.add_subsystem('atmos', ComputeAtmosphericProperties(num_nodes=nn, true_airspeed_in=True), promotes_inputs=['*'], promotes_outputs=['*'])
        self.add_subsystem('gs',Groundspeeds(num_nodes=nn),promotes_inputs=['*'],promotes_outputs=['*'])
        # fltcond|CL = CL_rotate_mult (vector) * CLmax_TO (scalar)
        clcomp = self.add_subsystem('clcomp',ElementMultiplyDivideComp(output_name='fltcond|CL', input_names=['CL_rotate_mult','ac|aero|CLmax_TO'],
                                                                       vec_size=[nn,1], length=1),
                                    promotes_inputs=['*'], promotes_outputs=['*'])
        self.add_subsystem('acmodel',self.options['aircraft_model'](num_nodes=nn,flight_phase=self.options['flight_phase']),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('lift',Lift(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('haccel',HorizontalAcceleration(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('vaccel',VerticalAcceleration(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
        # TODO always starts from zero altitude
        # solve for duration such that final altitude equals h_obs
        self.add_subsystem('clear_obstacle',BalanceComp(name='duration',units='s',val=1,eq_units='m',rhs_name='fltcond|h_final',lhs_name='h_obs',lower=0.1,upper=15),
                            promotes_inputs=['*'],promotes_outputs=['duration'])
        # four Simpson integrators: speed, range, vertical speed, altitude
        int1 = self.add_subsystem('intvelocity', Integrator(num_nodes=nn, method='simpson',diff_units='s',time_setup='duration'), promotes_outputs=['*'], promotes_inputs=['*'])
        int1.add_integrand('fltcond|Utrue', rate_name='accel_horiz', units='m/s', lower=0.1)
        int2 = self.add_subsystem('intrange', Integrator(num_nodes=nn, method='simpson',diff_units='s',time_setup='duration'), promotes_outputs=['*'], promotes_inputs=['*'])
        int2.add_integrand('range', rate_name='fltcond|groundspeed', units='m')
        int3 = self.add_subsystem('intvs', Integrator(num_nodes=nn, method='simpson',diff_units='s',time_setup='duration'), promotes_outputs=['*'], promotes_inputs=['*'])
        int3.add_integrand('fltcond|vs', rate_name='accel_vert', units='m/s', zero_start=True)
        int4 = self.add_subsystem('inth', Integrator(num_nodes=nn, method='simpson',diff_units='s',time_setup='duration'), promotes_outputs=['*'], promotes_inputs=['*'])
        int4.add_integrand('fltcond|h', rate_name='fltcond|vs', units='m', zero_start=True)
class SteadyFlightPhase(oc.PhaseGroup):
    """Models a steady (unaccelerated along-track) flight segment such as
    climb, cruise, or descent.

    Airspeed (fltcond|Ueas), vertical speed (fltcond|vs), and segment
    duration are settable; altitude and range are integrated from vs and
    groundspeed. A BalanceComp solves for the throttle that drives the
    horizontal acceleration to zero, enforcing steady flight.
    """
    def initialize(self):
        self.options.declare('num_nodes',default=1)
        self.options.declare('flight_phase',default=None,desc='Phase of flight e.g. v0v1, cruise')
        self.options.declare('aircraft_model',default=None)
    def setup(self):
        nn = self.options['num_nodes']
        ivcomp = self.add_subsystem('const_settings', IndepVarComp(), promotes_outputs=["*"])
        # all propulsors on, no braking during steady flight
        ivcomp.add_output('propulsor_active', val=np.ones(nn))
        ivcomp.add_output('braking', val=np.zeros(nn))
        # default airspeed / vertical speed; typically overridden by the mission
        ivcomp.add_output('fltcond|Ueas',val=np.ones((nn,))*90, units='m/s')
        ivcomp.add_output('fltcond|vs',val=np.ones((nn,))*1, units='m/s')
        # target for the throttle balance residual below
        ivcomp.add_output('zero_accel',val=np.zeros((nn,)),units='m/s**2')
        integ = self.add_subsystem('ode_integ', Integrator(num_nodes=nn, diff_units='s', time_setup='duration', method='simpson'), promotes_inputs=['fltcond|vs', 'fltcond|groundspeed'], promotes_outputs=['fltcond|h', 'range'])
        integ.add_integrand('fltcond|h', rate_name='fltcond|vs', val=1.0, units='m')
        self.add_subsystem('atmos', ComputeAtmosphericProperties(num_nodes=nn, true_airspeed_in=False), promotes_inputs=['*'], promotes_outputs=['*'])
        self.add_subsystem('gs',Groundspeeds(num_nodes=nn),promotes_inputs=['*'],promotes_outputs=['*'])
        # add the user-defined aircraft model (must provide thrust, drag, weight)
        self.add_subsystem('acmodel',self.options['aircraft_model'](num_nodes=nn, flight_phase=self.options['flight_phase']),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('clcomp',SteadyFlightCL(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('lift',Lift(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('haccel',HorizontalAcceleration(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
        integ.add_integrand('range', rate_name='fltcond|groundspeed', val=1.0, units='m')
        # implicit throttle: solve throttle such that accel_horiz == zero_accel
        self.add_subsystem('steadyflt',BalanceComp(name='throttle',val=np.ones((nn,))*0.5,lower=0.01,upper=2.0,units=None,normalize=False,eq_units='m/s**2',rhs_name='accel_horiz',lhs_name='zero_accel',rhs_val=np.zeros((nn,))),
                            promotes_inputs=['accel_horiz','zero_accel'],promotes_outputs=['throttle'])
# class OldSteadyFlightPhase(Group):
# """
# This component group models steady flight conditions.
# Settable mission parameters include:
# Airspeed (fltcond|Ueas)
# Vertical speed (fltcond|vs)
# Duration of the segment (duration)
# Throttle is set automatically to ensure steady flight
# The BaseAircraftGroup object is passed in.
# The BaseAircraftGroup should be built to accept the following inputs
# and return the following outputs.
# The outputs should be promoted to the top level in the component.
# Inputs
# ------
# range : float
# Total distance travelled (vector, m)
# fltcond|h : float
# Altitude (vector, m)
# fltcond|vs : float
# Vertical speed (vector, m/s)
# fltcond|Ueas : float
# Equivalent airspeed (vector, m/s)
# fltcond|Utrue : float
# True airspeed (vector, m/s)
# fltcond|p : float
# Pressure (vector, Pa)
# fltcond|rho : float
# Density (vector, kg/m3)
# fltcond|T : float
# Temperature (vector, K)
# fltcond|q : float
# Dynamic pressure (vector, Pa)
# fltcond|CL : float
# Lift coefficient (vector, dimensionless)
# throttle : float
# Motor / propeller throttle setting scaled from 0 to 1 or slightly more (vector, dimensionless)
# propulsor_active : float
# If a multi-propulsor airplane, a failure condition should be modeled in the propulsion model by multiplying throttle by propulsor_active.
# It will generally be 1.0 unless a failure condition is being modeled, in which case it will be 0 (vector, dimensionless)
# braking : float
# Brake friction coefficient (default 0.4 for dry runway braking, 0.03 for resistance unbraked)
# Should not be applied in the air or nonphysical effects will result (vector, dimensionless)
# lift : float
# Lift force (vector, N)
# Outputs
# -------
# thrust : float
# Total thrust force produced by all propulsors (vector, N)
# drag : float
# Total drag force in the airplane axis produced by all sources of drag (vector, N)
# weight : float
# Weight (mass, really) of the airplane at each point in time. (vector, kg)
# ac|geom|wing|S_ref
# Wing reference area (scalar, m**2)
# ac|aero|CLmax_TO
# CLmax with flaps in max takeoff position (scalar, dimensionless)
# ac|weights|MTOW
# Maximum takeoff weight (scalar, kg)
# """
# def initialize(self):
# self.options.declare('num_nodes',default=1)
# self.options.declare('flight_phase',default=None,desc='Phase of flight e.g. v0v1, cruise')
# self.options.declare('aircraft_model',default=None)
# def setup(self):
# nn = self.options['num_nodes']
# ivcomp = self.add_subsystem('const_settings', IndepVarComp(), promotes_outputs=["*"])
# ivcomp.add_output('propulsor_active', val=np.ones(nn))
# ivcomp.add_output('braking', val=np.zeros(nn))
# ivcomp.add_output('fltcond|Ueas',val=np.ones((nn,))*90, units='m/s')
# ivcomp.add_output('fltcond|vs',val=np.ones((nn,))*1, units='m/s')
# ivcomp.add_output('zero_accel',val=np.zeros((nn,)),units='m/s**2')
# self.add_subsystem('inth',Integrator(num_nodes=nn, method='simpson', quantity_units='m', diff_units='s', time_setup='duration'),
# promotes_inputs=[('dqdt','fltcond|vs'),'duration',('q_initial','fltcond|h_initial')],promotes_outputs=[('q','fltcond|h'),('q_final','fltcond|h_final')])
# self.add_subsystem('atmos', ComputeAtmosphericProperties(num_nodes=nn, true_airspeed_in=False), promotes_inputs=['*'], promotes_outputs=['*'])
# self.add_subsystem('gs',Groundspeeds(num_nodes=nn),promotes_inputs=['*'],promotes_outputs=['*'])
# # add the user-defined aircraft model
# self.add_subsystem('acmodel',self.options['aircraft_model'](num_nodes=nn, flight_phase=self.options['flight_phase']),promotes_inputs=['*'],promotes_outputs=['*'])
# self.add_subsystem('clcomp',SteadyFlightCL(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
# self.add_subsystem('lift',Lift(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
# self.add_subsystem('haccel',HorizontalAcceleration(num_nodes=nn), promotes_inputs=['*'],promotes_outputs=['*'])
# self.add_subsystem('intrange',Integrator(num_nodes=nn, method='simpson', quantity_units='m', diff_units='s', time_setup='duration'),
# promotes_inputs=[('dqdt','fltcond|groundspeed'),'duration',('q_initial','range_initial')],promotes_outputs=[('q','range'),('q_final','range_final')])
# self.add_subsystem('steadyflt',BalanceComp(name='throttle',val=np.ones((nn,))*0.5,lower=0.01,upper=2.0,units=None,normalize=False,eq_units='m/s**2',rhs_name='accel_horiz',lhs_name='zero_accel',rhs_val=np.zeros((nn,))),
# promotes_inputs=['accel_horiz','zero_accel'],promotes_outputs=['throttle'])
class ClimbAnglePhase(Group):
    """Computes the steady climb angle gamma at takeoff safety speed V2.

    Supported flight_phase options: 'AllEngineClimbAngle' (all propulsors
    active) and 'EngineOutClimbAngle' (propulsor_active = 0). V2 is computed
    as Vstall * v2_vstall_mult and fed to the flight-condition airspeed;
    the aircraft model then supplies thrust and drag for ClimbAngleComp.
    """
    def initialize(self):
        self.options.declare('num_nodes',default=1)
        self.options.declare('flight_phase',default=None,desc='Phase of flight e.g. v0v1, cruise')
        self.options.declare('aircraft_model',default=None)
    def setup(self):
        nn = self.options['num_nodes']
        ivcomp = self.add_subsystem('const_settings', IndepVarComp(), promotes_outputs=["*"])
        # V2 = Vstall * 1.2; evaluated at sea level (h = 0)
        ivcomp.add_output('v2_vstall_mult',val=1.2)
        ivcomp.add_output('fltcond|h',val=np.zeros((nn,)),units='m')
        ivcomp.add_output('fltcond|cosgamma', val=np.ones((nn,)))
        flight_phase = self.options['flight_phase']
        if flight_phase == 'AllEngineClimbAngle':
            ivcomp.add_output('propulsor_active',val=np.ones((nn,)))
            ivcomp.add_output('throttle',val=np.ones((nn,)))
        elif flight_phase == 'EngineOutClimbAngle':
            ivcomp.add_output('propulsor_active',val=np.zeros((nn,)))
            ivcomp.add_output('throttle',val=np.ones((nn,)))
        self.add_subsystem('stall',StallSpeed(),promotes_inputs=[('CLmax','ac|aero|CLmax_TO'),('weight','ac|weights|MTOW'),'ac|geom|wing|S_ref'],promotes_outputs=['*'])
        self.add_subsystem('vrspeed',ElementMultiplyDivideComp(output_name='takeoff|v2',input_names=['Vstall_eas','v2_vstall_mult'],input_units=['m/s',None]),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('atmos', ComputeAtmosphericProperties(num_nodes=nn, true_airspeed_in=False), promotes_inputs=['*'], promotes_outputs=['*'])
        # CL at MTOW and V2 flight conditions
        self.add_subsystem('clcomp',SteadyFlightCL(num_nodes=nn), promotes_inputs=[('weight','ac|weights|MTOW'),'fltcond|*','ac|*'],promotes_outputs=['*'])
        self.connect('takeoff|v2','fltcond|Ueas')
        # the aircraft model needs to provide thrust and drag
        self.add_subsystem('acmodel',self.options['aircraft_model'](num_nodes=nn,flight_phase=self.options['flight_phase']),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('climbangle',ClimbAngleComp(num_nodes=nn),promotes_inputs=['drag',('weight','ac|weights|MTOW'),'thrust'],promotes_outputs=['gamma'])
class TakeoffTransition(ExplicitComponent):
    """Analytic circular-arc model of the takeoff transition segment.

    The aircraft flies a circular arc of radius R = V^2 / ((n-1) g), where n is
    the transition load factor. From the arc geometry:
        s_transition = R*sin(gamma), h_transition = R*(1 - cos(gamma))
    If the arc already reaches the obstacle height before the climb angle is
    established, the chord to h_obstacle is used instead:
        s_transition = sqrt(R^2 - (R - h_obstacle)^2), h_transition = h_obstacle
    t_transition approximates the elapsed time as distance / airspeed.

    Scalar component: src_indices=0 picks the first element of the connected
    vectors for airspeed and climb angle.
    """
    def initialize(self):
        self.options.declare('h_obstacle',default=10.66,desc='Obstacle clearance height in m')
        self.options.declare('load_factor', default=1.2, desc='Load factor during circular arc transition')
    def setup(self):
        self.add_input('fltcond|Utrue', units='m/s', src_indices=0)
        self.add_input('gamma', units='rad', src_indices=0)
        self.add_output('s_transition', units='m')
        self.add_output('h_transition', units='m')
        self.add_output('t_transition',units='s')
        self.declare_partials(['s_transition','h_transition','t_transition'], ['fltcond|Utrue','gamma'])
    def compute(self, inputs, outputs):
        hobs = self.options['h_obstacle']
        nfactor = self.options['load_factor'] - 1  # excess load factor (n - 1)
        g = 9.80665 #m/s^2
        gam = inputs['gamma']
        ut = inputs['fltcond|Utrue']
        R = ut**2/nfactor/g  # arc radius
        st = R*np.sin(gam)
        ht = R*(1-np.cos(gam))
        #alternate formula if the obstacle is cleared during transition
        if ht > hobs:
            st = np.sqrt(R**2-(R-hobs)**2)
            ht = hobs
        outputs['s_transition'] = st
        outputs['h_transition'] = ht
        outputs['t_transition'] = st / ut
    def compute_partials(self, inputs, J):
        # recomputes the branch taken in compute() so the partials stay consistent
        hobs = self.options['h_obstacle']
        nfactor = self.options['load_factor'] - 1
        g = 9.80665 #m/s^2
        gam = inputs['gamma']
        ut = inputs['fltcond|Utrue']
        R = ut**2/nfactor/g
        dRdut = 2*ut/nfactor/g
        st = R*np.sin(gam)
        ht = R*(1-np.cos(gam))
        #alternate formula if the obstacle is cleared during transition
        if ht > hobs:
            # obstacle-limited branch: st depends on R only, ht is clamped
            st = np.sqrt(R**2-(R-hobs)**2)
            dstdut = 1/2/np.sqrt(R**2-(R-hobs)**2) * (2*R*dRdut - 2*(R-hobs)*dRdut)
            dstdgam = 0
            dhtdut = 0
            dhtdgam = 0
        else:
            dhtdut = dRdut*(1-np.cos(gam))
            dhtdgam = R*np.sin(gam)
            dstdut = dRdut*np.sin(gam)
            dstdgam = R*np.cos(gam)
        J['s_transition','gamma'] = dstdgam
        J['s_transition','fltcond|Utrue'] = dstdut
        J['h_transition','gamma'] = dhtdgam
        J['h_transition','fltcond|Utrue'] = dhtdut
        # quotient rule for t = st / ut
        J['t_transition','gamma'] = dstdgam / ut
        J['t_transition','fltcond|Utrue'] = (dstdut * ut - st) / ut ** 2
class TakeoffClimb(ExplicitComponent):
    """Computes the distance and time to climb from the end of the transition
    arc up to the obstacle clearance height.

        s_climb = (h_obstacle - h_transition) / tan(gamma)
        t_climb = s_climb / Utrue

    Scalar component; src_indices=-1 takes the final element of the connected
    gamma and airspeed vectors.
    """
    def initialize(self):
        self.options.declare('h_obstacle',default=10.66,desc='Obstacle clearance height in m')
    def setup(self):
        self.add_input('h_transition', units='m')
        self.add_input('gamma', units='rad',src_indices=-1)
        self.add_input('fltcond|Utrue', units='m/s',src_indices=-1)
        self.add_output('s_climb', units='m')
        self.add_output('t_climb', units='s')
        self.declare_partials(['s_climb'], ['h_transition','gamma'])
        self.declare_partials(['t_climb'], ['h_transition','gamma','fltcond|Utrue'])
    def compute(self, inputs, outputs):
        climb_angle = inputs['gamma']
        # height still to gain after the transition arc
        remaining_height = self.options['h_obstacle'] - inputs['h_transition']
        ground_dist = remaining_height / np.tan(climb_angle)
        outputs['s_climb'] = ground_dist
        outputs['t_climb'] = ground_dist / inputs['fltcond|Utrue']
    def compute_partials(self, inputs, J):
        climb_angle = inputs['gamma']
        speed = inputs['fltcond|Utrue']
        remaining_height = self.options['h_obstacle'] - inputs['h_transition']
        ground_dist = remaining_height / np.tan(climb_angle)
        # d/dgamma [h/tan(gamma)] = -h/sin(gamma)^2, written via tan and sec
        dsc_dgam = -remaining_height / np.tan(climb_angle) ** 2 * (1 / np.cos(climb_angle)) ** 2
        J['s_climb','gamma'] = dsc_dgam
        J['s_climb','h_transition'] = -1 / np.tan(climb_angle)
        J['t_climb','gamma'] = dsc_dgam / speed
        J['t_climb','h_transition'] = J['s_climb','h_transition'] / speed
        J['t_climb','fltcond|Utrue'] = -ground_dist / speed ** 2
class RobustRotationPhase(oc.PhaseGroup):
    """Models rotation and obstacle clearance using the analytic
    TakeoffTransition (circular arc) and TakeoffClimb components rather than
    time integration of the equations of motion.

    Airspeed is linearly interpolated from VR to V2 and altitude from the
    initial height to the obstacle height across the analysis points; the
    segment range and duration are assembled from the transition and climb
    distances/times.
    """
    def initialize(self):
        self.options.declare('num_nodes',default=1)
        self.options.declare('flight_phase',default=None,desc='Phase of flight e.g. v0v1, cruise')
        self.options.declare('aircraft_model',default=None)
        self.options.declare('h_obstacle',default=10.66, )
    def setup(self):
        nn = self.options['num_nodes']
        ivcomp = self.add_subsystem('const_settings', IndepVarComp(), promotes_outputs=["*"])
        flight_phase = self.options['flight_phase']
        if flight_phase == 'rotate':
            # airborne: no braking; engine-out case; full throttle
            ivcomp.add_output('braking',val=np.zeros((nn,)))
            ivcomp.add_output('propulsor_active',val=np.zeros((nn,)))
            ivcomp.add_output('throttle',val=np.ones((nn,)))
        # flight conditions are sea level takeoff, transition speed
        # split off a single node to compute climb angle
        # compute the transition distance and add it to range_initial
        # compute the transition time as a function of the groundspeed
        # provide transition time as duration
        ivcomp.add_output('v2_vstall_mult',val=1.2)
        ivcomp.add_output('vr_vstall_mult',val=1.1)
        ivcomp.add_output('fltcond|vs', val=np.zeros((nn,)),units='m/s')
        ivcomp.add_output('fltcond|cosgamma', val=np.ones((nn,)),units=None)
        ivcomp.add_output('h_obstacle',val=35,units='ft')
        # altitude ramps linearly from h_initial up to the obstacle height
        self.add_subsystem('altitudes',LinearInterpolator(num_nodes=nn, units='m'),promotes_inputs=[('start_val','h_initial')],promotes_outputs=[('vec','fltcond|h')])
        self.connect('h_obstacle','altitudes.end_val')
        self.add_subsystem('stall',StallSpeed(),promotes_inputs=[('CLmax','ac|aero|CLmax_TO'),('weight','ac|weights|MTOW'),'ac|geom|wing|S_ref'],promotes_outputs=['*'])
        self.add_subsystem('vrspeed',ElementMultiplyDivideComp(output_name='takeoff|vr',input_names=['Vstall_eas','vr_vstall_mult'],input_units=['m/s',None]),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('v2speed',ElementMultiplyDivideComp(output_name='takeoff|v2',input_names=['Vstall_eas','v2_vstall_mult'],input_units=['m/s',None]),promotes_inputs=['*'],promotes_outputs=['*'])
        # airspeed ramps linearly from VR to V2
        self.add_subsystem('speeds',LinearInterpolator(num_nodes=nn,units='kn'),promotes_inputs=[('start_val','takeoff|vr'),('end_val','takeoff|v2')],promotes_outputs=[('vec','fltcond|Ueas')])
        self.add_subsystem('atmos', ComputeAtmosphericProperties(num_nodes=nn, true_airspeed_in=False), promotes_inputs=['*'], promotes_outputs=['*'])
        # pretty confident there's a simpler closed form multiple for CL at v2
        self.add_subsystem('clcomp',SteadyFlightCL(num_nodes=nn), promotes_inputs=['weight','fltcond|*','ac|*'],promotes_outputs=['*'])
        self.add_subsystem('acmodel',self.options['aircraft_model'](num_nodes=nn,flight_phase=self.options['flight_phase']),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('climbangle',ClimbAngleComp(num_nodes=nn),promotes_inputs=['drag','weight','thrust'],promotes_outputs=['gamma'])
        self.add_subsystem('transition',TakeoffTransition(),promotes_inputs=['fltcond|Utrue','gamma'],promotes_outputs=['h_transition','s_transition','t_transition'])
        self.add_subsystem('v2climb',TakeoffClimb(),promotes_inputs=['h_transition','gamma','fltcond|Utrue'],promotes_outputs=['s_climb','t_climb'])
        # segment totals: range covered and duration are sums of transition + climb
        self.add_subsystem('tod_final',AddSubtractComp(output_name='range_final',input_names=['range_initial','s_transition','s_climb'],units='m'),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('duration',AddSubtractComp(output_name='duration',input_names=['t_transition','t_climb'],units='s'),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('h_final',AddSubtractComp(output_name='fltcond|h_final',input_names=['h_obstacle'],units='m'),promotes_inputs=['*'],promotes_outputs=['*'])
        self.add_subsystem('ranges',LinearInterpolator(num_nodes=nn,units='m'),promotes_inputs=[('start_val','range_initial'),('end_val','range_final')],promotes_outputs=[('vec','range')])
| true | true |
f72ff337bdeb94f68574412ce3e985fde8a68cf6 | 1,360 | py | Python | edumate/settings/production.py | alfarhanzahedi/edumate | 76ced0063d25431098babb1d163c95c9ddaf3307 | [
"MIT"
] | 1 | 2021-11-28T14:18:16.000Z | 2021-11-28T14:18:16.000Z | edumate/settings/production.py | alfarhanzahedi/edumate | 76ced0063d25431098babb1d163c95c9ddaf3307 | [
"MIT"
] | 1 | 2022-02-10T10:53:12.000Z | 2022-02-10T10:53:12.000Z | edumate/settings/production.py | alfarhanzahedi/edumate | 76ced0063d25431098babb1d163c95c9ddaf3307 | [
"MIT"
] | null | null | null | from kombu.utils.url import safequote
from .base import *
# Production settings: overrides/extends the shared settings in base.py.
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_ROOT = '/var/www/edumate/static/'
STATIC_URL = '/static/'
MEDIA_ROOT = '/var/www/edumate/media/'
MEDIA_URL = '/media/'
# Email
# https://docs.djangoproject.com/en/2.2/topics/email/#email-backends
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = config('EMAIL_HOST')
EMAIL_HOST_USER = config('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = config('SENDGRID_API_KEY')
# NOTE(review): config() presumably returns strings here; EMAIL_PORT and
# EMAIL_USE_TLS likely need cast=int / cast=bool (a non-empty string such as
# "False" is truthy) — TODO confirm against the decouple/config helper used.
EMAIL_PORT = config('EMAIL_PORT')
EMAIL_USE_TLS = config('EMAIL_USE_TLS')
# Azure Blob Storage credentials and django-storages backends
AZURE_STORAGE_KEY = config('AZURE_STORAGE_KEY')
AZURE_STORAGE_ACCOUNT = config('AZURE_STORAGE_ACCOUNT')
INSTALLED_APPS += [
    'storages',
]
AZURE_ACCOUNT_KEY = AZURE_STORAGE_KEY
AZURE_ACCOUNT_NAME = AZURE_STORAGE_ACCOUNT
DEFAULT_FILE_STORAGE = 'edumate.azure.AzureMediaStorage'
STATICFILES_STORAGE = 'edumate.azure.AzureStaticStorage'
STATIC_LOCATION = 'static'
MEDIA_LOCATION = 'media'
AZURE_CUSTOM_DOMAIN = f'{AZURE_ACCOUNT_NAME}.blob.core.windows.net'
# These override the local-path STATIC_URL/MEDIA_URL assigned above, so static
# and media files are served from Azure in production.
STATIC_URL = f'https://{AZURE_CUSTOM_DOMAIN}/{STATIC_LOCATION}/'
MEDIA_URL = f'https://{AZURE_CUSTOM_DOMAIN}/{MEDIA_LOCATION}/'
# Celery broker (Redis)
BROKER_URL = config('CELERY_REDIS_LOCATION')
BROKER_TRANSPORT_OPTIONS = {
    'polling_interval': 10,
    'visibility_timeout': 3600
}
| 27.2 | 68 | 0.775735 | from kombu.utils.url import safequote
from .base import *
STATIC_ROOT = '/var/www/edumate/static/'
STATIC_URL = '/static/'
MEDIA_ROOT = '/var/www/edumate/media/'
MEDIA_URL = '/media/'
= 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = config('EMAIL_HOST')
EMAIL_HOST_USER = config('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = config('SENDGRID_API_KEY')
EMAIL_PORT = config('EMAIL_PORT')
EMAIL_USE_TLS = config('EMAIL_USE_TLS')
AZURE_STORAGE_KEY = config('AZURE_STORAGE_KEY')
AZURE_STORAGE_ACCOUNT = config('AZURE_STORAGE_ACCOUNT')
INSTALLED_APPS += [
'storages',
]
AZURE_ACCOUNT_KEY = AZURE_STORAGE_KEY
AZURE_ACCOUNT_NAME = AZURE_STORAGE_ACCOUNT
DEFAULT_FILE_STORAGE = 'edumate.azure.AzureMediaStorage'
STATICFILES_STORAGE = 'edumate.azure.AzureStaticStorage'
STATIC_LOCATION = 'static'
MEDIA_LOCATION = 'media'
AZURE_CUSTOM_DOMAIN = f'{AZURE_ACCOUNT_NAME}.blob.core.windows.net'
STATIC_URL = f'https://{AZURE_CUSTOM_DOMAIN}/{STATIC_LOCATION}/'
MEDIA_URL = f'https://{AZURE_CUSTOM_DOMAIN}/{MEDIA_LOCATION}/'
BROKER_URL = config('CELERY_REDIS_LOCATION')
BROKER_TRANSPORT_OPTIONS = {
'polling_interval': 10,
'visibility_timeout': 3600
}
| true | true |
f72ff53d421b318c306241599d6708aa4cc0e2a0 | 6,404 | py | Python | test/functional/mempool_persist.py | minblock/Blackcoin | 40cbf6c00d79b2d2d50b0645baa332fc8adc4ba3 | [
"MIT"
] | null | null | null | test/functional/mempool_persist.py | minblock/Blackcoin | 40cbf6c00d79b2d2d50b0645baa332fc8adc4ba3 | [
"MIT"
] | null | null | null | test/functional/mempool_persist.py | minblock/Blackcoin | 40cbf6c00d79b2d2d50b0645baa332fc8adc4ba3 | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test mempool persistence.
By default, bitcoind will dump mempool on shutdown and
then reload it on startup. This can be overridden with
the -persistmempool=0 command line option.
Test is as follows:
- start node0, node1 and node2. node1 has -persistmempool=0
- create 5 transactions on node2 to its own address. Note that these
are not sent to node0 or node1 addresses because we don't want
them to be saved in the wallet.
- check that node0 and node1 have 5 transactions in their mempools
- shutdown all nodes.
- startup node0. Verify that it still has 5 transactions
in its mempool. Shutdown node0. This tests that by default the
mempool is persistent.
- startup node1. Verify that its mempool is empty. Shutdown node1.
This tests that with -persistmempool=0, the mempool is not
dumped to disk when the node is shut down.
- Restart node0 with -persistmempool=0. Verify that its mempool is
empty. Shutdown node0. This tests that with -persistmempool=0,
the mempool is not loaded from disk on start up.
- Restart node0 with -persistmempool. Verify that it has 5
transactions in its mempool. This tests that -persistmempool=0
does not overwrite a previously valid mempool stored on disk.
- Remove node0 mempool.dat and verify savemempool RPC recreates it
and verify that node1 can load it and has 5 transactions in its
mempool.
- Verify that savemempool throws when the RPC is called if
node1 can't write to disk.
"""
from decimal import Decimal
import os
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, wait_until
class MempoolPersistTest(BitcoinTestFramework):
    """Functional test of mempool persistence across node restarts.

    Three nodes: node1 runs with -persistmempool=0 (should never dump/load
    mempool.dat); node0 and node2 use the default persistent behavior. See the
    module docstring for the full scenario walkthrough.
    """
    def set_test_params(self):
        self.num_nodes = 3
        # node1 disables mempool persistence; node0/node2 use defaults
        self.extra_args = [[], ["-persistmempool=0"], []]
    def skip_test_if_missing_module(self):
        self.skip_if_no_wallet()
    def run_test(self):
        chain_height = self.nodes[0].getblockcount()
        assert_equal(chain_height, 200)
        self.log.debug("Mine a single block to get out of IBD")
        self.nodes[0].generate(1)
        self.sync_all()
        self.log.debug("Send 5 transactions from node2 (to its own address)")
        for i in range(5):
            last_txid = self.nodes[2].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("10"))
        node2_balance = self.nodes[2].getbalance()
        self.sync_all()
        self.log.debug("Verify that node0 and node1 have 5 transactions in their mempools")
        assert_equal(len(self.nodes[0].getrawmempool()), 5)
        assert_equal(len(self.nodes[1].getrawmempool()), 5)
        self.log.debug("Prioritize a transaction on node0")
        fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
        assert_equal(fees['base'], fees['modified'])
        self.nodes[0].prioritisetransaction(txid=last_txid, fee_delta=1000)
        fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
        # fee_delta is in satoshis; 1000 sat == 0.00001000 in coin units
        assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])
        self.log.debug("Stop-start the nodes. Verify that node0 has the transactions in its mempool and node1 does not. Verify that node2 calculates its balance correctly after loading wallet transactions.")
        self.stop_nodes()
        # Give this node a head-start, so we can be "extra-sure" that it didn't load anything later
        # Also don't store the mempool, to keep the datadir clean
        self.start_node(1, extra_args=["-persistmempool=0"])
        self.start_node(0)
        self.start_node(2)
        # Give bitcoind a second to reload the mempool
        wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5, timeout=1)
        wait_until(lambda: len(self.nodes[2].getrawmempool()) == 5, timeout=1)
        # The others have loaded their mempool. If node_1 loaded anything, we'd probably notice by now:
        assert_equal(len(self.nodes[1].getrawmempool()), 0)
        self.log.debug('Verify prioritization is loaded correctly')
        fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
        assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])
        # Verify accounting of mempool transactions after restart is correct
        self.nodes[2].syncwithvalidationinterfacequeue() # Flush mempool to wallet
        assert_equal(node2_balance, self.nodes[2].getbalance())
        self.log.debug("Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
        self.stop_nodes()
        self.start_node(0, extra_args=["-persistmempool=0"])
        # Give bitcoind a second to reload the mempool
        time.sleep(1)
        assert_equal(len(self.nodes[0].getrawmempool()), 0)
        self.log.debug("Stop-start node0. Verify that it has the transactions in its mempool.")
        self.stop_nodes()
        self.start_node(0)
        wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5)
        mempooldat0 = os.path.join(self.nodes[0].datadir, 'regtest', 'mempool.dat')
        mempooldat1 = os.path.join(self.nodes[1].datadir, 'regtest', 'mempool.dat')
        self.log.debug("Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it")
        os.remove(mempooldat0)
        self.nodes[0].savemempool()
        assert os.path.isfile(mempooldat0)
        self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 5 transactions")
        os.rename(mempooldat0, mempooldat1)
        self.stop_nodes()
        self.start_node(1, extra_args=[])
        wait_until(lambda: len(self.nodes[1].getrawmempool()) == 5)
        self.log.debug("Prevent blackcoind from writing mempool.dat to disk. Verify that `savemempool` fails")
        # to test the exception we are creating a tmp folder called mempool.dat.new
        # which is an implementation detail that could change and break this test
        mempooldotnew1 = mempooldat1 + '.new'
        os.mkdir(mempooldotnew1)
        assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool)
        os.rmdir(mempooldotnew1)
if __name__ == '__main__':
    MempoolPersistTest().main()
| 47.088235 | 207 | 0.698626 |
from decimal import Decimal
import os
import time
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error, wait_until
class MempoolPersistTest(BitcoinTestFramework):
    """Functional test for mempool persistence across node restarts.

    Exercises the -persistmempool option, the savemempool RPC, and direct
    manipulation of the on-disk mempool.dat file.
    """

    def set_test_params(self):
        # Three nodes; node1 runs with mempool persistence disabled.
        self.num_nodes = 3
        self.extra_args = [[], ["-persistmempool=0"], []]

    def skip_test_if_missing_module(self):
        # Wallet transactions are sent below, so a wallet build is required.
        self.skip_if_no_wallet()

    def run_test(self):
        """Run the persistence scenario end to end (order-dependent steps)."""
        chain_height = self.nodes[0].getblockcount()
        assert_equal(chain_height, 200)
        self.log.debug("Mine a single block to get out of IBD")
        self.nodes[0].generate(1)
        self.sync_all()
        self.log.debug("Send 5 transactions from node2 (to its own address)")
        for i in range(5):
            last_txid = self.nodes[2].sendtoaddress(self.nodes[2].getnewaddress(), Decimal("10"))
        node2_balance = self.nodes[2].getbalance()
        self.sync_all()
        self.log.debug("Verify that node0 and node1 have 5 transactions in their mempools")
        assert_equal(len(self.nodes[0].getrawmempool()), 5)
        assert_equal(len(self.nodes[1].getrawmempool()), 5)
        self.log.debug("Prioritize a transaction on node0")
        fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
        assert_equal(fees['base'], fees['modified'])
        self.nodes[0].prioritisetransaction(txid=last_txid, fee_delta=1000)
        fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
        assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])
        self.log.debug("Stop-start the nodes. Verify that node0 has the transactions in its mempool and node1 does not. Verify that node2 calculates its balance correctly after loading wallet transactions.")
        self.stop_nodes()
        # Also don't store the mempool, to keep the datadir clean
        self.start_node(1, extra_args=["-persistmempool=0"])
        self.start_node(0)
        self.start_node(2)
        # The mempool should have been reloaded from mempool.dat on node0/node2.
        wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5, timeout=1)
        wait_until(lambda: len(self.nodes[2].getrawmempool()) == 5, timeout=1)
        assert_equal(len(self.nodes[1].getrawmempool()), 0)
        self.log.debug('Verify prioritization is loaded correctly')
        fees = self.nodes[0].getmempoolentry(txid=last_txid)['fees']
        assert_equal(fees['base'] + Decimal('0.00001000'), fees['modified'])
        # Verify accounting of mempool transactions after restart is correct
        self.nodes[2].syncwithvalidationinterfacequeue() # Flush mempool to wallet
        assert_equal(node2_balance, self.nodes[2].getbalance())
        self.log.debug("Stop-start node0 with -persistmempool=0. Verify that it doesn't load its mempool.dat file.")
        self.stop_nodes()
        self.start_node(0, extra_args=["-persistmempool=0"])
        # Give the daemon a moment to (not) reload the mempool.
        time.sleep(1)
        assert_equal(len(self.nodes[0].getrawmempool()), 0)
        self.log.debug("Stop-start node0. Verify that it has the transactions in its mempool.")
        self.stop_nodes()
        self.start_node(0)
        wait_until(lambda: len(self.nodes[0].getrawmempool()) == 5)
        mempooldat0 = os.path.join(self.nodes[0].datadir, 'regtest', 'mempool.dat')
        mempooldat1 = os.path.join(self.nodes[1].datadir, 'regtest', 'mempool.dat')
        self.log.debug("Remove the mempool.dat file. Verify that savemempool to disk via RPC re-creates it")
        os.remove(mempooldat0)
        self.nodes[0].savemempool()
        assert os.path.isfile(mempooldat0)
        self.log.debug("Stop nodes, make node1 use mempool.dat from node0. Verify it has 5 transactions")
        os.rename(mempooldat0, mempooldat1)
        self.stop_nodes()
        self.start_node(1, extra_args=[])
        wait_until(lambda: len(self.nodes[1].getrawmempool()) == 5)
        self.log.debug("Prevent blackcoind from writing mempool.dat to disk. Verify that `savemempool` fails")
        # to test the exception we are creating a tmp folder called mempool.dat.new
        # which is an implementation detail that could change and break this test
        mempooldotnew1 = mempooldat1 + '.new'
        os.mkdir(mempooldotnew1)
        assert_raises_rpc_error(-1, "Unable to dump mempool to disk", self.nodes[1].savemempool)
        os.rmdir(mempooldotnew1)
if __name__ == '__main__':
    MempoolPersistTest().main()
| true | true |
f72ff59bda1aa02358d87a57bd001b6bab504743 | 5,288 | py | Python | sentence_transformers/models/Asym.py | arcada-uas/sentence-transformers | 83ec2145dae858049ce38210860f0d75b4979927 | [
"Apache-2.0"
] | null | null | null | sentence_transformers/models/Asym.py | arcada-uas/sentence-transformers | 83ec2145dae858049ce38210860f0d75b4979927 | [
"Apache-2.0"
] | null | null | null | sentence_transformers/models/Asym.py | arcada-uas/sentence-transformers | 83ec2145dae858049ce38210860f0d75b4979927 | [
"Apache-2.0"
] | null | null | null | from torch import Tensor
from torch import nn
from typing import List, Dict
import os
import json
from ..util import import_from_string
from collections import OrderedDict
from typing import List, Dict, Optional, Union, Tuple
class Asym(nn.Sequential):
    def __init__(self, sub_modules: Dict[str, List[nn.Module]], allow_empty_key: bool = True):
        """
        This model allows to create asymmetric SentenceTransformer models, that apply different models depending on the specified input key.

        In the below example, we create two different Dense models for 'query' and 'doc'. Text that is passed as {'query': 'My query'} will
        be passed along along the first Dense model, and text that will be passed as {'doc': 'My document'} will use the other Dense model.

        Note, that when you call encode(), that only inputs of the same type can be encoded. Mixed-Types cannot be encoded.

        Example::
            word_embedding_model = models.Transformer(model_name)
            pooling_model = models.Pooling(word_embedding_model.get_word_embedding_dimension())
            asym_model = models.Asym({'query': [models.Dense(word_embedding_model.get_word_embedding_dimension(), 128)], 'doc': [models.Dense(word_embedding_model.get_word_embedding_dimension(), 128)]})
            model = SentenceTransformer(modules=[word_embedding_model, pooling_model, asym_model])

            model.encode([{'query': 'Q1'}, {'query': 'Q2'}]
            model.encode([{'doc': 'Doc1'}, {'doc': 'Doc2'}]

            #You can train it with InputExample like this. Note, that the order must always be the same:
            train_example = InputExample(texts=[{'query': 'Train query'}, {'doc': 'Document'}], label=1)

        :param sub_modules: Dict in the format str -> List[models]. A single module may also be passed
            instead of a list; it is wrapped into a one-element list. The models in the specified list
            will be applied for input marked with the respective key.
        :param allow_empty_key: If true, inputs without a key can be processed. If false, an exception will be thrown if no key is specified.
        """
        # Normalize every value to a list so that forward()/tokenize()/save()
        # can iterate and index uniformly.  The original code only normalized
        # the copy used for module registration, so passing a bare module
        # crashed later in forward() ('Module' object is not iterable).
        self.sub_modules = {}
        for name, models in sub_modules.items():
            # builtin `list` here: isinstance with typing.List is deprecated.
            if not isinstance(models, list):
                models = [models]
            self.sub_modules[name] = models
        self.allow_empty_key = allow_empty_key

        ordered_dict = OrderedDict()
        for name, models in self.sub_modules.items():
            for idx, model in enumerate(models):
                ordered_dict[name + "-" + str(idx)] = model
        super(Asym, self).__init__(ordered_dict)

    def forward(self, features: Dict[str, Tensor]):
        """Route ``features`` through the sub-models registered for its first text key."""
        if 'text_keys' in features and len(features['text_keys']) > 0:
            text_key = features['text_keys'][0]
            for model in self.sub_modules[text_key]:
                features = model(features)
        elif not self.allow_empty_key:
            raise ValueError('Input did not specify any keys and allow_empty_key is False')
        return features

    def get_sentence_embedding_dimension(self) -> int:
        # The output dimension depends on the key-specific sub-models, so a
        # single dimension cannot be reported for the asymmetric container.
        raise NotImplementedError()

    def save(self, output_path):
        """Save every sub-model and a config.json describing the key -> models mapping."""
        model_lookup = {}
        model_types = {}
        model_structure = {}

        for name, models in self.sub_modules.items():
            model_structure[name] = []
            for model in models:
                # id() keeps shared modules (same object under several keys) unique on disk.
                model_id = str(id(model)) + '_' + type(model).__name__
                model_lookup[model_id] = model
                model_types[model_id] = type(model).__module__
                model_structure[name].append(model_id)

        for model_id, model in model_lookup.items():
            model_path = os.path.join(output_path, str(model_id))
            os.makedirs(model_path, exist_ok=True)
            model.save(model_path)

        with open(os.path.join(output_path, 'config.json'), 'w', encoding='utf8') as fOut:
            json.dump({'types': model_types, 'structure': model_structure,
                       'parameters': {'allow_empty_key': self.allow_empty_key}},
                      fOut, indent=2)

    def tokenize(self, texts: Union[List[str], List[Tuple[str, str]]]):
        """
        Tokenizes a text and maps tokens to token-ids
        """
        if not isinstance(texts[0], dict):
            raise AttributeError("Asym. model requires that texts are passed as dicts: {'key': 'text'}")

        module_key = None
        for lookup in texts:
            text_key, text = next(iter(lookup.items()))
            if module_key is None:
                module_key = text_key
            assert text_key == module_key  # Mixed batches are not allowed
        return self.sub_modules[module_key][0].tokenize(texts)

    @staticmethod
    def load(input_path):
        """Re-create an Asym model from a directory written by :meth:`save`."""
        with open(os.path.join(input_path, 'config.json')) as fIn:
            config = json.load(fIn)

        modules = {}
        for model_id, model_type in config['types'].items():
            module_class = import_from_string(model_type)
            module = module_class.load(os.path.join(input_path, model_id))
            modules[model_id] = module

        model_structure = {}
        for key_name, models_list in config['structure'].items():
            model_structure[key_name] = []
            for model_id in models_list:
                model_structure[key_name].append(modules[model_id])

        model = Asym(model_structure, **config['parameters'])
        return model
| 42.99187 | 202 | 0.637103 | from torch import Tensor
from torch import nn
from typing import List, Dict
import os
import json
from ..util import import_from_string
from collections import OrderedDict
from typing import List, Dict, Optional, Union, Tuple
class Asym(nn.Sequential):
    """Asymmetric container: applies different sub-models depending on the input's text key."""

    def __init__(self, sub_modules: Dict[str, List[nn.Module]], allow_empty_key: bool = True):
        """
        :param sub_modules: Dict mapping a text key (e.g. 'query', 'doc') to the list of
            modules applied to inputs carrying that key.
        :param allow_empty_key: If True, inputs without a key pass through unchanged;
            if False, forward() raises ValueError for key-less inputs.
        """
        # NOTE(review): self.sub_modules keeps the caller's values as-is; only the
        # registration copy below wraps bare modules in a list, so passing a single
        # module (not a list) will fail later in forward()/tokenize().
        self.sub_modules = sub_modules
        self.allow_empty_key = allow_empty_key
        ordered_dict = OrderedDict()
        for name, models in sub_modules.items():
            # NOTE(review): isinstance with typing.List is deprecated; builtin list preferred.
            if not isinstance(models, List):
                models = [models]
            for idx, model in enumerate(models):
                ordered_dict[name+"-"+str(idx)] = model
        super(Asym, self).__init__(ordered_dict)
    def forward(self, features: Dict[str, Tensor]):
        """Route ``features`` through the sub-models registered for its first text key."""
        if 'text_keys' in features and len(features['text_keys']) > 0:
            text_key = features['text_keys'][0]
            for model in self.sub_modules[text_key]:
                features = model(features)
        elif not self.allow_empty_key:
            raise ValueError('Input did not specify any keys and allow_empty_key is False')
        return features
    def get_sentence_embedding_dimension(self) -> int:
        # Output dimension depends on the key-specific sub-models.
        raise NotImplementedError()
    def save(self, output_path):
        """Save every sub-model and a config.json describing the key -> models mapping."""
        model_lookup = {}
        model_types = {}
        model_structure = {}
        for name, models in self.sub_modules.items():
            model_structure[name] = []
            for model in models:
                # id() keeps shared module objects unique on disk.
                model_id = str(id(model))+'_'+type(model).__name__
                model_lookup[model_id] = model
                model_types[model_id] = type(model).__module__
                model_structure[name].append(model_id)
        for model_id, model in model_lookup.items():
            model_path = os.path.join(output_path, str(model_id))
            os.makedirs(model_path, exist_ok=True)
            model.save(model_path)
        with open(os.path.join(output_path, 'config.json'), 'w', encoding='utf8') as fOut:
            json.dump({'types': model_types, 'structure': model_structure,
                       'parameters': {'allow_empty_key': self.allow_empty_key}},
                      fOut, indent=2)
    def tokenize(self, texts: Union[List[str], List[Tuple[str, str]]]):
        """Tokenize dict-shaped inputs; all items must carry the same key (no mixed batches)."""
        if not isinstance(texts[0], dict):
            raise AttributeError("Asym. model requires that texts are passed as dicts: {'key': 'text'}")
        module_key = None
        for lookup in texts:
            text_key, text = next(iter(lookup.items()))
            if module_key is None:
                module_key = text_key
            assert text_key == module_key  # mixed batches are not allowed
        return self.sub_modules[module_key][0].tokenize(texts)
    @staticmethod
    def load(input_path):
        """Re-create an Asym model from a directory written by save()."""
        with open(os.path.join(input_path, 'config.json')) as fIn:
            config = json.load(fIn)
        modules = {}
        for model_id, model_type in config['types'].items():
            module_class = import_from_string(model_type)
            module = module_class.load(os.path.join(input_path, model_id))
            modules[model_id] = module
        model_structure = {}
        for key_name, models_list in config['structure'].items():
            model_structure[key_name] = []
            for model_id in models_list:
                model_structure[key_name].append(modules[model_id])
        model = Asym(model_structure, **config['parameters'])
        return model
| true | true |
f72ff810f29418ba62e98a1ce7f5672ac06ff9c2 | 1,005 | py | Python | CI/create_release_notes.py | tdhooks/sarus | 64d3152e810b1081e6dbe7b3587e8e5948c3268e | [
"BSD-3-Clause"
] | 84 | 2019-04-30T17:35:14.000Z | 2022-03-20T21:15:41.000Z | CI/create_release_notes.py | tdhooks/sarus | 64d3152e810b1081e6dbe7b3587e8e5948c3268e | [
"BSD-3-Clause"
] | 26 | 2019-11-07T19:24:36.000Z | 2022-02-10T14:18:58.000Z | CI/create_release_notes.py | tdhooks/sarus | 64d3152e810b1081e6dbe7b3587e8e5948c3268e | [
"BSD-3-Clause"
] | 10 | 2019-05-24T02:20:02.000Z | 2022-03-20T14:17:29.000Z | def create_release_notes():
import os
path = os.path.dirname(os.path.abspath(__file__))
changelog_filename = os.path.join(path, "../CHANGELOG.md")
release_notes_filename = os.path.join(path, "../RELEASE_NOTES.md")
with open(changelog_filename, "r") as changelog:
with open(release_notes_filename, "w") as release_notes:
started = False
# Search top-most release notes
while not started:
line = changelog.readline()
if not line:
break
if line.startswith("## ["):
started = True
while started:
# reduce title indentation
if line.startswith("##"):
line = line[1:]
release_notes.write(line)
line = changelog.readline()
if not line or line.startswith("## ["):
break
if __name__ == "__main__":
create_release_notes() | 31.40625 | 70 | 0.528358 | def create_release_notes():
import os
path = os.path.dirname(os.path.abspath(__file__))
changelog_filename = os.path.join(path, "../CHANGELOG.md")
release_notes_filename = os.path.join(path, "../RELEASE_NOTES.md")
with open(changelog_filename, "r") as changelog:
with open(release_notes_filename, "w") as release_notes:
started = False
while not started:
line = changelog.readline()
if not line:
break
if line.startswith("## ["):
started = True
while started:
if line.startswith("##"):
line = line[1:]
release_notes.write(line)
line = changelog.readline()
if not line or line.startswith("## ["):
break
if __name__ == "__main__":
create_release_notes() | true | true |
f72ff87bdabef8d7929fc9db97e276b84505f580 | 1,196 | py | Python | zoomapi/components/chat_messages.py | zihuaweng/zoomapi | 0bd9e57f1b2469b1071e8060feb772748882c175 | [
"Apache-2.0"
] | null | null | null | zoomapi/components/chat_messages.py | zihuaweng/zoomapi | 0bd9e57f1b2469b1071e8060feb772748882c175 | [
"Apache-2.0"
] | null | null | null | zoomapi/components/chat_messages.py | zihuaweng/zoomapi | 0bd9e57f1b2469b1071e8060feb772748882c175 | [
"Apache-2.0"
] | null | null | null | """Zoom.us REST API Python Client -- Chat Messages component"""
from zoomapi.util import require_keys, Throttled
from zoomapi.components import base
class ChatMessagesComponentV2(base.BaseComponent):
    """Component dealing with all chat messages related matters"""

    @Throttled
    def list(self, **kwargs):
        """List a user's chat messages; ``user_id`` is required in kwargs."""
        require_keys(kwargs, "user_id")
        endpoint = "/chat/users/{}/messages".format(kwargs.get("user_id"))
        return self.get_request(endpoint, params=kwargs)

    @Throttled
    def post(self, **kwargs):
        """Post a chat message for the current user; ``message`` is required."""
        require_keys(kwargs, "message")
        return self.post_request("/chat/users/me/messages", data=kwargs)

    @Throttled
    def send(self, **kwargs):
        """Send a chat message (same endpoint as :meth:`post`); ``message`` is required."""
        require_keys(kwargs, "message")
        return self.post_request("/chat/users/me/messages", data=kwargs)

    @Throttled
    def update(self, **kwargs):
        """Edit an existing chat message; expects ``message`` and ``messageId`` in kwargs."""
        require_keys(kwargs, "message")
        endpoint = "/chat/users/me/messages/{}".format(kwargs.get("messageId"))
        return self.put_request(endpoint, data=kwargs)

    @Throttled
    def delete(self, **kwargs):
        """Delete a chat message identified by ``messageId``."""
        require_keys(kwargs, "messageId")
        endpoint = "/chat/users/me/messages/{}".format(kwargs.get("messageId"))
        return self.delete_request(endpoint, params=kwargs)
| 34.171429 | 111 | 0.669732 |
from zoomapi.util import require_keys, Throttled
from zoomapi.components import base
class ChatMessagesComponentV2(base.BaseComponent):
    """Component dealing with all chat-message related API calls."""

    @Throttled
    def list(self, **kwargs):
        """List a user's chat messages; ``user_id`` is required in kwargs."""
        require_keys(kwargs, "user_id")
        return self.get_request(
            "/chat/users/{}/messages".format(kwargs.get("user_id")), params=kwargs
        )
    @Throttled
    def post(self, **kwargs):
        """Post a chat message for the current user; ``message`` is required."""
        require_keys(kwargs, "message")
        return self.post_request("/chat/users/me/messages", data=kwargs)
    @Throttled
    def send(self, **kwargs):
        """Send a chat message (same endpoint as ``post``); ``message`` is required."""
        require_keys(kwargs, "message")
        return self.post_request("/chat/users/me/messages", data=kwargs)
    @Throttled
    def update(self, **kwargs):
        """Edit an existing chat message; expects ``message`` and ``messageId`` in kwargs."""
        require_keys(kwargs, "message")
        return self.put_request("/chat/users/me/messages/{}".format(kwargs.get("messageId")), data=kwargs)
    @Throttled
    def delete(self, **kwargs):
        """Delete a chat message identified by ``messageId``."""
        require_keys(kwargs, "messageId")
        return self.delete_request("/chat/users/me/messages/{}".format(kwargs.get("messageId")), params=kwargs)
| true | true |
f72ff90f4cf8c111f5cfabc254f6f2eba07babf7 | 23,978 | py | Python | pcdet/utils/loss_utils.py | jialeli1/From-Voxel-to-Point | b4dba9c4e9cd83e04199d9224f6ec7bf06b71f93 | [
"Apache-2.0"
] | 26 | 2021-07-14T10:55:14.000Z | 2022-02-25T05:46:42.000Z | pcdet/utils/loss_utils.py | jialeli1/From-Voxel-to-Point | b4dba9c4e9cd83e04199d9224f6ec7bf06b71f93 | [
"Apache-2.0"
] | 2 | 2021-07-12T09:58:00.000Z | 2021-12-14T13:04:47.000Z | pcdet/utils/loss_utils.py | jialeli1/From-Voxel-to-Point | b4dba9c4e9cd83e04199d9224f6ec7bf06b71f93 | [
"Apache-2.0"
] | 4 | 2021-08-22T16:41:35.000Z | 2022-03-18T06:54:52.000Z | import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from . import box_utils
from . import center_utils
try:
from itertools import ifilterfalse
except ImportError: # py3k
from itertools import filterfalse as ifilterfalse
class SigmoidFocalClassificationLoss(nn.Module):
    """Sigmoid focal cross-entropy loss (https://arxiv.org/abs/1708.02002), no reduction."""

    def __init__(self, gamma: float = 2.0, alpha: float = 0.25):
        """
        Args:
            gamma: focusing parameter that down-weights easy examples.
            alpha: balancing parameter between positive and negative examples.
        """
        super().__init__()
        self.alpha = alpha
        self.gamma = gamma

    @staticmethod
    def sigmoid_cross_entropy_with_logits(input: torch.Tensor, target: torch.Tensor):
        """Numerically stable sigmoid BCE: max(x, 0) - x * z + log(1 + exp(-|x|)).

        Args:
            input: (B, #anchors, #classes) predicted logits.
            target: (B, #anchors, #classes) one-hot targets.
        Returns:
            (B, #anchors, #classes) element-wise BCE, un-reduced.
        """
        return torch.clamp(input, min=0) - input * target + \
            torch.log1p(torch.exp(-torch.abs(input)))

    def forward(self, input: torch.Tensor, target: torch.Tensor, weights: torch.Tensor):
        """
        Args:
            input: (B, #anchors, #classes) predicted logits.
            target: (B, #anchors, #classes) one-hot classification targets.
            weights: (B, #anchors) anchor-wise weights.
        Returns:
            (B, #anchors, #classes) weighted focal loss, un-reduced.
        """
        pred_prob = torch.sigmoid(input)
        alpha_factor = target * self.alpha + (1 - target) * (1 - self.alpha)
        # Probability assigned to the wrong class: small for easy examples,
        # so pow(., gamma) focuses training on the hard ones.
        wrong_prob = target * (1.0 - pred_prob) + (1.0 - target) * pred_prob
        modulating = alpha_factor * torch.pow(wrong_prob, self.gamma)
        raw_bce = self.sigmoid_cross_entropy_with_logits(input, target)
        loss = modulating * raw_bce

        needs_expand = weights.shape.__len__() == 2 or \
            (weights.shape.__len__() == 1 and target.shape.__len__() == 2)
        if needs_expand:
            weights = weights.unsqueeze(-1)
        assert weights.shape.__len__() == loss.shape.__len__()
        return loss * weights
class WeightedSmoothL1Loss(nn.Module):
    """
    Code-wise Weighted Smooth L1 Loss modified based on fvcore.nn.smooth_l1_loss
    https://github.com/facebookresearch/fvcore/blob/master/fvcore/nn/smooth_l1_loss.py
                  | 0.5 * x ** 2 / beta   if abs(x) < beta
    smoothl1(x) = |
                  | abs(x) - 0.5 * beta   otherwise,
    where x = input - target.
    """
    def __init__(self, beta: float = 1.0 / 9.0, code_weights: list = None):
        """
        Args:
            beta: Scalar float.
                L1 to L2 change point.
                For beta values < 1e-5, L1 loss is computed.
            code_weights: (#codes) float list if not None.
                Code-wise weights.
        """
        super(WeightedSmoothL1Loss, self).__init__()
        self.beta = beta
        # Bug fix: always define the attribute. forward() checks
        # `self.code_weights is not None`, which previously raised
        # AttributeError whenever code_weights was omitted.
        if code_weights is not None:
            self.code_weights = np.array(code_weights, dtype=np.float32)
            self.code_weights = torch.from_numpy(self.code_weights).cuda()
        else:
            self.code_weights = None

    @staticmethod
    def smooth_l1_loss(diff, beta):
        """Element-wise smooth-L1 of `diff`; degenerates to plain L1 for tiny beta."""
        if beta < 1e-5:
            loss = torch.abs(diff)
        else:
            n = torch.abs(diff)
            loss = torch.where(n < beta, 0.5 * n ** 2 / beta, n - 0.5 * beta)
        return loss

    def forward(self, input: torch.Tensor, target: torch.Tensor, weights: torch.Tensor = None):
        """
        Args:
            input: (B, #anchors, #codes) float tensor.
                Encoded predicted locations of objects.
            target: (B, #anchors, #codes) float tensor.
                Regression targets.
            weights: (B, #anchors) float tensor if not None.
        Returns:
            loss: (B, #anchors, #codes) float tensor.
                Weighted smooth l1 loss without reduction.
        """
        target = torch.where(torch.isnan(target), input, target)  # ignore nan targets
        diff = input - target
        # code-wise weighting
        if self.code_weights is not None:
            diff = diff * self.code_weights.view(1, 1, -1)
        loss = self.smooth_l1_loss(diff, self.beta)
        # anchor-wise weighting
        if weights is not None:
            assert weights.shape[0] == loss.shape[0] and weights.shape[1] == loss.shape[1]
            loss = loss * weights.unsqueeze(-1)
        return loss
class WeightedL1Loss(nn.Module):
    """Code-wise weighted L1 regression loss without reduction."""

    def __init__(self, code_weights: list = None):
        """
        Args:
            code_weights: (#codes) float list if not None.
                Code-wise weights.
        """
        super(WeightedL1Loss, self).__init__()
        # Bug fix: always define the attribute. forward() checks
        # `self.code_weights is not None`, which previously raised
        # AttributeError when the default (None) was used.
        if code_weights is not None:
            self.code_weights = np.array(code_weights, dtype=np.float32)
            self.code_weights = torch.from_numpy(self.code_weights).cuda()
        else:
            self.code_weights = None

    def forward(self, input: torch.Tensor, target: torch.Tensor, weights: torch.Tensor = None):
        """
        Args:
            input: (B, #anchors, #codes) float tensor.
                Encoded predicted locations of objects.
            target: (B, #anchors, #codes) float tensor.
                Regression targets.
            weights: (B, #anchors) float tensor if not None.
        Returns:
            loss: (B, #anchors, #codes) float tensor.
                Weighted L1 loss without reduction.
        """
        target = torch.where(torch.isnan(target), input, target)  # ignore nan targets
        diff = input - target
        # code-wise weighting
        if self.code_weights is not None:
            diff = diff * self.code_weights.view(1, 1, -1)
        loss = torch.abs(diff)
        # anchor-wise weighting
        if weights is not None:
            assert weights.shape[0] == loss.shape[0] and weights.shape[1] == loss.shape[1]
            loss = loss * weights.unsqueeze(-1)
        return loss
class WeightedCrossEntropyLoss(nn.Module):
    """
    Adapter around PyTorch's cross-entropy: converts one-hot, channel-last
    inputs to the layout F.cross_entropy expects and applies anchor-wise
    weights. No reduction is performed.
    """

    def __init__(self):
        super(WeightedCrossEntropyLoss, self).__init__()

    def forward(self, input: torch.Tensor, target: torch.Tensor, weights: torch.Tensor):
        """
        Args:
            input: (B, #anchors, #classes) float tensor of predicted logits.
            target: (B, #anchors, #classes) one-hot classification targets.
            weights: (B, #anchors) anchor-wise weights.
        Returns:
            (B, #anchors) float tensor: weighted, un-reduced cross entropy.
        """
        class_indices = target.argmax(dim=-1)   # one-hot -> class index
        logits = input.transpose(1, 2)          # (B, #classes, #anchors) for F.cross_entropy
        return F.cross_entropy(logits, class_indices, reduction='none') * weights
def get_corner_loss_lidar(pred_bbox3d: torch.Tensor, gt_bbox3d: torch.Tensor):
    """
    Corner-alignment loss between predicted and GT boxes, heading-agnostic:
    for each box the distance is taken against the GT corners and against the
    GT corners with the heading flipped by pi, keeping the minimum.

    Args:
        pred_bbox3d: (N, 7) float Tensor.
        gt_bbox3d: (N, 7) float Tensor.
    Returns:
        corner_loss: (N) float Tensor, per-box mean over the 8 corners.
    """
    assert pred_bbox3d.shape[0] == gt_bbox3d.shape[0]

    pred_corners = box_utils.boxes_to_corners_3d(pred_bbox3d)
    gt_corners = box_utils.boxes_to_corners_3d(gt_bbox3d)

    # Evaluate the GT box rotated by pi as well, so that a heading flipped by
    # 180 degrees is not penalized here (direction is supervised elsewhere).
    gt_flipped = gt_bbox3d.clone()
    gt_flipped[:, 6] += np.pi
    gt_corners_flipped = box_utils.boxes_to_corners_3d(gt_flipped)

    dist_direct = torch.norm(pred_corners - gt_corners, dim=2)
    dist_flipped = torch.norm(pred_corners - gt_corners_flipped, dim=2)
    corner_dist = torch.min(dist_direct, dist_flipped)  # (N, 8)

    corner_loss = WeightedSmoothL1Loss.smooth_l1_loss(corner_dist, beta=1.0)  # (N, 8)
    return corner_loss.mean(dim=1)
def get_corner_loss_mse(pred_bbox3d: torch.Tensor, gt_bbox3d: torch.Tensor):
    """
    MSE corner loss: for each of the x/y/z axes, the mean squared corner error
    over all boxes and corners; the three per-axis means are summed.

    Args:
        pred_bbox3d: (N, 7) float Tensor.
        gt_bbox3d: (N, 7) float Tensor.
    Returns:
        corner_loss: scalar float Tensor.
    """
    assert pred_bbox3d.shape[0] == gt_bbox3d.shape[0]

    pred_corners = box_utils.boxes_to_corners_3d(pred_bbox3d)  # (N, 8, 3)
    gt_corners = box_utils.boxes_to_corners_3d(gt_bbox3d)      # (N, 8, 3)

    # Per-axis mean over N boxes * 8 corners, accumulated over x, y, z.
    corner_loss = 0
    for axis in range(3):
        corner_loss = corner_loss + F.mse_loss(input=pred_corners[:, :, axis],
                                               target=gt_corners[:, :, axis])
    return corner_loss
def get_iouscore_loss_bce(iou_preds, iou_gts, iou_fg_thresh=0.75, iou_bg_thresh=0.25):
    """
    Binary cross-entropy loss on predicted IoU confidence scores.

    The soft target is 1 above iou_fg_thresh, 0 below iou_bg_thresh, and
    linearly interpolated in between.

    Args:
        iou_preds: (N,) raw (pre-sigmoid) IoU score predictions.
        iou_gts: (N,) IoU of each prediction with its matched GT box.
        iou_fg_thresh: IoUs above this count as full foreground (target 1).
        iou_bg_thresh: IoUs below this count as full background (target 0).
    Returns:
        loss_iouscore: scalar float Tensor, mean BCE over valid entries.
    """
    fg_mask = iou_gts > iou_fg_thresh
    bg_mask = iou_gts < iou_bg_thresh
    interval_mask = (fg_mask == 0) & (bg_mask == 0)

    # Soft labels: 1 for foreground, 0 for background, linear ramp in between.
    soft_labels = (fg_mask > 0).float()
    soft_labels[interval_mask] = \
        (iou_gts[interval_mask] - iou_bg_thresh) / (iou_fg_thresh - iou_bg_thresh)

    per_sample = F.binary_cross_entropy(torch.sigmoid(iou_preds), soft_labels.float(),
                                        reduction='none')
    # Labels built above are always >= 0, so this mask is all-ones here; it is
    # kept for parity with ROI-head code where labels can be -1 (ignored).
    valid_mask = (soft_labels >= 0).float()
    loss_iouscore = (per_sample * valid_mask).sum() / torch.clamp(valid_mask.sum(), min=1.0)
    return loss_iouscore
def get_rot_binres_loss(pred_reg, reg_label, num_head_bin, get_ry_fine=False):
    """
    Bin-based heading (ry) regression loss, following PointRCNN
    (https://arxiv.org/abs/1812.04244): the angle range is split into
    ``num_head_bin`` bins; a cross-entropy loss supervises the bin index and a
    smooth-L1 loss supervises the normalized residual within the chosen bin.

    NOTE: allocates the one-hot tensor with torch.cuda.FloatTensor, so this
    function requires CUDA tensors.

    :param pred_reg: (N, C) predicted regression, laid out as
        [bin logits (num_head_bin) | bin residuals (num_head_bin) | ...]
    :param reg_label: (N, 1) ground-truth heading angle ry.
    :param num_head_bin: number of discrete heading bins.
    :param get_ry_fine: must be False for one-stage usage (asserted below).
    :return: (angle_loss, reg_loss_dict) where angle_loss is the scalar total
        and reg_loss_dict holds the bin/res components.
    """
    reg_loss_dict = {}
    # angle loss
    # Channel layout: [ry_bin_l, ry_bin_r) are bin logits, [ry_res_l, ry_res_r)
    # are the per-bin residual predictions.
    start_offset = 0
    ry_bin_l, ry_bin_r = start_offset, start_offset + num_head_bin
    ry_res_l, ry_res_r = ry_bin_r, ry_bin_r + num_head_bin
    start_offset = ry_res_r

    ry_label = reg_label.squeeze(dim=-1)

    if get_ry_fine:
        # Fine mode splits only a pi/2 range; unsupported in the one-stage
        # pipeline, hence the assert.
        assert False, "one-stage should not get_ry_fine."
        # divide pi/2 into several bins
        angle_per_class = (np.pi / 2) / num_head_bin
        ry_label = ry_label % (2 * np.pi)  # 0 ~ 2pi
        opposite_flag = (ry_label > np.pi * 0.5) & (ry_label < np.pi * 1.5)
        ry_label[opposite_flag] = (ry_label[opposite_flag] + np.pi) % (2 * np.pi)  # (0 ~ pi/2, 3pi/2 ~ 2pi)
        shift_angle = (ry_label + np.pi * 0.5) % (2 * np.pi)  # (0 ~ pi)

        shift_angle = torch.clamp(shift_angle - np.pi * 0.25, min=1e-3, max=np.pi * 0.5 - 1e-3)  # (0, pi/2)

        # bin center is (5, 10, 15, ..., 85)
        ry_bin_label = (shift_angle / angle_per_class).floor().long()
        ry_res_label = shift_angle - (ry_bin_label.float() * angle_per_class + angle_per_class / 2)
        ry_res_norm_label = ry_res_label / (angle_per_class / 2)
    else:
        # divide 2pi into several bins; shift by half a bin so bin centers
        # align with multiples of angle_per_class.
        angle_per_class = (2 * np.pi) / num_head_bin
        heading_angle = ry_label % (2 * np.pi)  # 0 ~ 2pi

        shift_angle = (heading_angle + angle_per_class / 2) % (2 * np.pi)
        ry_bin_label = (shift_angle / angle_per_class).floor().long()
        # Residual within the bin, normalized to roughly [-1, 1].
        ry_res_label = shift_angle - (ry_bin_label.float() * angle_per_class + angle_per_class / 2)
        ry_res_norm_label = ry_res_label / (angle_per_class / 2)

    # One-hot selection so that only the residual of the GT bin is supervised.
    ry_bin_onehot = torch.cuda.FloatTensor(ry_bin_label.size(0), num_head_bin).zero_()
    ry_bin_onehot.scatter_(1, ry_bin_label.view(-1, 1).long(), 1)
    loss_ry_bin = F.cross_entropy(pred_reg[:, ry_bin_l:ry_bin_r], ry_bin_label)
    loss_ry_res = F.smooth_l1_loss((pred_reg[:, ry_res_l: ry_res_r] * ry_bin_onehot).sum(dim=1), ry_res_norm_label)

    reg_loss_dict['loss_ry_bin'] = loss_ry_bin.item()
    reg_loss_dict['loss_ry_res'] = loss_ry_res.item()
    angle_loss = loss_ry_bin + loss_ry_res

    # Total regression loss
    reg_loss_dict['loss_angle'] = angle_loss

    return angle_loss, reg_loss_dict
class CenterNetFocalLoss(nn.Module):
    """Penalty-reduced Gaussian focal loss for CenterNet-style heatmaps (CornerNet variant)."""

    def __init__(self, gamma=4, alpha=2):
        """
        Args:
            gamma: exponent on (1 - gt) that down-weights pixels near GT centers.
            alpha: focal exponent applied to the prediction error.
        """
        super().__init__()
        self.gamma = gamma
        self.alpha = alpha

    def _sigmoid(self, x):
        # Clamp away from exactly 0/1 so the log() terms below stay finite.
        # Must not use the in-place sigmoid_ here (it would modify the input).
        return torch.clamp(torch.sigmoid(x), min=1e-4, max=1 - 1e-4)

    def _neg_loss(self, pred, gt):
        """CornerNet focal loss.

        Args:
            pred: (batch x c x h x w) heatmap probabilities, already clamped to (0, 1).
            gt: (batch x c x h x w) Gaussian-splatted target heatmap; exactly 1 at centers.
        Returns:
            scalar loss, normalized by the number of positive (center) pixels.
        """
        pos_mask = gt.eq(1).float()
        neg_mask = gt.lt(1).float()
        # Pixels close to a center (gt near 1) are penalized less.
        neg_weights = torch.pow(1 - gt, self.gamma)

        pos_term = torch.log(pred) * torch.pow(1 - pred, self.alpha) * pos_mask
        neg_term = torch.log(1 - pred) * torch.pow(pred, self.alpha) * neg_weights * neg_mask

        num_pos = pos_mask.sum()
        pos_sum = pos_term.sum()
        neg_sum = neg_term.sum()

        if num_pos == 0:
            # No annotated centers: only the background term contributes.
            return -neg_sum
        return -(pos_sum + neg_sum) / num_pos

    def forward(self, out, target):
        """Clamp-sigmoid the raw logits and return the scalar focal loss."""
        return self._neg_loss(self._sigmoid(out), target)
class CenterNetResLoss(nn.Module):
    """Residual regression loss on CenterNet-style dense maps, gathered at object centers."""

    def __init__(self, cfg):
        super(CenterNetResLoss, self).__init__()
        # 'smooth-l1' | 'l1' | 'balanced_l1' (see forward()).
        self.res_func_type = cfg['res_func']
    def forward(self, output, mask, ind, target):
        """
        Args:
            output: torch.Size([B, C, 152, 152])
            mask: torch.Size([B, max_objs])
            ind: torch.Size([B, max_objs])
            target: torch.Size([B, max_objs, C])
        Returns:
            reduced and weighted loss term.
        """
        # Gather per-object predictions at the target center indices.
        pred = center_utils._transpose_and_gather_feat(output, ind) # (B, max_objs, C)
        if mask.sum():
            # 1. flatten.
            pred_flat = pred.view(-1, pred.shape[-1]) #(B*max_objs, C)
            target_flat = target.view(-1, target.shape[-1]) #(B*max_objs, C)
            mask_flat = mask.view(-1).bool() #(B*max_objs)
            # 2. valid select
            pred_valid = pred_flat[mask_flat] #(num_valid, C)
            target_valid = target_flat[mask_flat] #(num_valid, C)
            # 3. un-reduced loss term
            if self.res_func_type == 'smooth-l1':
                loss = F.smooth_l1_loss(pred_valid, target_valid, reduction='none')
            elif self.res_func_type == 'l1':
                loss = F.l1_loss(pred_valid, target_valid, reduction='none')
            elif self.res_func_type == 'balanced_l1':
                # NOTE(review): get_balanced_l1_loss is defined elsewhere in this module.
                loss = get_balanced_l1_loss(pred_valid, target_valid)
            else:
                raise NotImplementedError
            # mean for num_obj_dims, sum for channel_dims
            # (num_valid, C) -> (C) -> ()
            loss = loss.mean(dim=0).sum()
        else:
            # No valid objects in the batch -> zero loss.
            loss = 0.
        return loss
class CenterNetRotBinResLoss(nn.Module):
    """Bin/residual heading loss on CenterNet-style dense rotation maps."""

    def __init__(self, cfg):
        super(CenterNetRotBinResLoss, self).__init__()
        # Number of discrete heading bins consumed by get_rot_binres_loss().
        self.num_head_bin = cfg['num_bins']
    def forward(self, output, mask, ind, target):
        """
        Args:
            output: torch.Size([B, C, 152, 152])
            mask: torch.Size([B, max_objs])
            ind: torch.Size([B, max_objs])
            target: torch.Size([B, max_objs, C])
        Returns:
            reduced and weighted loss term.
        """
        # Gather per-object rotation predictions at the target center indices.
        pred = center_utils._transpose_and_gather_feat(output, ind) # torch.Size([1, 500, 2])
        if mask.sum():
            # 1. flatten
            pred_flat = pred.view(-1, pred.shape[-1]) # (B*max_objs, C)
            target_flat = target.view(-1, target.shape[-1]) # (B*max_objs, 1)
            mask_flat = mask.view(-1).bool() # (B*max_objs)
            # 2. valid select
            pred_valid = pred_flat[mask_flat] # (num_valid, C)
            target_valid = target_flat[mask_flat] # (num_valid, 1)
            # 3. return the reduced rot loss term.
            loss, _ = get_rot_binres_loss(pred_valid, target_valid, num_head_bin=self.num_head_bin)
        else:
            # No valid objects in the batch -> zero loss.
            loss = 0.
        return loss
def lovasz_softmax(probas, labels, classes='present', per_image=False, ignore=None):
    """
    Multi-class Lovasz-Softmax loss.
    NOTE probas should already have been passed through softmax.
    probas: [B, C, H, W] Variable, class probabilities at each prediction (between 0 and 1).
            Interpreted as binary (sigmoid) output with outputs of size [B, H, W].
    labels: [B, H, W] Tensor, ground truth labels (between 0 and C - 1)
    classes: 'all' for all, 'present' for classes present in labels, or a list of classes to average.
    per_image: compute the loss per image instead of per batch
    ignore: void class labels
    """
    if not per_image:
        return lovasz_softmax_flat(*flatten_probas(probas, labels, ignore), classes=classes)
    # Compute the flat loss one image at a time, then average.
    per_image_losses = (
        lovasz_softmax_flat(*flatten_probas(img.unsqueeze(0), lab.unsqueeze(0), ignore),
                            classes=classes)
        for img, lab in zip(probas, labels)
    )
    return mean(per_image_losses)
def lovasz_softmax_flat(probas, labels, classes='present'):
    """
    Multi-class Lovasz-Softmax loss
    probas: [P, C] Variable, class probabilities at each prediction (between 0 and 1)
    labels: [P] Tensor, ground truth labels (between 0 and C - 1)
    classes: 'all' for all, 'present' for classes present in labels, or a list of classes to average.
    """
    if probas.numel() == 0:
        # only void pixels, the gradients should be 0
        return probas * 0.
    C = probas.size(1)
    losses = []
    class_to_sum = list(range(C)) if classes in ['all', 'present'] else classes
    for c in class_to_sum:
        fg = (labels == c).float()  # foreground mask for class c
        # Fix: compare strings with '==' rather than 'is' -- identity of string
        # literals is an implementation detail (CPython emits a SyntaxWarning).
        if (classes == 'present' and fg.sum() == 0):
            continue
        if C == 1:
            if len(classes) > 1:
                raise ValueError('Sigmoid output possible only with 1 class')
            class_pred = probas[:, 0]
        else:
            class_pred = probas[:, c]
        errors = (Variable(fg) - class_pred).abs()
        # Sort errors descending; the Lovasz gradient is taken w.r.t. this order.
        errors_sorted, perm = torch.sort(errors, 0, descending=True)
        perm = perm.data
        fg_sorted = fg[perm]
        losses.append(torch.dot(errors_sorted, Variable(lovasz_grad(fg_sorted))))
    return mean(losses)
def lovasz_grad(gt_sorted):
    """Gradient of the Lovasz extension w.r.t. sorted errors.

    See Alg. 1 in the Lovasz-Softmax paper; gt_sorted is the ground-truth
    mask reordered by descending prediction error.
    """
    num = len(gt_sorted)
    total_fg = gt_sorted.sum()
    cum_fg = gt_sorted.float().cumsum(0)
    cum_bg = (1 - gt_sorted).float().cumsum(0)
    intersection = total_fg - cum_fg
    union = total_fg + cum_bg
    jaccard = 1. - intersection / union
    if num > 1:  # the 1-pixel case needs no differencing
        jaccard[1:num] = jaccard[1:num] - jaccard[0:-1]
    return jaccard
def flatten_probas(probas, labels, ignore=None):
    """
    Flattens predictions in the batch to (P, C) and labels to (P,),
    optionally dropping positions whose label equals `ignore`.
    """
    if probas.dim() == 2:
        # already (P, C): 3D segmentation for sparse tensor -- do nothing
        pass
    elif probas.dim() == 3:
        # assumes output of a sigmoid layer: (B, H, W) single-channel scores
        B, H, W = probas.size()
        C = 1  # fix: C was previously undefined in this branch (NameError)
        probas = probas.view(B, C, H, W)
        probas = probas.permute(0, 2, 3, 1).contiguous().view(-1, C)  # B * H * W, C = P, C
    elif probas.dim() == 5:
        # 3D segmentation for dense tensor
        B, C, L, H, W = probas.size()
        probas = probas.contiguous().view(B, C, L, H*W)
        probas = probas.permute(0, 2, 3, 1).contiguous().view(-1, C)  # B * L * H * W, C = P, C
    labels = labels.view(-1)
    if ignore is None:
        return probas, labels
    valid = (labels != ignore)
    # torch.nonzero(..., as_tuple=False) keeps compatibility with newer pytorch
    vprobas = probas[torch.nonzero(valid, as_tuple=False).squeeze()]
    vlabels = labels[valid]
    return vprobas, vlabels
# --------------------------- HELPER FUNCTIONS ---------------------------
def isnan(x):
    # NaN is the only value that compares unequal to itself, so this check
    # works for plain floats and elementwise for tensors alike.
    return x != x
def mean(l, ignore_nan=False, empty=0):
    """
    nanmean compatible with generators.

    Consumes `l` lazily; on empty input returns `empty`, or raises
    ValueError when empty == 'raise'. A single value is returned as-is
    (no division).
    """
    values = iter(l)
    if ignore_nan:
        values = ifilterfalse(isnan, values)
    try:
        acc = next(values)
    except StopIteration:
        if empty == 'raise':
            raise ValueError('Empty mean')
        return empty
    count = 1
    # Enumerate from 2 because the first item was already consumed above.
    for count, v in enumerate(values, 2):
        acc += v
    if count == 1:
        return acc
    return acc / count
| 35.418021 | 118 | 0.594378 | import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
from . import box_utils
from . import center_utils
try:
from itertools import ifilterfalse
except ImportError:
from itertools import filterfalse as ifilterfalse
class SigmoidFocalClassificationLoss(nn.Module):
def __init__(self, gamma: float = 2.0, alpha: float = 0.25):
super(SigmoidFocalClassificationLoss, self).__init__()
self.alpha = alpha
self.gamma = gamma
@staticmethod
def sigmoid_cross_entropy_with_logits(input: torch.Tensor, target: torch.Tensor):
loss = torch.clamp(input, min=0) - input * target + \
torch.log1p(torch.exp(-torch.abs(input)))
return loss
def forward(self, input: torch.Tensor, target: torch.Tensor, weights: torch.Tensor):
pred_sigmoid = torch.sigmoid(input)
alpha_weight = target * self.alpha + (1 - target) * (1 - self.alpha)
pt = target * (1.0 - pred_sigmoid) + (1.0 - target) * pred_sigmoid
focal_weight = alpha_weight * torch.pow(pt, self.gamma)
bce_loss = self.sigmoid_cross_entropy_with_logits(input, target)
loss = focal_weight * bce_loss
if weights.shape.__len__() == 2 or \
(weights.shape.__len__() == 1 and target.shape.__len__() == 2):
weights = weights.unsqueeze(-1)
assert weights.shape.__len__() == loss.shape.__len__()
return loss * weights
class WeightedSmoothL1Loss(nn.Module):
def __init__(self, beta: float = 1.0 / 9.0, code_weights: list = None):
super(WeightedSmoothL1Loss, self).__init__()
self.beta = beta
if code_weights is not None:
self.code_weights = np.array(code_weights, dtype=np.float32)
self.code_weights = torch.from_numpy(self.code_weights).cuda()
@staticmethod
def smooth_l1_loss(diff, beta):
if beta < 1e-5:
loss = torch.abs(diff)
else:
n = torch.abs(diff)
loss = torch.where(n < beta, 0.5 * n ** 2 / beta, n - 0.5 * beta)
return loss
def forward(self, input: torch.Tensor, target: torch.Tensor, weights: torch.Tensor = None):
target = torch.where(torch.isnan(target), input, target)
diff = input - target
if self.code_weights is not None:
diff = diff * self.code_weights.view(1, 1, -1)
loss = self.smooth_l1_loss(diff, self.beta)
if weights is not None:
assert weights.shape[0] == loss.shape[0] and weights.shape[1] == loss.shape[1]
loss = loss * weights.unsqueeze(-1)
return loss
class WeightedL1Loss(nn.Module):
def __init__(self, code_weights: list = None):
super(WeightedL1Loss, self).__init__()
if code_weights is not None:
self.code_weights = np.array(code_weights, dtype=np.float32)
self.code_weights = torch.from_numpy(self.code_weights).cuda()
def forward(self, input: torch.Tensor, target: torch.Tensor, weights: torch.Tensor = None):
target = torch.where(torch.isnan(target), input, target)
diff = input - target
if self.code_weights is not None:
diff = diff * self.code_weights.view(1, 1, -1)
loss = torch.abs(diff)
if weights is not None:
assert weights.shape[0] == loss.shape[0] and weights.shape[1] == loss.shape[1]
loss = loss * weights.unsqueeze(-1)
return loss
class WeightedCrossEntropyLoss(nn.Module):
def __init__(self):
super(WeightedCrossEntropyLoss, self).__init__()
def forward(self, input: torch.Tensor, target: torch.Tensor, weights: torch.Tensor):
input = input.permute(0, 2, 1)
target = target.argmax(dim=-1)
loss = F.cross_entropy(input, target, reduction='none') * weights
return loss
def get_corner_loss_lidar(pred_bbox3d: torch.Tensor, gt_bbox3d: torch.Tensor):
assert pred_bbox3d.shape[0] == gt_bbox3d.shape[0]
pred_box_corners = box_utils.boxes_to_corners_3d(pred_bbox3d)
gt_box_corners = box_utils.boxes_to_corners_3d(gt_bbox3d)
gt_bbox3d_flip = gt_bbox3d.clone()
gt_bbox3d_flip[:, 6] += np.pi
gt_box_corners_flip = box_utils.boxes_to_corners_3d(gt_bbox3d_flip)
corner_dist = torch.min(torch.norm(pred_box_corners - gt_box_corners, dim=2),
torch.norm(pred_box_corners - gt_box_corners_flip, dim=2))
corner_loss = WeightedSmoothL1Loss.smooth_l1_loss(corner_dist, beta=1.0)
return corner_loss.mean(dim=1)
def get_corner_loss_mse(pred_bbox3d: torch.Tensor, gt_bbox3d: torch.Tensor):
assert pred_bbox3d.shape[0] == gt_bbox3d.shape[0]
pred_box_corners = box_utils.boxes_to_corners_3d(pred_bbox3d)
gt_box_corners = box_utils.boxes_to_corners_3d(gt_bbox3d)
corner_loss_x = F.mse_loss(input=pred_box_corners[:,:,0], target=gt_box_corners[:,:,0])
corner_loss_y = F.mse_loss(input=pred_box_corners[:,:,1], target=gt_box_corners[:,:,1])
corner_loss_z = F.mse_loss(input=pred_box_corners[:,:,2], target=gt_box_corners[:,:,2])
corner_loss = corner_loss_x + corner_loss_y + corner_loss_z
return corner_loss
def get_iouscore_loss_bce(iou_preds, iou_gts, iou_fg_thresh=0.75, iou_bg_thresh=0.25):
fg_mask = iou_gts > iou_fg_thresh
bg_mask = iou_gts < iou_bg_thresh
interval_mask = (fg_mask == 0) & (bg_mask == 0)
iou_cls_labels = (fg_mask > 0).float()
iou_cls_labels[interval_mask] = \
(iou_gts[interval_mask] - iou_bg_thresh) / (iou_fg_thresh - iou_bg_thresh)
loss_ioucls = F.binary_cross_entropy(torch.sigmoid(iou_preds), iou_cls_labels.float(), reduction='none')
cls_valid_mask = (iou_cls_labels >= 0).float()
loss_iouscore = (loss_ioucls * cls_valid_mask).sum() / torch.clamp(cls_valid_mask.sum(), min=1.0)
return loss_iouscore
def get_rot_binres_loss(pred_reg, reg_label, num_head_bin, get_ry_fine=False):
= {}
start_offset = 0
ry_bin_l, ry_bin_r = start_offset, start_offset + num_head_bin
ry_res_l, ry_res_r = ry_bin_r, ry_bin_r + num_head_bin
start_offset = ry_res_r
ry_label = reg_label.squeeze(dim=-1)
_fine:
assert False, "one-stage should not get_ry_fine."
angle_per_class = (np.pi / 2) / num_head_bin
ry_label = ry_label % (2 * np.pi)
opposite_flag = (ry_label > np.pi * 0.5) & (ry_label < np.pi * 1.5)
ry_label[opposite_flag] = (ry_label[opposite_flag] + np.pi) % (2 * np.pi)
shift_angle = (ry_label + np.pi * 0.5) % (2 * np.pi)
shift_angle = torch.clamp(shift_angle - np.pi * 0.25, min=1e-3, max=np.pi * 0.5 - 1e-3)
ry_bin_label = (shift_angle / angle_per_class).floor().long()
ry_res_label = shift_angle - (ry_bin_label.float() * angle_per_class + angle_per_class / 2)
ry_res_norm_label = ry_res_label / (angle_per_class / 2)
else:
angle_per_class = (2 * np.pi) / num_head_bin
heading_angle = ry_label % (2 * np.pi)
shift_angle = (heading_angle + angle_per_class / 2) % (2 * np.pi)
ry_bin_label = (shift_angle / angle_per_class).floor().long()
ry_res_label = shift_angle - (ry_bin_label.float() * angle_per_class + angle_per_class / 2)
ry_res_norm_label = ry_res_label / (angle_per_class / 2)
ry_bin_onehot = torch.cuda.FloatTensor(ry_bin_label.size(0), num_head_bin).zero_()
ry_bin_onehot.scatter_(1, ry_bin_label.view(-1, 1).long(), 1)
loss_ry_bin = F.cross_entropy(pred_reg[:, ry_bin_l:ry_bin_r], ry_bin_label)
loss_ry_res = F.smooth_l1_loss((pred_reg[:, ry_res_l: ry_res_r] * ry_bin_onehot).sum(dim=1), ry_res_norm_label)
reg_loss_dict['loss_ry_bin'] = loss_ry_bin.item()
reg_loss_dict['loss_ry_res'] = loss_ry_res.item()
angle_loss = loss_ry_bin + loss_ry_res
reg_loss_dict['loss_angle'] = angle_loss
return angle_loss, reg_loss_dict
class CenterNetFocalLoss(nn.Module):
def __init__(self, gamma=4, alpha=2):
super(CenterNetFocalLoss, self).__init__()
self.gamma = gamma
self.alpha = alpha
def _sigmoid(self, x):
y = torch.clamp(torch.sigmoid(x), min=1e-4, max=1 - 1e-4)
return y
def _neg_loss(self, pred, gt):
pos_inds = gt.eq(1).float()
neg_inds = gt.lt(1).float()
neg_weights = torch.pow(1 - gt, self.gamma)
loss = 0
pos_loss = torch.log(pred) * torch.pow(1 - pred, self.alpha) * pos_inds
neg_loss = torch.log(1 - pred) * torch.pow(pred, self.alpha) * neg_weights * neg_inds
num_pos = pos_inds.float().sum()
pos_loss = pos_loss.sum()
neg_loss = neg_loss.sum()
if num_pos == 0:
loss = loss - neg_loss
else:
loss = loss - (pos_loss + neg_loss) / num_pos
return loss
def forward(self, out, target):
out_norm = self._sigmoid(out)
return self._neg_loss(out_norm, target)
class CenterNetResLoss(nn.Module):
def __init__(self, cfg):
super(CenterNetResLoss, self).__init__()
self.res_func_type = cfg['res_func']
def forward(self, output, mask, ind, target):
pred = center_utils._transpose_and_gather_feat(output, ind)
if mask.sum():
pred_flat = pred.view(-1, pred.shape[-1])
target_flat = target.view(-1, target.shape[-1])
mask_flat = mask.view(-1).bool()
pred_valid = pred_flat[mask_flat]
target_valid = target_flat[mask_flat]
if self.res_func_type == 'smooth-l1':
loss = F.smooth_l1_loss(pred_valid, target_valid, reduction='none')
elif self.res_func_type == 'l1':
loss = F.l1_loss(pred_valid, target_valid, reduction='none')
elif self.res_func_type == 'balanced_l1':
loss = get_balanced_l1_loss(pred_valid, target_valid)
else:
raise NotImplementedError
loss = loss.mean(dim=0).sum()
else:
loss = 0.
return loss
class CenterNetRotBinResLoss(nn.Module):
def __init__(self, cfg):
super(CenterNetRotBinResLoss, self).__init__()
self.num_head_bin = cfg['num_bins']
def forward(self, output, mask, ind, target):
pred = center_utils._transpose_and_gather_feat(output, ind)
if mask.sum():
pred_flat = pred.view(-1, pred.shape[-1])
target_flat = target.view(-1, target.shape[-1])
mask_flat = mask.view(-1).bool()
pred_valid = pred_flat[mask_flat]
target_valid = target_flat[mask_flat]
loss, _ = get_rot_binres_loss(pred_valid, target_valid, num_head_bin=self.num_head_bin)
else:
loss = 0.
return loss
def lovasz_softmax(probas, labels, classes='present', per_image=False, ignore=None):
if per_image:
loss = mean(lovasz_softmax_flat(*flatten_probas(prob.unsqueeze(0), lab.unsqueeze(0), ignore), classes=classes)
for prob, lab in zip(probas, labels))
else:
loss = lovasz_softmax_flat(*flatten_probas(probas, labels, ignore), classes=classes)
return loss
def lovasz_softmax_flat(probas, labels, classes='present'):
if probas.numel() == 0:
return probas * 0.
C = probas.size(1)
losses = []
class_to_sum = list(range(C)) if classes in ['all', 'present'] else classes
for c in class_to_sum:
fg = (labels == c).float()
if (classes is 'present' and fg.sum() == 0):
continue
if C == 1:
if len(classes) > 1:
raise ValueError('Sigmoid output possible only with 1 class')
class_pred = probas[:, 0]
else:
class_pred = probas[:, c]
errors = (Variable(fg) - class_pred).abs()
errors_sorted, perm = torch.sort(errors, 0, descending=True)
perm = perm.data
fg_sorted = fg[perm]
losses.append(torch.dot(errors_sorted, Variable(lovasz_grad(fg_sorted))))
return mean(losses)
def lovasz_grad(gt_sorted):
p = len(gt_sorted)
gts = gt_sorted.sum()
intersection = gts - gt_sorted.float().cumsum(0)
union = gts + (1 - gt_sorted).float().cumsum(0)
jaccard = 1. - intersection / union
if p > 1:
jaccard[1:p] = jaccard[1:p] - jaccard[0:-1]
return jaccard
def flatten_probas(probas, labels, ignore=None):
if probas.dim() == 2:
pass
elif probas.dim() == 3:
B, H, W = probas.size()
probas = probas.view(B, 1, H, W)
probas = probas.permute(0, 2, 3, 1).contiguous().view(-1, C)
elif probas.dim() == 5:
B, C, L, H, W = probas.size()
probas = probas.contiguous().view(B, C, L, H*W)
probas = probas.permute(0, 2, 3, 1).contiguous().view(-1, C)
labels = labels.view(-1)
if ignore is not None:
valid = (labels != ignore)
vprobas = probas[torch.nonzero(valid, as_tuple=False).squeeze()]
vlabels = labels[valid]
return vprobas, vlabels
else:
return probas, labels
def isnan(x):
return x != x
def mean(l, ignore_nan=False, empty=0):
l = iter(l)
if ignore_nan:
l = ifilterfalse(isnan, l)
try:
n = 1
acc = next(l)
except StopIteration:
if empty == 'raise':
raise ValueError('Empty mean')
return empty
for n, v in enumerate(l, 2):
acc += v
if n == 1:
return acc
return acc / n
| true | true |
f72ff95e1b300049408b355850be32b4312a414b | 10,905 | py | Python | wsd/graph_wsd_test_v1.py | Bharat-Runwal/path2vec | f99188b882752ff9aa2c87334979b75483940ae0 | [
"Apache-2.0"
] | 31 | 2018-08-19T22:34:53.000Z | 2022-03-23T13:39:48.000Z | wsd/graph_wsd_test_v1.py | Bharat-Runwal/path2vec | f99188b882752ff9aa2c87334979b75483940ae0 | [
"Apache-2.0"
] | 21 | 2018-08-24T11:52:59.000Z | 2021-01-30T18:39:47.000Z | wsd/graph_wsd_test_v1.py | Bharat-Runwal/path2vec | f99188b882752ff9aa2c87334979b75483940ae0 | [
"Apache-2.0"
] | 11 | 2018-08-20T05:34:06.000Z | 2021-12-07T06:53:23.000Z | # -*- coding: utf-8 -*-
"""
Created on Mon May 7 17:13:25 2018
@author: dorgham
"""
import networkx as nx
from nltk.corpus import wordnet as wn
from nltk.corpus import wordnet_ic
from nltk.stem import WordNetLemmatizer
import matplotlib.pyplot as plt
import xml.etree.ElementTree as ET
from collections import OrderedDict
import codecs
import string
from nltk.corpus import stopwords
from sklearn.metrics import f1_score, precision_score, recall_score
#algorithm parameters
USE_POS_INFO = True  # restrict WordNet synset lookups to the SensEval-tagged POS
USE_LESK = False  # score pairs that are not noun-noun/verb-verb with extended Lesk overlap
USE_PAGERANK = True  # rank candidate senses by PageRank; otherwise by weighted degree
AVG_METHOD = 'micro'  # averaging strategy for sklearn precision/recall/F1
MAX_DEPTH = 3  # each word is compared with up to this many following words
LESK_NORM_FACTOR = 20  # empirically chosen normalization for Lesk edge weights
# SensEval-2 evaluation data and gold sense keys (paths relative to the working dir)
senseval_fpath = 'WSD_Unified_Evaluation_Datasets/senseval2/senseval2.data.xml'
gold_tags_fpath = 'WSD_Unified_Evaluation_Datasets/senseval2/senseval2.gold.key.txt'
# SemCor information-content statistics, required by the Jiang-Conrath measure
info_content = wordnet_ic.ic('ic-semcor.dat')
wnlemmatizer = WordNetLemmatizer()
# stopword set: NLTK English stopwords + punctuation + pywsd's extra tokens
pywsd_stopwords = [u"'s", u"``", u"`"]
STOPWORDS = set(stopwords.words('english') + list(string.punctuation) + pywsd_stopwords)
def lch_similarity(synset1, synset2):
    """Leacock-Chodorow path-based similarity between two WordNet synsets."""
    return wn.lch_similarity(synset1, synset2)
def jcn_similarity(synset1, synset2):
    """Jiang-Conrath similarity using the SemCor information-content statistics."""
    return wn.jcn_similarity(synset1, synset2, info_content)
def _synset_signature(synset):
    # Helper: build the lowercased "signature" bag of words for a synset --
    # lemmatized words from its definition and examples, its own lemma names,
    # and the lemma names of its direct hypernyms/hyponyms -- minus stopwords.
    text = str(synset.definition()).translate(str.maketrans('', '', string.punctuation))
    for example in synset.examples():
        text += ' ' + str(example).translate(str.maketrans('', '', string.punctuation))
    words = ''
    for word in set(text.split()):
        words += wnlemmatizer.lemmatize(word) + ' '
    for lemma in synset.lemma_names():
        words += ' ' + lemma
    hyper_hypo = set(synset.hyponyms() + synset.hypernyms() + synset.instance_hyponyms() + synset.instance_hypernyms())
    for hh in hyper_hypo:
        for lemma in hh.lemma_names():
            words += ' ' + lemma
    signature = set(w.lower() for w in words.split())
    return signature.difference(STOPWORDS)


def lesk_similarity(synset1, synset2):
    """Extended-Lesk similarity: size of the overlap between the two synsets'
    signatures (definition + example words, lemma names, and the lemma names
    of direct hypernyms/hyponyms; lemmatized, lowercased, stopwords removed).

    The two signatures were previously built by duplicated inline code; the
    shared logic now lives in _synset_signature.
    """
    return len(_synset_signature(synset1).intersection(_synset_signature(synset2)))
def convert_to_wordnet_pos(senseval_pos):
    """Map a SensEval POS tag to the corresponding WordNet POS constant;
    returns None for unknown/untagged input (meaning: don't restrict)."""
    mapping = {
        'VERB': wn.VERB,
        'NOUN': wn.NOUN,
        'ADV': wn.ADV,
        'ADJ': wn.ADJ,
    }
    return mapping.get(senseval_pos)
def sentence_wsd(sentences, poses):
    """Disambiguate every tagged token of every sentence via graph centrality.

    For each sentence, builds a graph whose nodes are the candidate WordNet
    synsets of each token and whose edges carry pairwise similarity weights
    (LCH for verb-verb pairs, JCN for noun-noun pairs, optionally Lesk for
    the rest). The highest-centrality synset per token wins.

    Args:
        sentences: list of OrderedDicts mapping token id -> surface word.
        poses: parallel list of dicts mapping token id -> SensEval POS tag.

    Returns:
        dict mapping token id -> ';'-joined WordNet lemma sense keys of the
        chosen synset (empty string when no synset could be selected).
    """
    counter=0
    output_dict = dict()
    for sentence in sentences:
        G=nx.Graph()
        sent_len = len(sentence.keys())
        G_pos = dict() #used for aligning the nodes when drawing the graph
        pos_idx=1
        token_nodeNames_map = dict()
        pos_dict = poses[counter]
        #construct the nodes of the graph: one node per candidate synset
        for i, _id in enumerate(sentence.keys()):
            if USE_POS_INFO: #restrict the retrieved snysets from wordnet to the target pos
                wn_pos = convert_to_wordnet_pos(pos_dict[_id])
            else:
                wn_pos = None
            synsets_list = list(wn.synsets(sentence[_id], pos=wn_pos))
            if len(synsets_list) > 0:
                node_names = []
                for synset in synsets_list:
                    node_name = str(i) + ' ' + synset.name()
                    #adding the index to the node name is important in the case of
                    #having a word that is repeated in the sentence but with
                    #different sense each time, so we want unique node for each one.
                    G.add_node(node_name)
                    node_names.append(node_name)
                token_nodeNames_map[_id] = node_names
                G_pos.update( (label, (pos_idx, j)) for j, label in enumerate(node_names) )
                pos_idx+=1
        #compute word similarity between each word and its next MAX_DEPTH neighbors
        ids_list = list(sentence.keys())
        lch_sim_dict = dict()
        jcn_sim_dict = dict()
        lesk_sim_dict = dict()
        #print sentence.values()
        for idx, key in enumerate(ids_list):
            if USE_POS_INFO:
                wn_pos = convert_to_wordnet_pos(pos_dict[ids_list[idx]])
            else:
                wn_pos = None
            synsets_list = list(wn.synsets(sentence[ids_list[idx]], pos=wn_pos))
            if len(synsets_list) > 0:
                i = 1
                while i<=MAX_DEPTH and idx+i<sent_len:
                    if USE_POS_INFO:
                        wn_pos = convert_to_wordnet_pos(pos_dict[ids_list[idx+i]])
                    else:
                        wn_pos = None
                    next_synsets_list = list(wn.synsets(sentence[ids_list[idx+i]], pos=wn_pos))
                    if len(next_synsets_list) > 0:
                        for current_synset in synsets_list:
                            for neighbor_synset in next_synsets_list:
                                # edge key: "<idx> <synset>;<idx+i> <synset>"
                                nodes = str(idx) + ' ' + current_synset.name() + ';'
                                nodes += str(idx+i) + ' ' + neighbor_synset.name()
                                if current_synset.pos() == 'v' and neighbor_synset.pos() == 'v':
                                    sim_weight = lch_similarity(current_synset, neighbor_synset)
                                    lch_sim_dict[nodes] = sim_weight
                                elif current_synset.pos() == 'n' and neighbor_synset.pos() == 'n':
                                    sim_weight = jcn_similarity(current_synset, neighbor_synset)
                                    jcn_sim_dict[nodes] = sim_weight
                                elif USE_LESK:
                                    sim_weight = lesk_similarity(current_synset, neighbor_synset)
                                    lesk_sim_dict[nodes] = sim_weight
                    i+=1
        #normalize the similarity weights (per measure) and build edges
        if lch_sim_dict:
            max_lch_score = max(lch_sim_dict.values())
            for key in lch_sim_dict:
                nodeIds = key.split(';')
                G.add_edge(nodeIds[0],nodeIds[1], weight=(lch_sim_dict[key]/max_lch_score))
        if jcn_sim_dict:
            max_jcn_score = max(jcn_sim_dict.values())
            for key in jcn_sim_dict:
                nodeIds = key.split(';')
                G.add_edge(nodeIds[0],nodeIds[1], weight=(jcn_sim_dict[key]/max_jcn_score))
        if USE_LESK:
            if lesk_sim_dict:
                max_lesk_score = max(lesk_sim_dict.values())
                if max_lesk_score > 0:
                    # Lesk scores are divided by a fixed constant, not by their max
                    for key in lesk_sim_dict:
                        nodeIds = key.split(';')
                        G.add_edge(nodeIds[0],nodeIds[1], weight=(lesk_sim_dict[key]/LESK_NORM_FACTOR))
        #compute graph centrality
        node_scores = dict()
        if USE_PAGERANK:
            node_scores = nx.pagerank(G)
        else:
            # NOTE(review): the positional "weight" is taken as the weight=
            # argument of DegreeView -- confirm against the networkx version in use
            node_scores = G.degree(G.nodes(), "weight")
        # pick, per token, the candidate synset with the highest centrality
        for token_id in ids_list:
            nodeNames = token_nodeNames_map.get(token_id)
            scores = []
            max_label = ""
            wordnet_key = ""
            if nodeNames:
                for nodeName in nodeNames:
                    scores.append(node_scores[nodeName])
            if scores:
                max_index = max(range(len(scores)), key=scores.__getitem__)
                max_label = nodeNames[max_index]
            if max_label:
                # strip the "<index> " prefix to recover the synset name
                i = max_label.find(' ')
                lemmas = wn.synset(max_label[i+1:]).lemmas()
                for lemma in lemmas:
                    wordnet_key += lemma.key()+';'
                wordnet_key = wordnet_key[0:-1]
            output_dict[token_id] = wordnet_key
        #add the weight as attribute to the nodes of the graph
        #for node in node_scores.keys():
        #    G.node[node]['weight']=node_scores[node]
        counter += 1
        if counter==1: #draw the graph of the first sentence
            plt.close()
            nx.draw(G, pos=G_pos, with_labels = True)
            plt.show()
        G.clear()
    return output_dict
def load_senseval_data(file_path):
    """Parse a SensEval/SemEval XML data file.

    Only words wrapped in an <instance> tag (i.e. those carrying a gold sense
    annotation) are collected; plain <wf> word forms are skipped.

    Args:
        file_path: path to the *.data.xml file.

    Returns:
        (sentences, pos_list): parallel lists with one entry per non-empty
        sentence; sentences[i] maps instance id -> surface word and
        pos_list[i] maps instance id -> POS tag, in document order.
    """
    sentences = []
    pos_list = []
    tokens_dict = OrderedDict()
    pos_dict = OrderedDict()
    tree = ET.parse(file_path)
    root = tree.getroot()
    for text in root:
        for sentence in text:
            for word in sentence:
                if word.tag == 'instance' and word.attrib['id']:  # only annotated instances
                    tokens_dict[word.attrib['id']] = word.text
                    pos_dict[word.attrib['id']] = word.attrib['pos']
            if tokens_dict:
                sentences.append(tokens_dict)
                pos_list.append(pos_dict)
                # Start fresh containers for the next sentence. Fix: the
                # original reset these to plain dict(), inconsistent with the
                # OrderedDict used for the first sentence.
                tokens_dict = OrderedDict()
                pos_dict = OrderedDict()
    return sentences, pos_list
if __name__ == "__main__":
    # Run the graph-based WSD pipeline over SensEval-2 and score it against
    # the gold sense keys.
    sents, poses = load_senseval_data(senseval_fpath)
    output_dict = sentence_wsd(sents, poses)
    #load the gold results
    with codecs.open(gold_tags_fpath, 'r', 'utf-8') as f:
        lines = f.readlines()
    wsd_output = []
    gold_output = []
    for line in lines:
        # gold line format: "<instance id> <key1> [<key2> ...]"
        id_key_pair = line.split()
        predicted_keys = output_dict[id_key_pair[0]].split(';')
        gold_keys_set = set(id_key_pair[1:])
        predected_keys_set = set(predicted_keys)
        if len(predected_keys_set.intersection(gold_keys_set)) > 0:
            # any overlap between predicted lemma keys and gold keys counts as
            # correct: append the same value to both lists so the metrics agree
            wsd_output.append(predicted_keys[0])
            gold_output.append(predicted_keys[0])
        else:
            wsd_output.append(predicted_keys[0])
            gold_output.append(id_key_pair[1])
    assert len(wsd_output) == len(gold_output)
    # micro-averaged scores over all instances (AVG_METHOD)
    f1 = f1_score(gold_output, wsd_output, average=AVG_METHOD)
    precision = precision_score(gold_output, wsd_output, average=AVG_METHOD)
    recall = recall_score(gold_output, wsd_output, average=AVG_METHOD)
    print ('F-score: %1.4f' % f1, ' Precision: %1.4f' % precision, ' Recall: %1.4f' % recall)
import networkx as nx
from nltk.corpus import wordnet as wn
from nltk.corpus import wordnet_ic
from nltk.stem import WordNetLemmatizer
import matplotlib.pyplot as plt
import xml.etree.ElementTree as ET
from collections import OrderedDict
import codecs
import string
from nltk.corpus import stopwords
from sklearn.metrics import f1_score, precision_score, recall_score
USE_POS_INFO = True
USE_LESK = False
USE_PAGERANK = True
AVG_METHOD = 'micro'
MAX_DEPTH = 3
LESK_NORM_FACTOR = 20
senseval_fpath = 'WSD_Unified_Evaluation_Datasets/senseval2/senseval2.data.xml'
gold_tags_fpath = 'WSD_Unified_Evaluation_Datasets/senseval2/senseval2.gold.key.txt'
info_content = wordnet_ic.ic('ic-semcor.dat')
wnlemmatizer = WordNetLemmatizer()
pywsd_stopwords = [u"'s", u"``", u"`"]
STOPWORDS = set(stopwords.words('english') + list(string.punctuation) + pywsd_stopwords)
def lch_similarity(synset1, synset2):
return wn.lch_similarity(synset1, synset2)
def jcn_similarity(synset1, synset2):
return wn.jcn_similarity(synset1, synset2, info_content)
def lesk_similarity(synset1, synset2):
str1 = str(synset1.definition()).translate(str.maketrans('','',string.punctuation))
for example in synset1.examples():
str1 += ' ' + str(example).translate(str.maketrans('','',string.punctuation))
lemmatized_str1=''
for word in set(str1.split()):
lemmatized_str1 += wnlemmatizer.lemmatize(word) + ' '
for lemma in synset1.lemma_names():
lemmatized_str1 += ' ' + lemma
hyper_hypo = set(synset1.hyponyms() + synset1.hypernyms() + synset1.instance_hyponyms() + synset1.instance_hypernyms())
for hh in hyper_hypo:
for lemma in hh.lemma_names():
lemmatized_str1 += ' ' + lemma
current_set = set(lemmatized_str1.split())
current_set = set(cs.lower() for cs in current_set)
current_set = current_set.difference(STOPWORDS)
#print (current_set)
str2 = str(synset2.definition()).translate(str.maketrans('','',string.punctuation))
for example in synset2.examples():
str2 += ' ' + str(example).translate(str.maketrans('','',string.punctuation))
lemmatized_str2=''
for word in set(str2.split()):
lemmatized_str2 += wnlemmatizer.lemmatize(word) + ' '
for lemma in synset2.lemma_names():
lemmatized_str2 += ' ' + lemma
hyper_hypo = set(synset2.hyponyms() + synset2.hypernyms() + synset2.instance_hyponyms() + synset2.instance_hypernyms())
for hh in hyper_hypo:
for lemma in hh.lemma_names():
lemmatized_str2 += ' ' + lemma
neighbor_set = set(lemmatized_str2.split())
neighbor_set = set(ns.lower() for ns in neighbor_set)
neighbor_set = neighbor_set.difference(STOPWORDS)
#print (neighbor_set)
return len(current_set.intersection(neighbor_set))
def convert_to_wordnet_pos(senseval_pos):
if senseval_pos == 'VERB':
return wn.VERB
elif senseval_pos == 'NOUN':
return wn.NOUN
elif senseval_pos == 'ADV':
return wn.ADV
elif senseval_pos == 'ADJ':
return wn.ADJ
else:
return None
def sentence_wsd(sentences, poses):
counter=0
output_dict = dict()
for sentence in sentences:
G=nx.Graph()
sent_len = len(sentence.keys())
G_pos = dict() #used for aligning the nodes when drawing the graph
pos_idx=1
token_nodeNames_map = dict()
pos_dict = poses[counter]
#construct the nodes of the graph
for i, _id in enumerate(sentence.keys()):
if USE_POS_INFO: #restrict the retrieved snysets from wordnet to the target pos
wn_pos = convert_to_wordnet_pos(pos_dict[_id])
else:
wn_pos = None
synsets_list = list(wn.synsets(sentence[_id], pos=wn_pos))
if len(synsets_list) > 0:
node_names = []
for synset in synsets_list:
node_name = str(i) + ' ' + synset.name()
#adding the index to the node name is important in the case of
#having a word that is repeated in the sentence but with
#different sense each time, so we want unique node for each one.
G.add_node(node_name)
node_names.append(node_name)
token_nodeNames_map[_id] = node_names
G_pos.update( (label, (pos_idx, j)) for j, label in enumerate(node_names) )
pos_idx+=1
#compute word similarity
ids_list = list(sentence.keys())
lch_sim_dict = dict()
jcn_sim_dict = dict()
lesk_sim_dict = dict()
#print sentence.values()
for idx, key in enumerate(ids_list):
if USE_POS_INFO:
wn_pos = convert_to_wordnet_pos(pos_dict[ids_list[idx]])
else:
wn_pos = None
synsets_list = list(wn.synsets(sentence[ids_list[idx]], pos=wn_pos))
if len(synsets_list) > 0:
i = 1
while i<=MAX_DEPTH and idx+i<sent_len:
if USE_POS_INFO:
wn_pos = convert_to_wordnet_pos(pos_dict[ids_list[idx+i]])
else:
wn_pos = None
next_synsets_list = list(wn.synsets(sentence[ids_list[idx+i]], pos=wn_pos))
if len(next_synsets_list) > 0:
for current_synset in synsets_list:
for neighbor_synset in next_synsets_list:
nodes = str(idx) + ' ' + current_synset.name() + ';'
nodes += str(idx+i) + ' ' + neighbor_synset.name()
if current_synset.pos() == 'v' and neighbor_synset.pos() == 'v':
sim_weight = lch_similarity(current_synset, neighbor_synset)
lch_sim_dict[nodes] = sim_weight
elif current_synset.pos() == 'n' and neighbor_synset.pos() == 'n':
sim_weight = jcn_similarity(current_synset, neighbor_synset)
jcn_sim_dict[nodes] = sim_weight
elif USE_LESK:
sim_weight = lesk_similarity(current_synset, neighbor_synset)
lesk_sim_dict[nodes] = sim_weight
i+=1
#normalize the similarity weights and build edges
if lch_sim_dict:
max_lch_score = max(lch_sim_dict.values())
for key in lch_sim_dict:
nodeIds = key.split(';')
G.add_edge(nodeIds[0],nodeIds[1], weight=(lch_sim_dict[key]/max_lch_score))
if jcn_sim_dict:
max_jcn_score = max(jcn_sim_dict.values())
for key in jcn_sim_dict:
nodeIds = key.split(';')
G.add_edge(nodeIds[0],nodeIds[1], weight=(jcn_sim_dict[key]/max_jcn_score))
if USE_LESK:
if lesk_sim_dict:
max_lesk_score = max(lesk_sim_dict.values())
if max_lesk_score > 0:
for key in lesk_sim_dict:
nodeIds = key.split(';')
G.add_edge(nodeIds[0],nodeIds[1], weight=(lesk_sim_dict[key]/LESK_NORM_FACTOR))
#compute graph centrality
node_scores = dict()
if USE_PAGERANK:
node_scores = nx.pagerank(G)
else:
node_scores = G.degree(G.nodes(), "weight")
for token_id in ids_list:
nodeNames = token_nodeNames_map.get(token_id)
scores = []
max_label = ""
wordnet_key = ""
if nodeNames:
for nodeName in nodeNames:
scores.append(node_scores[nodeName])
if scores:
max_index = max(range(len(scores)), key=scores.__getitem__)
max_label = nodeNames[max_index]
if max_label:
i = max_label.find(' ')
lemmas = wn.synset(max_label[i+1:]).lemmas()
for lemma in lemmas:
wordnet_key += lemma.key()+';'
wordnet_key = wordnet_key[0:-1]
output_dict[token_id] = wordnet_key
#add the weight as attribute to the nodes of the graph
#for node in node_scores.keys():
# G.node[node]['weight']=node_scores[node]
counter += 1
if counter==1: #draw the graph of the first sentence
plt.close()
nx.draw(G, pos=G_pos, with_labels = True)
plt.show()
G.clear()
return output_dict
def load_senseval_data(file_path):
    """Parse a Senseval XML corpus file.

    Only words wrapped in ``<instance>`` tags (the ambiguous target words)
    are kept; plain ``<wf>`` word forms are ignored.

    :param file_path: path to a Senseval XML file with the layout
                      ``<corpus>/<text>/<sentence>/<word>``
    :return: tuple ``(sentences, pos_list)`` — parallel lists with one entry
             per non-empty sentence, each mapping instance id -> token text
             and instance id -> POS tag respectively.  Document order of the
             instances is preserved.
    """
    tokens_dict = OrderedDict()
    pos_dict = OrderedDict()
    sentences = []
    pos_list = []
    root = ET.parse(file_path).getroot()
    for text in root:
        for sentence in text:
            for word in sentence:
                # only target words carry the <instance> tag and a non-empty id
                if word.tag == 'instance' and word.attrib['id']:
                    tokens_dict[word.attrib['id']] = word.text
                    pos_dict[word.attrib['id']] = word.attrib['pos']
            if tokens_dict:
                sentences.append(tokens_dict)
                pos_list.append(pos_dict)
                # Fix: reset with OrderedDict (the original used a plain dict()
                # here, silently dropping the ordering guarantee established
                # by the initial accumulators).
                tokens_dict = OrderedDict()
                pos_dict = OrderedDict()
    return sentences, pos_list
if __name__ == "__main__":
    # Run graph-based WSD over the Senseval corpus, then score the predicted
    # sense keys against the gold-standard key file.
    sents, poses = load_senseval_data(senseval_fpath)
    output_dict = sentence_wsd(sents, poses)
    #load the gold results
    with codecs.open(gold_tags_fpath, 'r', 'utf-8') as f:
        lines = f.readlines()
    wsd_output = []
    gold_output = []
    for line in lines:
        # Each gold line: "<instance id> <sense key> [<alternative keys>...]"
        id_key_pair = line.split()
        predicted_keys = output_dict[id_key_pair[0]].split(';')
        gold_keys_set = set(id_key_pair[1:])
        predected_keys_set = set(predicted_keys)  # (sic: 'predected' typo in original)
        # Lenient scoring: if ANY predicted key matches ANY gold key, count the
        # prediction as correct by echoing the predicted key into the gold list.
        if len(predected_keys_set.intersection(gold_keys_set)) > 0:
            wsd_output.append(predicted_keys[0])
            gold_output.append(predicted_keys[0])
        else:
            wsd_output.append(predicted_keys[0])
            gold_output.append(id_key_pair[1])
    assert len(wsd_output) == len(gold_output)
    # Micro/macro behaviour depends on AVG_METHOD defined earlier in the file.
    f1 = f1_score(gold_output, wsd_output, average=AVG_METHOD)
    precision = precision_score(gold_output, wsd_output, average=AVG_METHOD)
    recall = recall_score(gold_output, wsd_output, average=AVG_METHOD)
    print ('F-score: %1.4f' % f1, ' Precision: %1.4f' % precision, ' Recall: %1.4f' % recall)
| true | true |
f72ffbe57a59f3600333518d118a3d3eda4b5c23 | 5,497 | py | Python | python_src/mysetup.py | softmatterlab/DeepTrack-2.0-app | 3bc661987cba53519ebefcc0b7221994a6e2d317 | [
"MIT"
] | null | null | null | python_src/mysetup.py | softmatterlab/DeepTrack-2.0-app | 3bc661987cba53519ebefcc0b7221994a6e2d317 | [
"MIT"
] | 6 | 2020-10-27T15:50:49.000Z | 2021-10-19T14:37:47.000Z | python_src/mysetup.py | softmatterlab/DeepTrack-2.0-app | 3bc661987cba53519ebefcc0b7221994a6e2d317 | [
"MIT"
] | 3 | 2020-10-16T11:04:42.000Z | 2021-10-19T14:26:52.000Z | #!/usr/bin/python3
# -*- coding: utf-8 -*-
# Created by: python.exe -m py2exe myscript.py -W mysetup.py
from distutils.core import setup
import py2exe
class Target(object):
    """Base class for every executable that py2exe will build.

    All keyword arguments given to the constructor become instance
    attributes; py2exe reads them as build metadata (script path, icon
    resources, version info, ...).
    """

    def __init__(self, **attrs):
        self.__dict__.update(attrs)

    def copy(self):
        """Return a new Target carrying the same attributes as this one."""
        return Target(**self.__dict__)

    def __setitem__(self, name, value):
        # Support dict-style assignment, e.g. target["version"] = "1.0".
        self.__dict__[name] = value
RT_BITMAP = 2
RT_MANIFEST = 24
# A manifest which specifies the executionlevel
# and windows common-controls library version 6
manifest_template = '''<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity
version="5.0.0.0"
processorArchitecture="*"
name="%(prog)s"
type="win32"
/>
<description>%(prog)s</description>
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel
level="%(level)s"
uiAccess="false">
</requestedExecutionLevel>
</requestedPrivileges>
</security>
</trustInfo>
<dependency>
<dependentAssembly>
<assemblyIdentity
type="win32"
name="Microsoft.Windows.Common-Controls"
version="6.0.0.0"
processorArchitecture="*"
publicKeyToken="6595b64144ccf1df"
language="*"
/>
</dependentAssembly>
</dependency>
</assembly>
'''
myscript = Target(
# We can extend or override the VersionInfo of the base class:
# version = "1.0",
# file_description = "File Description",
# comments = "Some Comments",
# internal_name = "spam",
script="server.py", # path of the main script
# Allows to specify the basename of the executable, if different from 'myscript'
# dest_base = "myscript",
# Icon resources:[(resource_id, path to .ico file), ...]
# icon_resources=[(1, r"myscript.ico")]
other_resources = [(RT_MANIFEST, 1, (manifest_template % dict(prog="server", level="asInvoker")).encode("utf-8")),
# for bitmap resources, the first 14 bytes must be skipped when reading the file:
# (RT_BITMAP, 1, open("bitmap.bmp", "rb").read()[14:]),
]
)
# ``zipfile`` and ``bundle_files`` options explained:
# ===================================================
#
# zipfile is the Python runtime library for your exe/dll-files; it
# contains in a ziparchive the modules needed as compiled bytecode.
#
# If 'zipfile=None' is used, the runtime library is appended to the
# exe/dll-files (which will then grow quite large), otherwise the
# zipfile option should be set to a pathname relative to the exe/dll
# files, and a library-file shared by all executables will be created.
#
# The py2exe runtime *can* use extension module by directly importing
# the from a zip-archive - without the need to unpack them to the file
# system. The bundle_files option specifies where the extension modules,
# the python dll itself, and other needed dlls are put.
#
# bundle_files == 3:
# Extension modules, the Python dll and other needed dlls are
# copied into the directory where the zipfile or the exe/dll files
# are created, and loaded in the normal way.
#
# bundle_files == 2:
# Extension modules are put into the library ziparchive and loaded
# from it directly.
# The Python dll and any other needed dlls are copied into the
# directory where the zipfile or the exe/dll files are created,
# and loaded in the normal way.
#
# bundle_files == 1:
# Extension modules and the Python dll are put into the zipfile or
# the exe/dll files, and everything is loaded without unpacking to
# the file system. This does not work for some dlls, so use with
# caution.
#
# bundle_files == 0:
# Extension modules, the Python dll, and other needed dlls are put
# into the zipfile or the exe/dll files, and everything is loaded
# without unpacking to the file system. This does not work for
# some dlls, so use with caution.
py2exe_options = dict(
packages = [],
## excludes = "tof_specials Tkinter".split(),
## ignores = "dotblas gnosis.xml.pickle.parsers._cexpat mx.DateTime".split(),
## dll_excludes = "MSVCP90.dll mswsock.dll powrprof.dll".split(),
optimize=0,
compressed=False, # uncompressed may or may not have a faster startup
bundle_files=3,
dist_dir='dist',
)
# Some options can be overridden by command line options...
setup(name="name",
# console based executables
console=[myscript],
# windows subsystem executables (no console)
windows=[],
# py2exe options
zipfile=None,
options={"py2exe": py2exe_options},
)
| 32.720238 | 118 | 0.65272 |
from distutils.core import setup
import py2exe
class Target(object):
def __init__(self, **kw):
self.__dict__.update(kw)
def copy(self):
return Target(**self.__dict__)
def __setitem__(self, name, value):
self.__dict__[name] = value
RT_BITMAP = 2
RT_MANIFEST = 24
manifest_template = '''<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity
version="5.0.0.0"
processorArchitecture="*"
name="%(prog)s"
type="win32"
/>
<description>%(prog)s</description>
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel
level="%(level)s"
uiAccess="false">
</requestedExecutionLevel>
</requestedPrivileges>
</security>
</trustInfo>
<dependency>
<dependentAssembly>
<assemblyIdentity
type="win32"
name="Microsoft.Windows.Common-Controls"
version="6.0.0.0"
processorArchitecture="*"
publicKeyToken="6595b64144ccf1df"
language="*"
/>
</dependentAssembly>
</dependency>
</assembly>
'''
myscript = Target(
script="server.py",
other_resources = [(RT_MANIFEST, 1, (manifest_template % dict(prog="server", level="asInvoker")).encode("utf-8")),
]
)
py2exe_options = dict(
packages = [],
None,
options={"py2exe": py2exe_options},
)
| true | true |
f72ffc1214ecd15928542a7ea9a9182e72d89e05 | 798 | py | Python | create_tables.py | RammySekham/Creating-Cloud-Datawarehouse | 62a92a225c3b59d0fed118453651159ccdf8ff38 | [
"MIT"
] | null | null | null | create_tables.py | RammySekham/Creating-Cloud-Datawarehouse | 62a92a225c3b59d0fed118453651159ccdf8ff38 | [
"MIT"
] | null | null | null | create_tables.py | RammySekham/Creating-Cloud-Datawarehouse | 62a92a225c3b59d0fed118453651159ccdf8ff38 | [
"MIT"
] | null | null | null | import configparser
import psycopg2
from sql_queries import create_table_queries, drop_table_queries
def drop_tables(cur, conn):
    """Remove any pre-existing tables so the schema can be rebuilt cleanly.

    Executes every statement in the module-level ``drop_table_queries`` list,
    committing on *conn* after each one.
    """
    for drop_sql in drop_table_queries:
        cur.execute(drop_sql)
        conn.commit()
def create_tables(cur, conn):
    """Create the warehouse tables defined in ``create_table_queries``.

    Executes every CREATE statement in order, committing on *conn* after
    each one so partial progress survives a later failure.
    """
    for create_sql in create_table_queries:
        cur.execute(create_sql)
        conn.commit()
def main():
    """Connect to the Redshift cluster from dwh.cfg and rebuild all tables."""
    config = configparser.ConfigParser()
    config.read('dwh.cfg')
    # NOTE(review): relies on the [CLUSTER] keys appearing in exactly
    # host/dbname/user/password/port order in dwh.cfg — verify the config file.
    conn = psycopg2.connect("host={} dbname={} user={} password={} port={}".format(*config['CLUSTER'].values()))
    cur = conn.cursor()
    # Drop first so the script is idempotent across runs.
    drop_tables(cur, conn)
    create_tables(cur, conn)
    conn.close()
if __name__ == "__main__":
main() | 19 | 112 | 0.631579 | import configparser
import psycopg2
from sql_queries import create_table_queries, drop_table_queries
def drop_tables(cur, conn):
for query in drop_table_queries:
cur.execute(query)
conn.commit()
def create_tables(cur, conn):
for query in create_table_queries:
cur.execute(query)
conn.commit()
def main():
config = configparser.ConfigParser()
config.read('dwh.cfg')
conn = psycopg2.connect("host={} dbname={} user={} password={} port={}".format(*config['CLUSTER'].values()))
cur = conn.cursor()
drop_tables(cur, conn)
create_tables(cur, conn)
conn.close()
if __name__ == "__main__":
main() | true | true |
f72ffc4b88ea7b559670f1d9a1678141ddbe338d | 4,559 | py | Python | selfdrive/controls/lib/latcontrol_torque.py | salah608/OPENPILOT | be214b44947d2a52571b1031c25dde5d54a5fe10 | [
"MIT"
] | 1 | 2022-03-31T05:07:44.000Z | 2022-03-31T05:07:44.000Z | selfdrive/controls/lib/latcontrol_torque.py | salah608/OPENPILOT | be214b44947d2a52571b1031c25dde5d54a5fe10 | [
"MIT"
] | null | null | null | selfdrive/controls/lib/latcontrol_torque.py | salah608/OPENPILOT | be214b44947d2a52571b1031c25dde5d54a5fe10 | [
"MIT"
] | 1 | 2019-07-04T05:35:42.000Z | 2019-07-04T05:35:42.000Z | import math
from cereal import log
from common.numpy_fast import interp
from selfdrive.controls.lib.latcontrol import LatControl, MIN_STEER_SPEED
from selfdrive.controls.lib.pid import PIDController
from selfdrive.controls.lib.drive_helpers import apply_deadzone
from selfdrive.controls.lib.vehicle_model import ACCELERATION_DUE_TO_GRAVITY
# At higher speeds (25+mph) we can assume:
# Lateral acceleration achieved by a specific car correlates to
# torque applied to the steering rack. It does not correlate to
# wheel slip, or to speed.
# This controller applies torque to achieve desired lateral
# accelerations. To compensate for the low speed effects we
# use a LOW_SPEED_FACTOR in the error. Additionally, there is
# friction in the steering wheel that needs to be overcome to
# move it at all, this is compensated for too.
FRICTION_THRESHOLD = 0.2
def set_torque_tune(tune, MAX_LAT_ACCEL=2.5, FRICTION=0.01, steering_angle_deadzone_deg=0.0):
  """Configure *tune* for the torque-based lateral controller.

  The controller works in lateral-acceleration units, so the proportional
  and feedforward gains are the reciprocal of the maximum lateral
  acceleration the car can achieve; the integral gain is a tenth of that.
  """
  tune.init('torque')
  torque = tune.torque
  torque.useSteeringAngle = True
  torque.kp = 1.0 / MAX_LAT_ACCEL
  torque.kf = 1.0 / MAX_LAT_ACCEL
  torque.ki = 0.1 / MAX_LAT_ACCEL
  torque.friction = FRICTION
  torque.steeringAngleDeadzoneDeg = steering_angle_deadzone_deg
class LatControlTorque(LatControl):
  """Lateral controller that commands steering torque to track a desired
  lateral acceleration (see the module header for the modelling rationale)."""

  def __init__(self, CP, CI):
    super().__init__(CP, CI)
    # PID operates in lateral-acceleration units; output clamped to steer_max.
    self.pid = PIDController(CP.lateralTuning.torque.kp, CP.lateralTuning.torque.ki,
                             k_f=CP.lateralTuning.torque.kf, pos_limit=self.steer_max, neg_limit=-self.steer_max)
    self.get_steer_feedforward = CI.get_steer_feedforward_function()
    self.use_steering_angle = CP.lateralTuning.torque.useSteeringAngle
    self.friction = CP.lateralTuning.torque.friction
    self.kf = CP.lateralTuning.torque.kf
    self.steering_angle_deadzone_deg = CP.lateralTuning.torque.steeringAngleDeadzoneDeg

  def update(self, active, CS, VM, params, last_actuators, desired_curvature, desired_curvature_rate, llk):
    """Compute the steering torque command for one control step.

    Returns (output_torque, desired_angle, pid_log); the torque is negated
    on return (left-positive convention — see TODO below) and the angle
    slot is unused (0.0) for this controller.
    """
    pid_log = log.ControlsState.LateralTorqueState.new_message()

    if CS.vEgo < MIN_STEER_SPEED or not active:
      # Below the minimum steer speed or when inactive, command no torque.
      output_torque = 0.0
      pid_log.active = False
    else:
      if self.use_steering_angle:
        # Measured curvature from the steering angle via the vehicle model.
        actual_curvature = -VM.calc_curvature(math.radians(CS.steeringAngleDeg - params.angleOffsetDeg), CS.vEgo, params.roll)
        curvature_deadzone = abs(VM.calc_curvature(math.radians(self.steering_angle_deadzone_deg), CS.vEgo, 0.0))
      else:
        # Blend vehicle-model curvature (low speed) with localizer yaw-rate
        # curvature (higher speed) between 2 and 5 m/s.
        actual_curvature_vm = -VM.calc_curvature(math.radians(CS.steeringAngleDeg - params.angleOffsetDeg), CS.vEgo, params.roll)
        actual_curvature_llk = llk.angularVelocityCalibrated.value[2] / CS.vEgo
        actual_curvature = interp(CS.vEgo, [2.0, 5.0], [actual_curvature_vm, actual_curvature_llk])
        curvature_deadzone = 0.0
      desired_lateral_accel = desired_curvature * CS.vEgo ** 2

      # desired rate is the desired rate of change in the setpoint, not the absolute desired curvature
      #desired_lateral_jerk = desired_curvature_rate * CS.vEgo ** 2
      actual_lateral_accel = actual_curvature * CS.vEgo ** 2
      lateral_accel_deadzone = curvature_deadzone * CS.vEgo ** 2

      # LOW_SPEED_FACTOR: curvature term dominates the error at low speed,
      # where lateral accel alone is a poor proxy for steering torque.
      low_speed_factor = interp(CS.vEgo, [0, 15], [500, 0])
      setpoint = desired_lateral_accel + low_speed_factor * desired_curvature
      measurement = actual_lateral_accel + low_speed_factor * actual_curvature
      error = apply_deadzone(setpoint - measurement, lateral_accel_deadzone)
      pid_log.error = error

      # Feedforward: desired lateral accel minus the gravity component from road roll.
      ff = desired_lateral_accel - params.roll * ACCELERATION_DUE_TO_GRAVITY
      # convert friction into lateral accel units for feedforward
      friction_compensation = interp(error, [-FRICTION_THRESHOLD, FRICTION_THRESHOLD], [-self.friction, self.friction])
      ff += friction_compensation / self.kf
      # Hold the integrator when the driver overrides or at very low speed.
      freeze_integrator = CS.steeringRateLimited or CS.steeringPressed or CS.vEgo < 5
      output_torque = self.pid.update(error,
                                      feedforward=ff,
                                      speed=CS.vEgo,
                                      freeze_integrator=freeze_integrator)
      pid_log.active = True
      pid_log.p = self.pid.p
      pid_log.i = self.pid.i
      pid_log.d = self.pid.d
      pid_log.f = self.pid.f
      pid_log.output = -output_torque
      pid_log.saturated = self._check_saturation(self.steer_max - abs(output_torque) < 1e-3, CS)
      pid_log.actualLateralAccel = actual_lateral_accel
      pid_log.desiredLateralAccel = desired_lateral_accel

    # TODO left is positive in this convention
    return -output_torque, 0.0, pid_log
| 47 | 129 | 0.733055 | import math
from cereal import log
from common.numpy_fast import interp
from selfdrive.controls.lib.latcontrol import LatControl, MIN_STEER_SPEED
from selfdrive.controls.lib.pid import PIDController
from selfdrive.controls.lib.drive_helpers import apply_deadzone
from selfdrive.controls.lib.vehicle_model import ACCELERATION_DUE_TO_GRAVITY
FRICTION_THRESHOLD = 0.2
def set_torque_tune(tune, MAX_LAT_ACCEL=2.5, FRICTION=0.01, steering_angle_deadzone_deg=0.0):
tune.init('torque')
tune.torque.useSteeringAngle = True
tune.torque.kp = 1.0 / MAX_LAT_ACCEL
tune.torque.kf = 1.0 / MAX_LAT_ACCEL
tune.torque.ki = 0.1 / MAX_LAT_ACCEL
tune.torque.friction = FRICTION
tune.torque.steeringAngleDeadzoneDeg = steering_angle_deadzone_deg
class LatControlTorque(LatControl):
def __init__(self, CP, CI):
super().__init__(CP, CI)
self.pid = PIDController(CP.lateralTuning.torque.kp, CP.lateralTuning.torque.ki,
k_f=CP.lateralTuning.torque.kf, pos_limit=self.steer_max, neg_limit=-self.steer_max)
self.get_steer_feedforward = CI.get_steer_feedforward_function()
self.use_steering_angle = CP.lateralTuning.torque.useSteeringAngle
self.friction = CP.lateralTuning.torque.friction
self.kf = CP.lateralTuning.torque.kf
self.steering_angle_deadzone_deg = CP.lateralTuning.torque.steeringAngleDeadzoneDeg
def update(self, active, CS, VM, params, last_actuators, desired_curvature, desired_curvature_rate, llk):
pid_log = log.ControlsState.LateralTorqueState.new_message()
if CS.vEgo < MIN_STEER_SPEED or not active:
output_torque = 0.0
pid_log.active = False
else:
if self.use_steering_angle:
actual_curvature = -VM.calc_curvature(math.radians(CS.steeringAngleDeg - params.angleOffsetDeg), CS.vEgo, params.roll)
curvature_deadzone = abs(VM.calc_curvature(math.radians(self.steering_angle_deadzone_deg), CS.vEgo, 0.0))
else:
actual_curvature_vm = -VM.calc_curvature(math.radians(CS.steeringAngleDeg - params.angleOffsetDeg), CS.vEgo, params.roll)
actual_curvature_llk = llk.angularVelocityCalibrated.value[2] / CS.vEgo
actual_curvature = interp(CS.vEgo, [2.0, 5.0], [actual_curvature_vm, actual_curvature_llk])
curvature_deadzone = 0.0
desired_lateral_accel = desired_curvature * CS.vEgo ** 2
actual_lateral_accel = actual_curvature * CS.vEgo ** 2
lateral_accel_deadzone = curvature_deadzone * CS.vEgo ** 2
low_speed_factor = interp(CS.vEgo, [0, 15], [500, 0])
setpoint = desired_lateral_accel + low_speed_factor * desired_curvature
measurement = actual_lateral_accel + low_speed_factor * actual_curvature
error = apply_deadzone(setpoint - measurement, lateral_accel_deadzone)
pid_log.error = error
ff = desired_lateral_accel - params.roll * ACCELERATION_DUE_TO_GRAVITY
friction_compensation = interp(error, [-FRICTION_THRESHOLD, FRICTION_THRESHOLD], [-self.friction, self.friction])
ff += friction_compensation / self.kf
freeze_integrator = CS.steeringRateLimited or CS.steeringPressed or CS.vEgo < 5
output_torque = self.pid.update(error,
feedforward=ff,
speed=CS.vEgo,
freeze_integrator=freeze_integrator)
pid_log.active = True
pid_log.p = self.pid.p
pid_log.i = self.pid.i
pid_log.d = self.pid.d
pid_log.f = self.pid.f
pid_log.output = -output_torque
pid_log.saturated = self._check_saturation(self.steer_max - abs(output_torque) < 1e-3, CS)
pid_log.actualLateralAccel = actual_lateral_accel
pid_log.desiredLateralAccel = desired_lateral_accel
return -output_torque, 0.0, pid_log
| true | true |
f72ffc7299c2bfe078237263c5c34c71dccfc1d9 | 250 | py | Python | manage.py | muhiza/bestb | 3c25db0b31c736a59e6a6623615da50a1ab5f196 | [
"MIT"
] | 110 | 2016-11-25T14:25:10.000Z | 2022-02-16T08:25:57.000Z | manage.py | muhiza/bestb | 3c25db0b31c736a59e6a6623615da50a1ab5f196 | [
"MIT"
] | 86 | 2016-11-13T10:04:07.000Z | 2022-03-11T23:14:01.000Z | manage.py | muhiza/bestb | 3c25db0b31c736a59e6a6623615da50a1ab5f196 | [
"MIT"
] | 21 | 2016-12-06T15:03:44.000Z | 2021-12-30T11:38:19.000Z | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
    # Point Django at the project settings module before any framework import.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sciblog.settings")
    from django.core.management import execute_from_command_line
    # Dispatch the management command (runserver, migrate, ...) from argv.
    execute_from_command_line(sys.argv)
| 22.727273 | 71 | 0.772 |
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "sciblog.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| true | true |
f72ffcc2bb7815a6350f846fc32861403f679efd | 7 | py | Python | tests/unit/conftest.py | bernease/whylogs-python | cfd2a2f71280537aae584cbd40a752fbe7da647b | [
"Apache-2.0"
] | null | null | null | tests/unit/conftest.py | bernease/whylogs-python | cfd2a2f71280537aae584cbd40a752fbe7da647b | [
"Apache-2.0"
] | null | null | null | tests/unit/conftest.py | bernease/whylogs-python | cfd2a2f71280537aae584cbd40a752fbe7da647b | [
"Apache-2.0"
] | null | null | null | # | 7 | 7 | 0 | true | true | |
f72ffd82aa9586a214a2d9cf5db0f17af8e80cc5 | 1,026 | py | Python | tests/helpers.py | der-gabe/pynonymizer | 3e53bb1f27c2446672f7c2794009354dc8d95ace | [
"MIT"
] | null | null | null | tests/helpers.py | der-gabe/pynonymizer | 3e53bb1f27c2446672f7c2794009354dc8d95ace | [
"MIT"
] | null | null | null | tests/helpers.py | der-gabe/pynonymizer | 3e53bb1f27c2446672f7c2794009354dc8d95ace | [
"MIT"
] | null | null | null | import re
import pytest
from contextlib import contextmanager
class AnyObject:
    """Wildcard matcher: compares equal to absolutely any value."""

    def __eq__(self, _actual):
        return True

    def __ne__(self, _other):
        return False
class SuperdictOf:
    """Matcher: equal to any dict containing at least the given key/value pairs."""

    def __init__(self, required_dict):
        self.required_dict = required_dict

    def _is_superset(self, actual):
        # dict_items supports subset comparison via <=.
        return self.required_dict.items() <= actual.items()

    def __eq__(self, actual):
        return self._is_superset(actual)

    def __ne__(self, actual):
        return not self._is_superset(actual)
class ComparableRegex:
    """Matcher: equal to any string whose beginning matches the given pattern.

    Note: uses ``re.match`` (anchored at the start only), not ``fullmatch``.
    """

    def __init__(self, pattern, flags=0):
        self._regex = re.compile(pattern, flags)

    def __eq__(self, actual):
        return self._regex.match(actual) is not None

    def __repr__(self):
        return self._regex.pattern
@contextmanager
def not_raises(exception):
    """Inverse of ``pytest.raises``: fail the test if *exception* escapes the block."""
    try:
        yield
    except exception:
        # pytest.fail raises Failed itself; the raise is belt-and-braces.
        raise pytest.fail("DID RAISE {0}".format(exception))
def list_rindex(alist, value):
    """Return the index of the LAST occurrence of *value* in *alist*.

    Raises ValueError if *value* is absent (mirrors ``list.index``).
    """
    offset_from_end = alist[::-1].index(value)
    return len(alist) - 1 - offset_from_end
import pytest
from contextlib import contextmanager
class AnyObject:
def __eq__(self, actual):
return True
def __ne__(self, other):
return False
class SuperdictOf:
def __init__(self, required_dict):
self.required_dict = required_dict
def __eq__(self, actual):
return self.required_dict.items() <= actual.items()
def __ne__(self, actual):
return not(self.required_dict.items() <= actual.items())
class ComparableRegex:
def __init__(self, pattern, flags=0):
self._regex = re.compile(pattern, flags)
def __eq__(self, actual):
return bool(self._regex.match(actual))
def __repr__(self):
return self._regex.pattern
@contextmanager
def not_raises(exception):
try:
yield
except exception:
raise pytest.fail("DID RAISE {0}".format(exception))
def list_rindex(alist, value):
return len(alist) - alist[-1::-1].index(value) - 1 | true | true |
f72ffe71d2e1e836e254e19dc8302d96f10fbef4 | 12,747 | py | Python | Examples/Tests/PythonWrappers/PICMI_inputs_2d.py | oshapoval/WarpX | 84d687da21ee93db67fdc43efec8a9cc80d0e6f9 | [
"BSD-3-Clause-LBNL"
] | null | null | null | Examples/Tests/PythonWrappers/PICMI_inputs_2d.py | oshapoval/WarpX | 84d687da21ee93db67fdc43efec8a9cc80d0e6f9 | [
"BSD-3-Clause-LBNL"
] | null | null | null | Examples/Tests/PythonWrappers/PICMI_inputs_2d.py | oshapoval/WarpX | 84d687da21ee93db67fdc43efec8a9cc80d0e6f9 | [
"BSD-3-Clause-LBNL"
] | null | null | null | import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1.axes_divider import make_axes_locatable
from pywarpx import picmi
# Number of time steps
max_steps = 100
# Grid
nx = 128
nz = 128
# Domain
xmin = 0.e-6
zmin = 0.e-6
xmax = 50.e-6
zmax = 50.e-6
# Cell size
dx = (xmax - xmin) / nx
dz = (zmax - zmin) / nz
# Domain decomposition
max_grid_size_x = 64
max_grid_size_z = 64
# PML
nxpml = 10
nzpml = 10
field_boundary = ['open', 'open']
# Spectral order
nox = 8
noz = 8
# Guard cells
nxg = 8
nzg = 8
# Initialize grid
grid = picmi.Cartesian2DGrid(number_of_cells = [nx,nz],
lower_bound = [xmin,zmin],
upper_bound = [xmax,zmax],
lower_boundary_conditions = field_boundary,
upper_boundary_conditions = field_boundary,
guard_cells = [nxg,nzg],
moving_window_velocity = [0.,0.,0],
warpx_max_grid_size_x = max_grid_size_x,
warpx_max_grid_size_y = max_grid_size_z)
# Initialize field solver
solver = picmi.ElectromagneticSolver(grid=grid, cfl=0.95, method='PSATD',
stencil_order = [nox,noz],
divE_cleaning = 1,
divB_cleaning = 1,
pml_divE_cleaning = 1,
pml_divB_cleaning = 1,
warpx_psatd_update_with_rho = True)
# Initialize diagnostics
diag_field_list = ["E", "B"]
field_diag = picmi.FieldDiagnostic(name = 'diag1',
grid = grid,
period = 10,
write_dir = '.',
warpx_file_prefix = 'Python_wrappers_plt',
data_list = diag_field_list)
# Initialize simulation
sim = picmi.Simulation(solver = solver,
max_steps = max_steps,
verbose = 1,
particle_shape = 'cubic',
warpx_current_deposition_algo = 'direct',
warpx_particle_pusher_algo = 'boris',
warpx_field_gathering_algo = 'energy-conserving',
warpx_use_filter = 1)
# Add diagnostics to simulation
sim.add_diagnostic(field_diag)
# Write input file to run with compiled version
sim.write_input_file(file_name = 'inputs_2d')
# Whether to include guard cells in data returned by Python wrappers
include_ghosts = 1
# Compute min and max of fields data
def compute_minmax(data):
    """Return a symmetric (vmin, vmax) colour range centred on zero."""
    vmax = np.max(np.abs(data))
    return -vmax, vmax
# Plot fields data either in valid domain or in PML
def plot_data(data, pml, title, name):
    """Render a 2D field slice as an image and save it to 'figure_<name>.png'.

    When *pml* is True the plot covers the PML and ghost regions (with dashed
    boundary lines and annotations); otherwise only the valid domain plus
    ghost cells.  Reads module-level grid globals nx, nz, nxg, nzg, nxpml,
    nzpml — assumes those match the shape of *data*.
    """
    fig, ax = plt.subplots(nrows = 1, ncols = 1, gridspec_kw = dict(wspace = 0.5), figsize = [6,5])
    cax = make_axes_locatable(ax).append_axes('right', size='5%', pad='5%')
    lw = 0.8
    ls = '--'
    if pml:
        # Draw PMLs and ghost regions
        ax.axvline(x = 0       , linewidth = lw, linestyle = ls)
        ax.axvline(x = 0+nxg   , linewidth = lw, linestyle = ls)
        ax.axvline(x = -nxpml  , linewidth = lw, linestyle = ls)
        ax.axvline(x = nx      , linewidth = lw, linestyle = ls)
        ax.axvline(x = nx-nxg  , linewidth = lw, linestyle = ls)
        ax.axvline(x = nx+nxpml, linewidth = lw, linestyle = ls)
        ax.axhline(y = 0       , linewidth = lw, linestyle = ls)
        ax.axhline(y = 0+nzg   , linewidth = lw, linestyle = ls)
        ax.axhline(y = -nzpml  , linewidth = lw, linestyle = ls)
        ax.axhline(y = nz      , linewidth = lw, linestyle = ls)
        ax.axhline(y = nz-nzg  , linewidth = lw, linestyle = ls)
        ax.axhline(y = nz+nzpml, linewidth = lw, linestyle = ls)
        # Annotations
        ax.annotate('PML', xy = (-nxpml//2,nz//2), rotation = 'vertical', ha = 'center', va = 'center')
        ax.annotate('PML', xy = (nx+nxpml//2,nz//2), rotation = 'vertical', ha = 'center', va = 'center')
        ax.annotate('PML', xy = (nx//2,-nzpml//2), rotation = 'horizontal', ha = 'center', va = 'center')
        ax.annotate('PML', xy = (nx//2,nz+nzpml//2), rotation = 'horizontal', ha = 'center', va = 'center')
        ax.annotate('PML ghost', xy = (nxg//2,nz//2), rotation = 'vertical', ha = 'center', va = 'center')
        ax.annotate('PML ghost', xy = (-nxpml-nxg//2,nz//2), rotation = 'vertical', ha = 'center', va = 'center')
        ax.annotate('PML ghost', xy = (nx-nxg//2,nz//2), rotation = 'vertical', ha = 'center', va = 'center')
        ax.annotate('PML ghost', xy = (nx+nxpml+nxg//2,nz//2), rotation = 'vertical', ha = 'center', va = 'center')
        ax.annotate('PML ghost', xy = (nx//2,nzg//2), rotation = 'horizontal', ha = 'center', va = 'center')
        ax.annotate('PML ghost', xy = (nx//2,-nzpml-nzg//2), rotation = 'horizontal', ha = 'center', va = 'center')
        ax.annotate('PML ghost', xy = (nx//2,nz-nzg//2), rotation = 'horizontal', ha = 'center', va = 'center')
        ax.annotate('PML ghost', xy = (nx//2,nz+nzpml+nzg//2), rotation = 'horizontal', ha = 'center', va = 'center')
        # Set extent and sliced data
        extent = np.array([-nxg-nxpml, nx+nxpml+nxg, -nzg-nzpml, nz+nzpml+nzg])
    else:
        # Draw ghost regions
        ax.axvline(x = 0 , linewidth = lw, linestyle = ls)
        ax.axvline(x = nx, linewidth = lw, linestyle = ls)
        ax.axhline(y = 0 , linewidth = lw, linestyle = ls)
        ax.axhline(y = nz, linewidth = lw, linestyle = ls)
        # Annotations
        ax.annotate('ghost', xy = (-nxg//2,nz//2), rotation = 'vertical', ha = 'center', va = 'center')
        ax.annotate('ghost', xy = (nx+nxg//2,nz//2), rotation = 'vertical', ha = 'center', va = 'center')
        ax.annotate('ghost', xy = (nx//2,-nzg//2), rotation = 'horizontal', ha = 'center', va = 'center')
        ax.annotate('ghost', xy = (nx//2,nz+nzg//2), rotation = 'horizontal', ha = 'center', va = 'center')
        # Set extent and sliced data
        extent = np.array([-nxg, nx+nxg, -nzg, nz+nzg])
    # Transpose so x runs horizontally and z vertically in the image.
    X = data[:,:].transpose()
    # Min and max for colorbar
    vmin, vmax = compute_minmax(X)
    # Display data as image
    im = ax.imshow(X = X, origin = 'lower', extent = extent, vmin = vmin, vmax = vmax, cmap = 'seismic')
    # Add colorbar to plot
    fig.colorbar(im, cax = cax)
    # Set label for x- and y-axis, set title
    ax.set_xlabel('x')
    ax.set_ylabel('z')
    ax.set_title(title)
    # Set plot title
    suptitle = 'PML in (x,z), 4 grids 64 x 64'
    plt.suptitle(suptitle)
    # Save figure
    figname = 'figure_' + name + '.png'
    fig.savefig(figname, dpi = 100)
# Initialize fields data (unit pulse) and apply smoothing
def init_data(data):
    """Write a smoothed unit pulse (3x3 binomial kernel) at the domain centre.

    Relies on the module-level grid sizes nx and nz.
    """
    kernel_1d = np.array([0.25, 0.5, 0.25])
    kernel_2d = np.outer(kernel_1d, kernel_1d)
    data[nx//2-1:nx//2+2, nz//2-1:nz//2+2] = kernel_2d
# Initialize inputs and WarpX instance
sim.initialize_inputs()
sim.initialize_warpx()
# Get fields data using Python wrappers
import pywarpx.fields as pwxf
Ex = pwxf.ExFPWrapper(include_ghosts = include_ghosts)
Ey = pwxf.EyFPWrapper(include_ghosts = include_ghosts)
Ez = pwxf.EzFPWrapper(include_ghosts = include_ghosts)
Bx = pwxf.BxFPWrapper(include_ghosts = include_ghosts)
By = pwxf.ByFPWrapper(include_ghosts = include_ghosts)
Bz = pwxf.BzFPWrapper(include_ghosts = include_ghosts)
F = pwxf.FFPWrapper(include_ghosts = include_ghosts)
G = pwxf.GFPWrapper(include_ghosts = include_ghosts)
Expml = pwxf.ExFPPMLWrapper(include_ghosts = include_ghosts)
Eypml = pwxf.EyFPPMLWrapper(include_ghosts = include_ghosts)
Ezpml = pwxf.EzFPPMLWrapper(include_ghosts = include_ghosts)
Bxpml = pwxf.BxFPPMLWrapper(include_ghosts = include_ghosts)
Bypml = pwxf.ByFPPMLWrapper(include_ghosts = include_ghosts)
Bzpml = pwxf.BzFPPMLWrapper(include_ghosts = include_ghosts)
Fpml = pwxf.FFPPMLWrapper(include_ghosts = include_ghosts)
Gpml = pwxf.GFPPMLWrapper(include_ghosts = include_ghosts)
# Initialize fields data in valid domain
init_data(Ex)
init_data(Ey)
init_data(Ez)
init_data(Bx)
init_data(By)
init_data(Bz)
init_data(F)
init_data(G)
# Advance simulation until last time step
sim.step(max_steps)
# Plot E
plot_data(Ex, pml = False, title = 'Ex', name = 'Ex')
plot_data(Ey, pml = False, title = 'Ey', name = 'Ey')
plot_data(Ez, pml = False, title = 'Ez', name = 'Ez')
# Plot B
plot_data(Bx, pml = False, title = 'Bx', name = 'Bx')
plot_data(By, pml = False, title = 'By', name = 'By')
plot_data(Bz, pml = False, title = 'Bz', name = 'Bz')
# F and G
plot_data(F, pml = False, title = 'F', name = 'F')
plot_data(G, pml = False, title = 'G', name = 'G')
# Plot E in PML
plot_data(Expml[:,:,0], pml = True, title = 'Exy in PML', name = 'Exy')
plot_data(Expml[:,:,1], pml = True, title = 'Exz in PML', name = 'Exz')
plot_data(Expml[:,:,2], pml = True, title = 'Exx in PML', name = 'Exx')
plot_data(Eypml[:,:,0], pml = True, title = 'Eyz in PML', name = 'Eyz')
plot_data(Eypml[:,:,1], pml = True, title = 'Eyx in PML', name = 'Eyx')
plot_data(Eypml[:,:,2], pml = True, title = 'Eyy in PML', name = 'Eyy') # zero
plot_data(Ezpml[:,:,0], pml = True, title = 'Ezx in PML', name = 'Ezx')
plot_data(Ezpml[:,:,1], pml = True, title = 'Ezy in PML', name = 'Ezy') # zero
plot_data(Ezpml[:,:,2], pml = True, title = 'Ezz in PML', name = 'Ezz')
# Plot B in PML
plot_data(Bxpml[:,:,0], pml = True, title = 'Bxy in PML', name = 'Bxy')
plot_data(Bxpml[:,:,1], pml = True, title = 'Bxz in PML', name = 'Bxz')
plot_data(Bxpml[:,:,2], pml = True, title = 'Bxx in PML', name = 'Bxx')
plot_data(Bypml[:,:,0], pml = True, title = 'Byz in PML', name = 'Byz')
plot_data(Bypml[:,:,1], pml = True, title = 'Byx in PML', name = 'Byx')
plot_data(Bypml[:,:,2], pml = True, title = 'Byy in PML', name = 'Byy') # zero
plot_data(Bzpml[:,:,0], pml = True, title = 'Bzx in PML', name = 'Bzx')
plot_data(Bzpml[:,:,1], pml = True, title = 'Bzy in PML', name = 'Bzy') # zero
plot_data(Bzpml[:,:,2], pml = True, title = 'Bzz in PML', name = 'Bzz')
# Plot F and G in PML
plot_data(Fpml[:,:,0], pml = True, title = 'Fx in PML', name = 'Fx')
plot_data(Fpml[:,:,1], pml = True, title = 'Fy in PML', name = 'Fy')
plot_data(Fpml[:,:,2], pml = True, title = 'Fz in PML', name = 'Fz')
plot_data(Gpml[:,:,0], pml = True, title = 'Gx in PML', name = 'Gx')
plot_data(Gpml[:,:,1], pml = True, title = 'Gy in PML', name = 'Gy')
plot_data(Gpml[:,:,2], pml = True, title = 'Gz in PML', name = 'Gz')
# Check values with benchmarks (precomputed from the same Python arrays)
def check_values(benchmark, data, rtol, atol):
    """Check the L1 norm (sum of absolute values) of *data* against *benchmark*.

    Parameters:
        benchmark: expected sum of |data| (reference value precomputed from
                   the same Python arrays)
        data: 2D array-like field slice
        rtol, atol: relative/absolute tolerances forwarded to np.allclose
    Raises AssertionError with a diagnostic message when the norms differ.
    """
    observed = np.sum(np.abs(data[:,:]))
    # Include both values in the failure message so a regression is readable
    # directly from the CI log (the original bare assert reported nothing).
    assert np.allclose(benchmark, observed, rtol = rtol, atol = atol), \
        "L1-norm mismatch: expected {}, observed {}".format(benchmark, observed)
rtol = 1e-09
atol = 1e-12
# (expected L1 norm, field slice) pairs; zero entries correspond to split
# components that must vanish identically in this 2D configuration.
l1_benchmarks = (
    # E
    (1013263608.6369569, Ex[:, :]),
    (717278253.4505507, Ey[:, :]),
    (717866566.5718911, Ez[:, :]),
    # B
    (3.0214509313437636, Bx[:, :]),
    (3.0242765102729985, By[:, :]),
    (3.0214509326970465, Bz[:, :]),
    # F and G
    (3.0188584528062377, F[:, :]),
    (1013672631.8764204, G[:, :]),
    # E in PML
    (364287936.1526477, Expml[:, :, 0]),
    (183582351.3212558, Expml[:, :, 1]),
    (190065766.41491824, Expml[:, :, 2]),
    (440581905.9336025, Eypml[:, :, 0]),
    (178117293.6629357, Eypml[:, :, 1]),
    (0.0, Eypml[:, :, 2]),
    (430277101.26568377, Ezpml[:, :, 0]),
    (0.0, Ezpml[:, :, 1]),
    (190919663.2167449, Ezpml[:, :, 2]),
    # B in PML
    (1.0565189315366146, Bxpml[:, :, 0]),
    (0.4618191395098556, Bxpml[:, :, 1]),
    (0.6849858273929585, Bxpml[:, :, 2]),
    (1.7228584190213505, Bypml[:, :, 0]),
    (0.47697331996765685, Bypml[:, :, 1]),
    (0.0, Bypml[:, :, 2]),
    (1.5183380774611628, Bzpml[:, :, 0]),
    (0.0, Bzpml[:, :, 1]),
    (0.6849858291863835, Bzpml[:, :, 2]),
    # F and G in PML
    (1.7808748509425263, Fpml[:, :, 0]),
    (0.0, Fpml[:, :, 1]),
    (0.4307845604625681, Fpml[:, :, 2]),
    (536552745.42701197, Gpml[:, :, 0]),
    (0.0, Gpml[:, :, 1]),
    (196016270.97767758, Gpml[:, :, 2]),
)
for expected, field_data in l1_benchmarks:
    check_values(expected, field_data, rtol, atol)
| 43.65411 | 117 | 0.607045 | import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1.axes_divider import make_axes_locatable
from pywarpx import picmi
# Total number of PSATD time steps to run
max_steps = 100
# Number of grid cells along x and z
nx = 128
nz = 128
# Physical extent of the simulation domain [m]
xmin = 0.e-6
zmin = 0.e-6
xmax = 50.e-6
zmax = 50.e-6
# Cell sizes (derived; not used further below, kept for reference)
dx = (xmax - xmin) / nx
dz = (zmax - zmin) / nz
# Domain decomposition: maximum grid size along each direction (4 grids of 64x64)
max_grid_size_x = 64
max_grid_size_z = 64
# Number of PML cells along x and z
nxpml = 10
nzpml = 10
# Open boundaries on both axes
field_boundary = ['open', 'open']
# Spectral stencil order along x and z
nox = 8
noz = 8
# Number of guard cells along x and z
nxg = 8
nzg = 8
# 2D Cartesian grid.
# NOTE(review): warpx_max_grid_size_y receives max_grid_size_z -- presumably
# the picmi 2D interface maps its "y" keyword to the second (z) axis; confirm.
grid = picmi.Cartesian2DGrid(number_of_cells = [nx,nz],
                             lower_bound = [xmin,zmin],
                             upper_bound = [xmax,zmax],
                             lower_boundary_conditions = field_boundary,
                             upper_boundary_conditions = field_boundary,
                             guard_cells = [nxg,nzg],
                             moving_window_velocity = [0.,0.,0],
                             warpx_max_grid_size_x = max_grid_size_x,
                             warpx_max_grid_size_y = max_grid_size_z)
# PSATD electromagnetic solver with divergence cleaning enabled both in the
# regular domain and inside the PML
solver = picmi.ElectromagneticSolver(grid=grid, cfl=0.95, method='PSATD',
                                     stencil_order = [nox,noz],
                                     divE_cleaning = 1,
                                     divB_cleaning = 1,
                                     pml_divE_cleaning = 1,
                                     pml_divB_cleaning = 1,
                                     warpx_psatd_update_with_rho = True)
# Field diagnostic: dump E and B every 10 steps into Python_wrappers_plt*
diag_field_list = ["E", "B"]
field_diag = picmi.FieldDiagnostic(name = 'diag1',
                                   grid = grid,
                                   period = 10,
                                   write_dir = '.',
                                   warpx_file_prefix = 'Python_wrappers_plt',
                                   data_list = diag_field_list)
# Simulation object; no particle species are added in the visible script, so
# the particle-related algorithm choices are presumably inert here.
sim = picmi.Simulation(solver = solver,
                       max_steps = max_steps,
                       verbose = 1,
                       particle_shape = 'cubic',
                       warpx_current_deposition_algo = 'direct',
                       warpx_particle_pusher_algo = 'boris',
                       warpx_field_gathering_algo = 'energy-conserving',
                       warpx_use_filter = 1)
sim.add_diagnostic(field_diag)
sim.write_input_file(file_name = 'inputs_2d')
# Whether the field wrappers created below expose guard cells as well
include_ghosts = 1
def compute_minmax(data):
    """Return symmetric color-scale limits (-peak, +peak) for *data*,
    where peak is the maximum absolute value of the array."""
    peak = np.abs(data).max()
    return -peak, peak
def plot_data(data, pml, title, name):
    """Render one 2D field as a diverging-color image and save it to
    'figure_<name>.png'.

    data: 2D array-like field (indexed [x, z]); transposed before imshow so
          x runs horizontally.
    pml:  when True the extent covers the PML + its guard cells and the PML /
          ghost boundaries are annotated; otherwise only the regular domain
          with its guard cells is drawn.
    title: axes title; name: suffix used for the output file name.
    Relies on the module-level globals nx, nz, nxg, nzg, nxpml, nzpml.
    """
    fig, ax = plt.subplots(nrows = 1, ncols = 1, gridspec_kw = dict(wspace = 0.5), figsize = [6,5])
    # Colorbar axis attached to the right of the main axes
    cax = make_axes_locatable(ax).append_axes('right', size='5%', pad='5%')
    lw = 0.8
    ls = '--'
    if pml:
        # Dashed guide lines at domain, guard-cell and PML boundaries
        ax.axvline(x = 0      , linewidth = lw, linestyle = ls)
        ax.axvline(x = 0+nxg  , linewidth = lw, linestyle = ls)
        ax.axvline(x = -nxpml , linewidth = lw, linestyle = ls)
        ax.axvline(x = nx     , linewidth = lw, linestyle = ls)
        ax.axvline(x = nx-nxg , linewidth = lw, linestyle = ls)
        ax.axvline(x = nx+nxpml, linewidth = lw, linestyle = ls)
        ax.axhline(y = 0      , linewidth = lw, linestyle = ls)
        ax.axhline(y = 0+nzg  , linewidth = lw, linestyle = ls)
        ax.axhline(y = -nzpml , linewidth = lw, linestyle = ls)
        ax.axhline(y = nz     , linewidth = lw, linestyle = ls)
        ax.axhline(y = nz-nzg , linewidth = lw, linestyle = ls)
        ax.axhline(y = nz+nzpml, linewidth = lw, linestyle = ls)
        # Label the PML strips on all four sides ...
        ax.annotate('PML', xy = (-nxpml//2,nz//2), rotation = 'vertical', ha = 'center', va = 'center')
        ax.annotate('PML', xy = (nx+nxpml//2,nz//2), rotation = 'vertical', ha = 'center', va = 'center')
        ax.annotate('PML', xy = (nx//2,-nzpml//2), rotation = 'horizontal', ha = 'center', va = 'center')
        ax.annotate('PML', xy = (nx//2,nz+nzpml//2), rotation = 'horizontal', ha = 'center', va = 'center')
        # ... and the guard-cell strips inside and outside the PML
        ax.annotate('PML ghost', xy = (nxg//2,nz//2), rotation = 'vertical', ha = 'center', va = 'center')
        ax.annotate('PML ghost', xy = (-nxpml-nxg//2,nz//2), rotation = 'vertical', ha = 'center', va = 'center')
        ax.annotate('PML ghost', xy = (nx-nxg//2,nz//2), rotation = 'vertical', ha = 'center', va = 'center')
        ax.annotate('PML ghost', xy = (nx+nxpml+nxg//2,nz//2), rotation = 'vertical', ha = 'center', va = 'center')
        ax.annotate('PML ghost', xy = (nx//2,nzg//2), rotation = 'horizontal', ha = 'center', va = 'center')
        ax.annotate('PML ghost', xy = (nx//2,-nzpml-nzg//2), rotation = 'horizontal', ha = 'center', va = 'center')
        ax.annotate('PML ghost', xy = (nx//2,nz-nzg//2), rotation = 'horizontal', ha = 'center', va = 'center')
        ax.annotate('PML ghost', xy = (nx//2,nz+nzpml+nzg//2), rotation = 'horizontal', ha = 'center', va = 'center')
        extent = np.array([-nxg-nxpml, nx+nxpml+nxg, -nzg-nzpml, nz+nzpml+nzg])
    else:
        # Regular domain: mark domain edges and label the guard-cell strips
        ax.axvline(x = 0 , linewidth = lw, linestyle = ls)
        ax.axvline(x = nx, linewidth = lw, linestyle = ls)
        ax.axhline(y = 0 , linewidth = lw, linestyle = ls)
        ax.axhline(y = nz, linewidth = lw, linestyle = ls)
        ax.annotate('ghost', xy = (-nxg//2,nz//2), rotation = 'vertical', ha = 'center', va = 'center')
        ax.annotate('ghost', xy = (nx+nxg//2,nz//2), rotation = 'vertical', ha = 'center', va = 'center')
        ax.annotate('ghost', xy = (nx//2,-nzg//2), rotation = 'horizontal', ha = 'center', va = 'center')
        ax.annotate('ghost', xy = (nx//2,nz+nzg//2), rotation = 'horizontal', ha = 'center', va = 'center')
        extent = np.array([-nxg, nx+nxg, -nzg, nz+nzg])
    # Transpose so the first array axis (x) is horizontal in the image
    X = data[:,:].transpose()
    # Symmetric limits keep zero at the center of the diverging colormap
    vmin, vmax = compute_minmax(X)
    im = ax.imshow(X = X, origin = 'lower', extent = extent, vmin = vmin, vmax = vmax, cmap = 'seismic')
    fig.colorbar(im, cax = cax)
    ax.set_xlabel('x')
    ax.set_ylabel('z')
    ax.set_title(title)
    suptitle = 'PML in (x,z), 4 grids 64 x 64'
    plt.suptitle(suptitle)
    figname = 'figure_' + name + '.png'
    fig.savefig(figname, dpi = 100)
def init_data(data):
    """Deposit a 3x3 binomial-smoothed unit impulse at the center of the
    (nx, nz) grid; *data* is modified in place."""
    kernel = np.array([0.25, 0.5, 0.25])
    data[nx//2-1:nx//2+2, nz//2-1:nz//2+2] = np.outer(kernel, kernel)
# Generate the WarpX inputs and start the underlying simulation core
sim.initialize_inputs()
sim.initialize_warpx()
# Wrappers exposing the fine-patch fields as numpy-compatible arrays
import pywarpx.fields as pwxf
Ex = pwxf.ExFPWrapper(include_ghosts = include_ghosts)
Ey = pwxf.EyFPWrapper(include_ghosts = include_ghosts)
Ez = pwxf.EzFPWrapper(include_ghosts = include_ghosts)
Bx = pwxf.BxFPWrapper(include_ghosts = include_ghosts)
By = pwxf.ByFPWrapper(include_ghosts = include_ghosts)
Bz = pwxf.BzFPWrapper(include_ghosts = include_ghosts)
F = pwxf.FFPWrapper(include_ghosts = include_ghosts)
G = pwxf.GFPWrapper(include_ghosts = include_ghosts)
# Split-field wrappers for the PML region (third axis = split component)
Expml = pwxf.ExFPPMLWrapper(include_ghosts = include_ghosts)
Eypml = pwxf.EyFPPMLWrapper(include_ghosts = include_ghosts)
Ezpml = pwxf.EzFPPMLWrapper(include_ghosts = include_ghosts)
Bxpml = pwxf.BxFPPMLWrapper(include_ghosts = include_ghosts)
Bypml = pwxf.ByFPPMLWrapper(include_ghosts = include_ghosts)
Bzpml = pwxf.BzFPPMLWrapper(include_ghosts = include_ghosts)
Fpml = pwxf.FFPPMLWrapper(include_ghosts = include_ghosts)
Gpml = pwxf.GFPPMLWrapper(include_ghosts = include_ghosts)
# Seed every regular-domain field with a centered smoothed impulse
init_data(Ex)
init_data(Ey)
init_data(Ez)
init_data(Bx)
init_data(By)
init_data(Bz)
init_data(F)
init_data(G)
# Advance the fields for the full run
sim.step(max_steps)
# Regular-domain diagnostics: one figure per field component.
for arr, lbl in ((Ex, 'Ex'), (Ey, 'Ey'), (Ez, 'Ez'),
                 (Bx, 'Bx'), (By, 'By'), (Bz, 'Bz'),
                 (F, 'F'), (G, 'G')):
    plot_data(arr, pml = False, title = lbl, name = lbl)

# PML diagnostics: each wrapper holds its split components on the third axis.
pml_component_labels = (
    (Expml, ('Exy', 'Exz', 'Exx')),
    (Eypml, ('Eyz', 'Eyx', 'Eyy')),
    (Ezpml, ('Ezx', 'Ezy', 'Ezz')),
    (Bxpml, ('Bxy', 'Bxz', 'Bxx')),
    (Bypml, ('Byz', 'Byx', 'Byy')),
    (Bzpml, ('Bzx', 'Bzy', 'Bzz')),
    (Fpml, ('Fx', 'Fy', 'Fz')),
    (Gpml, ('Gx', 'Gy', 'Gz')),
)
for arr, lbls in pml_component_labels:
    for k, lbl in enumerate(lbls):
        plot_data(arr[:, :, k], pml = True, title = lbl + ' in PML', name = lbl)
def check_values(benchmark, data, rtol, atol):
    """Assert that the summed absolute values of *data* match *benchmark*
    within the given np.allclose tolerances."""
    total = np.abs(data[:,:]).sum()
    assert np.allclose(benchmark, total, rtol = rtol, atol = atol)
rtol = 1e-09
atol = 1e-12
# Reference L1 norms per field slice; zeros are split components expected
# to vanish identically in 2D.
expected_l1_norms = (
    (1013263608.6369569, Ex[:, :]),
    (717278253.4505507, Ey[:, :]),
    (717866566.5718911, Ez[:, :]),
    (3.0214509313437636, Bx[:, :]),
    (3.0242765102729985, By[:, :]),
    (3.0214509326970465, Bz[:, :]),
    (3.0188584528062377, F[:, :]),
    (1013672631.8764204, G[:, :]),
    (364287936.1526477, Expml[:, :, 0]),
    (183582351.3212558, Expml[:, :, 1]),
    (190065766.41491824, Expml[:, :, 2]),
    (440581905.9336025, Eypml[:, :, 0]),
    (178117293.6629357, Eypml[:, :, 1]),
    (0.0, Eypml[:, :, 2]),
    (430277101.26568377, Ezpml[:, :, 0]),
    (0.0, Ezpml[:, :, 1]),
    (190919663.2167449, Ezpml[:, :, 2]),
    (1.0565189315366146, Bxpml[:, :, 0]),
    (0.4618191395098556, Bxpml[:, :, 1]),
    (0.6849858273929585, Bxpml[:, :, 2]),
    (1.7228584190213505, Bypml[:, :, 0]),
    (0.47697331996765685, Bypml[:, :, 1]),
    (0.0, Bypml[:, :, 2]),
    (1.5183380774611628, Bzpml[:, :, 0]),
    (0.0, Bzpml[:, :, 1]),
    (0.6849858291863835, Bzpml[:, :, 2]),
    (1.7808748509425263, Fpml[:, :, 0]),
    (0.0, Fpml[:, :, 1]),
    (0.4307845604625681, Fpml[:, :, 2]),
    (536552745.42701197, Gpml[:, :, 0]),
    (0.0, Gpml[:, :, 1]),
    (196016270.97767758, Gpml[:, :, 2]),
)
for expected, field_slice in expected_l1_norms:
    check_values(expected, field_slice, rtol, atol)
| true | true |
f72ffeac425776525d48f18b9e6845cf44684f3c | 35,719 | py | Python | Script/test.py | hlebars/YoutubeDataAnalysis | 0845effcdfdf6ab3281adc25840ed090e47498c8 | [
"MIT"
] | null | null | null | Script/test.py | hlebars/YoutubeDataAnalysis | 0845effcdfdf6ab3281adc25840ed090e47498c8 | [
"MIT"
] | null | null | null | Script/test.py | hlebars/YoutubeDataAnalysis | 0845effcdfdf6ab3281adc25840ed090e47498c8 | [
"MIT"
] | null | null | null | import pandas as pd
import datetime
import numpy as np
import os
import re
import matplotlib.pyplot as plot
import pytz
# @timeit (repeat=3,number=10)
def EclatedSubPlot(SerieAfterGrpBy,ActivatePlotting,ListOfDateAndTime,Abbreviation):
    """Unstack a grouped count Series and draw one bar-chart subplot per outer group.

    SerieAfterGrpBy: pandas Series from a (possibly multi-level) groupby count;
        its outermost index level becomes the columns after unstack(level=0),
        i.e. one subplot per group value.
    ActivatePlotting: unused in this function (plotting always happens).
    ListOfDateAndTime: grouping keys (e.g. ["WeekDay", "h"]); the FIRST entry
        selects the subplot grid layout and which label mapping applies.
    Abbreviation: True -> short labels ('Mon'/'Jan'); False -> full names.
    Returns the unstacked DataFrame that was plotted.
    """
    # Zero-padded weekday index -> (abbreviated, full) English day name
    DicoDayOfWeek={
        "00":('Mon','Monday'), "01":('Tue','Tuesday'), "02":('Wed','Wednesday'), "03":('Thu','Thursday'),
        "04":('Fri','Friday'), "05":('Sat','Saturday'), "06":('Sun','Sunday')
    }
    # Zero-padded month number -> (abbreviated, full) English month name
    DicoMonthOfTheYear = {
        "01":("Jan", "January"),"02":("Feb","February"),"03":("Mar","March"),"04":("Apr","April"),"05":("May","May"),
        "06":("Jun","June"),"07":("Jul","July"),"08":("Aug","August"),"09":("Sep","September"),"10":("Oct","October"),
        "11":("Nov","November"),"12":("Dec","December")
    }
    # Move the outermost group level into the columns
    df_unstack=SerieAfterGrpBy.unstack(level=0)
    nblevels = df_unstack.index.nlevels
    if nblevels!=1:
        # Multi-level index left after the unstack: relabel the level matching
        # each grouping key; keys not found in the index levels are assumed to
        # be the columns and are relabelled there instead.
        # NOTE(review): the second positional argument to Index.map lands in
        # its na_action parameter, not a mapper -- confirm this is intended.
        for ColumnsName in ListOfDateAndTime:
            ListMultiIndexName=df_unstack.index.names
            if ColumnsName in ListMultiIndexName:
                level_index=ListMultiIndexName.index(ColumnsName)
                if Abbreviation==True:
                    if ColumnsName=="WeekDay":
                        df_unstack.index = df_unstack.index.set_levels(df_unstack.index.levels[level_index].map(lambda x : DicoDayOfWeek[x][0],DicoDayOfWeek), level=level_index)
                    elif ColumnsName=="M":
                        df_unstack.index = df_unstack.index.set_levels(df_unstack.index.levels[level_index].map(lambda x : DicoMonthOfTheYear[x][0],DicoDayOfWeek), level=level_index)
                elif Abbreviation==False:
                    if ColumnsName=="WeekDay":
                        df_unstack.index = df_unstack.index.set_levels(df_unstack.index.levels[level_index].map(lambda x : DicoDayOfWeek[x][1],DicoDayOfWeek), level=level_index)
                    elif ColumnsName=="M":
                        df_unstack.index = df_unstack.index.set_levels(df_unstack.index.levels[level_index].map(lambda x : DicoMonthOfTheYear[x][1],DicoDayOfWeek), level=level_index)
            else:
                if Abbreviation==True:
                    if ColumnsName=="WeekDay":
                        df_unstack.columns = df_unstack.columns.map(lambda x : DicoDayOfWeek[x][0],DicoDayOfWeek)
                    elif ColumnsName=="M":
                        df_unstack.columns = df_unstack.columns.map(lambda x : DicoMonthOfTheYear[x][0],DicoMonthOfTheYear)
                elif Abbreviation==False:
                    if ColumnsName=="WeekDay":
                        df_unstack.columns = df_unstack.columns.map(lambda x : DicoDayOfWeek[x][1],DicoDayOfWeek)
                    elif ColumnsName=="M":
                        df_unstack.columns = df_unstack.columns.map(lambda x : DicoMonthOfTheYear[x][1],DicoMonthOfTheYear)
    else:
        # Flat index: only the columns (the unstacked outer level) carry the
        # weekday/month codes, and only when that key was the first group key.
        if "WeekDay" in ListOfDateAndTime and "WeekDay"==ListOfDateAndTime[0]:
            if Abbreviation==True:
                df_unstack.columns = df_unstack.columns.map(lambda x : DicoDayOfWeek[x][0],DicoDayOfWeek)
            else:
                df_unstack.columns = df_unstack.columns.map(lambda x : DicoDayOfWeek[x][1],DicoDayOfWeek)
        if "M" in ListOfDateAndTime and "M"==ListOfDateAndTime[0]:
            if Abbreviation==True:
                df_unstack.columns = df_unstack.columns.map(lambda x : DicoMonthOfTheYear[x][0],DicoMonthOfTheYear)
            elif Abbreviation==False:
                df_unstack.columns = df_unstack.columns.map(lambda x : DicoMonthOfTheYear[x][1],DicoMonthOfTheYear)
    # Subplot grid (rows, columns) keyed by the first grouping key
    DicoConfigRowColumsSubPlot={"Y":(4,3),"M":(4,3),"W":(13,4),"D":(8,4),"WeekDay":(4,2),"h":(6,4),"m":(10,6),"s":(10,6)}
    # One bar subplot per column; fig is the 2D array of axes returned by pandas
    fig=df_unstack.plot(subplots=True,figsize=(70, 60), layout=DicoConfigRowColumsSubPlot[ListOfDateAndTime[0]],kind="bar",sharex=True,sharey=True,legend=False,)
    # Add axis labels; the y-label is only set on selected rows depending on
    # the parity/size of the subplot grid
    for Row in range(DicoConfigRowColumsSubPlot[ListOfDateAndTime[0]][0]):
        FigRow=fig[Row].flatten()
        if DicoConfigRowColumsSubPlot[ListOfDateAndTime[0]][0]%2!=0 and Row%3==1 and Row!=DicoConfigRowColumsSubPlot[ListOfDateAndTime[0]][0]:
            FigRow[0].set_ylabel("Nb. Video Trending")
        elif DicoConfigRowColumsSubPlot[ListOfDateAndTime[0]][0]%2==0 and Row%2==1 and Row!=DicoConfigRowColumsSubPlot[ListOfDateAndTime[0]][0]:
            FigRow[0].set_ylabel("Nb. Video Trending")
        elif DicoConfigRowColumsSubPlot[ListOfDateAndTime[0]][0]==4:
            FigRow[0].set_ylabel("Nb. Video Trending")
        for Column in range(len(FigRow)):
            FigRow[Column].set_xlabel("Time")
    plot.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.2, hspace=0.5)
    plot.show()
    return df_unstack
def testtemps():
print(pytz.country_timezones('JP'))
# Hours=[0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23]
# Hours=pd.date_range('17:30:00', '21:00:00',freq='15T').strftime('%H:%M').tolist()
# pd.to_datetime(Hours,format='%H:%M')
# print(Hours)
Hours=pd.date_range('00:00:00', '23:59:00',freq=str(30)+'T').time
df_NumberHours=pd.DataFrame(0,index=Hours,columns=["Number","Label"])
# df_NumberHours["Label"]=HoursForLabels
# print(df_NumberHours["Label"].head(3))
Country="FRA"
PathToInputData=os.path.join("Script","Data","Data_IN","Youtube_CSV__And_JSON",Country+"videos.csv")
df=pd.read_csv(PathToInputData)#,engine="python")
#'video_id','title',
df=df.drop(columns=['channel_title','category_id','tags','thumbnail_link','comments_disabled','ratings_disabled','video_error_or_removed','description'])
#get the plublish time and put in the column publish time
df['publish_time'] = pd.to_datetime(df['publish_time'], format='%Y-%m-%dT%H:%M:%S.%fZ')
# print(df['publish_time'])
# ["JPN",
LocalTime=False
if LocalTime==True:
if Country=="USA":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('US/Central')
elif Country=="MEX":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('America/Mexico_City')
elif Country=="FRA":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('Europe/Paris')
elif Country=="DEU":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('Europe/Berlin')
elif Country=="GBR":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('Europe/London')
elif Country=="IND":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('Asia/Kolkata')
elif Country=="CAN":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('America/Winnipeg')
elif Country=="KOR":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('Asia/Seoul')
elif Country=="RUS":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('Asia/Krasnoyarsk')
elif Country=="JPN":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('Asia/Tokyo')
# filtertime=(df[df.index.time > datetime.time(12),] & df[df.index.time < datetime.time(13)])
#Converting LOcal time to UTC time if LocalToUTCTime==True
# df=ConvertLocalTimeToUTC(df,Country,LocalToUTCTime)
print(df["video_id"].nunique())
df = df.drop_duplicates(subset = 'video_id', keep = 'first')
print(df)
df.set_index( df['publish_time'], inplace=True)
# df_FiltResult=
# df=df.groupby([df.index.day_name()],)["views"].count()#,df.index.hour
# df.plot(kind="bar")
# plot.show()
df_grp=df.groupby([df.index.weekday,df.index.hour])
ser=df_grp["views"].count()
# print(df_grp["views"].agg(["count"]))
# print(df_grp["views"].agg(["count"]).loc[1])
# print(df_grp.get_group((1,0)))
# df.unstack(level=0).plot(kind='bar', subplots=True)
# plot.show()
DicoDayOfWeek={
"00":('Mon','Monday'), "01":('Tue','Tuesday'), "02":('Wed','Wednesday'), "03":('Thu','Thursday'),
"04":('Fri','Friday'), "05":('Sat','Saturday'), "06":('Sun','Sunday')
}
# ser.index[0][0] = df.index[0][0].map(lambda x : DicoDayOfWeek[x][1],DicoDayOfWeek)
# ser.unstack(level=0).plot(subplots=True, figsize=(70, 60), layout=(4, 2),kind="bar",sharex=True,title=ser.index[0][0] )
# plot.show()
# for i in range(1,max(df_grp.keys[0])):
# print(df_grp["views"].agg(["count"]).loc[i])
# df_grp.plot(y=df_grp["views"].agg(["count"]).loc[i].count)
# plot.show()
# fig, ax = plot.subplots(figsize=(10,4))
# # ax.plot(df_grp["views"].loc[1], df_grp['views'].count(), label=df_grp["views"].loc[1])
# for key, grp in df_grp:#df.groupby(ListOfDateAndTime):
# print(key,grp)
# ax.plot(grp.groupby(grp.index.hour), grp['views'].count(), label=key)
# ax.legend()
# plot.show()
# df.plot()
# plot.show()
# plot.show()
# filt=(df.title.str.find(sub)!=-1)
# filt=None
# df_FiltResult=df["title"].resample("D")
#juste le filtre
# df_FiltResultsample=df["title"][filt].resample("M").count()
# totalite de la periode
DicoMonthOfTheYear = {
"01":("Jan", "January"),"02":("Feb","February"),"03":("Mar","March"),"04":("Apr","April"),"05":("May","May"),
"06":("Jun","June"),"07":("Jul","July"),"08":("Aug","August"),"09":("Sep","September"),"10":("Oct","October"),
"11":("Nov","November"),"12":("Dec","December")
}
# sub=""
#fictionnary of group by possibilities
DicoGroubyPossibility={
"Y":df.index.year,
"M":df.index.month,
"W":df.index.week,
"D":df.index.day,
"h":df.index.hour,
"m":df.index.minute,
"s":df.index.second,
"time":df.index.time,
"date":df.index.date,
"WeekDay":df.index.weekday,
}
# ListOfDateAndTime=["M","D"]#,"M","D"]
ListOfDateAndTime=["WeekDay"]#,"M","D"]
#test if the list contain more than one parameter for grouby if it is true then it will group by by the composant o the list
if len(ListOfDateAndTime)==1:
#Create empty list for date and time classification
ListOfDate=[]
ListOfTime=[]
#Classify Date and time in the corresponding list in fucntion of it is in upper case or not upper=date low=time
for i in ListOfDateAndTime:
if i.isupper() or i=="date" or i=="WeekDay":
ListOfDate.append(i)
else:
ListOfTime.append(i)
#get the list of all indexes
SegmentOfDateOrTime=DicoGroubyPossibility[i].astype(str).tolist()
# and add a zero in front of the index string to have 00 h and not 0h or days etc
for DateOrTime in range(len(SegmentOfDateOrTime)):
if len(SegmentOfDateOrTime[DateOrTime])==1:
SegmentOfDateOrTime[DateOrTime]=str(0)+SegmentOfDateOrTime[DateOrTime]
#Place it back in the columns of the date or time correspondant like Y(Year) or h(hour) to get a series grouby with different name
df.loc[:,i]=SegmentOfDateOrTime
#grouby in function of the entry in the list of date and time
# df_grp=df.groupby(ListOfDateAndTime)#["views"].count()
Abbreviation=True
df_grp=df.groupby([df.index.weekday,df.index.hour])#["views"].count()
df=df_grp["views"].count()
EclatedSubPlot(df,True,ListOfDateAndTime,Abbreviation)
# Abbreviation=False
# # fig, (ax1, ax2) = plot.subplots(2, 1)
# # df.plot(x='Weekday', y='h', ax=ax1, legend=False)
# # df.sort_values().plot(kind='barh', ax=ax2)
# ser=df_grp["views"].count()
# df_unstack=ser.unstack(level=0)
# nblevels = df_unstack.index.nlevels
# print(nblevels)
# if nblevels!=1:
# for ColumnsName in ListOfDateAndTime:
# ListMultiIndexName=df_unstack.index.names
# if ColumnsName in ListMultiIndexName:
# level_index=ListMultiIndexName.index(ColumnsName)
# if Abbreviation==True:
# if ColumnsName=="WeekDay":
# df_unstack.index = df_unstack.index.set_levels(df_unstack.index.levels[level_index].map(lambda x : DicoDayOfWeek[x][0],DicoDayOfWeek), level=level_index)
# elif ColumnsName=="M":
# df_unstack.index = df_unstack.index.set_levels(df_unstack.index.levels[level_index].map(lambda x : DicoMonthOfTheYear[x][0],DicoDayOfWeek), level=level_index)
# elif Abbreviation==False:
# if ColumnsName=="WeekDay":
# df_unstack.index = df_unstack.index.set_levels(df_unstack.index.levels[level_index].map(lambda x : DicoDayOfWeek[x][1],DicoDayOfWeek), level=level_index)
# elif ColumnsName=="M":
# df_unstack.index = df_unstack.index.set_levels(df_unstack.index.levels[level_index].map(lambda x : DicoMonthOfTheYear[x][1],DicoDayOfWeek), level=level_index)
# else:
# if Abbreviation==True:
# if ColumnsName=="WeekDay":
# df_unstack.columns = df_unstack.columns.map(lambda x : DicoDayOfWeek[x][0],DicoDayOfWeek)
# elif ColumnsName=="M":
# df_unstack.columns = df_unstack.columns.map(lambda x : DicoMonthOfTheYear[x][0],DicoMonthOfTheYear)
# elif Abbreviation==False:
# if ColumnsName=="WeekDay":
# df_unstack.columns = df_unstack.columns.map(lambda x : DicoDayOfWeek[x][1],DicoDayOfWeek)
# elif ColumnsName=="M":
# df_unstack.columns = df_unstack.columns.map(lambda x : DicoMonthOfTheYear[x][1],DicoMonthOfTheYear)
# else:
# if "WeekDay" in ListOfDateAndTime and "WeekDay"==ListOfDateAndTime[0]:
# if Abbreviation==True:
# df_unstack.columns = df_unstack.columns.map(lambda x : DicoDayOfWeek[x][0],DicoDayOfWeek)
# else:
# df_unstack.columns = df_unstack.columns.map(lambda x : DicoDayOfWeek[x][1],DicoDayOfWeek)
# else:
# if Abbreviation==True:
# df_unstack.index = df_unstack.index.map(lambda x : DicoDayOfWeek[x][0],DicoDayOfWeek)
# else:
# df_unstack.index = df_unstack.index.map(lambda x : DicoDayOfWeek[x][1],DicoDayOfWeek)
# if "M" in ListOfDateAndTime and "M"==ListOfDateAndTime[0]:
# if Abbreviation==True:
# df_unstack.columns = df_unstack.columns.map(lambda x : DicoMonthOfTheYear[x][0],DicoMonthOfTheYear)
# elif Abbreviation==False:
# df_unstack.columns = df_unstack.columns.map(lambda x : DicoMonthOfTheYear[x][1],DicoMonthOfTheYear)
# else:
# if Abbreviation==True:
# df_unstack.index = df_unstack.index.map(lambda x : DicoMonthOfTheYear[x][0],DicoMonthOfTheYear)
# elif Abbreviation==False:
# df_unstack.index = df_unstack.index.map(lambda x : DicoMonthOfTheYear[x][1],DicoMonthOfTheYear)
# print(df_unstack.index)
# # fig, axes=plot.subplots(nrows=4,ncols=2,)
# # axes[0][0].plot(df_unstack)
# # plot.show()
# # ax.plot(df_unstack)
# # fig = plot.figure() # create a figure object
# # axs = fig.subplots(nrows=4,ncols=2)
# # fig
# # for ax in axs:
# # ax.plot(df_grp[0])
# # create an axes object in the figure
# # ax.plot(df_unstack)
# # ax.set_ylabel('some numbers')
# # plot.figure(1)
# # df_unstack.plot()
# # fig=plot.figure()
# # ax1=fig.add_subplot(df_unstack)
# DicoConfigRowColumsSubPlot={"Y":(4,3),"M":(4,3),"W":(13,4),"D":(8,4),"WeekDay":(4,2),"h":(6,4),"m":(10,6),"s":(10,6)}
# fig=df_unstack.plot(subplots=True,figsize=(70, 60), layout=DicoConfigRowColumsSubPlot[ListOfDateAndTime[0]],kind="bar",sharex=True,sharey=True,legend=False,).flatten()#.map(set_xlabel=("toto"))#**kwargs)
# fig=fig.flatten()
# # fig.text(0.5, 0.04, 'common xlabel', ha='center', va='center')
# # fig.text(0.06, 0.5, 'common ylabel', ha='center', va='center', rotation='vertical')
# # fig.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.2, hspace=0.2)
# for i in range(len(fig)):
# fig[i].set_ylabel("Nb. Video Trending")
# fig[i].set_xlabel("Time")
# plot.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.2, hspace=0.5)
# plot.show()
# plot.show()
# df_unstack[df_unstack.columns[0]].plot(ax=axes[0,0])
# df_unstack[df_unstack.columns[1]].plot(ax=axes[0,1])
# plot.show()
# rowlength = df_grp.ngroups//2
# fig, axs = plot.subplots()
# df_unstack.plot(subplot=True,layout=(4, 2), figsize=(70, 60),kind="bar",sharex=True,sharey=True,)
# fig=df_unstack.plot(subplot=True,ax=ax,kind="bar")
#title of the x axis of the plot
# ax.set_xlabel('common xlabel')
# fig.xlabel('common xlabel')
# fig.ylabel('common ylabel')
# plot.xlabel("hours")
#title of y axis of the plot
# plot.ylabel("Number Of Video Trending")
# plot.(xtitle="hours",ytitle="Number Of Video Trending")
# plot.tight_layout()
plot.show()
# plot.show()
# fig, ax = plot.subplots(figsize=(10,4))
# for key, grp in df.groupby(ListOfDateAndTime):
# ax.plot(grp['WeekDay'], grp['h'], label=key)
# ax.legend()
# plot.show()
#Go from pd series to dataframe with another index
df=df.to_frame(name = 'Number Of Video Trending').reset_index()
# fig, axs = plot.subplots(2, 1, sharex=True)
# # gs = df.groupby(["WeekDay","h"], axis=1)
# # df.set_index('WeekDay',inplace=True)
# gs = df.groupby(["WeekDay"], axis=1)
# for (_, g), ax in zip(gs, axs):
# g.plot.bar(stacked=True, ax=ax)
# plot.show()
if "WeekDay" in ListOfDateAndTime:
dayOfWeek={"00":'Monday', "01":'Tuesday', "02":'Wednesday', "03":'Thursday', "04":'Friday', "05":'Saturday', "06":'Sunday'}
df['WeekDay'] = df['WeekDay'].map(dayOfWeek)
#create the columns time in function of the date and time in listoftime
if len(ListOfDate)>0 and len(ListOfTime)>0:
df['Time'] = df[ListOfDate].astype(str).agg('-'.join, axis=1)+" "+df[ListOfTime].astype(str).agg(':'.join, axis=1)
elif len(ListOfDate)>0 and len(ListOfTime)==0:
df['Time'] = df[ListOfDate].astype(str).agg('-'.join, axis=1)
elif len(ListOfDate)==0 and len(ListOfTime)>0:
df['Time'] = df[ListOfTime].astype(str).agg(':'.join, axis=1)
#Put the column Time in index
df.set_index( df['Time'], inplace=True)
#add the column Time to ListOfDateAndTime before dropping every columns of ListOfDateAndTime to have a nice dataframe with just the number
#of videos trending and the time index
ListOfDateAndTime.append('Time')
df=df.drop(ListOfDateAndTime,axis=1)
else:
#if their is only one thing in the list
#get the list of all indexes
SegmentOfDateOrTime=DicoGroubyPossibility[ListOfDateAndTime[0]].astype(str).tolist()
# and add a zero in front of the index string to have 00 h and not 0h or days etc
for DateOrTime in range(len(SegmentOfDateOrTime)):
if len(SegmentOfDateOrTime[DateOrTime])==1:
SegmentOfDateOrTime[DateOrTime]=str(0)+SegmentOfDateOrTime[DateOrTime]
#grouby in function of the entry in the list of index
df=df.groupby(SegmentOfDateOrTime)["views"].count()
#Create a dataframe with the grouby serie
df=df.to_frame(name = 'Number Of Video Trending')#.reset_index()
# Rename the dataframe index in Time
df.index=df.index.rename('Time')
# df1.columns=ListOfDateAndTime.split("_")
# df1=df1.to_frame(name = 'count').reset_index()
# df=df.loc[:,ListOfTime].join()
# df=df.resample("60T").views.count()#, df.index.minute df.index.hour
# df=df.groupby(pd.Grouper(key='publish_time',freq='30T')).views.count()#, df.index.minute df.index.hour
# df=df.groupby([df.index.second]).views.count()#df.index.hour,
# df=df.groupby([df.index.hour,df.index.minute,df.index.second]).views.count()
# df=df.groupby([df.index.year,df.index.month,df.index.day,df.index.hour,df.index.minute,df.index.second]).views.count()
# print(df)
df.plot(kind="bar")
plot.show()
# df_FiltResult=df["views"].resample("H").count()
# print(df_FiltResult)
FindText=" !"
filtre="Minute"
NumberOfVideoTrendingByCountry="Number Of Video "+Country
DicoResampleAndGraph={"Year":("Y","%y"),"Month":("M","%y/%m"),"Day":("D","%y/%m/%d"),"Hour":("H","%y/%m/%d %H"),"Minute":("m","%y/%m/%d %H:%m")}
# filt=(df.index.year==2017) | (df.index.year==2018)
filt=(df.index.month==12) | (df.index.day==25)
df=df[filt]
if FindText!="":
df["result"]=df["title"].apply(lambda x: 1 if x.find(FindText)!=-1 else 0)
df_FiltResult=df["result"].resample(DicoResampleAndGraph[filtre][0]).sum()
else:
df_FiltResult=df["views"].resample(DicoResampleAndGraph[filtre][0]).count()
df_FiltResult.columns=["Label",NumberOfVideoTrendingByCountry]
df_FiltResult.index=df_FiltResult.index.strftime(DicoResampleAndGraph[filtre][1])#-%d
# df_FiltResult.index=df_FiltResult.index.strftime("%V")#-%d
# print(df_FiltResult.index)
# filt=(df.title.str.find(sub)!=-1)
# df_FiltResult=df["title"][filt].resample("W").count()
# df_FiltResult=df["title"].resample("W").count()
# df_FiltResult.index=df_FiltResult.index.strftime("%V")#-%d
print(df_FiltResult)
# if df
# df_FiltResult.loc["value"]=df["title"][filt].count()
# df.index=pd.to_datetime(df.index,format='%Y-%m-%d')
# df_FiltResultsample.plot(y=0,kind="bar")
df_FiltResult.plot(y=0,kind="bar")
plot.show()
NumberOfVideoTrendingByCountry="Number Of Video "+Country
Months=["January","February","March","April","May","June","July","August","October","November","December"]
Years=[]
for Year in range(min(df.publish_time.dt.year),max(df.publish_time.dt.year)+1):
Years.append(Year)
df_VideoCountForDayOfTheWeek=pd.DataFrame(0,index=Years,columns=[NumberOfVideoTrendingByCountry])
print(min(df.publish_time.dt.year))
print(max(df.publish_time.dt.year))
sub=" Noël "
for Year in Years:
filtervalue=(df.publish_time.dt.year==Year) & (df.title.str.find(sub)!=-1)
df_VideoCountForDayOfTheWeek.loc[Year,NumberOfVideoTrendingByCountry]=max(df[filtervalue].count())
print(df_VideoCountForDayOfTheWeek)
WeekDays=["Monday","Tuesday","Wednesday","Thursday","Friday","Saturday","Sunday"]
df_VideoCountForDayOfTheWeek=pd.DataFrame(0,index=WeekDays,columns=["Number Of Videos"])
for WeekDay in WeekDays:
df_VideoCountForDayOfTheWeek.loc[WeekDay,"Number Of Videos"]=max(df[df.publish_time.dt.day_name()==WeekDay].count())
print(df_VideoCountForDayOfTheWeek)
df_VideoCountForDayOfTheWeek.plot(y="Number Of Videos",kind="bar")
plot.show()
#insert publish date in the corresponding columns
df.insert(5, 'publish_date', df['publish_time'].dt.date)
# convert them into datetime time
df['publish_time'] = df['publish_time'].dt.time
#convert the trending date string into a datetime format
df['trending_date'] = pd.to_datetime(df['trending_date'], format='%y.%d.%m')
#Put the trending date in the same format before soustracting them to
# get the time before trending
df["trending_date"]=df["trending_date"].values.astype('datetime64[D]')
df["publish_date"]=df["publish_date"].values.astype('datetime64[D]')
# functionning from 1 s tp 24h
IntervalMinute=1/60
if IntervalMinute==1/60:
counttotal=0
countindex=0
HoursForLabels=pd.date_range('00:00:00', '23:59:59',freq=str(IntervalMinute)+'T').strftime('%H:%M:%S').tolist()
NumberOfVideoTrendingByCountry="Number Of Video "+Country
df_NumberHours=pd.DataFrame(0,index=HoursForLabels,columns=["Label",NumberOfVideoTrendingByCountry])
df_NumberHours["Label"]=HoursForLabels
for index in range(len(HoursForLabels)):
if index<(len(HoursForLabels)-1):
df_NumberHours.loc[HoursForLabels[index],NumberOfVideoTrendingByCountry]=df["views"].between_time(start_time=HoursForLabels[index],end_time=HoursForLabels[index+1],include_end=False).count()
else:
df_NumberHours.loc[HoursForLabels[index],NumberOfVideoTrendingByCountry]=df["views"].between_time(start_time=HoursForLabels[index],end_time="23:59:59",include_start=True,include_end=True).count()
else:
#insert publish date in the corresponding columns
df.insert(5, 'publish_date', df['publish_time'].dt.date)
# convert them into datetime time
df['publish_time'] = df['publish_time'].dt.time
#convert the trending date string into a datetime format
df['trending_date'] = pd.to_datetime(df['trending_date'], format='%y.%d.%m')
#Put the trending date in the same format before soustracting them to
# get the time before trending
df["trending_date"]=df["trending_date"].values.astype('datetime64[D]')
df["publish_date"]=df["publish_date"].values.astype('datetime64[D]')
#Get all time data in function of the day of the week if DayOfTheWeek=="All" skip this to have all day of the dataframe
df["weekday_publish_date"] = df["publish_date"].dt.day_name()
# df=GetDFFromWeekDay(df,DayOfTheWeek)
# get the time before trending
df["Time_Before_Trending"]=df["trending_date"].sub(df["publish_date"],axis=0)
# count the number of video publish in the same time
df_NumberHours=df['publish_time'].value_counts()
df_NumberHours.sort_values(0,ascending=True)
# df_NumberHours.index=sorted(df_NumberHours.index,key=)
df_NumberHours=df_NumberHours.sort_index()
HoursForLabels=pd.date_range('00:00:00', '23:59:59',freq=str(IntervalMinute)+'T').strftime('%H:%M:%S').tolist()
for time in HoursForLabels:
if time not in df_NumberHours.index:
df_NumberHours.set_value(time,0)
df_NumberHours.index=df_NumberHours.index.time
#Supres the last row of the df for interval and video publish in the interval
# because it is 23:59:59 but is empty cause every thing goes to 00:00:00
df_NumberHours.drop(df_NumberHours.tail(1).index,inplace=True)
# print(df_NumberHours)
# print(len(df))
# print(df_NumberHours[NumberOfVideoTrendingByCountry].sum())
# df_NumberHours.plot(y=NumberOfVideoTrendingByCountry,kind="bar")
# plot.show()
##############################################################################################################################
# x=2
# print(df)
# print(df["views"].between_time(start_time="00:00:00",end_time="23:59:59").count())
# print(df["views"].count())
# print(len(df["views"]))
# df_NumberHours.loc["23:59",["Label",NumberOfVideoTrendingByCountry]] = "23:59",0
# print(df_NumberHours)
# for index in range(len(HoursForLabels)+1):
# if index<(len(HoursForLabels)-1):
# # if HoursForLabels[index]=="23:30":
# # x=1
# df_NumberHours.loc[HoursForLabels[index],NumberOfVideoTrendingByCountry]=df["views"].between_time(start_time=HoursForLabels[index],end_time=HoursForLabels[index+1],include_end=False).count()
# elif index==(len(HoursForLabels)-1):
# df_NumberHours.loc[HoursForLabels[-1],NumberOfVideoTrendingByCountry]=df["views"].between_time(start_time=HoursForLabels[index-1],end_time=HoursForLabels[-1],include_end=False).count()
# else:
# df_NumberHours.loc["23:59",NumberOfVideoTrendingByCountry]=df["views"].between_time(start_time=HoursForLabels[-1],end_time="23:59:59",include_start=True,include_end=True).count()
# df_NumberHours.set_index("Label",inplace=True)
# for index in range(len(HoursForLabels)):
# if index<(len(HoursForLabels)-1):
# df_NumberHours.loc[HoursForLabels[index],NumberOfVideoTrendingByCountry]=df["views"].between_time(start_time=HoursForLabels[index],end_time=HoursForLabels[index+1],include_end=False).count()
# elif index==len(HoursForLabels)-1:
# df_NumberHours.loc[HoursForLabels[-1],NumberOfVideoTrendingByCountry]=df["views"].between_time(start_time=HoursForLabels[-1],end_time="23:59:59",include_end=True).count()
# df_NumberHours.loc["23:59",NumberOfVideoTrendingByCountry]=df["views"].between_time(start_time=HoursForLabels[-1],end_time="23:59:59",include_start=True,include_end=True).count()
# elif index==len(HoursForLabels):
# print(df_NumberHours[NumberOfVideoTrendingByCountry].sum())
#0 a 03
def anepasutiliser():
print(df_NumberHours[NumberOfVideoTrendingByCountry].sum())
print(df_NumberHours)
df_NumberHours=pd.DataFrame(0,index=HoursForLabels,columns=["Label",NumberOfVideoTrendingByCountry])
df.insert(5, 'publish_date', df['publish_time'].dt.date)
#convert them into datetime time
# df['publish_time'] = df['publish_time'].dt.time
# df['publish_time'] =df['publish_time'] .astype('datetime64[D]')
df['publish_time'] = pd.DatetimeIndex(df['publish_time'])
df['publish_time']=df['publish_time'].dt.time
print(df['publish_time'])
# count the number of video publish in the same time
df["Count"]=df['publish_time'].value_counts()
df.sort_values('Count',ascending=True)
print(df)
pd.to_timedelta(df['publish_time'])
df.set_index(pd.to_datetime(df['publish_time'],"hh:mm:ss"), inplace=True)
print(df.index.time)
# df.set_index(pd.DatetimeIndex(df['publish_time']), inplace=True)
print(df.index)
print(df['views'].resample('T').sum())
df['publish_time'] = df['publish_time']
#convert the trending date string into a datetime format
df['trending_date'] = pd.to_datetime(df['trending_date'], format='%y.%d.%m')
#Put the trending date in the same format before soustracting them to
# get the time before trending
df["trending_date"]=df["trending_date"].values.astype('datetime64[D]')
df["publish_date"]=df["publish_date"].values.astype('datetime64[D]')
df["weekday_publish_date"] = df["publish_date"].dt.day_name()
# df=df[df.weekday_publish_date==DayOfTheWeek]
print(df)
# get the time before trending
df["Time_Before_Trending"]=df["trending_date"].sub(df["publish_date"],axis=0)
# count the number of video publish in the same time
Df_TimeAndNumberOfPublication=df['publish_time'].value_counts()
Df_TimeAndNumberOfPublication.sort_values(0,ascending=True)
# print(datetime.time(hour=,minute=-30,second=40))
print(df_NumberHours.tail(5))
#40562 via fonction via tableau 40723
#il faut que les valeur centrer entre 16:30 avec 15 min a gauche 15 min a droite soit increment/2
print(df_NumberHours["Number Of Video"].sum())
#et si les minutes sont egales a zero alors il faut retirer une heure
#
# df_NumberHours.plot(x="Label",y=NumberOfVideoTrendingByCountry, kind='bar')
# #title of the plot
# plot.title("Number of Video Trending in " +Country +" by publication time")
# #title of the x axis of the plot
# plot.xlabel('Time')
# #title of y axis of the plot
# plot.ylabel('Number of Video Trending')
# #show the graph
# plot.show()
testtemps()
def NumberOfVideoFilterByPublishTime(df,Country,IntervalMinute):
if IntervalMinute!=1/60:
df.set_index( df['publish_time'], inplace=True)
counttotal=0
countindex=0
IntervalMinute=1/60
HoursForLabels=pd.date_range('00:00:00', '23:59:59',freq=str(IntervalMinute)+'T').strftime('%H:%M:%S').tolist()
NumberOfVideoTrendingByCountry="Number Of Video "+Country
df_NumberHours=pd.DataFrame(0,index=HoursForLabels,columns=["Label",NumberOfVideoTrendingByCountry])
df_NumberHours["Label"]=HoursForLabels
for index in range(len(HoursForLabels)):
if index<(len(HoursForLabels)-1):
df_NumberHours.loc[HoursForLabels[index],NumberOfVideoTrendingByCountry]=df["views"].between_time(start_time=HoursForLabels[index],end_time=HoursForLabels[index+1],include_end=False).count()
else:
df_NumberHours.loc[HoursForLabels[index],NumberOfVideoTrendingByCountry]=df["views"].between_time(start_time=HoursForLabels[index],end_time="23:59:59",include_start=True,include_end=True).count()
else:
#insert publish date in the corresponding columns
df.insert(5, 'publish_date', df['publish_time'].dt.date)
# convert them into datetime time
df['publish_time'] = df['publish_time'].dt.time
#convert the trending date string into a datetime format
df['trending_date'] = pd.to_datetime(df['trending_date'], format='%y.%d.%m')
#Put the trending date in the same format before soustracting them to
# get the time before trending
df["trending_date"]=df["trending_date"].values.astype('datetime64[D]')
df["publish_date"]=df["publish_date"].values.astype('datetime64[D]')
#Get all time data in function of the day of the week if DayOfTheWeek=="All" skip this to have all day of the dataframe
df["weekday_publish_date"] = df["publish_date"].dt.day_name()
df=GetDFFromWeekDay(df,DayOfTheWeek)
# get the time before trending
df["Time_Before_Trending"]=df["trending_date"].sub(df["publish_date"],axis=0)
# count the number of video publish in the same time
df_NumberHours=df['publish_time'].value_counts()
# df_NumberHours.sort_values(0,ascending=True)
#Supres the last row of the df for interval and video publish in the interval
# because it is 23:59:59 but is empty cause every thing goes to 00:00:00
df_NumberHours.drop(df_NumberHours.tail(1).index,inplace=True)
return df_NumberHours | 42.777246 | 213 | 0.625605 | import pandas as pd
import datetime
import numpy as np
import os
import re
import matplotlib.pyplot as plot
import pytz
def EclatedSubPlot(SerieAfterGrpBy,ActivatePlotting,ListOfDateAndTime,Abbreviation):
DicoDayOfWeek={
"00":('Mon','Monday'), "01":('Tue','Tuesday'), "02":('Wed','Wednesday'), "03":('Thu','Thursday'),
"04":('Fri','Friday'), "05":('Sat','Saturday'), "06":('Sun','Sunday')
}
DicoMonthOfTheYear = {
"01":("Jan", "January"),"02":("Feb","February"),"03":("Mar","March"),"04":("Apr","April"),"05":("May","May"),
"06":("Jun","June"),"07":("Jul","July"),"08":("Aug","August"),"09":("Sep","September"),"10":("Oct","October"),
"11":("Nov","November"),"12":("Dec","December")
}
df_unstack=SerieAfterGrpBy.unstack(level=0)
nblevels = df_unstack.index.nlevels
if nblevels!=1:
for ColumnsName in ListOfDateAndTime:
ListMultiIndexName=df_unstack.index.names
if ColumnsName in ListMultiIndexName:
level_index=ListMultiIndexName.index(ColumnsName)
if Abbreviation==True:
if ColumnsName=="WeekDay":
df_unstack.index = df_unstack.index.set_levels(df_unstack.index.levels[level_index].map(lambda x : DicoDayOfWeek[x][0],DicoDayOfWeek), level=level_index)
elif ColumnsName=="M":
df_unstack.index = df_unstack.index.set_levels(df_unstack.index.levels[level_index].map(lambda x : DicoMonthOfTheYear[x][0],DicoDayOfWeek), level=level_index)
elif Abbreviation==False:
if ColumnsName=="WeekDay":
df_unstack.index = df_unstack.index.set_levels(df_unstack.index.levels[level_index].map(lambda x : DicoDayOfWeek[x][1],DicoDayOfWeek), level=level_index)
elif ColumnsName=="M":
df_unstack.index = df_unstack.index.set_levels(df_unstack.index.levels[level_index].map(lambda x : DicoMonthOfTheYear[x][1],DicoDayOfWeek), level=level_index)
else:
if Abbreviation==True:
if ColumnsName=="WeekDay":
df_unstack.columns = df_unstack.columns.map(lambda x : DicoDayOfWeek[x][0],DicoDayOfWeek)
elif ColumnsName=="M":
df_unstack.columns = df_unstack.columns.map(lambda x : DicoMonthOfTheYear[x][0],DicoMonthOfTheYear)
elif Abbreviation==False:
if ColumnsName=="WeekDay":
df_unstack.columns = df_unstack.columns.map(lambda x : DicoDayOfWeek[x][1],DicoDayOfWeek)
elif ColumnsName=="M":
df_unstack.columns = df_unstack.columns.map(lambda x : DicoMonthOfTheYear[x][1],DicoMonthOfTheYear)
else:
if "WeekDay" in ListOfDateAndTime and "WeekDay"==ListOfDateAndTime[0]:
if Abbreviation==True:
df_unstack.columns = df_unstack.columns.map(lambda x : DicoDayOfWeek[x][0],DicoDayOfWeek)
else:
df_unstack.columns = df_unstack.columns.map(lambda x : DicoDayOfWeek[x][1],DicoDayOfWeek)
if "M" in ListOfDateAndTime and "M"==ListOfDateAndTime[0]:
if Abbreviation==True:
df_unstack.columns = df_unstack.columns.map(lambda x : DicoMonthOfTheYear[x][0],DicoMonthOfTheYear)
elif Abbreviation==False:
df_unstack.columns = df_unstack.columns.map(lambda x : DicoMonthOfTheYear[x][1],DicoMonthOfTheYear)
DicoConfigRowColumsSubPlot={"Y":(4,3),"M":(4,3),"W":(13,4),"D":(8,4),"WeekDay":(4,2),"h":(6,4),"m":(10,6),"s":(10,6)}
fig=df_unstack.plot(subplots=True,figsize=(70, 60), layout=DicoConfigRowColumsSubPlot[ListOfDateAndTime[0]],kind="bar",sharex=True,sharey=True,legend=False,)msSubPlot[ListOfDateAndTime[0]][0]):
FigRow=fig[Row].flatten()
if DicoConfigRowColumsSubPlot[ListOfDateAndTime[0]][0]%2!=0 and Row%3==1 and Row!=DicoConfigRowColumsSubPlot[ListOfDateAndTime[0]][0]:
FigRow[0].set_ylabel("Nb. Video Trending")
elif DicoConfigRowColumsSubPlot[ListOfDateAndTime[0]][0]%2==0 and Row%2==1 and Row!=DicoConfigRowColumsSubPlot[ListOfDateAndTime[0]][0]:
FigRow[0].set_ylabel("Nb. Video Trending")
elif DicoConfigRowColumsSubPlot[ListOfDateAndTime[0]][0]==4:
FigRow[0].set_ylabel("Nb. Video Trending")
for Column in range(len(FigRow)):
FigRow[Column].set_xlabel("Time")
plot.subplots_adjust(left=None, bottom=None, right=None, top=None, wspace=0.2, hspace=0.5)
plot.show()
return df_unstack
def testtemps():
print(pytz.country_timezones('JP'))
Hours=pd.date_range('00:00:00', '23:59:00',freq=str(30)+'T').time
df_NumberHours=pd.DataFrame(0,index=Hours,columns=["Number","Label"])
Country="FRA"
PathToInputData=os.path.join("Script","Data","Data_IN","Youtube_CSV__And_JSON",Country+"videos.csv")
df=pd.read_csv(PathToInputData)
df=df.drop(columns=['channel_title','category_id','tags','thumbnail_link','comments_disabled','ratings_disabled','video_error_or_removed','description'])
df['publish_time'] = pd.to_datetime(df['publish_time'], format='%Y-%m-%dT%H:%M:%S.%fZ')
LocalTime=False
if LocalTime==True:
if Country=="USA":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('US/Central')
elif Country=="MEX":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('America/Mexico_City')
elif Country=="FRA":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('Europe/Paris')
elif Country=="DEU":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('Europe/Berlin')
elif Country=="GBR":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('Europe/London')
elif Country=="IND":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('Asia/Kolkata')
elif Country=="CAN":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('America/Winnipeg')
elif Country=="KOR":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('Asia/Seoul')
elif Country=="RUS":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('Asia/Krasnoyarsk')
elif Country=="JPN":
df['publish_time']=pd.DatetimeIndex(df['publish_time']).tz_localize('utc').tz_convert('Asia/Tokyo')
print(df["video_id"].nunique())
df = df.drop_duplicates(subset = 'video_id', keep = 'first')
print(df)
df.set_index( df['publish_time'], inplace=True)
df_grp=df.groupby([df.index.weekday,df.index.hour])
ser=df_grp["views"].count()
DicoDayOfWeek={
"00":('Mon','Monday'), "01":('Tue','Tuesday'), "02":('Wed','Wednesday'), "03":('Thu','Thursday'),
"04":('Fri','Friday'), "05":('Sat','Saturday'), "06":('Sun','Sunday')
}
"01":("Jan", "January"),"02":("Feb","February"),"03":("Mar","March"),"04":("Apr","April"),"05":("May","May"),
"06":("Jun","June"),"07":("Jul","July"),"08":("Aug","August"),"09":("Sep","September"),"10":("Oct","October"),
"11":("Nov","November"),"12":("Dec","December")
}
DicoGroubyPossibility={
"Y":df.index.year,
"M":df.index.month,
"W":df.index.week,
"D":df.index.day,
"h":df.index.hour,
"m":df.index.minute,
"s":df.index.second,
"time":df.index.time,
"date":df.index.date,
"WeekDay":df.index.weekday,
}
fDateAndTime=["WeekDay"]
if len(ListOfDateAndTime)==1:
ListOfDate=[]
ListOfTime=[]
for i in ListOfDateAndTime:
if i.isupper() or i=="date" or i=="WeekDay":
ListOfDate.append(i)
else:
ListOfTime.append(i)
SegmentOfDateOrTime=DicoGroubyPossibility[i].astype(str).tolist()
for DateOrTime in range(len(SegmentOfDateOrTime)):
if len(SegmentOfDateOrTime[DateOrTime])==1:
SegmentOfDateOrTime[DateOrTime]=str(0)+SegmentOfDateOrTime[DateOrTime]
df.loc[:,i]=SegmentOfDateOrTime
ion=True
df_grp=df.groupby([df.index.weekday,df.index.hour])
df=df_grp["views"].count()
EclatedSubPlot(df,True,ListOfDateAndTime,Abbreviation)
ay', "05":'Saturday', "06":'Sunday'}
df['WeekDay'] = df['WeekDay'].map(dayOfWeek)
if len(ListOfDate)>0 and len(ListOfTime)>0:
df['Time'] = df[ListOfDate].astype(str).agg('-'.join, axis=1)+" "+df[ListOfTime].astype(str).agg(':'.join, axis=1)
elif len(ListOfDate)>0 and len(ListOfTime)==0:
df['Time'] = df[ListOfDate].astype(str).agg('-'.join, axis=1)
elif len(ListOfDate)==0 and len(ListOfTime)>0:
df['Time'] = df[ListOfTime].astype(str).agg(':'.join, axis=1)
df.set_index( df['Time'], inplace=True)
ListOfDateAndTime.append('Time')
df=df.drop(ListOfDateAndTime,axis=1)
else:
SegmentOfDateOrTime=DicoGroubyPossibility[ListOfDateAndTime[0]].astype(str).tolist()
for DateOrTime in range(len(SegmentOfDateOrTime)):
if len(SegmentOfDateOrTime[DateOrTime])==1:
SegmentOfDateOrTime[DateOrTime]=str(0)+SegmentOfDateOrTime[DateOrTime]
df=df.groupby(SegmentOfDateOrTime)["views"].count()
df=df.to_frame(name = 'Number Of Video Trending')
df.index=df.index.rename('Time')
FindText=" !"
filtre="Minute"
NumberOfVideoTrendingByCountry="Number Of Video "+Country
DicoResampleAndGraph={"Year":("Y","%y"),"Month":("M","%y/%m"),"Day":("D","%y/%m/%d"),"Hour":("H","%y/%m/%d %H"),"Minute":("m","%y/%m/%d %H:%m")}
filt=(df.index.month==12) | (df.index.day==25)
df=df[filt]
if FindText!="":
df["result"]=df["title"].apply(lambda x: 1 if x.find(FindText)!=-1 else 0)
df_FiltResult=df["result"].resample(DicoResampleAndGraph[filtre][0]).sum()
else:
df_FiltResult=df["views"].resample(DicoResampleAndGraph[filtre][0]).count()
df_FiltResult.columns=["Label",NumberOfVideoTrendingByCountry]
df_FiltResult.index=df_FiltResult.index.strftime(DicoResampleAndGraph[filtre][1])
print(df_FiltResult)
df_FiltResult.plot(y=0,kind="bar")
plot.show()
NumberOfVideoTrendingByCountry="Number Of Video "+Country
Months=["January","February","March","April","May","June","July","August","October","November","December"]
Years=[]
for Year in range(min(df.publish_time.dt.year),max(df.publish_time.dt.year)+1):
Years.append(Year)
df_VideoCountForDayOfTheWeek=pd.DataFrame(0,index=Years,columns=[NumberOfVideoTrendingByCountry])
print(min(df.publish_time.dt.year))
print(max(df.publish_time.dt.year))
sub=" Noël "
for Year in Years:
filtervalue=(df.publish_time.dt.year==Year) & (df.title.str.find(sub)!=-1)
df_VideoCountForDayOfTheWeek.loc[Year,NumberOfVideoTrendingByCountry]=max(df[filtervalue].count())
print(df_VideoCountForDayOfTheWeek)
WeekDays=["Monday","Tuesday","Wednesday","Thursday","Friday","Saturday","Sunday"]
df_VideoCountForDayOfTheWeek=pd.DataFrame(0,index=WeekDays,columns=["Number Of Videos"])
for WeekDay in WeekDays:
df_VideoCountForDayOfTheWeek.loc[WeekDay,"Number Of Videos"]=max(df[df.publish_time.dt.day_name()==WeekDay].count())
print(df_VideoCountForDayOfTheWeek)
df_VideoCountForDayOfTheWeek.plot(y="Number Of Videos",kind="bar")
plot.show()
df.insert(5, 'publish_date', df['publish_time'].dt.date)
df['publish_time'] = df['publish_time'].dt.time
df['trending_date'] = pd.to_datetime(df['trending_date'], format='%y.%d.%m')
df["trending_date"]=df["trending_date"].values.astype('datetime64[D]')
df["publish_date"]=df["publish_date"].values.astype('datetime64[D]')
IntervalMinute=1/60
if IntervalMinute==1/60:
counttotal=0
countindex=0
HoursForLabels=pd.date_range('00:00:00', '23:59:59',freq=str(IntervalMinute)+'T').strftime('%H:%M:%S').tolist()
NumberOfVideoTrendingByCountry="Number Of Video "+Country
df_NumberHours=pd.DataFrame(0,index=HoursForLabels,columns=["Label",NumberOfVideoTrendingByCountry])
df_NumberHours["Label"]=HoursForLabels
for index in range(len(HoursForLabels)):
if index<(len(HoursForLabels)-1):
df_NumberHours.loc[HoursForLabels[index],NumberOfVideoTrendingByCountry]=df["views"].between_time(start_time=HoursForLabels[index],end_time=HoursForLabels[index+1],include_end=False).count()
else:
df_NumberHours.loc[HoursForLabels[index],NumberOfVideoTrendingByCountry]=df["views"].between_time(start_time=HoursForLabels[index],end_time="23:59:59",include_start=True,include_end=True).count()
else:
df.insert(5, 'publish_date', df['publish_time'].dt.date)
df['publish_time'] = df['publish_time'].dt.time
df['trending_date'] = pd.to_datetime(df['trending_date'], format='%y.%d.%m')
df["trending_date"]=df["trending_date"].values.astype('datetime64[D]')
df["publish_date"]=df["publish_date"].values.astype('datetime64[D]')
df["weekday_publish_date"] = df["publish_date"].dt.day_name()
df["Time_Before_Trending"]=df["trending_date"].sub(df["publish_date"],axis=0)
df_NumberHours=df['publish_time'].value_counts()
df_NumberHours.sort_values(0,ascending=True)
df_NumberHours=df_NumberHours.sort_index()
HoursForLabels=pd.date_range('00:00:00', '23:59:59',freq=str(IntervalMinute)+'T').strftime('%H:%M:%S').tolist()
for time in HoursForLabels:
if time not in df_NumberHours.index:
df_NumberHours.set_value(time,0)
df_NumberHours.index=df_NumberHours.index.time
df_NumberHours.drop(df_NumberHours.tail(1).index,inplace=True)
| true | true |
f73000570bed55023fdcd7e0333417e05cb7f21a | 297 | py | Python | einops/__init__.py | ductm104/einops | a9e3f6b0d18e01e326f74bd9861288aff94e3b2c | [
"MIT"
] | 2 | 2021-07-17T09:30:42.000Z | 2021-12-10T07:42:21.000Z | einops/__init__.py | ductm104/einops | a9e3f6b0d18e01e326f74bd9861288aff94e3b2c | [
"MIT"
] | null | null | null | einops/__init__.py | ductm104/einops | a9e3f6b0d18e01e326f74bd9861288aff94e3b2c | [
"MIT"
] | null | null | null | __author__ = 'Alex Rogozhnikov'
__version__ = '0.3.0'
class EinopsError(RuntimeError):
""" Runtime error thrown by einops """
pass
__all__ = ['rearrange', 'reduce', 'repeat', 'parse_shape', 'asnumpy', 'EinopsError']
from .einops import rearrange, reduce, repeat, parse_shape, asnumpy
| 22.846154 | 84 | 0.703704 | __author__ = 'Alex Rogozhnikov'
__version__ = '0.3.0'
class EinopsError(RuntimeError):
pass
__all__ = ['rearrange', 'reduce', 'repeat', 'parse_shape', 'asnumpy', 'EinopsError']
from .einops import rearrange, reduce, repeat, parse_shape, asnumpy
| true | true |
f730008035e6d577e29225eff7316628cc5ad753 | 68 | py | Python | inference_converter/__init__.py | mzeynali/dl-model-converter | 3adff16661254f29a4e9b2d76402ba9b064d3d97 | [
"Apache-2.0"
] | null | null | null | inference_converter/__init__.py | mzeynali/dl-model-converter | 3adff16661254f29a4e9b2d76402ba9b064d3d97 | [
"Apache-2.0"
] | null | null | null | inference_converter/__init__.py | mzeynali/dl-model-converter | 3adff16661254f29a4e9b2d76402ba9b064d3d97 | [
"Apache-2.0"
] | null | null | null | import os
import sys
sys.path.append(os.path.dirname(__file__))
| 8.5 | 42 | 0.75 | import os
import sys
sys.path.append(os.path.dirname(__file__))
| true | true |
f7300106c5722946f16c6d7a68325a64b58c05ce | 787 | py | Python | tests/test_convention.py | henhans/TBmodels | 7424acaea8d91850d80bb48898af875430f25fa0 | [
"Apache-2.0"
] | 1 | 2021-01-18T13:55:40.000Z | 2021-01-18T13:55:40.000Z | tests/test_convention.py | henhans/TBmodels | 7424acaea8d91850d80bb48898af875430f25fa0 | [
"Apache-2.0"
] | null | null | null | tests/test_convention.py | henhans/TBmodels | 7424acaea8d91850d80bb48898af875430f25fa0 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2015-2018, ETH Zurich, Institut fuer Theoretische Physik
# Author: Dominik Gresch <greschd@gmx.ch>
import numpy as np
import pythtb as pt
import tbmodels as tb
def test_compare_pythtb():
pt_model = pt.tb_model(1, 1, lat=[[1]], orb=[[0], [0.2]])
tb_model = tb.Model(dim=1, pos=[[0], [0.2]], uc=[[1]])
pt_model.set_hop(3j, 0, 1, [1])
tb_model.add_hop(3j, 0, 1, [1])
assert np.isclose(pt_model._gen_ham([0]), tb_model.hamilton([0])).all()
assert np.isclose(pt_model._gen_ham([0]), tb_model.hamilton([0], convention=1)).all()
assert np.isclose(pt_model._gen_ham([1]), tb_model.hamilton([1], convention=1)).all()
assert np.isclose(pt_model._gen_ham([0.2]), tb_model.hamilton(0.2, convention=1)).all()
| 32.791667 | 91 | 0.655654 |
import numpy as np
import pythtb as pt
import tbmodels as tb
def test_compare_pythtb():
pt_model = pt.tb_model(1, 1, lat=[[1]], orb=[[0], [0.2]])
tb_model = tb.Model(dim=1, pos=[[0], [0.2]], uc=[[1]])
pt_model.set_hop(3j, 0, 1, [1])
tb_model.add_hop(3j, 0, 1, [1])
assert np.isclose(pt_model._gen_ham([0]), tb_model.hamilton([0])).all()
assert np.isclose(pt_model._gen_ham([0]), tb_model.hamilton([0], convention=1)).all()
assert np.isclose(pt_model._gen_ham([1]), tb_model.hamilton([1], convention=1)).all()
assert np.isclose(pt_model._gen_ham([0.2]), tb_model.hamilton(0.2, convention=1)).all()
| true | true |
f73001805276877dee1c73c528d58c9860590720 | 10,586 | py | Python | google/cloud/aiplatform_v1/types/featurestore_online_service.py | lclc19/python-aiplatform | d8da2e365277441abadb04328943f23345d72b0e | [
"Apache-2.0"
] | null | null | null | google/cloud/aiplatform_v1/types/featurestore_online_service.py | lclc19/python-aiplatform | d8da2e365277441abadb04328943f23345d72b0e | [
"Apache-2.0"
] | null | null | null | google/cloud/aiplatform_v1/types/featurestore_online_service.py | lclc19/python-aiplatform | d8da2e365277441abadb04328943f23345d72b0e | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
from google.cloud.aiplatform_v1.types import feature_selector as gca_feature_selector
from google.cloud.aiplatform_v1.types import types
from google.protobuf import timestamp_pb2 # type: ignore
__protobuf__ = proto.module(
package="google.cloud.aiplatform.v1",
manifest={
"ReadFeatureValuesRequest",
"ReadFeatureValuesResponse",
"StreamingReadFeatureValuesRequest",
"FeatureValue",
"FeatureValueList",
},
)
class ReadFeatureValuesRequest(proto.Message):
r"""Request message for
[FeaturestoreOnlineServingService.ReadFeatureValues][google.cloud.aiplatform.v1.FeaturestoreOnlineServingService.ReadFeatureValues].
Attributes:
entity_type (str):
Required. The resource name of the EntityType for the entity
being read. Value format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entityType}``.
For example, for a machine learning model predicting user
clicks on a website, an EntityType ID could be ``user``.
entity_id (str):
Required. ID for a specific entity. For example, for a
machine learning model predicting user clicks on a website,
an entity ID could be ``user_123``.
feature_selector (google.cloud.aiplatform_v1.types.FeatureSelector):
Required. Selector choosing Features of the
target EntityType.
"""
entity_type = proto.Field(proto.STRING, number=1,)
entity_id = proto.Field(proto.STRING, number=2,)
feature_selector = proto.Field(
proto.MESSAGE, number=3, message=gca_feature_selector.FeatureSelector,
)
class ReadFeatureValuesResponse(proto.Message):
r"""Response message for
[FeaturestoreOnlineServingService.ReadFeatureValues][google.cloud.aiplatform.v1.FeaturestoreOnlineServingService.ReadFeatureValues].
Attributes:
header (google.cloud.aiplatform_v1.types.ReadFeatureValuesResponse.Header):
Response header.
entity_view (google.cloud.aiplatform_v1.types.ReadFeatureValuesResponse.EntityView):
Entity view with Feature values. This may be
the entity in the Featurestore if values for all
Features were requested, or a projection of the
entity in the Featurestore if values for only
some Features were requested.
"""
class FeatureDescriptor(proto.Message):
r"""Metadata for requested Features.
Attributes:
id (str):
Feature ID.
"""
id = proto.Field(proto.STRING, number=1,)
class Header(proto.Message):
r"""Response header with metadata for the requested
[ReadFeatureValuesRequest.entity_type][google.cloud.aiplatform.v1.ReadFeatureValuesRequest.entity_type]
and Features.
Attributes:
entity_type (str):
The resource name of the EntityType from the
[ReadFeatureValuesRequest][google.cloud.aiplatform.v1.ReadFeatureValuesRequest].
Value format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entityType}``.
feature_descriptors (Sequence[google.cloud.aiplatform_v1.types.ReadFeatureValuesResponse.FeatureDescriptor]):
List of Feature metadata corresponding to each piece of
[ReadFeatureValuesResponse.data][].
"""
entity_type = proto.Field(proto.STRING, number=1,)
feature_descriptors = proto.RepeatedField(
proto.MESSAGE,
number=2,
message="ReadFeatureValuesResponse.FeatureDescriptor",
)
class EntityView(proto.Message):
r"""Entity view with Feature values.
Attributes:
entity_id (str):
ID of the requested entity.
data (Sequence[google.cloud.aiplatform_v1.types.ReadFeatureValuesResponse.EntityView.Data]):
Each piece of data holds the k requested values for one
requested Feature. If no values for the requested Feature
exist, the corresponding cell will be empty. This has the
same size and is in the same order as the features from the
header
[ReadFeatureValuesResponse.header][google.cloud.aiplatform.v1.ReadFeatureValuesResponse.header].
"""
class Data(proto.Message):
r"""Container to hold value(s), successive in time, for one
Feature from the request.
Attributes:
value (google.cloud.aiplatform_v1.types.FeatureValue):
Feature value if a single value is requested.
values (google.cloud.aiplatform_v1.types.FeatureValueList):
Feature values list if values, successive in
time, are requested. If the requested number of
values is greater than the number of existing
Feature values, nonexistent values are omitted
instead of being returned as empty.
"""
value = proto.Field(
proto.MESSAGE, number=1, oneof="data", message="FeatureValue",
)
values = proto.Field(
proto.MESSAGE, number=2, oneof="data", message="FeatureValueList",
)
entity_id = proto.Field(proto.STRING, number=1,)
data = proto.RepeatedField(
proto.MESSAGE,
number=2,
message="ReadFeatureValuesResponse.EntityView.Data",
)
header = proto.Field(proto.MESSAGE, number=1, message=Header,)
entity_view = proto.Field(proto.MESSAGE, number=2, message=EntityView,)
class StreamingReadFeatureValuesRequest(proto.Message):
r"""Request message for
[FeaturestoreOnlineServingService.StreamingFeatureValuesRead][].
Attributes:
entity_type (str):
Required. The resource name of the entities' type. Value
format:
``projects/{project}/locations/{location}/featurestores/{featurestore}/entityTypes/{entityType}``.
For example, for a machine learning model predicting user
clicks on a website, an EntityType ID could be ``user``.
entity_ids (Sequence[str]):
Required. IDs of entities to read Feature values of. The
maximum number of IDs is 100. For example, for a machine
learning model predicting user clicks on a website, an
entity ID could be ``user_123``.
feature_selector (google.cloud.aiplatform_v1.types.FeatureSelector):
Required. Selector choosing Features of the
target EntityType. Feature IDs will be
deduplicated.
"""
entity_type = proto.Field(proto.STRING, number=1,)
entity_ids = proto.RepeatedField(proto.STRING, number=2,)
feature_selector = proto.Field(
proto.MESSAGE, number=3, message=gca_feature_selector.FeatureSelector,
)
class FeatureValue(proto.Message):
r"""Value for a feature.
NEXT ID: 15
Attributes:
bool_value (bool):
Bool type feature value.
double_value (float):
Double type feature value.
int64_value (int):
Int64 feature value.
string_value (str):
String feature value.
bool_array_value (google.cloud.aiplatform_v1.types.BoolArray):
A list of bool type feature value.
double_array_value (google.cloud.aiplatform_v1.types.DoubleArray):
A list of double type feature value.
int64_array_value (google.cloud.aiplatform_v1.types.Int64Array):
A list of int64 type feature value.
string_array_value (google.cloud.aiplatform_v1.types.StringArray):
A list of string type feature value.
bytes_value (bytes):
Bytes feature value.
metadata (google.cloud.aiplatform_v1.types.FeatureValue.Metadata):
Metadata of feature value.
"""
class Metadata(proto.Message):
r"""Metadata of feature value.
Attributes:
generate_time (google.protobuf.timestamp_pb2.Timestamp):
Feature generation timestamp. Typically, it
is provided by user at feature ingestion time.
If not, feature store will use the system
timestamp when the data is ingested into feature
store.
"""
generate_time = proto.Field(
proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,
)
bool_value = proto.Field(proto.BOOL, number=1, oneof="value",)
double_value = proto.Field(proto.DOUBLE, number=2, oneof="value",)
int64_value = proto.Field(proto.INT64, number=5, oneof="value",)
string_value = proto.Field(proto.STRING, number=6, oneof="value",)
bool_array_value = proto.Field(
proto.MESSAGE, number=7, oneof="value", message=types.BoolArray,
)
double_array_value = proto.Field(
proto.MESSAGE, number=8, oneof="value", message=types.DoubleArray,
)
int64_array_value = proto.Field(
proto.MESSAGE, number=11, oneof="value", message=types.Int64Array,
)
string_array_value = proto.Field(
proto.MESSAGE, number=12, oneof="value", message=types.StringArray,
)
bytes_value = proto.Field(proto.BYTES, number=13, oneof="value",)
metadata = proto.Field(proto.MESSAGE, number=14, message=Metadata,)
class FeatureValueList(proto.Message):
r"""Container for list of values.
Attributes:
values (Sequence[google.cloud.aiplatform_v1.types.FeatureValue]):
A list of feature values. All of them should
be the same data type.
"""
values = proto.RepeatedField(proto.MESSAGE, number=1, message="FeatureValue",)
__all__ = tuple(sorted(__protobuf__.manifest))
| 40.250951 | 136 | 0.657472 |
import proto
from google.cloud.aiplatform_v1.types import feature_selector as gca_feature_selector
from google.cloud.aiplatform_v1.types import types
from google.protobuf import timestamp_pb2
__protobuf__ = proto.module(
package="google.cloud.aiplatform.v1",
manifest={
"ReadFeatureValuesRequest",
"ReadFeatureValuesResponse",
"StreamingReadFeatureValuesRequest",
"FeatureValue",
"FeatureValueList",
},
)
class ReadFeatureValuesRequest(proto.Message):
entity_type = proto.Field(proto.STRING, number=1,)
entity_id = proto.Field(proto.STRING, number=2,)
feature_selector = proto.Field(
proto.MESSAGE, number=3, message=gca_feature_selector.FeatureSelector,
)
class ReadFeatureValuesResponse(proto.Message):
class FeatureDescriptor(proto.Message):
id = proto.Field(proto.STRING, number=1,)
class Header(proto.Message):
entity_type = proto.Field(proto.STRING, number=1,)
feature_descriptors = proto.RepeatedField(
proto.MESSAGE,
number=2,
message="ReadFeatureValuesResponse.FeatureDescriptor",
)
class EntityView(proto.Message):
class Data(proto.Message):
value = proto.Field(
proto.MESSAGE, number=1, oneof="data", message="FeatureValue",
)
values = proto.Field(
proto.MESSAGE, number=2, oneof="data", message="FeatureValueList",
)
entity_id = proto.Field(proto.STRING, number=1,)
data = proto.RepeatedField(
proto.MESSAGE,
number=2,
message="ReadFeatureValuesResponse.EntityView.Data",
)
header = proto.Field(proto.MESSAGE, number=1, message=Header,)
entity_view = proto.Field(proto.MESSAGE, number=2, message=EntityView,)
class StreamingReadFeatureValuesRequest(proto.Message):
entity_type = proto.Field(proto.STRING, number=1,)
entity_ids = proto.RepeatedField(proto.STRING, number=2,)
feature_selector = proto.Field(
proto.MESSAGE, number=3, message=gca_feature_selector.FeatureSelector,
)
class FeatureValue(proto.Message):
class Metadata(proto.Message):
generate_time = proto.Field(
proto.MESSAGE, number=1, message=timestamp_pb2.Timestamp,
)
bool_value = proto.Field(proto.BOOL, number=1, oneof="value",)
double_value = proto.Field(proto.DOUBLE, number=2, oneof="value",)
int64_value = proto.Field(proto.INT64, number=5, oneof="value",)
string_value = proto.Field(proto.STRING, number=6, oneof="value",)
bool_array_value = proto.Field(
proto.MESSAGE, number=7, oneof="value", message=types.BoolArray,
)
double_array_value = proto.Field(
proto.MESSAGE, number=8, oneof="value", message=types.DoubleArray,
)
int64_array_value = proto.Field(
proto.MESSAGE, number=11, oneof="value", message=types.Int64Array,
)
string_array_value = proto.Field(
proto.MESSAGE, number=12, oneof="value", message=types.StringArray,
)
bytes_value = proto.Field(proto.BYTES, number=13, oneof="value",)
metadata = proto.Field(proto.MESSAGE, number=14, message=Metadata,)
class FeatureValueList(proto.Message):
values = proto.RepeatedField(proto.MESSAGE, number=1, message="FeatureValue",)
__all__ = tuple(sorted(__protobuf__.manifest))
| true | true |
f7300271abe53d1c530313d92118bc1bdad057e3 | 2,612 | py | Python | ooni/report/cli.py | irl/ooni-probe | c21861c28ca6bd667715872d099006fab87222fd | [
"BSD-2-Clause"
] | null | null | null | ooni/report/cli.py | irl/ooni-probe | c21861c28ca6bd667715872d099006fab87222fd | [
"BSD-2-Clause"
] | null | null | null | ooni/report/cli.py | irl/ooni-probe | c21861c28ca6bd667715872d099006fab87222fd | [
"BSD-2-Clause"
] | null | null | null | from __future__ import print_function
import os
import sys
from ooni import canonical_bouncer
from ooni.report import __version__
from ooni.report import tool
from ooni.settings import config
from twisted.python import usage
class Options(usage.Options):
    """Command line options for the ``oonireport`` tool."""

    synopsis = """%s [options] upload | status
""" % (os.path.basename(sys.argv[0]),)

    optFlags = [
        ["default-collector", "d", "Upload the reports to the default "
                                   "collector that is looked up with the "
                                   "canonical bouncer."]
    ]

    optParameters = [
        ["configfile", "f", None,
         "Specify the configuration file to use."],
        ["collector", "c", None,
         "Specify the collector to upload the result to."],
        ["bouncer", "b", None,
         "Specify the bouncer to query for a collector."]
    ]

    def opt_version(self):
        """Print the oonireport version and exit."""
        print("oonireport version: %s" % __version__)
        sys.exit(0)

    def parseArgs(self, *args):
        """
        Validate the positional arguments.

        Exactly one command is required ("upload" or "status"); "upload"
        optionally takes the path of a single report file to upload.

        :raises usage.UsageError: if no command or an unknown command is
            given.
        """
        if len(args) == 0:
            raise usage.UsageError(
                "Must specify at least one command"
            )
        self['command'] = args[0]
        if self['command'] not in ("upload", "status"):
            raise usage.UsageError(
                "Must specify either command upload or status"
            )
        if self['command'] == "upload":
            try:
                self['report_file'] = args[1]
            except IndexError:
                # No report file given: "upload" will upload all reports.
                self['report_file'] = None
def tor_check():
    """Exit with status 1 unless a Tor SOCKS port has been configured."""
    socks_port = config.tor.socks_port
    if socks_port:
        return
    print("Currently oonireport requires that you start Tor yourself "
          "and set the socks_port inside of ooniprobe.conf")
    sys.exit(1)
def run():
    """
    Entry point of the oonireport command line tool.

    Parses the command line options, loads the ooniprobe configuration and
    dispatches to the requested sub-command ("upload" or "status").

    :return: the result of the invoked tool command, or None on usage error
             paths (which exit the process).
    """
    options = Options()

    try:
        options.parseOptions()
    except Exception as exc:
        # Print the error followed by the usage text and exit with a
        # conventional "command line usage error" status code.
        print("Error: %s" % exc)
        print(options)
        sys.exit(2)

    config.global_options = dict(options)
    config.set_paths()
    config.read_config_file()

    # -d/--default-collector: look up the collector via the canonical bouncer.
    if options['default-collector']:
        options['bouncer'] = canonical_bouncer

    if options['command'] == "upload" and options['report_file']:
        # Upload a single report file; requires a reachable Tor SOCKS port.
        tor_check()
        return tool.upload(options['report_file'],
                           options['collector'],
                           options['bouncer'])
    elif options['command'] == "upload":
        # No report file given: upload all pending reports.
        tor_check()
        return tool.upload_all(options['collector'],
                               options['bouncer'])
    elif options['command'] == "status":
        return tool.status()
    else:
        # parseArgs only accepts "upload"/"status", so this branch is a
        # defensive fallback that just prints the usage text.
        print(options)
| 28.703297 | 74 | 0.561256 | from __future__ import print_function
import os
import sys
from ooni import canonical_bouncer
from ooni.report import __version__
from ooni.report import tool
from ooni.settings import config
from twisted.python import usage
class Options(usage.Options):
synopsis = """%s [options] upload | status
""" % (os.path.basename(sys.argv[0]),)
optFlags = [
["default-collector", "d", "Upload the reports to the default "
"collector that is looked up with the "
"canonical bouncer."]
]
optParameters = [
["configfile", "f", None,
"Specify the configuration file to use."],
["collector", "c", None,
"Specify the collector to upload the result to."],
["bouncer", "b", None,
"Specify the bouncer to query for a collector."]
]
def opt_version(self):
print("oonireport version: %s" % __version__)
sys.exit(0)
def parseArgs(self, *args):
if len(args) == 0:
raise usage.UsageError(
"Must specify at least one command"
)
return
self['command'] = args[0]
if self['command'] not in ("upload", "status"):
raise usage.UsageError(
"Must specify either command upload or status"
)
if self['command'] == "upload":
try:
self['report_file'] = args[1]
except IndexError:
self['report_file'] = None
def tor_check():
if not config.tor.socks_port:
print("Currently oonireport requires that you start Tor yourself "
"and set the socks_port inside of ooniprobe.conf")
sys.exit(1)
def run():
options = Options()
try:
options.parseOptions()
except Exception as exc:
print("Error: %s" % exc)
print(options)
sys.exit(2)
config.global_options = dict(options)
config.set_paths()
config.read_config_file()
if options['default-collector']:
options['bouncer'] = canonical_bouncer
if options['command'] == "upload" and options['report_file']:
tor_check()
return tool.upload(options['report_file'],
options['collector'],
options['bouncer'])
elif options['command'] == "upload":
tor_check()
return tool.upload_all(options['collector'],
options['bouncer'])
elif options['command'] == "status":
return tool.status()
else:
print(options)
| true | true |
f7300289bf48754135726dad8a8c684a9ab7d495 | 14,855 | py | Python | queryable_properties/managers.py | W1ldPo1nter/django-queryable-properties | 9bb4ecb4fbdd7a9e0f610f937c8101a643027fb1 | [
"BSD-3-Clause"
] | 36 | 2019-10-22T11:44:37.000Z | 2022-03-15T21:27:03.000Z | queryable_properties/managers.py | W1ldPo1nter/django-queryable-properties | 9bb4ecb4fbdd7a9e0f610f937c8101a643027fb1 | [
"BSD-3-Clause"
] | 6 | 2020-10-03T15:13:26.000Z | 2021-09-25T14:05:50.000Z | queryable_properties/managers.py | W1ldPo1nter/django-queryable-properties | 9bb4ecb4fbdd7a9e0f610f937c8101a643027fb1 | [
"BSD-3-Clause"
] | 3 | 2021-04-26T08:30:46.000Z | 2021-08-18T09:04:49.000Z | # encoding: utf-8
from __future__ import unicode_literals
import six
from django.db.models import Manager
from django.db.models.query import QuerySet
from .compat import (ANNOTATION_SELECT_CACHE_NAME, ANNOTATION_TO_AGGREGATE_ATTRIBUTES_MAP, chain_query, chain_queryset,
ModelIterable, ValuesQuerySet)
from .exceptions import QueryablePropertyDoesNotExist, QueryablePropertyError
from .query import QueryablePropertiesQueryMixin
from .utils import get_queryable_property
from .utils.internal import InjectableMixin, QueryPath, QueryablePropertyReference
class QueryablePropertiesIterable(InjectableMixin):
    """
    An iterable that yields the actual results of a queryset while correctly
    processing columns of queryable properties. It is closely related to
    Django's BaseIterable and will be used as a mixin for its subclasses in all
    (recent) Django versions that have it. In all other (older) versions, this
    class will be used as a standalone iterable instead.
    """

    def __init__(self, queryset, *args, **kwargs):
        """
        Initialize a new iterable for the given queryset. If an iterable is
        given it will be used to retrieve the model instances before applying
        queryable properties logic (standalone usage for older Django
        versions). Otherwise, the __iter__ implementation of the base class
        is used to get the model instances (usage as mixin).

        :param QuerySet queryset: The queryset to perform the database query
                                  for.
        :param args: Positional arguments to pass through to the base class
                     initialization when used as a mixin.
        :param kwargs: Keyword arguments to pass through to the base class
                       initialization when used as a mixin.
        :keyword collections.Iterable iterable: The optional iterable to use
                                                for standalone usage.
        """
        self.queryset = queryset
        # Only perform the super call if the class is used as a mixin
        if self.__class__.__bases__ != (InjectableMixin,):
            super(QueryablePropertiesIterable, self).__init__(queryset, *args, **kwargs)
        self.iterable = kwargs.get('iterable') or super(QueryablePropertiesIterable, self).__iter__()
        # Property value caching only applies when actual model instances are
        # yielded (not for values()/values_list() style querysets).
        self.yields_model_instances = ((ModelIterable is not None and isinstance(self, ModelIterable)) or
                                       (ValuesQuerySet is not None and not isinstance(self.queryset, ValuesQuerySet)))

    def __iter__(self):
        """
        Yield the model objects for the queryset associated with this iterator
        with their correctly processed selected queryable properties.

        :return: A generator that yields the model objects.
        """
        original_query = self.queryset.query
        try:
            # Work on a copy of the query so the alias renaming performed in
            # _setup_queryable_properties doesn't leak into the queryset.
            self.queryset.query = chain_query(original_query)
            final_aliases = self._setup_queryable_properties()

            for obj in self.iterable:
                if self.yields_model_instances:
                    # Retrieve the annotation values from each renamed
                    # attribute and use it to populate the cache for the
                    # corresponding queryable property on each object while
                    # removing the weird, renamed attributes.
                    for changed_name, property_ref in six.iteritems(final_aliases):
                        value = getattr(obj, changed_name)
                        delattr(obj, changed_name)
                        if property_ref:
                            property_ref.descriptor.set_cached_value(obj, value)
                yield obj
        finally:
            # Always restore the untouched query on the queryset.
            self.queryset.query = original_query

    def _setup_queryable_properties(self):
        """
        Perform the required setup to correctly process queryable property
        values.

        Change the internal aliases of the annotations that belong to queryable
        properties in the query of the associated queryset to something unique
        and return a dictionary mapping the queryable properties to the changed
        aliases. This is necessary to allow Django to populate the annotation
        attributes on the resulting model instances, which would otherwise call
        the setter of the queryable properties. This way, Django can populate
        attributes with different names and avoid using the setter methods.

        Also make sure that ordering by queryable properties works in older
        Django versions.

        :return: A dictionary mapping the final aliases for queryable
                 properties to the corresponding references to be able to
                 retrieve the values from the DB and apply them to the correct
                 property. The property reference may be None, indicating that
                 the retrieved value should be discarded.
        :rtype: dict[str, QueryablePropertyReference | None]
        """
        query = self.queryset.query
        final_aliases = {}
        select = dict(query.annotation_select)

        for property_ref in query._queryable_property_annotations:
            annotation_name = six.text_type(property_ref.full_path)

            # Older Django versions don't work with the annotation select dict
            # when it comes to ordering, so queryable property annotations used
            # for ordering need special treatment.
            order_by_occurrences = []
            if ANNOTATION_TO_AGGREGATE_ATTRIBUTES_MAP:  # pragma: no cover
                order_by_occurrences = [index for index, field_name in enumerate(query.order_by)
                                        if field_name in (annotation_name, '-{}'.format(annotation_name))]
                if order_by_occurrences and annotation_name not in select and annotation_name in query.annotations:
                    # Select the annotation only for ordering purposes; mark it
                    # with None so its value is discarded after retrieval.
                    select[annotation_name] = query.annotations[annotation_name]
                    final_aliases[annotation_name] = None

            if not self.yields_model_instances or annotation_name not in select:
                # The queryable property annotation does not require selection
                # or no renaming needs to occur since the queryset doesn't
                # yield model instances.
                continue

            # Suffix the original annotation name with the lookup separator to
            # create a non-clashing name: both model field an queryable
            # property names are not allowed to contain the separator and a
            # relation path ending with the separator would be invalid as well.
            changed_name = six.text_type(property_ref.full_path + '')
            final_aliases[changed_name] = final_aliases.pop(annotation_name, property_ref)
            select[changed_name] = select.pop(annotation_name)
            for index in order_by_occurrences:  # pragma: no cover
                # Apply the changed names to the ORDER BY clause.
                query.order_by[index] = query.order_by[index].replace(annotation_name, changed_name)

        # Patch the correct select property on the query with the new names,
        # since this property is used by the SQL compiler to build the actual
        # SQL query (which is where the changed names should be used).
        setattr(query, ANNOTATION_SELECT_CACHE_NAME, select)
        return final_aliases
class QueryablePropertiesQuerySetMixin(InjectableMixin):
    """
    A mixin for Django's :class:`django.db.models.QuerySet` objects that allows
    to use queryable properties in filters, annotations and update queries.
    """

    def init_injected_attrs(self):
        """Replace the queryset's query with a queryable-properties-aware copy."""
        # To work correctly, a query using the QueryablePropertiesQueryMixin is
        # required. If the current query is not using the mixin already, it
        # will be dynamically injected into the query. That way, other Django
        # extensions using custom query objects are also supported.
        class_name = 'QueryableProperties' + self.query.__class__.__name__
        self.query = QueryablePropertiesQueryMixin.inject_into_object(chain_query(self.query), class_name)

    @property
    def _iterable_class(self):
        # Override the regular _iterable_class attribute of recent Django
        # versions with a property that also stores the value in the instance
        # dict, but automatically mixes the QueryablePropertiesModelIterable
        # into the base class on getter access if the base class yields model
        # instances. That way, the queryable properties extensions stays
        # compatible to custom iterable classes while querysets can still be
        # pickled due to the base class being in the instance dict.
        cls = self.__dict__['_iterable_class']
        return QueryablePropertiesIterable.mix_with_class(cls, 'QueryableProperties' + cls.__name__)

    @_iterable_class.setter
    def _iterable_class(self, value):
        # Store the plain class; the getter mixes in the iterable dynamically.
        self.__dict__['_iterable_class'] = value

    def _clone(self, klass=None, *args, **kwargs):
        """Clone this queryset while keeping queryable properties support intact."""
        if klass:  # pragma: no cover
            # In older Django versions, the class of the queryset may be
            # replaced with a dynamically created class based on the current
            # class and the value of klass while cloning (e.g when using
            # .values()). Therefore this needs to be re-injected to be on top
            # of the MRO again to enable queryable properties functionality.
            klass = QueryablePropertiesQuerySetMixin.mix_with_class(klass, 'QueryableProperties' + klass.__name__)
            args = (klass,) + args
        clone = super(QueryablePropertiesQuerySetMixin, self)._clone(*args, **kwargs)
        # Since the _iterable_class property may return a dynamically created
        # class, the value of a clone must be reset to the base class.
        if '_iterable_class' in self.__dict__:
            clone._iterable_class = self.__dict__['_iterable_class']
        return clone

    def _resolve_update_kwargs(self, **kwargs):
        """
        Look for the names of queryable properties in the given keyword
        arguments for an update query and correctly resolve them into their
        actual keyword arguments.

        :param kwargs: Keyword arguments of an update query.
        :return: A dictionary containing the resolved arguments.
        :rtype: dict
        :raises QueryablePropertyError: if a property does not support
            updating or resolves to conflicting field values.
        """
        original_names = set(kwargs)
        for original_name in original_names:
            try:
                prop = get_queryable_property(self.model, original_name)
            except QueryablePropertyDoesNotExist:
                # Not a queryable property: leave the argument untouched.
                continue
            if not prop.get_update_kwargs:
                raise QueryablePropertyError('Queryable property "{}" does not implement queryset updating.'
                                             .format(prop))

            # Call the method recursively since queryable properties can build
            # upon each other.
            additional_kwargs = self._resolve_update_kwargs(
                **prop.get_update_kwargs(self.model, kwargs.pop(original_name)))
            # Make sure that there are no conflicting values after resolving
            # the update keyword arguments of the queryable properties.
            for additional_name, value in six.iteritems(additional_kwargs):
                if additional_name in kwargs and kwargs[additional_name] != value:
                    raise QueryablePropertyError(
                        'Updating queryable property "{prop}" would change field "{field}", but a conflicting value '
                        'was set for this field by another queryable property or explicitly in the update arguments.'
                        .format(prop=prop, field=additional_name)
                    )
                kwargs[additional_name] = value

        return kwargs

    def select_properties(self, *names):
        """
        Add the annotations of the queryable properties with the specified
        names to this query. The annotation values will be cached in the
        properties of resulting model instances, regardless of the regular
        caching behavior of the queried properties.

        :param names: Names of queryable properties.
        :return: A copy of this queryset with the added annotations.
        :rtype: QuerySet
        """
        queryset = chain_queryset(self)
        for name in names:
            property_ref = QueryablePropertyReference(get_queryable_property(self.model, name), self.model, QueryPath())
            # A full GROUP BY is required if the query is not limited to
            # certain fields. Since only certain types of queries had the
            # _fields attribute in old Django versions, fall back to checking
            # for existing selection, on which the GROUP BY would be based.
            full_group_by = not getattr(self, '_fields', self.query.select)
            with queryset.query._add_queryable_property_annotation(property_ref, full_group_by, select=True):
                pass
        return queryset

    def iterator(self, *args, **kwargs):
        """Iterate over the results while applying queryable properties logic."""
        # Recent Django versions use the associated iterable class for the
        # iterator() implementation, where the QueryablePropertiesModelIterable
        # will be already mixed in. In older Django versions, use a standalone
        # QueryablePropertiesModelIterable instead to perform the queryable
        # properties processing.
        iterable = super(QueryablePropertiesQuerySetMixin, self).iterator(*args, **kwargs)
        if '_iterable_class' not in self.__dict__:  # pragma: no cover
            return iter(QueryablePropertiesIterable(self, iterable=iterable))
        return iterable

    def update(self, **kwargs):
        """Update the queried rows, resolving queryable property values first."""
        # Resolve any queryable properties into their actual update kwargs
        # before calling the base update method.
        kwargs = self._resolve_update_kwargs(**kwargs)
        return super(QueryablePropertiesQuerySetMixin, self).update(**kwargs)
class QueryablePropertiesQuerySet(QueryablePropertiesQuerySetMixin, QuerySet):
    """
    A special queryset class that allows to use queryable properties in its
    filter conditions, annotations and update queries.
    """
    pass


if hasattr(Manager, 'from_queryset'):
    # Django 1.7+: derive the manager class directly from the queryset class.
    QueryablePropertiesManager = Manager.from_queryset(QueryablePropertiesQuerySet)
else:  # pragma: no cover
    # Older Django versions: define an equivalent manager class by hand.
    class QueryablePropertiesManager(Manager):

        def get_queryset(self):
            """Return a QueryablePropertiesQuerySet for the managed model."""
            return QueryablePropertiesQuerySet(self.model, using=self._db)

        # Alias for very old Django versions that still call get_query_set.
        get_query_set = get_queryset

        def select_properties(self, *names):
            """Return a queryset with the given queryable properties selected."""
            return self.get_queryset().select_properties(*names)
| 51.401384 | 120 | 0.671289 |
from __future__ import unicode_literals
import six
from django.db.models import Manager
from django.db.models.query import QuerySet
from .compat import (ANNOTATION_SELECT_CACHE_NAME, ANNOTATION_TO_AGGREGATE_ATTRIBUTES_MAP, chain_query, chain_queryset,
ModelIterable, ValuesQuerySet)
from .exceptions import QueryablePropertyDoesNotExist, QueryablePropertyError
from .query import QueryablePropertiesQueryMixin
from .utils import get_queryable_property
from .utils.internal import InjectableMixin, QueryPath, QueryablePropertyReference
class QueryablePropertiesIterable(InjectableMixin):
def __init__(self, queryset, *args, **kwargs):
self.queryset = queryset
if self.__class__.__bases__ != (InjectableMixin,):
super(QueryablePropertiesIterable, self).__init__(queryset, *args, **kwargs)
self.iterable = kwargs.get('iterable') or super(QueryablePropertiesIterable, self).__iter__()
self.yields_model_instances = ((ModelIterable is not None and isinstance(self, ModelIterable)) or
(ValuesQuerySet is not None and not isinstance(self.queryset, ValuesQuerySet)))
def __iter__(self):
original_query = self.queryset.query
try:
self.queryset.query = chain_query(original_query)
final_aliases = self._setup_queryable_properties()
for obj in self.iterable:
if self.yields_model_instances:
for changed_name, property_ref in six.iteritems(final_aliases):
value = getattr(obj, changed_name)
delattr(obj, changed_name)
if property_ref:
property_ref.descriptor.set_cached_value(obj, value)
yield obj
finally:
self.queryset.query = original_query
def _setup_queryable_properties(self):
query = self.queryset.query
final_aliases = {}
select = dict(query.annotation_select)
for property_ref in query._queryable_property_annotations:
annotation_name = six.text_type(property_ref.full_path)
# when it comes to ordering, so queryable property annotations used
# for ordering need special treatment.
order_by_occurrences = []
if ANNOTATION_TO_AGGREGATE_ATTRIBUTES_MAP: # pragma: no cover
order_by_occurrences = [index for index, field_name in enumerate(query.order_by)
if field_name in (annotation_name, '-{}'.format(annotation_name))]
if order_by_occurrences and annotation_name not in select and annotation_name in query.annotations:
select[annotation_name] = query.annotations[annotation_name]
final_aliases[annotation_name] = None
if not self.yields_model_instances or annotation_name not in select:
# The queryable property annotation does not require selection
# or no renaming needs to occur since the queryset doesn't
continue
changed_name = six.text_type(property_ref.full_path + '')
final_aliases[changed_name] = final_aliases.pop(annotation_name, property_ref)
select[changed_name] = select.pop(annotation_name)
for index in order_by_occurrences:
query.order_by[index] = query.order_by[index].replace(annotation_name, changed_name)
setattr(query, ANNOTATION_SELECT_CACHE_NAME, select)
return final_aliases
class QueryablePropertiesQuerySetMixin(InjectableMixin):
def init_injected_attrs(self):
class_name = 'QueryableProperties' + self.query.__class__.__name__
self.query = QueryablePropertiesQueryMixin.inject_into_object(chain_query(self.query), class_name)
@property
def _iterable_class(self):
cls = self.__dict__['_iterable_class']
return QueryablePropertiesIterable.mix_with_class(cls, 'QueryableProperties' + cls.__name__)
@_iterable_class.setter
def _iterable_class(self, value):
self.__dict__['_iterable_class'] = value
def _clone(self, klass=None, *args, **kwargs):
if klass:
klass = QueryablePropertiesQuerySetMixin.mix_with_class(klass, 'QueryableProperties' + klass.__name__)
args = (klass,) + args
clone = super(QueryablePropertiesQuerySetMixin, self)._clone(*args, **kwargs)
if '_iterable_class' in self.__dict__:
clone._iterable_class = self.__dict__['_iterable_class']
return clone
def _resolve_update_kwargs(self, **kwargs):
original_names = set(kwargs)
for original_name in original_names:
try:
prop = get_queryable_property(self.model, original_name)
except QueryablePropertyDoesNotExist:
continue
if not prop.get_update_kwargs:
raise QueryablePropertyError('Queryable property "{}" does not implement queryset updating.'
.format(prop))
additional_kwargs = self._resolve_update_kwargs(
**prop.get_update_kwargs(self.model, kwargs.pop(original_name)))
for additional_name, value in six.iteritems(additional_kwargs):
if additional_name in kwargs and kwargs[additional_name] != value:
raise QueryablePropertyError(
'Updating queryable property "{prop}" would change field "{field}", but a conflicting value '
'was set for this field by another queryable property or explicitly in the update arguments.'
.format(prop=prop, field=additional_name)
)
kwargs[additional_name] = value
return kwargs
def select_properties(self, *names):
queryset = chain_queryset(self)
for name in names:
property_ref = QueryablePropertyReference(get_queryable_property(self.model, name), self.model, QueryPath())
full_group_by = not getattr(self, '_fields', self.query.select)
with queryset.query._add_queryable_property_annotation(property_ref, full_group_by, select=True):
pass
return queryset
def iterator(self, *args, **kwargs):
iterable = super(QueryablePropertiesQuerySetMixin, self).iterator(*args, **kwargs)
if '_iterable_class' not in self.__dict__:
return iter(QueryablePropertiesIterable(self, iterable=iterable))
return iterable
def update(self, **kwargs):
kwargs = self._resolve_update_kwargs(**kwargs)
return super(QueryablePropertiesQuerySetMixin, self).update(**kwargs)
class QueryablePropertiesQuerySet(QueryablePropertiesQuerySetMixin, QuerySet):
pass
if hasattr(Manager, 'from_queryset'):
QueryablePropertiesManager = Manager.from_queryset(QueryablePropertiesQuerySet)
else:
class QueryablePropertiesManager(Manager):
def get_queryset(self):
return QueryablePropertiesQuerySet(self.model, using=self._db)
get_query_set = get_queryset
def select_properties(self, *names):
return self.get_queryset().select_properties(*names)
| true | true |
f73002d98b59c3477dc664163095a60c163f8748 | 1,765 | py | Python | scripts/postprocess_score.py | sumanthd17/indicTrans | e78ab48d33ffaa51af818e28226b281aae495994 | [
"MIT"
] | null | null | null | scripts/postprocess_score.py | sumanthd17/indicTrans | e78ab48d33ffaa51af818e28226b281aae495994 | [
"MIT"
] | null | null | null | scripts/postprocess_score.py | sumanthd17/indicTrans | e78ab48d33ffaa51af818e28226b281aae495994 | [
"MIT"
] | null | null | null | import sys
def postprocess(
    infname, outfname, input_size
):
    """
    Parse fairseq interactive/generate log output and write one model score
    per input sentence to ``outfname``.

    Hypothesis lines in a fairseq log have the form
    ``H-<sid>\\t<score>\\t<text>``. Sentences for which no hypothesis is
    found keep a dummy score of 0.0, so the output always contains exactly
    ``input_size`` lines, in input order.

    :param infname: path of the fairseq log file.
    :param outfname: path of the output file (one score per line).
    :param input_size: expected number of output sentences.
    """
    # Pre-fill with a dummy score so untranslated sentences still produce an
    # output line at the correct position.
    scores = [0.0] * input_size

    with open(infname, "r", encoding="utf-8") as infile:
        for line in infile:
            if not line.startswith("H-"):
                continue
            # "H-<sid>\t<score>\t<hypothesis>" -> only sid and score are
            # needed; not touching field 2 also tolerates empty hypotheses.
            fields = line.strip().split("\t")
            sid = int(fields[0].split("-")[1])
            scores[sid] = float(fields[1])

    with open(outfname, "w", encoding="utf-8") as outfile:
        for score in scores:
            outfile.write("{}\n".format(score))
if __name__ == "__main__":
    # Usage: postprocess_score.py <fairseq_log> <output_file> <num_sentences>
    infname = sys.argv[1]
    outfname = sys.argv[2]
    input_size = int(sys.argv[3])

    postprocess(
        infname, outfname, input_size
    )
| 36.020408 | 125 | 0.626629 | import sys
def postprocess(
    infname, outfname, input_size
):
    """Extract per-sentence model scores from a fairseq log.

    Reads hypothesis lines ("H-<sid>", score, text -- tab separated) from
    *infname* and writes one score per line to *outfname*, ordered by
    sentence id.  Sentences absent from the log get the default score 0.0.
    """
    # One slot per expected sentence; missing ones keep the 0.0 default.
    ordered = [(idx, 0.0, "") for idx in range(input_size)]
    with open(infname, "r", encoding="utf-8") as infile:
        hyp_rows = [ln.strip().split("\t") for ln in infile if ln.startswith("H-")]
    for row in hyp_rows:
        sid = int(row[0].split("-")[1])
        ordered[sid] = (sid, float(row[1]), row[2])
    with open(outfname, "w", encoding="utf-8") as outfile:
        for _sid, score, _hyp in ordered:
            outfile.write("{}\n".format(score))
if __name__ == "__main__":
    # CLI usage: postprocess_score.py <fairseq log> <output scores file> <num input sentences>
    infname = sys.argv[1]
    outfname = sys.argv[2]
    input_size = int(sys.argv[3])
    postprocess(
        infname, outfname, input_size
    )
| true | true |
f730037de960ab141d2243d99c48b409e7c12847 | 424 | py | Python | services/datalad/tests/test_validator.py | build3/openneuro | ae8f6edbab243703b38cefd729629c1741eb3839 | [
"MIT"
] | null | null | null | services/datalad/tests/test_validator.py | build3/openneuro | ae8f6edbab243703b38cefd729629c1741eb3839 | [
"MIT"
] | 1 | 2020-09-25T11:06:37.000Z | 2020-09-25T11:06:37.000Z | services/datalad/tests/test_validator.py | adswa/openneuro | 64e7fdbeb8b3c567c340b80f22a6134e4ee8070a | [
"MIT"
] | null | null | null | import json
from .dataset_fixtures import *
from datalad_service.tasks.validator import validate_dataset_sync
def test_validator(new_dataset):
    """The fixture dataset is intentionally invalid, so the synchronous
    validator must report a quick-validation error."""
    outcome = validate_dataset_sync(new_dataset.path, 'HEAD')
    assert 'issues' in outcome
    issues = outcome['issues']
    assert 'errors' in issues
    first_error = issues['errors'][0]
    assert first_error['key'] == 'QUICK_VALIDATION_FAILED'
| 32.615385 | 77 | 0.757075 | import json
from .dataset_fixtures import *
from datalad_service.tasks.validator import validate_dataset_sync
def test_validator(new_dataset):
    """The new_dataset fixture does not pass validation, so the synchronous
    validator is expected to report a quick-validation error."""
    results = validate_dataset_sync(new_dataset.path, 'HEAD')
    assert 'issues' in results
    assert 'errors' in results['issues']
    assert results['issues']['errors'][0]['key'] == 'QUICK_VALIDATION_FAILED'
| true | true |
f73003dbae406346ceffaad933d70e33d38fff14 | 190 | py | Python | cride/circles/apps.py | jpcano1/cride-platzi | 6548b5a4c42c2acc9c888f93d5479be9c8b7e6d7 | [
"MIT"
] | null | null | null | cride/circles/apps.py | jpcano1/cride-platzi | 6548b5a4c42c2acc9c888f93d5479be9c8b7e6d7 | [
"MIT"
] | null | null | null | cride/circles/apps.py | jpcano1/cride-platzi | 6548b5a4c42c2acc9c888f93d5479be9c8b7e6d7 | [
"MIT"
] | null | null | null | """ Circles app """
# Django
from django.apps import AppConfig
class CirclesAppConfig(AppConfig):
    """Django AppConfig for the circles application (cride.circles)."""
    # Dotted Python path Django uses to locate the application.
    name = 'cride.circles'
    # Human-readable name (shown e.g. in the admin interface).
    verbose_name = 'Circles'
| 15.833333 | 34 | 0.663158 |
from django.apps import AppConfig
class CirclesAppConfig(AppConfig):
    """Django AppConfig for the circles application (cride.circles)."""
    # Dotted Python path Django uses to locate the application.
    name = 'cride.circles'
    # Human-readable name (shown e.g. in the admin interface).
    verbose_name = 'Circles'
| true | true |
f730044e72d0f566ba1c776e68f1ecc503b9cd45 | 7,888 | py | Python | modules/mdebugger/attachment_marker/motionsense.py | MD2Korg/CerebralCortex-DataAnalysis | 73f5ea2430bc7c23de422dccb7b65ef9f8917595 | [
"BSD-2-Clause"
] | 1 | 2018-04-24T18:11:24.000Z | 2018-04-24T18:11:24.000Z | modules/mdebugger/attachment_marker/motionsense.py | Boris69bg/CerebralCortex-DataAnalysis | 49565bdff348d69153bd5d3a37e73f1645f82b32 | [
"BSD-2-Clause"
] | 10 | 2018-03-13T19:04:09.000Z | 2018-05-12T01:40:03.000Z | modules/mdebugger/attachment_marker/motionsense.py | Boris69bg/CerebralCortex-DataAnalysis | 49565bdff348d69153bd5d3a37e73f1645f82b32 | [
"BSD-2-Clause"
] | 42 | 2017-12-07T17:08:14.000Z | 2019-06-02T08:25:12.000Z | # Copyright (c) 2017, MD2K Center of Excellence
# - Nasir Ali <nasir.ali08@gmail.com>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import uuid
from collections import OrderedDict
from cerebralcortex.cerebralcortex import CerebralCortex
from modules.mdebugger.post_processing import get_execution_context, get_annotations
from modules.mdebugger.post_processing import store
from modules.mdebugger.util import get_stream_days
from modules.mdebugger.util import merge_consective_windows
from core.signalprocessing.window import window
from cerebralcortex.core.data_manager.raw.stream_handler import DataSet
def attachment_marker(raw_stream_id: uuid, stream_name: str, owner_id: uuid, dd_stream_name, CC: CerebralCortex,
                      config: dict):
    """
    Label sensor data as sensor-on-body, sensor-off-body, or improper-attachment.
    All the labeled data (st, et, label) with its metadata are then stored in a datastore.

    :param raw_stream_id: id of the raw MotionSense stream to diagnose.
        NOTE(review): annotated as uuid but string-concatenated below, so in
        practice callers apparently pass a str -- confirm with call sites.
    :param stream_name: name of the raw input stream
    :param owner_id: id of the stream owner (same str-vs-uuid caveat as above)
    :param dd_stream_name: name of the data-diagnostic output stream
    :param CC: CerebralCortex instance used to read the raw stream and store results
    :param config: data-diagnostic configuration dict
    """
    # TODO: quality streams could be multiple so find the one computed with CC
    # Derive a deterministic output stream id from the raw stream id, the
    # data-diagnostic stream name and the owner id (uuid3 is name-based).
    attachment_marker_stream_id = uuid.uuid3(uuid.NAMESPACE_DNS, str(raw_stream_id + dd_stream_name + owner_id+"ATTACHMENT MARKER"))
    # Days of the raw stream to process (determined by get_stream_days).
    stream_days = get_stream_days(raw_stream_id, attachment_marker_stream_id, CC)
    for day in stream_days:
        # load stream data to be diagnosed
        raw_stream = CC.get_stream(raw_stream_id, day=day, data_type=DataSet.COMPLETE)
        if len(raw_stream.data) > 0:
            # Window the day's samples, label each window, then merge
            # consecutive windows that received the same label.
            windowed_data = window(raw_stream.data, config['general']['window_size'], True)
            results = process_windows(windowed_data, config)
            merged_windows = merge_consective_windows(results)
            input_streams = [{"owner_id": owner_id, "id": str(raw_stream_id), "name": stream_name}]
            output_stream = {"id": attachment_marker_stream_id, "name": dd_stream_name,
                             "algo_type": config["algo_type"]["attachment_marker"]}
            metadata = get_metadata(dd_stream_name, input_streams, config)
            store(merged_windows, input_streams, output_stream, metadata, CC, config)
def process_windows(windowed_data: OrderedDict, config: dict) -> OrderedDict:
    """
    Label each window of MotionSense data as on-body, off-body, or
    improperly attached.

    For every window, the fraction of data points whose ``sample`` value is
    exactly 0 is computed (zero count divided by 20).  That fraction is then
    compared against the thresholds from ``config['attachment_marker']`` to
    pick one of the labels from ``config['labels']``:

    * offbody_threshold < fraction < improper_threshold -> improper attachment
    * fraction > onbody_threshold                       -> on body
    * otherwise                                         -> off body

    :param windowed_data: OrderedDict mapping a window key to an iterable of
        data points; each data point exposes a ``sample`` attribute.
    :param config: data-diagnostic configuration dict providing the
        ``attachment_marker`` thresholds and the ``labels`` values.
    :return: OrderedDict mapping each window key to its attachment label
        (empty if windowed_data is empty/None).
    """
    results = OrderedDict()

    # NOTE(review): the divisor 20 is presumably derived from the sensor
    # sampling rate / window size -- confirm before changing.
    normalizer = 20

    threshold_improper_attachment = config['attachment_marker']['motionsense_improper_attachment']
    threshold_onbody = config['attachment_marker']['motionsense_onbody']
    threshold_offbody = config['attachment_marker']['motionsense_offbody']

    label_improper_attachment = config['labels']['motionsense_improper_attachment']
    label_onbody = config['labels']['motionsense_onbody']
    label_offbody = config['labels']['motionsense_offbody']

    if windowed_data:
        for key, data in windowed_data.items():
            # Normalized count of samples in this window that are exactly zero;
            # computed once instead of three times as in the original.
            zero_fraction = sum(1 for point in data if point.sample == 0) / normalizer
            if threshold_offbody < zero_fraction < threshold_improper_attachment:
                results[key] = label_improper_attachment
            elif zero_fraction > threshold_onbody:
                results[key] = label_onbody
            else:
                results[key] = label_offbody
    return results
def get_metadata(dd_stream_name: str, input_streams: dict, config: dict) -> dict:
    """
    Build the metadata (execution context, data descriptor, annotations) for
    an attachment-marker output stream.

    :param dd_stream_name: name of the data-diagnostic output stream; must be
        one of the RIP / ECG / MotionSense attachment-marker stream names
        declared in config["stream_names"].
    :param input_streams: input-stream descriptors (owner_id, id, name).
    :param config: data-diagnostic configuration dict.
    :return: dict with keys "ec" (execution context), "dd" (data descriptor)
        and "anno" (annotations).
    :raises ValueError: if dd_stream_name is not a known attachment-marker stream.
    """
    if dd_stream_name == config["stream_names"]["autosense_rip_attachment_marker"]:
        # RIP (respiration) attachment marker.
        input_param = {"window_size": config["general"]["window_size"],
                       "onbody_threshold": config["attachment_marker"]["rip_on_body"],
                       "improper_attachment": config["attachment_marker"]["improper_attachment"]}
        data_descriptor = {"NAME": dd_stream_name, "DATA_TYPE": "int",
                           "DESCRIPTION": "Attachment labels: Improper attachment: " + str(
                               config["labels"]["rip_improper_attachment"]) + ", Offbody: " + str(
                               config["labels"]["rip_off_body"]) + ", Onbody: " + str(config["labels"]["rip_on_body"])}
    elif dd_stream_name == config["stream_names"]["autosense_ecg_attachment_marker"]:
        # ECG attachment marker.
        # NOTE(review): "ecg_vairance_threshold" is misspelled, but it is a
        # persisted metadata key -- renaming it would change stored metadata.
        input_param = {"window_size": config["general"]["window_size"],
                       "ecg_vairance_threshold": config["attachment_marker"]["ecg_on_body"],
                       "improper_attachment": config["attachment_marker"]["improper_attachment"]}
        data_descriptor = {"NAME": dd_stream_name, "DATA_TYPE": "int",
                           "DESCRIPTION": "Attachment labels: Improper attachment: " + str(
                               config["labels"]["ecg_improper_attachment"]) + ", Offbody: " + str(
                               config["labels"]["ecg_off_body"]) + ", Onbody: " + str(config["labels"]["ecg_on_body"])}
    elif dd_stream_name == config["stream_names"]["motionsense_hrv_right_attachment_marker"] or dd_stream_name == \
            config["stream_names"]["motionsense_hrv_left_attachment_marker"]:
        # MotionSense HRV (left or right wrist) attachment marker.
        input_param = {"window_size": config["general"]["window_size"],
                       "motionsense_improper_attachment_threshold": config["attachment_marker"][
                           "motionsense_improper_attachment"],
                       "motionsense_onbody_threshold": config["attachment_marker"]["motionsense_onbody"],
                       "motionsense_offbody_threshold": config["attachment_marker"]["motionsense_offbody"]
                       }
        data_descriptor = {"NAME": dd_stream_name, "DATA_TYPE": "int",
                           "DESCRIPTION": "Attachment labels: Improper attachment: " + str(
                               config["labels"]["motionsense_improper_attachment"]) + ", Offbody: " + str(
                               config["labels"]["motionsense_offbody"]) + ", Onbody: " + str(
                               config["labels"]["motionsense_onbody"])}
    else:
        raise ValueError("Incorrect sensor type")
    method = 'cerebralcortex.data_processor.data_diagnostic.attachment_marker'
    algo_description = config["description"]["attachment_marker"]
    ec = get_execution_context(dd_stream_name, input_param, input_streams, method,
                               algo_description, config)
    anno = get_annotations()
    return {"ec": ec, "dd": data_descriptor, "anno": anno}
| 54.4 | 132 | 0.676344 |
import uuid
from collections import OrderedDict
from cerebralcortex.cerebralcortex import CerebralCortex
from modules.mdebugger.post_processing import get_execution_context, get_annotations
from modules.mdebugger.post_processing import store
from modules.mdebugger.util import get_stream_days
from modules.mdebugger.util import merge_consective_windows
from core.signalprocessing.window import window
from cerebralcortex.core.data_manager.raw.stream_handler import DataSet
def attachment_marker(raw_stream_id: uuid, stream_name: str, owner_id: uuid, dd_stream_name, CC: CerebralCortex,
                      config: dict):
    """Label each windowed day of a MotionSense raw stream as on-body /
    off-body / improper-attachment and store the merged, labelled windows
    (with metadata) as a data-diagnostic stream via ``store``.

    NOTE(review): raw_stream_id / owner_id are annotated as uuid but are
    string-concatenated below, so callers apparently pass str -- confirm.
    """
    attachment_marker_stream_id = uuid.uuid3(uuid.NAMESPACE_DNS, str(raw_stream_id + dd_stream_name + owner_id+"ATTACHMENT MARKER"))
    stream_days = get_stream_days(raw_stream_id, attachment_marker_stream_id, CC)
    for day in stream_days:
        raw_stream = CC.get_stream(raw_stream_id, day=day, data_type=DataSet.COMPLETE)
        if len(raw_stream.data) > 0:
            # window -> label each window -> merge consecutive equal labels
            windowed_data = window(raw_stream.data, config['general']['window_size'], True)
            results = process_windows(windowed_data, config)
            merged_windows = merge_consective_windows(results)
            input_streams = [{"owner_id": owner_id, "id": str(raw_stream_id), "name": stream_name}]
            output_stream = {"id": attachment_marker_stream_id, "name": dd_stream_name,
                             "algo_type": config["algo_type"]["attachment_marker"]}
            metadata = get_metadata(dd_stream_name, input_streams, config)
            store(merged_windows, input_streams, output_stream, metadata, CC, config)
def process_windows(windowed_data: OrderedDict, config: dict) -> OrderedDict:
    """Assign an attachment label (on-body / off-body / improper) to every
    window of MotionSense samples, based on the fraction of samples whose
    value is exactly zero (zero count divided by 20)."""
    marker_cfg = config['attachment_marker']
    labels = config['labels']
    improper_limit = marker_cfg['motionsense_improper_attachment']
    onbody_limit = marker_cfg['motionsense_onbody']
    offbody_limit = marker_cfg['motionsense_offbody']

    labelled = OrderedDict()
    if not windowed_data:
        return labelled
    for window_key, points in windowed_data.items():
        zero_count = 0
        for point in points:
            if point.sample == 0:
                zero_count += 1
        ratio = zero_count / 20
        if offbody_limit < ratio < improper_limit:
            labelled[window_key] = labels['motionsense_improper_attachment']
        elif ratio > onbody_limit:
            labelled[window_key] = labels['motionsense_onbody']
        else:
            labelled[window_key] = labels['motionsense_offbody']
    return labelled
def get_metadata(dd_stream_name: str, input_streams: dict, config: dict) -> dict:
    """Build the metadata (execution context "ec", data descriptor "dd",
    annotations "anno") for an attachment-marker output stream; raises
    ValueError for unknown stream names."""
    if dd_stream_name == config["stream_names"]["autosense_rip_attachment_marker"]:
        # RIP (respiration) attachment marker.
        input_param = {"window_size": config["general"]["window_size"],
                       "onbody_threshold": config["attachment_marker"]["rip_on_body"],
                       "improper_attachment": config["attachment_marker"]["improper_attachment"]}
        data_descriptor = {"NAME": dd_stream_name, "DATA_TYPE": "int",
                           "DESCRIPTION": "Attachment labels: Improper attachment: " + str(
                               config["labels"]["rip_improper_attachment"]) + ", Offbody: " + str(
                               config["labels"]["rip_off_body"]) + ", Onbody: " + str(config["labels"]["rip_on_body"])}
    elif dd_stream_name == config["stream_names"]["autosense_ecg_attachment_marker"]:
        # ECG attachment marker ("ecg_vairance_threshold" typo is a persisted
        # metadata key -- left unchanged on purpose).
        input_param = {"window_size": config["general"]["window_size"],
                       "ecg_vairance_threshold": config["attachment_marker"]["ecg_on_body"],
                       "improper_attachment": config["attachment_marker"]["improper_attachment"]}
        data_descriptor = {"NAME": dd_stream_name, "DATA_TYPE": "int",
                           "DESCRIPTION": "Attachment labels: Improper attachment: " + str(
                               config["labels"]["ecg_improper_attachment"]) + ", Offbody: " + str(
                               config["labels"]["ecg_off_body"]) + ", Onbody: " + str(config["labels"]["ecg_on_body"])}
    elif dd_stream_name == config["stream_names"]["motionsense_hrv_right_attachment_marker"] or dd_stream_name == \
            config["stream_names"]["motionsense_hrv_left_attachment_marker"]:
        # MotionSense HRV (left or right wrist) attachment marker.
        input_param = {"window_size": config["general"]["window_size"],
                       "motionsense_improper_attachment_threshold": config["attachment_marker"][
                           "motionsense_improper_attachment"],
                       "motionsense_onbody_threshold": config["attachment_marker"]["motionsense_onbody"],
                       "motionsense_offbody_threshold": config["attachment_marker"]["motionsense_offbody"]
                       }
        data_descriptor = {"NAME": dd_stream_name, "DATA_TYPE": "int",
                           "DESCRIPTION": "Attachment labels: Improper attachment: " + str(
                               config["labels"]["motionsense_improper_attachment"]) + ", Offbody: " + str(
                               config["labels"]["motionsense_offbody"]) + ", Onbody: " + str(
                               config["labels"]["motionsense_onbody"])}
    else:
        raise ValueError("Incorrect sensor type")
    method = 'cerebralcortex.data_processor.data_diagnostic.attachment_marker'
    algo_description = config["description"]["attachment_marker"]
    ec = get_execution_context(dd_stream_name, input_param, input_streams, method,
                               algo_description, config)
    anno = get_annotations()
    return {"ec": ec, "dd": data_descriptor, "anno": anno}
| true | true |
f73004f4a82f8a65d0815fa2c8179029a64348c3 | 17,171 | py | Python | aiida/backends/tests/backup_script.py | joepvd/aiida_core | 6e9711046753332933f982971db1d7ac7e7ade58 | [
"BSD-2-Clause"
] | null | null | null | aiida/backends/tests/backup_script.py | joepvd/aiida_core | 6e9711046753332933f982971db1d7ac7e7ade58 | [
"BSD-2-Clause"
] | null | null | null | aiida/backends/tests/backup_script.py | joepvd/aiida_core | 6e9711046753332933f982971db1d7ac7e7ade58 | [
"BSD-2-Clause"
] | null | null | null | # -*- coding: utf-8 -*-
###########################################################################
# Copyright (c), The AiiDA team. All rights reserved. #
# This file is part of the AiiDA code. #
# #
# The code is hosted on GitHub at https://github.com/aiidateam/aiida_core #
# For further information on the license, see the LICENSE.txt file #
# For further information please visit http://www.aiida.net #
###########################################################################
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
import datetime
import importlib
import shutil
import sys
import tempfile
from dateutil.parser import parse
from aiida.backends.utils import is_dbenv_loaded, load_dbenv, BACKEND_SQLA, BACKEND_DJANGO
from aiida.backends.settings import BACKEND
from aiida.backends.testbase import AiidaTestCase
from aiida.common import utils
from aiida.common.additions.backup_script import backup_setup
from aiida.orm.node import Node
import aiida.utils.json as json
# Make sure the AiiDA database environment is loaded before the test
# classes below are defined.
if not is_dbenv_loaded():
    load_dbenv()
class TestBackupScriptUnit(AiidaTestCase):
    """Unit tests for the backup-script configuration handling:
    deserialization of the backup-info JSON, serialization round-trips,
    timezone normalization and path normalization."""

    # Canned JSON backup configurations covering the combinations of end
    # limits: none (1, 2), days_to_backup only (3), end_date only (4),
    # both set -- invalid (5), and naive timestamps + unnormalized path (6).
    _json_test_input_1 = '{"backup_length_threshold": 2, "periodicity": 2,' + \
        ' "oldest_object_backedup": "2014-07-18 13:54:53.688484+00:00", ' + \
        '"end_date_of_backup": null, "days_to_backup": null, "backup_dir": ' +\
        '"/scratch/aiida_user/backupScriptDest"}'

    _json_test_input_2 = '{"backup_length_threshold": 2, "periodicity": 2, ' +\
        '"oldest_object_backedup": "2014-07-18 13:54:53.688484+00:00", ' + \
        '"end_date_of_backup": null, "days_to_backup": null, "backup_dir": ' +\
        '"/scratch/aiida_user/backupScriptDest"}'

    _json_test_input_3 = '{"backup_length_threshold": 2, "periodicity": 2, ' +\
        '"oldest_object_backedup": "2014-07-18 13:54:53.688484+00:00", ' + \
        '"end_date_of_backup": null, "days_to_backup": 2, "backup_dir": ' + \
        '"/scratch/aiida_user/backupScriptDest"}'

    _json_test_input_4 = '{"backup_length_threshold": 2, "periodicity": 2, ' +\
        '"oldest_object_backedup": "2014-07-18 13:54:53.688484+00:00", ' + \
        '"end_date_of_backup": "2014-07-22 14:54:53.688484+00:00", ' + \
        '"days_to_backup": null, "backup_dir": ' + \
        '"/scratch/aiida_user/backupScriptDest"}'

    _json_test_input_5 = '{"backup_length_threshold": 2, "periodicity": 2, ' +\
        '"oldest_object_backedup": "2014-07-18 13:54:53.688484+00:00", ' + \
        '"end_date_of_backup": "2014-07-22 14:54:53.688484+00:00", ' + \
        '"days_to_backup": 2, "backup_dir": "/scratch/aiida_user/backup"}'

    _json_test_input_6 = '{"backup_length_threshold": 2, "periodicity": 2, ' +\
        '"oldest_object_backedup": "2014-07-18 13:54:53.688484", ' + \
        '"end_date_of_backup": "2014-07-22 14:54:53.688484", ' + \
        '"days_to_backup": null, ' \
        '"backup_dir": "/scratch/./aiida_user////backup//"}'

    def setUp(self):
        super(TestBackupScriptUnit, self).setUp()
        if not is_dbenv_loaded():
            load_dbenv()

        # Pick the Backup implementation matching the configured backend.
        if BACKEND == BACKEND_SQLA:
            from aiida.common.additions.backup_script.backup_sqlalchemy import Backup
        elif BACKEND == BACKEND_DJANGO:
            from aiida.common.additions.backup_script.backup_django import Backup
        else:
            self.skipTest("Unknown backend")

        self._backup_setup_inst = Backup("", 2)

    def tearDown(self):
        super(TestBackupScriptUnit, self).tearDown()
        self._backup_setup_inst = None

    def test_loading_basic_params_from_file(self):
        """
        This method tests the correct loading of the basic _backup_setup_inst
        parameters from a JSON string.
        """
        backup_variables = json.loads(self._json_test_input_1)
        self._backup_setup_inst._ignore_backup_dir_existence_check = True
        self._backup_setup_inst._read_backup_info_from_dict(backup_variables)

        self.assertEqual(
            self._backup_setup_inst._oldest_object_bk,
            parse("2014-07-18 13:54:53.688484+00:00"),
            "Last _backup_setup_inst start date is not parsed correctly")

        # The destination directory of the _backup_setup_inst
        self.assertEqual(
            self._backup_setup_inst._backup_dir,
            "/scratch/aiida_user/backupScriptDest",
            "_backup_setup_inst destination directory not parsed correctly")

        self.assertEqual(
            self._backup_setup_inst._backup_length_threshold,
            datetime.timedelta(hours=2),
            "_backup_length_threshold not parsed correctly")

        self.assertEqual(
            self._backup_setup_inst._periodicity,
            2,
            "_periodicity not parsed correctly")

    def test_loading_backup_time_params_from_file_1(self):
        """
        This method tests that the _backup_setup_inst limits are correctly
        loaded from the JSON string and are correctly set.

        In the parsed JSON string, no _backup_setup_inst end limits are set
        """
        backup_variables = json.loads(self._json_test_input_2)
        self._backup_setup_inst._ignore_backup_dir_existence_check = True
        self._backup_setup_inst._read_backup_info_from_dict(backup_variables)

        self.assertEqual(
            self._backup_setup_inst._days_to_backup,
            None,
            "_days_to_backup should be None/null but it is not")

        self.assertEqual(
            self._backup_setup_inst._end_date_of_backup,
            None,
            "_end_date_of_backup should be None/null but it is not")

        self.assertEqual(
            self._backup_setup_inst._internal_end_date_of_backup,
            None,
            "_internal_end_date_of_backup should be None/null but it is not")

    def test_loading_backup_time_params_from_file_2(self):
        """
        This method tests that the _backup_setup_inst limits are correctly
        loaded from the JSON string and are correctly set.

        In the parsed JSON string, only the daysToBackup limit is set.
        """
        backup_variables = json.loads(self._json_test_input_3)
        self._backup_setup_inst._ignore_backup_dir_existence_check = True
        self._backup_setup_inst._read_backup_info_from_dict(backup_variables)

        self.assertEqual(
            self._backup_setup_inst._days_to_backup,
            2,
            "_days_to_backup should be 2 but it is not")

        self.assertEqual(
            self._backup_setup_inst._end_date_of_backup,
            None,
            "_end_date_of_backup should be None/null but it is not")

        # oldest_object_backedup (2014-07-18) + 2 days
        self.assertEqual(
            self._backup_setup_inst._internal_end_date_of_backup,
            parse("2014-07-20 13:54:53.688484+00:00"),
            "_internal_end_date_of_backup is not the expected one")

    def test_loading_backup_time_params_from_file_3(self):
        """
        This method tests that the _backup_setup_inst limits are correctly
        loaded from the JSON string and are correctly set.

        In the parsed JSON string, only the endDateOfBackup limit is set.
        """
        backup_variables = json.loads(self._json_test_input_4)
        self._backup_setup_inst._ignore_backup_dir_existence_check = True
        self._backup_setup_inst._read_backup_info_from_dict(backup_variables)

        self.assertEqual(
            self._backup_setup_inst._days_to_backup,
            None,
            "_days_to_backup should be None/null but it is not")

        self.assertEqual(
            self._backup_setup_inst._end_date_of_backup,
            parse("2014-07-22 14:54:53.688484+00:00"),
            "_end_date_of_backup should be None/null but it is not")

        self.assertEqual(
            self._backup_setup_inst._internal_end_date_of_backup,
            parse("2014-07-22 14:54:53.688484+00:00"),
            "_internal_end_date_of_backup is not the expected one")

    def test_loading_backup_time_params_from_file_4(self):
        """
        This method tests that the _backup_setup_inst limits are correctly
        loaded from the JSON string and are correctly set.

        In the parsed JSON string, the endDateOfBackup & daysToBackuplimit
        are set which should lead to an exception.
        """
        from aiida.common.additions.backup_script.backup_base import BackupError
        backup_variables = json.loads(self._json_test_input_5)
        self._backup_setup_inst._ignore_backup_dir_existence_check = True
        # An exception should be raised because endDateOfBackup
        # & daysToBackuplimit have been defined in the same time.
        with self.assertRaises(BackupError):
            self._backup_setup_inst._read_backup_info_from_dict(backup_variables)

    def check_full_deserialization_serialization(self, input_string, backup_inst):
        # Helper: JSON -> backup info -> dict must round-trip unchanged.
        input_variables = json.loads(input_string)
        backup_inst._ignore_backup_dir_existence_check = True
        backup_inst._read_backup_info_from_dict(input_variables)
        target_variables = backup_inst._dictionarize_backup_info()
        self.assertEqual(input_variables, target_variables,
                         "The test string {} did not succeed".format(
                             input_string) +
                         " the serialization deserialization test.\n" +
                         "Input variables: {}\n".format(input_variables) +
                         "Output variables: {}\n".format(target_variables))

    def test_full_deserialization_serialization_1(self):
        """
        This method tests the correct deserialization / serialization of the
        variables that should be stored in a file.
        """
        input_string = self._json_test_input_1
        backup_inst = self._backup_setup_inst
        self.check_full_deserialization_serialization(input_string, backup_inst)

    def test_full_deserialization_serialization_2(self):
        """
        This method tests the correct deserialization / serialization of the
        variables that should be stored in a file.
        """
        input_string = self._json_test_input_2
        backup_inst = self._backup_setup_inst
        self.check_full_deserialization_serialization(input_string, backup_inst)

    def test_full_deserialization_serialization_3(self):
        """
        This method tests the correct deserialization / serialization of the
        variables that should be stored in a file.
        """
        input_string = self._json_test_input_3
        backup_inst = self._backup_setup_inst
        self.check_full_deserialization_serialization(input_string, backup_inst)

    def test_full_deserialization_serialization_4(self):
        """
        This method tests the correct deserialization / serialization of the
        variables that should be stored in a file.
        """
        input_string = self._json_test_input_4
        backup_inst = self._backup_setup_inst
        self.check_full_deserialization_serialization(input_string, backup_inst)

    def test_timezone_addition_and_dir_correction(self):
        """
        This method tests if the timezone is added correctly to timestamps
        that don't have a timezone. Moreover, it checks if the given directory
        paths are normalized as expected.
        """
        backup_variables = json.loads(self._json_test_input_6)
        self._backup_setup_inst._ignore_backup_dir_existence_check = True
        self._backup_setup_inst._read_backup_info_from_dict(backup_variables)

        self.assertIsNotNone(
            self._backup_setup_inst._oldest_object_bk.tzinfo,
            "Timezone info should not be none (timestamp: {})."
            .format(self._backup_setup_inst._oldest_object_bk))

        self.assertIsNotNone(
            self._backup_setup_inst._end_date_of_backup.tzinfo,
            "Timezone info should not be none (timestamp: {})."
            .format(self._backup_setup_inst._end_date_of_backup))

        self.assertIsNotNone(
            self._backup_setup_inst._internal_end_date_of_backup.tzinfo,
            "Timezone info should not be none (timestamp: {})."
            .format(self._backup_setup_inst._internal_end_date_of_backup))

        # The destination directory of the _backup_setup_inst
        self.assertEqual(
            self._backup_setup_inst._backup_dir,
            "/scratch/aiida_user/backup",
            "_backup_setup_inst destination directory is "
            "not normalized as expected.")
class TestBackupScriptIntegration(AiidaTestCase):
    """End-to-end test: set up the backup script via the interactive
    BackupSetup (with scripted answers), run it, and verify the repository
    was copied intact."""

    _aiida_rel_path = ".aiida"
    _backup_rel_path = "backup"
    _repo_rel_path = "repository"

    _bs_instance = backup_setup.BackupSetup()

    def test_integration(self):
        from aiida.utils.capturing import Capturing

        # Fill in the repository with data
        self.fill_repo()
        try:
            # Create a temp folder where the backup files will be placed
            # and the backup will be stored
            # NOTE(review): if mkdtemp() itself raised, temp_folder would be
            # unbound in the finally clause below.
            temp_folder = tempfile.mkdtemp()

            # Capture the sysout of the following command
            with Capturing():
                # Create the backup scripts
                backup_full_path = self.create_backup_scripts(temp_folder)

            # Put the backup folder in the path
            sys.path.append(backup_full_path)

            # Import the backup script - this action will also run it
            # It is assumed that the backup script ends with .py
            importlib.import_module(self._bs_instance._script_filename[:-3])

            # Check the backup
            from aiida import settings
            from filecmp import dircmp
            import os
            from aiida.common.utils import are_dir_trees_equal
            source_dir = os.path.join(settings.REPOSITORY_PATH,
                                      self._repo_rel_path)
            dest_dir = os.path.join(backup_full_path,
                                    self._bs_instance._file_backup_folder_rel,
                                    self._repo_rel_path)
            res, msg = are_dir_trees_equal(source_dir, dest_dir)
            self.assertTrue(res, "The backed-up repository has differences to the original one. " + str(msg)
                            + ". If the test fails, report it in issue #2134.")
        finally:
            shutil.rmtree(temp_folder, ignore_errors=True)

    def fill_repo(self):
        """Populate the repository with a mix of calculation and data nodes."""
        from aiida.orm import JobCalculation, CalculationFactory, Data, DataFactory

        extra_name = self.__class__.__name__ + "/test_with_subclasses"
        calc_params = {
            'computer': self.computer,
            'resources': {'num_machines': 1,
                          'num_mpiprocs_per_machine': 1}
        }

        TemplateReplacerCalc = CalculationFactory('simpleplugins.templatereplacer')
        ParameterData = DataFactory('parameter')

        a1 = JobCalculation(**calc_params).store()
        # To query only these nodes later
        a1.set_extra(extra_name, True)
        a2 = TemplateReplacerCalc(**calc_params).store()
        # To query only these nodes later
        a2.set_extra(extra_name, True)
        a3 = Data().store()
        a3.set_extra(extra_name, True)
        a4 = ParameterData(dict={'a': 'b'}).store()
        a4.set_extra(extra_name, True)
        a5 = Node().store()
        a5.set_extra(extra_name, True)
        # I don't set the extras, just to be sure that the filtering works
        # The filtering is needed because other tests will put stuff int he DB
        a6 = JobCalculation(**calc_params)
        a6.store()
        a7 = Node()
        a7.store()

    def create_backup_scripts(self, tmp_folder):
        """Drive the interactive BackupSetup with canned answers and return
        the backup folder path it was configured with."""
        backup_full_path = "{}/{}/{}/".format(tmp_folder, self._aiida_rel_path,
                                              self._backup_rel_path)
        # The predefined answers for the setup script
        ac = utils.ArrayCounter()
        answers = [backup_full_path,  # the backup folder path
                   "",   # should the folder be created?
                   "",   # destination folder of the backup
                   "",   # should the folder be created?
                   "n",  # print config explanation?
                   "",   # configure the backup conf file now?
                   "",   # start date of backup?
                   "",   # is it correct?
                   "",   # days to backup?
                   "",   # is it correct?
                   "",   # end date of backup
                   "",   # is it correct?
                   "1",  # periodicity
                   "",   # is it correct?
                   "0",  # threshold?
                   ""]   # is it correct?
        utils.input = lambda _: answers[ac.array_counter()]
        # Run the setup script
        self._bs_instance.run()

        return backup_full_path
| 42.502475 | 108 | 0.632695 |
up_inst._internal_end_date_of_backup,
parse("2014-07-20 13:54:53.688484+00:00"),
"_internal_end_date_of_backup is not the expected one")
def test_loading_backup_time_params_from_file_3(self):
backup_variables = json.loads(self._json_test_input_4)
self._backup_setup_inst._ignore_backup_dir_existence_check = True
self._backup_setup_inst._read_backup_info_from_dict(backup_variables)
self.assertEqual(
self._backup_setup_inst._days_to_backup,
None,
"_days_to_backup should be None/null but it is not")
self.assertEqual(
self._backup_setup_inst._end_date_of_backup,
parse("2014-07-22 14:54:53.688484+00:00"),
"_end_date_of_backup should be None/null but it is not")
self.assertEqual(
self._backup_setup_inst._internal_end_date_of_backup,
parse("2014-07-22 14:54:53.688484+00:00"),
"_internal_end_date_of_backup is not the expected one")
def test_loading_backup_time_params_from_file_4(self):
from aiida.common.additions.backup_script.backup_base import BackupError
backup_variables = json.loads(self._json_test_input_5)
self._backup_setup_inst._ignore_backup_dir_existence_check = True
with self.assertRaises(BackupError):
self._backup_setup_inst._read_backup_info_from_dict(backup_variables)
def check_full_deserialization_serialization(self, input_string, backup_inst):
input_variables = json.loads(input_string)
backup_inst._ignore_backup_dir_existence_check = True
backup_inst._read_backup_info_from_dict(input_variables)
target_variables = backup_inst._dictionarize_backup_info()
self.assertEqual(input_variables, target_variables,
"The test string {} did not succeed".format(
input_string) +
" the serialization deserialization test.\n" +
"Input variables: {}\n".format(input_variables) +
"Output variables: {}\n".format(target_variables))
def test_full_deserialization_serialization_1(self):
input_string = self._json_test_input_1
backup_inst = self._backup_setup_inst
self.check_full_deserialization_serialization(input_string, backup_inst)
def test_full_deserialization_serialization_2(self):
input_string = self._json_test_input_2
backup_inst = self._backup_setup_inst
self.check_full_deserialization_serialization(input_string, backup_inst)
def test_full_deserialization_serialization_3(self):
input_string = self._json_test_input_3
backup_inst = self._backup_setup_inst
self.check_full_deserialization_serialization(input_string, backup_inst)
def test_full_deserialization_serialization_4(self):
input_string = self._json_test_input_4
backup_inst = self._backup_setup_inst
self.check_full_deserialization_serialization(input_string, backup_inst)
def test_timezone_addition_and_dir_correction(self):
backup_variables = json.loads(self._json_test_input_6)
self._backup_setup_inst._ignore_backup_dir_existence_check = True
self._backup_setup_inst._read_backup_info_from_dict(backup_variables)
self.assertIsNotNone(
self._backup_setup_inst._oldest_object_bk.tzinfo,
"Timezone info should not be none (timestamp: {})."
.format(self._backup_setup_inst._oldest_object_bk))
self.assertIsNotNone(
self._backup_setup_inst._end_date_of_backup.tzinfo,
"Timezone info should not be none (timestamp: {})."
.format(self._backup_setup_inst._end_date_of_backup))
self.assertIsNotNone(
self._backup_setup_inst._internal_end_date_of_backup.tzinfo,
"Timezone info should not be none (timestamp: {})."
.format(self._backup_setup_inst._internal_end_date_of_backup))
self.assertEqual(
self._backup_setup_inst._backup_dir,
"/scratch/aiida_user/backup",
"_backup_setup_inst destination directory is "
"not normalized as expected.")
class TestBackupScriptIntegration(AiidaTestCase):
_aiida_rel_path = ".aiida"
_backup_rel_path = "backup"
_repo_rel_path = "repository"
_bs_instance = backup_setup.BackupSetup()
def test_integration(self):
from aiida.utils.capturing import Capturing
self.fill_repo()
try:
temp_folder = tempfile.mkdtemp()
with Capturing():
backup_full_path = self.create_backup_scripts(temp_folder)
sys.path.append(backup_full_path)
importlib.import_module(self._bs_instance._script_filename[:-3])
from aiida import settings
from filecmp import dircmp
import os
from aiida.common.utils import are_dir_trees_equal
source_dir = os.path.join(settings.REPOSITORY_PATH,
self._repo_rel_path)
dest_dir = os.path.join(backup_full_path,
self._bs_instance._file_backup_folder_rel,
self._repo_rel_path)
res, msg = are_dir_trees_equal(source_dir, dest_dir)
self.assertTrue(res, "The backed-up repository has differences to the original one. " + str(msg)
+ ". If the test fails, report it in issue #2134.")
finally:
shutil.rmtree(temp_folder, ignore_errors=True)
def fill_repo(self):
from aiida.orm import JobCalculation, CalculationFactory, Data, DataFactory
extra_name = self.__class__.__name__ + "/test_with_subclasses"
calc_params = {
'computer': self.computer,
'resources': {'num_machines': 1,
'num_mpiprocs_per_machine': 1}
}
TemplateReplacerCalc = CalculationFactory('simpleplugins.templatereplacer')
ParameterData = DataFactory('parameter')
a1 = JobCalculation(**calc_params).store()
a1.set_extra(extra_name, True)
a2 = TemplateReplacerCalc(**calc_params).store()
a2.set_extra(extra_name, True)
a3 = Data().store()
a3.set_extra(extra_name, True)
a4 = ParameterData(dict={'a': 'b'}).store()
a4.set_extra(extra_name, True)
a5 = Node().store()
a5.set_extra(extra_name, True)
# The filtering is needed because other tests will put stuff int he DB
a6 = JobCalculation(**calc_params)
a6.store()
a7 = Node()
a7.store()
def create_backup_scripts(self, tmp_folder):
backup_full_path = "{}/{}/{}/".format(tmp_folder, self._aiida_rel_path,
self._backup_rel_path)
# The predefined answers for the setup script
ac = utils.ArrayCounter()
answers = [backup_full_path, # the backup folder path
"", # should the folder be created?
"", # destination folder of the backup
"", # should the folder be created?
"n", # print config explanation?
"", # configure the backup conf file now?
"", # start date of backup?
"", # is it correct?
"", # days to backup?
"", # is it correct?
"", # end date of backup
"", # is it correct?
"1", # periodicity
"", # is it correct?
"0", # threshold?
""] # is it correct?
utils.input = lambda _: answers[ac.array_counter()]
# Run the setup script
self._bs_instance.run()
return backup_full_path
| true | true |
f73005b31846f4a78636353fe91ff4e0db98bd66 | 221 | pyde | Python | sketch_17/sketch_17.pyde | Minindosyan/2019-fall-polytech-cs | f022362e4bf6d69d623d3212df9b038f7abf5790 | [
"MIT"
] | null | null | null | sketch_17/sketch_17.pyde | Minindosyan/2019-fall-polytech-cs | f022362e4bf6d69d623d3212df9b038f7abf5790 | [
"MIT"
] | null | null | null | sketch_17/sketch_17.pyde | Minindosyan/2019-fall-polytech-cs | f022362e4bf6d69d623d3212df9b038f7abf5790 | [
"MIT"
] | null | null | null | def setup():
size(500,500)
smooth()
background(235)
strokeWeight(30)
noLoop()
def draw():
for i in range(1,8):
stroke(20)
line(i*50,200,150+(i-1)*50,300)
| 17 | 40 | 0.475113 | def setup():
size(500,500)
smooth()
background(235)
strokeWeight(30)
noLoop()
def draw():
for i in range(1,8):
stroke(20)
line(i*50,200,150+(i-1)*50,300)
| true | true |
f73005c3d0ecb0b6aa0d1b17a014e3ce2b1283cd | 2,751 | py | Python | task_manager/tasks/migrations/0001_initial.py | Ritesh-Aggarwal/Task-Manager-Django | b8f8df10b0b0a9cc9cd27346a0b5d4d5892d2f24 | [
"MIT"
] | null | null | null | task_manager/tasks/migrations/0001_initial.py | Ritesh-Aggarwal/Task-Manager-Django | b8f8df10b0b0a9cc9cd27346a0b5d4d5892d2f24 | [
"MIT"
] | null | null | null | task_manager/tasks/migrations/0001_initial.py | Ritesh-Aggarwal/Task-Manager-Django | b8f8df10b0b0a9cc9cd27346a0b5d4d5892d2f24 | [
"MIT"
] | null | null | null | # Generated by Django 3.2.12 on 2022-03-04 13:16
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import tasks.models
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Task',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('description', models.TextField()),
('completed', models.BooleanField(default=False)),
('created_date', models.DateTimeField(auto_now_add=True)),
('deleted', models.BooleanField(default=False)),
('priority', models.IntegerField(default=0)),
('status', models.CharField(choices=[('PENDING', 'PENDING'), ('IN_PROGRESS', 'IN_PROGRESS'), ('COMPLETED', 'COMPLETED'), ('CANCELLED', 'CANCELLED')], default='PENDING', max_length=100)),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='ReportSchedule',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('report_at', models.TimeField(default=tasks.models.default_start_time)),
('last_run_at', models.DateTimeField(default=tasks.models.default_last_runtime)),
('email', models.EmailField(max_length=254)),
('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='History',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('old_status', models.CharField(choices=[('PENDING', 'PENDING'), ('IN_PROGRESS', 'IN_PROGRESS'), ('COMPLETED', 'COMPLETED'), ('CANCELLED', 'CANCELLED')], default='PENDING', max_length=100)),
('new_status', models.CharField(choices=[('PENDING', 'PENDING'), ('IN_PROGRESS', 'IN_PROGRESS'), ('COMPLETED', 'COMPLETED'), ('CANCELLED', 'CANCELLED')], default='PENDING', max_length=100)),
('updated_at', models.DateTimeField(auto_now=True)),
('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasks.task')),
],
),
]
| 51.90566 | 206 | 0.616503 |
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import tasks.models
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Task',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('description', models.TextField()),
('completed', models.BooleanField(default=False)),
('created_date', models.DateTimeField(auto_now_add=True)),
('deleted', models.BooleanField(default=False)),
('priority', models.IntegerField(default=0)),
('status', models.CharField(choices=[('PENDING', 'PENDING'), ('IN_PROGRESS', 'IN_PROGRESS'), ('COMPLETED', 'COMPLETED'), ('CANCELLED', 'CANCELLED')], default='PENDING', max_length=100)),
('user', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='ReportSchedule',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('report_at', models.TimeField(default=tasks.models.default_start_time)),
('last_run_at', models.DateTimeField(default=tasks.models.default_last_runtime)),
('email', models.EmailField(max_length=254)),
('user', models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='History',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('old_status', models.CharField(choices=[('PENDING', 'PENDING'), ('IN_PROGRESS', 'IN_PROGRESS'), ('COMPLETED', 'COMPLETED'), ('CANCELLED', 'CANCELLED')], default='PENDING', max_length=100)),
('new_status', models.CharField(choices=[('PENDING', 'PENDING'), ('IN_PROGRESS', 'IN_PROGRESS'), ('COMPLETED', 'COMPLETED'), ('CANCELLED', 'CANCELLED')], default='PENDING', max_length=100)),
('updated_at', models.DateTimeField(auto_now=True)),
('task', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tasks.task')),
],
),
]
| true | true |
f73006eb526b889195551e800d11eb06f680327c | 5,986 | py | Python | adb/windows/platform-tools/systrace/catapult/telemetry/telemetry/internal/platform/linux_based_platform_backend.py | llaske/sugarizer-deployment-tool-desktop | 34df1a56b68b15b6771671f87ab66586d60c514a | [
"Apache-2.0"
] | 1 | 2019-01-17T19:03:17.000Z | 2019-01-17T19:03:17.000Z | adb/MACOS/platform-tools/systrace/catapult/telemetry/telemetry/internal/platform/linux_based_platform_backend.py | llaske/sugarizer-deployment-tool-desktop | 34df1a56b68b15b6771671f87ab66586d60c514a | [
"Apache-2.0"
] | 2 | 2017-09-08T20:26:05.000Z | 2017-09-08T20:29:07.000Z | adb/windows/platform-tools/systrace/catapult/telemetry/telemetry/internal/platform/linux_based_platform_backend.py | llaske/sugarizer-deployment-tool-desktop | 34df1a56b68b15b6771671f87ab66586d60c514a | [
"Apache-2.0"
] | null | null | null | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
try:
import resource # pylint: disable=import-error
except ImportError:
resource = None # Not available on all platforms
import re
from telemetry.core import exceptions
from telemetry import decorators
from telemetry.internal.platform import platform_backend
class LinuxBasedPlatformBackend(platform_backend.PlatformBackend):
"""Abstract platform containing functionality domain.shared by all Linux based OSes.
This includes Android and ChromeOS.
Subclasses must implement RunCommand, GetFileContents, GetPsOutput, and
ParseCStateSample."""
# Get the commit charge in kB.
def GetSystemCommitCharge(self):
meminfo_contents = self.GetFileContents('/proc/meminfo')
meminfo = self._GetProcFileDict(meminfo_contents)
if not meminfo:
return None
return (self._ConvertToKb(meminfo['MemTotal'])
- self._ConvertToKb(meminfo['MemFree'])
- self._ConvertToKb(meminfo['Buffers'])
- self._ConvertToKb(meminfo['Cached']))
@decorators.Cache
def GetSystemTotalPhysicalMemory(self):
meminfo_contents = self.GetFileContents('/proc/meminfo')
meminfo = self._GetProcFileDict(meminfo_contents)
if not meminfo:
return None
return self._ConvertToBytes(meminfo['MemTotal'])
def GetCpuStats(self, pid):
results = {}
stats = self._GetProcFileForPid(pid, 'stat')
if not stats:
return results
stats = stats.split()
utime = float(stats[13])
stime = float(stats[14])
cpu_process_jiffies = utime + stime
clock_ticks = self.GetClockTicks()
results.update({'CpuProcessTime': cpu_process_jiffies / clock_ticks})
return results
def GetCpuTimestamp(self):
total_jiffies = self._GetProcJiffies()
clock_ticks = self.GetClockTicks()
return {'TotalTime': total_jiffies / clock_ticks}
@decorators.Deprecated(
2017, 11, 4,
'Clients should use tracing and memory-infra in new Telemetry '
'benchmarks. See for context: https://crbug.com/632021')
def GetMemoryStats(self, pid):
status_contents = self._GetProcFileForPid(pid, 'status')
stats = self._GetProcFileForPid(pid, 'stat').split()
status = self._GetProcFileDict(status_contents)
if not status or not stats or 'Z' in status['State']:
return {}
vm = int(stats[22])
vm_peak = (self._ConvertToBytes(status['VmPeak'])
if 'VmPeak' in status else vm)
wss = int(stats[23]) * resource.getpagesize()
wss_peak = (self._ConvertToBytes(status['VmHWM'])
if 'VmHWM' in status else wss)
private_dirty_bytes = 0
for line in self._GetProcFileForPid(pid, 'smaps').splitlines():
if line.startswith('Private_Dirty:'):
private_dirty_bytes += self._ConvertToBytes(line.split(':')[1].strip())
return {'VM': vm,
'VMPeak': vm_peak,
'PrivateDirty': private_dirty_bytes,
'WorkingSetSize': wss,
'WorkingSetSizePeak': wss_peak}
@decorators.Cache
def GetClockTicks(self):
"""Returns the number of clock ticks per second.
The proper way is to call os.sysconf('SC_CLK_TCK') but that is not easy to
do on Android/CrOS. In practice, nearly all Linux machines have a USER_HZ
of 100, so just return that.
"""
return 100
def GetFileContents(self, filename):
raise NotImplementedError()
def GetPsOutput(self, columns, pid=None):
raise NotImplementedError()
def RunCommand(self, cmd):
"""Runs the specified command.
Args:
cmd: A list of program arguments or the path string of the program.
Returns:
A string whose content is the output of the command.
"""
raise NotImplementedError()
@staticmethod
def ParseCStateSample(sample):
"""Parse a single c-state residency sample.
Args:
sample: A sample of c-state residency times to be parsed. Organized as
a dictionary mapping CPU name to a string containing all c-state
names, the times in each state, the latency of each state, and the
time at which the sample was taken all separated by newlines.
Ex: {'cpu0': 'C0\nC1\n5000\n2000\n20\n30\n1406673171'}
Returns:
Dictionary associating a c-state with a time.
"""
raise NotImplementedError()
def _IsPidAlive(self, pid):
assert pid, 'pid is required'
return bool(self.GetPsOutput(['pid'], pid) == str(pid))
def _GetProcFileForPid(self, pid, filename):
try:
return self.GetFileContents('/proc/%s/%s' % (pid, filename))
except IOError:
if not self._IsPidAlive(pid):
raise exceptions.ProcessGoneException()
raise
def _ConvertToKb(self, value):
return int(value.replace('kB', ''))
def _ConvertToBytes(self, value):
return self._ConvertToKb(value) * 1024
def _GetProcFileDict(self, contents):
retval = {}
for line in contents.splitlines():
key, value = line.split(':')
retval[key.strip()] = value.strip()
return retval
def _GetProcJiffies(self):
"""Parse '/proc/timer_list' output and returns the first jiffies attribute.
Multi-CPU machines will have multiple 'jiffies:' lines, all of which will be
essentially the same. Return the first one."""
jiffies_timer_lines = self.RunCommand(
['grep', 'jiffies', '/proc/timer_list'])
if not jiffies_timer_lines:
raise Exception('Unable to find jiffies from /proc/timer_list')
jiffies_timer_list = jiffies_timer_lines.splitlines()
# Each line should look something like 'jiffies: 4315883489'.
for line in jiffies_timer_list:
match = re.match(r'\s*jiffies\s*:\s*(\d+)', line)
if match:
value = match.group(1)
return float(value)
raise Exception('Unable to parse jiffies attribute: %s' %
repr(jiffies_timer_lines))
| 34.011364 | 86 | 0.683762 |
try:
import resource
except ImportError:
resource = None
import re
from telemetry.core import exceptions
from telemetry import decorators
from telemetry.internal.platform import platform_backend
class LinuxBasedPlatformBackend(platform_backend.PlatformBackend):
def GetSystemCommitCharge(self):
meminfo_contents = self.GetFileContents('/proc/meminfo')
meminfo = self._GetProcFileDict(meminfo_contents)
if not meminfo:
return None
return (self._ConvertToKb(meminfo['MemTotal'])
- self._ConvertToKb(meminfo['MemFree'])
- self._ConvertToKb(meminfo['Buffers'])
- self._ConvertToKb(meminfo['Cached']))
@decorators.Cache
def GetSystemTotalPhysicalMemory(self):
meminfo_contents = self.GetFileContents('/proc/meminfo')
meminfo = self._GetProcFileDict(meminfo_contents)
if not meminfo:
return None
return self._ConvertToBytes(meminfo['MemTotal'])
def GetCpuStats(self, pid):
results = {}
stats = self._GetProcFileForPid(pid, 'stat')
if not stats:
return results
stats = stats.split()
utime = float(stats[13])
stime = float(stats[14])
cpu_process_jiffies = utime + stime
clock_ticks = self.GetClockTicks()
results.update({'CpuProcessTime': cpu_process_jiffies / clock_ticks})
return results
def GetCpuTimestamp(self):
total_jiffies = self._GetProcJiffies()
clock_ticks = self.GetClockTicks()
return {'TotalTime': total_jiffies / clock_ticks}
@decorators.Deprecated(
2017, 11, 4,
'Clients should use tracing and memory-infra in new Telemetry '
'benchmarks. See for context: https://crbug.com/632021')
def GetMemoryStats(self, pid):
status_contents = self._GetProcFileForPid(pid, 'status')
stats = self._GetProcFileForPid(pid, 'stat').split()
status = self._GetProcFileDict(status_contents)
if not status or not stats or 'Z' in status['State']:
return {}
vm = int(stats[22])
vm_peak = (self._ConvertToBytes(status['VmPeak'])
if 'VmPeak' in status else vm)
wss = int(stats[23]) * resource.getpagesize()
wss_peak = (self._ConvertToBytes(status['VmHWM'])
if 'VmHWM' in status else wss)
private_dirty_bytes = 0
for line in self._GetProcFileForPid(pid, 'smaps').splitlines():
if line.startswith('Private_Dirty:'):
private_dirty_bytes += self._ConvertToBytes(line.split(':')[1].strip())
return {'VM': vm,
'VMPeak': vm_peak,
'PrivateDirty': private_dirty_bytes,
'WorkingSetSize': wss,
'WorkingSetSizePeak': wss_peak}
@decorators.Cache
def GetClockTicks(self):
return 100
def GetFileContents(self, filename):
raise NotImplementedError()
def GetPsOutput(self, columns, pid=None):
raise NotImplementedError()
def RunCommand(self, cmd):
raise NotImplementedError()
@staticmethod
def ParseCStateSample(sample):
raise NotImplementedError()
def _IsPidAlive(self, pid):
assert pid, 'pid is required'
return bool(self.GetPsOutput(['pid'], pid) == str(pid))
def _GetProcFileForPid(self, pid, filename):
try:
return self.GetFileContents('/proc/%s/%s' % (pid, filename))
except IOError:
if not self._IsPidAlive(pid):
raise exceptions.ProcessGoneException()
raise
def _ConvertToKb(self, value):
return int(value.replace('kB', ''))
def _ConvertToBytes(self, value):
return self._ConvertToKb(value) * 1024
def _GetProcFileDict(self, contents):
retval = {}
for line in contents.splitlines():
key, value = line.split(':')
retval[key.strip()] = value.strip()
return retval
def _GetProcJiffies(self):
jiffies_timer_lines = self.RunCommand(
['grep', 'jiffies', '/proc/timer_list'])
if not jiffies_timer_lines:
raise Exception('Unable to find jiffies from /proc/timer_list')
jiffies_timer_list = jiffies_timer_lines.splitlines()
for line in jiffies_timer_list:
match = re.match(r'\s*jiffies\s*:\s*(\d+)', line)
if match:
value = match.group(1)
return float(value)
raise Exception('Unable to parse jiffies attribute: %s' %
repr(jiffies_timer_lines))
| true | true |
f73008d4776542c97030787b5e4f290a43754608 | 267 | py | Python | devel/test_wv.py | binnietom/py21cmmc_wv-1 | 2d5405700c1d99bd5f22c762999aea89d1ca1c23 | [
"MIT"
] | null | null | null | devel/test_wv.py | binnietom/py21cmmc_wv-1 | 2d5405700c1d99bd5f22c762999aea89d1ca1c23 | [
"MIT"
] | null | null | null | devel/test_wv.py | binnietom/py21cmmc_wv-1 | 2d5405700c1d99bd5f22c762999aea89d1ca1c23 | [
"MIT"
] | 1 | 2022-03-04T16:21:16.000Z | 2022-03-04T16:21:16.000Z | from py21cmmc_wv import morlet
import numpy as np
bw = 50.0
numin = 130.0
N = 736
nu = np.arange(N) * bw/N + numin
mid = (nu[0] + nu[-1])/2
spectrum = np.exp(-(nu-mid)**2/ (2*4.0**2))
trnsc, fc, _ = morlet.morlet_transform_c(spectrum, nu)
trnsc = np.abs(trnsc)**2
| 19.071429 | 54 | 0.636704 | from py21cmmc_wv import morlet
import numpy as np
bw = 50.0
numin = 130.0
N = 736
nu = np.arange(N) * bw/N + numin
mid = (nu[0] + nu[-1])/2
spectrum = np.exp(-(nu-mid)**2/ (2*4.0**2))
trnsc, fc, _ = morlet.morlet_transform_c(spectrum, nu)
trnsc = np.abs(trnsc)**2
| true | true |
f7300951a7ef4c3c8387293955d5b336a64a4701 | 151,815 | py | Python | zerver/tests/test_events.py | Spian91/zulip | 2893b98ef8ba44f91966a2455a49ed8bd86e0b7b | [
"Apache-2.0"
] | null | null | null | zerver/tests/test_events.py | Spian91/zulip | 2893b98ef8ba44f91966a2455a49ed8bd86e0b7b | [
"Apache-2.0"
] | null | null | null | zerver/tests/test_events.py | Spian91/zulip | 2893b98ef8ba44f91966a2455a49ed8bd86e0b7b | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# See https://zulip.readthedocs.io/en/latest/subsystems/events-system.html for
# high-level documentation on how this system works.
from typing import Any, Callable, Dict, List, Optional, Set, Tuple
import copy
import os
import shutil
import sys
from django.conf import settings
from django.http import HttpRequest, HttpResponse
from django.utils.timezone import now as timezone_now
from io import StringIO
from zerver.models import (
get_client, get_stream_recipient, get_stream, get_realm, get_system_bot,
Message, RealmDomain, Recipient, UserMessage, UserPresence, UserProfile,
Realm, Subscription, Stream, flush_per_request_caches, UserGroup, Service,
Attachment, PreregistrationUser, get_user_by_delivery_email, MultiuseInvite,
RealmAuditLog
)
from zerver.lib.actions import (
try_update_realm_custom_profile_field,
bulk_add_subscriptions,
bulk_remove_subscriptions,
check_add_realm_emoji,
check_send_message,
check_send_typing_notification,
do_add_alert_words,
do_add_default_stream,
do_add_reaction,
do_add_reaction_legacy,
do_add_realm_domain,
do_add_realm_filter,
do_add_streams_to_default_stream_group,
do_add_submessage,
do_change_avatar_fields,
do_change_bot_owner,
do_change_default_all_public_streams,
do_change_default_events_register_stream,
do_change_default_sending_stream,
do_change_default_stream_group_description,
do_change_default_stream_group_name,
do_change_full_name,
do_change_icon_source,
do_change_logo_source,
do_change_is_admin,
do_change_is_guest,
do_change_notification_settings,
do_change_plan_type,
do_change_realm_domain,
do_change_stream_description,
do_change_stream_invite_only,
do_change_stream_announcement_only,
do_change_subscription_property,
do_change_user_delivery_email,
do_create_user,
do_create_default_stream_group,
do_create_multiuse_invite_link,
do_deactivate_stream,
do_deactivate_user,
do_delete_messages,
do_invite_users,
do_mark_hotspot_as_read,
do_mute_topic,
do_reactivate_user,
do_regenerate_api_key,
do_remove_alert_words,
do_remove_default_stream,
do_remove_default_stream_group,
do_remove_reaction,
do_remove_reaction_legacy,
do_remove_realm_domain,
do_remove_realm_emoji,
do_remove_realm_filter,
do_remove_streams_from_default_stream_group,
do_rename_stream,
do_revoke_multi_use_invite,
do_revoke_user_invite,
do_set_realm_authentication_methods,
do_set_realm_message_editing,
do_set_realm_property,
do_set_user_display_setting,
do_set_realm_notifications_stream,
do_set_realm_signup_notifications_stream,
do_unmute_topic,
do_update_embedded_data,
do_update_message,
do_update_message_flags,
do_update_outgoing_webhook_service,
do_update_pointer,
do_update_user_presence,
do_update_user_status,
get_typing_user_profiles,
log_event,
lookup_default_stream_groups,
notify_realm_custom_profile_fields,
check_add_user_group,
do_update_user_group_name,
do_update_user_group_description,
bulk_add_members_to_user_group,
remove_members_from_user_group,
check_delete_user_group,
do_update_user_custom_profile_data_if_changed,
)
from zerver.lib.events import (
apply_events,
fetch_initial_state_data,
get_raw_user_data,
post_process_state,
)
from zerver.lib.message import (
aggregate_unread_data,
get_raw_unread_data,
render_markdown,
UnreadMessagesResult,
)
from zerver.lib.test_helpers import POSTRequestMock, get_subscription, \
get_test_image_file, stub_event_queue_user_events, queries_captured, \
create_dummy_file, stdout_suppressed
from zerver.lib.test_classes import (
ZulipTestCase,
)
from zerver.lib.test_runner import slow
from zerver.lib.topic import (
ORIG_TOPIC,
TOPIC_NAME,
TOPIC_LINKS,
)
from zerver.lib.topic_mutes import (
add_topic_mute,
)
from zerver.lib.validator import (
check_bool, check_dict, check_dict_only, check_float, check_int, check_list, check_string,
equals, check_none_or, Validator, check_url
)
from zerver.lib.users import get_api_key
from zerver.views.events_register import _default_all_public_streams, _default_narrow
from zerver.tornado.event_queue import (
allocate_client_descriptor,
clear_client_event_queues_for_testing,
get_client_info_for_message_event,
process_message_event,
)
from zerver.tornado.views import get_events
import mock
import time
import ujson
class LogEventsTest(ZulipTestCase):
    """Tests for the `log_event` helper that appends events to an
    on-disk event log under ``settings.EVENT_LOG_DIR``."""

    def test_with_missing_event_log_dir_setting(self) -> None:
        # When EVENT_LOG_DIR is unset, log_event should be a no-op
        # rather than raising.
        with self.settings(EVENT_LOG_DIR=None):
            log_event({})

    def test_log_event_mkdir(self) -> None:
        # log_event should create the configured log directory on demand.
        log_dir = os.path.join(settings.TEST_WORKER_DIR, "test-log-dir")
        try:
            shutil.rmtree(log_dir)
        except OSError:  # nocoverage
            # assume it doesn't exist already
            pass
        self.assertFalse(os.path.exists(log_dir))
        with self.settings(EVENT_LOG_DIR=log_dir):
            payload = {}  # type: Dict[str, int]
            log_event(payload)
        self.assertTrue(os.path.exists(log_dir))
class EventsEndpointTest(ZulipTestCase):
    """Smoke tests for the /json/register and /notify_tornado endpoints."""

    def test_events_register_endpoint(self) -> None:
        """Exercise /json/register with a stubbed-out event queue.

        The Tornado event-queue machinery is mocked throughout, so this
        checks the view-level plumbing (queue allocation errors, merging
        of queued events into the initial state, and the interaction of
        `event_types` with `fetch_event_types`) rather than real queues.
        """
        # This test is intended to get minimal coverage on the
        # events_register code paths
        email = self.example_email("hamlet")
        with mock.patch('zerver.views.events_register.do_events_register', return_value={}):
            result = self.api_post(email, '/json/register')
        self.assert_json_success(result)
        # If no event queue can be allocated, the endpoint reports it.
        with mock.patch('zerver.lib.events.request_event_queue', return_value=None):
            result = self.api_post(email, '/json/register')
            self.assert_json_error(result, "Could not allocate event queue")
        return_event_queue = '15:11'
        return_user_events = []  # type: List[Dict[str, Any]]
        # Test that call is made to deal with a returning soft deactivated user.
        with mock.patch('zerver.lib.events.reactivate_user_if_soft_deactivated') as fa:
            with stub_event_queue_user_events(return_event_queue, return_user_events):
                result = self.api_post(email, '/json/register', dict(event_types=ujson.dumps(['pointer'])))
                self.assertEqual(fa.call_count, 1)
        # With an empty queue, last_event_id should be -1.
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(email, '/json/register', dict(event_types=ujson.dumps(['pointer'])))
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict['last_event_id'], -1)
        self.assertEqual(result_dict['queue_id'], '15:11')
        # A queued pointer event should be applied to the initial state.
        return_event_queue = '15:12'
        return_user_events = [
            {
                'id': 6,
                'type': 'pointer',
                'pointer': 15,
            }
        ]
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(email, '/json/register', dict(event_types=ujson.dumps(['pointer'])))
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict['last_event_id'], 6)
        self.assertEqual(result_dict['pointer'], 15)
        self.assertEqual(result_dict['queue_id'], '15:12')
        # Now test with `fetch_event_types` not matching the event
        return_event_queue = '15:13'
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(email, '/json/register',
                                   dict(event_types=ujson.dumps(['pointer']),
                                        fetch_event_types=ujson.dumps(['message'])))
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict['last_event_id'], 6)
        # Check that the message event types data is in there
        self.assertIn('max_message_id', result_dict)
        # Check that the pointer event types data is not in there
        self.assertNotIn('pointer', result_dict)
        self.assertEqual(result_dict['queue_id'], '15:13')
        # Now test with `fetch_event_types` matching the event
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(email, '/json/register',
                                   dict(fetch_event_types=ujson.dumps(['pointer']),
                                        event_types=ujson.dumps(['message'])))
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict['last_event_id'], 6)
        # Check that we didn't fetch the messages data
        self.assertNotIn('max_message_id', result_dict)
        # Check that the pointer data is in there, and is correctly
        # updated (preserving our atomicity guarantee), though of
        # course any future pointer events won't be distributed
        self.assertIn('pointer', result_dict)
        self.assertEqual(result_dict['pointer'], 15)
        self.assertEqual(result_dict['queue_id'], '15:13')

    def test_tornado_endpoint(self) -> None:
        """Check auth handling on the internal /notify_tornado endpoint."""
        # This test is mostly intended to get minimal coverage on
        # the /notify_tornado endpoint, so we can have 100% URL coverage,
        # but it does exercise a little bit of the codepath.
        post_data = dict(
            data=ujson.dumps(
                dict(
                    event=dict(
                        type='other'
                    ),
                    users=[self.example_user('hamlet').id],
                ),
            ),
        )
        # Without the shared secret, the request must be rejected.
        req = POSTRequestMock(post_data, user_profile=None)
        req.META['REMOTE_ADDR'] = '127.0.0.1'
        result = self.client_post_request('/notify_tornado', req)
        self.assert_json_error(result, 'Access denied', status_code=403)
        # With the shared secret, the request succeeds.
        post_data['secret'] = settings.SHARED_SECRET
        req = POSTRequestMock(post_data, user_profile=None)
        req.META['REMOTE_ADDR'] = '127.0.0.1'
        result = self.client_post_request('/notify_tornado', req)
        self.assert_json_success(result)
class GetEventsTest(ZulipTestCase):
def tornado_call(self, view_func: Callable[[HttpRequest, UserProfile], HttpResponse],
user_profile: UserProfile,
post_data: Dict[str, Any]) -> HttpResponse:
request = POSTRequestMock(post_data, user_profile)
return view_func(request, user_profile)
    def test_get_events(self) -> None:
        """End-to-end test of event delivery through get_events.

        Allocates message-event queues for a sender (hamlet) and a
        recipient (othello), sends private messages, and verifies that
        each side sees the expected events; in particular, that
        `local_message_id` (used for client-side echo) is delivered only
        to the sending queue, never to the recipient's queue.
        """
        user_profile = self.example_user('hamlet')
        email = user_profile.email
        recipient_user_profile = self.example_user('othello')
        recipient_email = recipient_user_profile.email
        self.login(email)
        # Allocate an event queue for the sender.
        result = self.tornado_call(get_events, user_profile,
                                   {"apply_markdown": ujson.dumps(True),
                                    "client_gravatar": ujson.dumps(True),
                                    "event_types": ujson.dumps(["message"]),
                                    "user_client": "website",
                                    "dont_block": ujson.dumps(True),
                                    })
        self.assert_json_success(result)
        queue_id = ujson.loads(result.content)["queue_id"]
        # Allocate a separate event queue for the recipient.
        recipient_result = self.tornado_call(get_events, recipient_user_profile,
                                             {"apply_markdown": ujson.dumps(True),
                                              "client_gravatar": ujson.dumps(True),
                                              "event_types": ujson.dumps(["message"]),
                                              "user_client": "website",
                                              "dont_block": ujson.dumps(True),
                                              })
        self.assert_json_success(recipient_result)
        recipient_queue_id = ujson.loads(recipient_result.content)["queue_id"]
        # A freshly created queue should contain no events.
        result = self.tornado_call(get_events, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 0)
        # Send a PM from the sender's own queue; the resulting event in
        # that queue should carry the local_id for client-side echo.
        local_id = '10.01'
        check_send_message(
            sender=user_profile,
            client=get_client('whatever'),
            message_type_name='private',
            message_to=[recipient_email],
            topic_name=None,
            message_content='hello',
            local_id=local_id,
            sender_queue_id=queue_id,
        )
        result = self.tornado_call(get_events, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["sender_email"], email)
        self.assertEqual(events[0]["local_message_id"], local_id)
        self.assertEqual(events[0]["message"]["display_recipient"][0]["is_mirror_dummy"], False)
        self.assertEqual(events[0]["message"]["display_recipient"][1]["is_mirror_dummy"], False)
        # Send a second message and fetch only events newer than the
        # first one, via last_event_id.
        last_event_id = events[0]["id"]
        local_id = '10.02'
        check_send_message(
            sender=user_profile,
            client=get_client('whatever'),
            message_type_name='private',
            message_to=[recipient_email],
            topic_name=None,
            message_content='hello',
            local_id=local_id,
            sender_queue_id=queue_id,
        )
        result = self.tornado_call(get_events, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": last_event_id,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["sender_email"], email)
        self.assertEqual(events[0]["local_message_id"], local_id)
        # Test that the received message in the receiver's event queue
        # exists and does not contain a local id
        recipient_result = self.tornado_call(get_events, recipient_user_profile,
                                             {"queue_id": recipient_queue_id,
                                              "user_client": "website",
                                              "last_event_id": -1,
                                              "dont_block": ujson.dumps(True),
                                              })
        recipient_events = ujson.loads(recipient_result.content)["events"]
        self.assert_json_success(recipient_result)
        self.assertEqual(len(recipient_events), 2)
        self.assertEqual(recipient_events[0]["type"], "message")
        self.assertEqual(recipient_events[0]["message"]["sender_email"], email)
        self.assertTrue("local_message_id" not in recipient_events[0])
        self.assertEqual(recipient_events[1]["type"], "message")
        self.assertEqual(recipient_events[1]["message"]["sender_email"], email)
        self.assertTrue("local_message_id" not in recipient_events[1])
    def test_get_events_narrow(self) -> None:
        # A queue registered with narrow=[["stream", "denmark"]] must deliver
        # only events for messages matching that narrow; the apply_markdown /
        # client_gravatar flags control the rendered content and avatar_url
        # of the delivered message payload.
        user_profile = self.example_user('hamlet')
        email = user_profile.email
        self.login(email)
        def get_message(apply_markdown: bool, client_gravatar: bool) -> Dict[str, Any]:
            # Register a narrowed queue, send one PM (filtered out by the
            # narrow) and one Denmark stream message, and return the single
            # delivered message payload.
            result = self.tornado_call(
                get_events,
                user_profile,
                dict(
                    apply_markdown=ujson.dumps(apply_markdown),
                    client_gravatar=ujson.dumps(client_gravatar),
                    event_types=ujson.dumps(["message"]),
                    narrow=ujson.dumps([["stream", "denmark"]]),
                    user_client="website",
                    dont_block=ujson.dumps(True),
                )
            )
            self.assert_json_success(result)
            queue_id = ujson.loads(result.content)["queue_id"]
            result = self.tornado_call(get_events, user_profile,
                                       {"queue_id": queue_id,
                                        "user_client": "website",
                                        "last_event_id": -1,
                                        "dont_block": ujson.dumps(True),
                                        })
            events = ujson.loads(result.content)["events"]
            self.assert_json_success(result)
            # Freshly registered queue is empty.
            self.assert_length(events, 0)
            self.send_personal_message(email, self.example_email("othello"), "hello")
            self.send_stream_message(email, "Denmark", "**hello**")
            result = self.tornado_call(get_events, user_profile,
                                       {"queue_id": queue_id,
                                        "user_client": "website",
                                        "last_event_id": -1,
                                        "dont_block": ujson.dumps(True),
                                        })
            events = ujson.loads(result.content)["events"]
            self.assert_json_success(result)
            # Only the stream message matched the narrow; the PM was dropped.
            self.assert_length(events, 1)
            self.assertEqual(events[0]["type"], "message")
            return events[0]['message']
        # Markdown off, gravatar URL computed server-side.
        message = get_message(apply_markdown=False, client_gravatar=False)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "**hello**")
        self.assertIn('gravatar.com', message["avatar_url"])
        # Markdown on, gravatar URL computed server-side.
        message = get_message(apply_markdown=True, client_gravatar=False)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "<p><strong>hello</strong></p>")
        self.assertIn('gravatar.com', message["avatar_url"])
        # client_gravatar=True: the server omits the avatar URL (None) and the
        # client is expected to compute the gravatar itself.
        message = get_message(apply_markdown=False, client_gravatar=True)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "**hello**")
        self.assertEqual(message["avatar_url"], None)
        message = get_message(apply_markdown=True, client_gravatar=True)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "<p><strong>hello</strong></p>")
        self.assertEqual(message["avatar_url"], None)
class EventsRegisterTest(ZulipTestCase):
    def setUp(self) -> None:
        # All tests in this class act as the "hamlet" example user.
        super().setUp()
        self.user_profile = self.example_user('hamlet')
    def create_bot(self, email: str, **extras: Any) -> Optional[UserProfile]:
        # Convenience wrapper: create a test bot owned by self.user_profile.
        return self.create_test_bot(email, self.user_profile, **extras)
    def realm_bot_schema(self, field_name: str, check: Validator) -> Validator:
        # Build a validator for a realm_bot/update event whose payload
        # changes exactly one bot field (field_name), validated by `check`.
        return self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('update')),
            ('bot', check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
                (field_name, check),
            ])),
        ])
    def do_test(self, action: Callable[[], object], event_types: Optional[List[str]]=None,
                include_subscribers: bool=True, state_change_expected: bool=True,
                notification_settings_null: bool=False,
                client_gravatar: bool=False, num_events: int=1) -> List[Dict[str, Any]]:
        '''
        Make sure we have a clean slate of client descriptors for these tests.
        If we don't do this, then certain failures will only manifest when you
        run multiple tests within a single test function.

        See also https://zulip.readthedocs.io/en/latest/subsystems/events-system.html#testing
        for details on the design of this test system.
        '''
        # Run `action`, capture the events it generated, and verify that
        # replaying those events on top of a pre-action state snapshot
        # (apply_events) yields the same state as a fresh post-action fetch.
        clear_client_event_queues_for_testing()

        client = allocate_client_descriptor(
            dict(user_profile_id = self.user_profile.id,
                 user_profile_email = self.user_profile.email,
                 realm_id = self.user_profile.realm_id,
                 event_types = event_types,
                 client_type_name = "website",
                 apply_markdown = True,
                 client_gravatar = client_gravatar,
                 all_public_streams = False,
                 queue_timeout = 600,
                 last_connection_time = time.time(),
                 narrow = [])
        )
        # hybrid_state = initial fetch state + re-applying events triggered by our action
        # normal_state = do action then fetch at the end (the "normal" code path)
        hybrid_state = fetch_initial_state_data(
            self.user_profile, event_types, "",
            client_gravatar=True,
            include_subscribers=include_subscribers
        )
        action()
        events = client.event_queue.contents()
        self.assertEqual(len(events), num_events)

        # Snapshot the pre-action state before apply_events mutates
        # hybrid_state in place.
        initial_state = copy.deepcopy(hybrid_state)
        post_process_state(self.user_profile, initial_state, notification_settings_null)
        before = ujson.dumps(initial_state)
        apply_events(hybrid_state, events, self.user_profile,
                     client_gravatar=True, include_subscribers=include_subscribers)
        post_process_state(self.user_profile, hybrid_state, notification_settings_null)
        after = ujson.dumps(hybrid_state)

        if state_change_expected:
            if before == after:  # nocoverage
                print(ujson.dumps(initial_state, indent=2))
                print(events)
                raise AssertionError('Test does not exercise enough code -- events do not change state.')
        else:
            try:
                self.match_states(initial_state, copy.deepcopy(hybrid_state), events)
            except AssertionError:  # nocoverage
                raise AssertionError('Test is invalid--state actually does change here.')

        # Fetch the state the "normal" way and require it to match the
        # event-replayed hybrid state.
        normal_state = fetch_initial_state_data(
            self.user_profile, event_types, "",
            client_gravatar=True,
            include_subscribers=include_subscribers,
        )
        post_process_state(self.user_profile, normal_state, notification_settings_null)
        self.match_states(hybrid_state, normal_state, events)
        return events
def assert_on_error(self, error: Optional[str]) -> None:
if error:
raise AssertionError(error)
    def match_states(self, state1: Dict[str, Any], state2: Dict[str, Any],
                     events: List[Dict[str, Any]]) -> None:
        # Assert two fetched/derived state payloads are equal, after
        # normalizing ordering differences that don't matter semantically.
        def normalize(state: Dict[str, Any]) -> None:
            # Sort subscriber lists and key subscription/bot lists by a
            # stable identifier so list ordering can't cause false diffs.
            for u in state['never_subscribed']:
                if 'subscribers' in u:
                    u['subscribers'].sort()
            for u in state['subscriptions']:
                if 'subscribers' in u:
                    u['subscribers'].sort()
            state['subscriptions'] = {u['name']: u for u in state['subscriptions']}
            state['unsubscribed'] = {u['name']: u for u in state['unsubscribed']}
            if 'realm_bots' in state:
                state['realm_bots'] = {u['email']: u for u in state['realm_bots']}
        normalize(state1)
        normalize(state2)

        # If this assertions fails, we have unusual problems.
        self.assertEqual(state1.keys(), state2.keys())

        # The far more likely scenario is that some section of
        # our enormous payload does not get updated properly.  We
        # want the diff here to be developer-friendly, hence
        # the somewhat tedious code to provide useful output.
        if state1 != state2:  # nocoverage
            print('\n---States DO NOT MATCH---')
            print('\nEVENTS:\n')

            # Printing out the events is a big help to
            # developers.
            import json
            for event in events:
                print(json.dumps(event, indent=4))

            print('\nMISMATCHES:\n')
            for k in state1:
                if state1[k] != state2[k]:
                    print('\nkey = ' + k)
                    try:
                        self.assertEqual({k: state1[k]}, {k: state2[k]})
                    except AssertionError as e:
                        print(e)
            print('''
            NOTE:

                This is an advanced test that verifies how
                we apply events after fetching data.  If you
                do not know how to debug it, you can ask for
                help on chat.
                ''')

            sys.stdout.flush()
            raise AssertionError('Mismatching states')
def check_events_dict(self, required_keys: List[Tuple[str, Validator]]) -> Validator:
required_keys.append(('id', check_int))
# Raise AssertionError if `required_keys` contains duplicate items.
keys = [key[0] for key in required_keys]
self.assertEqual(len(keys), len(set(keys)), 'Duplicate items found in required_keys.')
return check_dict_only(required_keys)
    def test_mentioned_send_message_events(self) -> None:
        # Sending messages that personally mention hamlet must keep
        # event-applied state consistent with a fresh fetch.
        user = self.example_user('hamlet')

        for i in range(3):
            # Vary content each round; the lambda is invoked within the same
            # iteration, so capturing `content` late-bound is safe here.
            content = 'mentioning... @**' + user.full_name + '** hello ' + str(i)
            self.do_test(
                lambda: self.send_stream_message(self.example_email('cordelia'),
                                                 "Verona",
                                                 content)
            )
    def test_wildcard_mentioned_send_message_events(self) -> None:
        # Same as the personal-mention test, but using the @**all** wildcard.
        for i in range(3):
            content = 'mentioning... @**all** hello ' + str(i)
            self.do_test(
                lambda: self.send_stream_message(self.example_email('cordelia'),
                                                 "Verona",
                                                 content)
            )
    def test_pm_send_message_events(self) -> None:
        # Private-message send must round-trip through the event system.
        self.do_test(
            lambda: self.send_personal_message(self.example_email('cordelia'),
                                               self.example_email('hamlet'),
                                               'hola')
        )
    def test_huddle_send_message_events(self) -> None:
        # Group-PM (huddle) send must round-trip through the event system.
        huddle = [
            self.example_email('hamlet'),
            self.example_email('othello'),
        ]
        self.do_test(
            lambda: self.send_huddle_message(self.example_email('cordelia'),
                                             huddle,
                                             'hola')
        )
    def test_stream_send_message_events(self) -> None:
        # Validate the full schema of stream `message` events (with and
        # without client_gravatar), then the `update_message` events produced
        # by editing a message and by do_update_embedded_data.
        def get_checker(check_gravatar: Validator) -> Validator:
            # The avatar_url validator differs by client_gravatar mode.
            schema_checker = self.check_events_dict([
                ('type', equals('message')),
                ('flags', check_list(None)),
                ('message', self.check_events_dict([
                    ('avatar_url', check_gravatar),
                    ('client', check_string),
                    ('content', check_string),
                    ('content_type', equals('text/html')),
                    ('display_recipient', check_string),
                    ('is_me_message', check_bool),
                    ('reactions', check_list(None)),
                    ('recipient_id', check_int),
                    ('sender_realm_str', check_string),
                    ('sender_email', check_string),
                    ('sender_full_name', check_string),
                    ('sender_id', check_int),
                    ('sender_short_name', check_string),
                    ('stream_id', check_int),
                    (TOPIC_NAME, check_string),
                    (TOPIC_LINKS, check_list(None)),
                    ('submessages', check_list(None)),
                    ('timestamp', check_int),
                    ('type', check_string),
                ])),
            ])
            return schema_checker

        # client_gravatar=False: avatar_url is a server-provided string.
        events = self.do_test(
            lambda: self.send_stream_message(self.example_email("hamlet"), "Verona", "hello"),
            client_gravatar=False,
        )
        schema_checker = get_checker(check_gravatar=check_string)
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        # client_gravatar=True: avatar_url is omitted (None).
        events = self.do_test(
            lambda: self.send_stream_message(self.example_email("hamlet"), "Verona", "hello"),
            client_gravatar=True,
        )
        schema_checker = get_checker(check_gravatar=equals(None))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        # Verify message editing
        schema_checker = self.check_events_dict([
            ('type', equals('update_message')),
            ('flags', check_list(None)),
            ('content', check_string),
            ('edit_timestamp', check_int),
            ('message_id', check_int),
            ('message_ids', check_list(check_int)),
            ('prior_mention_user_ids', check_list(check_int)),
            ('mention_user_ids', check_list(check_int)),
            ('presence_idle_user_ids', check_list(check_int)),
            ('stream_push_user_ids', check_list(check_int)),
            ('stream_email_user_ids', check_list(check_int)),
            ('push_notify_user_ids', check_list(check_int)),
            ('orig_content', check_string),
            ('orig_rendered_content', check_string),
            (ORIG_TOPIC, check_string),
            ('prev_rendered_content_version', check_int),
            ('propagate_mode', check_string),
            ('rendered_content', check_string),
            ('sender', check_string),
            ('stream_id', check_int),
            ('stream_name', check_string),
            (TOPIC_NAME, check_string),
            (TOPIC_LINKS, check_list(None)),
            ('user_id', check_int),
            ('is_me_message', check_bool),
        ])

        # Edit the most recently sent message (new topic + new content).
        message = Message.objects.order_by('-id')[0]
        topic = 'new_topic'
        propagate_mode = 'change_all'
        content = 'new content'
        rendered_content = render_markdown(message, content)
        prior_mention_user_ids = set()  # type: Set[int]
        mentioned_user_ids = set()  # type: Set[int]
        events = self.do_test(
            lambda: do_update_message(self.user_profile, message, topic,
                                      propagate_mode, content, rendered_content,
                                      prior_mention_user_ids,
                                      mentioned_user_ids),
            state_change_expected=True,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        # Verify do_update_embedded_data
        schema_checker = self.check_events_dict([
            ('type', equals('update_message')),
            ('flags', check_list(None)),
            ('content', check_string),
            ('message_id', check_int),
            ('message_ids', check_list(check_int)),
            ('rendered_content', check_string),
            ('sender', check_string),
        ])
        # Embedded-data updates don't change user-facing state.
        events = self.do_test(
            lambda: do_update_embedded_data(self.user_profile, message,
                                            u"embed_content", "<p>embed_content</p>"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_update_message_flags(self) -> None:
        # Test message flag update events
        # Adding/removing the 'starred' flag must emit properly-shaped
        # update_message_flags events with the matching operation.
        schema_checker = self.check_events_dict([
            ('all', check_bool),
            ('type', equals('update_message_flags')),
            ('flag', check_string),
            ('messages', check_list(check_int)),
            ('operation', equals("add")),
        ])
        message = self.send_personal_message(
            self.example_email("cordelia"),
            self.example_email("hamlet"),
            "hello",
        )
        user_profile = self.example_user('hamlet')
        events = self.do_test(
            lambda: do_update_message_flags(user_profile, get_client("website"), 'add', 'starred', [message]),
            state_change_expected=True,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Same event shape but operation == "remove".
        schema_checker = self.check_events_dict([
            ('all', check_bool),
            ('type', equals('update_message_flags')),
            ('flag', check_string),
            ('messages', check_list(check_int)),
            ('operation', equals("remove")),
        ])
        events = self.do_test(
            lambda: do_update_message_flags(user_profile, get_client("website"), 'remove', 'starred', [message]),
            state_change_expected=True,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_update_read_flag_removes_unread_msg_ids(self) -> None:
        # Marking a message read (plain and with a personal mention) must
        # update the unread-message state consistently via events.
        user_profile = self.example_user('hamlet')
        mention = '@**' + user_profile.full_name + '**'

        for content in ['hello', mention]:
            message = self.send_stream_message(
                self.example_email('cordelia'),
                "Verona",
                content
            )
            # Lambda captures `message` late-bound, but do_test runs it
            # within the same iteration, so this is safe.
            self.do_test(
                lambda: do_update_message_flags(user_profile, get_client("website"), 'add', 'read', [message]),
                state_change_expected=True,
            )
    def test_send_message_to_existing_recipient(self) -> None:
        # A second message to an already-used recipient must still produce a
        # state change (e.g. unread counts), exercising the existing-recipient
        # code path.
        self.send_stream_message(
            self.example_email('cordelia'),
            "Verona",
            "hello 1"
        )
        self.do_test(
            lambda: self.send_stream_message("cordelia@zulip.com", "Verona", "hello 2"),
            state_change_expected=True,
        )
    def test_add_reaction_legacy(self) -> None:
        # Legacy (emoji-name-only) reaction add must emit a reaction/add
        # event; reactions do not change the fetched state payload.
        schema_checker = self.check_events_dict([
            ('type', equals('reaction')),
            ('op', equals('add')),
            ('message_id', check_int),
            ('emoji_name', check_string),
            ('emoji_code', check_string),
            ('reaction_type', check_string),
            ('user', check_dict_only([
                ('email', check_string),
                ('full_name', check_string),
                ('user_id', check_int)
            ])),
        ])

        message_id = self.send_stream_message(self.example_email("hamlet"), "Verona", "hello")
        message = Message.objects.get(id=message_id)
        events = self.do_test(
            lambda: do_add_reaction_legacy(
                self.user_profile, message, "tada"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_remove_reaction_legacy(self) -> None:
        # Legacy reaction removal must emit a reaction/remove event.
        schema_checker = self.check_events_dict([
            ('type', equals('reaction')),
            ('op', equals('remove')),
            ('message_id', check_int),
            ('emoji_name', check_string),
            ('emoji_code', check_string),
            ('reaction_type', check_string),
            ('user', check_dict_only([
                ('email', check_string),
                ('full_name', check_string),
                ('user_id', check_int)
            ])),
        ])

        message_id = self.send_stream_message(self.example_email("hamlet"), "Verona", "hello")
        message = Message.objects.get(id=message_id)
        # Add the reaction first so there is something to remove.
        do_add_reaction_legacy(self.user_profile, message, "tada")
        events = self.do_test(
            lambda: do_remove_reaction_legacy(
                self.user_profile, message, "tada"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_add_reaction(self) -> None:
        # Modern (name + code + type) reaction add must emit a reaction/add
        # event with the full emoji identification.
        schema_checker = self.check_events_dict([
            ('type', equals('reaction')),
            ('op', equals('add')),
            ('message_id', check_int),
            ('emoji_name', check_string),
            ('emoji_code', check_string),
            ('reaction_type', check_string),
            ('user', check_dict_only([
                ('email', check_string),
                ('full_name', check_string),
                ('user_id', check_int)
            ])),
        ])

        message_id = self.send_stream_message(self.example_email("hamlet"), "Verona", "hello")
        message = Message.objects.get(id=message_id)
        events = self.do_test(
            lambda: do_add_reaction(
                self.user_profile, message, "tada", "1f389", "unicode_emoji"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_add_submessage(self) -> None:
        # Adding a submessage (widget payload) must emit a submessage event;
        # submessages do not alter the fetched state payload.
        schema_checker = self.check_events_dict([
            ('type', equals('submessage')),
            ('message_id', check_int),
            ('submessage_id', check_int),
            ('sender_id', check_int),
            ('msg_type', check_string),
            ('content', check_string),
        ])

        cordelia = self.example_user('cordelia')
        stream_name = 'Verona'
        message_id = self.send_stream_message(
            sender_email=cordelia.email,
            stream_name=stream_name,
        )
        events = self.do_test(
            lambda: do_add_submessage(
                realm=cordelia.realm,
                sender_id=cordelia.id,
                message_id=message_id,
                msg_type='whatever',
                content='"stuff"',
            ),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_remove_reaction(self) -> None:
        # Modern reaction removal must emit a reaction/remove event.
        schema_checker = self.check_events_dict([
            ('type', equals('reaction')),
            ('op', equals('remove')),
            ('message_id', check_int),
            ('emoji_name', check_string),
            ('emoji_code', check_string),
            ('reaction_type', check_string),
            ('user', check_dict_only([
                ('email', check_string),
                ('full_name', check_string),
                ('user_id', check_int)
            ])),
        ])

        message_id = self.send_stream_message(self.example_email("hamlet"), "Verona", "hello")
        message = Message.objects.get(id=message_id)
        # Add the reaction first so there is something to remove.
        do_add_reaction(self.user_profile, message, "tada", "1f389", "unicode_emoji")
        events = self.do_test(
            lambda: do_remove_reaction(
                self.user_profile, message, "1f389", "unicode_emoji"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_invite_user_event(self) -> None:
        # Inviting a user must emit an invites_changed event (as an admin,
        # since invite management is admin-visible).
        schema_checker = self.check_events_dict([
            ('type', equals('invites_changed')),
        ])

        self.user_profile = self.example_user('iago')
        streams = []
        for stream_name in ["Denmark", "Scotland"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))

        events = self.do_test(
            lambda: do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_create_multiuse_invite_event(self) -> None:
        # Creating a multiuse invite link must emit invites_changed.
        schema_checker = self.check_events_dict([
            ('type', equals('invites_changed')),
        ])

        self.user_profile = self.example_user('iago')
        streams = []
        for stream_name in ["Denmark", "Verona"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))

        events = self.do_test(
            lambda: do_create_multiuse_invite_link(self.user_profile, PreregistrationUser.INVITE_AS['MEMBER'], streams),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_revoke_user_invite_event(self) -> None:
        # Revoking a pending user invitation must emit invites_changed.
        schema_checker = self.check_events_dict([
            ('type', equals('invites_changed')),
        ])

        self.user_profile = self.example_user('iago')
        streams = []
        for stream_name in ["Denmark", "Verona"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))
        # Create the invitation that will be revoked.
        do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False)
        prereg_users = PreregistrationUser.objects.filter(referred_by__realm=self.user_profile.realm)
        events = self.do_test(
            lambda: do_revoke_user_invite(prereg_users[0]),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_revoke_multiuse_invite_event(self) -> None:
        # Revoking a multiuse invite link must emit invites_changed.
        schema_checker = self.check_events_dict([
            ('type', equals('invites_changed')),
        ])

        self.user_profile = self.example_user('iago')
        streams = []
        for stream_name in ["Denmark", "Verona"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))
        # Create the multiuse invite link that will be revoked.
        do_create_multiuse_invite_link(self.user_profile, PreregistrationUser.INVITE_AS['MEMBER'], streams)

        multiuse_object = MultiuseInvite.objects.get()
        events = self.do_test(
            lambda: do_revoke_multi_use_invite(multiuse_object),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_invitation_accept_invite_event(self) -> None:
        # Accepting an invitation (user creation from a prereg user) emits
        # several events; the last one must be invites_changed.
        schema_checker = self.check_events_dict([
            ('type', equals('invites_changed')),
        ])

        self.user_profile = self.example_user('iago')
        streams = []
        for stream_name in ["Denmark", "Scotland"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))

        do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False)
        prereg_users = PreregistrationUser.objects.get(email="foo@zulip.com")

        events = self.do_test(
            lambda: do_create_user('foo@zulip.com', 'password', self.user_profile.realm,
                                   'full name', 'short name', prereg_user=prereg_users),
            state_change_expected=True,
            num_events=5,
        )

        # The invites_changed event is the fifth (last) event emitted.
        error = schema_checker('events[4]', events[4])
        self.assert_on_error(error)
    def test_typing_events(self) -> None:
        # Typing-start notifications must emit a typing/start event naming
        # the sender and recipients; typing never changes fetched state.
        schema_checker = self.check_events_dict([
            ('type', equals('typing')),
            ('op', equals('start')),
            ('sender', check_dict_only([
                ('email', check_string),
                ('user_id', check_int)])),
            ('recipients', check_list(check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
            ]))),
        ])

        events = self.do_test(
            lambda: check_send_typing_notification(
                self.user_profile, [self.example_email("cordelia")], "start"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_get_typing_user_profiles(self) -> None:
        """
        Make sure we properly assert failures for recipient types that should not
        get typing... notifications.
        """
        sender_profile = self.example_user('cordelia')
        stream = get_stream('Rome', sender_profile.realm)

        # Test stream
        with self.assertRaisesRegex(ValueError, 'not supported for streams'):
            recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
            get_typing_user_profiles(recipient, sender_profile.id)

        # Test some other recipient type
        with self.assertRaisesRegex(ValueError, 'Bad recipient type'):
            recipient = Recipient(type=999)  # invalid type
            get_typing_user_profiles(recipient, sender_profile.id)
    def test_custom_profile_fields_events(self) -> None:
        # custom_profile_fields events must carry the full field list both on
        # initial notification and after a field's hint is updated.
        schema_checker = self.check_events_dict([
            ('type', equals('custom_profile_fields')),
            ('op', equals('add')),
            ('fields', check_list(check_dict_only([
                ('id', check_int),
                ('type', check_int),
                ('name', check_string),
                ('hint', check_string),
                ('field_data', check_string),
                ('order', check_int),
            ]))),
        ])

        events = self.do_test(
            lambda: notify_realm_custom_profile_fields(
                self.user_profile.realm, 'add'),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)

        # Update an existing field's hint and re-notify; the event schema
        # must be unchanged.
        realm = self.user_profile.realm
        field = realm.customprofilefield_set.get(realm=realm, name='Biography')
        name = field.name
        hint = 'Biography of the user'
        try_update_realm_custom_profile_field(realm, field, name, hint=hint)

        events = self.do_test(
            lambda: notify_realm_custom_profile_fields(
                self.user_profile.realm, 'add'),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_custom_profile_field_data_events(self) -> None:
        # Updating a user's custom profile field value must emit a
        # realm_user/update event; text-like fields additionally include a
        # rendered_value, while user-list fields do not.
        schema_checker_basic = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('user_id', check_int),
                ('custom_profile_field', check_dict([
                    ('id', check_int),
                    ('value', check_none_or(check_string)),
                ])),
            ])),
        ])

        schema_checker_with_rendered_value = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('user_id', check_int),
                ('custom_profile_field', check_dict([
                    ('id', check_int),
                    ('value', check_none_or(check_string)),
                    ('rendered_value', check_none_or(check_string)),
                ])),
            ])),
        ])

        # Long-text field ("Biography") includes a rendered_value.
        field_id = self.user_profile.realm.customprofilefield_set.get(
            realm=self.user_profile.realm, name='Biography').id
        field = {
            "id": field_id,
            "value": "New value",
        }
        events = self.do_test(lambda: do_update_user_custom_profile_data_if_changed(self.user_profile, [field]))
        error = schema_checker_with_rendered_value('events[0]', events[0])
        self.assert_on_error(error)

        # Test we pass correct stringify value in custom-user-field data event
        field_id = self.user_profile.realm.customprofilefield_set.get(
            realm=self.user_profile.realm, name='Mentor').id
        field = {
            "id": field_id,
            "value": [self.example_user("ZOE").id],
        }
        events = self.do_test(lambda: do_update_user_custom_profile_data_if_changed(self.user_profile, [field]))
        error = schema_checker_basic('events[0]', events[0])
        self.assert_on_error(error)
    def test_presence_events(self) -> None:
        # Updating presence from the website client must emit a presence
        # event keyed by client name with an 'active' status.
        schema_checker = self.check_events_dict([
            ('type', equals('presence')),
            ('email', check_string),
            ('server_timestamp', check_float),
            ('presence', check_dict_only([
                ('website', check_dict_only([
                    ('status', equals('active')),
                    ('timestamp', check_int),
                    ('client', check_string),
                    ('pushable', check_bool),
                ])),
            ])),
        ])
        events = self.do_test(lambda: do_update_user_presence(
            self.user_profile, get_client("website"), timezone_now(), UserPresence.ACTIVE))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_presence_events_multiple_clients(self) -> None:
        # With presence recorded from two clients, an update for one client
        # must emit an event containing only that client's entry.
        schema_checker_android = self.check_events_dict([
            ('type', equals('presence')),
            ('email', check_string),
            ('server_timestamp', check_float),
            ('presence', check_dict_only([
                ('ZulipAndroid/1.0', check_dict_only([
                    ('status', equals('idle')),
                    ('timestamp', check_int),
                    ('client', check_string),
                    ('pushable', check_bool),
                ])),
            ])),
        ])
        # Seed presence for the Android client via the API, then for website.
        self.api_post(self.user_profile.email, "/api/v1/users/me/presence", {'status': 'idle'},
                      HTTP_USER_AGENT="ZulipAndroid/1.0")
        self.do_test(lambda: do_update_user_presence(
            self.user_profile, get_client("website"), timezone_now(), UserPresence.ACTIVE))
        events = self.do_test(lambda: do_update_user_presence(
            self.user_profile, get_client("ZulipAndroid/1.0"), timezone_now(), UserPresence.IDLE))
        error = schema_checker_android('events[0]', events[0])
        self.assert_on_error(error)
    def test_pointer_events(self) -> None:
        # Moving the pointer must emit a pointer event with the new value.
        schema_checker = self.check_events_dict([
            ('type', equals('pointer')),
            ('pointer', check_int)
        ])
        events = self.do_test(lambda: do_update_pointer(self.user_profile, get_client("website"), 1500))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_register_events(self) -> None:
        # Registering a new account must emit exactly one realm_user/add
        # event with the full person payload.
        realm_user_add_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('add')),
            ('person', check_dict_only([
                ('user_id', check_int),
                ('email', check_string),
                ('avatar_url', check_none_or(check_string)),
                ('full_name', check_string),
                ('is_admin', check_bool),
                ('is_bot', check_bool),
                ('is_guest', check_bool),
                ('profile_data', check_dict_only([])),
                ('timezone', check_string),
                ('date_joined', check_string),
            ])),
        ])

        events = self.do_test(lambda: self.register("test1@zulip.com", "test1"))
        self.assert_length(events, 1)
        error = realm_user_add_checker('events[0]', events[0])
        self.assert_on_error(error)
        # With default email visibility, the .email matches the delivery email.
        new_user_profile = get_user_by_delivery_email("test1@zulip.com", self.user_profile.realm)
        self.assertEqual(new_user_profile.email, "test1@zulip.com")
    def test_register_events_email_address_visibility(self) -> None:
        # With admins-only email visibility, registration still emits one
        # realm_user/add event, but the new user's .email is the synthetic
        # userN@<realm host> address rather than the delivery email.
        realm_user_add_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('add')),
            ('person', check_dict_only([
                ('user_id', check_int),
                ('email', check_string),
                ('avatar_url', check_none_or(check_string)),
                ('full_name', check_string),
                ('is_admin', check_bool),
                ('is_bot', check_bool),
                ('is_guest', check_bool),
                ('profile_data', check_dict_only([])),
                ('timezone', check_string),
                ('date_joined', check_string),
            ])),
        ])

        do_set_realm_property(self.user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS)
        events = self.do_test(lambda: self.register("test1@zulip.com", "test1"))
        self.assert_length(events, 1)
        error = realm_user_add_checker('events[0]', events[0])
        self.assert_on_error(error)
        new_user_profile = get_user_by_delivery_email("test1@zulip.com", self.user_profile.realm)
        self.assertEqual(new_user_profile.email, "user%s@zulip.testserver" % (new_user_profile.id,))
    def test_alert_words_events(self) -> None:
        # Adding and removing alert words must each emit an alert_words
        # event carrying the user's full (string) alert-word list.
        alert_words_checker = self.check_events_dict([
            ('type', equals('alert_words')),
            ('alert_words', check_list(check_string)),
        ])

        events = self.do_test(lambda: do_add_alert_words(self.user_profile, ["alert_word"]))
        error = alert_words_checker('events[0]', events[0])
        self.assert_on_error(error)

        events = self.do_test(lambda: do_remove_alert_words(self.user_profile, ["alert_word"]))
        error = alert_words_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_away_events(self) -> None:
        # Setting and clearing away status must each emit a user_status
        # event with the away flag and status text.
        checker = self.check_events_dict([
            ('type', equals('user_status')),
            ('user_id', check_int),
            ('away', check_bool),
            ('status_text', check_string),
        ])

        client = get_client("website")
        events = self.do_test(lambda: do_update_user_status(user_profile=self.user_profile,
                                                            away=True,
                                                            status_text='out to lunch',
                                                            client_id=client.id))
        error = checker('events[0]', events[0])
        self.assert_on_error(error)

        events = self.do_test(lambda: do_update_user_status(user_profile=self.user_profile,
                                                            away=False,
                                                            status_text='',
                                                            client_id=client.id))
        error = checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_user_group_events(self) -> None:
        # Exercise the full user_group event lifecycle: add, rename, change
        # description, add members, remove members, and delete.
        user_group_add_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('add')),
            ('group', check_dict_only([
                ('id', check_int),
                ('name', check_string),
                ('members', check_list(check_int)),
                ('description', check_string),
            ])),
        ])
        othello = self.example_user('othello')
        events = self.do_test(lambda: check_add_user_group(self.user_profile.realm,
                                                           'backend', [othello],
                                                           'Backend team'))
        error = user_group_add_checker('events[0]', events[0])
        self.assert_on_error(error)

        # Test name update
        user_group_update_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('update')),
            ('group_id', check_int),
            ('data', check_dict_only([
                ('name', check_string),
            ])),
        ])
        backend = UserGroup.objects.get(name='backend')
        events = self.do_test(lambda: do_update_user_group_name(backend, 'backendteam'))
        error = user_group_update_checker('events[0]', events[0])
        self.assert_on_error(error)

        # Test description update
        user_group_update_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('update')),
            ('group_id', check_int),
            ('data', check_dict_only([
                ('description', check_string),
            ])),
        ])
        description = "Backend team to deal with backend code."
        events = self.do_test(lambda: do_update_user_group_description(backend, description))
        error = user_group_update_checker('events[0]', events[0])
        self.assert_on_error(error)

        # Test add members
        user_group_add_member_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('add_members')),
            ('group_id', check_int),
            ('user_ids', check_list(check_int)),
        ])
        hamlet = self.example_user('hamlet')
        events = self.do_test(lambda: bulk_add_members_to_user_group(backend, [hamlet]))
        error = user_group_add_member_checker('events[0]', events[0])
        self.assert_on_error(error)

        # Test remove members
        user_group_remove_member_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('remove_members')),
            ('group_id', check_int),
            ('user_ids', check_list(check_int)),
        ])
        hamlet = self.example_user('hamlet')
        events = self.do_test(lambda: remove_members_from_user_group(backend, [hamlet]))
        error = user_group_remove_member_checker('events[0]', events[0])
        self.assert_on_error(error)

        # Test delete event
        user_group_remove_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('remove')),
            ('group_id', check_int),
        ])
        events = self.do_test(lambda: check_delete_user_group(backend.id, othello))
        error = user_group_remove_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_default_stream_groups_events(self) -> None:
        # Every mutation of a default stream group (create, add/remove
        # streams, change description/name, remove) must emit a
        # default_stream_groups event with the full current group list.
        default_stream_groups_checker = self.check_events_dict([
            ('type', equals('default_stream_groups')),
            ('default_stream_groups', check_list(check_dict_only([
                ('name', check_string),
                ('id', check_int),
                ('description', check_string),
                ('streams', check_list(check_dict_only([
                    ('description', check_string),
                    ('rendered_description', check_string),
                    ('invite_only', check_bool),
                    ('is_web_public', check_bool),
                    ('is_announcement_only', check_bool),
                    ('name', check_string),
                    ('stream_id', check_int),
                    ('first_message_id', check_none_or(check_int)),
                    ('history_public_to_subscribers', check_bool)]))),
            ]))),
        ])

        streams = []
        for stream_name in ["Scotland", "Verona", "Denmark"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))

        events = self.do_test(lambda: do_create_default_stream_group(
            self.user_profile.realm, "group1", "This is group1", streams))
        error = default_stream_groups_checker('events[0]', events[0])
        self.assert_on_error(error)

        group = lookup_default_stream_groups(["group1"], self.user_profile.realm)[0]
        venice_stream = get_stream("Venice", self.user_profile.realm)
        events = self.do_test(lambda: do_add_streams_to_default_stream_group(self.user_profile.realm,
                                                                             group, [venice_stream]))
        error = default_stream_groups_checker('events[0]', events[0])
        self.assert_on_error(error)

        events = self.do_test(lambda: do_remove_streams_from_default_stream_group(self.user_profile.realm,
                                                                                  group, [venice_stream]))
        error = default_stream_groups_checker('events[0]', events[0])
        self.assert_on_error(error)

        events = self.do_test(lambda: do_change_default_stream_group_description(self.user_profile.realm,
                                                                                 group, "New description"))
        error = default_stream_groups_checker('events[0]', events[0])
        self.assert_on_error(error)

        events = self.do_test(lambda: do_change_default_stream_group_name(self.user_profile.realm,
                                                                          group, "New Group Name"))
        error = default_stream_groups_checker('events[0]', events[0])
        self.assert_on_error(error)

        events = self.do_test(lambda: do_remove_default_stream_group(self.user_profile.realm, group))
        error = default_stream_groups_checker('events[0]', events[0])
        self.assert_on_error(error)
def test_default_stream_group_events_guest(self) -> None:
streams = []
for stream_name in ["Scotland", "Verona", "Denmark"]:
streams.append(get_stream(stream_name, self.user_profile.realm))
do_create_default_stream_group(self.user_profile.realm, "group1",
"This is group1", streams)
group = lookup_default_stream_groups(["group1"], self.user_profile.realm)[0]
do_change_is_guest(self.user_profile, True)
venice_stream = get_stream("Venice", self.user_profile.realm)
self.do_test(lambda: do_add_streams_to_default_stream_group(self.user_profile.realm,
group, [venice_stream]),
state_change_expected = False, num_events=0)
def test_default_streams_events(self) -> None:
default_streams_checker = self.check_events_dict([
('type', equals('default_streams')),
('default_streams', check_list(check_dict_only([
('description', check_string),
('invite_only', check_bool),
('name', check_string),
('stream_id', check_int),
]))),
])
stream = get_stream("Scotland", self.user_profile.realm)
events = self.do_test(lambda: do_add_default_stream(stream))
error = default_streams_checker('events[0]', events[0])
events = self.do_test(lambda: do_remove_default_stream(stream))
error = default_streams_checker('events[0]', events[0])
self.assert_on_error(error)
def test_default_streams_events_guest(self) -> None:
do_change_is_guest(self.user_profile, True)
stream = get_stream("Scotland", self.user_profile.realm)
self.do_test(lambda: do_add_default_stream(stream),
state_change_expected = False, num_events=0)
self.do_test(lambda: do_remove_default_stream(stream),
state_change_expected = False, num_events=0)
def test_muted_topics_events(self) -> None:
muted_topics_checker = self.check_events_dict([
('type', equals('muted_topics')),
('muted_topics', check_list(check_list(check_string, 2))),
])
stream = get_stream('Denmark', self.user_profile.realm)
recipient = get_stream_recipient(stream.id)
events = self.do_test(lambda: do_mute_topic(
self.user_profile, stream, recipient, "topic"))
error = muted_topics_checker('events[0]', events[0])
self.assert_on_error(error)
events = self.do_test(lambda: do_unmute_topic(
self.user_profile, stream, "topic"))
error = muted_topics_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_avatar_fields(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('realm_user')),
('op', equals('update')),
('person', check_dict_only([
('email', check_string),
('user_id', check_int),
('avatar_url', check_string),
('avatar_url_medium', check_string),
('avatar_source', check_string),
])),
])
events = self.do_test(
lambda: do_change_avatar_fields(self.user_profile, UserProfile.AVATAR_FROM_USER),
)
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
schema_checker = self.check_events_dict([
('type', equals('realm_user')),
('op', equals('update')),
('person', check_dict_only([
('email', check_string),
('user_id', check_int),
('avatar_url', check_none_or(check_string)),
('avatar_url_medium', check_none_or(check_string)),
('avatar_source', check_string),
])),
])
events = self.do_test(
lambda: do_change_avatar_fields(self.user_profile, UserProfile.AVATAR_FROM_GRAVATAR),
)
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_full_name(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('realm_user')),
('op', equals('update')),
('person', check_dict_only([
('email', check_string),
('full_name', check_string),
('user_id', check_int),
])),
])
events = self.do_test(lambda: do_change_full_name(self.user_profile, 'Sir Hamlet', self.user_profile))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_user_delivery_email_email_address_visibilty_admins(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('realm_user')),
('op', equals('update')),
('person', check_dict_only([
('delivery_email', check_string),
('user_id', check_int),
])),
])
do_set_realm_property(self.user_profile.realm, "email_address_visibility",
Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS)
# Important: We need to refresh from the database here so that
# we don't have a stale UserProfile object with an old value
# for email being passed into this next function.
self.user_profile.refresh_from_db()
action = lambda: do_change_user_delivery_email(self.user_profile, 'newhamlet@zulip.com')
events = self.do_test(action, num_events=1)
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
    def do_set_realm_property_test(self, name: str) -> None:
        """Exercise one Realm.property_types entry end to end.

        Picks a value validator from the property's declared type, seeds
        the realm with the first test value, then applies each remaining
        value and checks the resulting realm/update event schema.
        Raises AssertionError if no test values exist for `name`.
        """
        bool_tests = [True, False, True] # type: List[bool]
        # Explicit per-property test values; boolean properties instead use
        # bool_tests (selected below based on the declared type).
        test_values = dict(
            default_language=[u'es', u'de', u'en'],
            description=[u'Realm description', u'New description'],
            digest_weekday=[0, 1, 2],
            message_retention_days=[10, 20],
            name=[u'Zulip', u'New Name'],
            waiting_period_threshold=[10, 20],
            create_stream_policy=[3, 2, 1],
            invite_to_stream_policy=[3, 2, 1],
            email_address_visibility=[Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS],
            bot_creation_policy=[Realm.BOT_CREATION_EVERYONE],
            video_chat_provider=[
                Realm.VIDEO_CHAT_PROVIDERS['jitsi_meet']['id'],
                Realm.VIDEO_CHAT_PROVIDERS['google_hangouts']['id']
            ],
            google_hangouts_domain=[u"zulip.com", u"zulip.org"],
            zoom_api_secret=[u"abc", u"xyz"],
            zoom_api_key=[u"abc", u"xyz"],
            zoom_user_id=[u"example@example.com", u"example@example.org"]
        ) # type: Dict[str, Any]
        vals = test_values.get(name)
        # Map the property's declared Python type to an event-value validator.
        property_type = Realm.property_types[name]
        if property_type is bool:
            validator = check_bool
            vals = bool_tests
        elif property_type is str:
            validator = check_string
        elif property_type is int:
            validator = check_int
        elif property_type == (int, type(None)):
            # Optional-int properties still deliver an int in the event here.
            validator = check_int
        else:
            raise AssertionError("Unexpected property type %s" % (property_type,))
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update')),
            ('property', equals(name)),
            ('value', validator),
        ])
        if vals is None:
            raise AssertionError('No test created for %s' % (name,))
        # Seed with the first value so every later update is a real change.
        do_set_realm_property(self.user_profile.realm, name, vals[0])
        for val in vals[1:]:
            state_change_expected = True
            if name == "zoom_api_secret":
                # NOTE(review): zoom_api_secret apparently does not appear in
                # fetched client state, hence no state change — confirm.
                state_change_expected = False
            events = self.do_test(
                lambda: do_set_realm_property(self.user_profile.realm, name, val),
                state_change_expected=state_change_expected)
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)
@slow("Actually runs several full-stack fetching tests")
def test_change_realm_property(self) -> None:
for prop in Realm.property_types:
with self.settings(SEND_DIGEST_EMAILS=True):
self.do_set_realm_property_test(prop)
@slow("Runs a large matrix of tests")
def test_change_realm_authentication_methods(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('realm')),
('op', equals('update_dict')),
('property', equals('default')),
('data', check_dict_only([
('authentication_methods', check_dict([]))
])),
])
def fake_backends() -> Any:
backends = (
'zproject.backends.DevAuthBackend',
'zproject.backends.EmailAuthBackend',
'zproject.backends.GitHubAuthBackend',
'zproject.backends.GoogleAuthBackend',
'zproject.backends.ZulipLDAPAuthBackend',
)
return self.settings(AUTHENTICATION_BACKENDS=backends)
# Test transitions; any new backends should be tested with T/T/T/F/T
for (auth_method_dict) in \
({'Google': True, 'Email': True, 'GitHub': True, 'LDAP': False, 'Dev': False},
{'Google': True, 'Email': True, 'GitHub': False, 'LDAP': False, 'Dev': False},
{'Google': True, 'Email': False, 'GitHub': False, 'LDAP': False, 'Dev': False},
{'Google': True, 'Email': False, 'GitHub': True, 'LDAP': False, 'Dev': False},
{'Google': False, 'Email': False, 'GitHub': False, 'LDAP': False, 'Dev': True},
{'Google': False, 'Email': False, 'GitHub': True, 'LDAP': False, 'Dev': True},
{'Google': False, 'Email': True, 'GitHub': True, 'LDAP': True, 'Dev': False}):
with fake_backends():
events = self.do_test(
lambda: do_set_realm_authentication_methods(
self.user_profile.realm,
auth_method_dict))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_pin_stream(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('subscription')),
('op', equals('update')),
('property', equals('pin_to_top')),
('stream_id', check_int),
('value', check_bool),
('name', check_string),
('email', check_string),
])
stream = get_stream("Denmark", self.user_profile.realm)
sub = get_subscription(stream.name, self.user_profile)
do_change_subscription_property(self.user_profile, sub, stream, "pin_to_top", False)
for pinned in (True, False):
events = self.do_test(lambda: do_change_subscription_property(self.user_profile, sub, stream, "pin_to_top", pinned))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_stream_notification_settings(self) -> None:
for setting_name in ['email_notifications']:
schema_checker = self.check_events_dict([
('type', equals('subscription')),
('op', equals('update')),
('property', equals(setting_name)),
('stream_id', check_int),
('value', check_bool),
('name', check_string),
('email', check_string),
])
stream = get_stream("Denmark", self.user_profile.realm)
sub = get_subscription(stream.name, self.user_profile)
# First test with notification_settings_null enabled
for value in (True, False):
events = self.do_test(lambda: do_change_subscription_property(self.user_profile, sub, stream,
setting_name, value),
notification_settings_null=True)
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
for value in (True, False):
events = self.do_test(lambda: do_change_subscription_property(self.user_profile, sub, stream,
setting_name, value))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
@slow("Runs a matrix of 6 queries to the /home view")
def test_change_realm_message_edit_settings(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('realm')),
('op', equals('update_dict')),
('property', equals('default')),
('data', check_dict_only([
('allow_message_editing', check_bool),
('message_content_edit_limit_seconds', check_int),
('allow_community_topic_editing', check_bool),
])),
])
# Test every transition among the four possibilities {T,F} x {0, non-0}
for (allow_message_editing, message_content_edit_limit_seconds) in \
((True, 0), (False, 0), (False, 1234),
(True, 600), (False, 0), (True, 1234)):
events = self.do_test(
lambda: do_set_realm_message_editing(self.user_profile.realm,
allow_message_editing,
message_content_edit_limit_seconds,
False))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_realm_notifications_stream(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('realm')),
('op', equals('update')),
('property', equals('notifications_stream_id')),
('value', check_int),
])
stream = get_stream("Rome", self.user_profile.realm)
for notifications_stream, notifications_stream_id in ((stream, stream.id), (None, -1)):
events = self.do_test(
lambda: do_set_realm_notifications_stream(self.user_profile.realm,
notifications_stream,
notifications_stream_id))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_realm_signup_notifications_stream(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('realm')),
('op', equals('update')),
('property', equals('signup_notifications_stream_id')),
('value', check_int),
])
stream = get_stream("Rome", self.user_profile.realm)
for signup_notifications_stream, signup_notifications_stream_id in ((stream, stream.id), (None, -1)):
events = self.do_test(
lambda: do_set_realm_signup_notifications_stream(self.user_profile.realm,
signup_notifications_stream,
signup_notifications_stream_id))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_is_admin(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('realm_user')),
('op', equals('update')),
('person', check_dict_only([
('email', check_string),
('is_admin', check_bool),
('user_id', check_int),
])),
])
do_change_is_admin(self.user_profile, False)
for is_admin in [True, False]:
events = self.do_test(lambda: do_change_is_admin(self.user_profile, is_admin))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def do_set_user_display_settings_test(self, setting_name: str) -> None:
"""Test updating each setting in UserProfile.property_types dict."""
test_changes = dict(
emojiset = [u'twitter'],
default_language = [u'es', u'de', u'en'],
timezone = [u'US/Mountain', u'US/Samoa', u'Pacific/Galapogos', u''],
demote_inactive_streams = [2, 3, 1],
) # type: Dict[str, Any]
property_type = UserProfile.property_types[setting_name]
if property_type is bool:
validator = check_bool
elif property_type is str:
validator = check_string
elif property_type is int:
validator = check_int
else:
raise AssertionError("Unexpected property type %s" % (property_type,))
num_events = 1
if setting_name == "timezone":
num_events = 2
values = test_changes.get(setting_name)
if property_type is bool:
if getattr(self.user_profile, setting_name) is False:
values = [True, False, True]
else:
values = [False, True, False]
if values is None:
raise AssertionError('No test created for %s' % (setting_name,))
for value in values:
events = self.do_test(lambda: do_set_user_display_setting(
self.user_profile, setting_name, value), num_events=num_events)
schema_checker = self.check_events_dict([
('type', equals('update_display_settings')),
('setting_name', equals(setting_name)),
('user', check_string),
('setting', validator),
])
language_schema_checker = self.check_events_dict([
('type', equals('update_display_settings')),
('language_name', check_string),
('setting_name', equals(setting_name)),
('user', check_string),
('setting', validator),
])
if setting_name == "default_language":
error = language_schema_checker('events[0]', events[0])
else:
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
timezone_schema_checker = self.check_events_dict([
('type', equals('realm_user')),
('op', equals('update')),
('person', check_dict_only([
('email', check_string),
('user_id', check_int),
('timezone', check_string),
])),
])
if setting_name == "timezone":
error = timezone_schema_checker('events[1]', events[1])
@slow("Actually runs several full-stack fetching tests")
def test_set_user_display_settings(self) -> None:
for prop in UserProfile.property_types:
self.do_set_user_display_settings_test(prop)
@slow("Actually runs several full-stack fetching tests")
def test_change_notification_settings(self) -> None:
for notification_setting, v in self.user_profile.notification_setting_types.items():
if notification_setting in ["notification_sound", "desktop_icon_count_display"]:
# These settings are tested in their own tests.
continue
schema_checker = self.check_events_dict([
('type', equals('update_global_notifications')),
('notification_name', equals(notification_setting)),
('user', check_string),
('setting', check_bool),
])
do_change_notification_settings(self.user_profile, notification_setting, False)
for setting_value in [True, False]:
events = self.do_test(lambda: do_change_notification_settings(
self.user_profile, notification_setting, setting_value, log=False))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
# Also test with notification_settings_null=True
events = self.do_test(
lambda: do_change_notification_settings(
self.user_profile, notification_setting, setting_value, log=False),
notification_settings_null=True,
state_change_expected=False)
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_notification_sound(self) -> None:
notification_setting = "notification_sound"
schema_checker = self.check_events_dict([
('type', equals('update_global_notifications')),
('notification_name', equals(notification_setting)),
('user', check_string),
('setting', equals("ding")),
])
events = self.do_test(lambda: do_change_notification_settings(
self.user_profile, notification_setting, 'ding', log=False))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_desktop_icon_count_display(self) -> None:
notification_setting = "desktop_icon_count_display"
schema_checker = self.check_events_dict([
('type', equals('update_global_notifications')),
('notification_name', equals(notification_setting)),
('user', check_string),
('setting', equals(2)),
])
events = self.do_test(lambda: do_change_notification_settings(
self.user_profile, notification_setting, 2, log=False))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
schema_checker = self.check_events_dict([
('type', equals('update_global_notifications')),
('notification_name', equals(notification_setting)),
('user', check_string),
('setting', equals(1)),
])
events = self.do_test(lambda: do_change_notification_settings(
self.user_profile, notification_setting, 1, log=False))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_realm_update_plan_type(self) -> None:
realm = self.user_profile.realm
state_data = fetch_initial_state_data(self.user_profile, None, "", False)
self.assertEqual(state_data['realm_plan_type'], Realm.SELF_HOSTED)
self.assertEqual(state_data['plan_includes_wide_organization_logo'], True)
schema_checker = self.check_events_dict([
('type', equals('realm')),
('op', equals('update')),
('property', equals('plan_type')),
('value', equals(Realm.LIMITED)),
('extra_data', check_dict_only([
('upload_quota', check_int)
])),
])
events = self.do_test(lambda: do_change_plan_type(realm, Realm.LIMITED))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
state_data = fetch_initial_state_data(self.user_profile, None, "", False)
self.assertEqual(state_data['realm_plan_type'], Realm.LIMITED)
self.assertEqual(state_data['plan_includes_wide_organization_logo'], False)
def test_realm_emoji_events(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('realm_emoji')),
('op', equals('update')),
('realm_emoji', check_dict([])),
])
author = self.example_user('iago')
with get_test_image_file('img.png') as img_file:
events = self.do_test(lambda: check_add_realm_emoji(self.user_profile.realm,
"my_emoji",
author,
img_file))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
events = self.do_test(lambda: do_remove_realm_emoji(self.user_profile.realm, "my_emoji"))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_realm_filter_events(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('realm_filters')),
('realm_filters', check_list(None)), # TODO: validate tuples in the list
])
events = self.do_test(lambda: do_add_realm_filter(self.user_profile.realm, "#(?P<id>[123])",
"https://realm.com/my_realm_filter/%(id)s"))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
self.do_test(lambda: do_remove_realm_filter(self.user_profile.realm, "#(?P<id>[123])"))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_realm_domain_events(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('realm_domains')),
('op', equals('add')),
('realm_domain', check_dict_only([
('domain', check_string),
('allow_subdomains', check_bool),
])),
])
events = self.do_test(lambda: do_add_realm_domain(
self.user_profile.realm, 'zulip.org', False))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
schema_checker = self.check_events_dict([
('type', equals('realm_domains')),
('op', equals('change')),
('realm_domain', check_dict_only([
('domain', equals('zulip.org')),
('allow_subdomains', equals(True)),
])),
])
test_domain = RealmDomain.objects.get(realm=self.user_profile.realm,
domain='zulip.org')
events = self.do_test(lambda: do_change_realm_domain(test_domain, True))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
schema_checker = self.check_events_dict([
('type', equals('realm_domains')),
('op', equals('remove')),
('domain', equals('zulip.org')),
])
events = self.do_test(lambda: do_remove_realm_domain(test_domain))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_create_bot(self) -> None:
def get_bot_created_checker(bot_type: str) -> Validator:
if bot_type == "GENERIC_BOT":
check_services = check_list(sub_validator=None, length=0)
elif bot_type == "OUTGOING_WEBHOOK_BOT":
check_services = check_list(check_dict_only([
('base_url', check_url),
('interface', check_int),
('token', check_string),
]), length=1)
elif bot_type == "EMBEDDED_BOT":
check_services = check_list(check_dict_only([
('service_name', check_string),
('config_data', check_dict(value_validator=check_string)),
]), length=1)
return self.check_events_dict([
('type', equals('realm_bot')),
('op', equals('add')),
('bot', check_dict_only([
('email', check_string),
('user_id', check_int),
('bot_type', check_int),
('full_name', check_string),
('is_active', check_bool),
('api_key', check_string),
('default_sending_stream', check_none_or(check_string)),
('default_events_register_stream', check_none_or(check_string)),
('default_all_public_streams', check_bool),
('avatar_url', check_string),
('owner', check_string),
('services', check_services),
])),
])
action = lambda: self.create_bot('test')
events = self.do_test(action, num_events=3)
error = get_bot_created_checker(bot_type="GENERIC_BOT")('events[1]', events[1])
self.assert_on_error(error)
action = lambda: self.create_bot('test_outgoing_webhook',
full_name='Outgoing Webhook Bot',
payload_url=ujson.dumps('https://foo.bar.com'),
interface_type=Service.GENERIC,
bot_type=UserProfile.OUTGOING_WEBHOOK_BOT)
events = self.do_test(action, num_events=3)
# The third event is the second call of notify_created_bot, which contains additional
# data for services (in contrast to the first call).
error = get_bot_created_checker(bot_type="OUTGOING_WEBHOOK_BOT")('events[2]', events[2])
self.assert_on_error(error)
action = lambda: self.create_bot('test_embedded',
full_name='Embedded Bot',
service_name='helloworld',
config_data=ujson.dumps({'foo': 'bar'}),
bot_type=UserProfile.EMBEDDED_BOT)
events = self.do_test(action, num_events=3)
error = get_bot_created_checker(bot_type="EMBEDDED_BOT")('events[2]', events[2])
self.assert_on_error(error)
def test_change_bot_full_name(self) -> None:
bot = self.create_bot('test')
action = lambda: do_change_full_name(bot, 'New Bot Name', self.user_profile)
events = self.do_test(action, num_events=2)
error = self.realm_bot_schema('full_name', check_string)('events[1]', events[1])
self.assert_on_error(error)
def test_regenerate_bot_api_key(self) -> None:
bot = self.create_bot('test')
action = lambda: do_regenerate_api_key(bot, self.user_profile)
events = self.do_test(action)
error = self.realm_bot_schema('api_key', check_string)('events[0]', events[0])
self.assert_on_error(error)
def test_change_bot_avatar_source(self) -> None:
bot = self.create_bot('test')
action = lambda: do_change_avatar_fields(bot, bot.AVATAR_FROM_USER)
events = self.do_test(action, num_events=2)
error = self.realm_bot_schema('avatar_url', check_string)('events[0]', events[0])
self.assertEqual(events[1]['type'], 'realm_user')
self.assert_on_error(error)
def test_change_realm_icon_source(self) -> None:
action = lambda: do_change_icon_source(self.user_profile.realm, Realm.ICON_UPLOADED)
events = self.do_test(action, state_change_expected=True)
schema_checker = self.check_events_dict([
('type', equals('realm')),
('op', equals('update_dict')),
('property', equals('icon')),
('data', check_dict_only([
('icon_url', check_string),
('icon_source', check_string),
])),
])
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_realm_day_mode_logo_source(self) -> None:
action = lambda: do_change_logo_source(self.user_profile.realm, Realm.LOGO_UPLOADED, False)
events = self.do_test(action, state_change_expected=True)
schema_checker = self.check_events_dict([
('type', equals('realm')),
('op', equals('update_dict')),
('property', equals('logo')),
('data', check_dict_only([
('logo_url', check_string),
('logo_source', check_string),
])),
])
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_realm_night_mode_logo_source(self) -> None:
action = lambda: do_change_logo_source(self.user_profile.realm, Realm.LOGO_UPLOADED, True)
events = self.do_test(action, state_change_expected=True)
schema_checker = self.check_events_dict([
('type', equals('realm')),
('op', equals('update_dict')),
('property', equals('night_logo')),
('data', check_dict_only([
('night_logo_url', check_string),
('night_logo_source', check_string),
])),
])
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_bot_default_all_public_streams(self) -> None:
bot = self.create_bot('test')
action = lambda: do_change_default_all_public_streams(bot, True)
events = self.do_test(action)
error = self.realm_bot_schema('default_all_public_streams', check_bool)('events[0]', events[0])
self.assert_on_error(error)
def test_change_bot_default_sending_stream(self) -> None:
bot = self.create_bot('test')
stream = get_stream("Rome", bot.realm)
action = lambda: do_change_default_sending_stream(bot, stream)
events = self.do_test(action)
error = self.realm_bot_schema('default_sending_stream', check_string)('events[0]', events[0])
self.assert_on_error(error)
action = lambda: do_change_default_sending_stream(bot, None)
events = self.do_test(action)
error = self.realm_bot_schema('default_sending_stream', equals(None))('events[0]', events[0])
self.assert_on_error(error)
def test_change_bot_default_events_register_stream(self) -> None:
bot = self.create_bot('test')
stream = get_stream("Rome", bot.realm)
action = lambda: do_change_default_events_register_stream(bot, stream)
events = self.do_test(action)
error = self.realm_bot_schema('default_events_register_stream', check_string)('events[0]', events[0])
self.assert_on_error(error)
action = lambda: do_change_default_events_register_stream(bot, None)
events = self.do_test(action)
error = self.realm_bot_schema('default_events_register_stream', equals(None))('events[0]', events[0])
self.assert_on_error(error)
    def test_change_bot_owner(self) -> None:
        """Check events sent when a bot's owner changes.

        Three variants of the realm_bot event are covered in sequence:
        'update', 'delete', and 'add', each paired with a realm_user
        update carrying the new bot_owner_id.
        """
        # The realm_user side of an ownership change is the same shape in
        # all three scenarios below.
        change_bot_owner_checker_user = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('user_id', check_int),
                ('bot_owner_id', check_int),
            ])),
        ])
        # Scenario 1: realm_bot arrives as an 'update' with the new owner_id.
        change_bot_owner_checker_bot = self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('update')),
            ('bot', check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
                ('owner_id', check_int),
            ])),
        ])
        self.user_profile = self.example_user('iago')
        owner = self.example_user('hamlet')
        bot = self.create_bot('test')
        action = lambda: do_change_bot_owner(bot, owner, self.user_profile)
        events = self.do_test(action, num_events=2)
        error = change_bot_owner_checker_bot('events[0]', events[0])
        self.assert_on_error(error)
        error = change_bot_owner_checker_user('events[1]', events[1])
        self.assert_on_error(error)
        # Scenario 2: acting as 'aaron', the realm_bot event is a 'delete'
        # (presumably because the acting user loses access to the bot —
        # confirm against do_change_bot_owner's notification logic).
        change_bot_owner_checker_bot = self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('delete')),
            ('bot', check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
            ])),
        ])
        self.user_profile = self.example_user('aaron')
        owner = self.example_user('hamlet')
        bot = self.create_bot('test1', full_name='Test1 Testerson')
        action = lambda: do_change_bot_owner(bot, owner, self.user_profile)
        events = self.do_test(action, num_events=2)
        error = change_bot_owner_checker_bot('events[0]', events[0])
        self.assert_on_error(error)
        error = change_bot_owner_checker_user('events[1]', events[1])
        self.assert_on_error(error)
        # Scenario 3: ownership is transferred *to* self.user_profile, so
        # the realm_bot event is a full 'add' with the complete bot payload.
        check_services = check_list(sub_validator=None, length=0)
        change_bot_owner_checker_bot = self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('add')),
            ('bot', check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
                ('bot_type', check_int),
                ('full_name', check_string),
                ('is_active', check_bool),
                ('api_key', check_string),
                ('default_sending_stream', check_none_or(check_string)),
                ('default_events_register_stream', check_none_or(check_string)),
                ('default_all_public_streams', check_bool),
                ('avatar_url', check_string),
                ('owner', check_string),
                ('services', check_services),
            ])),
        ])
        previous_owner = self.example_user('aaron')
        self.user_profile = self.example_user('hamlet')
        bot = self.create_test_bot('test2', previous_owner, full_name='Test2 Testerson')
        action = lambda: do_change_bot_owner(bot, self.user_profile, previous_owner)
        events = self.do_test(action, num_events=2)
        error = change_bot_owner_checker_bot('events[0]', events[0])
        self.assert_on_error(error)
        error = change_bot_owner_checker_user('events[1]', events[1])
        self.assert_on_error(error)
def test_do_update_outgoing_webhook_service(self):
# type: () -> None
update_outgoing_webhook_service_checker = self.check_events_dict([
('type', equals('realm_bot')),
('op', equals('update')),
('bot', check_dict_only([
('email', check_string),
('user_id', check_int),
('services', check_list(check_dict_only([
('base_url', check_url),
('interface', check_int),
('token', check_string),
]))),
])),
])
self.user_profile = self.example_user('iago')
bot = self.create_test_bot('test', self.user_profile,
full_name='Test Bot',
bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
payload_url=ujson.dumps('http://hostname.domain2.com'),
interface_type=Service.GENERIC,
)
action = lambda: do_update_outgoing_webhook_service(bot, 2, 'http://hostname.domain2.com')
events = self.do_test(action)
error = update_outgoing_webhook_service_checker('events[0]', events[0])
self.assert_on_error(error)
def test_do_deactivate_user(self) -> None:
bot_deactivate_checker = self.check_events_dict([
('type', equals('realm_bot')),
('op', equals('remove')),
('bot', check_dict_only([
('email', check_string),
('full_name', check_string),
('user_id', check_int),
])),
])
bot = self.create_bot('test')
action = lambda: do_deactivate_user(bot)
events = self.do_test(action, num_events=2)
error = bot_deactivate_checker('events[1]', events[1])
self.assert_on_error(error)
def test_do_reactivate_user(self) -> None:
bot_reactivate_checker = self.check_events_dict([
('type', equals('realm_bot')),
('op', equals('add')),
('bot', check_dict_only([
('email', check_string),
('user_id', check_int),
('bot_type', check_int),
('full_name', check_string),
('is_active', check_bool),
('api_key', check_string),
('default_sending_stream', check_none_or(check_string)),
('default_events_register_stream', check_none_or(check_string)),
('default_all_public_streams', check_bool),
('avatar_url', check_string),
('owner', check_none_or(check_string)),
('services', check_list(check_dict_only([
('base_url', check_url),
('interface', check_int),
]))),
])),
])
bot = self.create_bot('test')
do_deactivate_user(bot)
action = lambda: do_reactivate_user(bot)
events = self.do_test(action, num_events=2)
error = bot_reactivate_checker('events[1]', events[1])
self.assert_on_error(error)
def test_do_mark_hotspot_as_read(self) -> None:
self.user_profile.tutorial_status = UserProfile.TUTORIAL_WAITING
self.user_profile.save(update_fields=['tutorial_status'])
schema_checker = self.check_events_dict([
('type', equals('hotspots')),
('hotspots', check_list(check_dict_only([
('name', check_string),
('title', check_string),
('description', check_string),
('delay', check_float),
]))),
])
events = self.do_test(lambda: do_mark_hotspot_as_read(self.user_profile, 'intro_reply'))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
    def test_rename_stream(self) -> None:
        """Renaming a stream emits three events, in order: an email_address
        update, a name update, and a notification-bot message announcing
        the rename to the stream."""
        stream = self.make_stream('old_name')
        new_name = u'stream with a brand new name'
        self.subscribe(self.user_profile, stream.name)
        notification = '<p><span class="user-mention silent" data-user-id="{user_id}">King Hamlet</span> renamed stream <strong>old_name</strong> to <strong>stream with a brand new name</strong>.</p>'
        notification = notification.format(user_id=self.user_profile.id)
        action = lambda: do_rename_stream(stream, new_name, self.user_profile)
        events = self.do_test(action, num_events=3)
        # Event 0: the stream's email address changes with its name.
        schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('update')),
            ('property', equals('email_address')),
            ('value', check_string),
            ('stream_id', check_int),
            ('name', equals('old_name')),
        ])
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Event 1: the name update itself (still keyed by the old name).
        schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('update')),
            ('property', equals('name')),
            ('value', equals(new_name)),
            ('name', equals('old_name')),
            ('stream_id', check_int),
        ])
        error = schema_checker('events[1]', events[1])
        self.assert_on_error(error)
        # Event 2: the rendered notification message from notification-bot.
        schema_checker = check_dict([
            ('flags', check_list(check_string)),
            ('type', equals('message')),
            ('message', check_dict([
                ('timestamp', check_int),
                ('content', equals(notification)),
                ('content_type', equals('text/html')),
                ('sender_email', equals('notification-bot@zulip.com')),
                ('sender_id', check_int),
                ('sender_short_name', equals('notification-bot')),
                ('display_recipient', equals(new_name)),
                ('id', check_int),
                ('stream_id', check_int),
                ('sender_realm_str', check_string),
                ('sender_full_name', equals('Notification Bot')),
                ('is_me_message', equals(False)),
                ('type', equals('stream')),
                ('submessages', check_list(check_string)),
                (TOPIC_LINKS, check_list(check_url)),
                ('avatar_url', check_url),
                ('reactions', check_list(None)),
                ('client', equals('Internal')),
                (TOPIC_NAME, equals('stream events')),
                ('recipient_id', check_int)
            ])),
            ('id', check_int)
        ])
        error = schema_checker('events[2]', events[2])
        self.assert_on_error(error)
def test_deactivate_stream_neversubscribed(self) -> None:
stream = self.make_stream('old_name')
action = lambda: do_deactivate_stream(stream)
events = self.do_test(action)
schema_checker = self.check_events_dict([
('type', equals('stream')),
('op', equals('delete')),
('streams', check_list(check_dict([]))),
])
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_subscribe_other_user_never_subscribed(self) -> None:
action = lambda: self.subscribe(self.example_user("othello"), u"test_stream")
events = self.do_test(action, num_events=2)
peer_add_schema_checker = self.check_events_dict([
('type', equals('subscription')),
('op', equals('peer_add')),
('user_id', check_int),
('subscriptions', check_list(check_string)),
])
error = peer_add_schema_checker('events[1]', events[1])
self.assert_on_error(error)
    @slow("Actually several tests combined together")
    def test_subscribe_events(self) -> None:
        """Run the combined subscription-event scenarios with subscriber lists."""
        self.do_test_subscribe_events(include_subscribers=True)
    @slow("Actually several tests combined together")
    def test_subscribe_events_no_include_subscribers(self) -> None:
        """Run the combined subscription-event scenarios without subscriber lists."""
        self.do_test_subscribe_events(include_subscribers=False)
def do_test_subscribe_events(self, include_subscribers: bool) -> None:
subscription_fields = [
('color', check_string),
('description', check_string),
('rendered_description', check_string),
('email_address', check_string),
('invite_only', check_bool),
('is_web_public', check_bool),
('is_announcement_only', check_bool),
('is_muted', check_bool),
('in_home_view', check_bool),
('name', check_string),
('audible_notifications', check_none_or(check_bool)),
('email_notifications', check_none_or(check_bool)),
('desktop_notifications', check_none_or(check_bool)),
('push_notifications', check_none_or(check_bool)),
('stream_id', check_int),
('first_message_id', check_none_or(check_int)),
('history_public_to_subscribers', check_bool),
('pin_to_top', check_bool),
('stream_weekly_traffic', check_none_or(check_int)),
('is_old_stream', check_bool),
]
if include_subscribers:
subscription_fields.append(('subscribers', check_list(check_int)))
subscription_schema_checker = check_list(
check_dict_only(subscription_fields),
)
stream_create_schema_checker = self.check_events_dict([
('type', equals('stream')),
('op', equals('create')),
('streams', check_list(check_dict_only([
('name', check_string),
('stream_id', check_int),
('invite_only', check_bool),
('description', check_string),
('rendered_description', check_string),
]))),
])
add_schema_checker = self.check_events_dict([
('type', equals('subscription')),
('op', equals('add')),
('subscriptions', subscription_schema_checker),
])
remove_schema_checker = self.check_events_dict([
('type', equals('subscription')),
('op', equals('remove')),
('subscriptions', check_list(
check_dict_only([
('name', equals('test_stream')),
('stream_id', check_int),
]),
)),
])
peer_add_schema_checker = self.check_events_dict([
('type', equals('subscription')),
('op', equals('peer_add')),
('user_id', check_int),
('subscriptions', check_list(check_string)),
])
peer_remove_schema_checker = self.check_events_dict([
('type', equals('subscription')),
('op', equals('peer_remove')),
('user_id', check_int),
('subscriptions', check_list(check_string)),
])
stream_update_schema_checker = self.check_events_dict([
('type', equals('stream')),
('op', equals('update')),
('property', equals('description')),
('value', check_string),
('rendered_description', check_string),
('stream_id', check_int),
('name', check_string),
])
stream_update_invite_only_schema_checker = self.check_events_dict([
('type', equals('stream')),
('op', equals('update')),
('property', equals('invite_only')),
('stream_id', check_int),
('name', check_string),
('value', check_bool),
('history_public_to_subscribers', check_bool),
])
stream_update_is_announcement_only_schema_checker = self.check_events_dict([
('type', equals('stream')),
('op', equals('update')),
('property', equals('is_announcement_only')),
('stream_id', check_int),
('name', check_string),
('value', check_bool),
])
# Subscribe to a totally new stream, so it's just Hamlet on it
action = lambda: self.subscribe(self.example_user("hamlet"), "test_stream") # type: Callable[[], object]
events = self.do_test(action, event_types=["subscription", "realm_user"],
include_subscribers=include_subscribers)
error = add_schema_checker('events[0]', events[0])
self.assert_on_error(error)
# Add another user to that totally new stream
action = lambda: self.subscribe(self.example_user("othello"), "test_stream")
events = self.do_test(action,
include_subscribers=include_subscribers,
state_change_expected=include_subscribers,
)
error = peer_add_schema_checker('events[0]', events[0])
self.assert_on_error(error)
stream = get_stream("test_stream", self.user_profile.realm)
# Now remove the first user, to test the normal unsubscribe flow
action = lambda: bulk_remove_subscriptions(
[self.example_user('othello')],
[stream],
get_client("website"))
events = self.do_test(action,
include_subscribers=include_subscribers,
state_change_expected=include_subscribers,
)
error = peer_remove_schema_checker('events[0]', events[0])
self.assert_on_error(error)
# Now remove the second user, to test the 'vacate' event flow
action = lambda: bulk_remove_subscriptions(
[self.example_user('hamlet')],
[stream],
get_client("website"))
events = self.do_test(action,
include_subscribers=include_subscribers,
num_events=3)
error = remove_schema_checker('events[0]', events[0])
self.assert_on_error(error)
# Now resubscribe a user, to make sure that works on a vacated stream
action = lambda: self.subscribe(self.example_user("hamlet"), "test_stream")
events = self.do_test(action,
include_subscribers=include_subscribers,
num_events=2)
error = add_schema_checker('events[1]', events[1])
self.assert_on_error(error)
action = lambda: do_change_stream_description(stream, u'new description')
events = self.do_test(action,
include_subscribers=include_subscribers)
error = stream_update_schema_checker('events[0]', events[0])
self.assert_on_error(error)
# Update stream privacy
action = lambda: do_change_stream_invite_only(stream, True, history_public_to_subscribers=True)
events = self.do_test(action,
include_subscribers=include_subscribers)
error = stream_update_invite_only_schema_checker('events[0]', events[0])
self.assert_on_error(error)
# Update stream is_announcement_only property
action = lambda: do_change_stream_announcement_only(stream, True)
events = self.do_test(action,
include_subscribers=include_subscribers)
error = stream_update_is_announcement_only_schema_checker('events[0]', events[0])
self.assert_on_error(error)
# Subscribe to a totally new invite-only stream, so it's just Hamlet on it
stream = self.make_stream("private", self.user_profile.realm, invite_only=True)
user_profile = self.example_user('hamlet')
action = lambda: bulk_add_subscriptions([stream], [user_profile])
events = self.do_test(action, include_subscribers=include_subscribers,
num_events=2)
error = stream_create_schema_checker('events[0]', events[0])
error = add_schema_checker('events[1]', events[1])
self.assert_on_error(error)
def test_do_delete_message_stream(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('delete_message')),
('message_id', check_int),
('sender', check_string),
('sender_id', check_int),
('message_type', equals("stream")),
('stream_id', check_int),
('topic', check_string),
])
msg_id = self.send_stream_message("hamlet@zulip.com", "Verona")
message = Message.objects.get(id=msg_id)
events = self.do_test(
lambda: do_delete_messages(self.user_profile, [message]),
state_change_expected=True,
)
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_do_delete_message_personal(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('delete_message')),
('message_id', check_int),
('sender', check_string),
('sender_id', check_int),
('message_type', equals("private")),
('recipient_id', check_int),
])
msg_id = self.send_personal_message(
self.example_email("cordelia"),
self.user_profile.email,
"hello",
)
message = Message.objects.get(id=msg_id)
events = self.do_test(
lambda: do_delete_messages(self.user_profile, [message]),
state_change_expected=True,
)
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_do_delete_message_no_max_id(self) -> None:
user_profile = self.example_user('aaron')
# Delete all historical messages for this user
user_profile = self.example_user('hamlet')
UserMessage.objects.filter(user_profile=user_profile).delete()
msg_id = self.send_stream_message("hamlet@zulip.com", "Verona")
message = Message.objects.get(id=msg_id)
self.do_test(
lambda: do_delete_messages(self.user_profile, [message]),
state_change_expected=True,
)
result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
self.assertEqual(result['max_message_id'], -1)
    def test_add_attachment(self) -> None:
        """Exercise the attachment event lifecycle: upload (add), reference in
        a message (update), and deletion (remove), validating each event's
        schema and the reported upload_space_used."""
        schema_checker = self.check_events_dict([
            ('type', equals('attachment')),
            ('op', equals('add')),
            ('attachment', check_dict_only([
                ('id', check_int),
                ('name', check_string),
                ('size', check_int),
                ('path_id', check_string),
                ('create_time', check_float),
                ('messages', check_list(check_dict_only([
                    ('id', check_int),
                    ('name', check_float),
                ]))),
            ])),
            ('upload_space_used', equals(6)),
        ])
        self.login(self.example_email("hamlet"))
        fp = StringIO("zulip!")
        fp.name = "zulip.txt"
        # Mutable holder so the inner closure can hand the upload URI back out.
        data = {'uri': None}
        def do_upload() -> None:
            result = self.client_post("/json/user_uploads", {'file': fp})
            self.assert_json_success(result)
            self.assertIn("uri", result.json())
            uri = result.json()["uri"]
            base = '/user_uploads/'
            self.assertEqual(base, uri[:len(base)])
            data['uri'] = uri
        events = self.do_test(
            lambda: do_upload(),
            num_events=1, state_change_expected=False)
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Verify that the DB has the attachment marked as unclaimed
        entry = Attachment.objects.get(file_name='zulip.txt')
        self.assertEqual(entry.is_claimed(), False)
        # Now we send an actual message using this attachment.
        schema_checker = self.check_events_dict([
            ('type', equals('attachment')),
            ('op', equals('update')),
            ('attachment', check_dict_only([
                ('id', check_int),
                ('name', check_string),
                ('size', check_int),
                ('path_id', check_string),
                ('create_time', check_float),
                ('messages', check_list(check_dict_only([
                    ('id', check_int),
                    ('name', check_float),
                ]))),
            ])),
            ('upload_space_used', equals(6)),
        ])
        self.subscribe(self.example_user("hamlet"), "Denmark")
        body = "First message ...[zulip.txt](http://localhost:9991" + data['uri'] + ")"
        events = self.do_test(
            lambda: self.send_stream_message(self.example_email("hamlet"), "Denmark", body, "test"),
            num_events=2)
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Now remove the attachment
        schema_checker = self.check_events_dict([
            ('type', equals('attachment')),
            ('op', equals('remove')),
            ('attachment', check_dict_only([
                ('id', check_int),
            ])),
            ('upload_space_used', equals(0)),
        ])
        events = self.do_test(
            lambda: self.client_delete("/json/attachments/%s" % (entry.id,)),
            num_events=1, state_change_expected=False)
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_notify_realm_export(self) -> None:
        """Check the realm_export events emitted when an export is started
        and then deleted, with the actual export mocked out."""
        schema_checker = self.check_events_dict([
            ('type', equals('realm_export')),
            ('exports', check_list(check_dict_only([
                ('id', check_int),
                ('export_time', check_float),
                ('acting_user_id', check_int),
                ('export_url', check_string),
                ('deleted_timestamp', equals(None)),
            ]))),
        ])
        do_change_is_admin(self.user_profile, True)
        self.login(self.user_profile.email)
        with mock.patch('zerver.lib.export.do_export_realm',
                        return_value=create_dummy_file('test-export.tar.gz')):
            with stdout_suppressed():
                events = self.do_test(
                    lambda: self.client_post('/json/export/realm'),
                    state_change_expected=True, num_events=2)
        # The first event is a message from notification-bot.
        error = schema_checker('events[1]', events[1])
        self.assert_on_error(error)
        # Now we check the deletion of the export.
        deletion_schema_checker = self.check_events_dict([
            ('type', equals('realm_export')),
            ('exports', check_list(check_dict_only([
                ('id', check_int),
                ('export_time', check_float),
                ('acting_user_id', check_int),
                ('export_url', check_string),
                ('deleted_timestamp', check_float),
            ]))),
        ])
        audit_log_entry = RealmAuditLog.objects.filter(
            event_type=RealmAuditLog.REALM_EXPORTED).first()
        events = self.do_test(
            lambda: self.client_delete('/json/export/realm/{id}'.format(id=audit_log_entry.id)),
            state_change_expected=False, num_events=1)
        error = deletion_schema_checker('events[0]', events[0])
        self.assert_on_error(error)
class FetchInitialStateDataTest(ZulipTestCase):
    """Tests for fetch_initial_state_data: visibility of realm bots and of
    users' delivery_email fields for admins vs. non-admins."""
    # Non-admin users don't have access to all bots
    def test_realm_bots_non_admin(self) -> None:
        user_profile = self.example_user('cordelia')
        self.assertFalse(user_profile.is_realm_admin)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
        self.assert_length(result['realm_bots'], 0)
        # additionally the API key for a random bot is not present in the data
        api_key = get_api_key(self.notification_bot())
        self.assertNotIn(api_key, str(result))
    # Admin users have access to all bots in the realm_bots field
    # NOTE(review): method name looks truncated — presumably meant
    # to be something like test_realm_bots_admin.
    def test_realm_bots_e(self) -> None:
        user_profile = self.example_user('hamlet')
        do_change_is_admin(user_profile, True)
        self.assertTrue(user_profile.is_realm_admin)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
        self.assertTrue(len(result['realm_bots']) > 2)
    def test_max_message_id_with_no_history(self) -> None:
        user_profile = self.example_user('aaron')
        # Delete all historical messages for this user
        UserMessage.objects.filter(user_profile=user_profile).delete()
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
        self.assertEqual(result['max_message_id'], -1)
    def test_delivery_email_presence_for_non_admins(self) -> None:
        """Non-admins never see delivery_email, regardless of the realm's
        email_address_visibility setting."""
        user_profile = self.example_user('aaron')
        self.assertFalse(user_profile.is_realm_admin)
        do_set_realm_property(user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
        for key, value in result['raw_users'].items():
            self.assertNotIn('delivery_email', value)
        do_set_realm_property(user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
        for key, value in result['raw_users'].items():
            self.assertNotIn('delivery_email', value)
    def test_delivery_email_presence_for_admins(self) -> None:
        """Admins see delivery_email only when visibility is ADMINS."""
        user_profile = self.example_user('iago')
        self.assertTrue(user_profile.is_realm_admin)
        do_set_realm_property(user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
        for key, value in result['raw_users'].items():
            self.assertNotIn('delivery_email', value)
        do_set_realm_property(user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
        for key, value in result['raw_users'].items():
            self.assertIn('delivery_email', value)
class GetUnreadMsgsTest(ZulipTestCase):
    """Tests for get_raw_unread_data / aggregate_unread_data across stream,
    huddle, and personal messages, including muted streams/topics and
    mention flags."""
    def mute_stream(self, user_profile: UserProfile, stream: Stream) -> None:
        """Mark the user's subscription to the given stream as muted."""
        recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        subscription = Subscription.objects.get(
            user_profile=user_profile,
            recipient=recipient
        )
        subscription.is_muted = True
        subscription.save()
    def mute_topic(self, user_profile: UserProfile, stream_name: str,
                   topic_name: str) -> None:
        """Add a topic mute for the given user/stream/topic."""
        realm = user_profile.realm
        stream = get_stream(stream_name, realm)
        recipient = get_stream_recipient(stream.id)
        add_topic_mute(
            user_profile=user_profile,
            stream_id=stream.id,
            recipient_id=recipient.id,
            topic_name=topic_name,
        )
    def test_raw_unread_stream(self) -> None:
        """Raw unread data tracks every unread stream message, with muted
        streams/topics excluded only from unmuted_stream_msgs."""
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        realm = hamlet.realm
        for stream_name in ['social', 'devel', 'test here']:
            self.subscribe(hamlet, stream_name)
            self.subscribe(cordelia, stream_name)
        all_message_ids = set()  # type: Set[int]
        message_ids = dict()
        # (stream, topic) pairs; three messages are sent for each.
        tups = [
            ('social', 'lunch'),
            ('test here', 'bla'),
            ('devel', 'python'),
            ('devel', 'ruby'),
        ]
        for stream_name, topic_name in tups:
            message_ids[topic_name] = [
                self.send_stream_message(
                    sender_email=cordelia.email,
                    stream_name=stream_name,
                    topic_name=topic_name,
                ) for i in range(3)
            ]
            all_message_ids |= set(message_ids[topic_name])
        self.assertEqual(len(all_message_ids), 12)  # sanity check on test setup
        self.mute_stream(
            user_profile=hamlet,
            stream=get_stream('test here', realm),
        )
        self.mute_topic(
            user_profile=hamlet,
            stream_name='devel',
            topic_name='ruby',
        )
        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )
        stream_dict = raw_unread_data['stream_dict']
        # stream_dict includes muted messages too.
        self.assertEqual(
            set(stream_dict.keys()),
            all_message_ids,
        )
        # unmuted_stream_msgs excludes the muted stream and muted topic.
        self.assertEqual(
            raw_unread_data['unmuted_stream_msgs'],
            set(message_ids['python']) | set(message_ids['lunch']),
        )
        self.assertEqual(
            stream_dict[message_ids['lunch'][0]],
            dict(
                sender_id=cordelia.id,
                stream_id=get_stream('social', realm).id,
                topic='lunch',
            )
        )
    def test_raw_unread_huddle(self) -> None:
        """Huddle unreads are keyed by message id with a sorted user-id string."""
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        hamlet = self.example_user('hamlet')
        prospero = self.example_user('prospero')
        huddle1_message_ids = [
            self.send_huddle_message(
                cordelia.email,
                [hamlet.email, othello.email]
            )
            for i in range(3)
        ]
        huddle2_message_ids = [
            self.send_huddle_message(
                cordelia.email,
                [hamlet.email, prospero.email]
            )
            for i in range(3)
        ]
        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )
        huddle_dict = raw_unread_data['huddle_dict']
        self.assertEqual(
            set(huddle_dict.keys()),
            set(huddle1_message_ids) | set(huddle2_message_ids)
        )
        huddle_string = ','.join(
            str(uid)
            for uid in sorted([cordelia.id, hamlet.id, othello.id])
        )
        self.assertEqual(
            huddle_dict[huddle1_message_ids[0]],
            dict(user_ids_string=huddle_string),
        )
    def test_raw_unread_personal(self) -> None:
        """Personal-message unreads are keyed by message id with the sender id."""
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        hamlet = self.example_user('hamlet')
        cordelia_pm_message_ids = [
            self.send_personal_message(cordelia.email, hamlet.email)
            for i in range(3)
        ]
        othello_pm_message_ids = [
            self.send_personal_message(othello.email, hamlet.email)
            for i in range(3)
        ]
        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )
        pm_dict = raw_unread_data['pm_dict']
        self.assertEqual(
            set(pm_dict.keys()),
            set(cordelia_pm_message_ids) | set(othello_pm_message_ids)
        )
        self.assertEqual(
            pm_dict[cordelia_pm_message_ids[0]],
            dict(sender_id=cordelia.id),
        )
    def test_unread_msgs(self) -> None:
        """End-to-end check of aggregated unread counts and of how mention /
        alert-word / wildcard flags interact with muted streams and topics."""
        cordelia = self.example_user('cordelia')
        sender_id = cordelia.id
        sender_email = cordelia.email
        user_profile = self.example_user('hamlet')
        othello = self.example_user('othello')
        # our tests rely on order
        assert(sender_email < user_profile.email)
        assert(user_profile.email < othello.email)
        pm1_message_id = self.send_personal_message(sender_email, user_profile.email, "hello1")
        pm2_message_id = self.send_personal_message(sender_email, user_profile.email, "hello2")
        muted_stream = self.subscribe(user_profile, 'Muted Stream')
        self.mute_stream(user_profile, muted_stream)
        self.mute_topic(user_profile, 'Denmark', 'muted-topic')
        stream_message_id = self.send_stream_message(sender_email, "Denmark", "hello")
        muted_stream_message_id = self.send_stream_message(sender_email, "Muted Stream", "hello")
        muted_topic_message_id = self.send_stream_message(
            sender_email,
            "Denmark",
            topic_name="muted-topic",
            content="hello",
        )
        huddle_message_id = self.send_huddle_message(
            sender_email,
            [user_profile.email, othello.email],
            'hello3',
        )
        def get_unread_data() -> UnreadMessagesResult:
            # Re-fetch and aggregate after each flag mutation below.
            raw_unread_data = get_raw_unread_data(user_profile)
            aggregated_data = aggregate_unread_data(raw_unread_data)
            return aggregated_data
        result = get_unread_data()
        # The count here reflects the count of unread messages that we will
        # report to users in the bankruptcy dialog, and for now it excludes unread messages
        # from muted streams, but it doesn't exclude unread messages from muted topics yet.
        self.assertEqual(result['count'], 4)
        unread_pm = result['pms'][0]
        self.assertEqual(unread_pm['sender_id'], sender_id)
        self.assertEqual(unread_pm['unread_message_ids'], [pm1_message_id, pm2_message_id])
        self.assertTrue('sender_ids' not in unread_pm)
        unread_stream = result['streams'][0]
        self.assertEqual(unread_stream['stream_id'], get_stream('Denmark', user_profile.realm).id)
        self.assertEqual(unread_stream['topic'], 'muted-topic')
        self.assertEqual(unread_stream['unread_message_ids'], [muted_topic_message_id])
        self.assertEqual(unread_stream['sender_ids'], [sender_id])
        unread_stream = result['streams'][1]
        self.assertEqual(unread_stream['stream_id'], get_stream('Denmark', user_profile.realm).id)
        self.assertEqual(unread_stream['topic'], 'test')
        self.assertEqual(unread_stream['unread_message_ids'], [stream_message_id])
        self.assertEqual(unread_stream['sender_ids'], [sender_id])
        unread_stream = result['streams'][2]
        self.assertEqual(unread_stream['stream_id'], get_stream('Muted Stream', user_profile.realm).id)
        self.assertEqual(unread_stream['topic'], 'test')
        self.assertEqual(unread_stream['unread_message_ids'], [muted_stream_message_id])
        self.assertEqual(unread_stream['sender_ids'], [sender_id])
        huddle_string = ','.join(str(uid) for uid in sorted([sender_id, user_profile.id, othello.id]))
        unread_huddle = result['huddles'][0]
        self.assertEqual(unread_huddle['user_ids_string'], huddle_string)
        self.assertEqual(unread_huddle['unread_message_ids'], [huddle_message_id])
        self.assertTrue('sender_ids' not in unread_huddle)
        self.assertEqual(result['mentions'], [])
        # Flip flags on the un-muted stream message and check 'mentions'.
        um = UserMessage.objects.get(
            user_profile_id=user_profile.id,
            message_id=stream_message_id
        )
        um.flags |= UserMessage.flags.mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [stream_message_id])
        um.flags = UserMessage.flags.has_alert_word
        um.save()
        result = get_unread_data()
        # TODO: This should change when we make alert words work better.
        self.assertEqual(result['mentions'], [])
        um.flags = UserMessage.flags.wildcard_mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [stream_message_id])
        um.flags = 0
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])
        # Test with a muted stream
        um = UserMessage.objects.get(
            user_profile_id=user_profile.id,
            message_id=muted_stream_message_id
        )
        um.flags = UserMessage.flags.mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [muted_stream_message_id])
        um.flags = UserMessage.flags.has_alert_word
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])
        um.flags = UserMessage.flags.wildcard_mentioned
        um.save()
        result = get_unread_data()
        # Wildcard mentions in a muted stream are not reported.
        self.assertEqual(result['mentions'], [])
        um.flags = 0
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])
        # Test with a muted topic
        um = UserMessage.objects.get(
            user_profile_id=user_profile.id,
            message_id=muted_topic_message_id
        )
        um.flags = UserMessage.flags.mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [muted_topic_message_id])
        um.flags = UserMessage.flags.has_alert_word
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])
        um.flags = UserMessage.flags.wildcard_mentioned
        um.save()
        result = get_unread_data()
        # Wildcard mentions in a muted topic are not reported either.
        self.assertEqual(result['mentions'], [])
        um.flags = 0
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])
class ClientDescriptorsTest(ZulipTestCase):
    """Tests for the tornado event-queue client plumbing: which client
    descriptors receive a message event, and how apply_markdown /
    client_gravatar shape the message each queue gets."""
    def test_get_client_info_for_all_public_streams(self) -> None:
        hamlet = self.example_user('hamlet')
        realm = hamlet.realm
        queue_data = dict(
            all_public_streams=True,
            apply_markdown=True,
            client_gravatar=True,
            client_type_name='website',
            event_types=['message'],
            last_connection_time=time.time(),
            queue_timeout=0,
            realm_id=realm.id,
            user_profile_id=hamlet.id,
        )
        client = allocate_client_descriptor(queue_data)
        message_event = dict(
            realm_id=realm.id,
            stream_name='whatever',
        )
        # An all-public-streams client gets the event even with users=[].
        client_info = get_client_info_for_message_event(
            message_event,
            users=[],
        )
        self.assertEqual(len(client_info), 1)
        dct = client_info[client.event_queue.id]
        self.assertEqual(dct['client'].apply_markdown, True)
        self.assertEqual(dct['client'].client_gravatar, True)
        self.assertEqual(dct['client'].user_profile_id, hamlet.id)
        self.assertEqual(dct['flags'], [])
        self.assertEqual(dct['is_sender'], False)
        # With sender_queue_id set, the same queue is marked as the sender.
        message_event = dict(
            realm_id=realm.id,
            stream_name='whatever',
            sender_queue_id=client.event_queue.id,
        )
        client_info = get_client_info_for_message_event(
            message_event,
            users=[],
        )
        dct = client_info[client.event_queue.id]
        self.assertEqual(dct['is_sender'], True)
    def test_get_client_info_for_normal_users(self) -> None:
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        realm = hamlet.realm
        def test_get_info(apply_markdown: bool, client_gravatar: bool) -> None:
            # Fresh queue per combination of rendering options.
            clear_client_event_queues_for_testing()
            queue_data = dict(
                all_public_streams=False,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
                client_type_name='website',
                event_types=['message'],
                last_connection_time=time.time(),
                queue_timeout=0,
                realm_id=realm.id,
                user_profile_id=hamlet.id,
            )
            client = allocate_client_descriptor(queue_data)
            message_event = dict(
                realm_id=realm.id,
                stream_name='whatever',
            )
            # Hamlet's queue isn't in the users list -> no recipients.
            client_info = get_client_info_for_message_event(
                message_event,
                users=[
                    dict(id=cordelia.id),
                ],
            )
            self.assertEqual(len(client_info), 0)
            client_info = get_client_info_for_message_event(
                message_event,
                users=[
                    dict(id=cordelia.id),
                    dict(id=hamlet.id, flags=['mentioned']),
                ],
            )
            self.assertEqual(len(client_info), 1)
            dct = client_info[client.event_queue.id]
            self.assertEqual(dct['client'].apply_markdown, apply_markdown)
            self.assertEqual(dct['client'].client_gravatar, client_gravatar)
            self.assertEqual(dct['client'].user_profile_id, hamlet.id)
            self.assertEqual(dct['flags'], ['mentioned'])
            self.assertEqual(dct['is_sender'], False)
        test_get_info(apply_markdown=False, client_gravatar=False)
        test_get_info(apply_markdown=True, client_gravatar=False)
        test_get_info(apply_markdown=False, client_gravatar=True)
        test_get_info(apply_markdown=True, client_gravatar=True)
    def test_process_message_event_with_mocked_client_info(self) -> None:
        """Verify per-client message shaping (markdown vs. raw content,
        gravatar URL vs. None) using stub clients in place of real
        client descriptors."""
        hamlet = self.example_user("hamlet")
        class MockClient:
            def __init__(self, user_profile_id: int,
                         apply_markdown: bool,
                         client_gravatar: bool) -> None:
                self.user_profile_id = user_profile_id
                self.apply_markdown = apply_markdown
                self.client_gravatar = client_gravatar
                self.client_type_name = 'whatever'
                self.events = []  # type: List[Dict[str, Any]]
            def accepts_messages(self) -> bool:
                return True
            def accepts_event(self, event: Dict[str, Any]) -> bool:
                assert(event['type'] == 'message')
                return True
            def add_event(self, event: Dict[str, Any]) -> None:
                self.events.append(event)
        # One client per (apply_markdown, client_gravatar) combination.
        client1 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=True,
            client_gravatar=False,
        )
        client2 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=False,
            client_gravatar=False,
        )
        client3 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=True,
            client_gravatar=True,
        )
        client4 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=False,
            client_gravatar=True,
        )
        client_info = {
            'client:1': dict(
                client=client1,
                flags=['starred'],
            ),
            'client:2': dict(
                client=client2,
                flags=['has_alert_word'],
            ),
            'client:3': dict(
                client=client3,
                flags=[],
            ),
            'client:4': dict(
                client=client4,
                flags=[],
            ),
        }
        sender = hamlet
        message_event = dict(
            message_dict=dict(
                id=999,
                content='**hello**',
                rendered_content='<b>hello</b>',
                sender_id=sender.id,
                type='stream',
                client='website',
                # NOTE: Some of these fields are clutter, but some
                # will be useful when we let clients specify
                # that they can compute their own gravatar URLs.
                sender_email=sender.email,
                sender_realm_id=sender.realm_id,
                sender_avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
                sender_avatar_version=1,
                sender_is_mirror_dummy=None,
                recipient_type=None,
                recipient_type_id=None,
            ),
        )
        # Setting users to `[]` bypasses code we don't care about
        # for this test--we assume client_info is correct in our mocks,
        # and we are interested in how messages are put on event queue.
        users = []  # type: List[Dict[str, Any]]
        with mock.patch('zerver.tornado.event_queue.get_client_info_for_message_event',
                        return_value=client_info):
            process_message_event(message_event, users)
        # We are not closely examining avatar_url at this point, so
        # just sanity check them and then delete the keys so that
        # upcoming comparisons work.
        for client in [client1, client2]:
            message = client.events[0]['message']
            self.assertIn('gravatar.com', message['avatar_url'])
            message.pop('avatar_url')
        self.assertEqual(client1.events, [
            dict(
                type='message',
                message=dict(
                    type='stream',
                    sender_id=sender.id,
                    sender_email=sender.email,
                    id=999,
                    content='<b>hello</b>',
                    content_type='text/html',
                    client='website',
                ),
                flags=['starred'],
            ),
        ])
        self.assertEqual(client2.events, [
            dict(
                type='message',
                message=dict(
                    type='stream',
                    sender_id=sender.id,
                    sender_email=sender.email,
                    id=999,
                    content='**hello**',
                    content_type='text/x-markdown',
                    client='website',
                ),
                flags=['has_alert_word'],
            ),
        ])
        self.assertEqual(client3.events, [
            dict(
                type='message',
                message=dict(
                    type='stream',
                    sender_id=sender.id,
                    sender_email=sender.email,
                    avatar_url=None,
                    id=999,
                    content='<b>hello</b>',
                    content_type='text/html',
                    client='website',
                ),
                flags=[],
            ),
        ])
        self.assertEqual(client4.events, [
            dict(
                type='message',
                message=dict(
                    type='stream',
                    sender_id=sender.id,
                    sender_email=sender.email,
                    avatar_url=None,
                    id=999,
                    content='**hello**',
                    content_type='text/x-markdown',
                    client='website',
                ),
                flags=[],
            ),
        ])
class FetchQueriesTest(ZulipTestCase):
    """Pin down the number of database queries fetch_initial_state_data
    issues, both for a full fetch and per individual event type."""
    def test_queries(self) -> None:
        user = self.example_user("hamlet")
        self.login(user.email)
        flush_per_request_caches()
        with queries_captured() as queries:
            with mock.patch('zerver.lib.events.always_want') as want_mock:
                fetch_initial_state_data(
                    user_profile=user,
                    event_types=None,
                    queue_id='x',
                    client_gravatar=False,
                )
        # Total query budget for a full initial-state fetch.
        self.assert_length(queries, 33)
        # Expected query count per event type; keys double as the set of
        # event types fetch_initial_state_data should have asked about.
        expected_counts = dict(
            alert_words=0,
            custom_profile_fields=1,
            default_streams=1,
            default_stream_groups=1,
            hotspots=0,
            message=1,
            muted_topics=1,
            pointer=0,
            presence=3,
            realm=0,
            realm_bot=1,
            realm_domains=1,
            realm_embedded_bots=0,
            realm_incoming_webhook_bots=0,
            realm_emoji=1,
            realm_filters=1,
            realm_user=3,
            realm_user_groups=2,
            recent_private_conversations=2,
            starred_messages=1,
            stream=2,
            stop_words=0,
            subscription=6,
            update_display_settings=0,
            update_global_notifications=0,
            update_message_flags=5,
            user_status=1,
            zulip_version=0,
        )
        wanted_event_types = {
            item[0][0] for item
            in want_mock.call_args_list
        }
        self.assertEqual(wanted_event_types, set(expected_counts))
        for event_type in sorted(wanted_event_types):
            count = expected_counts[event_type]
            flush_per_request_caches()
            with queries_captured() as queries:
                if event_type == 'update_message_flags':
                    # update_message_flags needs message data to compute flags.
                    event_types = ['update_message_flags', 'message']
                else:
                    event_types = [event_type]
                fetch_initial_state_data(
                    user_profile=user,
                    event_types=event_types,
                    queue_id='x',
                    client_gravatar=False,
                )
            self.assert_length(queries, count)
class TestEventsRegisterAllPublicStreamsDefaults(ZulipTestCase):
    """Exercise _default_all_public_streams: an explicitly passed value
    always wins; the user's stored default applies only for None."""

    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user('hamlet')
        self.email = self.user_profile.email

    def _resolve(self, stored_default: bool, passed: Optional[bool]) -> bool:
        # Persist the user's stored default, then resolve it against the
        # value a client passed to the register endpoint.
        self.user_profile.default_all_public_streams = stored_default
        self.user_profile.save()
        return _default_all_public_streams(self.user_profile, passed)

    def test_use_passed_all_public_true_default_false(self) -> None:
        self.assertTrue(self._resolve(False, True))

    def test_use_passed_all_public_true_default(self) -> None:
        self.assertTrue(self._resolve(True, True))

    def test_use_passed_all_public_false_default_false(self) -> None:
        self.assertFalse(self._resolve(False, False))

    def test_use_passed_all_public_false_default_true(self) -> None:
        self.assertFalse(self._resolve(True, False))

    def test_use_true_default_for_none(self) -> None:
        self.assertTrue(self._resolve(True, None))

    def test_use_false_default_for_none(self) -> None:
        self.assertFalse(self._resolve(False, None))
class TestEventsRegisterNarrowDefaults(ZulipTestCase):
    """Exercise _default_narrow: a non-empty narrow always wins; the
    user's default events-register stream applies only when the narrow
    is empty and a default stream is configured."""

    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user('hamlet')
        self.email = self.user_profile.email
        self.stream = get_stream('Verona', self.user_profile.realm)

    def _resolve(self, default_stream_id: Optional[int],
                 narrow: List[List[str]]) -> List[List[str]]:
        # Persist the stored default stream, then resolve the narrow.
        self.user_profile.default_events_register_stream_id = default_stream_id
        self.user_profile.save()
        return _default_narrow(self.user_profile, narrow)

    def test_use_passed_narrow_no_default(self) -> None:
        result = self._resolve(None, [[u'stream', u'my_stream']])
        self.assertEqual(result, [[u'stream', u'my_stream']])

    def test_use_passed_narrow_with_default(self) -> None:
        result = self._resolve(self.stream.id, [[u'stream', u'my_stream']])
        self.assertEqual(result, [[u'stream', u'my_stream']])

    def test_use_default_if_narrow_is_empty(self) -> None:
        result = self._resolve(self.stream.id, [])
        self.assertEqual(result, [[u'stream', u'Verona']])

    def test_use_narrow_if_default_is_none(self) -> None:
        result = self._resolve(None, [])
        self.assertEqual(result, [])
class TestGetRawUserDataSystemBotRealm(ZulipTestCase):
    def test_get_raw_user_data_on_system_bot_realm(self) -> None:
        """Every cross-realm bot must appear in the raw user data of the
        system bot realm, flagged as a cross-realm bot."""
        result = get_raw_user_data(get_realm("zulipinternal"), self.example_user('hamlet'), True)
        for bot_email in settings.CROSS_REALM_BOT_EMAILS:
            bot_id = get_system_bot(bot_email).id
            self.assertIn(bot_id, result)
            self.assertTrue(result[bot_id]['is_cross_realm_bot'])
| 41.856907 | 200 | 0.58101 |
from typing import Any, Callable, Dict, List, Optional, Set, Tuple
import copy
import os
import shutil
import sys
from django.conf import settings
from django.http import HttpRequest, HttpResponse
from django.utils.timezone import now as timezone_now
from io import StringIO
from zerver.models import (
get_client, get_stream_recipient, get_stream, get_realm, get_system_bot,
Message, RealmDomain, Recipient, UserMessage, UserPresence, UserProfile,
Realm, Subscription, Stream, flush_per_request_caches, UserGroup, Service,
Attachment, PreregistrationUser, get_user_by_delivery_email, MultiuseInvite,
RealmAuditLog
)
from zerver.lib.actions import (
try_update_realm_custom_profile_field,
bulk_add_subscriptions,
bulk_remove_subscriptions,
check_add_realm_emoji,
check_send_message,
check_send_typing_notification,
do_add_alert_words,
do_add_default_stream,
do_add_reaction,
do_add_reaction_legacy,
do_add_realm_domain,
do_add_realm_filter,
do_add_streams_to_default_stream_group,
do_add_submessage,
do_change_avatar_fields,
do_change_bot_owner,
do_change_default_all_public_streams,
do_change_default_events_register_stream,
do_change_default_sending_stream,
do_change_default_stream_group_description,
do_change_default_stream_group_name,
do_change_full_name,
do_change_icon_source,
do_change_logo_source,
do_change_is_admin,
do_change_is_guest,
do_change_notification_settings,
do_change_plan_type,
do_change_realm_domain,
do_change_stream_description,
do_change_stream_invite_only,
do_change_stream_announcement_only,
do_change_subscription_property,
do_change_user_delivery_email,
do_create_user,
do_create_default_stream_group,
do_create_multiuse_invite_link,
do_deactivate_stream,
do_deactivate_user,
do_delete_messages,
do_invite_users,
do_mark_hotspot_as_read,
do_mute_topic,
do_reactivate_user,
do_regenerate_api_key,
do_remove_alert_words,
do_remove_default_stream,
do_remove_default_stream_group,
do_remove_reaction,
do_remove_reaction_legacy,
do_remove_realm_domain,
do_remove_realm_emoji,
do_remove_realm_filter,
do_remove_streams_from_default_stream_group,
do_rename_stream,
do_revoke_multi_use_invite,
do_revoke_user_invite,
do_set_realm_authentication_methods,
do_set_realm_message_editing,
do_set_realm_property,
do_set_user_display_setting,
do_set_realm_notifications_stream,
do_set_realm_signup_notifications_stream,
do_unmute_topic,
do_update_embedded_data,
do_update_message,
do_update_message_flags,
do_update_outgoing_webhook_service,
do_update_pointer,
do_update_user_presence,
do_update_user_status,
get_typing_user_profiles,
log_event,
lookup_default_stream_groups,
notify_realm_custom_profile_fields,
check_add_user_group,
do_update_user_group_name,
do_update_user_group_description,
bulk_add_members_to_user_group,
remove_members_from_user_group,
check_delete_user_group,
do_update_user_custom_profile_data_if_changed,
)
from zerver.lib.events import (
apply_events,
fetch_initial_state_data,
get_raw_user_data,
post_process_state,
)
from zerver.lib.message import (
aggregate_unread_data,
get_raw_unread_data,
render_markdown,
UnreadMessagesResult,
)
from zerver.lib.test_helpers import POSTRequestMock, get_subscription, \
get_test_image_file, stub_event_queue_user_events, queries_captured, \
create_dummy_file, stdout_suppressed
from zerver.lib.test_classes import (
ZulipTestCase,
)
from zerver.lib.test_runner import slow
from zerver.lib.topic import (
ORIG_TOPIC,
TOPIC_NAME,
TOPIC_LINKS,
)
from zerver.lib.topic_mutes import (
add_topic_mute,
)
from zerver.lib.validator import (
check_bool, check_dict, check_dict_only, check_float, check_int, check_list, check_string,
equals, check_none_or, Validator, check_url
)
from zerver.lib.users import get_api_key
from zerver.views.events_register import _default_all_public_streams, _default_narrow
from zerver.tornado.event_queue import (
allocate_client_descriptor,
clear_client_event_queues_for_testing,
get_client_info_for_message_event,
process_message_event,
)
from zerver.tornado.views import get_events
import mock
import time
import ujson
class LogEventsTest(ZulipTestCase):
    def test_with_missing_event_log_dir_setting(self) -> None:
        """log_event must be a silent no-op when event logging is
        disabled via EVENT_LOG_DIR=None."""
        with self.settings(EVENT_LOG_DIR=None):
            log_event(dict())

    def test_log_event_mkdir(self) -> None:
        """log_event should create the configured log directory on demand."""
        dir_name = os.path.join(settings.TEST_WORKER_DIR, "test-log-dir")
        # Best-effort cleanup from a previous run; ignore_errors replaces
        # the old try/except OSError: pass and also covers the
        # directory-not-present case.
        shutil.rmtree(dir_name, ignore_errors=True)
        self.assertFalse(os.path.exists(dir_name))
        with self.settings(EVENT_LOG_DIR=dir_name):
            event = {}  # type: Dict[str, int]
            log_event(event)
        self.assertTrue(os.path.exists(dir_name))
class EventsEndpointTest(ZulipTestCase):
    """Smoke tests for the /json/register and /notify_tornado endpoints."""
    def test_events_register_endpoint(self) -> None:
        # This test is intended to get minimal coverage on the
        # events_register code paths
        email = self.example_email("hamlet")
        with mock.patch('zerver.views.events_register.do_events_register', return_value={}):
            result = self.api_post(email, '/json/register')
        self.assert_json_success(result)
        # If Tornado cannot allocate a queue, the endpoint reports an error.
        with mock.patch('zerver.lib.events.request_event_queue', return_value=None):
            result = self.api_post(email, '/json/register')
            self.assert_json_error(result, "Could not allocate event queue")
        return_event_queue = '15:11'
        return_user_events = []  # type: List[Dict[str, Any]]
        # Test that call is made to deal with a returning soft deactivated user.
        with mock.patch('zerver.lib.events.reactivate_user_if_soft_deactivated') as fa:
            with stub_event_queue_user_events(return_event_queue, return_user_events):
                result = self.api_post(email, '/json/register', dict(event_types=ujson.dumps(['pointer'])))
                self.assertEqual(fa.call_count, 1)
        # Empty event list: last_event_id must come back as -1.
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(email, '/json/register', dict(event_types=ujson.dumps(['pointer'])))
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict['last_event_id'], -1)
        self.assertEqual(result_dict['queue_id'], '15:11')
        # With a queued pointer event, its id/payload are reflected back.
        return_event_queue = '15:12'
        return_user_events = [
            {
                'id': 6,
                'type': 'pointer',
                'pointer': 15,
            }
        ]
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(email, '/json/register', dict(event_types=ujson.dumps(['pointer'])))
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict['last_event_id'], 6)
        self.assertEqual(result_dict['pointer'], 15)
        self.assertEqual(result_dict['queue_id'], '15:12')
        # Now test with `fetch_event_types` not matching the event
        return_event_queue = '15:13'
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(email, '/json/register',
                                   dict(event_types=ujson.dumps(['pointer']),
                                        fetch_event_types=ujson.dumps(['message'])))
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict['last_event_id'], 6)
        # Check that the message event types data is in there
        self.assertIn('max_message_id', result_dict)
        # Check that the pointer event types data is not in there
        self.assertNotIn('pointer', result_dict)
        self.assertEqual(result_dict['queue_id'], '15:13')
        # Now test with `fetch_event_types` matching the event
        with stub_event_queue_user_events(return_event_queue, return_user_events):
            result = self.api_post(email, '/json/register',
                                   dict(fetch_event_types=ujson.dumps(['pointer']),
                                        event_types=ujson.dumps(['message'])))
        self.assert_json_success(result)
        result_dict = result.json()
        self.assertEqual(result_dict['last_event_id'], 6)
        # Check that we didn't fetch the messages data
        self.assertNotIn('max_message_id', result_dict)
        self.assertIn('pointer', result_dict)
        self.assertEqual(result_dict['pointer'], 15)
        self.assertEqual(result_dict['queue_id'], '15:13')
    def test_tornado_endpoint(self) -> None:
        # This test is mostly intended to get minimal coverage on
        # the /notify_tornado endpoint, so we can have 100% URL coverage,
        # but it does exercise a little bit of the codepath.
        post_data = dict(
            data=ujson.dumps(
                dict(
                    event=dict(
                        type='other'
                    ),
                    users=[self.example_user('hamlet').id],
                ),
            ),
        )
        # Without the shared secret the endpoint must reject the request.
        req = POSTRequestMock(post_data, user_profile=None)
        req.META['REMOTE_ADDR'] = '127.0.0.1'
        result = self.client_post_request('/notify_tornado', req)
        self.assert_json_error(result, 'Access denied', status_code=403)
        # With the shared secret, the notification is accepted.
        post_data['secret'] = settings.SHARED_SECRET
        req = POSTRequestMock(post_data, user_profile=None)
        req.META['REMOTE_ADDR'] = '127.0.0.1'
        result = self.client_post_request('/notify_tornado', req)
        self.assert_json_success(result)
class GetEventsTest(ZulipTestCase):
    """End-to-end tests of the Tornado get_events view: queue creation,
    event delivery, last_event_id handling, and narrow filtering."""
    def tornado_call(self, view_func: Callable[[HttpRequest, UserProfile], HttpResponse],
                     user_profile: UserProfile,
                     post_data: Dict[str, Any]) -> HttpResponse:
        # Invoke a Tornado view directly with a mocked POST request.
        request = POSTRequestMock(post_data, user_profile)
        return view_func(request, user_profile)
    def test_get_events(self) -> None:
        user_profile = self.example_user('hamlet')
        email = user_profile.email
        recipient_user_profile = self.example_user('othello')
        recipient_email = recipient_user_profile.email
        self.login(email)
        # Register one message-event queue for the sender ...
        result = self.tornado_call(get_events, user_profile,
                                   {"apply_markdown": ujson.dumps(True),
                                    "client_gravatar": ujson.dumps(True),
                                    "event_types": ujson.dumps(["message"]),
                                    "user_client": "website",
                                    "dont_block": ujson.dumps(True),
                                    })
        self.assert_json_success(result)
        queue_id = ujson.loads(result.content)["queue_id"]
        # ... and one for the recipient.
        recipient_result = self.tornado_call(get_events, recipient_user_profile,
                                             {"apply_markdown": ujson.dumps(True),
                                              "client_gravatar": ujson.dumps(True),
                                              "event_types": ujson.dumps(["message"]),
                                              "user_client": "website",
                                              "dont_block": ujson.dumps(True),
                                              })
        self.assert_json_success(recipient_result)
        recipient_queue_id = ujson.loads(recipient_result.content)["queue_id"]
        # A fresh queue delivers no events.
        result = self.tornado_call(get_events, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 0)
        # Sending a PM with sender_queue_id set should echo the client's
        # local_id back in the sender's own event.
        local_id = '10.01'
        check_send_message(
            sender=user_profile,
            client=get_client('whatever'),
            message_type_name='private',
            message_to=[recipient_email],
            topic_name=None,
            message_content='hello',
            local_id=local_id,
            sender_queue_id=queue_id,
        )
        result = self.tornado_call(get_events, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": -1,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["sender_email"], email)
        self.assertEqual(events[0]["local_message_id"], local_id)
        self.assertEqual(events[0]["message"]["display_recipient"][0]["is_mirror_dummy"], False)
        self.assertEqual(events[0]["message"]["display_recipient"][1]["is_mirror_dummy"], False)
        # Acknowledging via last_event_id should hide already-seen events.
        last_event_id = events[0]["id"]
        local_id = '10.02'
        check_send_message(
            sender=user_profile,
            client=get_client('whatever'),
            message_type_name='private',
            message_to=[recipient_email],
            topic_name=None,
            message_content='hello',
            local_id=local_id,
            sender_queue_id=queue_id,
        )
        result = self.tornado_call(get_events, user_profile,
                                   {"queue_id": queue_id,
                                    "user_client": "website",
                                    "last_event_id": last_event_id,
                                    "dont_block": ujson.dumps(True),
                                    })
        events = ujson.loads(result.content)["events"]
        self.assert_json_success(result)
        self.assert_length(events, 1)
        self.assertEqual(events[0]["type"], "message")
        self.assertEqual(events[0]["message"]["sender_email"], email)
        self.assertEqual(events[0]["local_message_id"], local_id)
        # Test that the received messages appear in the receiver's event
        # queue; local_message_id is only echoed to the sending queue.
        recipient_result = self.tornado_call(get_events, recipient_user_profile,
                                             {"queue_id": recipient_queue_id,
                                              "user_client": "website",
                                              "last_event_id": -1,
                                              "dont_block": ujson.dumps(True),
                                              })
        recipient_events = ujson.loads(recipient_result.content)["events"]
        self.assert_json_success(recipient_result)
        self.assertEqual(len(recipient_events), 2)
        self.assertEqual(recipient_events[0]["type"], "message")
        self.assertEqual(recipient_events[0]["message"]["sender_email"], email)
        self.assertTrue("local_message_id" not in recipient_events[0])
        self.assertEqual(recipient_events[1]["type"], "message")
        self.assertEqual(recipient_events[1]["message"]["sender_email"], email)
        self.assertTrue("local_message_id" not in recipient_events[1])
    def test_get_events_narrow(self) -> None:
        user_profile = self.example_user('hamlet')
        email = user_profile.email
        self.login(email)
        def get_message(apply_markdown: bool, client_gravatar: bool) -> Dict[str, Any]:
            # Register a queue narrowed to stream "denmark", send one PM
            # (filtered out) and one Denmark stream message (delivered),
            # and return the single delivered message payload.
            result = self.tornado_call(
                get_events,
                user_profile,
                dict(
                    apply_markdown=ujson.dumps(apply_markdown),
                    client_gravatar=ujson.dumps(client_gravatar),
                    event_types=ujson.dumps(["message"]),
                    narrow=ujson.dumps([["stream", "denmark"]]),
                    user_client="website",
                    dont_block=ujson.dumps(True),
                )
            )
            self.assert_json_success(result)
            queue_id = ujson.loads(result.content)["queue_id"]
            result = self.tornado_call(get_events, user_profile,
                                       {"queue_id": queue_id,
                                        "user_client": "website",
                                        "last_event_id": -1,
                                        "dont_block": ujson.dumps(True),
                                        })
            events = ujson.loads(result.content)["events"]
            self.assert_json_success(result)
            self.assert_length(events, 0)
            self.send_personal_message(email, self.example_email("othello"), "hello")
            self.send_stream_message(email, "Denmark", "**hello**")
            result = self.tornado_call(get_events, user_profile,
                                       {"queue_id": queue_id,
                                        "user_client": "website",
                                        "last_event_id": -1,
                                        "dont_block": ujson.dumps(True),
                                        })
            events = ujson.loads(result.content)["events"]
            self.assert_json_success(result)
            self.assert_length(events, 1)
            self.assertEqual(events[0]["type"], "message")
            return events[0]['message']
        # Exercise all four combinations of apply_markdown/client_gravatar.
        message = get_message(apply_markdown=False, client_gravatar=False)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "**hello**")
        self.assertIn('gravatar.com', message["avatar_url"])
        message = get_message(apply_markdown=True, client_gravatar=False)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "<p><strong>hello</strong></p>")
        self.assertIn('gravatar.com', message["avatar_url"])
        message = get_message(apply_markdown=False, client_gravatar=True)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "**hello**")
        self.assertEqual(message["avatar_url"], None)
        message = get_message(apply_markdown=True, client_gravatar=True)
        self.assertEqual(message["display_recipient"], "Denmark")
        self.assertEqual(message["content"], "<p><strong>hello</strong></p>")
        self.assertEqual(message["avatar_url"], None)
class EventsRegisterTest(ZulipTestCase):
    def setUp(self) -> None:
        # All register/apply-events tests in this class act as Hamlet
        # unless a test reassigns self.user_profile.
        super().setUp()
        self.user_profile = self.example_user('hamlet')
    def create_bot(self, email: str, **extras: Any) -> Optional[UserProfile]:
        """Create a test bot owned by this test's user profile."""
        return self.create_test_bot(email, self.user_profile, **extras)
def realm_bot_schema(self, field_name: str, check: Validator) -> Validator:
return self.check_events_dict([
('type', equals('realm_bot')),
('op', equals('update')),
('bot', check_dict_only([
('email', check_string),
('user_id', check_int),
(field_name, check),
])),
])
    def do_test(self, action: Callable[[], object], event_types: Optional[List[str]]=None,
                include_subscribers: bool=True, state_change_expected: bool=True,
                notification_settings_null: bool=False,
                client_gravatar: bool=False, num_events: int=1) -> List[Dict[str, Any]]:
        """Core harness: run `action`, capture the events it generates,
        and verify that applying those events to a pre-action snapshot of
        register state reproduces the state a fresh fetch would return.

        action: side-effecting callable whose events we capture.
        event_types: restrict the event queue and initial state to these types.
        state_change_expected: assert that the events do (or, if False, do
            not) alter the post-processed state.
        num_events: exact number of events `action` must generate.
        Returns the captured events for schema checking by the caller.
        """
        # Start from empty queues so we only see events from `action`.
        clear_client_event_queues_for_testing()
        client = allocate_client_descriptor(
            dict(user_profile_id = self.user_profile.id,
                 user_profile_email = self.user_profile.email,
                 realm_id = self.user_profile.realm_id,
                 event_types = event_types,
                 client_type_name = "website",
                 apply_markdown = True,
                 client_gravatar = client_gravatar,
                 all_public_streams = False,
                 queue_timeout = 600,
                 last_connection_time = time.time(),
                 narrow = [])
        )
        # Snapshot the state before the action; events get replayed onto it.
        hybrid_state = fetch_initial_state_data(
            self.user_profile, event_types, "",
            client_gravatar=True,
            include_subscribers=include_subscribers
        )
        action()
        events = client.event_queue.contents()
        self.assertEqual(len(events), num_events)
        initial_state = copy.deepcopy(hybrid_state)
        post_process_state(self.user_profile, initial_state, notification_settings_null)
        before = ujson.dumps(initial_state)
        apply_events(hybrid_state, events, self.user_profile,
                     client_gravatar=True, include_subscribers=include_subscribers)
        post_process_state(self.user_profile, hybrid_state, notification_settings_null)
        after = ujson.dumps(hybrid_state)
        # Sanity-check the test itself: the events must (or must not)
        # actually change the state, as the caller declared.
        if state_change_expected:
            if before == after:
                print(ujson.dumps(initial_state, indent=2))
                print(events)
                raise AssertionError('Test does not exercise enough code -- events do not change state.')
        else:
            try:
                self.match_states(initial_state, copy.deepcopy(hybrid_state), events)
            except AssertionError:
                raise AssertionError('Test is invalid--state actually does change here.')
        # The replayed (hybrid) state must match what a fresh fetch
        # returns now that the action has happened.
        normal_state = fetch_initial_state_data(
            self.user_profile, event_types, "",
            client_gravatar=True,
            include_subscribers=include_subscribers,
        )
        post_process_state(self.user_profile, normal_state, notification_settings_null)
        self.match_states(hybrid_state, normal_state, events)
        return events
def assert_on_error(self, error: Optional[str]) -> None:
if error:
raise AssertionError(error)
    def match_states(self, state1: Dict[str, Any], state2: Dict[str, Any],
                     events: List[Dict[str, Any]]) -> None:
        """Assert two register states are equivalent after normalizing
        ordering-irrelevant structure; print a detailed per-key diff
        (plus the events involved) before failing on a mismatch."""
        def normalize(state: Dict[str, Any]) -> None:
            # Sort subscriber lists and re-key subscription/bot lists by a
            # stable identifier so list ordering can't cause false mismatches.
            for u in state['never_subscribed']:
                if 'subscribers' in u:
                    u['subscribers'].sort()
            for u in state['subscriptions']:
                if 'subscribers' in u:
                    u['subscribers'].sort()
            state['subscriptions'] = {u['name']: u for u in state['subscriptions']}
            state['unsubscribed'] = {u['name']: u for u in state['unsubscribed']}
            if 'realm_bots' in state:
                state['realm_bots'] = {u['email']: u for u in state['realm_bots']}
        normalize(state1)
        normalize(state2)
        self.assertEqual(state1.keys(), state2.keys())
        if state1 != state2:
            print('\n---States DO NOT MATCH---')
            print('\nEVENTS:\n')
            import json
            for event in events:
                print(json.dumps(event, indent=4))
            print('\nMISMATCHES:\n')
            for k in state1:
                if state1[k] != state2[k]:
                    print('\nkey = ' + k)
                    # assertEqual renders a readable diff for this key.
                    try:
                        self.assertEqual({k: state1[k]}, {k: state2[k]})
                    except AssertionError as e:
                        print(e)
            print('''
            NOTE:
            This is an advanced test that verifies how
            we apply events after fetching data. If you
            do not know how to debug it, you can ask for
            help on chat.
            ''')
            sys.stdout.flush()
            raise AssertionError('Mismatching states')
def check_events_dict(self, required_keys: List[Tuple[str, Validator]]) -> Validator:
required_keys.append(('id', check_int))
keys = [key[0] for key in required_keys]
self.assertEqual(len(keys), len(set(keys)), 'Duplicate items found in required_keys.')
return check_dict_only(required_keys)
def test_mentioned_send_message_events(self) -> None:
user = self.example_user('hamlet')
for i in range(3):
content = 'mentioning... @**' + user.full_name + '** hello ' + str(i)
self.do_test(
lambda: self.send_stream_message(self.example_email('cordelia'),
"Verona",
content)
)
def test_wildcard_mentioned_send_message_events(self) -> None:
for i in range(3):
content = 'mentioning... @**all** hello ' + str(i)
self.do_test(
lambda: self.send_stream_message(self.example_email('cordelia'),
"Verona",
content)
)
def test_pm_send_message_events(self) -> None:
self.do_test(
lambda: self.send_personal_message(self.example_email('cordelia'),
self.example_email('hamlet'),
'hola')
)
def test_huddle_send_message_events(self) -> None:
huddle = [
self.example_email('hamlet'),
self.example_email('othello'),
]
self.do_test(
lambda: self.send_huddle_message(self.example_email('cordelia'),
huddle,
'hola')
)
    def test_stream_send_message_events(self) -> None:
        """Validate the schemas of message, update_message (edit), and
        update_message (embedded-data) events for stream messages, with
        and without client_gravatar."""
        def get_checker(check_gravatar: Validator) -> Validator:
            # avatar_url is a gravatar URL string or None depending on the
            # client_gravatar registration flag.
            schema_checker = self.check_events_dict([
                ('type', equals('message')),
                ('flags', check_list(None)),
                ('message', self.check_events_dict([
                    ('avatar_url', check_gravatar),
                    ('client', check_string),
                    ('content', check_string),
                    ('content_type', equals('text/html')),
                    ('display_recipient', check_string),
                    ('is_me_message', check_bool),
                    ('reactions', check_list(None)),
                    ('recipient_id', check_int),
                    ('sender_realm_str', check_string),
                    ('sender_email', check_string),
                    ('sender_full_name', check_string),
                    ('sender_id', check_int),
                    ('sender_short_name', check_string),
                    ('stream_id', check_int),
                    (TOPIC_NAME, check_string),
                    (TOPIC_LINKS, check_list(None)),
                    ('submessages', check_list(None)),
                    ('timestamp', check_int),
                    ('type', check_string),
                ])),
            ])
            return schema_checker
        # client_gravatar=False: avatar_url is a URL string.
        events = self.do_test(
            lambda: self.send_stream_message(self.example_email("hamlet"), "Verona", "hello"),
            client_gravatar=False,
        )
        schema_checker = get_checker(check_gravatar=check_string)
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # client_gravatar=True: avatar_url is omitted (None).
        events = self.do_test(
            lambda: self.send_stream_message(self.example_email("hamlet"), "Verona", "hello"),
            client_gravatar=True,
        )
        schema_checker = get_checker(check_gravatar=equals(None))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Full message edit: content + topic change, with the complete
        # update_message event payload.
        schema_checker = self.check_events_dict([
            ('type', equals('update_message')),
            ('flags', check_list(None)),
            ('content', check_string),
            ('edit_timestamp', check_int),
            ('message_id', check_int),
            ('message_ids', check_list(check_int)),
            ('prior_mention_user_ids', check_list(check_int)),
            ('mention_user_ids', check_list(check_int)),
            ('presence_idle_user_ids', check_list(check_int)),
            ('stream_push_user_ids', check_list(check_int)),
            ('stream_email_user_ids', check_list(check_int)),
            ('push_notify_user_ids', check_list(check_int)),
            ('orig_content', check_string),
            ('orig_rendered_content', check_string),
            (ORIG_TOPIC, check_string),
            ('prev_rendered_content_version', check_int),
            ('propagate_mode', check_string),
            ('rendered_content', check_string),
            ('sender', check_string),
            ('stream_id', check_int),
            ('stream_name', check_string),
            (TOPIC_NAME, check_string),
            (TOPIC_LINKS, check_list(None)),
            ('user_id', check_int),
            ('is_me_message', check_bool),
        ])
        message = Message.objects.order_by('-id')[0]
        topic = 'new_topic'
        propagate_mode = 'change_all'
        content = 'new content'
        rendered_content = render_markdown(message, content)
        prior_mention_user_ids = set()  # type: Set[int]
        mentioned_user_ids = set()  # type: Set[int]
        events = self.do_test(
            lambda: do_update_message(self.user_profile, message, topic,
                                      propagate_mode, content, rendered_content,
                                      prior_mention_user_ids,
                                      mentioned_user_ids),
            state_change_expected=True,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Embedded-data update: a slimmer update_message event that does
        # not change register state.
        schema_checker = self.check_events_dict([
            ('type', equals('update_message')),
            ('flags', check_list(None)),
            ('content', check_string),
            ('message_id', check_int),
            ('message_ids', check_list(check_int)),
            ('rendered_content', check_string),
            ('sender', check_string),
        ])
        events = self.do_test(
            lambda: do_update_embedded_data(self.user_profile, message,
                                            u"embed_content", "<p>embed_content</p>"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
def test_update_message_flags(self) -> None:
schema_checker = self.check_events_dict([
('all', check_bool),
('type', equals('update_message_flags')),
('flag', check_string),
('messages', check_list(check_int)),
('operation', equals("add")),
])
message = self.send_personal_message(
self.example_email("cordelia"),
self.example_email("hamlet"),
"hello",
)
user_profile = self.example_user('hamlet')
events = self.do_test(
lambda: do_update_message_flags(user_profile, get_client("website"), 'add', 'starred', [message]),
state_change_expected=True,
)
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
schema_checker = self.check_events_dict([
('all', check_bool),
('type', equals('update_message_flags')),
('flag', check_string),
('messages', check_list(check_int)),
('operation', equals("remove")),
])
events = self.do_test(
lambda: do_update_message_flags(user_profile, get_client("website"), 'remove', 'starred', [message]),
state_change_expected=True,
)
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_update_read_flag_removes_unread_msg_ids(self) -> None:
user_profile = self.example_user('hamlet')
mention = '@**' + user_profile.full_name + '**'
for content in ['hello', mention]:
message = self.send_stream_message(
self.example_email('cordelia'),
"Verona",
content
)
self.do_test(
lambda: do_update_message_flags(user_profile, get_client("website"), 'add', 'read', [message]),
state_change_expected=True,
)
def test_send_message_to_existing_recipient(self) -> None:
self.send_stream_message(
self.example_email('cordelia'),
"Verona",
"hello 1"
)
self.do_test(
lambda: self.send_stream_message("cordelia@zulip.com", "Verona", "hello 2"),
state_change_expected=True,
)
def test_add_reaction_legacy(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('reaction')),
('op', equals('add')),
('message_id', check_int),
('emoji_name', check_string),
('emoji_code', check_string),
('reaction_type', check_string),
('user', check_dict_only([
('email', check_string),
('full_name', check_string),
('user_id', check_int)
])),
])
message_id = self.send_stream_message(self.example_email("hamlet"), "Verona", "hello")
message = Message.objects.get(id=message_id)
events = self.do_test(
lambda: do_add_reaction_legacy(
self.user_profile, message, "tada"),
state_change_expected=False,
)
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_remove_reaction_legacy(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('reaction')),
('op', equals('remove')),
('message_id', check_int),
('emoji_name', check_string),
('emoji_code', check_string),
('reaction_type', check_string),
('user', check_dict_only([
('email', check_string),
('full_name', check_string),
('user_id', check_int)
])),
])
message_id = self.send_stream_message(self.example_email("hamlet"), "Verona", "hello")
message = Message.objects.get(id=message_id)
do_add_reaction_legacy(self.user_profile, message, "tada")
events = self.do_test(
lambda: do_remove_reaction_legacy(
self.user_profile, message, "tada"),
state_change_expected=False,
)
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_add_reaction(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('reaction')),
('op', equals('add')),
('message_id', check_int),
('emoji_name', check_string),
('emoji_code', check_string),
('reaction_type', check_string),
('user', check_dict_only([
('email', check_string),
('full_name', check_string),
('user_id', check_int)
])),
])
message_id = self.send_stream_message(self.example_email("hamlet"), "Verona", "hello")
message = Message.objects.get(id=message_id)
events = self.do_test(
lambda: do_add_reaction(
self.user_profile, message, "tada", "1f389", "unicode_emoji"),
state_change_expected=False,
)
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_add_submessage(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('submessage')),
('message_id', check_int),
('submessage_id', check_int),
('sender_id', check_int),
('msg_type', check_string),
('content', check_string),
])
cordelia = self.example_user('cordelia')
stream_name = 'Verona'
message_id = self.send_stream_message(
sender_email=cordelia.email,
stream_name=stream_name,
)
events = self.do_test(
lambda: do_add_submessage(
realm=cordelia.realm,
sender_id=cordelia.id,
message_id=message_id,
msg_type='whatever',
content='"stuff"',
),
state_change_expected=False,
)
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
    def test_remove_reaction(self) -> None:
        """Removing a previously added reaction should notify clients with a
        reaction/remove event mirroring the add-event schema."""
        schema_checker = self.check_events_dict([
            ('type', equals('reaction')),
            ('op', equals('remove')),
            ('message_id', check_int),
            ('emoji_name', check_string),
            ('emoji_code', check_string),
            ('reaction_type', check_string),
            ('user', check_dict_only([
                ('email', check_string),
                ('full_name', check_string),
                ('user_id', check_int)
            ])),
        ])
        message_id = self.send_stream_message(self.example_email("hamlet"), "Verona", "hello")
        message = Message.objects.get(id=message_id)
        # Add the reaction first so there is something to remove.
        do_add_reaction(self.user_profile, message, "tada", "1f389", "unicode_emoji")
        events = self.do_test(
            lambda: do_remove_reaction(
                self.user_profile, message, "1f389", "unicode_emoji"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_invite_user_event(self) -> None:
        """Inviting users should notify clients via an `invites_changed` event."""
        schema_checker = self.check_events_dict([
            ('type', equals('invites_changed')),
        ])
        # Use an admin (iago) since sending invitations may be restricted
        # to privileged users.
        self.user_profile = self.example_user('iago')
        streams = []
        for stream_name in ["Denmark", "Scotland"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))
        events = self.do_test(
            lambda: do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_create_multiuse_invite_event(self) -> None:
        """Creating a multiuse invite link should send `invites_changed`."""
        schema_checker = self.check_events_dict([
            ('type', equals('invites_changed')),
        ])
        self.user_profile = self.example_user('iago')
        streams = []
        for stream_name in ["Denmark", "Verona"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))
        events = self.do_test(
            lambda: do_create_multiuse_invite_link(self.user_profile, PreregistrationUser.INVITE_AS['MEMBER'], streams),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_revoke_user_invite_event(self) -> None:
        """Revoking a pending user invitation should send `invites_changed`."""
        schema_checker = self.check_events_dict([
            ('type', equals('invites_changed')),
        ])
        self.user_profile = self.example_user('iago')
        streams = []
        for stream_name in ["Denmark", "Verona"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))
        # Create an invitation first so there is one to revoke.
        do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False)
        prereg_users = PreregistrationUser.objects.filter(referred_by__realm=self.user_profile.realm)
        events = self.do_test(
            lambda: do_revoke_user_invite(prereg_users[0]),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_revoke_multiuse_invite_event(self) -> None:
        """Revoking a multiuse invite link should send `invites_changed`."""
        schema_checker = self.check_events_dict([
            ('type', equals('invites_changed')),
        ])
        self.user_profile = self.example_user('iago')
        streams = []
        for stream_name in ["Denmark", "Verona"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))
        do_create_multiuse_invite_link(self.user_profile, PreregistrationUser.INVITE_AS['MEMBER'], streams)
        # .get() with no filter works because exactly one MultiuseInvite
        # exists at this point (the one just created).
        multiuse_object = MultiuseInvite.objects.get()
        events = self.do_test(
            lambda: do_revoke_multi_use_invite(multiuse_object),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
def test_invitation_accept_invite_event(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('invites_changed')),
])
self.user_profile = self.example_user('iago')
streams = []
for stream_name in ["Denmark", "Scotland"]:
streams.append(get_stream(stream_name, self.user_profile.realm))
do_invite_users(self.user_profile, ["foo@zulip.com"], streams, False)
prereg_users = PreregistrationUser.objects.get(email="foo@zulip.com")
events = self.do_test(
lambda: do_create_user('foo@zulip.com', 'password', self.user_profile.realm,
'full name', 'short name', prereg_user=prereg_users),
state_change_expected=True,
num_events=5,
)
error = schema_checker('events[4]', events[4])
self.assert_on_error(error)
    def test_typing_events(self) -> None:
        """A typing-start notification should produce a `typing` event naming
        the sender and the recipients."""
        schema_checker = self.check_events_dict([
            ('type', equals('typing')),
            ('op', equals('start')),
            ('sender', check_dict_only([
                ('email', check_string),
                ('user_id', check_int)])),
            ('recipients', check_list(check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
            ]))),
        ])
        events = self.do_test(
            lambda: check_send_typing_notification(
                self.user_profile, [self.example_email("cordelia")], "start"),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_get_typing_user_profiles(self) -> None:
        """get_typing_user_profiles should reject stream recipients and
        unknown recipient types with a ValueError."""
        sender_profile = self.example_user('cordelia')
        stream = get_stream('Rome', sender_profile.realm)
        with self.assertRaisesRegex(ValueError, 'not supported for streams'):
            recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
            get_typing_user_profiles(recipient, sender_profile.id)
        with self.assertRaisesRegex(ValueError, 'Bad recipient type'):
            # 999 is deliberately not a valid Recipient type constant.
            recipient = Recipient(type=999)
            get_typing_user_profiles(recipient, sender_profile.id)
    def test_custom_profile_fields_events(self) -> None:
        """notify_realm_custom_profile_fields should broadcast the realm's
        full list of custom profile fields, both initially and after a
        field's hint is updated."""
        schema_checker = self.check_events_dict([
            ('type', equals('custom_profile_fields')),
            ('op', equals('add')),
            ('fields', check_list(check_dict_only([
                ('id', check_int),
                ('type', check_int),
                ('name', check_string),
                ('hint', check_string),
                ('field_data', check_string),
                ('order', check_int),
            ]))),
        ])
        events = self.do_test(
            lambda: notify_realm_custom_profile_fields(
                self.user_profile.realm, 'add'),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Update a field's hint and verify the notification still matches
        # the same schema.
        realm = self.user_profile.realm
        field = realm.customprofilefield_set.get(realm=realm, name='Biography')
        name = field.name
        hint = 'Biography of the user'
        try_update_realm_custom_profile_field(realm, field, name, hint=hint)
        events = self.do_test(
            lambda: notify_realm_custom_profile_fields(
                self.user_profile.realm, 'add'),
            state_change_expected=False,
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_custom_profile_field_data_events(self) -> None:
        """Updating a user's custom profile field value should send a
        realm_user/update event; text-like fields additionally include a
        rendered_value in the payload."""
        schema_checker_basic = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('user_id', check_int),
                ('custom_profile_field', check_dict([
                    ('id', check_int),
                    ('value', check_none_or(check_string)),
                ])),
            ])),
        ])
        schema_checker_with_rendered_value = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('user_id', check_int),
                ('custom_profile_field', check_dict([
                    ('id', check_int),
                    ('value', check_none_or(check_string)),
                    ('rendered_value', check_none_or(check_string)),
                ])),
            ])),
        ])
        # 'Biography' is a text field, so the event carries rendered_value.
        field_id = self.user_profile.realm.customprofilefield_set.get(
            realm=self.user_profile.realm, name='Biography').id
        field = {
            "id": field_id,
            "value": "New value",
        }
        events = self.do_test(lambda: do_update_user_custom_profile_data_if_changed(self.user_profile, [field]))
        error = schema_checker_with_rendered_value('events[0]', events[0])
        self.assert_on_error(error)
        # 'Mentor' takes a list of user IDs; no rendered_value is expected.
        field_id = self.user_profile.realm.customprofilefield_set.get(
            realm=self.user_profile.realm, name='Mentor').id
        field = {
            "id": field_id,
            "value": [self.example_user("ZOE").id],
        }
        events = self.do_test(lambda: do_update_user_custom_profile_data_if_changed(self.user_profile, [field]))
        error = schema_checker_basic('events[0]', events[0])
        self.assert_on_error(error)
    def test_presence_events(self) -> None:
        """do_update_user_presence should send a `presence` event keyed by
        the reporting client ('website')."""
        schema_checker = self.check_events_dict([
            ('type', equals('presence')),
            ('email', check_string),
            ('server_timestamp', check_float),
            ('presence', check_dict_only([
                ('website', check_dict_only([
                    ('status', equals('active')),
                    ('timestamp', check_int),
                    ('client', check_string),
                    ('pushable', check_bool),
                ])),
            ])),
        ])
        events = self.do_test(lambda: do_update_user_presence(
            self.user_profile, get_client("website"), timezone_now(), UserPresence.ACTIVE))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_presence_events_multiple_clients(self) -> None:
        """With presence reported from multiple clients, the event for one
        client should contain only that client's presence entry."""
        schema_checker_android = self.check_events_dict([
            ('type', equals('presence')),
            ('email', check_string),
            ('server_timestamp', check_float),
            ('presence', check_dict_only([
                ('ZulipAndroid/1.0', check_dict_only([
                    ('status', equals('idle')),
                    ('timestamp', check_int),
                    ('client', check_string),
                    ('pushable', check_bool),
                ])),
            ])),
        ])
        # Register the Android client as a presence source first.
        self.api_post(self.user_profile.email, "/api/v1/users/me/presence", {'status': 'idle'},
                      HTTP_USER_AGENT="ZulipAndroid/1.0")
        self.do_test(lambda: do_update_user_presence(
            self.user_profile, get_client("website"), timezone_now(), UserPresence.ACTIVE))
        events = self.do_test(lambda: do_update_user_presence(
            self.user_profile, get_client("ZulipAndroid/1.0"), timezone_now(), UserPresence.IDLE))
        error = schema_checker_android('events[0]', events[0])
        self.assert_on_error(error)
    def test_pointer_events(self) -> None:
        """Updating the pointer should send a `pointer` event with the new
        message ID."""
        schema_checker = self.check_events_dict([
            ('type', equals('pointer')),
            ('pointer', check_int)
        ])
        events = self.do_test(lambda: do_update_pointer(self.user_profile, get_client("website"), 1500))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_register_events(self) -> None:
        """Registering a new account should broadcast a realm_user/add event
        with the new user's public data; with default settings the public
        email equals the delivery email."""
        realm_user_add_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('add')),
            ('person', check_dict_only([
                ('user_id', check_int),
                ('email', check_string),
                ('avatar_url', check_none_or(check_string)),
                ('full_name', check_string),
                ('is_admin', check_bool),
                ('is_bot', check_bool),
                ('is_guest', check_bool),
                ('profile_data', check_dict_only([])),
                ('timezone', check_string),
                ('date_joined', check_string),
            ])),
        ])
        events = self.do_test(lambda: self.register("test1@zulip.com", "test1"))
        self.assert_length(events, 1)
        error = realm_user_add_checker('events[0]', events[0])
        self.assert_on_error(error)
        new_user_profile = get_user_by_delivery_email("test1@zulip.com", self.user_profile.realm)
        self.assertEqual(new_user_profile.email, "test1@zulip.com")
    def test_register_events_email_address_visibility(self) -> None:
        """When email visibility is restricted to admins, a newly registered
        user's public email should be the synthetic user%d@... address, not
        the delivery email."""
        realm_user_add_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('add')),
            ('person', check_dict_only([
                ('user_id', check_int),
                ('email', check_string),
                ('avatar_url', check_none_or(check_string)),
                ('full_name', check_string),
                ('is_admin', check_bool),
                ('is_bot', check_bool),
                ('is_guest', check_bool),
                ('profile_data', check_dict_only([])),
                ('timezone', check_string),
                ('date_joined', check_string),
            ])),
        ])
        do_set_realm_property(self.user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS)
        events = self.do_test(lambda: self.register("test1@zulip.com", "test1"))
        self.assert_length(events, 1)
        error = realm_user_add_checker('events[0]', events[0])
        self.assert_on_error(error)
        new_user_profile = get_user_by_delivery_email("test1@zulip.com", self.user_profile.realm)
        self.assertEqual(new_user_profile.email, "user%s@zulip.testserver" % (new_user_profile.id,))
def test_alert_words_events(self) -> None:
alert_words_checker = self.check_events_dict([
('type', equals('alert_words')),
('alert_words', check_list(check_string)),
])
events = self.do_test(lambda: do_add_alert_words(self.user_profile, ["alert_word"]))
error = alert_words_checker('events[0]', events[0])
self.assert_on_error(error)
events = self.do_test(lambda: do_remove_alert_words(self.user_profile, ["alert_word"]))
error = alert_words_checker('events[0]', events[0])
self.assert_on_error(error)
    def test_away_events(self) -> None:
        """Setting and clearing a user status should each emit a
        `user_status` event with the away flag and status text."""
        checker = self.check_events_dict([
            ('type', equals('user_status')),
            ('user_id', check_int),
            ('away', check_bool),
            ('status_text', check_string),
        ])
        client = get_client("website")
        events = self.do_test(lambda: do_update_user_status(user_profile=self.user_profile,
                                                            away=True,
                                                            status_text='out to lunch',
                                                            client_id=client.id))
        error = checker('events[0]', events[0])
        self.assert_on_error(error)
        # Clearing status: away=False with empty text.
        events = self.do_test(lambda: do_update_user_status(user_profile=self.user_profile,
                                                            away=False,
                                                            status_text='',
                                                            client_id=client.id))
        error = checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_user_group_events(self) -> None:
        """Exercise the full user-group event lifecycle: add, rename, update
        description, add members, remove members, and delete."""
        # Creating a user group sends the full group payload.
        user_group_add_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('add')),
            ('group', check_dict_only([
                ('id', check_int),
                ('name', check_string),
                ('members', check_list(check_int)),
                ('description', check_string),
            ])),
        ])
        othello = self.example_user('othello')
        events = self.do_test(lambda: check_add_user_group(self.user_profile.realm,
                                                           'backend', [othello],
                                                           'Backend team'))
        error = user_group_add_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Renaming sends an update event whose data contains only `name`.
        user_group_update_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('update')),
            ('group_id', check_int),
            ('data', check_dict_only([
                ('name', check_string),
            ])),
        ])
        backend = UserGroup.objects.get(name='backend')
        events = self.do_test(lambda: do_update_user_group_name(backend, 'backendteam'))
        error = user_group_update_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Changing the description sends an update whose data contains only
        # `description`.
        user_group_update_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('update')),
            ('group_id', check_int),
            ('data', check_dict_only([
                ('description', check_string),
            ])),
        ])
        description = "Backend team to deal with backend code."
        events = self.do_test(lambda: do_update_user_group_description(backend, description))
        error = user_group_update_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Adding members sends the affected user IDs.
        user_group_add_member_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('add_members')),
            ('group_id', check_int),
            ('user_ids', check_list(check_int)),
        ])
        hamlet = self.example_user('hamlet')
        events = self.do_test(lambda: bulk_add_members_to_user_group(backend, [hamlet]))
        error = user_group_add_member_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Removing members mirrors the add_members event shape.
        user_group_remove_member_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('remove_members')),
            ('group_id', check_int),
            ('user_ids', check_list(check_int)),
        ])
        hamlet = self.example_user('hamlet')
        events = self.do_test(lambda: remove_members_from_user_group(backend, [hamlet]))
        error = user_group_remove_member_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Deleting the group sends just the group_id.
        user_group_remove_checker = self.check_events_dict([
            ('type', equals('user_group')),
            ('op', equals('remove')),
            ('group_id', check_int),
        ])
        events = self.do_test(lambda: check_delete_user_group(backend.id, othello))
        error = user_group_remove_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_default_stream_groups_events(self) -> None:
        """Every default-stream-group mutation (create, add/remove streams,
        change description, rename, remove) should broadcast the realm's
        complete list of default stream groups."""
        default_stream_groups_checker = self.check_events_dict([
            ('type', equals('default_stream_groups')),
            ('default_stream_groups', check_list(check_dict_only([
                ('name', check_string),
                ('id', check_int),
                ('description', check_string),
                ('streams', check_list(check_dict_only([
                    ('description', check_string),
                    ('rendered_description', check_string),
                    ('invite_only', check_bool),
                    ('is_web_public', check_bool),
                    ('is_announcement_only', check_bool),
                    ('name', check_string),
                    ('stream_id', check_int),
                    ('first_message_id', check_none_or(check_int)),
                    ('history_public_to_subscribers', check_bool)]))),
            ]))),
        ])
        streams = []
        for stream_name in ["Scotland", "Verona", "Denmark"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))
        events = self.do_test(lambda: do_create_default_stream_group(
            self.user_profile.realm, "group1", "This is group1", streams))
        error = default_stream_groups_checker('events[0]', events[0])
        self.assert_on_error(error)
        group = lookup_default_stream_groups(["group1"], self.user_profile.realm)[0]
        venice_stream = get_stream("Venice", self.user_profile.realm)
        events = self.do_test(lambda: do_add_streams_to_default_stream_group(self.user_profile.realm,
                                                                             group, [venice_stream]))
        error = default_stream_groups_checker('events[0]', events[0])
        self.assert_on_error(error)
        events = self.do_test(lambda: do_remove_streams_from_default_stream_group(self.user_profile.realm,
                                                                                  group, [venice_stream]))
        error = default_stream_groups_checker('events[0]', events[0])
        self.assert_on_error(error)
        events = self.do_test(lambda: do_change_default_stream_group_description(self.user_profile.realm,
                                                                                 group, "New description"))
        error = default_stream_groups_checker('events[0]', events[0])
        self.assert_on_error(error)
        events = self.do_test(lambda: do_change_default_stream_group_name(self.user_profile.realm,
                                                                          group, "New Group Name"))
        error = default_stream_groups_checker('events[0]', events[0])
        self.assert_on_error(error)
        events = self.do_test(lambda: do_remove_default_stream_group(self.user_profile.realm, group))
        error = default_stream_groups_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_default_stream_group_events_guest(self) -> None:
        """Guest users should receive no events when default stream groups
        change (num_events=0)."""
        streams = []
        for stream_name in ["Scotland", "Verona", "Denmark"]:
            streams.append(get_stream(stream_name, self.user_profile.realm))
        do_create_default_stream_group(self.user_profile.realm, "group1",
                                       "This is group1", streams)
        group = lookup_default_stream_groups(["group1"], self.user_profile.realm)[0]
        # Downgrade the test user to a guest before mutating the group.
        do_change_is_guest(self.user_profile, True)
        venice_stream = get_stream("Venice", self.user_profile.realm)
        self.do_test(lambda: do_add_streams_to_default_stream_group(self.user_profile.realm,
                                                                    group, [venice_stream]),
                     state_change_expected = False, num_events=0)
def test_default_streams_events(self) -> None:
default_streams_checker = self.check_events_dict([
('type', equals('default_streams')),
('default_streams', check_list(check_dict_only([
('description', check_string),
('invite_only', check_bool),
('name', check_string),
('stream_id', check_int),
]))),
])
stream = get_stream("Scotland", self.user_profile.realm)
events = self.do_test(lambda: do_add_default_stream(stream))
error = default_streams_checker('events[0]', events[0])
events = self.do_test(lambda: do_remove_default_stream(stream))
error = default_streams_checker('events[0]', events[0])
self.assert_on_error(error)
    def test_default_streams_events_guest(self) -> None:
        """Guest users should receive no events when default streams are
        added or removed (num_events=0)."""
        do_change_is_guest(self.user_profile, True)
        stream = get_stream("Scotland", self.user_profile.realm)
        self.do_test(lambda: do_add_default_stream(stream),
                     state_change_expected = False, num_events=0)
        self.do_test(lambda: do_remove_default_stream(stream),
                     state_change_expected = False, num_events=0)
    def test_muted_topics_events(self) -> None:
        """Muting and unmuting a topic should each emit a `muted_topics`
        event carrying the full list of [stream, topic] pairs."""
        muted_topics_checker = self.check_events_dict([
            ('type', equals('muted_topics')),
            # Each muted topic is a 2-element [stream_name, topic] list.
            ('muted_topics', check_list(check_list(check_string, 2))),
        ])
        stream = get_stream('Denmark', self.user_profile.realm)
        recipient = get_stream_recipient(stream.id)
        events = self.do_test(lambda: do_mute_topic(
            self.user_profile, stream, recipient, "topic"))
        error = muted_topics_checker('events[0]', events[0])
        self.assert_on_error(error)
        events = self.do_test(lambda: do_unmute_topic(
            self.user_profile, stream, "topic"))
        error = muted_topics_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_change_avatar_fields(self) -> None:
        """Switching avatar source should send realm_user/update events; with
        a user-uploaded avatar the URLs are strings, while with gravatar
        they may be None."""
        schema_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
                ('avatar_url', check_string),
                ('avatar_url_medium', check_string),
                ('avatar_source', check_string),
            ])),
        ])
        events = self.do_test(
            lambda: do_change_avatar_fields(self.user_profile, UserProfile.AVATAR_FROM_USER),
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # For gravatar, the avatar URLs may be None, so loosen the checks.
        schema_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
                ('avatar_url', check_none_or(check_string)),
                ('avatar_url_medium', check_none_or(check_string)),
                ('avatar_source', check_string),
            ])),
        ])
        events = self.do_test(
            lambda: do_change_avatar_fields(self.user_profile, UserProfile.AVATAR_FROM_GRAVATAR),
        )
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_change_full_name(self) -> None:
        """Changing a user's full name should send a realm_user/update event
        with the new name."""
        schema_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('email', check_string),
                ('full_name', check_string),
                ('user_id', check_int),
            ])),
        ])
        events = self.do_test(lambda: do_change_full_name(self.user_profile, 'Sir Hamlet', self.user_profile))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_change_user_delivery_email_email_address_visibilty_admins(self) -> None:
        """With email visibility restricted to admins, changing a user's
        delivery email should send a realm_user/update event carrying the
        delivery_email (not the public email)."""
        schema_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('delivery_email', check_string),
                ('user_id', check_int),
            ])),
        ])
        do_set_realm_property(self.user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS)
        # Refresh the UserProfile so the updated email_address_visibility
        # setting is reflected in the object's email state before it is
        # passed into this next function.
        self.user_profile.refresh_from_db()
        action = lambda: do_change_user_delivery_email(self.user_profile, 'newhamlet@zulip.com')
        events = self.do_test(action, num_events=1)
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def do_set_realm_property_test(self, name: str) -> None:
        """Drive do_set_realm_property through a matrix of test values for
        the realm property ``name``, validating each realm/update event.

        The validator is chosen from the property's declared type in
        Realm.property_types; boolean properties get a fixed True/False/True
        sequence instead of values from ``test_values``.
        """
        bool_tests = [True, False, True]  # type: List[bool]
        test_values = dict(
            default_language=[u'es', u'de', u'en'],
            description=[u'Realm description', u'New description'],
            digest_weekday=[0, 1, 2],
            message_retention_days=[10, 20],
            name=[u'Zulip', u'New Name'],
            waiting_period_threshold=[10, 20],
            create_stream_policy=[3, 2, 1],
            invite_to_stream_policy=[3, 2, 1],
            email_address_visibility=[Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS],
            bot_creation_policy=[Realm.BOT_CREATION_EVERYONE],
            video_chat_provider=[
                Realm.VIDEO_CHAT_PROVIDERS['jitsi_meet']['id'],
                Realm.VIDEO_CHAT_PROVIDERS['google_hangouts']['id']
            ],
            google_hangouts_domain=[u"zulip.com", u"zulip.org"],
            zoom_api_secret=[u"abc", u"xyz"],
            zoom_api_key=[u"abc", u"xyz"],
            zoom_user_id=[u"example@example.com", u"example@example.org"]
        )  # type: Dict[str, Any]
        vals = test_values.get(name)
        property_type = Realm.property_types[name]
        if property_type is bool:
            validator = check_bool
            vals = bool_tests
        elif property_type is str:
            validator = check_string
        elif property_type is int:
            validator = check_int
        elif property_type == (int, type(None)):
            validator = check_int
        else:
            raise AssertionError("Unexpected property type %s" % (property_type,))
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update')),
            ('property', equals(name)),
            ('value', validator),
        ])
        if vals is None:
            raise AssertionError('No test created for %s' % (name,))
        # Set the first value outside do_test so each iteration below is a
        # genuine change from the previous value.
        do_set_realm_property(self.user_profile.realm, name, vals[0])
        for val in vals[1:]:
            state_change_expected = True
            if name == "zoom_api_secret":
                # The secret is excluded from the /register state payload,
                # so changing it produces no state change.
                state_change_expected = False
            events = self.do_test(
                lambda: do_set_realm_property(self.user_profile.realm, name, val),
                state_change_expected=state_change_expected)
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)
    @slow("Actually runs several full-stack fetching tests")
    def test_change_realm_property(self) -> None:
        """Run do_set_realm_property_test for every realm property."""
        for prop in Realm.property_types:
            # NOTE(review): SEND_DIGEST_EMAILS presumably needs to be on for
            # digest-related properties to take effect -- confirm.
            with self.settings(SEND_DIGEST_EMAILS=True):
                self.do_set_realm_property_test(prop)
    @slow("Runs a large matrix of tests")
    def test_change_realm_authentication_methods(self) -> None:
        """Changing the realm's enabled authentication methods should send a
        realm/update_dict event with the authentication_methods dict."""
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update_dict')),
            ('property', equals('default')),
            ('data', check_dict_only([
                ('authentication_methods', check_dict([]))
            ])),
        ])
        def fake_backends() -> Any:
            # Enable all the relevant backends so any combination of
            # auth methods is valid during the test.
            backends = (
                'zproject.backends.DevAuthBackend',
                'zproject.backends.EmailAuthBackend',
                'zproject.backends.GitHubAuthBackend',
                'zproject.backends.GoogleAuthBackend',
                'zproject.backends.ZulipLDAPAuthBackend',
            )
            return self.settings(AUTHENTICATION_BACKENDS=backends)
        # Test transitions; any new backends should be tested with T/T/T/F/T
        for (auth_method_dict) in \
                ({'Google': True, 'Email': True, 'GitHub': True, 'LDAP': False, 'Dev': False},
                 {'Google': True, 'Email': True, 'GitHub': False, 'LDAP': False, 'Dev': False},
                 {'Google': True, 'Email': False, 'GitHub': False, 'LDAP': False, 'Dev': False},
                 {'Google': True, 'Email': False, 'GitHub': True, 'LDAP': False, 'Dev': False},
                 {'Google': False, 'Email': False, 'GitHub': False, 'LDAP': False, 'Dev': True},
                 {'Google': False, 'Email': False, 'GitHub': True, 'LDAP': False, 'Dev': True},
                 {'Google': False, 'Email': True, 'GitHub': True, 'LDAP': True, 'Dev': False}):
            with fake_backends():
                events = self.do_test(
                    lambda: do_set_realm_authentication_methods(
                        self.user_profile.realm,
                        auth_method_dict))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)
    def test_change_pin_stream(self) -> None:
        """Toggling a subscription's pin_to_top should send a
        subscription/update event with the new boolean value."""
        schema_checker = self.check_events_dict([
            ('type', equals('subscription')),
            ('op', equals('update')),
            ('property', equals('pin_to_top')),
            ('stream_id', check_int),
            ('value', check_bool),
            ('name', check_string),
            ('email', check_string),
        ])
        stream = get_stream("Denmark", self.user_profile.realm)
        sub = get_subscription(stream.name, self.user_profile)
        # Start from a known baseline so each toggle is a real change.
        do_change_subscription_property(self.user_profile, sub, stream, "pin_to_top", False)
        for pinned in (True, False):
            events = self.do_test(lambda: do_change_subscription_property(self.user_profile, sub, stream, "pin_to_top", pinned))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)
    def test_change_stream_notification_settings(self) -> None:
        """Per-stream notification setting changes should send
        subscription/update events, with and without
        notification_settings_null enabled."""
        for setting_name in ['email_notifications']:
            schema_checker = self.check_events_dict([
                ('type', equals('subscription')),
                ('op', equals('update')),
                ('property', equals(setting_name)),
                ('stream_id', check_int),
                ('value', check_bool),
                ('name', check_string),
                ('email', check_string),
            ])
        stream = get_stream("Denmark", self.user_profile.realm)
        sub = get_subscription(stream.name, self.user_profile)
        # First test with notification_settings_null enabled
        for value in (True, False):
            events = self.do_test(lambda: do_change_subscription_property(self.user_profile, sub, stream,
                                                                          setting_name, value),
                                  notification_settings_null=True)
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)
        # Then repeat with the default (non-null) representation.
        for value in (True, False):
            events = self.do_test(lambda: do_change_subscription_property(self.user_profile, sub, stream,
                                                                          setting_name, value))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)
    @slow("Runs a matrix of 6 queries to the /home view")
    def test_change_realm_message_edit_settings(self) -> None:
        """Changing realm message-editing settings should send a
        realm/update_dict event containing all three editing fields."""
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update_dict')),
            ('property', equals('default')),
            ('data', check_dict_only([
                ('allow_message_editing', check_bool),
                ('message_content_edit_limit_seconds', check_int),
                ('allow_community_topic_editing', check_bool),
            ])),
        ])
        # Test every transition among the four possibilities {T,F} x {0, non-0}
        for (allow_message_editing, message_content_edit_limit_seconds) in \
            ((True, 0), (False, 0), (False, 1234),
             (True, 600), (False, 0), (True, 1234)):
            events = self.do_test(
                lambda: do_set_realm_message_editing(self.user_profile.realm,
                                                     allow_message_editing,
                                                     message_content_edit_limit_seconds,
                                                     False))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)
    def test_change_realm_notifications_stream(self) -> None:
        """Setting or clearing the realm notifications stream should send a
        realm/update event with the stream ID (-1 when cleared)."""
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update')),
            ('property', equals('notifications_stream_id')),
            ('value', check_int),
        ])
        stream = get_stream("Rome", self.user_profile.realm)
        for notifications_stream, notifications_stream_id in ((stream, stream.id), (None, -1)):
            events = self.do_test(
                lambda: do_set_realm_notifications_stream(self.user_profile.realm,
                                                          notifications_stream,
                                                          notifications_stream_id))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)
    def test_change_realm_signup_notifications_stream(self) -> None:
        """Setting or clearing the signup notifications stream should send a
        realm/update event with the stream ID (-1 when cleared)."""
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update')),
            ('property', equals('signup_notifications_stream_id')),
            ('value', check_int),
        ])
        stream = get_stream("Rome", self.user_profile.realm)
        for signup_notifications_stream, signup_notifications_stream_id in ((stream, stream.id), (None, -1)):
            events = self.do_test(
                lambda: do_set_realm_signup_notifications_stream(self.user_profile.realm,
                                                                 signup_notifications_stream,
                                                                 signup_notifications_stream_id))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)
    def test_change_is_admin(self) -> None:
        """Granting and revoking admin rights should each send a
        realm_user/update event with the new is_admin value."""
        schema_checker = self.check_events_dict([
            ('type', equals('realm_user')),
            ('op', equals('update')),
            ('person', check_dict_only([
                ('email', check_string),
                ('is_admin', check_bool),
                ('user_id', check_int),
            ])),
        ])
        # Start from a known baseline so both transitions are real changes.
        do_change_is_admin(self.user_profile, False)
        for is_admin in [True, False]:
            events = self.do_test(lambda: do_change_is_admin(self.user_profile, is_admin))
            error = schema_checker('events[0]', events[0])
            self.assert_on_error(error)
def do_set_user_display_settings_test(self, setting_name: str) -> None:
test_changes = dict(
emojiset = [u'twitter'],
default_language = [u'es', u'de', u'en'],
timezone = [u'US/Mountain', u'US/Samoa', u'Pacific/Galapogos', u''],
demote_inactive_streams = [2, 3, 1],
) # type: Dict[str, Any]
property_type = UserProfile.property_types[setting_name]
if property_type is bool:
validator = check_bool
elif property_type is str:
validator = check_string
elif property_type is int:
validator = check_int
else:
raise AssertionError("Unexpected property type %s" % (property_type,))
num_events = 1
if setting_name == "timezone":
num_events = 2
values = test_changes.get(setting_name)
if property_type is bool:
if getattr(self.user_profile, setting_name) is False:
values = [True, False, True]
else:
values = [False, True, False]
if values is None:
raise AssertionError('No test created for %s' % (setting_name,))
for value in values:
events = self.do_test(lambda: do_set_user_display_setting(
self.user_profile, setting_name, value), num_events=num_events)
schema_checker = self.check_events_dict([
('type', equals('update_display_settings')),
('setting_name', equals(setting_name)),
('user', check_string),
('setting', validator),
])
language_schema_checker = self.check_events_dict([
('type', equals('update_display_settings')),
('language_name', check_string),
('setting_name', equals(setting_name)),
('user', check_string),
('setting', validator),
])
if setting_name == "default_language":
error = language_schema_checker('events[0]', events[0])
else:
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
timezone_schema_checker = self.check_events_dict([
('type', equals('realm_user')),
('op', equals('update')),
('person', check_dict_only([
('email', check_string),
('user_id', check_int),
('timezone', check_string),
])),
])
if setting_name == "timezone":
error = timezone_schema_checker('events[1]', events[1])
    @slow("Actually runs several full-stack fetching tests")
    def test_set_user_display_settings(self) -> None:
        """Run do_set_user_display_settings_test for every display setting."""
        for prop in UserProfile.property_types:
            self.do_set_user_display_settings_test(prop)
@slow("Actually runs several full-stack fetching tests")
def test_change_notification_settings(self) -> None:
for notification_setting, v in self.user_profile.notification_setting_types.items():
if notification_setting in ["notification_sound", "desktop_icon_count_display"]:
# These settings are tested in their own tests.
continue
schema_checker = self.check_events_dict([
('type', equals('update_global_notifications')),
('notification_name', equals(notification_setting)),
('user', check_string),
('setting', check_bool),
])
do_change_notification_settings(self.user_profile, notification_setting, False)
for setting_value in [True, False]:
events = self.do_test(lambda: do_change_notification_settings(
self.user_profile, notification_setting, setting_value, log=False))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
# Also test with notification_settings_null=True
events = self.do_test(
lambda: do_change_notification_settings(
self.user_profile, notification_setting, setting_value, log=False),
notification_settings_null=True,
state_change_expected=False)
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_notification_sound(self) -> None:
notification_setting = "notification_sound"
schema_checker = self.check_events_dict([
('type', equals('update_global_notifications')),
('notification_name', equals(notification_setting)),
('user', check_string),
('setting', equals("ding")),
])
events = self.do_test(lambda: do_change_notification_settings(
self.user_profile, notification_setting, 'ding', log=False))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_desktop_icon_count_display(self) -> None:
notification_setting = "desktop_icon_count_display"
schema_checker = self.check_events_dict([
('type', equals('update_global_notifications')),
('notification_name', equals(notification_setting)),
('user', check_string),
('setting', equals(2)),
])
events = self.do_test(lambda: do_change_notification_settings(
self.user_profile, notification_setting, 2, log=False))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
schema_checker = self.check_events_dict([
('type', equals('update_global_notifications')),
('notification_name', equals(notification_setting)),
('user', check_string),
('setting', equals(1)),
])
events = self.do_test(lambda: do_change_notification_settings(
self.user_profile, notification_setting, 1, log=False))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
    def test_realm_update_plan_type(self) -> None:
        """Changing the realm plan type emits a realm/update event and is
        reflected in freshly fetched initial state data."""
        realm = self.user_profile.realm
        state_data = fetch_initial_state_data(self.user_profile, None, "", False)
        self.assertEqual(state_data['realm_plan_type'], Realm.SELF_HOSTED)
        self.assertEqual(state_data['plan_includes_wide_organization_logo'], True)
        schema_checker = self.check_events_dict([
            ('type', equals('realm')),
            ('op', equals('update')),
            ('property', equals('plan_type')),
            ('value', equals(Realm.LIMITED)),
            # The plan-type event carries the new upload quota as extra data.
            ('extra_data', check_dict_only([
                ('upload_quota', check_int)
            ])),
        ])
        events = self.do_test(lambda: do_change_plan_type(realm, Realm.LIMITED))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # State fetched after the change must show the LIMITED plan and its
        # restricted logo capability.
        state_data = fetch_initial_state_data(self.user_profile, None, "", False)
        self.assertEqual(state_data['realm_plan_type'], Realm.LIMITED)
        self.assertEqual(state_data['plan_includes_wide_organization_logo'], False)
    def test_realm_emoji_events(self) -> None:
        """Adding and removing a realm emoji each send a realm_emoji/update
        event; the same checker is reused because both ops share one shape."""
        schema_checker = self.check_events_dict([
            ('type', equals('realm_emoji')),
            ('op', equals('update')),
            ('realm_emoji', check_dict([])),
        ])
        author = self.example_user('iago')
        with get_test_image_file('img.png') as img_file:
            events = self.do_test(lambda: check_add_realm_emoji(self.user_profile.realm,
                                                                "my_emoji",
                                                                author,
                                                                img_file))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        events = self.do_test(lambda: do_remove_realm_emoji(self.user_profile.realm, "my_emoji"))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
def test_realm_filter_events(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('realm_filters')),
('realm_filters', check_list(None)), # TODO: validate tuples in the list
])
events = self.do_test(lambda: do_add_realm_filter(self.user_profile.realm, "#(?P<id>[123])",
"https://realm.com/my_realm_filter/%(id)s"))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
self.do_test(lambda: do_remove_realm_filter(self.user_profile.realm, "#(?P<id>[123])"))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
    def test_realm_domain_events(self) -> None:
        """Check the realm_domains events for the add, change, and remove ops."""
        schema_checker = self.check_events_dict([
            ('type', equals('realm_domains')),
            ('op', equals('add')),
            ('realm_domain', check_dict_only([
                ('domain', check_string),
                ('allow_subdomains', check_bool),
            ])),
        ])
        events = self.do_test(lambda: do_add_realm_domain(
            self.user_profile.realm, 'zulip.org', False))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # The change op sends the updated domain record with exact values.
        schema_checker = self.check_events_dict([
            ('type', equals('realm_domains')),
            ('op', equals('change')),
            ('realm_domain', check_dict_only([
                ('domain', equals('zulip.org')),
                ('allow_subdomains', equals(True)),
            ])),
        ])
        test_domain = RealmDomain.objects.get(realm=self.user_profile.realm,
                                              domain='zulip.org')
        events = self.do_test(lambda: do_change_realm_domain(test_domain, True))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # The remove op sends only the bare domain name, not a full dict.
        schema_checker = self.check_events_dict([
            ('type', equals('realm_domains')),
            ('op', equals('remove')),
            ('domain', equals('zulip.org')),
        ])
        events = self.do_test(lambda: do_remove_realm_domain(test_domain))
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
def test_create_bot(self) -> None:
def get_bot_created_checker(bot_type: str) -> Validator:
if bot_type == "GENERIC_BOT":
check_services = check_list(sub_validator=None, length=0)
elif bot_type == "OUTGOING_WEBHOOK_BOT":
check_services = check_list(check_dict_only([
('base_url', check_url),
('interface', check_int),
('token', check_string),
]), length=1)
elif bot_type == "EMBEDDED_BOT":
check_services = check_list(check_dict_only([
('service_name', check_string),
('config_data', check_dict(value_validator=check_string)),
]), length=1)
return self.check_events_dict([
('type', equals('realm_bot')),
('op', equals('add')),
('bot', check_dict_only([
('email', check_string),
('user_id', check_int),
('bot_type', check_int),
('full_name', check_string),
('is_active', check_bool),
('api_key', check_string),
('default_sending_stream', check_none_or(check_string)),
('default_events_register_stream', check_none_or(check_string)),
('default_all_public_streams', check_bool),
('avatar_url', check_string),
('owner', check_string),
('services', check_services),
])),
])
action = lambda: self.create_bot('test')
events = self.do_test(action, num_events=3)
error = get_bot_created_checker(bot_type="GENERIC_BOT")('events[1]', events[1])
self.assert_on_error(error)
action = lambda: self.create_bot('test_outgoing_webhook',
full_name='Outgoing Webhook Bot',
payload_url=ujson.dumps('https://foo.bar.com'),
interface_type=Service.GENERIC,
bot_type=UserProfile.OUTGOING_WEBHOOK_BOT)
events = self.do_test(action, num_events=3)
# The third event is the second call of notify_created_bot, which contains additional
# data for services (in contrast to the first call).
error = get_bot_created_checker(bot_type="OUTGOING_WEBHOOK_BOT")('events[2]', events[2])
self.assert_on_error(error)
action = lambda: self.create_bot('test_embedded',
full_name='Embedded Bot',
service_name='helloworld',
config_data=ujson.dumps({'foo': 'bar'}),
bot_type=UserProfile.EMBEDDED_BOT)
events = self.do_test(action, num_events=3)
error = get_bot_created_checker(bot_type="EMBEDDED_BOT")('events[2]', events[2])
self.assert_on_error(error)
def test_change_bot_full_name(self) -> None:
bot = self.create_bot('test')
action = lambda: do_change_full_name(bot, 'New Bot Name', self.user_profile)
events = self.do_test(action, num_events=2)
error = self.realm_bot_schema('full_name', check_string)('events[1]', events[1])
self.assert_on_error(error)
def test_regenerate_bot_api_key(self) -> None:
bot = self.create_bot('test')
action = lambda: do_regenerate_api_key(bot, self.user_profile)
events = self.do_test(action)
error = self.realm_bot_schema('api_key', check_string)('events[0]', events[0])
self.assert_on_error(error)
def test_change_bot_avatar_source(self) -> None:
bot = self.create_bot('test')
action = lambda: do_change_avatar_fields(bot, bot.AVATAR_FROM_USER)
events = self.do_test(action, num_events=2)
error = self.realm_bot_schema('avatar_url', check_string)('events[0]', events[0])
self.assertEqual(events[1]['type'], 'realm_user')
self.assert_on_error(error)
def test_change_realm_icon_source(self) -> None:
action = lambda: do_change_icon_source(self.user_profile.realm, Realm.ICON_UPLOADED)
events = self.do_test(action, state_change_expected=True)
schema_checker = self.check_events_dict([
('type', equals('realm')),
('op', equals('update_dict')),
('property', equals('icon')),
('data', check_dict_only([
('icon_url', check_string),
('icon_source', check_string),
])),
])
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_realm_day_mode_logo_source(self) -> None:
action = lambda: do_change_logo_source(self.user_profile.realm, Realm.LOGO_UPLOADED, False)
events = self.do_test(action, state_change_expected=True)
schema_checker = self.check_events_dict([
('type', equals('realm')),
('op', equals('update_dict')),
('property', equals('logo')),
('data', check_dict_only([
('logo_url', check_string),
('logo_source', check_string),
])),
])
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_realm_night_mode_logo_source(self) -> None:
action = lambda: do_change_logo_source(self.user_profile.realm, Realm.LOGO_UPLOADED, True)
events = self.do_test(action, state_change_expected=True)
schema_checker = self.check_events_dict([
('type', equals('realm')),
('op', equals('update_dict')),
('property', equals('night_logo')),
('data', check_dict_only([
('night_logo_url', check_string),
('night_logo_source', check_string),
])),
])
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_change_bot_default_all_public_streams(self) -> None:
bot = self.create_bot('test')
action = lambda: do_change_default_all_public_streams(bot, True)
events = self.do_test(action)
error = self.realm_bot_schema('default_all_public_streams', check_bool)('events[0]', events[0])
self.assert_on_error(error)
def test_change_bot_default_sending_stream(self) -> None:
bot = self.create_bot('test')
stream = get_stream("Rome", bot.realm)
action = lambda: do_change_default_sending_stream(bot, stream)
events = self.do_test(action)
error = self.realm_bot_schema('default_sending_stream', check_string)('events[0]', events[0])
self.assert_on_error(error)
action = lambda: do_change_default_sending_stream(bot, None)
events = self.do_test(action)
error = self.realm_bot_schema('default_sending_stream', equals(None))('events[0]', events[0])
self.assert_on_error(error)
def test_change_bot_default_events_register_stream(self) -> None:
bot = self.create_bot('test')
stream = get_stream("Rome", bot.realm)
action = lambda: do_change_default_events_register_stream(bot, stream)
events = self.do_test(action)
error = self.realm_bot_schema('default_events_register_stream', check_string)('events[0]', events[0])
self.assert_on_error(error)
action = lambda: do_change_default_events_register_stream(bot, None)
events = self.do_test(action)
error = self.realm_bot_schema('default_events_register_stream', equals(None))('events[0]', events[0])
self.assert_on_error(error)
def test_change_bot_owner(self) -> None:
change_bot_owner_checker_user = self.check_events_dict([
('type', equals('realm_user')),
('op', equals('update')),
('person', check_dict_only([
('user_id', check_int),
('bot_owner_id', check_int),
])),
])
change_bot_owner_checker_bot = self.check_events_dict([
('type', equals('realm_bot')),
('op', equals('update')),
('bot', check_dict_only([
('email', check_string),
('user_id', check_int),
('owner_id', check_int),
])),
])
self.user_profile = self.example_user('iago')
owner = self.example_user('hamlet')
bot = self.create_bot('test')
action = lambda: do_change_bot_owner(bot, owner, self.user_profile)
events = self.do_test(action, num_events=2)
error = change_bot_owner_checker_bot('events[0]', events[0])
self.assert_on_error(error)
error = change_bot_owner_checker_user('events[1]', events[1])
self.assert_on_error(error)
change_bot_owner_checker_bot = self.check_events_dict([
('type', equals('realm_bot')),
('op', equals('delete')),
('bot', check_dict_only([
('email', check_string),
('user_id', check_int),
])),
])
self.user_profile = self.example_user('aaron')
owner = self.example_user('hamlet')
bot = self.create_bot('test1', full_name='Test1 Testerson')
action = lambda: do_change_bot_owner(bot, owner, self.user_profile)
events = self.do_test(action, num_events=2)
error = change_bot_owner_checker_bot('events[0]', events[0])
self.assert_on_error(error)
error = change_bot_owner_checker_user('events[1]', events[1])
self.assert_on_error(error)
check_services = check_list(sub_validator=None, length=0)
change_bot_owner_checker_bot = self.check_events_dict([
('type', equals('realm_bot')),
('op', equals('add')),
('bot', check_dict_only([
('email', check_string),
('user_id', check_int),
('bot_type', check_int),
('full_name', check_string),
('is_active', check_bool),
('api_key', check_string),
('default_sending_stream', check_none_or(check_string)),
('default_events_register_stream', check_none_or(check_string)),
('default_all_public_streams', check_bool),
('avatar_url', check_string),
('owner', check_string),
('services', check_services),
])),
])
previous_owner = self.example_user('aaron')
self.user_profile = self.example_user('hamlet')
bot = self.create_test_bot('test2', previous_owner, full_name='Test2 Testerson')
action = lambda: do_change_bot_owner(bot, self.user_profile, previous_owner)
events = self.do_test(action, num_events=2)
error = change_bot_owner_checker_bot('events[0]', events[0])
self.assert_on_error(error)
error = change_bot_owner_checker_user('events[1]', events[1])
self.assert_on_error(error)
def test_do_update_outgoing_webhook_service(self):
# type: () -> None
update_outgoing_webhook_service_checker = self.check_events_dict([
('type', equals('realm_bot')),
('op', equals('update')),
('bot', check_dict_only([
('email', check_string),
('user_id', check_int),
('services', check_list(check_dict_only([
('base_url', check_url),
('interface', check_int),
('token', check_string),
]))),
])),
])
self.user_profile = self.example_user('iago')
bot = self.create_test_bot('test', self.user_profile,
full_name='Test Bot',
bot_type=UserProfile.OUTGOING_WEBHOOK_BOT,
payload_url=ujson.dumps('http://hostname.domain2.com'),
interface_type=Service.GENERIC,
)
action = lambda: do_update_outgoing_webhook_service(bot, 2, 'http://hostname.domain2.com')
events = self.do_test(action)
error = update_outgoing_webhook_service_checker('events[0]', events[0])
self.assert_on_error(error)
def test_do_deactivate_user(self) -> None:
bot_deactivate_checker = self.check_events_dict([
('type', equals('realm_bot')),
('op', equals('remove')),
('bot', check_dict_only([
('email', check_string),
('full_name', check_string),
('user_id', check_int),
])),
])
bot = self.create_bot('test')
action = lambda: do_deactivate_user(bot)
events = self.do_test(action, num_events=2)
error = bot_deactivate_checker('events[1]', events[1])
self.assert_on_error(error)
    def test_do_reactivate_user(self) -> None:
        """Reactivating a deactivated bot emits a realm_bot/add event with the
        bot's full data (including its services list)."""
        bot_reactivate_checker = self.check_events_dict([
            ('type', equals('realm_bot')),
            ('op', equals('add')),
            ('bot', check_dict_only([
                ('email', check_string),
                ('user_id', check_int),
                ('bot_type', check_int),
                ('full_name', check_string),
                ('is_active', check_bool),
                ('api_key', check_string),
                ('default_sending_stream', check_none_or(check_string)),
                ('default_events_register_stream', check_none_or(check_string)),
                ('default_all_public_streams', check_bool),
                ('avatar_url', check_string),
                ('owner', check_none_or(check_string)),
                ('services', check_list(check_dict_only([
                    ('base_url', check_url),
                    ('interface', check_int),
                ]))),
            ])),
        ])
        bot = self.create_bot('test')
        # Deactivate first so the reactivation below is a real state change.
        do_deactivate_user(bot)
        action = lambda: do_reactivate_user(bot)
        events = self.do_test(action, num_events=2)
        error = bot_reactivate_checker('events[1]', events[1])
        self.assert_on_error(error)
def test_do_mark_hotspot_as_read(self) -> None:
self.user_profile.tutorial_status = UserProfile.TUTORIAL_WAITING
self.user_profile.save(update_fields=['tutorial_status'])
schema_checker = self.check_events_dict([
('type', equals('hotspots')),
('hotspots', check_list(check_dict_only([
('name', check_string),
('title', check_string),
('description', check_string),
('delay', check_float),
]))),
])
events = self.do_test(lambda: do_mark_hotspot_as_read(self.user_profile, 'intro_reply'))
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
    def test_rename_stream(self) -> None:
        """Renaming a stream emits three events: an email_address update, a
        name update, and a notification-bot message announcing the rename."""
        stream = self.make_stream('old_name')
        new_name = u'stream with a brand new name'
        self.subscribe(self.user_profile, stream.name)
        notification = '<p><span class="user-mention silent" data-user-id="{user_id}">King Hamlet</span> renamed stream <strong>old_name</strong> to <strong>stream with a brand new name</strong>.</p>'
        notification = notification.format(user_id=self.user_profile.id)
        action = lambda: do_rename_stream(stream, new_name, self.user_profile)
        events = self.do_test(action, num_events=3)
        # Event 0: the stream's email address changes along with its name.
        schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('update')),
            ('property', equals('email_address')),
            ('value', check_string),
            ('stream_id', check_int),
            ('name', equals('old_name')),
        ])
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Event 1: the name property update, still keyed by the old name.
        schema_checker = self.check_events_dict([
            ('type', equals('stream')),
            ('op', equals('update')),
            ('property', equals('name')),
            ('value', equals(new_name)),
            ('name', equals('old_name')),
            ('stream_id', check_int),
        ])
        error = schema_checker('events[1]', events[1])
        self.assert_on_error(error)
        # Event 2: the rendered notification message sent by notification-bot
        # to the renamed stream's "stream events" topic.
        schema_checker = check_dict([
            ('flags', check_list(check_string)),
            ('type', equals('message')),
            ('message', check_dict([
                ('timestamp', check_int),
                ('content', equals(notification)),
                ('content_type', equals('text/html')),
                ('sender_email', equals('notification-bot@zulip.com')),
                ('sender_id', check_int),
                ('sender_short_name', equals('notification-bot')),
                ('display_recipient', equals(new_name)),
                ('id', check_int),
                ('stream_id', check_int),
                ('sender_realm_str', check_string),
                ('sender_full_name', equals('Notification Bot')),
                ('is_me_message', equals(False)),
                ('type', equals('stream')),
                ('submessages', check_list(check_string)),
                (TOPIC_LINKS, check_list(check_url)),
                ('avatar_url', check_url),
                ('reactions', check_list(None)),
                ('client', equals('Internal')),
                (TOPIC_NAME, equals('stream events')),
                ('recipient_id', check_int)
            ])),
            ('id', check_int)
        ])
        error = schema_checker('events[2]', events[2])
        self.assert_on_error(error)
def test_deactivate_stream_neversubscribed(self) -> None:
stream = self.make_stream('old_name')
action = lambda: do_deactivate_stream(stream)
events = self.do_test(action)
schema_checker = self.check_events_dict([
('type', equals('stream')),
('op', equals('delete')),
('streams', check_list(check_dict([]))),
])
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_subscribe_other_user_never_subscribed(self) -> None:
action = lambda: self.subscribe(self.example_user("othello"), u"test_stream")
events = self.do_test(action, num_events=2)
peer_add_schema_checker = self.check_events_dict([
('type', equals('subscription')),
('op', equals('peer_add')),
('user_id', check_int),
('subscriptions', check_list(check_string)),
])
error = peer_add_schema_checker('events[1]', events[1])
self.assert_on_error(error)
    @slow("Actually several tests combined together")
    def test_subscribe_events(self) -> None:
        """Run the full subscription-event suite with subscriber lists included."""
        self.do_test_subscribe_events(include_subscribers=True)
    @slow("Actually several tests combined together")
    def test_subscribe_events_no_include_subscribers(self) -> None:
        """Run the full subscription-event suite without subscriber lists."""
        self.do_test_subscribe_events(include_subscribers=False)
def do_test_subscribe_events(self, include_subscribers: bool) -> None:
subscription_fields = [
('color', check_string),
('description', check_string),
('rendered_description', check_string),
('email_address', check_string),
('invite_only', check_bool),
('is_web_public', check_bool),
('is_announcement_only', check_bool),
('is_muted', check_bool),
('in_home_view', check_bool),
('name', check_string),
('audible_notifications', check_none_or(check_bool)),
('email_notifications', check_none_or(check_bool)),
('desktop_notifications', check_none_or(check_bool)),
('push_notifications', check_none_or(check_bool)),
('stream_id', check_int),
('first_message_id', check_none_or(check_int)),
('history_public_to_subscribers', check_bool),
('pin_to_top', check_bool),
('stream_weekly_traffic', check_none_or(check_int)),
('is_old_stream', check_bool),
]
if include_subscribers:
subscription_fields.append(('subscribers', check_list(check_int)))
subscription_schema_checker = check_list(
check_dict_only(subscription_fields),
)
stream_create_schema_checker = self.check_events_dict([
('type', equals('stream')),
('op', equals('create')),
('streams', check_list(check_dict_only([
('name', check_string),
('stream_id', check_int),
('invite_only', check_bool),
('description', check_string),
('rendered_description', check_string),
]))),
])
add_schema_checker = self.check_events_dict([
('type', equals('subscription')),
('op', equals('add')),
('subscriptions', subscription_schema_checker),
])
remove_schema_checker = self.check_events_dict([
('type', equals('subscription')),
('op', equals('remove')),
('subscriptions', check_list(
check_dict_only([
('name', equals('test_stream')),
('stream_id', check_int),
]),
)),
])
peer_add_schema_checker = self.check_events_dict([
('type', equals('subscription')),
('op', equals('peer_add')),
('user_id', check_int),
('subscriptions', check_list(check_string)),
])
peer_remove_schema_checker = self.check_events_dict([
('type', equals('subscription')),
('op', equals('peer_remove')),
('user_id', check_int),
('subscriptions', check_list(check_string)),
])
stream_update_schema_checker = self.check_events_dict([
('type', equals('stream')),
('op', equals('update')),
('property', equals('description')),
('value', check_string),
('rendered_description', check_string),
('stream_id', check_int),
('name', check_string),
])
stream_update_invite_only_schema_checker = self.check_events_dict([
('type', equals('stream')),
('op', equals('update')),
('property', equals('invite_only')),
('stream_id', check_int),
('name', check_string),
('value', check_bool),
('history_public_to_subscribers', check_bool),
])
stream_update_is_announcement_only_schema_checker = self.check_events_dict([
('type', equals('stream')),
('op', equals('update')),
('property', equals('is_announcement_only')),
('stream_id', check_int),
('name', check_string),
('value', check_bool),
])
# Subscribe to a totally new stream, so it's just Hamlet on it
action = lambda: self.subscribe(self.example_user("hamlet"), "test_stream")
events = self.do_test(action, event_types=["subscription", "realm_user"],
include_subscribers=include_subscribers)
error = add_schema_checker('events[0]', events[0])
self.assert_on_error(error)
action = lambda: self.subscribe(self.example_user("othello"), "test_stream")
events = self.do_test(action,
include_subscribers=include_subscribers,
state_change_expected=include_subscribers,
)
error = peer_add_schema_checker('events[0]', events[0])
self.assert_on_error(error)
stream = get_stream("test_stream", self.user_profile.realm)
action = lambda: bulk_remove_subscriptions(
[self.example_user('othello')],
[stream],
get_client("website"))
events = self.do_test(action,
include_subscribers=include_subscribers,
state_change_expected=include_subscribers,
)
error = peer_remove_schema_checker('events[0]', events[0])
self.assert_on_error(error)
action = lambda: bulk_remove_subscriptions(
[self.example_user('hamlet')],
[stream],
get_client("website"))
events = self.do_test(action,
include_subscribers=include_subscribers,
num_events=3)
error = remove_schema_checker('events[0]', events[0])
self.assert_on_error(error)
action = lambda: self.subscribe(self.example_user("hamlet"), "test_stream")
events = self.do_test(action,
include_subscribers=include_subscribers,
num_events=2)
error = add_schema_checker('events[1]', events[1])
self.assert_on_error(error)
action = lambda: do_change_stream_description(stream, u'new description')
events = self.do_test(action,
include_subscribers=include_subscribers)
error = stream_update_schema_checker('events[0]', events[0])
self.assert_on_error(error)
action = lambda: do_change_stream_invite_only(stream, True, history_public_to_subscribers=True)
events = self.do_test(action,
include_subscribers=include_subscribers)
error = stream_update_invite_only_schema_checker('events[0]', events[0])
self.assert_on_error(error)
action = lambda: do_change_stream_announcement_only(stream, True)
events = self.do_test(action,
include_subscribers=include_subscribers)
error = stream_update_is_announcement_only_schema_checker('events[0]', events[0])
self.assert_on_error(error)
stream = self.make_stream("private", self.user_profile.realm, invite_only=True)
user_profile = self.example_user('hamlet')
action = lambda: bulk_add_subscriptions([stream], [user_profile])
events = self.do_test(action, include_subscribers=include_subscribers,
num_events=2)
error = stream_create_schema_checker('events[0]', events[0])
error = add_schema_checker('events[1]', events[1])
self.assert_on_error(error)
def test_do_delete_message_stream(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('delete_message')),
('message_id', check_int),
('sender', check_string),
('sender_id', check_int),
('message_type', equals("stream")),
('stream_id', check_int),
('topic', check_string),
])
msg_id = self.send_stream_message("hamlet@zulip.com", "Verona")
message = Message.objects.get(id=msg_id)
events = self.do_test(
lambda: do_delete_messages(self.user_profile, [message]),
state_change_expected=True,
)
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_do_delete_message_personal(self) -> None:
schema_checker = self.check_events_dict([
('type', equals('delete_message')),
('message_id', check_int),
('sender', check_string),
('sender_id', check_int),
('message_type', equals("private")),
('recipient_id', check_int),
])
msg_id = self.send_personal_message(
self.example_email("cordelia"),
self.user_profile.email,
"hello",
)
message = Message.objects.get(id=msg_id)
events = self.do_test(
lambda: do_delete_messages(self.user_profile, [message]),
state_change_expected=True,
)
error = schema_checker('events[0]', events[0])
self.assert_on_error(error)
def test_do_delete_message_no_max_id(self) -> None:
user_profile = self.example_user('aaron')
# Delete all historical messages for this user
user_profile = self.example_user('hamlet')
UserMessage.objects.filter(user_profile=user_profile).delete()
msg_id = self.send_stream_message("hamlet@zulip.com", "Verona")
message = Message.objects.get(id=msg_id)
self.do_test(
lambda: do_delete_messages(self.user_profile, [message]),
state_change_expected=True,
)
result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
self.assertEqual(result['max_message_id'], -1)
    def test_add_attachment(self) -> None:
        """Follow an attachment through its lifecycle — upload (add event),
        claim by a message (update event), and deletion (remove event)."""
        schema_checker = self.check_events_dict([
            ('type', equals('attachment')),
            ('op', equals('add')),
            ('attachment', check_dict_only([
                ('id', check_int),
                ('name', check_string),
                ('size', check_int),
                ('path_id', check_string),
                ('create_time', check_float),
                ('messages', check_list(check_dict_only([
                    ('id', check_int),
                    ('name', check_float),
                ]))),
            ])),
            # "zulip!" is 6 bytes, so the quota usage is exactly 6.
            ('upload_space_used', equals(6)),
        ])
        self.login(self.example_email("hamlet"))
        fp = StringIO("zulip!")
        fp.name = "zulip.txt"
        # Mutable holder so the inner function can pass the URI back out.
        data = {'uri': None}
        def do_upload() -> None:
            # Upload the file via the API and record the returned URI.
            result = self.client_post("/json/user_uploads", {'file': fp})
            self.assert_json_success(result)
            self.assertIn("uri", result.json())
            uri = result.json()["uri"]
            base = '/user_uploads/'
            self.assertEqual(base, uri[:len(base)])
            data['uri'] = uri
        events = self.do_test(
            lambda: do_upload(),
            num_events=1, state_change_expected=False)
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Verify that the DB has the attachment marked as unclaimed
        entry = Attachment.objects.get(file_name='zulip.txt')
        self.assertEqual(entry.is_claimed(), False)
        # Now we send an actual message using this attachment.
        schema_checker = self.check_events_dict([
            ('type', equals('attachment')),
            ('op', equals('update')),
            ('attachment', check_dict_only([
                ('id', check_int),
                ('name', check_string),
                ('size', check_int),
                ('path_id', check_string),
                ('create_time', check_float),
                ('messages', check_list(check_dict_only([
                    ('id', check_int),
                    ('name', check_float),
                ]))),
            ])),
            ('upload_space_used', equals(6)),
        ])
        self.subscribe(self.example_user("hamlet"), "Denmark")
        body = "First message ...[zulip.txt](http://localhost:9991" + data['uri'] + ")"
        events = self.do_test(
            lambda: self.send_stream_message(self.example_email("hamlet"), "Denmark", body, "test"),
            num_events=2)
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
        # Now remove the attachment
        schema_checker = self.check_events_dict([
            ('type', equals('attachment')),
            ('op', equals('remove')),
            ('attachment', check_dict_only([
                ('id', check_int),
            ])),
            # After deletion, no quota is in use.
            ('upload_space_used', equals(0)),
        ])
        events = self.do_test(
            lambda: self.client_delete("/json/attachments/%s" % (entry.id,)),
            num_events=1, state_change_expected=False)
        error = schema_checker('events[0]', events[0])
        self.assert_on_error(error)
    def test_notify_realm_export(self) -> None:
        """Check the realm_export events for starting an export and for
        deleting a completed export."""
        schema_checker = self.check_events_dict([
            ('type', equals('realm_export')),
            ('exports', check_list(check_dict_only([
                ('id', check_int),
                ('export_time', check_float),
                ('acting_user_id', check_int),
                ('export_url', check_string),
                # An in-progress/completed export has no deletion timestamp.
                ('deleted_timestamp', equals(None)),
            ]))),
        ])
        do_change_is_admin(self.user_profile, True)
        self.login(self.user_profile.email)
        # Patch the actual export so the endpoint returns a dummy tarball
        # without doing real export work.
        with mock.patch('zerver.lib.export.do_export_realm',
                        return_value=create_dummy_file('test-export.tar.gz')):
            with stdout_suppressed():
                events = self.do_test(
                    lambda: self.client_post('/json/export/realm'),
                    state_change_expected=True, num_events=2)
        # The first event is a message from notification-bot.
        error = schema_checker('events[1]', events[1])
        self.assert_on_error(error)
        # Now we check the deletion of the export.
        deletion_schema_checker = self.check_events_dict([
            ('type', equals('realm_export')),
            ('exports', check_list(check_dict_only([
                ('id', check_int),
                ('export_time', check_float),
                ('acting_user_id', check_int),
                ('export_url', check_string),
                # Deletion sets a real timestamp.
                ('deleted_timestamp', check_float),
            ]))),
        ])
        audit_log_entry = RealmAuditLog.objects.filter(
            event_type=RealmAuditLog.REALM_EXPORTED).first()
        events = self.do_test(
            lambda: self.client_delete('/json/export/realm/{id}'.format(id=audit_log_entry.id)),
            state_change_expected=False, num_events=1)
        error = deletion_schema_checker('events[0]', events[0])
        self.assert_on_error(error)
class FetchInitialStateDataTest(ZulipTestCase):
    """Tests for fetch_initial_state_data covering permission-dependent fields."""
    # Non-admin users don't have access to all bots
    def test_realm_bots_non_admin(self) -> None:
        """Non-admins see no realm_bots and no API keys leak into the state."""
        user_profile = self.example_user('cordelia')
        self.assertFalse(user_profile.is_realm_admin)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
        self.assert_length(result['realm_bots'], 0)
        # Make sure the bot's API key isn't present anywhere in the payload.
        api_key = get_api_key(self.notification_bot())
        self.assertNotIn(api_key, str(result))
    # NOTE(review): this name looks truncated — probably meant
    # test_realm_bots_admin; verify before renaming.
    def test_realm_bots_e(self) -> None:
        """Admins see the realm's bots in realm_bots."""
        user_profile = self.example_user('hamlet')
        do_change_is_admin(user_profile, True)
        self.assertTrue(user_profile.is_realm_admin)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
        self.assertTrue(len(result['realm_bots']) > 2)
    def test_max_message_id_with_no_history(self) -> None:
        """A user with no messages gets max_message_id == -1."""
        user_profile = self.example_user('aaron')
        UserMessage.objects.filter(user_profile=user_profile).delete()
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
        self.assertEqual(result['max_message_id'], -1)
    def test_delivery_email_presence_for_non_admins(self) -> None:
        """Non-admins never see delivery_email, regardless of the realm's
        email_address_visibility setting."""
        user_profile = self.example_user('aaron')
        self.assertFalse(user_profile.is_realm_admin)
        do_set_realm_property(user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
        for key, value in result['raw_users'].items():
            self.assertNotIn('delivery_email', value)
        do_set_realm_property(user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
        for key, value in result['raw_users'].items():
            self.assertNotIn('delivery_email', value)
    def test_delivery_email_presence_for_admins(self) -> None:
        """Admins see delivery_email only when visibility is ADMINS."""
        user_profile = self.example_user('iago')
        self.assertTrue(user_profile.is_realm_admin)
        do_set_realm_property(user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
        for key, value in result['raw_users'].items():
            self.assertNotIn('delivery_email', value)
        do_set_realm_property(user_profile.realm, "email_address_visibility",
                              Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS)
        result = fetch_initial_state_data(user_profile, None, "", client_gravatar=False)
        for key, value in result['raw_users'].items():
            self.assertIn('delivery_email', value)
class GetUnreadMsgsTest(ZulipTestCase):
    """Tests for get_raw_unread_data() / aggregate_unread_data().

    Covers stream, huddle, and 1:1 personal messages, plus the interaction
    of unread counts with muted streams, muted topics, and the
    mentioned / wildcard_mentioned / has_alert_word flags.
    """

    def mute_stream(self, user_profile: UserProfile, stream: Stream) -> None:
        # Helper: mark the user's subscription to `stream` as muted.
        recipient = Recipient.objects.get(type_id=stream.id, type=Recipient.STREAM)
        subscription = Subscription.objects.get(
            user_profile=user_profile,
            recipient=recipient
        )
        subscription.is_muted = True
        subscription.save()

    def mute_topic(self, user_profile: UserProfile, stream_name: str,
                   topic_name: str) -> None:
        # Helper: record a topic-level mute for the given stream/topic pair.
        realm = user_profile.realm
        stream = get_stream(stream_name, realm)
        recipient = get_stream_recipient(stream.id)
        add_topic_mute(
            user_profile=user_profile,
            stream_id=stream.id,
            recipient_id=recipient.id,
            topic_name=topic_name,
        )

    def test_raw_unread_stream(self) -> None:
        cordelia = self.example_user('cordelia')
        hamlet = self.example_user('hamlet')
        realm = hamlet.realm

        for stream_name in ['social', 'devel', 'test here']:
            self.subscribe(hamlet, stream_name)
            self.subscribe(cordelia, stream_name)

        all_message_ids = set()
        message_ids = dict()

        tups = [
            ('social', 'lunch'),
            ('test here', 'bla'),
            ('devel', 'python'),
            ('devel', 'ruby'),
        ]

        # Send three messages per (stream, topic) pair: 4 * 3 = 12 total.
        for stream_name, topic_name in tups:
            message_ids[topic_name] = [
                self.send_stream_message(
                    sender_email=cordelia.email,
                    stream_name=stream_name,
                    topic_name=topic_name,
                ) for i in range(3)
            ]
            all_message_ids |= set(message_ids[topic_name])

        self.assertEqual(len(all_message_ids), 12)

        self.mute_stream(
            user_profile=hamlet,
            stream=get_stream('test here', realm),
        )

        self.mute_topic(
            user_profile=hamlet,
            stream_name='devel',
            topic_name='ruby',
        )

        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )

        stream_dict = raw_unread_data['stream_dict']

        # stream_dict tracks every unread message, muted or not.
        self.assertEqual(
            set(stream_dict.keys()),
            all_message_ids,
        )

        # unmuted_stream_msgs excludes the muted stream ('test here') and
        # the muted topic ('devel'/'ruby').
        self.assertEqual(
            raw_unread_data['unmuted_stream_msgs'],
            set(message_ids['python']) | set(message_ids['lunch']),
        )

        self.assertEqual(
            stream_dict[message_ids['lunch'][0]],
            dict(
                sender_id=cordelia.id,
                stream_id=get_stream('social', realm).id,
                topic='lunch',
            )
        )

    def test_raw_unread_huddle(self) -> None:
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        hamlet = self.example_user('hamlet')
        prospero = self.example_user('prospero')

        huddle1_message_ids = [
            self.send_huddle_message(
                cordelia.email,
                [hamlet.email, othello.email]
            )
            for i in range(3)
        ]

        huddle2_message_ids = [
            self.send_huddle_message(
                cordelia.email,
                [hamlet.email, prospero.email]
            )
            for i in range(3)
        ]

        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )

        huddle_dict = raw_unread_data['huddle_dict']

        self.assertEqual(
            set(huddle_dict.keys()),
            set(huddle1_message_ids) | set(huddle2_message_ids)
        )

        # Huddles are keyed by the sorted, comma-joined participant ids.
        huddle_string = ','.join(
            str(uid)
            for uid in sorted([cordelia.id, hamlet.id, othello.id])
        )

        self.assertEqual(
            huddle_dict[huddle1_message_ids[0]],
            dict(user_ids_string=huddle_string),
        )

    def test_raw_unread_personal(self) -> None:
        cordelia = self.example_user('cordelia')
        othello = self.example_user('othello')
        hamlet = self.example_user('hamlet')

        cordelia_pm_message_ids = [
            self.send_personal_message(cordelia.email, hamlet.email)
            for i in range(3)
        ]

        othello_pm_message_ids = [
            self.send_personal_message(othello.email, hamlet.email)
            for i in range(3)
        ]

        raw_unread_data = get_raw_unread_data(
            user_profile=hamlet,
        )

        pm_dict = raw_unread_data['pm_dict']

        self.assertEqual(
            set(pm_dict.keys()),
            set(cordelia_pm_message_ids) | set(othello_pm_message_ids)
        )

        self.assertEqual(
            pm_dict[cordelia_pm_message_ids[0]],
            dict(sender_id=cordelia.id),
        )

    def test_unread_msgs(self) -> None:
        cordelia = self.example_user('cordelia')
        sender_id = cordelia.id
        sender_email = cordelia.email
        user_profile = self.example_user('hamlet')
        othello = self.example_user('othello')

        # The result-ordering assertions below rely on these email orderings.
        assert(sender_email < user_profile.email)
        assert(user_profile.email < othello.email)

        pm1_message_id = self.send_personal_message(sender_email, user_profile.email, "hello1")
        pm2_message_id = self.send_personal_message(sender_email, user_profile.email, "hello2")

        muted_stream = self.subscribe(user_profile, 'Muted Stream')
        self.mute_stream(user_profile, muted_stream)
        self.mute_topic(user_profile, 'Denmark', 'muted-topic')

        stream_message_id = self.send_stream_message(sender_email, "Denmark", "hello")
        muted_stream_message_id = self.send_stream_message(sender_email, "Muted Stream", "hello")
        muted_topic_message_id = self.send_stream_message(
            sender_email,
            "Denmark",
            topic_name="muted-topic",
            content="hello",
        )

        huddle_message_id = self.send_huddle_message(
            sender_email,
            [user_profile.email, othello.email],
            'hello3',
        )

        def get_unread_data() -> UnreadMessagesResult:
            # Re-read and aggregate hamlet's unread state from the database.
            raw_unread_data = get_raw_unread_data(user_profile)
            aggregated_data = aggregate_unread_data(raw_unread_data)
            return aggregated_data

        result = get_unread_data()

        # 2 PMs + 1 unmuted stream message + 1 huddle message; the
        # muted-stream and muted-topic messages are excluded from the count.
        self.assertEqual(result['count'], 4)

        unread_pm = result['pms'][0]
        self.assertEqual(unread_pm['sender_id'], sender_id)
        self.assertEqual(unread_pm['unread_message_ids'], [pm1_message_id, pm2_message_id])
        self.assertTrue('sender_ids' not in unread_pm)

        # Muted messages still appear in the per-topic stream data.
        unread_stream = result['streams'][0]
        self.assertEqual(unread_stream['stream_id'], get_stream('Denmark', user_profile.realm).id)
        self.assertEqual(unread_stream['topic'], 'muted-topic')
        self.assertEqual(unread_stream['unread_message_ids'], [muted_topic_message_id])
        self.assertEqual(unread_stream['sender_ids'], [sender_id])

        unread_stream = result['streams'][1]
        self.assertEqual(unread_stream['stream_id'], get_stream('Denmark', user_profile.realm).id)
        self.assertEqual(unread_stream['topic'], 'test')
        self.assertEqual(unread_stream['unread_message_ids'], [stream_message_id])
        self.assertEqual(unread_stream['sender_ids'], [sender_id])

        unread_stream = result['streams'][2]
        self.assertEqual(unread_stream['stream_id'], get_stream('Muted Stream', user_profile.realm).id)
        self.assertEqual(unread_stream['topic'], 'test')
        self.assertEqual(unread_stream['unread_message_ids'], [muted_stream_message_id])
        self.assertEqual(unread_stream['sender_ids'], [sender_id])

        huddle_string = ','.join(str(uid) for uid in sorted([sender_id, user_profile.id, othello.id]))

        unread_huddle = result['huddles'][0]
        self.assertEqual(unread_huddle['user_ids_string'], huddle_string)
        self.assertEqual(unread_huddle['unread_message_ids'], [huddle_message_id])
        self.assertTrue('sender_ids' not in unread_huddle)

        self.assertEqual(result['mentions'], [])

        # Flag matrix on an ordinary (unmuted) stream message: 'mentioned'
        # and 'wildcard_mentioned' surface in result['mentions'];
        # 'has_alert_word' does not.
        um = UserMessage.objects.get(
            user_profile_id=user_profile.id,
            message_id=stream_message_id
        )
        um.flags |= UserMessage.flags.mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [stream_message_id])

        um.flags = UserMessage.flags.has_alert_word
        um.save()
        result = get_unread_data()
        # TODO: This should change when we make alert words work better.
        self.assertEqual(result['mentions'], [])

        um.flags = UserMessage.flags.wildcard_mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [stream_message_id])

        um.flags = 0
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        # Test with a muted stream: only a direct mention surfaces;
        # wildcard mentions in muted streams are suppressed.
        um = UserMessage.objects.get(
            user_profile_id=user_profile.id,
            message_id=muted_stream_message_id
        )
        um.flags = UserMessage.flags.mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [muted_stream_message_id])

        um.flags = UserMessage.flags.has_alert_word
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        um.flags = UserMessage.flags.wildcard_mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        um.flags = 0
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        # Test with a muted topic: same behavior as a muted stream.
        um = UserMessage.objects.get(
            user_profile_id=user_profile.id,
            message_id=muted_topic_message_id
        )
        um.flags = UserMessage.flags.mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [muted_topic_message_id])

        um.flags = UserMessage.flags.has_alert_word
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        um.flags = UserMessage.flags.wildcard_mentioned
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])

        um.flags = 0
        um.save()
        result = get_unread_data()
        self.assertEqual(result['mentions'], [])
class ClientDescriptorsTest(ZulipTestCase):
    """Tests for routing message events to client descriptors (event
    queues) and for the per-client rendering options apply_markdown and
    client_gravatar."""

    def test_get_client_info_for_all_public_streams(self) -> None:
        hamlet = self.example_user('hamlet')
        realm = hamlet.realm

        queue_data = dict(
            all_public_streams=True,
            apply_markdown=True,
            client_gravatar=True,
            client_type_name='website',
            event_types=['message'],
            last_connection_time=time.time(),
            queue_timeout=0,
            realm_id=realm.id,
            user_profile_id=hamlet.id,
        )

        client = allocate_client_descriptor(queue_data)

        message_event = dict(
            realm_id=realm.id,
            stream_name='whatever',
        )

        # An all_public_streams queue receives stream events even with an
        # empty `users` list.
        client_info = get_client_info_for_message_event(
            message_event,
            users=[],
        )

        self.assertEqual(len(client_info), 1)

        dct = client_info[client.event_queue.id]
        self.assertEqual(dct['client'].apply_markdown, True)
        self.assertEqual(dct['client'].client_gravatar, True)
        self.assertEqual(dct['client'].user_profile_id, hamlet.id)
        self.assertEqual(dct['flags'], [])
        self.assertEqual(dct['is_sender'], False)

        # A sender_queue_id matching our queue marks this client as sender.
        message_event = dict(
            realm_id=realm.id,
            stream_name='whatever',
            sender_queue_id=client.event_queue.id,
        )

        client_info = get_client_info_for_message_event(
            message_event,
            users=[],
        )
        dct = client_info[client.event_queue.id]
        self.assertEqual(dct['is_sender'], True)

    def test_get_client_info_for_normal_users(self) -> None:
        hamlet = self.example_user('hamlet')
        cordelia = self.example_user('cordelia')
        realm = hamlet.realm

        def test_get_info(apply_markdown: bool, client_gravatar: bool) -> None:
            # Run the whole scenario for one combination of rendering options.
            clear_client_event_queues_for_testing()

            queue_data = dict(
                all_public_streams=False,
                apply_markdown=apply_markdown,
                client_gravatar=client_gravatar,
                client_type_name='website',
                event_types=['message'],
                last_connection_time=time.time(),
                queue_timeout=0,
                realm_id=realm.id,
                user_profile_id=hamlet.id,
            )

            client = allocate_client_descriptor(queue_data)
            message_event = dict(
                realm_id=realm.id,
                stream_name='whatever',
            )

            # An event addressed only to cordelia must not reach hamlet's
            # queue (this queue is not all_public_streams).
            client_info = get_client_info_for_message_event(
                message_event,
                users=[
                    dict(id=cordelia.id),
                ],
            )
            self.assertEqual(len(client_info), 0)

            client_info = get_client_info_for_message_event(
                message_event,
                users=[
                    dict(id=cordelia.id),
                    dict(id=hamlet.id, flags=['mentioned']),
                ],
            )
            self.assertEqual(len(client_info), 1)

            dct = client_info[client.event_queue.id]
            self.assertEqual(dct['client'].apply_markdown, apply_markdown)
            self.assertEqual(dct['client'].client_gravatar, client_gravatar)
            self.assertEqual(dct['client'].user_profile_id, hamlet.id)
            self.assertEqual(dct['flags'], ['mentioned'])
            self.assertEqual(dct['is_sender'], False)

        # Exercise all four combinations of the rendering options.
        test_get_info(apply_markdown=False, client_gravatar=False)
        test_get_info(apply_markdown=True, client_gravatar=False)
        test_get_info(apply_markdown=False, client_gravatar=True)
        test_get_info(apply_markdown=True, client_gravatar=True)

    def test_process_message_event_with_mocked_client_info(self) -> None:
        hamlet = self.example_user("hamlet")

        class MockClient:
            # Minimal stand-in for a client descriptor; just records the
            # events delivered to it.
            def __init__(self, user_profile_id: int,
                         apply_markdown: bool,
                         client_gravatar: bool) -> None:
                self.user_profile_id = user_profile_id
                self.apply_markdown = apply_markdown
                self.client_gravatar = client_gravatar
                self.client_type_name = 'whatever'
                self.events = []  # type: List[Dict[str, Any]]

            def accepts_messages(self) -> bool:
                return True

            def accepts_event(self, event: Dict[str, Any]) -> bool:
                assert(event['type'] == 'message')
                return True

            def add_event(self, event: Dict[str, Any]) -> None:
                self.events.append(event)

        # One client per combination of apply_markdown x client_gravatar.
        client1 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=True,
            client_gravatar=False,
        )

        client2 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=False,
            client_gravatar=False,
        )

        client3 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=True,
            client_gravatar=True,
        )

        client4 = MockClient(
            user_profile_id=hamlet.id,
            apply_markdown=False,
            client_gravatar=True,
        )

        client_info = {
            'client:1': dict(
                client=client1,
                flags=['starred'],
            ),
            'client:2': dict(
                client=client2,
                flags=['has_alert_word'],
            ),
            'client:3': dict(
                client=client3,
                flags=[],
            ),
            'client:4': dict(
                client=client4,
                flags=[],
            ),
        }

        sender = hamlet

        message_event = dict(
            message_dict=dict(
                id=999,
                content='**hello**',
                rendered_content='<b>hello</b>',
                sender_id=sender.id,
                type='stream',
                client='website',

                # NOTE: Some of these fields are clutter, but some
                # will be useful when we let clients specify
                # that they can compute their own gravatar URLs.
                sender_email=sender.email,
                sender_realm_id=sender.realm_id,
                sender_avatar_source=UserProfile.AVATAR_FROM_GRAVATAR,
                sender_avatar_version=1,
                sender_is_mirror_dummy=None,
                recipient_type=None,
                recipient_type_id=None,
            ),
        )

        # Setting users to `[]` bypasses code we don't care about
        users = []

        with mock.patch('zerver.tornado.event_queue.get_client_info_for_message_event',
                        return_value=client_info):
            process_message_event(message_event, users)

        # Clients without client_gravatar get a gravatar.com avatar_url;
        # strip it so the exact-payload comparisons below stay simple.
        for client in [client1, client2]:
            message = client.events[0]['message']
            self.assertIn('gravatar.com', message['avatar_url'])
            message.pop('avatar_url')

        # apply_markdown=True -> rendered HTML content.
        self.assertEqual(client1.events, [
            dict(
                type='message',
                message=dict(
                    type='stream',
                    sender_id=sender.id,
                    sender_email=sender.email,
                    id=999,
                    content='<b>hello</b>',
                    content_type='text/html',
                    client='website',
                ),
                flags=['starred'],
            ),
        ])

        # apply_markdown=False -> raw markdown content.
        self.assertEqual(client2.events, [
            dict(
                type='message',
                message=dict(
                    type='stream',
                    sender_id=sender.id,
                    sender_email=sender.email,
                    id=999,
                    content='**hello**',
                    content_type='text/x-markdown',
                    client='website',
                ),
                flags=['has_alert_word'],
            ),
        ])

        # client_gravatar=True -> avatar_url is None (client computes it).
        self.assertEqual(client3.events, [
            dict(
                type='message',
                message=dict(
                    type='stream',
                    sender_id=sender.id,
                    sender_email=sender.email,
                    avatar_url=None,
                    id=999,
                    content='<b>hello</b>',
                    content_type='text/html',
                    client='website',
                ),
                flags=[],
            ),
        ])

        self.assertEqual(client4.events, [
            dict(
                type='message',
                message=dict(
                    type='stream',
                    sender_id=sender.id,
                    sender_email=sender.email,
                    avatar_url=None,
                    id=999,
                    content='**hello**',
                    content_type='text/x-markdown',
                    client='website',
                ),
                flags=[],
            ),
        ])
class FetchQueriesTest(ZulipTestCase):
    """Pin the number of database queries fetch_initial_state_data()
    issues, both for the full payload and per individual event type."""

    def test_queries(self) -> None:
        user = self.example_user("hamlet")

        self.login(user.email)

        flush_per_request_caches()
        with queries_captured() as queries:
            # Mock always_want so we can record which event types the
            # full fetch considers.
            with mock.patch('zerver.lib.events.always_want') as want_mock:
                fetch_initial_state_data(
                    user_profile=user,
                    event_types=None,
                    queue_id='x',
                    client_gravatar=False,
                )

        self.assert_length(queries, 33)

        # Expected query count for each event type when fetched alone.
        expected_counts = dict(
            alert_words=0,
            custom_profile_fields=1,
            default_streams=1,
            default_stream_groups=1,
            hotspots=0,
            message=1,
            muted_topics=1,
            pointer=0,
            presence=3,
            realm=0,
            realm_bot=1,
            realm_domains=1,
            realm_embedded_bots=0,
            realm_incoming_webhook_bots=0,
            realm_emoji=1,
            realm_filters=1,
            realm_user=3,
            realm_user_groups=2,
            recent_private_conversations=2,
            starred_messages=1,
            stream=2,
            stop_words=0,
            subscription=6,
            update_display_settings=0,
            update_global_notifications=0,
            update_message_flags=5,
            user_status=1,
            zulip_version=0,
        )

        # First positional arg of each always_want() call is the event type.
        wanted_event_types = {
            item[0][0] for item
            in want_mock.call_args_list
        }

        # Keep expected_counts in sync with the event types actually wanted.
        self.assertEqual(wanted_event_types, set(expected_counts))

        for event_type in sorted(wanted_event_types):
            count = expected_counts[event_type]
            flush_per_request_caches()
            with queries_captured() as queries:
                if event_type == 'update_message_flags':
                    # update_message_flags is only fetched alongside message.
                    event_types = ['update_message_flags', 'message']
                else:
                    event_types = [event_type]

                fetch_initial_state_data(
                    user_profile=user,
                    event_types=event_types,
                    queue_id='x',
                    client_gravatar=False,
                )
            self.assert_length(queries, count)
class TestEventsRegisterAllPublicStreamsDefaults(ZulipTestCase):
    """Check how _default_all_public_streams combines the value passed by
    the caller with the user's stored default: an explicit True/False from
    the caller wins; None falls back to the stored default."""

    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user('hamlet')
        self.email = self.user_profile.email

    def _resolve(self, stored_default: bool, passed) -> bool:
        # Persist the user's stored default, then resolve it against the
        # value the caller passed.
        self.user_profile.default_all_public_streams = stored_default
        self.user_profile.save()
        return _default_all_public_streams(self.user_profile, passed)

    def test_use_passed_all_public_true_default_false(self) -> None:
        self.assertTrue(self._resolve(False, True))

    def test_use_passed_all_public_true_default(self) -> None:
        self.assertTrue(self._resolve(True, True))

    def test_use_passed_all_public_false_default_false(self) -> None:
        self.assertFalse(self._resolve(False, False))

    def test_use_passed_all_public_false_default_true(self) -> None:
        self.assertFalse(self._resolve(True, False))

    def test_use_true_default_for_none(self) -> None:
        self.assertTrue(self._resolve(True, None))

    def test_use_false_default_for_none(self) -> None:
        self.assertFalse(self._resolve(False, None))
class TestEventsRegisterNarrowDefaults(ZulipTestCase):
    """Check how _default_narrow combines the narrow passed by the caller
    with the user's default_events_register_stream_id: a non-empty narrow
    always wins; the stored default only fills in an empty narrow."""

    def setUp(self) -> None:
        super().setUp()
        self.user_profile = self.example_user('hamlet')
        self.email = self.user_profile.email
        self.stream = get_stream('Verona', self.user_profile.realm)

    def _resolve(self, default_stream_id, narrow):
        # Persist the user's default stream id, then resolve the narrow.
        self.user_profile.default_events_register_stream_id = default_stream_id
        self.user_profile.save()
        return _default_narrow(self.user_profile, narrow)

    def test_use_passed_narrow_no_default(self) -> None:
        result = self._resolve(None, [[u'stream', u'my_stream']])
        self.assertEqual(result, [[u'stream', u'my_stream']])

    def test_use_passed_narrow_with_default(self) -> None:
        result = self._resolve(self.stream.id, [[u'stream', u'my_stream']])
        self.assertEqual(result, [[u'stream', u'my_stream']])

    def test_use_default_if_narrow_is_empty(self) -> None:
        result = self._resolve(self.stream.id, [])
        self.assertEqual(result, [[u'stream', u'Verona']])

    def test_use_narrow_if_default_is_none(self) -> None:
        result = self._resolve(None, [])
        self.assertEqual(result, [])
class TestGetRawUserDataSystemBotRealm(ZulipTestCase):
    def test_get_raw_user_data_on_system_bot_realm(self) -> None:
        """Every cross-realm bot should appear in the raw user data for
        the system bot realm, flagged as a cross-realm bot."""
        result = get_raw_user_data(get_realm("zulipinternal"), self.example_user('hamlet'), True)

        for bot_email in settings.CROSS_REALM_BOT_EMAILS:
            bot_profile = get_system_bot(bot_email)
            # assertIn produces a useful failure message, unlike
            # assertTrue(x in y) which just reports "False is not true".
            self.assertIn(bot_profile.id, result)
            self.assertTrue(result[bot_profile.id]['is_cross_realm_bot'])
| true | true |
f730098e8822814319a07f20bdedf1174871a9d9 | 674 | py | Python | setup.py | akuhnregnier/jupyter-notebook-tools | c0afc5769d5f53b36fdd0fee976126a2587dfb35 | [
"Apache-2.0"
] | null | null | null | setup.py | akuhnregnier/jupyter-notebook-tools | c0afc5769d5f53b36fdd0fee976126a2587dfb35 | [
"Apache-2.0"
] | null | null | null | setup.py | akuhnregnier/jupyter-notebook-tools | c0afc5769d5f53b36fdd0fee976126a2587dfb35 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
from setuptools import find_packages, setup

NAME = "jupyter-notebook-tools"

# Read the long description explicitly as UTF-8 so installation does not
# depend on the platform's locale encoding (e.g. cp1252 on Windows).
with open("README.md", "r", encoding="utf-8") as f:
    readme = f.read()

setup(
    name=NAME,
    url=f"https://github.com/akuhnregnier/{NAME}",
    author="Alexander Kuhn-Regnier",
    author_email="ahf.kuhnregnier@gmail.com",
    long_description=readme,
    # Tell PyPI the long description is Markdown so it renders correctly.
    long_description_content_type="text/markdown",
    package_dir={"": "src"},
    packages=find_packages("src"),
    entry_points={
        "console_scripts": ["nbstripout-fast=nbstripout.nbstripout_fast:main"]
    },
    python_requires=">=3.6",
    setup_requires=["setuptools-scm"],
    # Version is derived from git tags by setuptools-scm.
    use_scm_version=dict(write_to="src/nbstripout/_version.py"),
    install_requires=(),
)
| 25.923077 | 78 | 0.667656 |
from setuptools import find_packages, setup

NAME = "jupyter-notebook-tools"

# Read the long description explicitly as UTF-8 so installation does not
# depend on the platform's locale encoding (e.g. cp1252 on Windows).
with open("README.md", "r", encoding="utf-8") as f:
    readme = f.read()

setup(
    name=NAME,
    url=f"https://github.com/akuhnregnier/{NAME}",
    author="Alexander Kuhn-Regnier",
    author_email="ahf.kuhnregnier@gmail.com",
    long_description=readme,
    # Tell PyPI the long description is Markdown so it renders correctly.
    long_description_content_type="text/markdown",
    package_dir={"": "src"},
    packages=find_packages("src"),
    entry_points={
        "console_scripts": ["nbstripout-fast=nbstripout.nbstripout_fast:main"]
    },
    python_requires=">=3.6",
    setup_requires=["setuptools-scm"],
    # Version is derived from git tags by setuptools-scm.
    use_scm_version=dict(write_to="src/nbstripout/_version.py"),
    install_requires=(),
)
| true | true |
f7300a2ffabcd3bc05da38c7d5de492b1f25e72f | 901 | py | Python | project/product/views.py | steetstyle/Django-Ecommerce-API | 89c2c973e560346a5be74019709dc9a9f8ab7b2a | [
"MIT"
] | null | null | null | project/product/views.py | steetstyle/Django-Ecommerce-API | 89c2c973e560346a5be74019709dc9a9f8ab7b2a | [
"MIT"
] | null | null | null | project/product/views.py | steetstyle/Django-Ecommerce-API | 89c2c973e560346a5be74019709dc9a9f8ab7b2a | [
"MIT"
] | null | null | null | from django.shortcuts import render
from rest_framework import viewsets
from .models import Product
from .serializers import ProductSerializer
from core.permissions import MarketOwnerPermission
from rest_framework.permissions import IsAuthenticated, AllowAny
from rest_framework import filters
class ProductViewSet(viewsets.ModelViewSet):
    """
    A ModelViewSet for viewing and editing Products.

    Read operations are open to anyone; write operations
    (create/update/partial_update/destroy) require an authenticated
    market owner.
    """
    queryset = Product.objects.all()
    serializer_class = ProductSerializer
    permission_classes = []
    filter_backends = [filters.SearchFilter]
    # NOTE(review): DRF's SearchFilter expects a list/tuple of field names;
    # '__all__' is not documented for search_fields — TODO confirm the
    # intended searchable fields.
    search_fields = '__all__'

    def get_permissions(self):
        # Select permissions per action: mutating actions are restricted.
        if self.action in ['update', 'partial_update', 'destroy', 'create']:
            self.permission_classes = [IsAuthenticated, MarketOwnerPermission]
        else:
            self.permission_classes = [AllowAny]
        # Zero-argument super() fixes the infinite-recursion bug that
        # super(self.__class__, self) causes if this class is subclassed.
        return super().get_permissions()
from rest_framework import viewsets
from .models import Product
from .serializers import ProductSerializer
from core.permissions import MarketOwnerPermission
from rest_framework.permissions import IsAuthenticated, AllowAny
from rest_framework import filters
class ProductViewSet(viewsets.ModelViewSet):
    """Viewing and editing Products: reads are public, writes require an
    authenticated market owner."""
    queryset = Product.objects.all()
    serializer_class = ProductSerializer
    permission_classes = []
    filter_backends = [filters.SearchFilter]
    # NOTE(review): DRF's SearchFilter expects a list/tuple of field names;
    # '__all__' is not documented for search_fields — TODO confirm the
    # intended searchable fields.
    search_fields = '__all__'

    def get_permissions(self):
        # Select permissions per action: mutating actions are restricted.
        if self.action in ['update', 'partial_update', 'destroy', 'create']:
            self.permission_classes = [IsAuthenticated, MarketOwnerPermission]
        else:
            self.permission_classes = [AllowAny]
        # Zero-argument super() fixes the infinite-recursion bug that
        # super(self.__class__, self) causes if this class is subclassed.
        return super().get_permissions()
f7300b19d159e1cd832307bc71cecb184c4499e1 | 48,458 | py | Python | tests/unit/gapic/videointelligence_v1p2beta1/test_video_intelligence_service.py | renovate-bot/python-videointelligence | 8a7920066cffa98c5a98d451a6a924fa82281544 | [
"Apache-2.0"
] | null | null | null | tests/unit/gapic/videointelligence_v1p2beta1/test_video_intelligence_service.py | renovate-bot/python-videointelligence | 8a7920066cffa98c5a98d451a6a924fa82281544 | [
"Apache-2.0"
] | null | null | null | tests/unit/gapic/videointelligence_v1p2beta1/test_video_intelligence_service.py | renovate-bot/python-videointelligence | 8a7920066cffa98c5a98d451a6a924fa82281544 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import operation_async # type: ignore
from google.api_core import operations_v1
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service import (
VideoIntelligenceServiceAsyncClient,
)
from google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service import (
VideoIntelligenceServiceClient,
)
from google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service import (
transports,
)
from google.cloud.videointelligence_v1p2beta1.types import video_intelligence
from google.longrunning import operations_pb2
from google.oauth2 import service_account
from google.protobuf import duration_pb2 # type: ignore
import google.auth
def client_cert_source_callback():
    # Dummy mTLS client-certificate callback: returns a fixed
    # (certificate bytes, private-key bytes) pair for the tests.
    cert_bytes, key_bytes = b"cert bytes", b"key bytes"
    return cert_bytes, key_bytes
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    # Substitute a fake endpoint whenever the client would otherwise point
    # at localhost, so the mTLS endpoint-switching tests can observe a
    # non-local default endpoint.
    default = client.DEFAULT_ENDPOINT
    if "localhost" in default:
        return "foo.googleapis.com"
    return default
def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint maps googleapis.com hosts to their
    .mtls. counterparts, passes non-Google hosts through, and is a no-op
    on None and on already-mtls hosts."""
    convert = VideoIntelligenceServiceClient._get_default_mtls_endpoint
    cases = [
        (None, None),
        ("example.googleapis.com", "example.mtls.googleapis.com"),
        ("example.mtls.googleapis.com", "example.mtls.googleapis.com"),
        ("example.sandbox.googleapis.com", "example.mtls.sandbox.googleapis.com"),
        ("example.mtls.sandbox.googleapis.com", "example.mtls.sandbox.googleapis.com"),
        ("api.example.com", "api.example.com"),
    ]
    for given, expected in cases:
        assert convert(given) == expected
@pytest.mark.parametrize(
    "client_class",
    [VideoIntelligenceServiceClient, VideoIntelligenceServiceAsyncClient,],
)
def test_video_intelligence_service_client_from_service_account_info(client_class):
    """from_service_account_info must hand the parsed credentials to the
    transport and target the default API endpoint."""
    anon_creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    ) as info_factory:
        info_factory.return_value = anon_creds
        client = client_class.from_service_account_info({"valid": True})
        assert isinstance(client, client_class)
        assert client.transport._credentials == anon_creds

    assert client.transport._host == "videointelligence.googleapis.com:443"
@pytest.mark.parametrize(
    "transport_class,transport_name",
    [
        (transports.VideoIntelligenceServiceGrpcTransport, "grpc"),
        (transports.VideoIntelligenceServiceGrpcAsyncIOTransport, "grpc_asyncio"),
    ],
)
def test_video_intelligence_service_client_service_account_always_use_jwt(
    transport_class, transport_name
):
    """always_use_jwt_access=True must trigger exactly one
    with_always_use_jwt_access(True) call on the credentials; False must
    trigger none."""
    for jwt_enabled in (True, False):
        with mock.patch.object(
            service_account.Credentials, "with_always_use_jwt_access", create=True
        ) as use_jwt:
            sa_creds = service_account.Credentials(None, None, None)
            transport_class(credentials=sa_creds, always_use_jwt_access=jwt_enabled)
            if jwt_enabled:
                use_jwt.assert_called_once_with(True)
            else:
                use_jwt.assert_not_called()
@pytest.mark.parametrize(
    "client_class",
    [VideoIntelligenceServiceClient, VideoIntelligenceServiceAsyncClient,],
)
def test_video_intelligence_service_client_from_service_account_file(client_class):
    """Both file-based factory constructors (file and its json alias) must
    install the loaded credentials and target the default API endpoint."""
    anon_creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_file"
    ) as file_factory:
        file_factory.return_value = anon_creds
        for make_client in (
            client_class.from_service_account_file,
            client_class.from_service_account_json,
        ):
            client = make_client("dummy/file/path.json")
            assert client.transport._credentials == anon_creds
            assert isinstance(client, client_class)

    assert client.transport._host == "videointelligence.googleapis.com:443"
def test_video_intelligence_service_client_get_transport_class():
    """get_transport_class with no argument returns a supported transport;
    the name "grpc" maps to the synchronous gRPC transport."""
    default_transport = VideoIntelligenceServiceClient.get_transport_class()
    supported = [
        transports.VideoIntelligenceServiceGrpcTransport,
    ]
    assert default_transport in supported

    named_transport = VideoIntelligenceServiceClient.get_transport_class("grpc")
    assert named_transport == transports.VideoIntelligenceServiceGrpcTransport
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (
            VideoIntelligenceServiceClient,
            transports.VideoIntelligenceServiceGrpcTransport,
            "grpc",
        ),
        (
            VideoIntelligenceServiceAsyncClient,
            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
@mock.patch.object(
    VideoIntelligenceServiceClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(VideoIntelligenceServiceClient),
)
@mock.patch.object(
    VideoIntelligenceServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(VideoIntelligenceServiceAsyncClient),
)
def test_video_intelligence_service_client_client_options(
    client_class, transport_class, transport_name
):
    """Verify how client_options plus the GOOGLE_API_USE_MTLS_ENDPOINT and
    GOOGLE_API_USE_CLIENT_CERTIFICATE environment variables determine the
    endpoint and quota project the transport is constructed with."""
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(
        VideoIntelligenceServiceClient, "get_transport_class"
    ) as gtc:
        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
        client = client_class(transport=transport)
        gtc.assert_not_called()

    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(
        VideoIntelligenceServiceClient, "get_transport_class"
    ) as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()

    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )

    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class()

    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
        with pytest.raises(ValueError):
            client = client_class()

    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (
            VideoIntelligenceServiceClient,
            transports.VideoIntelligenceServiceGrpcTransport,
            "grpc",
            "true",
        ),
        (
            VideoIntelligenceServiceAsyncClient,
            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            "true",
        ),
        (
            VideoIntelligenceServiceClient,
            transports.VideoIntelligenceServiceGrpcTransport,
            "grpc",
            "false",
        ),
        (
            VideoIntelligenceServiceAsyncClient,
            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            "false",
        ),
    ],
)
@mock.patch.object(
    VideoIntelligenceServiceClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(VideoIntelligenceServiceClient),
)
@mock.patch.object(
    VideoIntelligenceServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(VideoIntelligenceServiceAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_video_intelligence_service_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """Verify endpoint/cert auto-switching with GOOGLE_API_USE_MTLS_ENDPOINT=auto.

    Three scenarios are exercised: an explicit client_cert_source option, an
    ADC-provided default client cert, and no cert at all.  In each case the
    mTLS endpoint and the cert source must be used only when
    GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" AND a cert is available.
    """
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name, client_options=options)
            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=True,
            ):
                with mock.patch(
                    "google.auth.transport.mtls.default_client_cert_source",
                    return_value=client_cert_source_callback,
                ):
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback
                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                    )
    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=False,
            ):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (
            VideoIntelligenceServiceClient,
            transports.VideoIntelligenceServiceGrpcTransport,
            "grpc",
        ),
        (
            VideoIntelligenceServiceAsyncClient,
            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_video_intelligence_service_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """Scopes given via ClientOptions must be forwarded to the transport."""
    # Check the case scopes are provided.
    options = client_options.ClientOptions(scopes=["1", "2"],)
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (
            VideoIntelligenceServiceClient,
            transports.VideoIntelligenceServiceGrpcTransport,
            "grpc",
        ),
        (
            VideoIntelligenceServiceAsyncClient,
            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_video_intelligence_service_client_client_options_credentials_file(
    client_class, transport_class, transport_name
):
    """A credentials_file in ClientOptions must reach the transport untouched."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(credentials_file="credentials.json")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
def test_video_intelligence_service_client_client_options_from_dict():
    """client_options may be a plain dict; api_endpoint must be honored."""
    with mock.patch(
        "google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceGrpcTransport.__init__"
    ) as grpc_transport:
        grpc_transport.return_value = None
        client = VideoIntelligenceServiceClient(
            client_options={"api_endpoint": "squid.clam.whelk"}
        )
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
def test_annotate_video(
    transport: str = "grpc", request_type=video_intelligence.AnnotateVideoRequest
):
    """annotate_video must invoke the gRPC stub once and return an LRO future."""
    client = VideoIntelligenceServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.annotate_video), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.annotate_video(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == video_intelligence.AnnotateVideoRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
def test_annotate_video_from_dict():
    """Re-run the gRPC annotate_video test with a dict-typed request."""
    test_annotate_video(request_type=dict)
def test_annotate_video_empty_call():
    """Calling annotate_video with no arguments must send an empty request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = VideoIntelligenceServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.annotate_video), "__call__") as call:
        client.annotate_video()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == video_intelligence.AnnotateVideoRequest()
@pytest.mark.asyncio
async def test_annotate_video_async(
    transport: str = "grpc_asyncio",
    request_type=video_intelligence.AnnotateVideoRequest,
):
    """annotate_video on the async client must hit the stub once and return a future.

    Mirrors test_annotate_video for the asyncio transport.
    """
    client = VideoIntelligenceServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.annotate_video), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.annotate_video(request)
        # Establish that the underlying gRPC stub method was called exactly
        # once.  (Was a bare truthiness check, which would also pass if the
        # stub were invoked multiple times; `== 1` matches the sync test.)
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == video_intelligence.AnnotateVideoRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_annotate_video_async_from_dict():
    """Re-run the asyncio annotate_video test with a dict-typed request."""
    await test_annotate_video_async(request_type=dict)
def test_annotate_video_flattened():
    """Flattened keyword arguments must populate the corresponding request fields."""
    client = VideoIntelligenceServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.annotate_video), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.annotate_video(
            input_uri="input_uri_value",
            features=[video_intelligence.Feature.LABEL_DETECTION],
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].input_uri
        mock_val = "input_uri_value"
        assert arg == mock_val
        arg = args[0].features
        mock_val = [video_intelligence.Feature.LABEL_DETECTION]
        assert arg == mock_val
def test_annotate_video_flattened_error():
    """Passing both a request object and flattened fields must raise ValueError."""
    client = VideoIntelligenceServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        client.annotate_video(
            video_intelligence.AnnotateVideoRequest(),
            input_uri="input_uri_value",
            features=[video_intelligence.Feature.LABEL_DETECTION],
        )
@pytest.mark.asyncio
async def test_annotate_video_flattened_async():
    """Flattened kwargs on the async client must populate the request fields."""
    client = VideoIntelligenceServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.annotate_video), "__call__") as call:
        # Designate an appropriate return value for the call.  (A previous
        # assignment of a plain Operation was immediately overwritten by the
        # FakeUnaryUnaryCall below; that dead assignment has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.annotate_video(
            input_uri="input_uri_value",
            features=[video_intelligence.Feature.LABEL_DETECTION],
        )
        # Establish that the underlying call was made exactly once with the
        # expected request object values (`== 1` matches the sync test).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].input_uri
        mock_val = "input_uri_value"
        assert arg == mock_val
        arg = args[0].features
        mock_val = [video_intelligence.Feature.LABEL_DETECTION]
        assert arg == mock_val
@pytest.mark.asyncio
async def test_annotate_video_flattened_error_async():
    """Async variant: request object plus flattened fields must raise ValueError."""
    client = VideoIntelligenceServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Attempting to call a method with both a request object and flattened
    # fields is an error.
    with pytest.raises(ValueError):
        await client.annotate_video(
            video_intelligence.AnnotateVideoRequest(),
            input_uri="input_uri_value",
            features=[video_intelligence.Feature.LABEL_DETECTION],
        )
def test_credentials_transport_error():
    """Combining a transport instance with creds/creds-file/scopes must fail."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.VideoIntelligenceServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = VideoIntelligenceServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
        )
    # It is an error to provide a credentials file and a transport instance.
    transport = transports.VideoIntelligenceServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = VideoIntelligenceServiceClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )
    # It is an error to provide scopes and a transport instance.
    transport = transports.VideoIntelligenceServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = VideoIntelligenceServiceClient(
            client_options={"scopes": ["1", "2"]}, transport=transport,
        )
def test_transport_instance():
    """A client built around a pre-constructed transport exposes it unchanged."""
    custom_transport = transports.VideoIntelligenceServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    wrapped = VideoIntelligenceServiceClient(transport=custom_transport)
    # The client must hold the exact same transport object, not a copy.
    assert wrapped.transport is custom_transport
def test_transport_get_channel():
    """Directly constructed gRPC transports expose a usable grpc_channel."""
    for transport_cls in (
        transports.VideoIntelligenceServiceGrpcTransport,
        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
    ):
        built = transport_cls(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        # The channel property must yield a truthy (live) channel object.
        assert built.grpc_channel
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.VideoIntelligenceServiceGrpcTransport,
        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
    ],
)
def test_transport_adc(transport_class):
    """Transports with no explicit credentials must fall back to ADC."""
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, "default") as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()
def test_transport_grpc_default():
    """With no transport argument, the client must default to plain gRPC."""
    client = VideoIntelligenceServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    default_transport = client.transport
    assert isinstance(
        default_transport, transports.VideoIntelligenceServiceGrpcTransport
    )
def test_video_intelligence_service_base_transport_error():
    """Supplying both credentials and credentials_file must raise."""
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.VideoIntelligenceServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
def test_video_intelligence_service_base_transport():
    """The abstract base transport must raise NotImplementedError everywhere."""
    # Instantiate the base transport.
    with mock.patch(
        "google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport.__init__"
    ) as Transport:
        Transport.return_value = None
        transport = transports.VideoIntelligenceServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )
    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = ("annotate_video",)
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())
    with pytest.raises(NotImplementedError):
        transport.close()
    # Additionally, the LRO client (a property) should
    # also raise NotImplementedError
    with pytest.raises(NotImplementedError):
        transport.operations_client
def test_video_intelligence_service_base_transport_with_credentials_file():
    """credentials_file must be loaded with the expected scopes and project."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.VideoIntelligenceServiceTransport(
            credentials_file="credentials.json", quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id="octopus",
        )
def test_video_intelligence_service_base_transport_with_adc():
    """The base transport must consult ADC when no credentials are supplied."""
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
        "google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.VideoIntelligenceServiceTransport()
        adc.assert_called_once()
def test_video_intelligence_service_auth_adc():
    """Client construction without credentials must call ADC with default scopes."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        VideoIntelligenceServiceClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id=None,
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.VideoIntelligenceServiceGrpcTransport,
        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
    ],
)
def test_video_intelligence_service_transport_auth_adc(transport_class):
    """Transport-level scopes/quota project must be forwarded to ADC."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id="octopus",
        )
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.VideoIntelligenceServiceGrpcTransport, grpc_helpers),
        (transports.VideoIntelligenceServiceGrpcAsyncIOTransport, grpc_helpers_async),
    ],
)
def test_video_intelligence_service_transport_create_channel(
    transport_class, grpc_helpers
):
    """Channel creation must receive the service host, scopes, and gRPC options."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        create_channel.assert_called_with(
            "videointelligence.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            scopes=["1", "2"],
            default_host="videointelligence.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.VideoIntelligenceServiceGrpcTransport,
        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
    ],
)
def test_video_intelligence_service_grpc_transport_client_cert_source_for_mtls(
    transport_class,
):
    """ssl_channel_credentials wins; otherwise client_cert_source_for_mtls is used."""
    cred = ga_credentials.AnonymousCredentials()
    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds,
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback,
            )
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert, private_key=expected_key
            )
def test_video_intelligence_service_host_no_port():
    """An api_endpoint with no port must have the default :443 appended."""
    endpoint_options = client_options.ClientOptions(
        api_endpoint="videointelligence.googleapis.com"
    )
    client = VideoIntelligenceServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=endpoint_options,
    )
    assert client.transport._host == "videointelligence.googleapis.com:443"
def test_video_intelligence_service_host_with_port():
    """An api_endpoint carrying an explicit port must be used verbatim."""
    endpoint_options = client_options.ClientOptions(
        api_endpoint="videointelligence.googleapis.com:8000"
    )
    client = VideoIntelligenceServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=endpoint_options,
    )
    assert client.transport._host == "videointelligence.googleapis.com:8000"
def test_video_intelligence_service_grpc_transport_channel():
    """A caller-supplied channel must be adopted verbatim by the gRPC transport."""
    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.VideoIntelligenceServiceGrpcTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Use identity (`is None`) rather than equality (`== None`) for the None
    # singleton, per PEP 8 (E711).
    assert transport._ssl_channel_credentials is None
def test_video_intelligence_service_grpc_asyncio_transport_channel():
    """A caller-supplied channel must be adopted verbatim by the asyncio transport."""
    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.VideoIntelligenceServiceGrpcAsyncIOTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Use identity (`is None`) rather than equality (`== None`) for the None
    # singleton, per PEP 8 (E711).
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.VideoIntelligenceServiceGrpcTransport,
        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
    ],
)
def test_video_intelligence_service_transport_channel_mtls_with_client_cert_source(
    transport_class,
):
    """Deprecated api_mtls_endpoint/client_cert_source must still build an mTLS channel."""
    with mock.patch(
        "grpc.ssl_channel_credentials", autospec=True
    ) as grpc_ssl_channel_cred:
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            cred = ga_credentials.AnonymousCredentials()
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, "default") as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()
            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.VideoIntelligenceServiceGrpcTransport,
        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
    ],
)
def test_video_intelligence_service_transport_channel_mtls_with_adc(transport_class):
    """Deprecated api_mtls_endpoint with ADC-sourced certs must build an mTLS channel."""
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()
            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
def test_video_intelligence_service_grpc_lro_client():
    """The gRPC transport must lazily expose a cached OperationsClient."""
    client = VideoIntelligenceServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    transport = client.transport
    # Ensure that we have a api-core operations client.
    assert isinstance(transport.operations_client, operations_v1.OperationsClient,)
    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client
def test_video_intelligence_service_grpc_lro_async_client():
    """The asyncio transport must lazily expose a cached OperationsAsyncClient."""
    client = VideoIntelligenceServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
    )
    transport = client.transport
    # Ensure that we have a api-core operations client.
    assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient,)
    # Ensure that subsequent calls to the property send the exact same object.
    assert transport.operations_client is transport.operations_client
def test_common_billing_account_path():
    """The billing-account path helper renders billingAccounts/<id>."""
    account = "squid"
    rendered = VideoIntelligenceServiceClient.common_billing_account_path(account)
    assert rendered == f"billingAccounts/{account}"
def test_parse_common_billing_account_path():
    """Parsing a rendered billing-account path recovers the original fields."""
    fields = {"billing_account": "clam"}
    rendered = VideoIntelligenceServiceClient.common_billing_account_path(**fields)
    # Round-trip: parse must invert the path construction.
    parsed = VideoIntelligenceServiceClient.parse_common_billing_account_path(rendered)
    assert parsed == fields
def test_common_folder_path():
    """The folder path helper renders folders/<id>."""
    folder_id = "whelk"
    rendered = VideoIntelligenceServiceClient.common_folder_path(folder_id)
    assert rendered == f"folders/{folder_id}"
def test_parse_common_folder_path():
    """Parsing a rendered folder path recovers the original fields."""
    fields = {"folder": "octopus"}
    rendered = VideoIntelligenceServiceClient.common_folder_path(**fields)
    # Round-trip: parse must invert the path construction.
    parsed = VideoIntelligenceServiceClient.parse_common_folder_path(rendered)
    assert parsed == fields
def test_common_organization_path():
    """The organization path helper renders organizations/<id>."""
    org_id = "oyster"
    rendered = VideoIntelligenceServiceClient.common_organization_path(org_id)
    assert rendered == f"organizations/{org_id}"
def test_parse_common_organization_path():
    """Parsing a rendered organization path recovers the original fields."""
    fields = {"organization": "nudibranch"}
    rendered = VideoIntelligenceServiceClient.common_organization_path(**fields)
    # Round-trip: parse must invert the path construction.
    parsed = VideoIntelligenceServiceClient.parse_common_organization_path(rendered)
    assert parsed == fields
def test_common_project_path():
    """The project path helper renders projects/<id>."""
    project_id = "cuttlefish"
    rendered = VideoIntelligenceServiceClient.common_project_path(project_id)
    assert rendered == f"projects/{project_id}"
def test_parse_common_project_path():
    """Parsing a rendered project path recovers the original fields."""
    fields = {"project": "mussel"}
    rendered = VideoIntelligenceServiceClient.common_project_path(**fields)
    # Round-trip: parse must invert the path construction.
    parsed = VideoIntelligenceServiceClient.parse_common_project_path(rendered)
    assert parsed == fields
def test_common_location_path():
    """The location path helper renders projects/<id>/locations/<loc>."""
    project_id, location_id = "winkle", "nautilus"
    rendered = VideoIntelligenceServiceClient.common_location_path(
        project_id, location_id
    )
    assert rendered == f"projects/{project_id}/locations/{location_id}"
def test_parse_common_location_path():
    """Parsing a rendered location path recovers the original fields."""
    fields = {"project": "scallop", "location": "abalone"}
    rendered = VideoIntelligenceServiceClient.common_location_path(**fields)
    # Round-trip: parse must invert the path construction.
    parsed = VideoIntelligenceServiceClient.parse_common_location_path(rendered)
    assert parsed == fields
def test_client_withDEFAULT_CLIENT_INFO():
    """A custom client_info must be passed through to _prep_wrapped_messages."""
    client_info = gapic_v1.client_info.ClientInfo()
    with mock.patch.object(
        transports.VideoIntelligenceServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        client = VideoIntelligenceServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
    with mock.patch.object(
        transports.VideoIntelligenceServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        transport_class = VideoIntelligenceServiceClient.get_transport_class()
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
@pytest.mark.asyncio
async def test_transport_close_async():
    """Exiting the async client context must close the gRPC channel exactly once."""
    client = VideoIntelligenceServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
    )
    with mock.patch.object(
        type(getattr(client.transport, "grpc_channel")), "close"
    ) as close:
        async with client:
            close.assert_not_called()
        close.assert_called_once()
def test_transport_close():
    """Closing the client must close the underlying transport channel once."""
    # Map each transport name to the attribute holding its channel.  (Using a
    # distinct local name also avoids shadowing the module-level `transports`.)
    channel_attr_by_transport = {
        "grpc": "_grpc_channel",
    }
    for transport_name, channel_attr in channel_attr_by_transport.items():
        client = VideoIntelligenceServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport_name,
        )
        channel_type = type(getattr(client.transport, channel_attr))
        with mock.patch.object(channel_type, "close") as close:
            with client:
                close.assert_not_called()
            close.assert_called_once()
def test_client_ctx():
    """Using the client as a context manager must close its transport on exit."""
    for transport_name in ("grpc",):
        client = VideoIntelligenceServiceClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport_name,
        )
        # The client delegates close() to its underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()
| 37.769291 | 154 | 0.694725 |
import os
import mock
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import operation_async
from google.api_core import operations_v1
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service import (
VideoIntelligenceServiceAsyncClient,
)
from google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service import (
VideoIntelligenceServiceClient,
)
from google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service import (
transports,
)
from google.cloud.videointelligence_v1p2beta1.types import video_intelligence
from google.longrunning import operations_pb2
from google.oauth2 import service_account
from google.protobuf import duration_pb2
import google.auth
def client_cert_source_callback():
    """Return a dummy (certificate, private key) byte pair for mTLS tests."""
    cert_bytes = b"cert bytes"
    key_bytes = b"key bytes"
    return cert_bytes, key_bytes
def modify_default_endpoint(client):
    """Return a patched default endpoint for *client*.

    When the client's DEFAULT_ENDPOINT points at "localhost" (a local test
    setup), substitute a fixed googleapis hostname so endpoint-resolution
    tests behave identically everywhere; otherwise keep the client's own
    default endpoint.
    """
    endpoint = client.DEFAULT_ENDPOINT
    if "localhost" in endpoint:
        return "foo.googleapis.com"
    return endpoint
def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint maps googleapis hosts to their mTLS variants."""
    convert = VideoIntelligenceServiceClient._get_default_mtls_endpoint
    # None passes through untouched.
    assert convert(None) is None
    # Each input endpoint paired with the mTLS endpoint it must resolve to;
    # already-mTLS endpoints are idempotent and non-googleapis hosts are kept.
    expectations = {
        "example.googleapis.com": "example.mtls.googleapis.com",
        "example.mtls.googleapis.com": "example.mtls.googleapis.com",
        "example.sandbox.googleapis.com": "example.mtls.sandbox.googleapis.com",
        "example.mtls.sandbox.googleapis.com": "example.mtls.sandbox.googleapis.com",
        "api.example.com": "api.example.com",
    }
    for endpoint, expected in expectations.items():
        assert convert(endpoint) == expected
@pytest.mark.parametrize(
    "client_class",
    [VideoIntelligenceServiceClient, VideoIntelligenceServiceAsyncClient,],
)
def test_video_intelligence_service_client_from_service_account_info(client_class):
    """from_service_account_info installs the parsed credentials on the transport."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_info"
    ) as factory:
        # Stub out the service-account factory so no real key material is needed.
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)
        assert client.transport._host == "videointelligence.googleapis.com:443"
@pytest.mark.parametrize(
    "transport_class,transport_name",
    [
        (transports.VideoIntelligenceServiceGrpcTransport, "grpc"),
        (transports.VideoIntelligenceServiceGrpcAsyncIOTransport, "grpc_asyncio"),
    ],
)
def test_video_intelligence_service_client_service_account_always_use_jwt(
    transport_class, transport_name
):
    """always_use_jwt_access=True (and only True) enables JWT access on the creds."""
    with mock.patch.object(
        service_account.Credentials, "with_always_use_jwt_access", create=True
    ) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)
    # With the flag off, the credentials object must be left untouched.
    with mock.patch.object(
        service_account.Credentials, "with_always_use_jwt_access", create=True
    ) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()
@pytest.mark.parametrize(
    "client_class",
    [VideoIntelligenceServiceClient, VideoIntelligenceServiceAsyncClient,],
)
def test_video_intelligence_service_client_from_service_account_file(client_class):
    """from_service_account_file and its _json alias both wire up the credentials."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(
        service_account.Credentials, "from_service_account_file"
    ) as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)
        # from_service_account_json is an alias with identical behavior.
        client = client_class.from_service_account_json("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)
        assert client.transport._host == "videointelligence.googleapis.com:443"
def test_video_intelligence_service_client_get_transport_class():
    """get_transport_class returns the gRPC transport by default and by name."""
    default_cls = VideoIntelligenceServiceClient.get_transport_class()
    # The default must be one of the synchronously available transports.
    assert default_cls in [transports.VideoIntelligenceServiceGrpcTransport]
    named_cls = VideoIntelligenceServiceClient.get_transport_class("grpc")
    assert named_cls == transports.VideoIntelligenceServiceGrpcTransport
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (
            VideoIntelligenceServiceClient,
            transports.VideoIntelligenceServiceGrpcTransport,
            "grpc",
        ),
        (
            VideoIntelligenceServiceAsyncClient,
            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
@mock.patch.object(
    VideoIntelligenceServiceClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(VideoIntelligenceServiceClient),
)
@mock.patch.object(
    VideoIntelligenceServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(VideoIntelligenceServiceAsyncClient),
)
def test_video_intelligence_service_client_client_options(
    client_class, transport_class, transport_name
):
    """ClientOptions and GOOGLE_API_USE_MTLS_ENDPOINT drive endpoint/transport setup."""
    # If a transport instance is provided, no transport lookup should happen.
    with mock.patch.object(
        VideoIntelligenceServiceClient, "get_transport_class"
    ) as gtc:
        transport = transport_class(credentials=ga_credentials.AnonymousCredentials())
        client = client_class(transport=transport)
        gtc.assert_not_called()
    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(
        VideoIntelligenceServiceClient, "get_transport_class"
    ) as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()
    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name)
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class()
    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}
    ):
        with pytest.raises(ValueError):
            client = client_class()
    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name,use_client_cert_env",
    [
        (
            VideoIntelligenceServiceClient,
            transports.VideoIntelligenceServiceGrpcTransport,
            "grpc",
            "true",
        ),
        (
            VideoIntelligenceServiceAsyncClient,
            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            "true",
        ),
        (
            VideoIntelligenceServiceClient,
            transports.VideoIntelligenceServiceGrpcTransport,
            "grpc",
            "false",
        ),
        (
            VideoIntelligenceServiceAsyncClient,
            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
            "false",
        ),
    ],
)
@mock.patch.object(
    VideoIntelligenceServiceClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(VideoIntelligenceServiceClient),
)
@mock.patch.object(
    VideoIntelligenceServiceAsyncClient,
    "DEFAULT_ENDPOINT",
    modify_default_endpoint(VideoIntelligenceServiceAsyncClient),
)
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_video_intelligence_service_client_mtls_env_auto(
    client_class, transport_class, transport_name, use_client_cert_env
):
    """With MTLS_ENDPOINT=auto, endpoint/cert choice follows cert availability."""
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        options = client_options.ClientOptions(
            client_cert_source=client_cert_source_callback
        )
        with mock.patch.object(transport_class, "__init__") as patched:
            patched.return_value = None
            client = client_class(transport=transport_name, client_options=options)
            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=True,
            ):
                with mock.patch(
                    "google.auth.transport.mtls.default_client_cert_source",
                    return_value=client_cert_source_callback,
                ):
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback
                    patched.return_value = None
                    client = client_class(transport=transport_name)
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                    )
    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(
        os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}
    ):
        with mock.patch.object(transport_class, "__init__") as patched:
            with mock.patch(
                "google.auth.transport.mtls.has_default_client_cert_source",
                return_value=False,
            ):
                patched.return_value = None
                client = client_class(transport=transport_name)
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (
            VideoIntelligenceServiceClient,
            transports.VideoIntelligenceServiceGrpcTransport,
            "grpc",
        ),
        (
            VideoIntelligenceServiceAsyncClient,
            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_video_intelligence_service_client_client_options_scopes(
    client_class, transport_class, transport_name
):
    """ClientOptions.scopes is forwarded verbatim to the transport constructor."""
    # Check the case scopes are provided.
    options = client_options.ClientOptions(scopes=["1", "2"],)
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize(
    "client_class,transport_class,transport_name",
    [
        (
            VideoIntelligenceServiceClient,
            transports.VideoIntelligenceServiceGrpcTransport,
            "grpc",
        ),
        (
            VideoIntelligenceServiceAsyncClient,
            transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
            "grpc_asyncio",
        ),
    ],
)
def test_video_intelligence_service_client_client_options_credentials_file(
    client_class, transport_class, transport_name
):
    """ClientOptions.credentials_file is forwarded to the transport constructor."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(credentials_file="credentials.json")
    with mock.patch.object(transport_class, "__init__") as patched:
        patched.return_value = None
        client = client_class(transport=transport_name, client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
def test_video_intelligence_service_client_client_options_from_dict():
    """client_options may be a plain dict; api_endpoint reaches the transport."""
    with mock.patch(
        "google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceGrpcTransport.__init__"
    ) as grpc_transport:
        grpc_transport.return_value = None
        client = VideoIntelligenceServiceClient(
            client_options={"api_endpoint": "squid.clam.whelk"}
        )
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
def test_annotate_video(
    transport: str = "grpc", request_type=video_intelligence.AnnotateVideoRequest
):
    """annotate_video forwards the request to the stub and wraps the LRO result."""
    client = VideoIntelligenceServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.annotate_video), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/spam")
        response = client.annotate_video(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == video_intelligence.AnnotateVideoRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
def test_annotate_video_from_dict():
    """annotate_video also accepts the request supplied as a plain dict."""
    test_annotate_video(request_type=dict)
def test_annotate_video_empty_call():
    """Calling annotate_video with no arguments sends a default empty request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = VideoIntelligenceServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.annotate_video), "__call__") as call:
        client.annotate_video()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == video_intelligence.AnnotateVideoRequest()
@pytest.mark.asyncio
async def test_annotate_video_async(
    transport: str = "grpc_asyncio",
    request_type=video_intelligence.AnnotateVideoRequest,
):
    """Async annotate_video forwards the request and wraps the LRO result."""
    client = VideoIntelligenceServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.annotate_video), "__call__") as call:
        # Designate an appropriate return value for the call.  The asyncio
        # stub must return an awaitable, hence the FakeUnaryUnaryCall wrapper.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        response = await client.annotate_video(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == video_intelligence.AnnotateVideoRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_annotate_video_async_from_dict():
    """Async annotate_video also accepts the request supplied as a plain dict."""
    await test_annotate_video_async(request_type=dict)
def test_annotate_video_flattened():
    """Flattened keyword arguments are folded into the outgoing request message."""
    client = VideoIntelligenceServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.annotate_video), "__call__") as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name="operations/op")
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        client.annotate_video(
            input_uri="input_uri_value",
            features=[video_intelligence.Feature.LABEL_DETECTION],
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        arg = args[0].input_uri
        mock_val = "input_uri_value"
        assert arg == mock_val
        arg = args[0].features
        mock_val = [video_intelligence.Feature.LABEL_DETECTION]
        assert arg == mock_val
def test_annotate_video_flattened_error():
    """Mixing a request object with flattened fields must raise ValueError."""
    svc = VideoIntelligenceServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Supplying both an explicit request message and flattened keyword
    # fields is ambiguous, so the client refuses the call.
    with pytest.raises(ValueError):
        svc.annotate_video(
            video_intelligence.AnnotateVideoRequest(),
            input_uri="input_uri_value",
            features=[video_intelligence.Feature.LABEL_DETECTION],
        )
@pytest.mark.asyncio
async def test_annotate_video_flattened_async():
    """Flattened keyword arguments are folded into the request message (async)."""
    client = VideoIntelligenceServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(type(client.transport.annotate_video), "__call__") as call:
        # Designate an appropriate return value for the call.  (A previous
        # plain-Operation assignment here was dead code — it was immediately
        # overwritten by the awaitable fake below, which is what an asyncio
        # stub must return — so it has been removed.)
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name="operations/spam")
        )
        # Call the method with a truthy value for each flattened field,
        # using the keyword arguments to the method.
        response = await client.annotate_video(
            input_uri="input_uri_value",
            features=[video_intelligence.Feature.LABEL_DETECTION],
        )
        # Establish that the underlying call was made with the expected
        # request object values.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        arg = args[0].input_uri
        mock_val = "input_uri_value"
        assert arg == mock_val
        arg = args[0].features
        mock_val = [video_intelligence.Feature.LABEL_DETECTION]
        assert arg == mock_val
@pytest.mark.asyncio
async def test_annotate_video_flattened_error_async():
    """Mixing a request object with flattened fields must raise (async client)."""
    svc = VideoIntelligenceServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Supplying both an explicit request message and flattened keyword
    # fields is ambiguous, so the client refuses the call.
    with pytest.raises(ValueError):
        await svc.annotate_video(
            video_intelligence.AnnotateVideoRequest(),
            input_uri="input_uri_value",
            features=[video_intelligence.Feature.LABEL_DETECTION],
        )
def test_credentials_transport_error():
    """A transport instance cannot be combined with creds/creds-file/scopes."""
    # It is an error to provide credentials and a transport instance.
    transport = transports.VideoIntelligenceServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = VideoIntelligenceServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport,
        )
    # It is an error to provide a credentials file and a transport instance.
    transport = transports.VideoIntelligenceServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = VideoIntelligenceServiceClient(
            client_options={"credentials_file": "credentials.json"},
            transport=transport,
        )
    # It is an error to provide scopes and a transport instance.
    transport = transports.VideoIntelligenceServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    with pytest.raises(ValueError):
        client = VideoIntelligenceServiceClient(
            client_options={"scopes": ["1", "2"]}, transport=transport,
        )
def test_transport_instance():
    """A pre-built transport instance is adopted verbatim by the client."""
    custom_transport = transports.VideoIntelligenceServiceGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    wrapped = VideoIntelligenceServiceClient(transport=custom_transport)
    # Identity, not equality: the client must use the exact instance given.
    assert wrapped.transport is custom_transport
def test_transport_get_channel():
    """Both sync and async gRPC transports expose a usable channel."""
    for transport_cls in (
        transports.VideoIntelligenceServiceGrpcTransport,
        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
    ):
        transport = transport_cls(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        # The lazily-created channel must be truthy (i.e. actually created).
        assert transport.grpc_channel
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.VideoIntelligenceServiceGrpcTransport,
        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
    ],
)
def test_transport_adc(transport_class):
    """Transports fall back to Application Default Credentials when none given."""
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, "default") as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()
def test_transport_grpc_default():
    """When no transport is specified, the synchronous gRPC transport is used."""
    default_client = VideoIntelligenceServiceClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert isinstance(
        default_client.transport,
        transports.VideoIntelligenceServiceGrpcTransport,
    )
def test_video_intelligence_service_base_transport_error():
    """Supplying both a credentials object and a credentials file must raise."""
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transports.VideoIntelligenceServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json",
        )
def test_video_intelligence_service_base_transport():
    """Every abstract method/property on the base transport raises NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch(
        "google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport.__init__"
    ) as Transport:
        Transport.return_value = None
        transport = transports.VideoIntelligenceServiceTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )
    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = ("annotate_video",)
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())
    with pytest.raises(NotImplementedError):
        transport.close()
    # Additionally, the LRO client (a property) should
    # also raise NotImplementedError
    with pytest.raises(NotImplementedError):
        transport.operations_client
def test_video_intelligence_service_base_transport_with_credentials_file():
    """A credentials file is loaded with the expected scopes and quota project."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(
        google.auth, "load_credentials_from_file", autospec=True
    ) as load_creds, mock.patch(
        "google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.VideoIntelligenceServiceTransport(
            credentials_file="credentials.json", quota_project_id="octopus",
        )
        load_creds.assert_called_once_with(
            "credentials.json",
            scopes=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id="octopus",
        )
def test_video_intelligence_service_base_transport_with_adc():
    """With neither credentials nor a file, the base transport consults ADC."""
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch(
        "google.cloud.videointelligence_v1p2beta1.services.video_intelligence_service.transports.VideoIntelligenceServiceTransport._prep_wrapped_messages"
    ) as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.VideoIntelligenceServiceTransport()
        adc.assert_called_once()
def test_video_intelligence_service_auth_adc():
    """The client resolves ADC with the cloud-platform default scope."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        VideoIntelligenceServiceClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id=None,
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.VideoIntelligenceServiceGrpcTransport,
        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
    ],
)
def test_video_intelligence_service_transport_auth_adc(transport_class):
    """Transports resolve ADC, forwarding explicit scopes and quota project."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            quota_project_id="octopus",
        )
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.VideoIntelligenceServiceGrpcTransport, grpc_helpers),
        (transports.VideoIntelligenceServiceGrpcAsyncIOTransport, grpc_helpers_async),
    ],
)
def test_video_intelligence_service_transport_create_channel(
    transport_class, grpc_helpers
):
    """create_channel receives the service host, ADC creds and channel options."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(
        google.auth, "default", autospec=True
    ) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        create_channel.assert_called_with(
            "videointelligence.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=("https://www.googleapis.com/auth/cloud-platform",),
            scopes=["1", "2"],
            default_host="videointelligence.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.VideoIntelligenceServiceGrpcTransport,
        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
    ],
)
def test_video_intelligence_service_grpc_transport_client_cert_source_for_mtls(
    transport_class,
):
    """ssl_channel_credentials wins; otherwise client_cert_source_for_mtls is used."""
    cred = ga_credentials.AnonymousCredentials()
    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds,
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback,
            )
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert, private_key=expected_key
            )
def test_video_intelligence_service_host_no_port():
    """An api_endpoint without a port gets the default :443 appended."""
    options = client_options.ClientOptions(
        api_endpoint="videointelligence.googleapis.com"
    )
    svc = VideoIntelligenceServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), client_options=options,
    )
    assert svc.transport._host == "videointelligence.googleapis.com:443"
def test_video_intelligence_service_host_with_port():
    """An explicit port in api_endpoint is preserved verbatim."""
    options = client_options.ClientOptions(
        api_endpoint="videointelligence.googleapis.com:8000"
    )
    svc = VideoIntelligenceServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), client_options=options,
    )
    assert svc.transport._host == "videointelligence.googleapis.com:8000"
def test_video_intelligence_service_grpc_transport_channel():
    """A caller-supplied gRPC channel is adopted as-is by the sync transport."""
    channel = grpc.secure_channel("http://localhost/", grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.VideoIntelligenceServiceGrpcTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Identity comparison with None (`is None`), not `== None` (PEP 8 / E711).
    assert transport._ssl_channel_credentials is None
def test_video_intelligence_service_grpc_asyncio_transport_channel():
    """A caller-supplied gRPC channel is adopted as-is by the asyncio transport."""
    channel = aio.secure_channel("http://localhost/", grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.VideoIntelligenceServiceGrpcAsyncIOTransport(
        host="squid.clam.whelk", channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # Identity comparison with None (`is None`), not `== None` (PEP 8 / E711).
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.VideoIntelligenceServiceGrpcTransport,
        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
    ],
)
def test_video_intelligence_service_transport_channel_mtls_with_client_cert_source(
    transport_class,
):
    """Deprecated api_mtls_endpoint + client_cert_source still build an mTLS channel."""
    with mock.patch(
        "grpc.ssl_channel_credentials", autospec=True
    ) as grpc_ssl_channel_cred:
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            cred = ga_credentials.AnonymousCredentials()
            # The deprecated arguments must still work, but emit a warning.
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, "default") as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()
            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.VideoIntelligenceServiceGrpcTransport,
        transports.VideoIntelligenceServiceGrpcAsyncIOTransport,
    ],
)
def test_video_intelligence_service_transport_channel_mtls_with_adc(transport_class):
    """Deprecated api_mtls_endpoint falls back to ADC-derived SSL credentials."""
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(
            transport_class, "create_channel"
        ) as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()
            # client_cert_source=None forces the ADC SslCredentials path above.
            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
def test_video_intelligence_service_grpc_lro_client():
    """The gRPC transport must expose a cached api-core operations client."""
    client = VideoIntelligenceServiceClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc",
    )
    transport = client.transport
    ops_client = transport.operations_client
    # The property must hand back an api-core operations client...
    assert isinstance(ops_client, operations_v1.OperationsClient,)
    # ...and the very same instance on every subsequent access.
    assert transport.operations_client is ops_client
def test_video_intelligence_service_grpc_lro_async_client():
    """The async gRPC transport must expose a cached async operations client."""
    client = VideoIntelligenceServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
    )
    transport = client.transport
    ops_client = transport.operations_client
    # The property must hand back an async api-core operations client...
    assert isinstance(ops_client, operations_v1.OperationsAsyncClient,)
    # ...and the very same instance on every subsequent access.
    assert transport.operations_client is ops_client
def test_common_billing_account_path():
    """The billing-account helper renders the canonical resource name."""
    billing_account = "squid"
    rendered = VideoIntelligenceServiceClient.common_billing_account_path(
        billing_account
    )
    assert rendered == "billingAccounts/{billing_account}".format(
        billing_account=billing_account,
    )
def test_parse_common_billing_account_path():
    """Round-trip: building then parsing a path recovers the original fields."""
    fields = {
        "billing_account": "clam",
    }
    path = VideoIntelligenceServiceClient.common_billing_account_path(**fields)
    # Parsing must invert the path construction.
    assert VideoIntelligenceServiceClient.parse_common_billing_account_path(path) == fields
def test_common_folder_path():
    """The folder helper renders the canonical resource name."""
    folder = "whelk"
    rendered = VideoIntelligenceServiceClient.common_folder_path(folder)
    assert rendered == "folders/{folder}".format(folder=folder,)
def test_parse_common_folder_path():
    """Round-trip: building then parsing a folder path recovers the fields."""
    fields = {
        "folder": "octopus",
    }
    path = VideoIntelligenceServiceClient.common_folder_path(**fields)
    # Parsing must invert the path construction.
    assert VideoIntelligenceServiceClient.parse_common_folder_path(path) == fields
def test_common_organization_path():
    """The organization helper renders the canonical resource name."""
    organization = "oyster"
    rendered = VideoIntelligenceServiceClient.common_organization_path(organization)
    assert rendered == "organizations/{organization}".format(organization=organization,)
def test_parse_common_organization_path():
    """Round-trip: building then parsing an organization path recovers the fields."""
    fields = {
        "organization": "nudibranch",
    }
    path = VideoIntelligenceServiceClient.common_organization_path(**fields)
    # Parsing must invert the path construction.
    assert VideoIntelligenceServiceClient.parse_common_organization_path(path) == fields
def test_common_project_path():
    """The project helper renders the canonical resource name."""
    project = "cuttlefish"
    rendered = VideoIntelligenceServiceClient.common_project_path(project)
    assert rendered == "projects/{project}".format(project=project,)
def test_parse_common_project_path():
    """Round-trip: building then parsing a project path recovers the fields."""
    fields = {
        "project": "mussel",
    }
    path = VideoIntelligenceServiceClient.common_project_path(**fields)
    # Parsing must invert the path construction.
    assert VideoIntelligenceServiceClient.parse_common_project_path(path) == fields
def test_common_location_path():
    """The location helper renders the canonical project/location name."""
    project = "winkle"
    location = "nautilus"
    rendered = VideoIntelligenceServiceClient.common_location_path(project, location)
    assert rendered == "projects/{project}/locations/{location}".format(
        project=project, location=location,
    )
def test_parse_common_location_path():
    """Round-trip: building then parsing a location path recovers the fields."""
    fields = {
        "project": "scallop",
        "location": "abalone",
    }
    path = VideoIntelligenceServiceClient.common_location_path(**fields)
    # Parsing must invert the path construction.
    assert VideoIntelligenceServiceClient.parse_common_location_path(path) == fields
def test_client_withDEFAULT_CLIENT_INFO():
    """client_info must be forwarded to _prep_wrapped_messages whether the
    client or the transport is constructed directly."""
    client_info = gapic_v1.client_info.ClientInfo()

    # Constructing a client preps wrapped messages with the supplied info.
    with mock.patch.object(
        transports.VideoIntelligenceServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        VideoIntelligenceServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    # Constructing the transport directly does the same.
    with mock.patch.object(
        transports.VideoIntelligenceServiceTransport, "_prep_wrapped_messages"
    ) as prep:
        transport_class = VideoIntelligenceServiceClient.get_transport_class()
        transport_class(
            credentials=ga_credentials.AnonymousCredentials(), client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
@pytest.mark.asyncio
async def test_transport_close_async():
    """Exiting the async client context manager must close the grpc channel
    exactly once, and never before exit."""
    client = VideoIntelligenceServiceAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(), transport="grpc_asyncio",
    )
    channel_type = type(client.transport.grpc_channel)
    with mock.patch.object(channel_type, "close") as close:
        async with client:
            # Entering the context must not close the channel.
            close.assert_not_called()
        close.assert_called_once()
def test_transport_close():
    """Exiting the sync client context manager must close the underlying
    channel exactly once, and never before exit.

    Fix: the original bound a local dict named ``transports``, shadowing the
    imported ``transports`` module inside this function; the local is renamed.
    """
    # Maps transport name -> attribute holding the channel to watch for close().
    close_names = {
        "grpc": "_grpc_channel",
    }
    for transport_name, close_name in close_names.items():
        client = VideoIntelligenceServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport_name
        )
        with mock.patch.object(
            type(getattr(client.transport, close_name)), "close"
        ) as close:
            with client:
                # Entering the context must not close the channel.
                close.assert_not_called()
            close.assert_called_once()
def test_client_ctx():
    """The client context manager must delegate close() to its transport.

    Fix: the original bound a local list named ``transports``, shadowing the
    imported ``transports`` module inside this function; the local is renamed.
    """
    transport_names = [
        "grpc",
    ]
    for transport_name in transport_names:
        client = VideoIntelligenceServiceClient(
            credentials=ga_credentials.AnonymousCredentials(), transport=transport_name
        )
        # Test client calls underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()
# NOTE(review): removed trailing dataset-export artifacts ("true | true",
# "Subsets and Splits", "No community queries yet", ...) — they were not
# valid Python and did not belong to this test module.